mirror of https://github.com/sbt/sbt.git
Format in-sourced util modules
This commit is contained in:
parent
9494967e05
commit
dc2d4d613f
|
|
@ -14,11 +14,14 @@ trait AList[K[L[x]]] {
|
|||
def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A
|
||||
|
||||
def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil)
|
||||
|
||||
def apply[M[_], C](value: K[M], f: K[Id] => C)(implicit a: Applicative[M]): M[C] =
|
||||
a.map(f, traverse[M, M, Id](value, idK[M])(a))
|
||||
}
|
||||
|
||||
object AList {
|
||||
type Empty = AList[({ type l[L[x]] = Unit })#l]
|
||||
|
||||
/** AList for Unit, which represents a sequence that is always empty.*/
|
||||
val empty: Empty = new Empty {
|
||||
def transform[M[_], N[_]](in: Unit, f: M ~> N) = ()
|
||||
|
|
@ -28,21 +31,23 @@ object AList {
|
|||
}
|
||||
|
||||
type SeqList[T] = AList[({ type l[L[x]] = List[L[T]] })#l]
|
||||
|
||||
/** AList for a homogeneous sequence. */
|
||||
def seq[T]: SeqList[T] = new SeqList[T] {
|
||||
def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T])
|
||||
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t))
|
||||
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] =
|
||||
{
|
||||
def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
|
||||
in match {
|
||||
case Nil => ap.pure(g(Nil))
|
||||
case x :: xs =>
|
||||
val h = (ts: List[T]) => (t: T) => g(t :: ts)
|
||||
ap.apply(loop(xs, h), x)
|
||||
}
|
||||
loop(s, f)
|
||||
}
|
||||
|
||||
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = {
|
||||
def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
|
||||
in match {
|
||||
case Nil => ap.pure(g(Nil))
|
||||
case x :: xs =>
|
||||
val h = (ts: List[T]) => (t: T) => g(t :: ts)
|
||||
ap.apply(loop(xs, h), x)
|
||||
}
|
||||
loop(s, f)
|
||||
}
|
||||
|
||||
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ???
|
||||
}
|
||||
|
||||
|
|
@ -55,8 +60,9 @@ object AList {
|
|||
override def toList[M[_]](k: KL[M]) = k.toList
|
||||
}
|
||||
|
||||
/** AList for a single value. */
|
||||
type Single[A] = AList[({ type l[L[x]] = L[A] })#l]
|
||||
|
||||
/** AList for a single value. */
|
||||
def single[A]: Single[A] = new Single[A] {
|
||||
def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a)
|
||||
def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init)
|
||||
|
|
@ -64,17 +70,18 @@ object AList {
|
|||
}
|
||||
|
||||
type ASplit[K[L[x]], B[x]] = AList[({ type l[L[x]] = K[(L ∙ B)#l] })#l]
|
||||
|
||||
/** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`*/
|
||||
def asplit[K[L[x]], B[x]](base: AList[K]): ASplit[K, B] = new ASplit[K, B] {
|
||||
type Split[L[x]] = K[(L ∙ B)#l]
|
||||
|
||||
def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] =
|
||||
base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M, N, B](f))
|
||||
|
||||
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] =
|
||||
{
|
||||
val g = nestCon[M, (N ∙ P)#l, B](f)
|
||||
base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np)
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] = {
|
||||
val g = nestCon[M, (N ∙ P)#l, B](f)
|
||||
base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np)
|
||||
}
|
||||
|
||||
def foldr[M[_], A](value: Split[M], f: (M[_], A) => A, init: A): A =
|
||||
base.foldr[(M ∙ B)#l, A](value, f, init)
|
||||
|
|
@ -87,11 +94,10 @@ object AList {
|
|||
type T2[M[_]] = (M[A], M[B])
|
||||
def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2))
|
||||
def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init))
|
||||
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T2[P]] =
|
||||
{
|
||||
val g = (Tuple2.apply[P[A], P[B]] _).curried
|
||||
np.apply(np.map(g, f(t._1)), f(t._2))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T2[P]] = {
|
||||
val g = (Tuple2.apply[P[A], P[B]] _).curried
|
||||
np.apply(np.map(g, f(t._1)), f(t._2))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T3K[A, B, C] { type l[L[x]] = (L[A], L[B], L[C]) }
|
||||
|
|
@ -100,11 +106,10 @@ object AList {
|
|||
type T3[M[_]] = (M[A], M[B], M[C])
|
||||
def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3))
|
||||
def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init)))
|
||||
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] =
|
||||
{
|
||||
val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
|
||||
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] = {
|
||||
val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
|
||||
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T4K[A, B, C, D] { type l[L[x]] = (L[A], L[B], L[C], L[D]) }
|
||||
|
|
@ -113,11 +118,10 @@ object AList {
|
|||
type T4[M[_]] = (M[A], M[B], M[C], M[D])
|
||||
def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4))
|
||||
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init))))
|
||||
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] =
|
||||
{
|
||||
val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
|
||||
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] = {
|
||||
val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
|
||||
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T5K[A, B, C, D, E] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E]) }
|
||||
|
|
@ -126,11 +130,10 @@ object AList {
|
|||
type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E])
|
||||
def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5))
|
||||
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
|
||||
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] =
|
||||
{
|
||||
val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] = {
|
||||
val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T6K[A, B, C, D, E, F] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F]) }
|
||||
|
|
@ -139,11 +142,10 @@ object AList {
|
|||
type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F])
|
||||
def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
|
||||
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
|
||||
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] =
|
||||
{
|
||||
val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] = {
|
||||
val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) }
|
||||
|
|
@ -152,23 +154,22 @@ object AList {
|
|||
type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G])
|
||||
def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
|
||||
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
|
||||
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] =
|
||||
{
|
||||
val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] = {
|
||||
val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) }
|
||||
type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l]
|
||||
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] {
|
||||
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
|
||||
def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
|
||||
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
|
||||
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] =
|
||||
{
|
||||
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] = {
|
||||
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) }
|
||||
|
|
@ -177,11 +178,10 @@ object AList {
|
|||
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
|
||||
def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
|
||||
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
|
||||
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] =
|
||||
{
|
||||
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] = {
|
||||
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) }
|
||||
|
|
@ -190,11 +190,10 @@ object AList {
|
|||
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
|
||||
def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
|
||||
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init))))))))))
|
||||
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] =
|
||||
{
|
||||
val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] = {
|
||||
val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) }
|
||||
|
|
@ -203,10 +202,9 @@ object AList {
|
|||
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
|
||||
def transform[M[_], N[_]](t: T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11))
|
||||
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))))
|
||||
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] =
|
||||
{
|
||||
val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11))
|
||||
}
|
||||
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] = {
|
||||
val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
|
||||
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -42,54 +42,75 @@ sealed trait AttributeKey[T] {
|
|||
def rank: Int
|
||||
|
||||
def optJsonWriter: OptJsonWriter[T]
|
||||
|
||||
}
|
||||
|
||||
private[sbt] abstract class SharedAttributeKey[T] extends AttributeKey[T] {
|
||||
override final def toString = label
|
||||
override final def hashCode = label.hashCode
|
||||
override final def equals(o: Any) = (this eq o.asInstanceOf[AnyRef]) || (o match {
|
||||
case a: SharedAttributeKey[t] => a.label == this.label && a.manifest == this.manifest
|
||||
case _ => false
|
||||
})
|
||||
override final def equals(o: Any) =
|
||||
(this eq o.asInstanceOf[AnyRef]) || (o match {
|
||||
case a: SharedAttributeKey[t] => a.label == this.label && a.manifest == this.manifest
|
||||
case _ => false
|
||||
})
|
||||
final def isLocal: Boolean = false
|
||||
}
|
||||
|
||||
object AttributeKey {
|
||||
def apply[T](name: String)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
def apply[T: Manifest: OptJsonWriter](name: String): AttributeKey[T] =
|
||||
make(name, None, Nil, Int.MaxValue)
|
||||
|
||||
def apply[T](name: String, rank: Int)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
def apply[T: Manifest: OptJsonWriter](name: String, rank: Int): AttributeKey[T] =
|
||||
make(name, None, Nil, rank)
|
||||
|
||||
def apply[T](name: String, description: String)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
def apply[T: Manifest: OptJsonWriter](name: String, description: String): AttributeKey[T] =
|
||||
apply(name, description, Nil)
|
||||
|
||||
def apply[T](name: String, description: String, rank: Int)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
def apply[T: Manifest: OptJsonWriter](name: String,
|
||||
description: String,
|
||||
rank: Int): AttributeKey[T] =
|
||||
apply(name, description, Nil, rank)
|
||||
|
||||
def apply[T](name: String, description: String, extend: Seq[AttributeKey[_]])(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
def apply[T: Manifest: OptJsonWriter](name: String,
|
||||
description: String,
|
||||
extend: Seq[AttributeKey[_]]): AttributeKey[T] =
|
||||
apply(name, description, extend, Int.MaxValue)
|
||||
|
||||
def apply[T](name: String, description: String, extend: Seq[AttributeKey[_]], rank: Int)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
def apply[T: Manifest: OptJsonWriter](name: String,
|
||||
description: String,
|
||||
extend: Seq[AttributeKey[_]],
|
||||
rank: Int): AttributeKey[T] =
|
||||
make(name, Some(description), extend, rank)
|
||||
|
||||
private[this] def make[T](name: String, description0: Option[String], extend0: Seq[AttributeKey[_]], rank0: Int)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] = new SharedAttributeKey[T] {
|
||||
def manifest = mf
|
||||
val label = Util.hyphenToCamel(name)
|
||||
def description = description0
|
||||
def extend = extend0
|
||||
def rank = rank0
|
||||
def optJsonWriter = ojw
|
||||
}
|
||||
private[sbt] def local[T](implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] = new AttributeKey[T] {
|
||||
def manifest = mf
|
||||
def label = LocalLabel
|
||||
def description = None
|
||||
def extend = Nil
|
||||
override def toString = label
|
||||
def isLocal: Boolean = true
|
||||
def rank = Int.MaxValue
|
||||
val optJsonWriter = ojw
|
||||
}
|
||||
private[this] def make[T](
|
||||
name: String,
|
||||
description0: Option[String],
|
||||
extend0: Seq[AttributeKey[_]],
|
||||
rank0: Int
|
||||
)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
new SharedAttributeKey[T] {
|
||||
def manifest = mf
|
||||
val label = Util.hyphenToCamel(name)
|
||||
def description = description0
|
||||
def extend = extend0
|
||||
def rank = rank0
|
||||
def optJsonWriter = ojw
|
||||
}
|
||||
|
||||
private[sbt] def local[T](implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
|
||||
new AttributeKey[T] {
|
||||
def manifest = mf
|
||||
def label = LocalLabel
|
||||
def description = None
|
||||
def extend = Nil
|
||||
override def toString = label
|
||||
def isLocal: Boolean = true
|
||||
def rank = Int.MaxValue
|
||||
val optJsonWriter = ojw
|
||||
}
|
||||
|
||||
private[sbt] final val LocalLabel = "$" + "local"
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -98,6 +119,7 @@ object AttributeKey {
|
|||
* Excluding this possibility is the responsibility of the client if desired.
|
||||
*/
|
||||
trait AttributeMap {
|
||||
|
||||
/**
|
||||
* Gets the value of type `T` associated with the key `k`.
|
||||
* If a key with the same label but different type is defined, this method will fail.
|
||||
|
|
@ -142,8 +164,11 @@ trait AttributeMap {
|
|||
|
||||
/** `true` if there are no mappings in this map, `false` if there are. */
|
||||
def isEmpty: Boolean
|
||||
|
||||
}
|
||||
|
||||
object AttributeMap {
|
||||
|
||||
/** An [[AttributeMap]] without any mappings. */
|
||||
val empty: AttributeMap = new BasicAttributeMap(Map.empty)
|
||||
|
||||
|
|
@ -157,27 +182,38 @@ object AttributeMap {
|
|||
implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = new (AttributeKey ~> Id) {
|
||||
def apply[T](key: AttributeKey[T]): T = map(key)
|
||||
}
|
||||
|
||||
}
|
||||
private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any]) extends AttributeMap {
|
||||
private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any])
|
||||
extends AttributeMap {
|
||||
|
||||
def isEmpty: Boolean = backing.isEmpty
|
||||
def apply[T](k: AttributeKey[T]) = backing(k).asInstanceOf[T]
|
||||
def get[T](k: AttributeKey[T]) = backing.get(k).asInstanceOf[Option[T]]
|
||||
def remove[T](k: AttributeKey[T]): AttributeMap = new BasicAttributeMap(backing - k)
|
||||
def contains[T](k: AttributeKey[T]) = backing.contains(k)
|
||||
def put[T](k: AttributeKey[T], value: T): AttributeMap = new BasicAttributeMap(backing.updated(k, value))
|
||||
|
||||
def put[T](k: AttributeKey[T], value: T): AttributeMap =
|
||||
new BasicAttributeMap(backing.updated(k, value))
|
||||
|
||||
def keys: Iterable[AttributeKey[_]] = backing.keys
|
||||
def ++(o: Iterable[AttributeEntry[_]]): AttributeMap =
|
||||
{
|
||||
val newBacking = (backing /: o) { case (b, AttributeEntry(key, value)) => b.updated(key, value) }
|
||||
new BasicAttributeMap(newBacking)
|
||||
|
||||
def ++(o: Iterable[AttributeEntry[_]]): AttributeMap = {
|
||||
val newBacking = (backing /: o) {
|
||||
case (b, AttributeEntry(key, value)) => b.updated(key, value)
|
||||
}
|
||||
new BasicAttributeMap(newBacking)
|
||||
}
|
||||
|
||||
def ++(o: AttributeMap): AttributeMap =
|
||||
o match {
|
||||
case bam: BasicAttributeMap => new BasicAttributeMap(backing ++ bam.backing)
|
||||
case _ => o ++ this
|
||||
}
|
||||
|
||||
def entries: Iterable[AttributeEntry[_]] =
|
||||
for ((k: AttributeKey[kt], v) <- backing) yield AttributeEntry(k, v.asInstanceOf[kt])
|
||||
|
||||
override def toString = entries.mkString("(", ", ", ")")
|
||||
}
|
||||
|
||||
|
|
@ -189,16 +225,21 @@ final case class AttributeEntry[T](key: AttributeKey[T], value: T) {
|
|||
|
||||
/** Associates a `metadata` map with `data`. */
|
||||
final case class Attributed[D](data: D)(val metadata: AttributeMap) {
|
||||
|
||||
/** Retrieves the associated value of `key` from the metadata. */
|
||||
def get[T](key: AttributeKey[T]): Option[T] = metadata.get(key)
|
||||
|
||||
/** Defines a mapping `key -> value` in the metadata. */
|
||||
def put[T](key: AttributeKey[T], value: T): Attributed[D] = Attributed(data)(metadata.put(key, value))
|
||||
def put[T](key: AttributeKey[T], value: T): Attributed[D] =
|
||||
Attributed(data)(metadata.put(key, value))
|
||||
|
||||
/** Transforms the data by applying `f`. */
|
||||
def map[T](f: D => T): Attributed[T] = Attributed(f(data))(metadata)
|
||||
|
||||
}
|
||||
|
||||
object Attributed {
|
||||
|
||||
/** Extracts the underlying data from the sequence `in`. */
|
||||
def data[T](in: Seq[Attributed[T]]): Seq[T] = in.map(_.data)
|
||||
|
||||
|
|
@ -207,4 +248,5 @@ object Attributed {
|
|||
|
||||
/** Associates an empty metadata map with `data`. */
|
||||
def blank[T](data: T): Attributed[T] = Attributed(data)(AttributeMap.empty)
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,19 +6,26 @@ object Classes {
|
|||
def pure[S](s: => S): M[S]
|
||||
def map[S, T](f: S => T, v: M[S]): M[T]
|
||||
}
|
||||
|
||||
trait Monad[M[_]] extends Applicative[M] {
|
||||
def flatten[T](m: M[M[T]]): M[T]
|
||||
}
|
||||
|
||||
implicit val optionMonad: Monad[Option] = new Monad[Option] {
|
||||
def apply[S, T](f: Option[S => T], v: Option[S]) = (f, v) match { case (Some(fv), Some(vv)) => Some(fv(vv)); case _ => None }
|
||||
def apply[S, T](f: Option[S => T], v: Option[S]) = (f, v) match {
|
||||
case (Some(fv), Some(vv)) => Some(fv(vv))
|
||||
case _ => None
|
||||
}
|
||||
|
||||
def pure[S](s: => S) = Some(s)
|
||||
def map[S, T](f: S => T, v: Option[S]) = v map f
|
||||
def flatten[T](m: Option[Option[T]]): Option[T] = m.flatten
|
||||
}
|
||||
|
||||
implicit val listMonad: Monad[List] = new Monad[List] {
|
||||
def apply[S, T](f: List[S => T], v: List[S]) = for (fv <- f; vv <- v) yield fv(vv)
|
||||
def pure[S](s: => S) = s :: Nil
|
||||
def map[S, T](f: S => T, v: List[S]) = v map f
|
||||
def flatten[T](m: List[List[T]]): List[T] = m.flatten
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,8 +3,7 @@
|
|||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
trait Dag[Node <: Dag[Node]] {
|
||||
self: Node =>
|
||||
trait Dag[Node <: Dag[Node]] { self: Node =>
|
||||
|
||||
def dependencies: Iterable[Node]
|
||||
def topologicalSort = Dag.topologicalSort(self)(_.dependencies)
|
||||
|
|
@ -13,53 +12,58 @@ object Dag {
|
|||
import scala.collection.{ mutable, JavaConverters }
|
||||
import JavaConverters.asScalaSetConverter
|
||||
|
||||
def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] = topologicalSort(root :: Nil)(dependencies)
|
||||
def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] =
|
||||
topologicalSort(root :: Nil)(dependencies)
|
||||
|
||||
def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] =
|
||||
{
|
||||
val discovered = new mutable.HashSet[T]
|
||||
val finished = (new java.util.LinkedHashSet[T]).asScala
|
||||
def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = {
|
||||
val discovered = new mutable.HashSet[T]
|
||||
val finished = (new java.util.LinkedHashSet[T]).asScala
|
||||
|
||||
def visitAll(nodes: Iterable[T]) = nodes foreach visit
|
||||
def visit(node: T): Unit = {
|
||||
if (!discovered(node)) {
|
||||
discovered(node) = true;
|
||||
try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c }
|
||||
finished += node
|
||||
()
|
||||
} else if (!finished(node))
|
||||
throw new Cyclic(node)
|
||||
}
|
||||
|
||||
visitAll(nodes)
|
||||
|
||||
finished.toList
|
||||
def visitAll(nodes: Iterable[T]) = nodes foreach visit
|
||||
def visit(node: T): Unit = {
|
||||
if (!discovered(node)) {
|
||||
discovered(node) = true;
|
||||
try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c }
|
||||
finished += node
|
||||
()
|
||||
} else if (!finished(node))
|
||||
throw new Cyclic(node)
|
||||
}
|
||||
|
||||
visitAll(nodes)
|
||||
|
||||
finished.toList
|
||||
}
|
||||
|
||||
// doesn't check for cycles
|
||||
def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = topologicalSortUnchecked(node :: Nil)(dependencies)
|
||||
def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] =
|
||||
topologicalSortUnchecked(node :: Nil)(dependencies)
|
||||
|
||||
def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] =
|
||||
{
|
||||
val discovered = new mutable.HashSet[T]
|
||||
var finished: List[T] = Nil
|
||||
def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = {
|
||||
val discovered = new mutable.HashSet[T]
|
||||
var finished: List[T] = Nil
|
||||
|
||||
def visitAll(nodes: Iterable[T]) = nodes foreach visit
|
||||
def visit(node: T): Unit = {
|
||||
if (!discovered(node)) {
|
||||
discovered(node) = true
|
||||
visitAll(dependencies(node))
|
||||
finished ::= node
|
||||
}
|
||||
def visitAll(nodes: Iterable[T]) = nodes foreach visit
|
||||
def visit(node: T): Unit = {
|
||||
if (!discovered(node)) {
|
||||
discovered(node) = true
|
||||
visitAll(dependencies(node))
|
||||
finished ::= node
|
||||
}
|
||||
|
||||
visitAll(nodes);
|
||||
finished;
|
||||
}
|
||||
|
||||
visitAll(nodes);
|
||||
finished;
|
||||
}
|
||||
|
||||
final class Cyclic(val value: Any, val all: List[Any], val complete: Boolean)
|
||||
extends Exception("Cyclic reference involving " +
|
||||
(if (complete) all.mkString("\n ", "\n ", "") else value)) {
|
||||
extends Exception(
|
||||
"Cyclic reference involving " +
|
||||
(if (complete) all.mkString("\n ", "\n ", "") else value)
|
||||
) {
|
||||
def this(value: Any) = this(value, value :: Nil, false)
|
||||
override def toString = getMessage
|
||||
|
||||
def ::(a: Any): Cyclic =
|
||||
if (complete)
|
||||
this
|
||||
|
|
@ -71,19 +75,25 @@ object Dag {
|
|||
|
||||
/** A directed graph with edges labeled positive or negative. */
|
||||
private[sbt] trait DirectedSignedGraph[Node] {
|
||||
|
||||
/**
|
||||
* Directed edge type that tracks the sign and target (head) vertex.
|
||||
* The sign can be obtained via [[isNegative]] and the target vertex via [[head]].
|
||||
*/
|
||||
type Arrow
|
||||
|
||||
/** List of initial nodes. */
|
||||
def nodes: List[Arrow]
|
||||
|
||||
/** Outgoing edges for `n`. */
|
||||
def dependencies(n: Node): List[Arrow]
|
||||
|
||||
/** `true` if the edge `a` is "negative", false if it is "positive". */
|
||||
def isNegative(a: Arrow): Boolean
|
||||
|
||||
/** The target of the directed edge `a`. */
|
||||
def head(a: Arrow): Node
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -92,36 +102,35 @@ object Dag {
|
|||
* If a cycle containing a "negative" edge is detected, its member edges are returned in order.
|
||||
* Otherwise, the empty list is returned.
|
||||
*/
|
||||
private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] =
|
||||
{
|
||||
import graph._
|
||||
val finished = new mutable.HashSet[Node]
|
||||
val visited = new mutable.HashSet[Node]
|
||||
private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] = {
|
||||
import graph._
|
||||
val finished = new mutable.HashSet[Node]
|
||||
val visited = new mutable.HashSet[Node]
|
||||
|
||||
def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match {
|
||||
case Nil => Nil
|
||||
case edge :: tail =>
|
||||
val node = head(edge)
|
||||
if (!visited(node)) {
|
||||
visited += node
|
||||
visit(dependencies(node), edge :: stack) match {
|
||||
case Nil =>
|
||||
finished += node
|
||||
visit(tail, stack)
|
||||
case cycle => cycle
|
||||
}
|
||||
} else if (!finished(node)) {
|
||||
// cycle. If a negative edge is involved, it is an error.
|
||||
val between = edge :: stack.takeWhile(f => head(f) != node)
|
||||
if (between exists isNegative)
|
||||
between
|
||||
else
|
||||
def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match {
|
||||
case Nil => Nil
|
||||
case edge :: tail =>
|
||||
val node = head(edge)
|
||||
if (!visited(node)) {
|
||||
visited += node
|
||||
visit(dependencies(node), edge :: stack) match {
|
||||
case Nil =>
|
||||
finished += node
|
||||
visit(tail, stack)
|
||||
} else
|
||||
case cycle => cycle
|
||||
}
|
||||
} else if (!finished(node)) {
|
||||
// cycle. If a negative edge is involved, it is an error.
|
||||
val between = edge :: stack.takeWhile(f => head(f) != node)
|
||||
if (between exists isNegative)
|
||||
between
|
||||
else
|
||||
visit(tail, stack)
|
||||
}
|
||||
|
||||
visit(graph.nodes, Nil)
|
||||
} else
|
||||
visit(tail, stack)
|
||||
}
|
||||
|
||||
visit(graph.nodes, Nil)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,13 +12,16 @@ import Types._
|
|||
sealed trait HList {
|
||||
type Wrap[M[_]] <: HList
|
||||
}
|
||||
|
||||
sealed trait HNil extends HList {
|
||||
type Wrap[M[_]] = HNil
|
||||
def :+:[G](g: G): G :+: HNil = HCons(g, this)
|
||||
|
||||
override def toString = "HNil"
|
||||
}
|
||||
|
||||
object HNil extends HNil
|
||||
|
||||
final case class HCons[H, T <: HList](head: H, tail: T) extends HList {
|
||||
type Wrap[M[_]] = M[H] :+: T#Wrap[M]
|
||||
def :+:[G](g: G): G :+: H :+: T = HCons(g, this)
|
||||
|
|
@ -28,5 +31,6 @@ final case class HCons[H, T <: HList](head: H, tail: T) extends HList {
|
|||
|
||||
object HList {
|
||||
// contains no type information: not even A
|
||||
implicit def fromList[A](list: Traversable[A]): HList = ((HNil: HList) /: list)((hl, v) => HCons(v, hl))
|
||||
}
|
||||
implicit def fromList[A](list: Traversable[A]): HList =
|
||||
((HNil: HList) /: list)((hl, v) => HCons(v, hl))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -21,7 +21,10 @@ trait HListFormats {
|
|||
}
|
||||
}
|
||||
|
||||
implicit def hconsFormat[H, T <: HList](implicit hf: JsonFormat[H], tf: HListJF[T]): JsonFormat[H :+: T] =
|
||||
implicit def hconsFormat[H, T <: HList](
|
||||
implicit hf: JsonFormat[H],
|
||||
tf: HListJF[T]
|
||||
): JsonFormat[H :+: T] =
|
||||
new JsonFormat[H :+: T] {
|
||||
def write[J](hcons: H :+: T, builder: Builder[J]) = {
|
||||
builder.beginArray()
|
||||
|
|
@ -34,7 +37,8 @@ trait HListFormats {
|
|||
case None => HCons(hf.read(None, unbuilder), tf.read(None, unbuilder))
|
||||
case Some(js) =>
|
||||
unbuilder.beginArray(js)
|
||||
val hcons = HCons(hf.read(Some(unbuilder.nextElement), unbuilder), tf.read(Some(js), unbuilder))
|
||||
val hcons =
|
||||
HCons(hf.read(Some(unbuilder.nextElement), unbuilder), tf.read(Some(js), unbuilder))
|
||||
unbuilder.endArray()
|
||||
hcons
|
||||
}
|
||||
|
|
@ -45,7 +49,10 @@ trait HListFormats {
|
|||
def write[J](obj: A, builder: Builder[J]): Unit
|
||||
}
|
||||
|
||||
implicit def hconsHListJF[H, T <: HList](implicit hf: JsonFormat[H], tf: HListJF[T]): HListJF[H :+: T] =
|
||||
implicit def hconsHListJF[H, T <: HList](
|
||||
implicit hf: JsonFormat[H],
|
||||
tf: HListJF[T]
|
||||
): HListJF[H :+: T] =
|
||||
new HListJF[H :+: T] {
|
||||
def write[J](hcons: H :+: T, builder: Builder[J]) = {
|
||||
hf.write(hcons.head, builder)
|
||||
|
|
@ -53,8 +60,9 @@ trait HListFormats {
|
|||
}
|
||||
|
||||
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
|
||||
case None => HCons(hf.read(None, unbuilder), tf.read(None, unbuilder))
|
||||
case Some(js) => HCons(hf.read(Some(unbuilder.nextElement), unbuilder), tf.read(Some(js), unbuilder))
|
||||
case None => HCons(hf.read(None, unbuilder), tf.read(None, unbuilder))
|
||||
case Some(js) =>
|
||||
HCons(hf.read(Some(unbuilder.nextElement), unbuilder), tf.read(Some(js), unbuilder))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -20,12 +20,13 @@ trait IDSet[T] {
|
|||
object IDSet {
|
||||
implicit def toTraversable[T]: IDSet[T] => Traversable[T] = _.all
|
||||
def apply[T](values: T*): IDSet[T] = apply(values)
|
||||
def apply[T](values: Iterable[T]): IDSet[T] =
|
||||
{
|
||||
val s = create[T]
|
||||
s ++= values
|
||||
s
|
||||
}
|
||||
|
||||
def apply[T](values: Iterable[T]): IDSet[T] = {
|
||||
val s = create[T]
|
||||
s ++= values
|
||||
s
|
||||
}
|
||||
|
||||
def create[T]: IDSet[T] = new IDSet[T] {
|
||||
private[this] val backing = new java.util.IdentityHashMap[T, AnyRef]
|
||||
private[this] val Dummy: AnyRef = ""
|
||||
|
|
@ -39,7 +40,10 @@ object IDSet {
|
|||
def all = collection.JavaConversions.collectionAsScalaIterable(backing.keySet)
|
||||
def toList = all.toList
|
||||
def isEmpty = backing.isEmpty
|
||||
def process[S](t: T)(ifSeen: S)(ifNew: => S) = if (contains(t)) ifSeen else { this += t; ifNew }
|
||||
|
||||
def process[S](t: T)(ifSeen: S)(ifNew: => S) =
|
||||
if (contains(t)) ifSeen else { this += t; ifNew }
|
||||
|
||||
override def toString = backing.toString
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ object EvaluationState extends Enumeration {
|
|||
abstract class EvaluateSettings[Scope] {
|
||||
protected val init: Init[Scope]
|
||||
import init._
|
||||
|
||||
protected def executor: Executor
|
||||
protected def compiledSettings: Seq[Compiled[_]]
|
||||
|
||||
|
|
@ -20,23 +21,33 @@ abstract class EvaluateSettings[Scope] {
|
|||
private[this] val complete = new LinkedBlockingQueue[Option[Throwable]]
|
||||
private[this] val static = PMap.empty[ScopedKey, INode]
|
||||
private[this] val allScopes: Set[Scope] = compiledSettings.map(_.key.scope).toSet
|
||||
private[this] def getStatic[T](key: ScopedKey[T]): INode[T] = static get key getOrElse sys.error("Illegal reference to key " + key)
|
||||
|
||||
private[this] def getStatic[T](key: ScopedKey[T]): INode[T] =
|
||||
static get key getOrElse sys.error("Illegal reference to key " + key)
|
||||
|
||||
private[this] val transform: Initialize ~> INode = new (Initialize ~> INode) {
|
||||
def apply[T](i: Initialize[T]): INode[T] = i match {
|
||||
case k: Keyed[s, T] @unchecked => single(getStatic(k.scopedKey), k.transform)
|
||||
case a: Apply[k, T] @unchecked => new MixedNode[k, T](a.alist.transform[Initialize, INode](a.inputs, transform), a.f, a.alist)
|
||||
case k: Keyed[s, T] @unchecked => single(getStatic(k.scopedKey), k.transform)
|
||||
case a: Apply[k, T] @unchecked =>
|
||||
new MixedNode[k, T](
|
||||
a.alist.transform[Initialize, INode](a.inputs, transform),
|
||||
a.f,
|
||||
a.alist
|
||||
)
|
||||
case b: Bind[s, T] @unchecked => new BindNode[s, T](transform(b.in), x => transform(b.f(x)))
|
||||
case v: Value[T] @unchecked => constant(v.value)
|
||||
case v: ValidationCapture[T] @unchecked => strictConstant(v.key)
|
||||
case t: TransformCapture => strictConstant(t.f)
|
||||
case o: Optional[s, T] @unchecked => o.a match {
|
||||
case None => constant(() => o.f(None))
|
||||
case Some(i) => single[s, T](transform(i), x => o.f(Some(x)))
|
||||
}
|
||||
case x if x == StaticScopes => strictConstant(allScopes.asInstanceOf[T]) // can't convince scalac that StaticScopes => T == Set[Scope]
|
||||
case o: Optional[s, T] @unchecked =>
|
||||
o.a match {
|
||||
case None => constant(() => o.f(None))
|
||||
case Some(i) => single[s, T](transform(i), x => o.f(Some(x)))
|
||||
}
|
||||
case x if x == StaticScopes =>
|
||||
strictConstant(allScopes.asInstanceOf[T]) // can't convince scalac that StaticScopes => T == Set[Scope]
|
||||
}
|
||||
}
|
||||
|
||||
private[this] lazy val roots: Seq[INode[_]] = compiledSettings flatMap { cs =>
|
||||
(cs.settings map { s =>
|
||||
val t = transform(s.init)
|
||||
|
|
@ -44,42 +55,47 @@ abstract class EvaluateSettings[Scope] {
|
|||
t
|
||||
}): Seq[INode[_]]
|
||||
}
|
||||
|
||||
private[this] var running = new AtomicInteger
|
||||
private[this] var cancel = new AtomicBoolean(false)
|
||||
|
||||
def run(implicit delegates: Scope => Seq[Scope]): Settings[Scope] =
|
||||
{
|
||||
assert(running.get() == 0, "Already running")
|
||||
startWork()
|
||||
roots.foreach(_.registerIfNew())
|
||||
workComplete()
|
||||
complete.take() foreach { ex =>
|
||||
cancel.set(true)
|
||||
throw ex
|
||||
}
|
||||
getResults(delegates)
|
||||
def run(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = {
|
||||
assert(running.get() == 0, "Already running")
|
||||
startWork()
|
||||
roots.foreach(_.registerIfNew())
|
||||
workComplete()
|
||||
complete.take() foreach { ex =>
|
||||
cancel.set(true)
|
||||
throw ex
|
||||
}
|
||||
getResults(delegates)
|
||||
}
|
||||
|
||||
private[this] def getResults(implicit delegates: Scope => Seq[Scope]) =
|
||||
(empty /: static.toTypedSeq) {
|
||||
case (ss, static.TPair(key, node)) =>
|
||||
if (key.key.isLocal) ss else ss.set(key.scope, key.key, node.get)
|
||||
}
|
||||
|
||||
private[this] val getValue = new (INode ~> Id) { def apply[T](node: INode[T]) = node.get }
|
||||
|
||||
private[this] def submitEvaluate(node: INode[_]) = submit(node.evaluate())
|
||||
private[this] def submitCallComplete[T](node: BindNode[_, T], value: T) = submit(node.callComplete(value))
|
||||
private[this] def submit(work: => Unit): Unit =
|
||||
{
|
||||
startWork()
|
||||
executor.execute(new Runnable { def run = if (!cancel.get()) run0(work) })
|
||||
}
|
||||
private[this] def run0(work: => Unit): Unit =
|
||||
{
|
||||
try { work } catch { case e: Throwable => complete.put(Some(e)) }
|
||||
workComplete()
|
||||
}
|
||||
|
||||
private[this] def submitCallComplete[T](node: BindNode[_, T], value: T) =
|
||||
submit(node.callComplete(value))
|
||||
|
||||
private[this] def submit(work: => Unit): Unit = {
|
||||
startWork()
|
||||
executor.execute(new Runnable { def run = if (!cancel.get()) run0(work) })
|
||||
}
|
||||
|
||||
private[this] def run0(work: => Unit): Unit = {
|
||||
try { work } catch { case e: Throwable => complete.put(Some(e)) }
|
||||
workComplete()
|
||||
}
|
||||
|
||||
private[this] def startWork(): Unit = { running.incrementAndGet(); () }
|
||||
|
||||
private[this] def workComplete(): Unit =
|
||||
if (running.decrementAndGet() == 0)
|
||||
complete.put(None)
|
||||
|
|
@ -91,26 +107,32 @@ abstract class EvaluateSettings[Scope] {
|
|||
private[this] var blockedOn: Int = 0
|
||||
private[this] val calledBy = new collection.mutable.ListBuffer[BindNode[_, T]]
|
||||
|
||||
override def toString = getClass.getName + " (state=" + state + ",blockedOn=" + blockedOn + ",calledBy=" + calledBy.size + ",blocking=" + blocking.size + "): " +
|
||||
keyString
|
||||
override def toString =
|
||||
getClass.getName + " (state=" + state + ",blockedOn=" + blockedOn + ",calledBy=" + calledBy.size + ",blocking=" + blocking.size + "): " +
|
||||
keyString
|
||||
|
||||
private[this] def keyString =
|
||||
(static.toSeq.flatMap { case (key, value) => if (value eq this) init.showFullKey.show(key) :: Nil else Nil }).headOption getOrElse "non-static"
|
||||
(static.toSeq.flatMap {
|
||||
case (key, value) => if (value eq this) init.showFullKey.show(key) :: Nil else Nil
|
||||
}).headOption getOrElse "non-static"
|
||||
|
||||
final def get: T = synchronized {
|
||||
assert(value != null, toString + " not evaluated")
|
||||
value
|
||||
}
|
||||
|
||||
final def doneOrBlock(from: INode[_]): Boolean = synchronized {
|
||||
val ready = state == Evaluated
|
||||
if (!ready) blocking += from
|
||||
registerIfNew()
|
||||
ready
|
||||
}
|
||||
|
||||
final def isDone: Boolean = synchronized { state == Evaluated }
|
||||
final def isNew: Boolean = synchronized { state == New }
|
||||
final def isCalling: Boolean = synchronized { state == Calling }
|
||||
final def registerIfNew(): Unit = synchronized { if (state == New) register() }
|
||||
|
||||
private[this] def register(): Unit = {
|
||||
assert(state == New, "Already registered and: " + toString)
|
||||
val deps = dependsOn
|
||||
|
|
@ -126,28 +148,36 @@ abstract class EvaluateSettings[Scope] {
|
|||
state = Ready
|
||||
submitEvaluate(this)
|
||||
}
|
||||
|
||||
final def unblocked(): Unit = synchronized {
|
||||
assert(state == Blocked, "Invalid state for unblocked() call: " + toString)
|
||||
blockedOn -= 1
|
||||
assert(blockedOn >= 0, "Negative blockedOn: " + blockedOn + " for " + toString)
|
||||
if (blockedOn == 0) schedule()
|
||||
}
|
||||
|
||||
final def evaluate(): Unit = synchronized { evaluate0() }
|
||||
|
||||
protected final def makeCall(source: BindNode[_, T], target: INode[T]): Unit = {
|
||||
assert(state == Ready, "Invalid state for call to makeCall: " + toString)
|
||||
state = Calling
|
||||
target.call(source)
|
||||
}
|
||||
|
||||
protected final def setValue(v: T): Unit = {
|
||||
assert(state != Evaluated, "Already evaluated (trying to set value to " + v + "): " + toString)
|
||||
assert(state != Evaluated,
|
||||
"Already evaluated (trying to set value to " + v + "): " + toString)
|
||||
if (v == null) sys.error("Setting value cannot be null: " + keyString)
|
||||
value = v
|
||||
state = Evaluated
|
||||
blocking foreach { _.unblocked() }
|
||||
blocking.clear()
|
||||
calledBy foreach { node => submitCallComplete(node, value) }
|
||||
calledBy foreach { node =>
|
||||
submitCallComplete(node, value)
|
||||
}
|
||||
calledBy.clear()
|
||||
}
|
||||
|
||||
final def call(by: BindNode[_, T]): Unit = synchronized {
|
||||
registerIfNew()
|
||||
state match {
|
||||
|
|
@ -156,13 +186,19 @@ abstract class EvaluateSettings[Scope] {
|
|||
}
|
||||
()
|
||||
}
|
||||
|
||||
protected def dependsOn: Seq[INode[_]]
|
||||
protected def evaluate0(): Unit
|
||||
}
|
||||
|
||||
private[this] def strictConstant[T](v: T): INode[T] = constant(() => v)
|
||||
private[this] def constant[T](f: () => T): INode[T] = new MixedNode[ConstK[Unit]#l, T]((), _ => f(), AList.empty)
|
||||
private[this] def single[S, T](in: INode[S], f: S => T): INode[T] = new MixedNode[({ type l[L[x]] = L[S] })#l, T](in, f, AList.single[S])
|
||||
|
||||
private[this] def constant[T](f: () => T): INode[T] =
|
||||
new MixedNode[ConstK[Unit]#l, T]((), _ => f(), AList.empty)
|
||||
|
||||
private[this] def single[S, T](in: INode[S], f: S => T): INode[T] =
|
||||
new MixedNode[({ type l[L[x]] = L[S] })#l, T](in, f, AList.single[S])
|
||||
|
||||
private[this] final class BindNode[S, T](in: INode[S], f: S => INode[T]) extends INode[T] {
|
||||
protected def dependsOn = in :: Nil
|
||||
protected def evaluate0(): Unit = makeCall(this, f(in.get))
|
||||
|
|
@ -171,7 +207,9 @@ abstract class EvaluateSettings[Scope] {
|
|||
setValue(value)
|
||||
}
|
||||
}
|
||||
private[this] final class MixedNode[K[L[x]], T](in: K[INode], f: K[Id] => T, alist: AList[K]) extends INode[T] {
|
||||
|
||||
private[this] final class MixedNode[K[L[x]], T](in: K[INode], f: K[Id] => T, alist: AList[K])
|
||||
extends INode[T] {
|
||||
protected def dependsOn = alist.toList(in)
|
||||
protected def evaluate0(): Unit = setValue(f(alist.transform(in, getValue)))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,32 +22,38 @@ sealed trait KList[+M[_]] {
|
|||
/** Discards the heterogeneous type information and constructs a plain List from this KList's elements. */
|
||||
def toList: List[M[_]]
|
||||
}
|
||||
|
||||
final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KList[M] {
|
||||
final type Transform[N[_]] = KCons[H, tail.Transform[N], N]
|
||||
|
||||
def transform[N[_]](f: M ~> N) = KCons(f(head), tail.transform(f))
|
||||
def toList: List[M[_]] = head :: tail.toList
|
||||
def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] =
|
||||
{
|
||||
val g = (t: tail.Transform[Id]) => (h: H) => f(KCons[H, tail.Transform[Id], Id](h, t))
|
||||
ap.apply(tail.apply[N, H => Z](g), head)
|
||||
}
|
||||
def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] =
|
||||
{
|
||||
val tt: N[tail.Transform[P]] = tail.traverse[N, P](f)
|
||||
val g = (t: tail.Transform[P]) => (h: P[H]) => KCons(h, t)
|
||||
np.apply(np.map(g, tt), f(head))
|
||||
}
|
||||
|
||||
def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] = {
|
||||
val g = (t: tail.Transform[Id]) => (h: H) => f(KCons[H, tail.Transform[Id], Id](h, t))
|
||||
ap.apply(tail.apply[N, H => Z](g), head)
|
||||
}
|
||||
|
||||
def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] = {
|
||||
val tt: N[tail.Transform[P]] = tail.traverse[N, P](f)
|
||||
val g = (t: tail.Transform[P]) => (h: P[H]) => KCons(h, t)
|
||||
np.apply(np.map(g, tt), f(head))
|
||||
}
|
||||
|
||||
def :^:[A, N[x] >: M[x]](h: N[A]) = KCons(h, this)
|
||||
override def foldr[B](f: (M[_], B) => B, init: B): B = f(head, tail.foldr(f, init))
|
||||
}
|
||||
|
||||
sealed abstract class KNil extends KList[Nothing] {
|
||||
final type Transform[N[_]] = KNil
|
||||
final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil
|
||||
final def toList = Nil
|
||||
final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil))
|
||||
final def traverse[N[_], P[_]](f: Nothing ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] = np.pure(KNil)
|
||||
|
||||
final def traverse[N[_], P[_]](f: Nothing ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] =
|
||||
np.pure(KNil)
|
||||
}
|
||||
|
||||
case object KNil extends KNil {
|
||||
def :^:[M[_], H](h: M[H]): KCons[H, KNil, M] = KCons(h, this)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,7 +10,11 @@ trait RMap[K[_], V[_]] {
|
|||
def get[T](k: K[T]): Option[V[T]]
|
||||
def contains[T](k: K[T]): Boolean
|
||||
def toSeq: Seq[(K[_], V[_])]
|
||||
def toTypedSeq: Seq[TPair[_]] = toSeq.map { case (k: K[t], v) => TPair[t](k, v.asInstanceOf[V[t]]) }
|
||||
|
||||
def toTypedSeq: Seq[TPair[_]] = toSeq.map {
|
||||
case (k: K[t], v) => TPair[t](k, v.asInstanceOf[V[t]])
|
||||
}
|
||||
|
||||
def keys: Iterable[K[_]]
|
||||
def values: Iterable[V[_]]
|
||||
def isEmpty: Boolean
|
||||
|
|
@ -23,19 +27,24 @@ trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
|
|||
def remove[T](k: K[T]): IMap[K, V]
|
||||
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K, V]
|
||||
def mapValues[V2[_]](f: V ~> V2): IMap[K, V2]
|
||||
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l): (IMap[K, VL], IMap[K, VR])
|
||||
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l)
|
||||
: (IMap[K, VL], IMap[K, VR])
|
||||
}
|
||||
|
||||
trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
|
||||
def update[T](k: K[T], v: V[T]): Unit
|
||||
def remove[T](k: K[T]): Option[V[T]]
|
||||
def getOrUpdate[T](k: K[T], make: => V[T]): V[T]
|
||||
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T]
|
||||
}
|
||||
|
||||
object PMap {
|
||||
implicit def toFunction[K[_], V[_]](map: PMap[K, V]): K[_] => V[_] = k => map(k)
|
||||
def empty[K[_], V[_]]: PMap[K, V] = new DelegatingPMap[K, V](new mutable.HashMap)
|
||||
}
|
||||
|
||||
object IMap {
|
||||
|
||||
/**
|
||||
* Only suitable for K that is invariant in its type parameter.
|
||||
* Option and List keys are not suitable, for example,
|
||||
|
|
@ -43,7 +52,9 @@ object IMap {
|
|||
*/
|
||||
def empty[K[_], V[_]]: IMap[K, V] = new IMap0[K, V](Map.empty)
|
||||
|
||||
private[this] class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) extends AbstractRMap[K, V] with IMap[K, V] {
|
||||
private[this] class IMap0[K[_], V[_]](backing: Map[K[_], V[_]])
|
||||
extends AbstractRMap[K, V]
|
||||
with IMap[K, V] {
|
||||
def get[T](k: K[T]): Option[V[T]] = (backing get k).asInstanceOf[Option[V[T]]]
|
||||
def put[T](k: K[T], v: V[T]) = new IMap0[K, V](backing.updated(k, v))
|
||||
def remove[T](k: K[T]) = new IMap0[K, V](backing - k)
|
||||
|
|
@ -54,17 +65,17 @@ object IMap {
|
|||
def mapValues[V2[_]](f: V ~> V2) =
|
||||
new IMap0[K, V2](backing.mapValues(x => f(x)))
|
||||
|
||||
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) =
|
||||
{
|
||||
val mapped = backing.iterator.map {
|
||||
case (k, v) => f(v) match {
|
||||
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) = {
|
||||
val mapped = backing.iterator.map {
|
||||
case (k, v) =>
|
||||
f(v) match {
|
||||
case Left(l) => Left((k, l))
|
||||
case Right(r) => Right((k, r))
|
||||
}
|
||||
}
|
||||
val (l, r) = Util.separateE[(K[_], VL[_]), (K[_], VR[_])](mapped.toList)
|
||||
(new IMap0[K, VL](l.toMap), new IMap0[K, VR](r.toMap))
|
||||
}
|
||||
val (l, r) = Util.separateE[(K[_], VL[_]), (K[_], VR[_])](mapped.toList)
|
||||
(new IMap0[K, VL](l.toMap), new IMap0[K, VR](r.toMap))
|
||||
}
|
||||
|
||||
def toSeq = backing.toSeq
|
||||
def keys = backing.keys
|
||||
|
|
@ -85,17 +96,20 @@ abstract class AbstractRMap[K[_], V[_]] extends RMap[K, V] {
|
|||
* Option and List keys are not suitable, for example,
|
||||
* because None <:< Option[String] and None <: Option[Int].
|
||||
*/
|
||||
class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]]) extends AbstractRMap[K, V] with PMap[K, V] {
|
||||
class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]])
|
||||
extends AbstractRMap[K, V]
|
||||
with PMap[K, V] {
|
||||
def get[T](k: K[T]): Option[V[T]] = cast[T](backing.get(k))
|
||||
def update[T](k: K[T], v: V[T]): Unit = { backing(k) = v }
|
||||
def remove[T](k: K[T]) = cast(backing.remove(k))
|
||||
def getOrUpdate[T](k: K[T], make: => V[T]) = cast[T](backing.getOrElseUpdate(k, make))
|
||||
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] =
|
||||
{
|
||||
val v = f(this get k getOrElse init)
|
||||
update(k, v)
|
||||
v
|
||||
}
|
||||
|
||||
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] = {
|
||||
val v = f(this get k getOrElse init)
|
||||
update(k, v)
|
||||
v
|
||||
}
|
||||
|
||||
def toSeq = backing.toSeq
|
||||
def keys = backing.keys
|
||||
def values = backing.values
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@ -3,24 +3,25 @@ package sbt.internal.util
|
|||
object Signals {
|
||||
val CONT = "CONT"
|
||||
val INT = "INT"
|
||||
def withHandler[T](handler: () => Unit, signal: String = INT)(action: () => T): T =
|
||||
{
|
||||
val result =
|
||||
try {
|
||||
val signals = new Signals0
|
||||
signals.withHandler(signal, handler, action)
|
||||
} catch { case e: LinkageError => Right(action()) }
|
||||
|
||||
result match {
|
||||
case Left(e) => throw e
|
||||
case Right(v) => v
|
||||
}
|
||||
def withHandler[T](handler: () => Unit, signal: String = INT)(action: () => T): T = {
|
||||
val result =
|
||||
try {
|
||||
val signals = new Signals0
|
||||
signals.withHandler(signal, handler, action)
|
||||
} catch { case e: LinkageError => Right(action()) }
|
||||
|
||||
result match {
|
||||
case Left(e) => throw e
|
||||
case Right(v) => v
|
||||
}
|
||||
}
|
||||
|
||||
/** Helper interface so we can expose internals of signal-isms to others. */
|
||||
sealed trait Registration {
|
||||
def remove(): Unit
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a signal handler that can be removed later.
|
||||
* NOTE: Does not stack with other signal handlers!!!!
|
||||
|
|
@ -58,29 +59,25 @@ object Signals {
|
|||
|
||||
// Must only be referenced using a
|
||||
// try { } catch { case e: LinkageError => ... }
|
||||
// block to
|
||||
// block to
|
||||
private final class Signals0 {
|
||||
def supported(signal: String): Boolean =
|
||||
{
|
||||
import sun.misc.Signal
|
||||
try { new Signal(signal); true }
|
||||
catch { case e: IllegalArgumentException => false }
|
||||
}
|
||||
def supported(signal: String): Boolean = {
|
||||
import sun.misc.Signal
|
||||
try { new Signal(signal); true } catch { case e: IllegalArgumentException => false }
|
||||
}
|
||||
|
||||
// returns a LinkageError in `action` as Left(t) in order to avoid it being
|
||||
// incorrectly swallowed as missing Signal/SignalHandler
|
||||
def withHandler[T](signal: String, handler: () => Unit, action: () => T): Either[Throwable, T] =
|
||||
{
|
||||
import sun.misc.{ Signal, SignalHandler }
|
||||
val intSignal = new Signal(signal)
|
||||
val newHandler = new SignalHandler {
|
||||
def handle(sig: Signal): Unit = { handler() }
|
||||
}
|
||||
|
||||
val oldHandler = Signal.handle(intSignal, newHandler)
|
||||
|
||||
try Right(action())
|
||||
catch { case e: LinkageError => Left(e) }
|
||||
finally { Signal.handle(intSignal, oldHandler); () }
|
||||
def withHandler[T](signal: String, handler: () => Unit, action: () => T): Either[Throwable, T] = {
|
||||
import sun.misc.{ Signal, SignalHandler }
|
||||
val intSignal = new Signal(signal)
|
||||
val newHandler = new SignalHandler {
|
||||
def handle(sig: Signal): Unit = { handler() }
|
||||
}
|
||||
|
||||
val oldHandler = Signal.handle(intSignal, newHandler)
|
||||
|
||||
try Right(action())
|
||||
catch { case e: LinkageError => Left(e) } finally { Signal.handle(intSignal, oldHandler); () }
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,9 +20,10 @@ trait TypeFunctions {
|
|||
|
||||
def nestCon[M[_], N[_], G[_]](f: M ~> N): (M ∙ G)#l ~> (N ∙ G)#l =
|
||||
f.asInstanceOf[(M ∙ G)#l ~> (N ∙ G)#l] // implemented with a cast to avoid extra object+method call. castless version:
|
||||
|
||||
/* new ( (M ∙ G)#l ~> (N ∙ G)#l ) {
|
||||
def apply[T](mg: M[G[T]]): N[G[T]] = f(mg)
|
||||
}*/
|
||||
def apply[T](mg: M[G[T]]): N[G[T]] = f(mg)
|
||||
} */
|
||||
|
||||
implicit def toFn1[A, B](f: A => B): Fn1[A, B] = new Fn1[A, B] {
|
||||
def ∙[C](g: C => A) = f compose g
|
||||
|
|
@ -31,6 +32,7 @@ trait TypeFunctions {
|
|||
type Endo[T] = T => T
|
||||
type ~>|[A[_], B[_]] = A ~> Compose[Option, B]#Apply
|
||||
}
|
||||
|
||||
object TypeFunctions extends TypeFunctions
|
||||
|
||||
trait ~>[-A[_], +B[_]] { outer =>
|
||||
|
|
@ -40,11 +42,13 @@ trait ~>[-A[_], +B[_]] { outer =>
|
|||
final def ∙[C, D](g: C => D)(implicit ev: D <:< A[D]): C => B[D] = i => apply(ev(g(i)))
|
||||
final def fn[T] = (t: A[T]) => apply[T](t)
|
||||
}
|
||||
|
||||
object ~> {
|
||||
import TypeFunctions._
|
||||
val Id: Id ~> Id = new (Id ~> Id) { def apply[T](a: T): T = a }
|
||||
implicit def tcIdEquals: (Id ~> Id) = Id
|
||||
}
|
||||
|
||||
trait Fn1[A, B] {
|
||||
def ∙[C](g: C => A): C => B
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,11 +11,10 @@ object Util {
|
|||
def separateE[A, B](ps: Seq[Either[A, B]]): (Seq[A], Seq[B]) =
|
||||
separate(ps)(Types.idFun)
|
||||
|
||||
def separate[T, A, B](ps: Seq[T])(f: T => Either[A, B]): (Seq[A], Seq[B]) =
|
||||
{
|
||||
val (a, b) = ((Nil: Seq[A], Nil: Seq[B]) /: ps)((xs, y) => prependEither(xs, f(y)))
|
||||
(a.reverse, b.reverse)
|
||||
}
|
||||
def separate[T, A, B](ps: Seq[T])(f: T => Either[A, B]): (Seq[A], Seq[B]) = {
|
||||
val (a, b) = ((Nil: Seq[A], Nil: Seq[B]) /: ps)((xs, y) => prependEither(xs, f(y)))
|
||||
(a.reverse, b.reverse)
|
||||
}
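A quick usage sketch of `separate`, not part of the diff: Left values land in the first sequence and Right values in the second, with order preserved.

val (lefts, rights) =
  Util.separate(Seq(1, 2, 3, 4))(i => if (i % 2 == 0) Left(i) else Right(i))
// lefts == Seq(2, 4), rights == Seq(1, 3)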
def prependEither[A, B](acc: (Seq[A], Seq[B]), next: Either[A, B]): (Seq[A], Seq[B]) =
|
||||
next match {
|
||||
|
|
|
|||
|
|
@ -9,44 +9,46 @@ import Prop._
|
|||
import scala.collection.mutable.HashSet
|
||||
|
||||
object DagSpecification extends Properties("Dag") {
|
||||
property("No repeated nodes") = forAll { (dag: TestDag) => isSet(dag.topologicalSort) }
|
||||
property("Sort contains node") = forAll { (dag: TestDag) => dag.topologicalSort.contains(dag) }
|
||||
property("Dependencies precede node") = forAll { (dag: TestDag) => dependenciesPrecedeNodes(dag.topologicalSort) }
|
||||
property("No repeated nodes") = forAll { (dag: TestDag) =>
|
||||
isSet(dag.topologicalSort)
|
||||
}
|
||||
property("Sort contains node") = forAll { (dag: TestDag) =>
|
||||
dag.topologicalSort.contains(dag)
|
||||
}
|
||||
property("Dependencies precede node") = forAll { (dag: TestDag) =>
|
||||
dependenciesPrecedeNodes(dag.topologicalSort)
|
||||
}
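The three properties above encode the usual topological-sort invariants; a tiny hand-rolled instance (illustrative values) makes them concrete:

// b depends on a, so a must appear before b and no node may repeat.
val a = new TestDag(0, Nil)
val b = new TestDag(1, List(a))
// b.topologicalSort == List(a, b)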
implicit lazy val arbTestDag: Arbitrary[TestDag] = Arbitrary(Gen.sized(dagGen))
|
||||
private def dagGen(nodeCount: Int): Gen[TestDag] =
|
||||
{
|
||||
val nodes = new HashSet[TestDag]
|
||||
def nonterminalGen(p: Gen.Parameters): Gen[TestDag] =
|
||||
{
|
||||
val seed = rng.Seed.random()
|
||||
for {
|
||||
i <- 0 until nodeCount
|
||||
nextDeps <- Gen.someOf(nodes).apply(p, seed)
|
||||
} nodes += new TestDag(i, nextDeps)
|
||||
for (nextDeps <- Gen.someOf(nodes)) yield new TestDag(nodeCount, nextDeps)
|
||||
}
|
||||
Gen.parameterized(nonterminalGen)
|
||||
private def dagGen(nodeCount: Int): Gen[TestDag] = {
|
||||
val nodes = new HashSet[TestDag]
|
||||
def nonterminalGen(p: Gen.Parameters): Gen[TestDag] = {
|
||||
val seed = rng.Seed.random()
|
||||
for {
|
||||
i <- 0 until nodeCount
|
||||
nextDeps <- Gen.someOf(nodes).apply(p, seed)
|
||||
} nodes += new TestDag(i, nextDeps)
|
||||
for (nextDeps <- Gen.someOf(nodes)) yield new TestDag(nodeCount, nextDeps)
|
||||
}
|
||||
Gen.parameterized(nonterminalGen)
|
||||
}
|
||||
|
||||
private def isSet[T](c: Seq[T]) = Set(c: _*).size == c.size
|
||||
private def dependenciesPrecedeNodes(sort: List[TestDag]) =
|
||||
{
|
||||
val seen = new HashSet[TestDag]
|
||||
def iterate(remaining: List[TestDag]): Boolean =
|
||||
{
|
||||
remaining match {
|
||||
case Nil => true
|
||||
case node :: tail =>
|
||||
if (node.dependencies.forall(seen.contains) && !seen.contains(node)) {
|
||||
seen += node
|
||||
iterate(tail)
|
||||
} else
|
||||
false
|
||||
}
|
||||
}
|
||||
iterate(sort)
|
||||
private def dependenciesPrecedeNodes(sort: List[TestDag]) = {
|
||||
val seen = new HashSet[TestDag]
|
||||
def iterate(remaining: List[TestDag]): Boolean = {
|
||||
remaining match {
|
||||
case Nil => true
|
||||
case node :: tail =>
|
||||
if (node.dependencies.forall(seen.contains) && !seen.contains(node)) {
|
||||
seen += node
|
||||
iterate(tail)
|
||||
} else
|
||||
false
|
||||
}
|
||||
}
|
||||
iterate(sort)
|
||||
}
|
||||
}
|
||||
class TestDag(id: Int, val dependencies: Iterable[TestDag]) extends Dag[TestDag] {
|
||||
override def toString = id + "->" + dependencies.mkString("[", ",", "]")
|
||||
|
|
|
|||
|
|
@ -6,13 +6,13 @@ import Prop._
|
|||
object KeyTest extends Properties("AttributeKey") {
|
||||
property("equality") = {
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test", "description"), true) &&
|
||||
compare(AttributeKey[Int]("test", "a"), AttributeKey[Int]("test", "b"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("tests"), false) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Double]("test"), false) &&
|
||||
compare(AttributeKey[java.lang.Integer]("test"), AttributeKey[Int]("test"), false) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, String]]("test"), true) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, _]]("test"), false)
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test", "description"), true) &&
|
||||
compare(AttributeKey[Int]("test", "a"), AttributeKey[Int]("test", "b"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("tests"), false) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Double]("test"), false) &&
|
||||
compare(AttributeKey[java.lang.Integer]("test"), AttributeKey[Int]("test"), false) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, String]]("test"), true) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, _]]("test"), false)
|
||||
}
|
||||
|
||||
def compare(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
|
||||
|
|
@ -26,7 +26,7 @@ object KeyTest extends Properties("AttributeKey") {
|
|||
def compare0(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
|
||||
if (same) {
|
||||
("equality" |: (a == b)) &&
|
||||
("hash" |: (a.hashCode == b.hashCode))
|
||||
("hash" |: (a.hashCode == b.hashCode))
|
||||
} else
|
||||
("equality" |: (a != b))
|
||||
}
|
||||
}
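In plain terms, the cases above assert that an AttributeKey is identified by its label and its manifested type while the description is ignored; a hedged illustration:

val k1 = AttributeKey[Int]("timeout", "first description")
val k2 = AttributeKey[Int]("timeout", "another description")
// k1 == k2                             (same label and type, descriptions differ)
// k1 != AttributeKey[Long]("timeout")  (same label, different type)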
@ -8,7 +8,9 @@ object LiteralTest {
|
|||
def x[A[_], B[_]](f: A ~> B) = f
|
||||
|
||||
import Param._
|
||||
val f = x { (p: Param[Option, List]) => p.ret(p.in.toList) }
|
||||
val f = x { (p: Param[Option, List]) =>
|
||||
p.ret(p.in.toList)
|
||||
}
|
||||
|
||||
val a: List[Int] = f(Some(3))
|
||||
val b: List[String] = f(Some("aa"))
|
||||
|
|
|
|||
|
|
@ -15,4 +15,4 @@ object PMapTest {
|
|||
assert(y.head == 9)
|
||||
assert(y.tail.head == "a")
|
||||
assert(y.tail.tail == KNil)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ package sbt.internal.util
|
|||
import sbt.util.Show
|
||||
|
||||
/** Define our settings system */
|
||||
|
||||
// A basic scope indexed by an integer.
|
||||
final case class Scope(nestIndex: Int, idAtIndex: Int = 0)
|
||||
|
||||
|
|
@ -14,15 +13,15 @@ final case class Scope(nestIndex: Int, idAtIndex: Int = 0)
|
|||
// That would be a general pain.)
|
||||
case class SettingsExample() extends Init[Scope] {
|
||||
// Provides a way of showing a Scope+AttributeKey[_]
|
||||
val showFullKey: Show[ScopedKey[_]] = Show[ScopedKey[_]]((key: ScopedKey[_]) =>
|
||||
{
|
||||
s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"
|
||||
})
|
||||
val showFullKey: Show[ScopedKey[_]] = Show[ScopedKey[_]]((key: ScopedKey[_]) => {
|
||||
s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"
|
||||
})
|
||||
|
||||
// A sample delegation function that delegates to a Scope with a lower index.
|
||||
val delegates: Scope => Seq[Scope] = {
|
||||
case s @ Scope(index, proj) =>
|
||||
s +: (if (index <= 0) Nil else { (if (proj > 0) List(Scope(index)) else Nil) ++: delegates(Scope(index - 1)) })
|
||||
s +: (if (index <= 0) Nil
|
||||
else { (if (proj > 0) List(Scope(index)) else Nil) ++: delegates(Scope(index - 1)) })
|
||||
}
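Tracing the function above for a nested scope gives, for instance (a worked example, not part of the diff):

// delegates(Scope(2, 1)) == Seq(Scope(2, 1), Scope(2), Scope(1), Scope(0))
// i.e. the scope itself, then the same index with the default id (because proj > 0),
// then each lower index down to zero.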
// Not using this feature in this example.
|
||||
|
|
@ -32,7 +31,6 @@ case class SettingsExample() extends Init[Scope] {
|
|||
}
|
||||
|
||||
/** Usage Example **/
|
||||
|
||||
case class SettingsUsage(val settingsExample: SettingsExample) {
|
||||
import settingsExample._
|
||||
|
||||
|
|
@ -65,25 +63,25 @@ case class SettingsUsage(val settingsExample: SettingsExample) {
|
|||
}*/
|
||||
|
||||
/**
|
||||
* Output:
|
||||
* For the None results, we never defined the value and there was no value to delegate to.
|
||||
* For a3, we explicitly defined it to be 3.
|
||||
* a4 wasn't defined, so it delegates to a3 according to our delegates function.
|
||||
* b4 gets the value for a4 (which delegates to a3, so it is 3) and multiplies by 3
|
||||
* a5 is defined as the previous value of a5 + 1 and
|
||||
* since no previous value of a5 was defined, it delegates to a4, resulting in 3+1=4.
|
||||
* b5 isn't defined explicitly, so it delegates to b4 and is therefore equal to 9 as well
|
||||
* a0 = None
|
||||
* b0 = None
|
||||
* a1 = None
|
||||
* b1 = None
|
||||
* a2 = None
|
||||
* b2 = None
|
||||
* a3 = Some(3)
|
||||
* b3 = None
|
||||
* a4 = Some(3)
|
||||
* b4 = Some(9)
|
||||
* a5 = Some(4)
|
||||
* b5 = Some(9)
|
||||
*/
|
||||
* Output:
|
||||
* For the None results, we never defined the value and there was no value to delegate to.
|
||||
* For a3, we explicitly defined it to be 3.
|
||||
* a4 wasn't defined, so it delegates to a3 according to our delegates function.
|
||||
* b4 gets the value for a4 (which delegates to a3, so it is 3) and multiplies by 3
|
||||
* a5 is defined as the previous value of a5 + 1 and
|
||||
* since no previous value of a5 was defined, it delegates to a4, resulting in 3+1=4.
|
||||
* b5 isn't defined explicitly, so it delegates to b4 and is therefore equal to 9 as well
|
||||
* a0 = None
|
||||
* b0 = None
|
||||
* a1 = None
|
||||
* b1 = None
|
||||
* a2 = None
|
||||
* b2 = None
|
||||
* a3 = Some(3)
|
||||
* b3 = None
|
||||
* a4 = Some(3)
|
||||
* b4 = Some(9)
|
||||
* a5 = Some(4)
|
||||
* b5 = Some(9)
|
||||
*/
|
||||
}
|
||||
|
|
|
|||
|
|
@ -26,127 +26,131 @@ object SettingsTest extends Properties("settings") {
|
|||
}
|
||||
|
||||
property("Allows references to completed settings") = forAllNoShrink(30) { allowedReference }
|
||||
final def allowedReference(intermediate: Int): Prop =
|
||||
{
|
||||
val top = value(intermediate)
|
||||
def iterate(init: Initialize[Int]): Initialize[Int] =
|
||||
bind(init) { t =>
|
||||
if (t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(value(t - 1))
|
||||
}
|
||||
evaluate(setting(chk, iterate(top)) :: Nil); true
|
||||
}
|
||||
|
||||
property("Derived setting chain depending on (prev derived, normal setting)") = forAllNoShrink(Gen.choose(1, 100).label("numSettings")) { derivedSettings }
|
||||
final def derivedSettings(nr: Int): Prop =
|
||||
{
|
||||
val genScopedKeys = {
|
||||
// We want to generate lists of keys that DO NOT include the "ch" key we use to check things.
|
||||
val attrKeys = mkAttrKeys[Int](nr).filter(_.forall(_.label != "ch"))
|
||||
attrKeys map (_ map (ak => ScopedKey(Scope(0), ak)))
|
||||
}.label("scopedKeys").filter(_.nonEmpty)
|
||||
forAll(genScopedKeys) { scopedKeys =>
|
||||
try {
|
||||
// Note: it's unsafe to grab `last` if you haven't verified the set can't be empty.
|
||||
val last = scopedKeys.last
|
||||
val derivedSettings: Seq[Setting[Int]] = (
|
||||
for {
|
||||
List(scoped0, scoped1) <- chk :: scopedKeys sliding 2
|
||||
nextInit = if (scoped0 == chk) chk
|
||||
else (scoped0 zipWith chk) { (p, _) => p + 1 }
|
||||
} yield derive(setting(scoped1, nextInit))
|
||||
).toSeq
|
||||
|
||||
{
|
||||
// Note: This causes a cyclic reference error quite frequently.
|
||||
checkKey(last, Some(nr - 1), evaluate(setting(chk, value(0)) +: derivedSettings)) :| "Not derived?"
|
||||
} && {
|
||||
checkKey(last, None, evaluate(derivedSettings)) :| "Should not be derived"
|
||||
}
|
||||
} catch {
|
||||
case t: Throwable =>
|
||||
// TODO - For debugging only.
|
||||
t.printStackTrace(System.err)
|
||||
throw t
|
||||
final def allowedReference(intermediate: Int): Prop = {
|
||||
val top = value(intermediate)
|
||||
def iterate(init: Initialize[Int]): Initialize[Int] =
|
||||
bind(init) { t =>
|
||||
if (t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(value(t - 1))
|
||||
}
|
||||
evaluate(setting(chk, iterate(top)) :: Nil); true
|
||||
}
|
||||
|
||||
property("Derived setting chain depending on (prev derived, normal setting)") =
|
||||
forAllNoShrink(Gen.choose(1, 100).label("numSettings")) { derivedSettings }
|
||||
final def derivedSettings(nr: Int): Prop = {
|
||||
val genScopedKeys = {
|
||||
// We want to generate lists of keys that DO NOT include the "ch" key we use to check things.
|
||||
val attrKeys = mkAttrKeys[Int](nr).filter(_.forall(_.label != "ch"))
|
||||
attrKeys map (_ map (ak => ScopedKey(Scope(0), ak)))
|
||||
}.label("scopedKeys").filter(_.nonEmpty)
|
||||
forAll(genScopedKeys) { scopedKeys =>
|
||||
try {
|
||||
// Note: it's unsafe to grab `last` if you haven't verified the set can't be empty.
|
||||
val last = scopedKeys.last
|
||||
val derivedSettings: Seq[Setting[Int]] = (
|
||||
for {
|
||||
List(scoped0, scoped1) <- chk :: scopedKeys sliding 2
|
||||
nextInit = if (scoped0 == chk) chk
|
||||
else
|
||||
(scoped0 zipWith chk) { (p, _) =>
|
||||
p + 1
|
||||
}
|
||||
} yield derive(setting(scoped1, nextInit))
|
||||
).toSeq
|
||||
|
||||
{
|
||||
// Note: This causes a cyclic reference error quite frequently.
|
||||
checkKey(last, Some(nr - 1), evaluate(setting(chk, value(0)) +: derivedSettings)) :| "Not derived?"
|
||||
} && {
|
||||
checkKey(last, None, evaluate(derivedSettings)) :| "Should not be derived"
|
||||
}
|
||||
} catch {
|
||||
case t: Throwable =>
|
||||
// TODO - For debugging only.
|
||||
t.printStackTrace(System.err)
|
||||
throw t
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private def mkAttrKeys[T](nr: Int)(implicit mf: Manifest[T]): Gen[List[AttributeKey[T]]] =
|
||||
{
|
||||
import Gen._
|
||||
val nonEmptyAlphaStr =
|
||||
nonEmptyListOf(alphaChar).map(_.mkString).suchThat(_.forall(_.isLetter))
|
||||
private def mkAttrKeys[T](nr: Int)(implicit mf: Manifest[T]): Gen[List[AttributeKey[T]]] = {
|
||||
import Gen._
|
||||
val nonEmptyAlphaStr =
|
||||
nonEmptyListOf(alphaChar).map(_.mkString).suchThat(_.forall(_.isLetter))
|
||||
|
||||
(for {
|
||||
list <- Gen.listOfN(nr, nonEmptyAlphaStr) suchThat (l => l.size == l.distinct.size)
|
||||
item <- list
|
||||
} yield AttributeKey[T](item)).label(s"mkAttrKeys($nr)")
|
||||
(for {
|
||||
list <- Gen.listOfN(nr, nonEmptyAlphaStr) suchThat (l => l.size == l.distinct.size)
|
||||
item <- list
|
||||
} yield AttributeKey[T](item)).label(s"mkAttrKeys($nr)")
|
||||
}
|
||||
|
||||
property("Derived setting(s) replace DerivedSetting in the Seq[Setting[_]]") =
|
||||
derivedKeepsPosition
|
||||
final def derivedKeepsPosition: Prop = {
|
||||
val a: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("a"))
|
||||
val b: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("b"))
|
||||
val prop1 = {
|
||||
val settings: Seq[Setting[_]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5)),
|
||||
setting(b, value(8))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(8), ev)
|
||||
}
|
||||
val prop2 = {
|
||||
val settings: Seq[Setting[Int]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(5), ev)
|
||||
}
|
||||
prop1 && prop2
|
||||
}
|
||||
|
||||
property("Derived setting(s) replace DerivedSetting in the Seq[Setting[_]]") = derivedKeepsPosition
|
||||
final def derivedKeepsPosition: Prop =
|
||||
{
|
||||
val a: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("a"))
|
||||
val b: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("b"))
|
||||
val prop1 = {
|
||||
val settings: Seq[Setting[_]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5)),
|
||||
setting(b, value(8))
|
||||
)
|
||||
property(
|
||||
"DerivedSetting in ThisBuild scopes derived settings under projects thus allowing safe +="
|
||||
) = forAllNoShrink(Gen.choose(1, 100)) { derivedSettingsScope }
|
||||
final def derivedSettingsScope(nrProjects: Int): Prop = {
|
||||
forAll(mkAttrKeys[Int](2)) {
|
||||
case List(key, derivedKey) =>
|
||||
val projectKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), key)
|
||||
val projectDerivedKeys = for { proj <- 1 to nrProjects } yield
|
||||
ScopedKey(Scope(1, proj), derivedKey)
|
||||
val globalKey = ScopedKey(Scope(0), key)
|
||||
val globalDerivedKey = ScopedKey(Scope(0), derivedKey)
|
||||
// Each project defines an initial value, but the update is defined in globalKey.
|
||||
// However, the derived Settings that come from this should be scoped in each project.
|
||||
val settings: Seq[Setting[_]] =
|
||||
derive(setting(globalDerivedKey, settingsExample.map(globalKey)(_ + 1))) +: projectKeys
|
||||
.map(pk => setting(pk, value(0)))
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(8), ev)
|
||||
}
|
||||
val prop2 = {
|
||||
val settings: Seq[Setting[Int]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(5), ev)
|
||||
}
|
||||
prop1 && prop2
|
||||
}
|
||||
|
||||
property("DerivedSetting in ThisBuild scopes derived settings under projects thus allowing safe +=") = forAllNoShrink(Gen.choose(1, 100)) { derivedSettingsScope }
|
||||
final def derivedSettingsScope(nrProjects: Int): Prop =
|
||||
{
|
||||
forAll(mkAttrKeys[Int](2)) {
|
||||
case List(key, derivedKey) =>
|
||||
val projectKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), key)
|
||||
val projectDerivedKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), derivedKey)
|
||||
val globalKey = ScopedKey(Scope(0), key)
|
||||
val globalDerivedKey = ScopedKey(Scope(0), derivedKey)
|
||||
// Each project defines an initial value, but the update is defined in globalKey.
|
||||
// However, the derived Settings that come from this should be scoped in each project.
|
||||
val settings: Seq[Setting[_]] =
|
||||
derive(setting(globalDerivedKey, settingsExample.map(globalKey)(_ + 1))) +: projectKeys.map(pk => setting(pk, value(0)))
|
||||
val ev = evaluate(settings)
|
||||
// Also check that the key has no value at the "global" scope
|
||||
val props = for { pk <- projectDerivedKeys } yield checkKey(pk, Some(1), ev)
|
||||
checkKey(globalDerivedKey, None, ev) && Prop.all(props: _*)
|
||||
}
|
||||
// Also check that the key has no value at the "global" scope
|
||||
val props = for { pk <- projectDerivedKeys } yield checkKey(pk, Some(1), ev)
|
||||
checkKey(globalDerivedKey, None, ev) && Prop.all(props: _*)
|
||||
}
|
||||
}
|
||||
|
||||
// Circular (dynamic) references currently loop infinitely.
|
||||
// This is the expected behavior (detecting dynamic cycles is expensive),
|
||||
// but it may be necessary to provide an option to detect them (with a performance hit)
|
||||
// This would test that cycle detection.
|
||||
// property("Catches circular references") = forAll(chainLengthGen) { checkCircularReferences _ }
|
||||
final def checkCircularReferences(intermediate: Int): Prop =
|
||||
{
|
||||
val ccr = new CCR(intermediate)
|
||||
try { evaluate(setting(chk, ccr.top) :: Nil); false }
|
||||
catch { case e: java.lang.Exception => true }
|
||||
// property("Catches circular references") = forAll(chainLengthGen) { checkCircularReferences _ }
|
||||
final def checkCircularReferences(intermediate: Int): Prop = {
|
||||
val ccr = new CCR(intermediate)
|
||||
try { evaluate(setting(chk, ccr.top) :: Nil); false } catch {
|
||||
case e: java.lang.Exception => true
|
||||
}
|
||||
}
|
||||
|
||||
def tests =
|
||||
for (i <- 0 to 5; k <- Seq(a, b)) yield {
|
||||
|
|
@ -154,7 +158,8 @@ object SettingsTest extends Properties("settings") {
|
|||
checkKey[Int](ScopedKey(Scope(i), k), expected, applied)
|
||||
}
|
||||
|
||||
lazy val expectedValues = None :: None :: None :: None :: None :: None :: Some(3) :: None :: Some(3) :: Some(9) :: Some(4) :: Some(9) :: Nil
|
||||
lazy val expectedValues = None :: None :: None :: None :: None :: None :: Some(3) :: None ::
|
||||
Some(3) :: Some(9) :: Some(4) :: Some(9) :: Nil
|
||||
|
||||
lazy val ch = AttributeKey[Int]("ch")
|
||||
lazy val chk = ScopedKey(Scope(0), ch)
|
||||
|
|
@ -165,26 +170,25 @@ object SettingsTest extends Properties("settings") {
|
|||
bind(prev) { v =>
|
||||
if (v <= 0) prev else chainBind(value(v - 1))
|
||||
}
|
||||
def singleIntTest(i: Initialize[Int], expected: Int) =
|
||||
{
|
||||
val eval = evaluate(setting(chk, i) :: Nil)
|
||||
checkKey(chk, Some(expected), eval)
|
||||
}
|
||||
def singleIntTest(i: Initialize[Int], expected: Int) = {
|
||||
val eval = evaluate(setting(chk, i) :: Nil)
|
||||
checkKey(chk, Some(expected), eval)
|
||||
}
|
||||
|
||||
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) =
|
||||
{
|
||||
val value = settings.get(key.scope, key.key)
|
||||
("Key: " + key) |:
|
||||
("Value: " + value) |:
|
||||
("Expected: " + expected) |:
|
||||
(value == expected)
|
||||
}
|
||||
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) = {
|
||||
val value = settings.get(key.scope, key.key)
|
||||
("Key: " + key) |:
|
||||
("Value: " + value) |:
|
||||
("Expected: " + expected) |:
|
||||
(value == expected)
|
||||
}
|
||||
|
||||
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
|
||||
try { make(settings)(delegates, scopeLocal, showFullKey) }
|
||||
catch { case e: Throwable => e.printStackTrace; throw e }
|
||||
try { make(settings)(delegates, scopeLocal, showFullKey) } catch {
|
||||
case e: Throwable => e.printStackTrace; throw e
|
||||
}
|
||||
}
|
||||
// This setup is a workaround for module synchronization issues
|
||||
// This setup is a workaround for module synchronization issues
|
||||
final class CCR(intermediate: Int) {
|
||||
import SettingsTest.settingsExample._
|
||||
lazy val top = iterate(value(intermediate), intermediate)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,10 @@ abstract class JLine extends LineReader {
|
|||
protected[this] def reader: ConsoleReader
|
||||
protected[this] def injectThreadSleep: Boolean
|
||||
protected[this] val in: InputStream = JLine.makeInputStream(injectThreadSleep)
|
||||
def readLine(prompt: String, mask: Option[Char] = None) = JLine.withJLine { unsynchronizedReadLine(prompt, mask) }
|
||||
|
||||
def readLine(prompt: String, mask: Option[Char] = None) = JLine.withJLine {
|
||||
unsynchronizedReadLine(prompt, mask)
|
||||
}
|
||||
|
||||
private[this] def unsynchronizedReadLine(prompt: String, mask: Option[Char]): Option[String] =
|
||||
readLineWithHistory(prompt, mask) map { x =>
|
||||
|
|
@ -25,34 +28,35 @@ abstract class JLine extends LineReader {
|
|||
private[this] def readLineWithHistory(prompt: String, mask: Option[Char]): Option[String] =
|
||||
reader.getHistory match {
|
||||
case fh: FileHistory =>
|
||||
try { readLineDirect(prompt, mask) }
|
||||
finally { fh.flush() }
|
||||
try readLineDirect(prompt, mask)
|
||||
finally fh.flush()
|
||||
case _ => readLineDirect(prompt, mask)
|
||||
}
|
||||
|
||||
private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] =
|
||||
if (handleCONT)
|
||||
Signals.withHandler(() => resume(), signal = Signals.CONT)(() => readLineDirectRaw(prompt, mask))
|
||||
Signals.withHandler(() => resume(), signal = Signals.CONT)(() =>
|
||||
readLineDirectRaw(prompt, mask))
|
||||
else
|
||||
readLineDirectRaw(prompt, mask)
|
||||
private[this] def readLineDirectRaw(prompt: String, mask: Option[Char]): Option[String] =
|
||||
{
|
||||
val newprompt = handleMultilinePrompt(prompt)
|
||||
try {
|
||||
mask match {
|
||||
case Some(m) => Option(reader.readLine(newprompt, m))
|
||||
case None => Option(reader.readLine(newprompt))
|
||||
}
|
||||
} catch {
|
||||
case e: InterruptedException => Option("")
|
||||
|
||||
private[this] def readLineDirectRaw(prompt: String, mask: Option[Char]): Option[String] = {
|
||||
val newprompt = handleMultilinePrompt(prompt)
|
||||
try {
|
||||
mask match {
|
||||
case Some(m) => Option(reader.readLine(newprompt, m))
|
||||
case None => Option(reader.readLine(newprompt))
|
||||
}
|
||||
} catch {
|
||||
case e: InterruptedException => Option("")
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def handleMultilinePrompt(prompt: String): String = {
|
||||
val lines = """\r?\n""".r.split(prompt)
|
||||
lines.length match {
|
||||
case 0 | 1 => prompt
|
||||
case _ =>
|
||||
case _ =>
|
||||
// Workaround for regression jline/jline2#205
|
||||
reader.getOutput.write(lines.init.mkString("\n") + "\n")
|
||||
lines.last
|
||||
|
|
@ -66,6 +70,7 @@ abstract class JLine extends LineReader {
|
|||
reader.flush()
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] object JLine {
|
||||
private[this] val TerminalProperty = "jline.terminal"
|
||||
|
||||
|
|
@ -75,18 +80,19 @@ private[sbt] object JLine {
|
|||
// older Scala, since it shaded classes but not the system property
|
||||
private[sbt] def fixTerminalProperty(): Unit = {
|
||||
val newValue = System.getProperty(TerminalProperty) match {
|
||||
case "jline.UnixTerminal" => "unix"
|
||||
case "jline.UnixTerminal" => "unix"
|
||||
case null if System.getProperty("sbt.cygwin") != null => "unix"
|
||||
case "jline.WindowsTerminal" => "windows"
|
||||
case "jline.AnsiWindowsTerminal" => "windows"
|
||||
case "jline.UnsupportedTerminal" => "none"
|
||||
case x => x
|
||||
case "jline.WindowsTerminal" => "windows"
|
||||
case "jline.AnsiWindowsTerminal" => "windows"
|
||||
case "jline.UnsupportedTerminal" => "none"
|
||||
case x => x
|
||||
}
|
||||
if (newValue != null) System.setProperty(TerminalProperty, newValue)
|
||||
()
|
||||
}
|
||||
|
||||
protected[this] val originalIn = new FileInputStream(FileDescriptor.in)
|
||||
|
||||
private[sbt] def makeInputStream(injectThreadSleep: Boolean): InputStream =
|
||||
if (injectThreadSleep) new InputStreamWrapper(originalIn, Duration("50 ms"))
|
||||
else originalIn
|
||||
|
|
@ -94,11 +100,13 @@ private[sbt] object JLine {
|
|||
// When calling this, ensure that enableEcho has been or will be called.
|
||||
// TerminalFactory.get will initialize the terminal to disable echo.
|
||||
private def terminal = jline.TerminalFactory.get
|
||||
|
||||
private def withTerminal[T](f: jline.Terminal => T): T =
|
||||
synchronized {
|
||||
val t = terminal
|
||||
t.synchronized { f(t) }
|
||||
}
|
||||
|
||||
/**
|
||||
* For accessing the JLine Terminal object.
|
||||
* This ensures synchronized access as well as re-enabling echo after getting the Terminal.
|
||||
|
|
@ -108,7 +116,9 @@ private[sbt] object JLine {
|
|||
t.restore
|
||||
f(t)
|
||||
}
|
||||
|
||||
def createReader(): ConsoleReader = createReader(None, JLine.makeInputStream(true))
|
||||
|
||||
def createReader(historyPath: Option[File], in: InputStream): ConsoleReader =
|
||||
usingTerminal { t =>
|
||||
val cr = new ConsoleReader(in, System.out)
|
||||
|
|
@ -122,41 +132,42 @@ private[sbt] object JLine {
|
|||
cr.setHistory(h)
|
||||
cr
|
||||
}
|
||||
|
||||
def withJLine[T](action: => T): T =
|
||||
withTerminal { t =>
|
||||
t.init
|
||||
try { action }
|
||||
finally { t.restore }
|
||||
try { action } finally { t.restore }
|
||||
}
|
||||
|
||||
def simple(
|
||||
historyPath: Option[File],
|
||||
handleCONT: Boolean = HandleCONT,
|
||||
injectThreadSleep: Boolean = false
|
||||
historyPath: Option[File],
|
||||
handleCONT: Boolean = HandleCONT,
|
||||
injectThreadSleep: Boolean = false
|
||||
): SimpleReader = new SimpleReader(historyPath, handleCONT, injectThreadSleep)
|
||||
|
||||
val MaxHistorySize = 500
|
||||
val HandleCONT = !java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT)
|
||||
|
||||
val HandleCONT =
|
||||
!java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT)
|
||||
}
|
||||
|
||||
private[sbt] class InputStreamWrapper(is: InputStream, val poll: Duration) extends FilterInputStream(is) {
|
||||
@tailrec
|
||||
final override def read(): Int =
|
||||
private[sbt] class InputStreamWrapper(is: InputStream, val poll: Duration)
|
||||
extends FilterInputStream(is) {
|
||||
@tailrec final override def read(): Int =
|
||||
if (is.available() != 0) is.read()
|
||||
else {
|
||||
Thread.sleep(poll.toMillis)
|
||||
read()
|
||||
}
|
||||
|
||||
@tailrec
|
||||
final override def read(b: Array[Byte]): Int =
|
||||
@tailrec final override def read(b: Array[Byte]): Int =
|
||||
if (is.available() != 0) is.read(b)
|
||||
else {
|
||||
Thread.sleep(poll.toMillis)
|
||||
read(b)
|
||||
}
|
||||
|
||||
@tailrec
|
||||
final override def read(b: Array[Byte], off: Int, len: Int): Int =
|
||||
@tailrec final override def read(b: Array[Byte], off: Int, len: Int): Int =
|
||||
if (is.available() != 0) is.read(b, off, len)
|
||||
else {
|
||||
Thread.sleep(poll.toMillis)
|
||||
|
|
@ -167,23 +178,26 @@ private[sbt] class InputStreamWrapper(is: InputStream, val poll: Duration) exten
|
|||
trait LineReader {
|
||||
def readLine(prompt: String, mask: Option[Char] = None): Option[String]
|
||||
}
|
||||
|
||||
final class FullReader(
|
||||
historyPath: Option[File],
|
||||
complete: Parser[_],
|
||||
val handleCONT: Boolean = JLine.HandleCONT,
|
||||
val injectThreadSleep: Boolean = false
|
||||
historyPath: Option[File],
|
||||
complete: Parser[_],
|
||||
val handleCONT: Boolean = JLine.HandleCONT,
|
||||
val injectThreadSleep: Boolean = false
|
||||
) extends JLine {
|
||||
protected[this] val reader =
|
||||
{
|
||||
val cr = JLine.createReader(historyPath, in)
|
||||
sbt.internal.util.complete.JLineCompletion.installCustomCompletor(cr, complete)
|
||||
cr
|
||||
}
|
||||
protected[this] val reader = {
|
||||
val cr = JLine.createReader(historyPath, in)
|
||||
sbt.internal.util.complete.JLineCompletion.installCustomCompletor(cr, complete)
|
||||
cr
|
||||
}
|
||||
}
|
||||
|
||||
class SimpleReader private[sbt] (historyPath: Option[File], val handleCONT: Boolean, val injectThreadSleep: Boolean) extends JLine {
|
||||
class SimpleReader private[sbt] (
|
||||
historyPath: Option[File],
|
||||
val handleCONT: Boolean,
|
||||
val injectThreadSleep: Boolean
|
||||
) extends JLine {
|
||||
protected[this] val reader = JLine.createReader(historyPath, in)
|
||||
|
||||
}
|
||||
object SimpleReader extends SimpleReader(None, JLine.HandleCONT, false)
|
||||
|
||||
object SimpleReader extends SimpleReader(None, JLine.HandleCONT, false)
|
||||
|
|
|
|||
|
|
@ -11,18 +11,28 @@ package complete
|
|||
*/
|
||||
sealed trait Completions {
|
||||
def get: Set[Completion]
|
||||
|
||||
final def x(o: Completions): Completions = flatMap(_ x o)
|
||||
final def ++(o: Completions): Completions = Completions(get ++ o.get)
|
||||
final def +:(o: Completion): Completions = Completions(get + o)
|
||||
final def filter(f: Completion => Boolean): Completions = Completions(get filter f)
|
||||
final def filterS(f: String => Boolean): Completions = filter(c => f(c.append))
|
||||
|
||||
override def toString = get.mkString("Completions(", ",", ")")
|
||||
final def flatMap(f: Completion => Completions): Completions = Completions(get.flatMap(c => f(c).get))
|
||||
|
||||
final def flatMap(f: Completion => Completions): Completions =
|
||||
Completions(get.flatMap(c => f(c).get))
|
||||
|
||||
final def map(f: Completion => Completion): Completions = Completions(get map f)
|
||||
|
||||
override final def hashCode = get.hashCode
|
||||
override final def equals(o: Any) = o match { case c: Completions => get == c.get; case _ => false }
|
||||
override final def equals(o: Any) = o match {
|
||||
case c: Completions => get == c.get; case _ => false
|
||||
}
|
||||
}
|
||||
|
||||
object Completions {
|
||||
|
||||
/** Returns a lazy Completions instance using the provided Completion Set. */
|
||||
def apply(cs: => Set[Completion]): Completions = new Completions {
|
||||
lazy val get = cs
|
||||
|
|
@ -45,6 +55,7 @@ object Completions {
|
|||
|
||||
/** Returns a strict Completions instance containing only the provided Completion.*/
|
||||
def single(c: Completion): Completions = strict(Set.empty + c)
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -63,33 +74,45 @@ object Completions {
|
|||
* 2) the full token being completed, which is useful for presenting a user with choices to select
|
||||
*/
|
||||
sealed trait Completion {
|
||||
|
||||
/** The proposed suffix to append to the existing input to complete the last token in the input.*/
|
||||
def append: String
|
||||
|
||||
/** The string to present to the user to represent the full token being suggested.*/
|
||||
def display: String
|
||||
|
||||
/** True if this Completion is suggesting the empty string.*/
|
||||
def isEmpty: Boolean
|
||||
|
||||
/** Appends the completions in `o` with the completions in this Completion.*/
|
||||
def ++(o: Completion): Completion = Completion.concat(this, o)
|
||||
final def x(o: Completions): Completions = if (Completion evaluatesRight this) o.map(this ++ _) else Completions.strict(Set.empty + this)
|
||||
|
||||
final def x(o: Completions): Completions =
|
||||
if (Completion evaluatesRight this) o.map(this ++ _) else Completions.strict(Set.empty + this)
|
||||
|
||||
override final lazy val hashCode = Completion.hashCode(this)
|
||||
override final def equals(o: Any) = o match { case c: Completion => Completion.equal(this, c); case _ => false }
|
||||
override final def equals(o: Any) = o match {
|
||||
case c: Completion => Completion.equal(this, c); case _ => false
|
||||
}
|
||||
}
|
||||
|
||||
final class DisplayOnly(val display: String) extends Completion {
|
||||
def isEmpty = display.isEmpty
|
||||
def append = ""
|
||||
override def toString = "{" + display + "}"
|
||||
}
|
||||
|
||||
final class Token(val display: String, val append: String) extends Completion {
|
||||
def isEmpty = display.isEmpty && append.isEmpty
|
||||
override final def toString = "[" + display + "]++" + append
|
||||
}
|
||||
|
||||
final class Suggestion(val append: String) extends Completion {
|
||||
def isEmpty = append.isEmpty
|
||||
def display = append
|
||||
override def toString = append
|
||||
}
|
||||
|
||||
object Completion {
|
||||
def concat(a: Completion, b: Completion): Completion =
|
||||
(a, b) match {
|
||||
|
|
@ -98,6 +121,7 @@ object Completion {
|
|||
case _ if a.isEmpty => b
|
||||
case _ => a
|
||||
}
|
||||
|
||||
def evaluatesRight(a: Completion): Boolean =
|
||||
a match {
|
||||
case _: Suggestion => true
|
||||
|
|
@ -127,7 +151,8 @@ object Completion {
|
|||
def displayOnly(value: => String): Completion = new DisplayOnly(value)
|
||||
|
||||
// TODO: make strict in 0.13.0 to match Token
|
||||
def token(prepend: => String, append: => String): Completion = new Token(prepend + append, append)
|
||||
def token(prepend: => String, append: => String): Completion =
|
||||
new Token(prepend + append, append)
|
||||
|
||||
/** @since 0.12.1 */
|
||||
def tokenDisplay(append: String, display: String): Completion = new Token(display, append)
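A brief sketch of how the three factory methods above differ (the values are illustrative):

val d = Completion.displayOnly("<file name>")               // shown to the user, appends nothing
val t = Completion.token(prepend = "comp", append = "ile")  // displays "compile", appends "ile"
val s = Completion.tokenDisplay(append = "ile", display = "compile") // display given directly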
@ -5,12 +5,23 @@ import java.lang.Character.{ toLowerCase => lower }
|
|||
|
||||
/** @author Paul Phillips*/
|
||||
object EditDistance {
|
||||
|
||||
/**
|
||||
* Translated from the java version at
|
||||
* http://www.merriampark.com/ld.htm
|
||||
* which is declared to be public domain.
|
||||
*/
|
||||
def levenshtein(s: String, t: String, insertCost: Int = 1, deleteCost: Int = 1, subCost: Int = 1, transposeCost: Int = 1, matchCost: Int = 0, caseCost: Int = 1, transpositions: Boolean = false): Int = {
|
||||
def levenshtein(
|
||||
s: String,
|
||||
t: String,
|
||||
insertCost: Int = 1,
|
||||
deleteCost: Int = 1,
|
||||
subCost: Int = 1,
|
||||
transposeCost: Int = 1,
|
||||
matchCost: Int = 0,
|
||||
caseCost: Int = 1,
|
||||
transpositions: Boolean = false
|
||||
): Int = {
|
||||
val n = s.length
|
||||
val m = t.length
|
||||
if (n == 0) return m
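The hunk ends mid-function, but with the default unit costs this is the classic edit distance; a usage sketch:

val distance = EditDistance.levenshtein("kitten", "sitting") // == 3 (two substitutions, one insertion)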
@ -10,6 +10,7 @@ import sbt.io.IO
|
|||
* TAB key in the console.
|
||||
*/
|
||||
trait ExampleSource {
|
||||
|
||||
/**
|
||||
* @return a (possibly lazy) list of completion example strings. These strings are continuations of user's input. The
|
||||
* user's input is incremented with calls to [[withAddedPrefix]].
|
||||
|
|
@ -22,6 +23,7 @@ trait ExampleSource {
|
|||
* the just added prefix).
|
||||
*/
|
||||
def withAddedPrefix(addedPrefix: String): ExampleSource
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -29,7 +31,8 @@ trait ExampleSource {
|
|||
* @param examples the examples that will be displayed to the user when they press the TAB key.
|
||||
*/
|
||||
sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSource {
|
||||
override def withAddedPrefix(addedPrefix: String): ExampleSource = FixedSetExamples(examplesWithRemovedPrefix(addedPrefix))
|
||||
override def withAddedPrefix(addedPrefix: String): ExampleSource =
|
||||
FixedSetExamples(examplesWithRemovedPrefix(addedPrefix))
|
||||
|
||||
override def apply(): Iterable[String] = examples
|
||||
|
|
|||
class FileExamples(base: File, prefix: String = "") extends ExampleSource {
|
||||
override def apply(): Stream[String] = files(base).map(_ substring prefix.length)
|
||||
|
||||
override def withAddedPrefix(addedPrefix: String): FileExamples = new FileExamples(base, prefix + addedPrefix)
|
||||
override def withAddedPrefix(addedPrefix: String): FileExamples =
|
||||
new FileExamples(base, prefix + addedPrefix)
|
||||
|
||||
protected def files(directory: File): Stream[String] = {
|
||||
val childPaths = IO.listFiles(directory).toStream
|
||||
val prefixedDirectChildPaths = childPaths map { IO.relativize(base, _).get } filter { _ startsWith prefix }
|
||||
val dirsToRecurseInto = childPaths filter { _.isDirectory } map { IO.relativize(base, _).get } filter { dirStartsWithPrefix }
|
||||
val prefixedDirectChildPaths = childPaths map { IO.relativize(base, _).get } filter {
|
||||
_ startsWith prefix
|
||||
}
|
||||
val dirsToRecurseInto = childPaths filter { _.isDirectory } map { IO.relativize(base, _).get } filter {
|
||||
dirStartsWithPrefix
|
||||
}
|
||||
prefixedDirectChildPaths append dirsToRecurseInto.flatMap(dir => files(new File(base, dir)))
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -7,13 +7,20 @@ package complete
|
|||
import History.number
|
||||
import java.io.File
|
||||
|
||||
final class History private (val lines: IndexedSeq[String], val path: Option[File], error: String => Unit) {
|
||||
final class History private (
|
||||
val lines: IndexedSeq[String],
|
||||
val path: Option[File],
|
||||
error: String => Unit
|
||||
) {
|
||||
private def reversed = lines.reverse
|
||||
|
||||
def all: Seq[String] = lines
|
||||
def size = lines.length
|
||||
def !! : Option[String] = !-(1)
|
||||
def apply(i: Int): Option[String] = if (0 <= i && i < size) Some(lines(i)) else { sys.error("Invalid history index: " + i) }
|
||||
|
||||
def apply(i: Int): Option[String] =
|
||||
if (0 <= i && i < size) Some(lines(i)) else { sys.error("Invalid history index: " + i) }
|
||||
|
||||
def !(i: Int): Option[String] = apply(i)
|
||||
|
||||
def !(s: String): Option[String] =
|
||||
|
|
@ -21,6 +28,7 @@ final class History private (val lines: IndexedSeq[String], val path: Option[Fil
|
|||
case Some(n) => if (n < 0) !-(-n) else apply(n)
|
||||
case None => nonEmpty(s) { reversed.find(_.startsWith(s)) }
|
||||
}
|
||||
|
||||
def !-(n: Int): Option[String] = apply(size - n - 1)
|
||||
|
||||
def !?(s: String): Option[String] = nonEmpty(s) { reversed.drop(1).find(_.contains(s)) }
|
||||
|
|
@ -32,13 +40,17 @@ final class History private (val lines: IndexedSeq[String], val path: Option[Fil
|
|||
act
|
||||
|
||||
def list(historySize: Int, show: Int): Seq[String] =
|
||||
lines.toList.drop(scala.math.max(0, lines.size - historySize)).zipWithIndex.map { case (line, number) => " " + number + " " + line }.takeRight(show max 1)
|
||||
lines.toList
|
||||
.drop(scala.math.max(0, lines.size - historySize))
|
||||
.zipWithIndex
|
||||
.map { case (line, number) => " " + number + " " + line }
|
||||
.takeRight(show max 1)
|
||||
}
|
||||
|
||||
object History {
|
||||
def apply(lines: Seq[String], path: Option[File], error: String => Unit): History = new History(lines.toIndexedSeq, path, sys.error)
|
||||
def apply(lines: Seq[String], path: Option[File], error: String => Unit): History =
|
||||
new History(lines.toIndexedSeq, path, sys.error)
|
||||
|
||||
def number(s: String): Option[Int] =
|
||||
try { Some(s.toInt) }
|
||||
catch { case e: NumberFormatException => None }
|
||||
try { Some(s.toInt) } catch { case e: NumberFormatException => None }
|
||||
}
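A hedged sketch of the lookup operators; remember that the last line is normally the history command currently being executed, so `!!` points at the entry before it:

val h = History(Vector("compile", "test", "!!"), None, (s: String) => System.err.println(s))
// h.!!       == Some("test")      (previous command)
// h ! "comp" == Some("compile")   (most recent entry starting with "comp")
// h !? "est" == Some("test")      (most recent entry, excluding the last, containing "est")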
@ -35,9 +35,14 @@ object HistoryCommands {
|
|||
StartsWithString -> "Execute the most recent command starting with 'string'",
|
||||
ContainsString -> "Execute the most recent command containing 'string'"
|
||||
)
|
||||
def helpString = "History commands:\n " + (descriptions.map { case (c, d) => c + " " + d }).mkString("\n ")
|
||||
def printHelp(): Unit =
|
||||
println(helpString)
|
||||
|
||||
def helpString =
|
||||
"History commands:\n " + (descriptions
|
||||
.map { case (c, d) => c + " " + d })
|
||||
.mkString("\n ")
|
||||
|
||||
def printHelp(): Unit = println(helpString)
|
||||
|
||||
def printHistory(history: complete.History, historySize: Int, show: Int): Unit =
|
||||
history.list(historySize, show).foreach(println)
|
||||
|
||||
|
|
@ -46,26 +51,32 @@ object HistoryCommands {
|
|||
val MaxLines = 500
|
||||
lazy val num = token(NatBasic, "<integer>")
|
||||
lazy val last = Last ^^^ { execute(_.!!) }
|
||||
lazy val list = ListCommands ~> (num ?? Int.MaxValue) map { show => (h: History) => { printHistory(h, MaxLines, show); Some(Nil) }
|
||||
|
||||
lazy val list = ListCommands ~> (num ?? Int.MaxValue) map { show => (h: History) =>
|
||||
{ printHistory(h, MaxLines, show); Some(Nil) }
|
||||
}
|
||||
|
||||
lazy val execStr = flag('?') ~ token(any.+.string, "<string>") map {
|
||||
case (contains, str) =>
|
||||
execute(h => if (contains) h !? str else h ! str)
|
||||
}
|
||||
|
||||
lazy val execInt = flag('-') ~ num map {
|
||||
case (neg, value) =>
|
||||
execute(h => if (neg) h !- value else h ! value)
|
||||
}
|
||||
|
||||
lazy val help = success((h: History) => { printHelp(); Some(Nil) })
|
||||
|
||||
def execute(f: History => Option[String]): History => Option[List[String]] = (h: History) =>
|
||||
{
|
||||
val command = f(h).filterNot(_.startsWith(Start))
|
||||
val lines = h.lines.toArray
|
||||
command.foreach(lines(lines.length - 1) = _)
|
||||
h.path foreach { h => IO.writeLines(h, lines) }
|
||||
Some(command.toList)
|
||||
def execute(f: History => Option[String]): History => Option[List[String]] = (h: History) => {
|
||||
val command = f(h).filterNot(_.startsWith(Start))
|
||||
val lines = h.lines.toArray
|
||||
command.foreach(lines(lines.length - 1) = _)
|
||||
h.path foreach { h =>
|
||||
IO.writeLines(h, lines)
|
||||
}
|
||||
Some(command.toList)
|
||||
}
|
||||
|
||||
val actionParser: Parser[complete.History => Option[List[String]]] =
|
||||
Start ~> (help | last | execInt | list | execStr) // execStr must come last
|
||||
|
|
|
|||
|
|
@ -12,19 +12,31 @@ import collection.JavaConversions
|
|||
object JLineCompletion {
|
||||
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
|
||||
installCustomCompletor(reader)(parserAsCompletor(parser))
|
||||
def installCustomCompletor(reader: ConsoleReader)(complete: (String, Int) => (Seq[String], Seq[String])): Unit =
|
||||
installCustomCompletor(customCompletor(complete), reader)
|
||||
def installCustomCompletor(complete: (ConsoleReader, Int) => Boolean, reader: ConsoleReader): Unit =
|
||||
{
|
||||
reader.removeCompleter(DummyCompletor)
|
||||
reader.addCompleter(DummyCompletor)
|
||||
reader.setCompletionHandler(new CustomHandler(complete))
|
||||
}
|
||||
|
||||
private[this] final class CustomHandler(completeImpl: (ConsoleReader, Int) => Boolean) extends CompletionHandler {
|
||||
def installCustomCompletor(reader: ConsoleReader)(
|
||||
complete: (String, Int) => (Seq[String], Seq[String])
|
||||
): Unit =
|
||||
installCustomCompletor(customCompletor(complete), reader)
|
||||
|
||||
def installCustomCompletor(
|
||||
complete: (ConsoleReader, Int) => Boolean,
|
||||
reader: ConsoleReader
|
||||
): Unit = {
|
||||
reader.removeCompleter(DummyCompletor)
|
||||
reader.addCompleter(DummyCompletor)
|
||||
reader.setCompletionHandler(new CustomHandler(complete))
|
||||
}
|
||||
|
||||
private[this] final class CustomHandler(completeImpl: (ConsoleReader, Int) => Boolean)
|
||||
extends CompletionHandler {
|
||||
private[this] var previous: Option[(String, Int)] = None
|
||||
private[this] var level: Int = 1
|
||||
override def complete(reader: ConsoleReader, candidates: java.util.List[CharSequence], position: Int) = {
|
||||
|
||||
override def complete(
|
||||
reader: ConsoleReader,
|
||||
candidates: java.util.List[CharSequence],
|
||||
position: Int
|
||||
) = {
|
||||
val current = Some(bufferSnapshot(reader))
|
||||
level = if (current == previous) level + 1 else 1
|
||||
previous = current
|
||||
|
|
@ -42,68 +54,74 @@ object JLineCompletion {
|
|||
// (ConsoleReader doesn't call the handler if there aren't any completions)
|
||||
// the custom handler will then throw away the candidates and call the custom function
|
||||
private[this] final object DummyCompletor extends Completer {
|
||||
override def complete(buffer: String, cursor: Int, candidates: java.util.List[CharSequence]): Int =
|
||||
{
|
||||
candidates.asInstanceOf[java.util.List[String]] add "dummy"
|
||||
0
|
||||
}
|
||||
override def complete(
|
||||
buffer: String,
|
||||
cursor: Int,
|
||||
candidates: java.util.List[CharSequence]
|
||||
): Int = {
|
||||
candidates.asInstanceOf[java.util.List[String]] add "dummy"
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
def parserAsCompletor(p: Parser[_]): (String, Int) => (Seq[String], Seq[String]) =
|
||||
(str, level) => convertCompletions(Parser.completions(p, str, level))
|
||||
|
||||
def convertCompletions(c: Completions): (Seq[String], Seq[String]) =
|
||||
{
|
||||
val cs = c.get
|
||||
if (cs.isEmpty)
|
||||
(Nil, "{invalid input}" :: Nil)
|
||||
else
|
||||
convertCompletions(cs)
|
||||
}
|
||||
def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) =
|
||||
{
|
||||
val (insert, display) =
|
||||
((Set.empty[String], Set.empty[String]) /: cs) {
|
||||
case (t @ (insert, display), comp) =>
|
||||
if (comp.isEmpty) t else (insert + comp.append, appendNonEmpty(display, comp.display))
|
||||
}
|
||||
(insert.toSeq, display.toSeq.sorted)
|
||||
}
|
||||
def convertCompletions(c: Completions): (Seq[String], Seq[String]) = {
|
||||
val cs = c.get
|
||||
if (cs.isEmpty)
|
||||
(Nil, "{invalid input}" :: Nil)
|
||||
else
|
||||
convertCompletions(cs)
|
||||
}
|
||||
|
||||
def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) = {
|
||||
val (insert, display) =
|
||||
((Set.empty[String], Set.empty[String]) /: cs) {
|
||||
case (t @ (insert, display), comp) =>
|
||||
if (comp.isEmpty) t else (insert + comp.append, appendNonEmpty(display, comp.display))
|
||||
}
|
||||
(insert.toSeq, display.toSeq.sorted)
|
||||
}
|
||||
|
||||
def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add
|
||||
|
||||
def customCompletor(f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean =
|
||||
def customCompletor(
|
||||
f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean =
|
||||
(reader, level) => {
|
||||
val success = complete(beforeCursor(reader), reader => f(reader, level), reader)
|
||||
reader.flush()
|
||||
success
|
||||
}
|
||||
|
||||
def bufferSnapshot(reader: ConsoleReader): (String, Int) =
|
||||
{
|
||||
val b = reader.getCursorBuffer
|
||||
(b.buffer.toString, b.cursor)
|
||||
}
|
||||
def beforeCursor(reader: ConsoleReader): String =
|
||||
{
|
||||
val b = reader.getCursorBuffer
|
||||
b.buffer.substring(0, b.cursor)
|
||||
}
|
||||
def bufferSnapshot(reader: ConsoleReader): (String, Int) = {
|
||||
val b = reader.getCursorBuffer
|
||||
(b.buffer.toString, b.cursor)
|
||||
}
|
||||
|
||||
def beforeCursor(reader: ConsoleReader): String = {
|
||||
val b = reader.getCursorBuffer
|
||||
b.buffer.substring(0, b.cursor)
|
||||
}
|
||||
|
||||
// returns false if there was nothing to insert and nothing to display
|
||||
def complete(beforeCursor: String, completions: String => (Seq[String], Seq[String]), reader: ConsoleReader): Boolean =
|
||||
{
|
||||
val (insert, display) = completions(beforeCursor)
|
||||
val common = commonPrefix(insert)
|
||||
if (common.isEmpty)
|
||||
if (display.isEmpty)
|
||||
()
|
||||
else
|
||||
showCompletions(display, reader)
|
||||
def complete(
|
||||
beforeCursor: String,
|
||||
completions: String => (Seq[String], Seq[String]),
|
||||
reader: ConsoleReader
|
||||
): Boolean = {
|
||||
val (insert, display) = completions(beforeCursor)
|
||||
val common = commonPrefix(insert)
|
||||
if (common.isEmpty)
|
||||
if (display.isEmpty)
|
||||
()
|
||||
else
|
||||
appendCompletion(common, reader)
|
||||
showCompletions(display, reader)
|
||||
else
|
||||
appendCompletion(common, reader)
|
||||
|
||||
!(common.isEmpty && display.isEmpty)
|
||||
}
|
||||
!(common.isEmpty && display.isEmpty)
|
||||
}
|
||||
|
||||
def appendCompletion(common: String, reader: ConsoleReader): Unit = {
|
||||
reader.getCursorBuffer.write(common)
|
||||
|
|
@ -118,11 +136,13 @@ object JLineCompletion {
|
|||
printCompletions(display, reader)
|
||||
reader.drawLine()
|
||||
}
|
||||
|
||||
def printCompletions(cs: Seq[String], reader: ConsoleReader): Unit = {
|
||||
val print = shouldPrint(cs, reader)
|
||||
reader.println()
|
||||
if (print) printLinesAndColumns(cs, reader)
|
||||
}
|
||||
|
||||
def printLinesAndColumns(cs: Seq[String], reader: ConsoleReader): Unit = {
|
||||
val (lines, columns) = cs partition hasNewline
|
||||
for (line <- lines) {
|
||||
|
|
@ -132,26 +152,27 @@ object JLineCompletion {
|
|||
}
|
||||
reader.printColumns(JavaConversions.seqAsJavaList(columns.map(_.trim)))
|
||||
}
|
||||
|
||||
def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0
|
||||
def shouldPrint(cs: Seq[String], reader: ConsoleReader): Boolean =
|
||||
{
|
||||
val size = cs.size
|
||||
(size <= reader.getAutoprintThreshold) ||
|
||||
confirm("Display all %d possibilities? (y or n) ".format(size), 'y', 'n', reader)
|
||||
}
|
||||
def confirm(prompt: String, trueC: Char, falseC: Char, reader: ConsoleReader): Boolean =
|
||||
{
|
||||
reader.println()
|
||||
reader.print(prompt)
|
||||
reader.flush()
|
||||
reader.readCharacter(trueC, falseC) == trueC
|
||||
}
|
||||
|
||||
def shouldPrint(cs: Seq[String], reader: ConsoleReader): Boolean = {
|
||||
val size = cs.size
|
||||
(size <= reader.getAutoprintThreshold) ||
|
||||
confirm("Display all %d possibilities? (y or n) ".format(size), 'y', 'n', reader)
|
||||
}
|
||||
|
||||
def confirm(prompt: String, trueC: Char, falseC: Char, reader: ConsoleReader): Boolean = {
|
||||
reader.println()
|
||||
reader.print(prompt)
|
||||
reader.flush()
|
||||
reader.readCharacter(trueC, falseC) == trueC
|
||||
}
|
||||
|
||||
def commonPrefix(s: Seq[String]): String = if (s.isEmpty) "" else s reduceLeft commonPrefix
|
||||
def commonPrefix(a: String, b: String): String =
|
||||
{
|
||||
val len = scala.math.min(a.length, b.length)
|
||||
@tailrec def loop(i: Int): Int = if (i >= len) len else if (a(i) != b(i)) i else loop(i + 1)
|
||||
a.substring(0, loop(0))
|
||||
}
|
||||
|
||||
def commonPrefix(a: String, b: String): String = {
|
||||
val len = scala.math.min(a.length, b.length)
|
||||
@tailrec def loop(i: Int): Int = if (i >= len) len else if (a(i) != b(i)) i else loop(i + 1)
|
||||
a.substring(0, loop(0))
|
||||
}
|
||||
}
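For instance, commonPrefix folds pairwise over the candidates (a quick check of the helper above):

val p = JLineCompletion.commonPrefix(Seq("compile", "compileIncremental", "compilers")) // == "compile"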
@ -24,7 +24,9 @@ sealed trait Parser[+T] {
|
|||
def ifValid[S](p: => Parser[S]): Parser[S]
|
||||
def valid: Boolean
|
||||
}
|
||||
|
||||
sealed trait RichParser[A] {
|
||||
|
||||
/** Apply the original Parser and then apply `next` (in order). The result of both is provides as a pair. */
|
||||
def ~[B](next: Parser[B]): Parser[(A, B)]
|
||||
|
||||
|
|
@ -100,14 +102,19 @@ sealed trait RichParser[A] {
|
|||
* be displayed.
|
||||
* @return a new parser with a new source of completions.
|
||||
*/
|
||||
def examples(exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A]
|
||||
def examples(
|
||||
exampleSource: ExampleSource,
|
||||
maxNumberOfExamples: Int,
|
||||
removeInvalidExamples: Boolean
|
||||
): Parser[A]
|
||||
|
||||
/**
|
||||
* @param exampleSource the source of examples when displaying completions to the user.
|
||||
* @return a new parser with a new source of completions. It displays at most 25 completion examples and does not
|
||||
* remove invalid examples.
|
||||
*/
|
||||
def examples(exampleSource: ExampleSource): Parser[A] = examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false)
|
||||
def examples(exampleSource: ExampleSource): Parser[A] =
|
||||
examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false)
|
||||
|
||||
/** Converts a Parser returning a Char sequence to a Parser returning a String.*/
|
||||
def string(implicit ev: A <:< Seq[Char]): Parser[String]
|
||||
|
|
@ -139,14 +146,17 @@ object Parser extends ParserMain {
|
|||
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C]
|
||||
def toEither: Either[() => Seq[String], T]
|
||||
}
|
||||
|
||||
final case class Value[+T](value: T) extends Result[T] {
|
||||
def isFailure = false
|
||||
def isValid: Boolean = true
|
||||
def errors = Nil
|
||||
|
||||
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C] = b match {
|
||||
case fail: Failure => fail
|
||||
case Value(bv) => Value(f(value, bv))
|
||||
}
|
||||
|
||||
def &&(b: => Result[_]): Result[T] = b match { case f: Failure => f; case _ => this }
|
||||
def or[B >: T](b: => Result[B]): Result[B] = this
|
||||
def either[B](b: => Result[B]): Result[Either[T, B]] = Value(Left(value))
|
||||
|
|
@ -155,20 +165,25 @@ object Parser extends ParserMain {
|
|||
def filter(f: T => Boolean, msg: => String): Result[T] = if (f(value)) this else mkFailure(msg)
|
||||
def toEither = Right(value)
|
||||
}
|
||||
final class Failure private[sbt] (mkErrors: => Seq[String], val definitive: Boolean) extends Result[Nothing] {
|
||||
|
||||
final class Failure private[sbt] (mkErrors: => Seq[String], val definitive: Boolean)
|
||||
extends Result[Nothing] {
|
||||
lazy val errors: Seq[String] = mkErrors
|
||||
def isFailure = true
|
||||
def isValid = false
|
||||
def map[B](f: Nothing => B) = this
|
||||
def flatMap[B](f: Nothing => Result[B]) = this
|
||||
|
||||
def or[B](b: => Result[B]): Result[B] = b match {
|
||||
case v: Value[B] => v
|
||||
case f: Failure => if (definitive) this else this ++ f
|
||||
}
|
||||
|
||||
def either[B](b: => Result[B]): Result[Either[Nothing, B]] = b match {
|
||||
case Value(v) => Value(Right(v))
|
||||
case f: Failure => if (definitive) this else this ++ f
|
||||
}
|
||||
|
||||
def filter(f: Nothing => Boolean, msg: => String) = this
|
||||
def app[B, C](b: => Result[B])(f: (Nothing, B) => C): Result[C] = this
|
||||
def &&(b: => Result[_]) = this
|
||||
|
|
@ -176,8 +191,12 @@ object Parser extends ParserMain {
|
|||
|
||||
private[sbt] def ++(f: Failure) = mkFailures(errors ++ f.errors)
|
||||
}
|
||||
def mkFailures(errors: => Seq[String], definitive: Boolean = false): Failure = new Failure(errors.distinct, definitive)
|
||||
def mkFailure(error: => String, definitive: Boolean = false): Failure = new Failure(error :: Nil, definitive)
|
||||
|
||||
def mkFailures(errors: => Seq[String], definitive: Boolean = false): Failure =
|
||||
new Failure(errors.distinct, definitive)
|
||||
|
||||
def mkFailure(error: => String, definitive: Boolean = false): Failure =
|
||||
new Failure(error :: Nil, definitive)
|
||||
|
||||
def tuple[A, B](a: Option[A], b: Option[B]): Option[(A, B)] =
|
||||
(a, b) match { case (Some(av), Some(bv)) => Some((av, bv)); case _ => None }
|
||||
|
|
@ -198,7 +217,12 @@ object Parser extends ParserMain {
|
|||
}
|
||||
}
|
||||
|
||||
def filterParser[T](a: Parser[T], f: T => Boolean, seen: String, msg: String => String): Parser[T] =
|
||||
def filterParser[T](
|
||||
a: Parser[T],
|
||||
f: T => Boolean,
|
||||
seen: String,
|
||||
msg: String => String
|
||||
): Parser[T] =
|
||||
a.ifValid {
|
||||
a.result match {
|
||||
case Some(av) if f(av) => success(av)
|
||||
|
|
@ -211,8 +235,8 @@ object Parser extends ParserMain {
|
|||
b.ifValid {
|
||||
(a.result, b.result) match {
|
||||
case (Some(av), Some(bv)) => success((av, bv))
|
||||
case (Some(av), None) => b map { bv => (av, bv) }
|
||||
case (None, Some(bv)) => a map { av => (av, bv) }
|
||||
case (Some(av), None) => b map (bv => (av, bv))
|
||||
case (None, Some(bv)) => a map (av => (av, bv))
|
||||
case (None, None) => new SeqParser(a, b)
|
||||
}
|
||||
}
|
||||
|
|
@ -229,6 +253,7 @@ object Parser extends ParserMain {
|
|||
|
||||
def onFailure[T](delegate: Parser[T], msg: String): Parser[T] =
|
||||
if (delegate.valid) new OnFailure(delegate, msg) else failure(msg)
|
||||
|
||||
def trapAndFail[T](delegate: Parser[T]): Parser[T] =
|
||||
delegate.ifValid(new TrapAndFail(delegate))
|
||||
|
||||
|
|
@ -237,37 +262,49 @@ object Parser extends ParserMain {
|
|||
|
||||
def repeat[T](p: Parser[T], min: Int = 0, max: UpperBound = Infinite): Parser[Seq[T]] =
|
||||
repeat(None, p, min, max, Nil)
|
||||
private[complete] def repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, revAcc: List[T]): Parser[Seq[T]] =
|
||||
{
|
||||
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
|
||||
assume(max >= min, "Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
|
||||
|
||||
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
|
||||
repeated match {
|
||||
case i: Invalid if min == 0 => invalidButOptional
|
||||
case i: Invalid => i
|
||||
case _ =>
|
||||
repeated.result match {
|
||||
case Some(value) => success(revAcc reverse_::: value :: Nil) // revAcc should be Nil here
|
||||
case None => if (max.isZero) success(revAcc.reverse) else new Repeat(partial, repeated, min, max, revAcc)
|
||||
}
|
||||
}
|
||||
private[complete] def repeat[T](
|
||||
partial: Option[Parser[T]],
|
||||
repeated: Parser[T],
|
||||
min: Int,
|
||||
max: UpperBound,
|
||||
revAcc: List[T]
|
||||
): Parser[Seq[T]] = {
|
||||
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
|
||||
assume(max >= min,
|
||||
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
|
||||
|
||||
partial match {
|
||||
case Some(part) =>
|
||||
part.ifValid {
|
||||
part.result match {
|
||||
case Some(value) => repeat(None, repeated, min, max, value :: revAcc)
|
||||
case None => checkRepeated(part.map(lv => (lv :: revAcc).reverse))
|
||||
}
|
||||
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
|
||||
repeated match {
|
||||
case i: Invalid if min == 0 => invalidButOptional
|
||||
case i: Invalid => i
|
||||
case _ =>
|
||||
repeated.result match {
|
||||
case Some(value) =>
|
||||
success(revAcc reverse_::: value :: Nil) // revAcc should be Nil here
|
||||
case None =>
|
||||
if (max.isZero) success(revAcc.reverse)
|
||||
else new Repeat(partial, repeated, min, max, revAcc)
|
||||
}
|
||||
case None => checkRepeated(success(Nil))
|
||||
}
|
||||
|
||||
partial match {
|
||||
case Some(part) =>
|
||||
part.ifValid {
|
||||
part.result match {
|
||||
case Some(value) => repeat(None, repeated, min, max, value :: revAcc)
|
||||
case None => checkRepeated(part.map(lv => (lv :: revAcc).reverse))
|
||||
}
|
||||
}
|
||||
case None => checkRepeated(success(Nil))
|
||||
}
|
||||
}
|
||||
|
||||
def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid(b.ifValid(new And(a, b)))
|
||||
}
|
||||
|
||||
trait ParserMain {
|
||||
|
||||
/** Provides combinators for Parsers.*/
|
||||
implicit def richParser[A](a: Parser[A]): RichParser[A] = new RichParser[A] {
|
||||
def ~[B](b: Parser[B]) = seqParser(a, b)
|
||||
|
|
@ -279,7 +316,7 @@ trait ParserMain {
|
|||
def map[B](f: A => B) = mapParser(a, f)
|
||||
def id = a
|
||||
|
||||
def ^^^[B](value: B): Parser[B] = a map { _ => value }
|
||||
def ^^^[B](value: B): Parser[B] = a map (_ => value)
|
||||
def ??[B >: A](alt: B): Parser[B] = a.? map { _ getOrElse alt }
|
||||
def <~[B](b: Parser[B]): Parser[A] = (a ~ b) map { case av ~ _ => av }
|
||||
def ~>[B](b: Parser[B]): Parser[B] = (a ~ b) map { case _ ~ bv => bv }
|
||||
|
|
@ -290,8 +327,17 @@ trait ParserMain {
|
|||
def &(o: Parser[_]) = and(a, o)
|
||||
def -(o: Parser[_]) = and(a, not(o, "Unexpected: " + o))
|
||||
def examples(s: String*): Parser[A] = examples(s.toSet)
|
||||
def examples(s: Set[String], check: Boolean = false): Parser[A] = examples(new FixedSetExamples(s), s.size, check)
|
||||
def examples(s: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] = Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples)
|
||||
|
||||
def examples(s: Set[String], check: Boolean = false): Parser[A] =
|
||||
examples(new FixedSetExamples(s), s.size, check)
|
||||
|
||||
def examples(
|
||||
s: ExampleSource,
|
||||
maxNumberOfExamples: Int,
|
||||
removeInvalidExamples: Boolean
|
||||
): Parser[A] =
|
||||
Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples)
|
||||
|
||||
def filter(f: A => Boolean, msg: String => String): Parser[A] = filterParser(a, f, "", msg)
|
||||
def string(implicit ev: A <:< Seq[Char]): Parser[String] = map(_.mkString)
|
||||
def flatMap[B](f: A => Parser[B]) = bindParser(a, f)
|
||||
|
|
@ -313,13 +359,15 @@ trait ParserMain {
|
|||
* Defines a parser that always fails on any input with messages `msgs`.
|
||||
* If `definitive` is `true`, any failures by later alternatives are discarded.
|
||||
*/
|
||||
def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] = Invalid(mkFailures(msgs, definitive))
|
||||
def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] =
|
||||
Invalid(mkFailures(msgs, definitive))
|
||||
|
||||
/**
|
||||
* Defines a parser that always fails on any input with message `msg`.
|
||||
* If `definitive` is `true`, any failures by later alternatives are discarded.
|
||||
*/
|
||||
def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] = invalid(msg :: Nil, definitive)
|
||||
def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] =
|
||||
invalid(msg :: Nil, definitive)
|
||||
|
||||
/** Defines a parser that always succeeds on empty input with the result `value`.*/
|
||||
def success[T](value: T): Parser[T] = new ValidParser[T] {
|
||||
|
|
@ -335,17 +383,17 @@ trait ParserMain {
|
|||
charClass(r contains _).examples(r.map(_.toString): _*)
|
||||
|
||||
/** Defines a Parser that parses a single character only if it is contained in `legal`.*/
|
||||
def chars(legal: String): Parser[Char] =
|
||||
{
|
||||
val set = legal.toSet
|
||||
charClass(set, "character in '" + legal + "'") examples (set.map(_.toString))
|
||||
}
|
||||
def chars(legal: String): Parser[Char] = {
|
||||
val set = legal.toSet
|
||||
charClass(set, "character in '" + legal + "'") examples (set.map(_.toString))
|
||||
}
|
||||
|
||||
/**
* Defines a Parser that parses a single character only if the predicate `f` returns true for that character.
* If this parser fails, `label` is used as the failure message.
*/
def charClass(f: Char => Boolean, label: String = "<unspecified>"): Parser[Char] = new CharacterClass(f, label)
def charClass(f: Char => Boolean, label: String = "<unspecified>"): Parser[Char] =
new CharacterClass(f, label)
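// Illustrative sketch, assuming this Parser object's members are in scope:
// character-level building blocks made from charClass and chars.
val vowel: Parser[Char] = charClass("aeiou".toSet, "vowel")
val arithmeticOp: Parser[Char] = chars("+-*/") // succeeds only on these four characters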
|
||||
|
||||
/** Presents a single Char `ch` as a Parser that only parses that exact character. */
|
||||
implicit def literal(ch: Char): Parser[Char] = new ValidParser[Char] {
|
||||
|
|
@ -355,13 +403,16 @@ trait ParserMain {
|
|||
def completions(level: Int) = Completions.single(Completion.suggestion(ch.toString))
|
||||
override def toString = "'" + ch + "'"
|
||||
}
|
||||
|
||||
/** Presents a literal String `s` as a Parser that only parses that exact text and provides it as the result.*/
implicit def literal(s: String): Parser[String] = stringLiteral(s, 0)

/** See [[unapply]]. */
object ~ {

/** Convenience for destructuring a tuple that mirrors the `~` combinator.*/
def unapply[A, B](t: (A, B)): Some[(A, B)] = Some(t)

}
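// Illustrative sketch (Letter and Digit come from the Parsers trait shown later
// in this commit; assumes DefaultParsers._ is imported): the ~ extractor lets
// sequenced results be destructured with the same symbol used to build them.
val letterDigit: Parser[(Char, Char)] = Letter ~ Digit
val rendered: Parser[String] = letterDigit map { case l ~ d => s"$l$d" }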
|
||||
|
||||
/** Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If unsuccessful, an error message is provided in `Left`.*/
|
||||
|
|
@ -403,24 +454,22 @@ trait ParserMain {
|
|||
Parser.completions(parser, str, level).get foreach println
|
||||
|
||||
// intended to be temporary pending proper error feedback
|
||||
def result[T](p: Parser[T], s: String): Either[() => (Seq[String], Int), T] =
|
||||
{
|
||||
def loop(i: Int, a: Parser[T]): Either[() => (Seq[String], Int), T] =
|
||||
a match {
|
||||
case Invalid(f) => Left(() => (f.errors, i))
|
||||
case _ =>
|
||||
val ci = i + 1
|
||||
if (ci >= s.length)
|
||||
a.resultEmpty.toEither.left.map { msgs0 => () =>
|
||||
val msgs = msgs0()
|
||||
val nonEmpty = if (msgs.isEmpty) "Unexpected end of input" :: Nil else msgs
|
||||
(nonEmpty, ci)
|
||||
}
|
||||
else
|
||||
loop(ci, a derive s(ci))
|
||||
}
|
||||
loop(-1, p)
|
||||
}
|
||||
def result[T](p: Parser[T], s: String): Either[() => (Seq[String], Int), T] = {
def loop(i: Int, a: Parser[T]): Either[() => (Seq[String], Int), T] =
a match {
case Invalid(f) => Left(() => (f.errors, i))
case _ =>
val ci = i + 1
if (ci >= s.length)
a.resultEmpty.toEither.left.map { msgs0 => () =>
val msgs = msgs0()
val nonEmpty = if (msgs.isEmpty) "Unexpected end of input" :: Nil else msgs
(nonEmpty, ci)
} else
loop(ci, a derive s(ci))
}
loop(-1, p)
}
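// Illustrative sketch: a hypothetical helper that renders the failure returned by
// result using ProcessError (defined later in this commit); assumes
// sbt.internal.util.complete._ and Parser._ are in scope.
def describeFailure[T](p: Parser[T], in: String): Either[String, T] =
  result(p, in).left.map { thunk =>
    val (msgs, index) = thunk()
    ProcessError(in, msgs, index)
  }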
|
||||
|
||||
/** Applies parser `p` to input `s`. */
|
||||
def apply[T](p: Parser[T])(s: String): Parser[T] =
|
||||
|
|
@ -454,7 +503,12 @@ trait ParserMain {
|
|||
* @tparam A the type of values that are returned by the parser.
|
||||
* @return
|
||||
*/
|
||||
def examples[A](a: Parser[A], completions: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] =
|
||||
def examples[A](
|
||||
a: Parser[A],
|
||||
completions: ExampleSource,
|
||||
maxNumberOfExamples: Int,
|
||||
removeInvalidExamples: Boolean
|
||||
): Parser[A] =
|
||||
if (a.valid) {
|
||||
a.result match {
|
||||
case Some(av) => success(av)
|
||||
|
|
@ -463,7 +517,11 @@ trait ParserMain {
|
|||
}
|
||||
} else a
|
||||
|
||||
def matched(t: Parser[_], seen: Vector[Char] = Vector.empty, partial: Boolean = false): Parser[String] =
|
||||
def matched(
|
||||
t: Parser[_],
|
||||
seen: Vector[Char] = Vector.empty,
|
||||
partial: Boolean = false
|
||||
): Parser[String] =
|
||||
t match {
|
||||
case i: Invalid => if (partial && seen.nonEmpty) success(seen.mkString) else i
|
||||
case _ =>
|
||||
|
|
@ -485,13 +543,15 @@ trait ParserMain {
|
|||
* When tab completion of part of this token is requested, no completions are returned if `hide` returns true for the current tab completion level.
|
||||
* Otherwise, the completions provided by the delegate `t` or a later derivative are appended to the prefix String already seen by this parser.
|
||||
*/
|
||||
def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] = token(t, TokenCompletions.default.hideWhen(hide))
def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] =
token(t, TokenCompletions.default.hideWhen(hide))

/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `description` is displayed for suggestions and no completions are ever performed.
*/
def token[T](t: Parser[T], description: String): Parser[T] = token(t, TokenCompletions.displayOnly(description))
def token[T](t: Parser[T], description: String): Parser[T] =
token(t, TokenCompletions.displayOnly(description))
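// Illustrative sketch, assuming DefaultParsers._ is imported: a digits-only
// token that tab-completes as the fixed display string "<port>".
val port: Parser[Int] = token(charClass(_.isDigit, "digit").+.string map (_.toInt), "<port>")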
|
||||
|
||||
/**
|
||||
* Establishes delegate parser `t` as a single token of tab completion.
|
||||
|
|
@ -524,24 +584,29 @@ trait ParserMain {
|
|||
|
||||
def oneOf[T](p: Seq[Parser[T]]): Parser[T] = p.reduceLeft(_ | _)
|
||||
def seq[T](p: Seq[Parser[T]]): Parser[Seq[T]] = seq0(p, Nil)
|
||||
def seq0[T](p: Seq[Parser[T]], errors: => Seq[String]): Parser[Seq[T]] =
|
||||
{
|
||||
val (newErrors, valid) = separate(p) { case Invalid(f) => Left(f.errors _); case ok => Right(ok) }
|
||||
def combinedErrors = errors ++ newErrors.flatMap(_())
|
||||
if (valid.isEmpty) invalid(combinedErrors) else new ParserSeq(valid, combinedErrors)
|
||||
}
|
||||
|
||||
def stringLiteral(s: String, start: Int): Parser[String] =
|
||||
{
|
||||
val len = s.length
|
||||
if (len == 0) sys.error("String literal cannot be empty") else if (start >= len) success(s) else new StringLiteral(s, start)
|
||||
def seq0[T](p: Seq[Parser[T]], errors: => Seq[String]): Parser[Seq[T]] = {
|
||||
val (newErrors, valid) = separate(p) {
|
||||
case Invalid(f) => Left(f.errors _); case ok => Right(ok)
|
||||
}
|
||||
def combinedErrors = errors ++ newErrors.flatMap(_())
|
||||
if (valid.isEmpty) invalid(combinedErrors) else new ParserSeq(valid, combinedErrors)
|
||||
}
|
||||
|
||||
def stringLiteral(s: String, start: Int): Parser[String] = {
|
||||
val len = s.length
|
||||
if (len == 0) sys.error("String literal cannot be empty")
|
||||
else if (start >= len) success(s)
|
||||
else new StringLiteral(s, start)
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait ValidParser[T] extends Parser[T] {
|
||||
final def valid = true
|
||||
final def failure = None
|
||||
final def ifValid[S](p: => Parser[S]): Parser[S] = p
|
||||
}
|
||||
|
||||
private final case class Invalid(fail: Failure) extends Parser[Nothing] {
|
||||
def failure = Some(fail)
|
||||
def result = None
|
||||
|
|
@ -562,10 +627,17 @@ private final case class SoftInvalid(fail: Failure) extends ValidParser[Nothing]
|
|||
}
|
||||
|
||||
private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] {
|
||||
def result = try { a.result } catch { case e: Exception => None }
|
||||
def result = try { a.result } catch { case e: Exception => None }
|
||||
def resultEmpty = try { a.resultEmpty } catch { case e: Exception => fail(e) }
|
||||
def derive(c: Char) = try { trapAndFail(a derive c) } catch { case e: Exception => Invalid(fail(e)) }
|
||||
def completions(level: Int) = try { a.completions(level) } catch { case e: Exception => Completions.nil }
|
||||
|
||||
def derive(c: Char) = try { trapAndFail(a derive c) } catch {
|
||||
case e: Exception => Invalid(fail(e))
|
||||
}
|
||||
|
||||
def completions(level: Int) = try { a.completions(level) } catch {
|
||||
case e: Exception => Completions.nil
|
||||
}
|
||||
|
||||
override def toString = "trap(" + a + ")"
|
||||
override def isTokenStart = a.isTokenStart
|
||||
private[this] def fail(e: Exception): Failure = mkFailure(e.toString)
|
||||
|
|
@ -573,23 +645,29 @@ private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] {
|
|||
|
||||
private final class OnFailure[A](a: Parser[A], message: String) extends ValidParser[A] {
|
||||
def result = a.result
|
||||
def resultEmpty = a.resultEmpty match { case f: Failure => mkFailure(message); case v: Value[A] => v }
|
||||
|
||||
def resultEmpty = a.resultEmpty match {
|
||||
case f: Failure => mkFailure(message); case v: Value[A] => v
|
||||
}
|
||||
|
||||
def derive(c: Char) = onFailure(a derive c, message)
|
||||
def completions(level: Int) = a.completions(level)
|
||||
override def toString = "(" + a + " !!! \"" + message + "\" )"
|
||||
override def isTokenStart = a.isTokenStart
|
||||
}
|
||||
|
||||
private final class SeqParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[(A, B)] {
|
||||
lazy val result = tuple(a.result, b.result)
|
||||
lazy val resultEmpty = a.resultEmpty seq b.resultEmpty
|
||||
def derive(c: Char) =
|
||||
{
|
||||
val common = a.derive(c) ~ b
|
||||
a.resultEmpty match {
|
||||
case Value(av) => common | b.derive(c).map(br => (av, br))
|
||||
case _: Failure => common
|
||||
}
|
||||
|
||||
def derive(c: Char) = {
|
||||
val common = a.derive(c) ~ b
|
||||
a.resultEmpty match {
|
||||
case Value(av) => common | b.derive(c).map(br => (av, br))
|
||||
case _: Failure => common
|
||||
}
|
||||
}
|
||||
|
||||
def completions(level: Int) = a.completions(level) x b.completions(level)
|
||||
override def toString = "(" + a + " ~ " + b + ")"
|
||||
}
|
||||
|
|
@ -601,6 +679,7 @@ private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser
|
|||
def completions(level: Int) = a.completions(level) ++ b.completions(level)
|
||||
override def toString = "(" + a + " | " + b + ")"
|
||||
}
|
||||
|
||||
private final class HetParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[Either[A, B]] {
|
||||
lazy val result = tuple(a.result, b.result) map { case (a, b) => Left(a) }
|
||||
def derive(c: Char) = (a derive c) || (b derive c)
|
||||
|
|
@ -608,27 +687,33 @@ private final class HetParser[A, B](a: Parser[A], b: Parser[B]) extends ValidPar
|
|||
def completions(level: Int) = a.completions(level) ++ b.completions(level)
|
||||
override def toString = "(" + a + " || " + b + ")"
|
||||
}
|
||||
private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String]) extends ValidParser[Seq[T]] {
|
||||
|
||||
private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String])
|
||||
extends ValidParser[Seq[T]] {
|
||||
assert(a.nonEmpty)
|
||||
lazy val resultEmpty: Result[Seq[T]] =
|
||||
{
|
||||
val res = a.map(_.resultEmpty)
|
||||
val (failures, values) = separate(res)(_.toEither)
|
||||
// if(failures.isEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
|
||||
if (values.nonEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
|
||||
}
|
||||
|
||||
lazy val resultEmpty: Result[Seq[T]] = {
|
||||
val res = a.map(_.resultEmpty)
|
||||
val (failures, values) = separate(res)(_.toEither)
|
||||
// if(failures.isEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
|
||||
if (values.nonEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
|
||||
}
|
||||
|
||||
def result = {
|
||||
val success = a.flatMap(_.result)
|
||||
if (success.length == a.length) Some(success) else None
|
||||
}
|
||||
|
||||
def completions(level: Int) = a.map(_.completions(level)).reduceLeft(_ ++ _)
|
||||
def derive(c: Char) = seq0(a.map(_ derive c), errors)
|
||||
|
||||
override def toString = "seq(" + a + ")"
|
||||
}
|
||||
|
||||
private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends ValidParser[B] {
|
||||
lazy val result = a.result flatMap { av => f(av).result }
|
||||
lazy val resultEmpty = a.resultEmpty flatMap { av => f(av).resultEmpty }
|
||||
lazy val result = a.result flatMap (av => f(av).result)
|
||||
lazy val resultEmpty = a.resultEmpty flatMap (av => f(av).resultEmpty)
|
||||
|
||||
def completions(level: Int) =
|
||||
a.completions(level) flatMap { c =>
|
||||
apply(a)(c.append).resultEmpty match {
|
||||
|
|
@ -637,17 +722,19 @@ private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends Va
|
|||
}
|
||||
}
|
||||
|
||||
def derive(c: Char) =
|
||||
{
|
||||
val common = a derive c flatMap f
|
||||
a.resultEmpty match {
|
||||
case Value(av) => common | derive1(f(av), c)
|
||||
case _: Failure => common
|
||||
}
|
||||
def derive(c: Char) = {
|
||||
val common = a derive c flatMap f
|
||||
a.resultEmpty match {
|
||||
case Value(av) => common | derive1(f(av), c)
|
||||
case _: Failure => common
|
||||
}
|
||||
}
|
||||
|
||||
override def isTokenStart = a.isTokenStart
|
||||
|
||||
override def toString = "bind(" + a + ")"
|
||||
}
|
||||
|
||||
private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser[B] {
|
||||
lazy val result = a.result map f
|
||||
lazy val resultEmpty = a.resultEmpty map f
|
||||
|
|
@ -656,35 +743,53 @@ private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser
|
|||
override def isTokenStart = a.isTokenStart
|
||||
override def toString = "map(" + a + ")"
|
||||
}
|
||||
private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg: String => String) extends ValidParser[T] {
|
||||
|
||||
private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg: String => String)
|
||||
extends ValidParser[T] {
|
||||
def filterResult(r: Result[T]) = r.filter(f, msg(seen))
|
||||
lazy val result = p.result filter f
|
||||
lazy val resultEmpty = filterResult(p.resultEmpty)
|
||||
def derive(c: Char) = filterParser(p derive c, f, seen + c, msg)
|
||||
def completions(level: Int) = p.completions(level) filterS { s => filterResult(apply(p)(s).resultEmpty).isValid }
|
||||
|
||||
def completions(level: Int) = p.completions(level) filterS { s =>
|
||||
filterResult(apply(p)(s).resultEmpty).isValid
|
||||
}
|
||||
|
||||
override def toString = "filter(" + p + ")"
|
||||
override def isTokenStart = p.isTokenStart
|
||||
}
|
||||
private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean) extends ValidParser[String] {
|
||||
|
||||
private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean)
|
||||
extends ValidParser[String] {
|
||||
lazy val seen = seenV.mkString
|
||||
def derive(c: Char) = matched(delegate derive c, seenV :+ c, partial)
|
||||
def completions(level: Int) = delegate.completions(level)
|
||||
def result = if (delegate.result.isDefined) Some(seen) else None
|
||||
def resultEmpty = delegate.resultEmpty match { case f: Failure if !partial => f; case _ => Value(seen) }
|
||||
|
||||
def resultEmpty = delegate.resultEmpty match {
|
||||
case f: Failure if !partial => f; case _ => Value(seen)
|
||||
}
|
||||
|
||||
override def isTokenStart = delegate.isTokenStart
|
||||
override def toString = "matched(" + partial + ", " + seen + ", " + delegate + ")"
|
||||
}
|
||||
private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions) extends ValidParser[T] {
|
||||
|
||||
private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions)
|
||||
extends ValidParser[T] {
|
||||
def derive(c: Char) = mkToken(delegate derive c, seen + c, complete)
|
||||
|
||||
def completions(level: Int) = complete match {
|
||||
case dc: TokenCompletions.Delegating => dc.completions(seen, level, delegate.completions(level))
|
||||
case fc: TokenCompletions.Fixed => fc.completions(seen, level)
|
||||
case dc: TokenCompletions.Delegating =>
|
||||
dc.completions(seen, level, delegate.completions(level))
|
||||
case fc: TokenCompletions.Fixed => fc.completions(seen, level)
|
||||
}
|
||||
|
||||
def result = delegate.result
|
||||
def resultEmpty = delegate.resultEmpty
|
||||
override def isTokenStart = true
|
||||
override def toString = "token('" + complete + ", " + delegate + ")"
|
||||
}
|
||||
|
||||
private final class And[T](a: Parser[T], b: Parser[_]) extends ValidParser[T] {
|
||||
lazy val result = tuple(a.result, b.result) map { _._1 }
|
||||
def derive(c: Char) = (a derive c) & (b derive c)
|
||||
|
|
@ -697,10 +802,12 @@ private final class Not(delegate: Parser[_], failMessage: String) extends ValidP
|
|||
def derive(c: Char) = if (delegate.valid) not(delegate derive c, failMessage) else this
|
||||
def completions(level: Int) = Completions.empty
|
||||
def result = None
|
||||
|
||||
lazy val resultEmpty = delegate.resultEmpty match {
|
||||
case f: Failure => Value(())
|
||||
case v: Value[_] => mkFailure(failMessage)
|
||||
}
|
||||
|
||||
override def toString = " -(%s)".format(delegate)
|
||||
}
|
||||
|
||||
|
|
@ -719,9 +826,18 @@ private final class Not(delegate: Parser[_], failMessage: String) extends ValidP
|
|||
* @param removeInvalidExamples indicates whether to remove examples that are deemed invalid by the delegate parser.
|
||||
* @tparam T the type of value produced by the parser.
|
||||
*/
|
||||
private final class ParserWithExamples[T](delegate: Parser[T], exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean) extends ValidParser[T] {
|
||||
private final class ParserWithExamples[T](
|
||||
delegate: Parser[T],
|
||||
exampleSource: ExampleSource,
|
||||
maxNumberOfExamples: Int,
|
||||
removeInvalidExamples: Boolean
|
||||
) extends ValidParser[T] {
|
||||
|
||||
def derive(c: Char) =
|
||||
examples(delegate derive c, exampleSource.withAddedPrefix(c.toString), maxNumberOfExamples, removeInvalidExamples)
|
||||
examples(delegate derive c,
|
||||
exampleSource.withAddedPrefix(c.toString),
|
||||
maxNumberOfExamples,
|
||||
removeInvalidExamples)
|
||||
|
||||
def result = delegate.result
|
||||
|
||||
|
|
@ -749,15 +865,21 @@ private final class ParserWithExamples[T](delegate: Parser[T], exampleSource: Ex
|
|||
apply(delegate)(example).resultEmpty.isValid
|
||||
}
|
||||
}
|
||||
|
||||
private final class StringLiteral(str: String, start: Int) extends ValidParser[String] {
|
||||
assert(0 <= start && start < str.length)
|
||||
|
||||
def failMsg = "Expected '" + str + "'"
|
||||
def resultEmpty = mkFailure(failMsg)
|
||||
def result = None
|
||||
def derive(c: Char) = if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(resultEmpty)
|
||||
|
||||
def derive(c: Char) =
|
||||
if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(resultEmpty)
|
||||
|
||||
def completions(level: Int) = Completions.single(Completion.suggestion(str.substring(start)))
|
||||
override def toString = '"' + str + '"'
|
||||
}
|
||||
|
||||
private final class CharacterClass(f: Char => Boolean, label: String) extends ValidParser[Char] {
|
||||
def result = None
|
||||
def resultEmpty = mkFailure("Expected " + label)
|
||||
|
|
@ -765,6 +887,7 @@ private final class CharacterClass(f: Char => Boolean, label: String) extends Va
|
|||
def completions(level: Int) = Completions.empty
|
||||
override def toString = "class(" + label + ")"
|
||||
}
|
||||
|
||||
private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[T]] {
|
||||
def result = delegate.result map some.fn
|
||||
def resultEmpty = Value(None)
|
||||
|
|
@ -772,7 +895,14 @@ private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[
|
|||
def completions(level: Int) = Completion.empty +: delegate.completions(level)
|
||||
override def toString = delegate.toString + "?"
|
||||
}
|
||||
private final class Repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, accumulatedReverse: List[T]) extends ValidParser[Seq[T]] {
|
||||
|
||||
private final class Repeat[T](
|
||||
partial: Option[Parser[T]],
|
||||
repeated: Parser[T],
|
||||
min: Int,
|
||||
max: UpperBound,
|
||||
accumulatedReverse: List[T]
|
||||
) extends ValidParser[Seq[T]] {
|
||||
assume(0 <= min, "Minimum occurrences must be non-negative")
assume(max >= min, "Minimum occurrences must be less than or equal to the maximum occurrences")
|
||||
|
||||
|
|
@ -787,37 +917,39 @@ private final class Repeat[T](partial: Option[Parser[T]], repeated: Parser[T], m
|
|||
case None => repeatDerive(c, accumulatedReverse)
|
||||
}
|
||||
|
||||
def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] = repeat(Some(repeated derive c), repeated, scala.math.max(0, min - 1), max.decrement, accRev)
|
||||
def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] =
|
||||
repeat(Some(repeated derive c), repeated, scala.math.max(0, min - 1), max.decrement, accRev)
|
||||
|
||||
def completions(level: Int) =
|
||||
{
|
||||
def pow(comp: Completions, exp: Completions, n: Int): Completions =
|
||||
if (n == 1) comp else pow(comp x exp, exp, n - 1)
|
||||
def completions(level: Int) = {
|
||||
def pow(comp: Completions, exp: Completions, n: Int): Completions =
|
||||
if (n == 1) comp else pow(comp x exp, exp, n - 1)
|
||||
|
||||
val repC = repeated.completions(level)
|
||||
val fin = if (min == 0) Completion.empty +: repC else pow(repC, repC, min)
|
||||
partial match {
|
||||
case Some(p) => p.completions(level) x fin
|
||||
case None => fin
|
||||
}
|
||||
val repC = repeated.completions(level)
|
||||
val fin = if (min == 0) Completion.empty +: repC else pow(repC, repC, min)
|
||||
partial match {
|
||||
case Some(p) => p.completions(level) x fin
|
||||
case None => fin
|
||||
}
|
||||
}
|
||||
|
||||
def result = None
|
||||
lazy val resultEmpty: Result[Seq[T]] =
|
||||
{
|
||||
val partialAccumulatedOption =
|
||||
partial match {
|
||||
case None => Value(accumulatedReverse)
|
||||
case Some(partialPattern) => partialPattern.resultEmpty.map(_ :: accumulatedReverse)
|
||||
}
|
||||
(partialAccumulatedOption app repeatedParseEmpty)(_ reverse_::: _)
|
||||
}
|
||||
private def repeatedParseEmpty: Result[List[T]] =
|
||||
{
|
||||
if (min == 0)
|
||||
Value(Nil)
|
||||
else
|
||||
// forced determinism
|
||||
for (value <- repeated.resultEmpty) yield makeList(min, value)
|
||||
}
|
||||
|
||||
lazy val resultEmpty: Result[Seq[T]] = {
|
||||
val partialAccumulatedOption =
|
||||
partial match {
|
||||
case None => Value(accumulatedReverse)
|
||||
case Some(partialPattern) => partialPattern.resultEmpty.map(_ :: accumulatedReverse)
|
||||
}
|
||||
(partialAccumulatedOption app repeatedParseEmpty)(_ reverse_::: _)
|
||||
}
|
||||
|
||||
private def repeatedParseEmpty: Result[List[T]] = {
|
||||
if (min == 0)
|
||||
Value(Nil)
|
||||
else
|
||||
// forced determinism
|
||||
for (value <- repeated.resultEmpty) yield makeList(min, value)
|
||||
}
|
||||
|
||||
override def toString = "repeat(" + min + "," + max + "," + partial + "," + repeated + ")"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,10 +7,19 @@ package complete
|
|||
import Parser._
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import java.lang.Character.{ getType, MATH_SYMBOL, OTHER_SYMBOL, DASH_PUNCTUATION, OTHER_PUNCTUATION, MODIFIER_SYMBOL, CURRENCY_SYMBOL }
|
||||
import java.lang.Character.{
|
||||
getType,
|
||||
MATH_SYMBOL,
|
||||
OTHER_SYMBOL,
|
||||
DASH_PUNCTUATION,
|
||||
OTHER_PUNCTUATION,
|
||||
MODIFIER_SYMBOL,
|
||||
CURRENCY_SYMBOL
|
||||
}
|
||||
|
||||
/** Provides standard implementations of commonly useful [[Parser]]s. */
|
||||
trait Parsers {
|
||||
|
||||
/** Matches the end of input, providing no useful result on success. */
|
||||
lazy val EOF = not(any, "Expected EOF")
|
||||
|
||||
|
|
@ -24,10 +33,12 @@ trait Parsers {
|
|||
lazy val Digit = charClass(_.isDigit, "digit") examples DigitSet
|
||||
|
||||
/** Set containing Chars for hexadecimal digits 0-9 and A-F (but not a-f). */
|
||||
lazy val HexDigitSet = Set('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F')
|
||||
lazy val HexDigitSet =
|
||||
Set('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F')
|
||||
|
||||
/** Parses a single hexadecimal digit (0-9, a-f, A-F). */
|
||||
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(_.toString)
|
||||
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(
|
||||
_.toString)
|
||||
|
||||
/** Parses a single letter, according to Char.isLetter, into a Char. */
|
||||
lazy val Letter = charClass(_.isLetter, "letter")
|
||||
|
|
@ -70,14 +81,22 @@ trait Parsers {
|
|||
|
||||
/** Returns true if `c` an operator character. */
|
||||
def isOpChar(c: Char) = !isDelimiter(c) && isOpType(getType(c))
|
||||
def isOpType(cat: Int) = cat match { case MATH_SYMBOL | OTHER_SYMBOL | DASH_PUNCTUATION | OTHER_PUNCTUATION | MODIFIER_SYMBOL | CURRENCY_SYMBOL => true; case _ => false }
|
||||
|
||||
def isOpType(cat: Int) = cat match {
|
||||
case MATH_SYMBOL | OTHER_SYMBOL | DASH_PUNCTUATION | OTHER_PUNCTUATION | MODIFIER_SYMBOL |
|
||||
CURRENCY_SYMBOL =>
|
||||
true; case _ => false
|
||||
}
|
||||
|
||||
/** Returns true if `c` is a dash `-`, a letter, digit, or an underscore `_`. */
|
||||
def isIDChar(c: Char) = isScalaIDChar(c) || c == '-'
|
||||
|
||||
/** Returns true if `c` is a letter, digit, or an underscore `_`. */
|
||||
def isScalaIDChar(c: Char) = c.isLetterOrDigit || c == '_'
|
||||
|
||||
def isDelimiter(c: Char) = c match { case '`' | '\'' | '\"' | /*';' | */ ',' | '.' => true; case _ => false }
|
||||
def isDelimiter(c: Char) = c match {
|
||||
case '`' | '\'' | '\"' | /*';' | */ ',' | '.' => true; case _ => false
|
||||
}
|
||||
|
||||
/** Matches a single character that is not a whitespace character. */
|
||||
lazy val NotSpaceClass = charClass(!_.isWhitespace, "non-whitespace character")
|
||||
|
|
@ -120,17 +139,22 @@ trait Parsers {
|
|||
|
||||
/** Matches any character except a double quote or whitespace. */
|
||||
lazy val NotDQuoteSpaceClass =
|
||||
charClass({ c: Char => (c != DQuoteChar) && !c.isWhitespace }, "non-double-quote-space character")
|
||||
charClass({ c: Char =>
|
||||
(c != DQuoteChar) && !c.isWhitespace
|
||||
}, "non-double-quote-space character")
|
||||
|
||||
/** Matches any character except a double quote or backslash. */
|
||||
lazy val NotDQuoteBackslashClass =
|
||||
charClass({ c: Char => (c != DQuoteChar) && (c != BackslashChar) }, "non-double-quote-backslash character")
|
||||
charClass({ c: Char =>
|
||||
(c != DQuoteChar) && (c != BackslashChar)
|
||||
}, "non-double-quote-backslash character")
|
||||
|
||||
/** Matches a single character that is valid somewhere in a URI. */
|
||||
lazy val URIChar = charClass(alphanum) | chars("_-!.~'()*,;:$&+=?/[]@%#")
|
||||
|
||||
/** Returns true if `c` is an ASCII letter or digit. */
|
||||
def alphanum(c: Char) = ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9')
|
||||
def alphanum(c: Char) =
|
||||
('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9')
|
||||
|
||||
/**
|
||||
* @param base the directory used for completion proposals (when the user presses the TAB key). Only paths under this
|
||||
|
|
@@ -192,7 +216,9 @@ trait Parsers {
* A unicode escape begins with a backslash, followed by a `u` and 4 hexadecimal digits representing the unicode value.
*/
lazy val UnicodeEscape: Parser[Char] =
("u" ~> repeat(HexDigit, 4, 4)) map { seq => Integer.parseInt(seq.mkString, 16).toChar }
("u" ~> repeat(HexDigit, 4, 4)) map { seq =>
Integer.parseInt(seq.mkString, 16).toChar
}
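// Illustrative sketch, assuming DefaultParsers._ is imported. The leading
// backslash is consumed by the surrounding escape parser, so the input here
// starts at the 'u'.
val escaped = parse("u0041", UnicodeEscape) // Right('A')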
|
||||
|
||||
/** Parses an unquoted, non-empty String value that cannot start with a double quote and cannot contain whitespace.*/
|
||||
lazy val NotQuoted = (NotDQuoteSpaceClass ~ OptNotSpace) map { case (c, s) => c.toString + s }
|
||||
|
|
@ -212,20 +238,25 @@ trait Parsers {
|
|||
(rep ~ (sep ~> rep).*).map { case (x ~ xs) => x +: xs }
|
||||
|
||||
/** Wraps the result of `p` in `Some`.*/
|
||||
def some[T](p: Parser[T]): Parser[Option[T]] = p map { v => Some(v) }
|
||||
def some[T](p: Parser[T]): Parser[Option[T]] = p map { v =>
|
||||
Some(v)
|
||||
}
|
||||
|
||||
/**
* Applies `f` to the result of `p`, transforming any exception when evaluating
* `f` into a parse failure with the exception `toString` as the message.
*/
def mapOrFail[S, T](p: Parser[S])(f: S => T): Parser[T] =
p flatMap { s => try { success(f(s)) } catch { case e: Exception => failure(e.toString) } }
p flatMap { s =>
try { success(f(s)) } catch { case e: Exception => failure(e.toString) }
}
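// Illustrative sketch, assuming DefaultParsers._ is imported: a hypothetical
// natural-number parser that fails the parse instead of throwing when toInt overflows.
val smallNat: Parser[Int] = mapOrFail(Digit.+.string)(_.toInt)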
|
||||
|
||||
/**
* Parses a space-delimited, possibly empty sequence of arguments.
* The arguments may use quotes and escapes according to [[StringBasic]].
*/
def spaceDelimited(display: String): Parser[Seq[String]] = (token(Space) ~> token(StringBasic, display)).* <~ SpaceClass.*
def spaceDelimited(display: String): Parser[Seq[String]] =
(token(Space) ~> token(StringBasic, display)).* <~ SpaceClass.*
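// Illustrative sketch, assuming DefaultParsers._ is imported: the usual
// input-task argument parser.
val args = parse(" compile \"two words\"", spaceDelimited("<arg>"))
// args == Right(Seq("compile", "two words")) under StringBasic's quote handling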
|
||||
|
||||
/** Applies `p` and uses `true` as the result if it succeeds and turns failure into a result of `false`. */
|
||||
def flag[T](p: Parser[T]): Parser[Boolean] = (p ^^^ true) ?? false
|
||||
|
|
@@ -236,14 +267,17 @@ trait Parsers {
* The parsers obtained in this way are separated by `sep`, whose result is discarded and only the sequence
* of values from the parsers returned by `p` is used for the result.
*/
def repeatDep[A](p: Seq[A] => Parser[A], sep: Parser[Any]): Parser[Seq[A]] =
{
def loop(acc: Seq[A]): Parser[Seq[A]] = {
val next = (sep ~> p(acc)) flatMap { result => loop(acc :+ result) }
next ?? acc
def repeatDep[A](p: Seq[A] => Parser[A], sep: Parser[Any]): Parser[Seq[A]] = {
def loop(acc: Seq[A]): Parser[Seq[A]] = {
val next = (sep ~> p(acc)) flatMap { result =>
loop(acc :+ result)
}
p(Vector()) flatMap { first => loop(Seq(first)) }
next ?? acc
}
p(Vector()) flatMap { first =>
loop(Seq(first))
}
}
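// Illustrative sketch, mirroring the ParserTest example later in this commit:
// complete colors without suggesting ones already typed (assumes DefaultParsers._).
val colors = Set("blue", "green", "red")
val color = (seen: Seq[String]) => token(ID examples (colors -- seen))
val colorList: Parser[Seq[String]] = repeatDep(color, token(Space))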
|
||||
|
||||
/** Applies String.trim to the result of `p`. */
|
||||
def trimmed(p: Parser[String]) = p map { _.trim }
|
||||
|
|
@@ -260,10 +294,12 @@ object Parsers extends Parsers

/** Provides common [[Parser]] implementations and helper methods.*/
object DefaultParsers extends Parsers with ParserMain {

/** Applies parser `p` to input `s` and returns `true` if the parse was successful. */
def matches(p: Parser[_], s: String): Boolean =
apply(p)(s).resultEmpty.isValid

/** Returns `true` if `s` parses successfully according to [[ID]].*/
def validID(s: String): Boolean = matches(ID, s)

}
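// Illustrative sketch of the helpers above.
import sbt.internal.util.complete.DefaultParsers._
assert(matches(Digit, "5"))
assert(!matches(Digit, "a"))
assert(validID("compile"))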
|
||||
|
|
|
|||
|
|
@ -2,29 +2,30 @@ package sbt.internal.util
|
|||
package complete
|
||||
|
||||
object ProcessError {
|
||||
def apply(command: String, msgs: Seq[String], index: Int): String =
|
||||
{
|
||||
val (line, modIndex) = extractLine(command, index)
|
||||
val point = pointerSpace(command, modIndex)
|
||||
msgs.mkString("\n") + "\n" + line + "\n" + point + "^"
|
||||
}
|
||||
def extractLine(s: String, i: Int): (String, Int) =
|
||||
{
|
||||
val notNewline = (c: Char) => c != '\n' && c != '\r'
|
||||
val left = takeRightWhile(s.substring(0, i))(notNewline)
|
||||
val right = s substring i takeWhile notNewline
|
||||
(left + right, left.length)
|
||||
}
|
||||
def takeRightWhile(s: String)(pred: Char => Boolean): String =
|
||||
{
|
||||
def loop(i: Int): String =
|
||||
if (i < 0)
|
||||
s
|
||||
else if (pred(s(i)))
|
||||
loop(i - 1)
|
||||
else
|
||||
s.substring(i + 1)
|
||||
loop(s.length - 1)
|
||||
}
|
||||
def pointerSpace(s: String, i: Int): String = (s take i) map { case '\t' => '\t'; case _ => ' ' } mkString ""
|
||||
def apply(command: String, msgs: Seq[String], index: Int): String = {
|
||||
val (line, modIndex) = extractLine(command, index)
|
||||
val point = pointerSpace(command, modIndex)
|
||||
msgs.mkString("\n") + "\n" + line + "\n" + point + "^"
|
||||
}
|
||||
|
||||
def extractLine(s: String, i: Int): (String, Int) = {
|
||||
val notNewline = (c: Char) => c != '\n' && c != '\r'
|
||||
val left = takeRightWhile(s.substring(0, i))(notNewline)
|
||||
val right = s substring i takeWhile notNewline
|
||||
(left + right, left.length)
|
||||
}
|
||||
|
||||
def takeRightWhile(s: String)(pred: Char => Boolean): String = {
|
||||
def loop(i: Int): String =
|
||||
if (i < 0)
|
||||
s
|
||||
else if (pred(s(i)))
|
||||
loop(i - 1)
|
||||
else
|
||||
s.substring(i + 1)
|
||||
loop(s.length - 1)
|
||||
}
|
||||
|
||||
def pointerSpace(s: String, i: Int): String =
|
||||
(s take i) map { case '\t' => '\t'; case _ => ' ' } mkString ""
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import Completion.{ token => ctoken, tokenDisplay }
|
|||
sealed trait TokenCompletions {
|
||||
def hideWhen(f: Int => Boolean): TokenCompletions
|
||||
}
|
||||
|
||||
object TokenCompletions {
|
||||
private[sbt] abstract class Delegating extends TokenCompletions { outer =>
|
||||
def completions(seen: String, level: Int, delegate: Completions): Completions
|
||||
|
|
@ -14,6 +15,7 @@ object TokenCompletions {
|
|||
if (hide(level)) Completions.nil else outer.completions(seen, level, delegate)
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] abstract class Fixed extends TokenCompletions { outer =>
|
||||
def completions(seen: String, level: Int): Completions
|
||||
final def hideWhen(hide: Int => Boolean): TokenCompletions = new Fixed {
|
||||
|
|
@ -22,17 +24,23 @@ object TokenCompletions {
|
|||
}
|
||||
}
|
||||
|
||||
val default: TokenCompletions = mapDelegateCompletions((seen, level, c) => ctoken(seen, c.append))
|
||||
val default: TokenCompletions = mapDelegateCompletions(
|
||||
(seen, level, c) => ctoken(seen, c.append))
|
||||
|
||||
def displayOnly(msg: String): TokenCompletions = new Fixed {
|
||||
def completions(seen: String, level: Int) = Completions.single(Completion.displayOnly(msg))
|
||||
}
|
||||
def overrideDisplay(msg: String): TokenCompletions = mapDelegateCompletions((seen, level, c) => tokenDisplay(display = msg, append = c.append))
|
||||
|
||||
def overrideDisplay(msg: String): TokenCompletions =
|
||||
mapDelegateCompletions((seen, level, c) => tokenDisplay(display = msg, append = c.append))
|
||||
|
||||
def fixed(f: (String, Int) => Completions): TokenCompletions = new Fixed {
|
||||
def completions(seen: String, level: Int) = f(seen, level)
|
||||
}
|
||||
def mapDelegateCompletions(f: (String, Int, Completion) => Completion): TokenCompletions = new Delegating {
|
||||
def completions(seen: String, level: Int, delegate: Completions) = Completions(delegate.get.map(c => f(seen, level, c)))
|
||||
}
|
||||
|
||||
def mapDelegateCompletions(f: (String, Int, Completion) => Completion): TokenCompletions =
|
||||
new Delegating {
|
||||
def completions(seen: String, level: Int, delegate: Completions) =
|
||||
Completions(delegate.get.map(c => f(seen, level, c)))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ private[sbt] final class TypeString(val base: String, val args: List[TypeString]
|
|||
}
|
||||
|
||||
private[sbt] object TypeString {
|
||||
|
||||
/** Makes the string representation of a type as returned by Manifest.toString more readable.*/
|
||||
def cleanup(typeString: String): String =
|
||||
parse(typeString, typeStringParser) match {
|
||||
|
|
@ -59,6 +60,7 @@ private[sbt] object TypeString {
|
|||
final val JavaPrefix = "java.lang."
|
||||
/* scala.collection.X -> X */
|
||||
val ShortenCollection = Set("Seq", "List", "Set", "Map", "Iterable")
|
||||
|
||||
val TypeMap = Map(
|
||||
"java.io.File" -> "File",
|
||||
"java.net.URL" -> "URL",
|
||||
|
|
@ -69,12 +71,13 @@ private[sbt] object TypeString {
|
|||
* A Parser that extracts basic structure from the string representation of a type from Manifest.toString.
|
||||
* This is rudimentary and essentially only decomposes the string into names and arguments for parameterized types.
|
||||
*/
|
||||
lazy val typeStringParser: Parser[TypeString] =
{
def isFullScalaIDChar(c: Char) = isScalaIDChar(c) || c == '.' || c == '$'
lazy val fullScalaID = identifier(IDStart, charClass(isFullScalaIDChar, "Scala identifier character"))
lazy val tpe: Parser[TypeString] =
for (id <- fullScalaID; args <- ('[' ~> rep1sep(tpe, ',') <~ ']').?) yield new TypeString(id, args.toList.flatten)
tpe
}
}
lazy val typeStringParser: Parser[TypeString] = {
def isFullScalaIDChar(c: Char) = isScalaIDChar(c) || c == '.' || c == '$'
lazy val fullScalaID =
identifier(IDStart, charClass(isFullScalaIDChar, "Scala identifier character"))
lazy val tpe: Parser[TypeString] =
for (id <- fullScalaID; args <- ('[' ~> rep1sep(tpe, ',') <~ ']').?)
yield new TypeString(id, args.toList.flatten)
tpe
}
}
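// Illustrative sketch (callable only from within the sbt package, since
// TypeString is private[sbt]); the exact output depends on the TypeMap and
// ShortenCollection tables above.
val shown = TypeString.cleanup("scala.collection.Seq[java.lang.String]")
// expected to be along the lines of "Seq[String]"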
|
||||
|
|
|
|||
|
|
@ -5,30 +5,41 @@ package sbt.internal.util
|
|||
package complete
|
||||
|
||||
sealed trait UpperBound {
|
||||
|
||||
/** True if and only if the given value meets this bound.*/
|
||||
def >=(min: Int): Boolean
|
||||
|
||||
/** True if and only if this bound is one.*/
|
||||
def isOne: Boolean
|
||||
|
||||
/** True if and only if this bound is zero.*/
|
||||
def isZero: Boolean
|
||||
|
||||
/**
|
||||
* If this bound is zero or Infinite, `decrement` returns this bound.
|
||||
* Otherwise, this bound is finite and greater than zero and `decrement` returns the bound that is one less than this bound.
|
||||
*/
|
||||
def decrement: UpperBound
|
||||
|
||||
/** True if and only if this is unbounded.*/
|
||||
def isInfinite: Boolean
|
||||
|
||||
}
|
||||
|
||||
/** Represents unbounded. */
|
||||
case object Infinite extends UpperBound {
|
||||
|
||||
/** All finite numbers meet this bound. */
|
||||
def >=(min: Int) = true
|
||||
|
||||
def isOne = false
|
||||
def isZero = false
|
||||
def decrement = this
|
||||
def isInfinite = true
|
||||
|
||||
override def toString = "Infinity"
|
||||
}
|
||||
|
||||
/**
* Represents a finite upper bound. The maximum allowed value is 'value', inclusive.
* It must be positive.
@@ -43,6 +54,7 @@ final case class Finite(value: Int) extends UpperBound {
def isInfinite = false
override def toString = value.toString
}

object UpperBound {
implicit def intToFinite(i: Int): Finite = Finite(i)
}
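// Illustrative sketch of the bound semantics documented above (Finite.>= is
// assumed to check min <= value, matching the scaladoc).
val two: UpperBound = Finite(2)
assert(two >= 1 && two >= 2 && !(two >= 3))
assert(two.decrement == Finite(1))
assert(Infinite.decrement == Infinite)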
|
||||
|
|
|
|||
|
|
@ -56,27 +56,29 @@ object ParserTest extends Properties("Completing Parser") {
|
|||
def checkOne(in: String, parser: Parser[_], expect: Completion): Prop =
|
||||
completions(parser, in, 1) == Completions.single(expect)
|
||||
|
||||
def checkAll(in: String, parser: Parser[_], expect: Completions): Prop =
|
||||
{
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: ("Expected: " + expect) |: (cs == expect: Prop)
|
||||
}
|
||||
def checkAll(in: String, parser: Parser[_], expect: Completions): Prop = {
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: ("Expected: " + expect) |: (cs == expect: Prop)
|
||||
}
|
||||
|
||||
def checkInvalid(in: String) =
|
||||
(("token '" + in + "'") |: checkInv(in, nested)) &&
|
||||
(("display '" + in + "'") |: checkInv(in, nestedDisplay))
|
||||
|
||||
def checkInv(in: String, parser: Parser[_]): Prop =
|
||||
{
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: (cs == Completions.nil: Prop)
|
||||
}
|
||||
def checkInv(in: String, parser: Parser[_]): Prop = {
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: (cs == Completions.nil: Prop)
|
||||
}
|
||||
|
||||
property("nested tokens a") = checkSingle("", Completion.token("", "a1"))(Completion.displayOnly("<a1>"))
|
||||
property("nested tokens a1") = checkSingle("a", Completion.token("a", "1"))(Completion.displayOnly("<a1>"))
|
||||
property("nested tokens a") =
|
||||
checkSingle("", Completion.token("", "a1"))(Completion.displayOnly("<a1>"))
|
||||
property("nested tokens a1") =
|
||||
checkSingle("a", Completion.token("a", "1"))(Completion.displayOnly("<a1>"))
|
||||
property("nested tokens a inv") = checkInvalid("b")
|
||||
property("nested tokens b") = checkSingle("a1", Completion.token("", "b2"))(Completion.displayOnly("<b2>"))
|
||||
property("nested tokens b2") = checkSingle("a1b", Completion.token("b", "2"))(Completion.displayOnly("<b2>"))
|
||||
property("nested tokens b") =
|
||||
checkSingle("a1", Completion.token("", "b2"))(Completion.displayOnly("<b2>"))
|
||||
property("nested tokens b2") =
|
||||
checkSingle("a1b", Completion.token("b", "2"))(Completion.displayOnly("<b2>"))
|
||||
property("nested tokens b inv") = checkInvalid("a1a")
|
||||
property("nested tokens c") = checkSingle("a1b2", Completion.suggestion("c3"))()
|
||||
property("nested tokens c3") = checkSingle("a1b2c", Completion.suggestion("3"))()
|
||||
|
|
@ -86,14 +88,16 @@ object ParserTest extends Properties("Completing Parser") {
|
|||
property("suggest port") = checkOne(" ", spacePort, Completion.displayOnly("<port>"))
|
||||
property("no suggest at end") = checkOne("asdf", "asdf", Completion.suggestion(""))
|
||||
property("no suggest at token end") = checkOne("asdf", token("asdf"), Completion.suggestion(""))
|
||||
property("empty suggest for examples") = checkOne("asdf", any.+.examples("asdf", "qwer"), Completion.suggestion(""))
|
||||
property("empty suggest for examples token") = checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestion(""))
|
||||
property("empty suggest for examples") =
|
||||
checkOne("asdf", any.+.examples("asdf", "qwer"), Completion.suggestion(""))
|
||||
property("empty suggest for examples token") =
|
||||
checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestion(""))
|
||||
|
||||
val colors = Set("blue", "green", "red")
|
||||
val base = (seen: Seq[String]) => token(ID examples (colors -- seen))
|
||||
val sep = token(Space)
|
||||
val repeat = repeatDep(base, sep)
|
||||
def completionStrings(ss: Set[String]): Completions = Completions(ss.map { s => Completion.token("", s) })
|
||||
def completionStrings(ss: Set[String]) = Completions(ss map (Completion.token("", _)))
|
||||
|
||||
property("repeatDep no suggestions for bad input") = checkInv(".", repeat)
|
||||
property("repeatDep suggest all") = checkAll("", repeat, completionStrings(colors))
|
||||
|
|
@ -116,7 +120,9 @@ object ParserExample {
|
|||
val name = token("test")
|
||||
val options = (ws ~> token("quick" | "failed" | "new")).*
|
||||
val exampleSet = Set("am", "is", "are", "was", "were")
|
||||
val include = (ws ~> token(examples(notws.string, new FixedSetExamples(exampleSet), exampleSet.size, false))).*
|
||||
val include = (ws ~> token(
|
||||
examples(notws.string, new FixedSetExamples(exampleSet), exampleSet.size, false)
|
||||
)).*
|
||||
|
||||
val t = name ~ options ~ include
|
||||
|
||||
|
|
|
|||
|
|
@ -8,31 +8,31 @@ class FileExamplesTest extends UnitSpec {
|
|||
|
||||
"listing all files in an absolute base directory" should
|
||||
"produce the entire base directory's contents" in {
|
||||
val _ = new DirectoryStructure {
|
||||
fileExamples().toList should contain theSameElementsAs (allRelativizedPaths)
|
||||
}
|
||||
val _ = new DirectoryStructure {
|
||||
fileExamples().toList should contain theSameElementsAs (allRelativizedPaths)
|
||||
}
|
||||
}
|
||||
|
||||
"listing files with a prefix that matches none" should
|
||||
"produce an empty list" in {
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "z") {
|
||||
fileExamples().toList shouldBe empty
|
||||
}
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "z") {
|
||||
fileExamples().toList shouldBe empty
|
||||
}
|
||||
}
|
||||
|
||||
"listing single-character prefixed files" should
|
||||
"produce matching paths only" in {
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "f") {
|
||||
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
|
||||
}
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "f") {
|
||||
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
|
||||
"listing directory-prefixed files" should
|
||||
"produce matching paths only" in {
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "far") {
|
||||
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
|
||||
}
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "far") {
|
||||
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
|
||||
it should "produce sub-dir contents only when appending a file separator to the directory" in {
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator) {
|
||||
|
|
@ -42,17 +42,17 @@ class FileExamplesTest extends UnitSpec {
|
|||
|
||||
"listing files with a sub-path prefix" should
|
||||
"produce matching paths only" in {
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
|
||||
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
|
||||
}
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
|
||||
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
|
||||
"completing a full path" should
|
||||
"produce a list with an empty string" in {
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "bazaar") {
|
||||
fileExamples().toList shouldEqual List("")
|
||||
}
|
||||
val _ = new DirectoryStructure(withCompletionPrefix = "bazaar") {
|
||||
fileExamples().toList shouldEqual List("")
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Remove DelayedInit - https://github.com/scala/scala/releases/tag/v2.11.0-RC1
|
||||
class DirectoryStructure(withCompletionPrefix: String = "") extends DelayedInit {
|
||||
|
|
@ -64,17 +64,19 @@ class FileExamplesTest extends UnitSpec {
|
|||
var nestedDirectories: List[File] = _
|
||||
|
||||
def allRelativizedPaths: List[String] =
|
||||
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories).map(relativize(baseDir, _).get)
|
||||
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories)
|
||||
.map(relativize(baseDir, _).get)
|
||||
|
||||
def prefixedPathsOnly: List[String] =
|
||||
allRelativizedPaths.filter(_ startsWith withCompletionPrefix).map(_ substring withCompletionPrefix.length)
|
||||
allRelativizedPaths
|
||||
.filter(_ startsWith withCompletionPrefix)
|
||||
.map(_ substring withCompletionPrefix.length)
|
||||
|
||||
override def delayedInit(testBody: => Unit): Unit = {
|
||||
withTemporaryDirectory {
|
||||
tempDir =>
|
||||
createSampleDirStructure(tempDir)
|
||||
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
|
||||
testBody
|
||||
withTemporaryDirectory { tempDir =>
|
||||
createSampleDirStructure(tempDir)
|
||||
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
|
||||
testBody
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -90,7 +92,8 @@ class FileExamplesTest extends UnitSpec {
|
|||
baseDir = tempDir
|
||||
}
|
||||
|
||||
private def toChildFiles(baseDir: File, files: List[String]): List[File] = files.map(new File(baseDir, _))
|
||||
private def toChildFiles(baseDir: File, files: List[String]): List[File] =
|
||||
files.map(new File(baseDir, _))
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,7 +5,8 @@ class FixedSetExamplesTest extends UnitSpec {
|
|||
|
||||
"adding a prefix" should "produce a smaller set of examples with the prefix removed" in {
|
||||
val _ = new Examples {
|
||||
fixedSetExamples.withAddedPrefix("f")() should contain theSameElementsAs (List("oo", "ool", "u"))
|
||||
fixedSetExamples.withAddedPrefix("f")() should contain theSameElementsAs
|
||||
(List("oo", "ool", "u"))
|
||||
fixedSetExamples.withAddedPrefix("fo")() should contain theSameElementsAs (List("o", "ol"))
|
||||
fixedSetExamples.withAddedPrefix("b")() should contain theSameElementsAs (List("ar"))
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -7,62 +7,70 @@ class ParserWithExamplesTest extends UnitSpec {

"listing a limited number of completions" should
"grab only the needed number of elements from the iterable source of examples" in {
val _ = new ParserWithLazyExamples {
parserWithExamples.completions(0)
examples.size shouldEqual maxNumberOfExamples
}
val _ = new ParserWithLazyExamples {
parserWithExamples.completions(0)
examples.size shouldEqual maxNumberOfExamples
}
}

"listing only valid completions" should
"use the delegate parser to remove invalid examples" in {
val _ = new ParserWithValidExamples {
val validCompletions = Completions(Set(
val _ = new ParserWithValidExamples {
val validCompletions = Completions(
Set(
suggestion("blue"),
suggestion("red")
))
parserWithExamples.completions(0) shouldEqual validCompletions
}
parserWithExamples.completions(0) shouldEqual validCompletions
}
}

"listing valid completions in a derived parser" should
"produce only valid examples that start with the character of the derivation" in {
val _ = new ParserWithValidExamples {
val derivedCompletions = Completions(Set(
val _ = new ParserWithValidExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue")
))
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}

"listing valid and invalid completions" should
"produce the entire source of examples" in {
val _ = new parserWithAllExamples {
val completions = Completions(examples.map(suggestion(_)).toSet)
parserWithExamples.completions(0) shouldEqual completions
}
val _ = new parserWithAllExamples {
val completions = Completions(examples.map(suggestion(_)).toSet)
parserWithExamples.completions(0) shouldEqual completions
}
}

"listing valid and invalid completions in a derived parser" should
"produce only examples that start with the character of the derivation" in {
val _ = new parserWithAllExamples {
val derivedCompletions = Completions(Set(
val _ = new parserWithAllExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue"),
suggestion("lock")
))
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}

class ParserWithLazyExamples extends ParserExample(GrowableSourceOfExamples(), maxNumberOfExamples = 5, removeInvalidExamples = false)
class ParserWithLazyExamples
extends ParserExample(
GrowableSourceOfExamples(),
maxNumberOfExamples = 5,
removeInvalidExamples = false
)

class ParserWithValidExamples extends ParserExample(removeInvalidExamples = true)

class parserWithAllExamples extends ParserExample(removeInvalidExamples = false)

case class ParserExample(
examples: Iterable[String] = Set("blue", "yellow", "greeen", "block", "red"),
maxNumberOfExamples: Int = 25,
removeInvalidExamples: Boolean
examples: Iterable[String] = Set("blue", "yellow", "greeen", "block", "red"),
maxNumberOfExamples: Int = 25,
removeInvalidExamples: Boolean
) {

import DefaultParsers._
@@ -5,7 +5,8 @@ import scala.annotation.tailrec
import Formula.{ And, True }

/*
Defines a propositional logic with negation as failure and only allows stratified rule sets (negation must be acyclic) in order to have a unique minimal model.
Defines a propositional logic with negation as failure and only allows stratified rule sets
(negation must be acyclic) in order to have a unique minimal model.

For example, this is not allowed:
+ p :- not q

@@ -24,7 +25,7 @@ as is this:
+ https://en.wikipedia.org/wiki/Propositional_logic
+ https://en.wikipedia.org/wiki/Stable_model_semantics
+ http://www.w3.org/2005/rules/wg/wiki/negation
*/
*/
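
As a quick illustration of the stratified-rule restriction described in the comment above, here is a minimal sketch (not part of the commit; the Atom, Formula, and Logic API defined in this file is assumed to be in scope, along with its enclosing package):

// Sketch: a small stratified rule set and its reduction.
val a = Atom("a"); val b = Atom("b"); val c = Atom("c")
val clauses = List(
  Formula.True.proves(b),                    // b is a fact
  Formula.And(Set[Literal](b, !c)).proves(a) // a :- b, not c  (negation is acyclic here)
)
// `c` never appears in any head, so `not c` is inferred (negation as failure) and `a` is proven.
// Expected: Right(matched) with matched.provenSet == Set(a, b).
val result = Logic.reduceAll(clauses, Set.empty)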

/** Disjunction (or) of the list of clauses. */
final case class Clauses(clauses: List[Clause]) {

@@ -36,16 +37,21 @@ final case class Clause(body: Formula, head: Set[Atom])

/** A literal is an [[Atom]] or its [[negation|Negated]]. */
sealed abstract class Literal extends Formula {

/** The underlying (positive) atom. */
def atom: Atom

/** Negates this literal.*/
def unary_! : Literal

}

/** A variable with name `label`. */
final case class Atom(label: String) extends Literal {
def atom = this
def unary_! : Negated = Negated(this)
}
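
A small usage sketch of the literal types above (not part of the commit; the atom name is illustrative):

// Sketch: negating an Atom yields a Negated literal that still exposes the positive atom.
val p: Atom = Atom("p")
val notP: Negated = !p // unary_! on Atom
// notP.atom == p      // the underlying (positive) atom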

/**
* A negated atom, in the sense of negation as failure, not logical negation.
* That is, it is true if `atom` is not known/defined.

@@ -60,6 +66,7 @@ final case class Negated(atom: Atom) extends Literal {
* This is less convenient when defining clauses, but is not less powerful.)
*/
sealed abstract class Formula {

/** Constructs a clause that proves `atoms` when this formula is true. */
def proves(atom: Atom, atoms: Atom*): Clause = Clause(this, (atom +: atoms).toSet)
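
For illustration (not part of the commit; the atoms are made up), `proves` reads as "body implies heads":

// Sketch: rain implies both wet and slippery.
val rain = Atom("rain"); val wet = Atom("wet"); val slippery = Atom("slippery")
val clause: Clause = rain.proves(wet, slippery)
// clause == Clause(rain, Set(wet, slippery))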

@@ -72,40 +79,46 @@ sealed abstract class Formula {
case (a: Literal, And(bs)) => And(bs + a)
case (a: Literal, b: Literal) => And(Set(a, b))
}

}

object Formula {

/** A conjunction of literals. */
final case class And(literals: Set[Literal]) extends Formula {
assert(literals.nonEmpty, "'And' requires at least one literal.")
}

final case object True extends Formula

}

object Logic {
def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Either[LogicException, Matched] =
def reduceAll(
clauses: List[Clause],
initialFacts: Set[Literal]
): Either[LogicException, Matched] =
reduce(Clauses(clauses), initialFacts)

/**
* Computes the variables in the unique stable model for the program represented by `clauses` and `initialFacts`.
* `clause` may not have any negative feedback (that is, negation is acyclic)
* Computes the variables in the unique stable model for the program represented by `clauses` and
* `initialFacts`. `clause` may not have any negative feedback (that is, negation is acyclic)
* and `initialFacts` cannot be in the head of any clauses in `clause`.
* These restrictions ensure that the logic program has a unique minimal model.
*/
def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] =
{
val (posSeq, negSeq) = separate(initialFacts.toSeq)
val (pos, neg) = (posSeq.toSet, negSeq.toSet)
def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] = {
val (posSeq, negSeq) = separate(initialFacts.toSeq)
val (pos, neg) = (posSeq.toSet, negSeq.toSet)

val problem =
checkContradictions(pos, neg) orElse
checkOverlap(clauses, pos) orElse
checkAcyclic(clauses)
val problem =
checkContradictions(pos, neg) orElse
checkOverlap(clauses, pos) orElse
checkAcyclic(clauses)

problem.toLeft(
reduce0(clauses, initialFacts, Matched.empty)
)
}
problem.toLeft(
reduce0(clauses, initialFacts, Matched.empty)
)
}
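
To illustrate the acyclicity restriction documented above, a sketch (not part of the commit; package and imports assumed in scope) of a program that `reduce` is expected to reject:

// Sketch: p :- not q and q :- not p form a negation cycle, so the rule set is not
// stratified and reduceAll should return Left rather than pick one of the two models.
val p = Atom("p"); val q = Atom("q")
val cyclic = List((!q).proves(p), (!p).proves(q))
Logic.reduceAll(cyclic, Set.empty) match {
  case Left(err: CyclicNegation) => println(err) // expected branch
  case other                     => println(other)
}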

/**
* Verifies `initialFacts` are not in the head of any `clauses`.

@@ -113,13 +126,19 @@ object Logic {
* This isn't necessarily a problem, but the main sbt use cases expects
* a proven atom to have at least one clause satisfied.
*/
private[this] def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]): Option[InitialOverlap] = {
private[this] def checkOverlap(
clauses: Clauses,
initialFacts: Set[Atom]
): Option[InitialOverlap] = {
val as = atoms(clauses)
val initialOverlap = initialFacts.filter(as.inHead)
if (initialOverlap.nonEmpty) Some(new InitialOverlap(initialOverlap)) else None
}

private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]): Option[InitialContradictions] = {
private[this] def checkContradictions(
pos: Set[Atom],
neg: Set[Atom]
): Option[InitialContradictions] = {
val contradictions = pos intersect neg
if (contradictions.nonEmpty) Some(new InitialContradictions(contradictions)) else None
}

@@ -129,14 +148,17 @@ object Logic {
val cycle = Dag.findNegativeCycle(graph(deps))
if (cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None
}

private[this] def graph(deps: Map[Atom, Set[Literal]]) = new Dag.DirectedSignedGraph[Atom] {
type Arrow = Literal
def nodes = deps.keys.toList
def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList

def isNegative(b: Literal) = b match {
case Negated(_) => true
case Atom(_) => false
}

def head(b: Literal) = b.atom
}

@@ -144,52 +166,72 @@ object Logic {
(Map.empty[Atom, Set[Literal]] /: clauses.clauses) {
case (m, Clause(formula, heads)) =>
val deps = literals(formula)
(m /: heads) { (n, head) => n.updated(head, n.getOrElse(head, Set.empty) ++ deps) }
(m /: heads) { (n, head) =>
n.updated(head, n.getOrElse(head, Set.empty) ++ deps)
}
}

sealed abstract class LogicException(override val toString: String)
final class InitialContradictions(val literals: Set[Atom]) extends LogicException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t"))
final class InitialOverlap(val literals: Set[Atom]) extends LogicException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t"))
final class CyclicNegation(val cycle: List[Literal]) extends LogicException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t"))

final class InitialContradictions(val literals: Set[Atom])
extends LogicException(
"Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")
)

final class InitialOverlap(val literals: Set[Atom])
extends LogicException(
"Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")
)

final class CyclicNegation(val cycle: List[Literal])
extends LogicException(
"Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")
)

/** Tracks proven atoms in the reverse order they were proved. */
final class Matched private (val provenSet: Set[Atom], reverseOrdered: List[Atom]) {
def add(atoms: Set[Atom]): Matched = add(atoms.toList)

def add(atoms: List[Atom]): Matched = {
val newOnly = atoms.filterNot(provenSet)
new Matched(provenSet ++ newOnly, newOnly ::: reverseOrdered)
}

def ordered: List[Atom] = reverseOrdered.reverse
override def toString = ordered.map(_.label).mkString("Matched(", ",", ")")
}

object Matched {
val empty = new Matched(Set.empty, Nil)
}
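
A small behavioural sketch of `Matched` (not part of the commit; the atoms are illustrative):

// Sketch: Matched accumulates proven atoms, keeps first-proved order, and ignores duplicates.
val a = Atom("a"); val b = Atom("b")
val m = Matched.empty.add(List(a)).add(List(b, a))
// m.provenSet == Set(a, b)  (the second `a` is dropped)
// m.ordered   == List(a, b) (a was proved before b)
// m.toString  == "Matched(a,b)"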

/** Separates a sequence of literals into `(pos, neg)` atom sequences. */
private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) = Util.separate(lits) {
case a: Atom => Left(a)
case Negated(n) => Right(n)
}
private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) =
Util.separate(lits) {
case a: Atom => Left(a)
case Negated(n) => Right(n)
}

/**
* Finds clauses that have no body and thus prove their head.
* Returns `(<proven atoms>, <remaining unproven clauses>)`.
*/
private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) =
{
val (proven, unproven) = c.clauses.partition(_.body == True)
(proven.flatMap(_.head).toSet, unproven)
}
private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) = {
val (proven, unproven) = c.clauses.partition(_.body == True)
(proven.flatMap(_.head).toSet, unproven)
}

private[this] def keepPositive(lits: Set[Literal]): Set[Atom] =
lits.collect { case a: Atom => a }.toSet

// precondition: factsToProcess contains no contradictions
@tailrec
private[this] def reduce0(clauses: Clauses, factsToProcess: Set[Literal], state: Matched): Matched =
@tailrec private[this] def reduce0(
clauses: Clauses,
factsToProcess: Set[Literal],
state: Matched
): Matched =
applyAll(clauses, factsToProcess) match {
case None => // all of the remaining clauses failed on the new facts
state
case None => state // all of the remaining clauses failed on the new facts
case Some(applied) =>
val (proven, unprovenClauses) = findProven(applied)
val processedFacts = state add keepPositive(factsToProcess)

@@ -199,7 +241,8 @@ object Logic {
newState // no remaining clauses, done.
else {
val unproven = Clauses(unprovenClauses)
val nextFacts: Set[Literal] = if (newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven)
val nextFacts: Set[Literal] =
if (newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven)
reduce0(unproven, nextFacts, newState)
}
}

@@ -208,26 +251,30 @@ object Logic {
* Finds negated atoms under the negation as failure rule and returns them.
* This should be called only after there are no more known atoms to be substituted.
*/
private[this] def inferFailure(clauses: Clauses): Set[Literal] =
{
/* At this point, there is at least one clause and one of the following is the case as the result of the acyclic negation rule:
i. there is at least one variable that occurs in a clause body but not in the head of a clause
ii. there is at least one variable that occurs in the head of a clause and does not transitively depend on a negated variable
In either case, each such variable x cannot be proven true and therefore proves 'not x' (negation as failure, !x in the code).
*/
val allAtoms = atoms(clauses)
val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse)
if (newFacts.nonEmpty)
newFacts
else {
val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty)
val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue)
if (newlyFalse.nonEmpty)
newlyFalse
else // should never happen due to the acyclic negation rule
sys.error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue")
}
private[this] def inferFailure(clauses: Clauses): Set[Literal] = {
/* At this point, there is at least one clause and one of the following is the case as the
result of the acyclic negation rule:
i. there is at least one variable that occurs in a clause body but not in the head of a
clause
ii. there is at least one variable that occurs in the head of a clause and does not
transitively depend on a negated variable

In either case, each such variable x cannot be proven true and therefore proves 'not x'
(negation as failure, !x in the code).
*/
val allAtoms = atoms(clauses)
val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse)
if (newFacts.nonEmpty)
newFacts
else {
val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty)
val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue)
if (newlyFalse.nonEmpty)
newlyFalse
else // should never happen due to the acyclic negation rule
sys.error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue")
}
}

private[this] def negated(atoms: Set[Atom]): Set[Literal] = atoms.map(a => Negated(a))

@@ -238,11 +285,16 @@ object Logic {
* d :- a
*/
@tailrec
def hasNegatedDependency(clauses: Seq[Clause], posDeps: Relation[Atom, Atom], negDeps: Relation[Atom, Atom]): List[Atom] =
def hasNegatedDependency(
clauses: Seq[Clause],
posDeps: Relation[Atom, Atom],
negDeps: Relation[Atom, Atom]
): List[Atom] =
clauses match {
case Seq() =>
// because cycles between positive literals are allowed, this isn't strictly a topological sort
Dag.topologicalSortUnchecked(negDeps._1s)(posDeps.reverse)

case Clause(formula, head) +: tail =>
// collect direct positive and negative literals and track them in separate graphs
val (pos, neg) = directDeps(formula)

@@ -259,6 +311,7 @@ object Logic {
case Negated(a) => Right(a)
case a: Atom => Left(a)
}

private[this] def literals(formula: Formula): Set[Literal] = formula match {
case And(lits) => lits
case l: Literal => Set(l)

@@ -278,10 +331,13 @@ object Logic {

/** Represents the set of atoms in the heads of clauses and in the bodies (formulas) of clauses. */
final case class Atoms(inHead: Set[Atom], inFormula: Set[Atom]) {

/** Concatenates this with `as`. */
def ++(as: Atoms): Atoms = Atoms(inHead ++ as.inHead, inFormula ++ as.inFormula)

/** Atoms that cannot be true because they do not occur in a head. */
def triviallyFalse: Set[Atom] = inFormula -- inHead

}
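
A quick sketch (not part of the commit; made-up atoms) of what `triviallyFalse` captures:

// Sketch: q appears only in a body, never in a head, so it can never be proven.
val p = Atom("p"); val q = Atom("q")
val as = Atoms(inHead = Set(p), inFormula = Set(p, q))
// as.triviallyFalse == Set(q)
// (inferFailure turns such atoms into negated facts via negation as failure)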

/**

@@ -297,29 +353,26 @@ object Logic {
* Postcondition: no atom in `facts` is present in the result
* Postcondition: No clauses have an empty head
*/
def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] =
{
val newClauses =
if (facts.isEmpty)
cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head
else
cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList)
if (newClauses.isEmpty) None else Some(Clauses(newClauses))
}

def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] =
{
val atoms = facts.map(_.atom)
val newHead = c.head -- atoms // 3.
if (newHead.isEmpty) // 4. empty head
None
substitute(c.body, facts).map(f => Clause(f, newHead)) // 1, 2
}

def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] = {
val newClauses =
if (facts.isEmpty)
cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head
else
cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList)
if (newClauses.isEmpty) None else Some(Clauses(newClauses))
}

def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] = {
val atoms = facts.map(_.atom)
val newHead = c.head -- atoms // 3.
if (newHead.isEmpty) // 4. empty head
None
else
substitute(c.body, facts).map(f => Clause(f, newHead)) // 1, 2
}
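
A behavioural sketch of `applyAll` (not part of the commit; atoms and expectations are illustrative, and the types above are assumed in scope):

// Sketch: applying the known fact `a` to the clause  c :- a, b
val a = Atom("a"); val b = Atom("b"); val c = Atom("c")
val cs = Clauses(List(Formula.And(Set[Literal](a, b)).proves(c)))
val simplified = Logic.applyAll(cs, Set[Literal](a))
// simplified is Some(_): per the postconditions above, the surviving clause's body
// no longer mentions `a`, and `c` remains in its head.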

/** Derives the formula that results from substituting `facts` into `formula`. */
@tailrec
def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match {
@tailrec def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match {
case And(lits) =>
def negated(lits: Set[Literal]): Set[Literal] = lits.map(a => !a)
if (lits.exists(negated(facts))) // 2.