Merge pull request #3302 from dwijnand/consume-util

In-source the modules of sbt/util required in sbt/sbt
Dale Wijnand 2017-07-07 14:14:20 +01:00 committed by GitHub
commit 5f2e5cf10e
56 changed files with 6044 additions and 98 deletions

build.sbt (133 changed lines)

@ -124,6 +124,34 @@ lazy val bundledLauncherProj =
/* ** subproject declarations ** */
val collectionProj = (project in file("internal") / "util-collection")
.settings(
testedBaseSettings,
Util.keywordsSettings,
name := "Collections",
libraryDependencies ++= Seq(sjsonNewScalaJson.value)
)
.configure(addSbtUtilPosition)
// Command line-related utilities.
val completeProj = (project in file("internal") / "util-complete")
.dependsOn(collectionProj)
.settings(
testedBaseSettings,
name := "Completion",
libraryDependencies += jline
)
.configure(addSbtIO, addSbtUtilControl)
// A logic with restricted negation as failure for a unique, stable model
val logicProj = (project in file("internal") / "util-logic")
.dependsOn(collectionProj)
.settings(
testedBaseSettings,
name := "Logic"
)
.configure(addSbtUtilRelation)
/* **** Intermediate-level Modules **** */
// Runner for uniform test interface
@ -154,21 +182,23 @@ lazy val testAgentProj = (project in file("testing") / "agent")
// Basic task engine
lazy val taskProj = (project in file("tasks"))
.dependsOn(collectionProj)
.settings(
testedBaseSettings,
name := "Tasks"
)
.configure(addSbtUtilControl, addSbtUtilCollection)
.configure(addSbtUtilControl)
// Standard task system. This provides map, flatMap, join, and more on top of the basic task model.
lazy val stdTaskProj = (project in file("tasks-standard"))
.dependsOn(collectionProj)
.dependsOn(taskProj % "compile;test->test")
.settings(
testedBaseSettings,
name := "Task System",
testExclusive
)
.configure(addSbtUtilCollection, addSbtUtilLogging, addSbtUtilCache, addSbtIO)
.configure(addSbtIO, addSbtUtilLogging, addSbtUtilCache)
// Embedded Scala code runner
lazy val runProj = (project in file("run"))
@ -201,24 +231,23 @@ lazy val scriptedPluginProj = (project in scriptedPath / "plugin")
// Implementation and support code for defining actions.
lazy val actionsProj = (project in file("main-actions"))
.dependsOn(runProj, stdTaskProj, taskProj, testingProj)
.dependsOn(completeProj, runProj, stdTaskProj, taskProj, testingProj)
.settings(
testedBaseSettings,
name := "Actions",
libraryDependencies += sjsonNewScalaJson.value
)
.configure(
addSbtCompilerClasspath,
addSbtUtilCompletion,
addSbtCompilerApiInfo,
addSbtZinc,
addSbtCompilerIvyIntegration,
addSbtCompilerInterface,
addSbtIO,
addSbtUtilLogging,
addSbtUtilRelation,
addSbtCompilerInterface,
addSbtCompilerClasspath,
addSbtCompilerApiInfo,
addSbtUtilTracking,
addSbtLm,
addSbtUtilTracking
addSbtCompilerIvyIntegration,
addSbtZinc
)
lazy val protocolProj = (project in file("protocol"))
@ -237,7 +266,7 @@ lazy val protocolProj = (project in file("protocol"))
// General command support and core commands not specific to a build system
lazy val commandProj = (project in file("main-command"))
.enablePlugins(ContrabandPlugin, JsonCodecPlugin)
.dependsOn(protocolProj)
.dependsOn(protocolProj, completeProj)
.settings(
testedBaseSettings,
name := "Command",
@ -247,22 +276,23 @@ lazy val commandProj = (project in file("main-command"))
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats
)
.configure(addSbtCompilerInterface,
addSbtIO,
addSbtUtilLogging,
addSbtUtilCompletion,
addSbtCompilerClasspath,
addSbtLm)
.configure(
addSbtIO,
addSbtUtilLogging,
addSbtCompilerInterface,
addSbtCompilerClasspath,
addSbtLm
)
// The core macro project defines the main logic of the DSL, abstracted
// away from several sbt implementors (tasks, settings, et cetera).
lazy val coreMacrosProj = (project in file("core-macros"))
.dependsOn(collectionProj)
.settings(
commonSettings,
name := "Core Macros",
libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value
)
.configure(addSbtUtilCollection)
/* Write all the compile-time dependencies of the spores macro to a file,
* in order to read it from the created Toolbox to run the neg tests. */
@ -286,19 +316,18 @@ lazy val generateToolboxClasspath = Def.task {
// Fixes scope=Scope for Setting (core defined in collectionProj) to define the settings system used in build definitions
lazy val mainSettingsProj = (project in file("main-settings"))
.dependsOn(commandProj, stdTaskProj, coreMacrosProj)
.dependsOn(completeProj, commandProj, stdTaskProj, coreMacrosProj)
.settings(
testedBaseSettings,
name := "Main Settings",
resourceGenerators in Compile += generateToolboxClasspath.taskValue
)
.configure(
addSbtUtilCache,
addSbtCompilerInterface,
addSbtUtilRelation,
addSbtUtilLogging,
addSbtIO,
addSbtUtilCompletion,
addSbtUtilLogging,
addSbtUtilCache,
addSbtUtilRelation,
addSbtCompilerInterface,
addSbtCompilerClasspath,
addSbtLm
)
@ -306,7 +335,7 @@ lazy val mainSettingsProj = (project in file("main-settings"))
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main"))
.enablePlugins(ContrabandPlugin)
.dependsOn(actionsProj, mainSettingsProj, runProj, commandProj)
.dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj)
.settings(
testedBaseSettings,
name := "Main",
@ -315,12 +344,13 @@ lazy val mainProj = (project in file("main"))
baseDirectory.value / "src" / "main" / "contraband-scala",
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala"
)
.configure(addSbtCompilerInterface,
addSbtIO,
addSbtUtilLogging,
addSbtUtilLogic,
addSbtLm,
addSbtZincCompile)
.configure(
addSbtIO,
addSbtUtilLogging,
addSbtCompilerInterface,
addSbtLm,
addSbtZincCompile
)
// Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object
// technically, we need a dependency on all of mainProj's dependencies, but we don't do that since this is strictly an integration project
@ -375,6 +405,9 @@ lazy val publishLauncher = TaskKey[Unit]("publish-launcher")
def allProjects =
Seq(
collectionProj,
logicProj,
completeProj,
testingProj,
testAgentProj,
taskProj,
@ -406,16 +439,14 @@ def otherRootSettings =
aggregate in bintrayRelease := false
) ++ inConfig(Scripted.RepoOverrideTest)(
Seq(
scriptedPrescripted := { _ =>
()
},
scriptedLaunchOpts := {
List("-Xmx1500M",
"-Xms512M",
"-server",
"-Dsbt.override.build.repos=true",
s"""-Dsbt.repository.config=${scriptedSource.value / "repo.config"}""")
},
scriptedPrescripted := (_ => ()),
scriptedLaunchOpts := List(
"-Xmx1500M",
"-Xms512M",
"-server",
"-Dsbt.override.build.repos=true",
s"""-Dsbt.repository.config=${scriptedSource.value / "repo.config"}"""
),
scripted := scriptedTask.evaluated,
scriptedUnpublished := scriptedUnpublishedTask.evaluated,
scriptedSource := (sourceDirectory in sbtProj).value / "repo-override-test"
@ -449,15 +480,17 @@ lazy val safeProjects: ScopeFilter = ScopeFilter(
)
lazy val otherUnitTests = taskKey[Unit]("Unit test other projects")
lazy val otherProjects: ScopeFilter = ScopeFilter(
inProjects(testingProj,
testAgentProj,
taskProj,
scriptedSbtProj,
scriptedPluginProj,
commandProj,
mainSettingsProj,
mainProj,
sbtProj),
inProjects(
testingProj,
testAgentProj,
taskProj,
scriptedSbtProj,
scriptedPluginProj,
commandProj,
mainSettingsProj,
mainProj,
sbtProj
),
inConfigurations(Test)
)
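The `addSbtIO`, `addSbtUtil*`, `addSbtCompiler*`, `addSbtZinc` and `addSbtLm` functions chained through `.configure(...)` above are `Project => Project` helpers defined under the build's `project/` directory and are not shown in this diff. As a rough, hypothetical sketch (names and signature below are illustrative, not the actual helpers), such a function typically switches between a source dependency on a local checkout and the published binary:

import sbt._
import Keys._

// Hypothetical helper in the spirit of the addSbt* functions referenced above:
// depend on a locally checked-out module's sources when a path is given,
// otherwise fall back to the published binary artifact.
def addModule(localPath: Option[String], projectName: String, binary: ModuleID)(p: Project): Project =
  localPath match {
    case Some(dir) => p.dependsOn(ProjectRef(file(dir), projectName)) // source dependency
    case None      => p.settings(libraryDependencies += binary)       // binary dependency
  }

// illustrative use: someProject.configure(addModule(sys.props.get("sbtio.path"), "io", sbtIOModuleID))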


@ -0,0 +1,3 @@
Simple Build Tool: Collection Component
Copyright 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)


@ -0,0 +1,210 @@
package sbt.internal.util
import Classes.Applicative
import Types._
/**
* An abstraction over a higher-order type constructor `K[x[y]]` with the purpose of abstracting
* over heterogeneous sequences like `KList` and `TupleN` with elements with a common type
* constructor as well as homogeneous sequences `Seq[M[T]]`.
*/
trait AList[K[L[x]]] {
def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[K[P]]
def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A
def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil)
def apply[M[_], C](value: K[M], f: K[Id] => C)(implicit a: Applicative[M]): M[C] =
a.map(f, traverse[M, M, Id](value, idK[M])(a))
}
object AList {
type Empty = AList[({ type l[L[x]] = Unit })#l]
/** AList for Unit, which represents a sequence that is always empty.*/
val empty: Empty = new Empty {
def transform[M[_], N[_]](in: Unit, f: M ~> N) = ()
def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(())
}
type SeqList[T] = AList[({ type l[L[x]] = List[L[T]] })#l]
/** AList for a homogeneous sequence. */
def seq[T]: SeqList[T] = new SeqList[T] {
def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T])
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t))
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = {
def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
in match {
case Nil => ap.pure(g(Nil))
case x :: xs =>
val h = (ts: List[T]) => (t: T) => g(t :: ts)
ap.apply(loop(xs, h), x)
}
loop(s, f)
}
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ???
}
/** AList for the arbitrary arity data structure KList. */
def klist[KL[M[_]] <: KList[M] { type Transform[N[_]] = KL[N] }]: AList[KL] = new AList[KL] {
def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f)
def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N, P](f)(np)
override def toList[M[_]](k: KL[M]) = k.toList
}
type Single[A] = AList[({ type l[L[x]] = L[A] })#l]
/** AList for a single value. */
def single[A]: Single[A] = new Single[A] {
def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a)
def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a)
}
type ASplit[K[L[x]], B[x]] = AList[({ type l[L[x]] = K[(L ∙ B)#l] })#l]
/** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`*/
def asplit[K[L[x]], B[x]](base: AList[K]): ASplit[K, B] = new ASplit[K, B] {
type Split[L[x]] = K[(L ∙ B)#l]
def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] =
base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M, N, B](f))
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] = {
val g = nestCon[M, (N ∙ P)#l, B](f)
base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np)
}
def foldr[M[_], A](value: Split[M], f: (M[_], A) => A, init: A): A =
base.foldr[(M ∙ B)#l, A](value, f, init)
}
// TODO: auto-generate
sealed trait T2K[A, B] { type l[L[x]] = (L[A], L[B]) }
type T2List[A, B] = AList[T2K[A, B]#l]
def tuple2[A, B]: T2List[A, B] = new T2List[A, B] {
type T2[M[_]] = (M[A], M[B])
def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2))
def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init))
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T2[P]] = {
val g = (Tuple2.apply[P[A], P[B]] _).curried
np.apply(np.map(g, f(t._1)), f(t._2))
}
}
sealed trait T3K[A, B, C] { type l[L[x]] = (L[A], L[B], L[C]) }
type T3List[A, B, C] = AList[T3K[A, B, C]#l]
def tuple3[A, B, C]: T3List[A, B, C] = new T3List[A, B, C] {
type T3[M[_]] = (M[A], M[B], M[C])
def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3))
def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init)))
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] = {
val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
}
}
sealed trait T4K[A, B, C, D] { type l[L[x]] = (L[A], L[B], L[C], L[D]) }
type T4List[A, B, C, D] = AList[T4K[A, B, C, D]#l]
def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] {
type T4[M[_]] = (M[A], M[B], M[C], M[D])
def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4))
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] = {
val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
}
}
sealed trait T5K[A, B, C, D, E] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E]) }
type T5List[A, B, C, D, E] = AList[T5K[A, B, C, D, E]#l]
def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] {
type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E])
def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5))
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] = {
val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
}
}
sealed trait T6K[A, B, C, D, E, F] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F]) }
type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l]
def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] {
type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F])
def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] = {
val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6))
}
}
sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) }
type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l]
def tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] {
type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G])
def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] = {
val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7))
}
}
sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) }
type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l]
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8))
}
}
sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) }
type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l]
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9))
}
}
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) }
type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l]
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init))))))))))
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] = {
val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10))
}
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) }
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11))
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))))
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] = {
val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11))
}
}
}
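As a hypothetical usage sketch (not code from this commit), the `tuple2` instance lets a pair of values sharing a type constructor be combined through that constructor's `Applicative`, or rewrapped element-wise with a natural transformation:

import sbt.internal.util._
import Types._
import Classes._

val pair: (Option[Int], Option[String]) = (Some(2), Some("a"))

// Combine both elements via the Option Applicative from Classes: Some("aa"),
// or None if either side were None.
val combined: Option[String] =
  AList.tuple2[Int, String].apply[Option, String](pair, { case (n, s) => s * n })

// Rewrap each element with an Option ~> List natural transformation.
val asLists: (List[Int], List[String]) =
  AList.tuple2[Int, String].transform(pair, new (Option ~> List) {
    def apply[A](o: Option[A]): List[A] = o.toList
  })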


@ -0,0 +1,252 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
import Types._
import scala.reflect.Manifest
import sbt.util.OptJsonWriter
// T must be invariant to work properly.
// Because it is sealed and the only instances go through AttributeKey.apply,
// a single AttributeKey instance cannot conform to AttributeKey[T] for different Ts
/**
* A key in an [[AttributeMap]] that constrains its associated value to be of type `T`.
* The key is uniquely defined by its [[label]] and type `T`, represented at runtime by [[manifest]].
*/
sealed trait AttributeKey[T] {
/** The runtime evidence for `T` */
def manifest: Manifest[T]
/** The label is the identifier for the key and is camelCase by convention. */
def label: String
/** An optional, brief description of the key. */
def description: Option[String]
/**
* In environments that support delegation, looking up this key when it has no associated value will delegate to the values associated with these keys.
* The delegation proceeds in the order the keys are returned here.
*/
def extend: Seq[AttributeKey[_]]
/**
* Specifies whether this key is a local, anonymous key (`true`) or not (`false`).
* This is typically only used for programmatic, intermediate keys that should not be referenced outside of a specific scope.
*/
def isLocal: Boolean
/** Identifies the relative importance of a key among other keys.*/
def rank: Int
def optJsonWriter: OptJsonWriter[T]
}
private[sbt] abstract class SharedAttributeKey[T] extends AttributeKey[T] {
override final def toString = label
override final def hashCode = label.hashCode
override final def equals(o: Any) =
(this eq o.asInstanceOf[AnyRef]) || (o match {
case a: SharedAttributeKey[t] => a.label == this.label && a.manifest == this.manifest
case _ => false
})
final def isLocal: Boolean = false
}
object AttributeKey {
def apply[T: Manifest: OptJsonWriter](name: String): AttributeKey[T] =
make(name, None, Nil, Int.MaxValue)
def apply[T: Manifest: OptJsonWriter](name: String, rank: Int): AttributeKey[T] =
make(name, None, Nil, rank)
def apply[T: Manifest: OptJsonWriter](name: String, description: String): AttributeKey[T] =
apply(name, description, Nil)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
rank: Int): AttributeKey[T] =
apply(name, description, Nil, rank)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]]): AttributeKey[T] =
apply(name, description, extend, Int.MaxValue)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int): AttributeKey[T] =
make(name, Some(description), extend, rank)
private[this] def make[T](
name: String,
description0: Option[String],
extend0: Seq[AttributeKey[_]],
rank0: Int
)(implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
new SharedAttributeKey[T] {
def manifest = mf
val label = Util.hyphenToCamel(name)
def description = description0
def extend = extend0
def rank = rank0
def optJsonWriter = ojw
}
private[sbt] def local[T](implicit mf: Manifest[T], ojw: OptJsonWriter[T]): AttributeKey[T] =
new AttributeKey[T] {
def manifest = mf
def label = LocalLabel
def description = None
def extend = Nil
override def toString = label
def isLocal: Boolean = true
def rank = Int.MaxValue
val optJsonWriter = ojw
}
private[sbt] final val LocalLabel = "$" + "local"
}
/**
* An immutable map where a key is the tuple `(String,T)` for a fixed type `T` and can only be associated with values of type `T`.
* It is therefore possible for this map to contain mappings for keys with the same label but different types.
* Excluding this possibility is the responsibility of the client if desired.
*/
trait AttributeMap {
/**
* Gets the value of type `T` associated with the key `k`.
* If a key with the same label but different type is defined, this method will fail.
*/
def apply[T](k: AttributeKey[T]): T
/**
* Gets the value of type `T` associated with the key `k` or `None` if no value is associated.
* If a key with the same label but a different type is defined, this method will return `None`.
*/
def get[T](k: AttributeKey[T]): Option[T]
/**
* Returns this map without the mapping for `k`.
* This method will not remove a mapping for a key with the same label but a different type.
*/
def remove[T](k: AttributeKey[T]): AttributeMap
/**
* Returns true if this map contains a mapping for `k`.
* If a key with the same label but a different type is defined in this map, this method will return `false`.
*/
def contains[T](k: AttributeKey[T]): Boolean
/**
* Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`.
* Any mappings for keys with the same label but different types are unaffected.
*/
def put[T](k: AttributeKey[T], value: T): AttributeMap
/** All keys with defined mappings. There may be multiple keys with the same `label`, but different types. */
def keys: Iterable[AttributeKey[_]]
/** Adds the mappings in `o` to this map, with mappings in `o` taking precedence over existing mappings.*/
def ++(o: Iterable[AttributeEntry[_]]): AttributeMap
/** Combines the mappings in `o` with the mappings in this map, with mappings in `o` taking precedence over existing mappings.*/
def ++(o: AttributeMap): AttributeMap
/** All mappings in this map. The [[AttributeEntry]] type preserves the typesafety of mappings, although the specific types are unknown.*/
def entries: Iterable[AttributeEntry[_]]
/** `true` if there are no mappings in this map, `false` if there are. */
def isEmpty: Boolean
}
object AttributeMap {
/** An [[AttributeMap]] without any mappings. */
val empty: AttributeMap = new BasicAttributeMap(Map.empty)
/** Constructs an [[AttributeMap]] containing the given `entries`. */
def apply(entries: Iterable[AttributeEntry[_]]): AttributeMap = empty ++ entries
/** Constructs an [[AttributeMap]] containing the given `entries`.*/
def apply(entries: AttributeEntry[_]*): AttributeMap = empty ++ entries
/** Presents an `AttributeMap` as a natural transformation. */
implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = new (AttributeKey ~> Id) {
def apply[T](key: AttributeKey[T]): T = map(key)
}
}
private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any])
extends AttributeMap {
def isEmpty: Boolean = backing.isEmpty
def apply[T](k: AttributeKey[T]) = backing(k).asInstanceOf[T]
def get[T](k: AttributeKey[T]) = backing.get(k).asInstanceOf[Option[T]]
def remove[T](k: AttributeKey[T]): AttributeMap = new BasicAttributeMap(backing - k)
def contains[T](k: AttributeKey[T]) = backing.contains(k)
def put[T](k: AttributeKey[T], value: T): AttributeMap =
new BasicAttributeMap(backing.updated(k, value))
def keys: Iterable[AttributeKey[_]] = backing.keys
def ++(o: Iterable[AttributeEntry[_]]): AttributeMap = {
val newBacking = (backing /: o) {
case (b, AttributeEntry(key, value)) => b.updated(key, value)
}
new BasicAttributeMap(newBacking)
}
def ++(o: AttributeMap): AttributeMap =
o match {
case bam: BasicAttributeMap => new BasicAttributeMap(backing ++ bam.backing)
case _ => o ++ this
}
def entries: Iterable[AttributeEntry[_]] =
for ((k: AttributeKey[kt], v) <- backing) yield AttributeEntry(k, v.asInstanceOf[kt])
override def toString = entries.mkString("(", ", ", ")")
}
// type inference required less generality
/** A map entry where `key` is constrained to only be associated with a fixed value of type `T`. */
final case class AttributeEntry[T](key: AttributeKey[T], value: T) {
override def toString = key.label + ": " + value
}
/** Associates a `metadata` map with `data`. */
final case class Attributed[D](data: D)(val metadata: AttributeMap) {
/** Retrieves the associated value of `key` from the metadata. */
def get[T](key: AttributeKey[T]): Option[T] = metadata.get(key)
/** Defines a mapping `key -> value` in the metadata. */
def put[T](key: AttributeKey[T], value: T): Attributed[D] =
Attributed(data)(metadata.put(key, value))
/** Transforms the data by applying `f`. */
def map[T](f: D => T): Attributed[T] = Attributed(f(data))(metadata)
}
object Attributed {
/** Extracts the underlying data from the sequence `in`. */
def data[T](in: Seq[Attributed[T]]): Seq[T] = in.map(_.data)
/** Associates empty metadata maps with each entry of `in`.*/
def blankSeq[T](in: Seq[T]): Seq[Attributed[T]] = in map blank
/** Associates an empty metadata map with `data`. */
def blank[T](data: T): Attributed[T] = Attributed(data)(AttributeMap.empty)
}
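A hypothetical usage sketch of the typed key/value API (not part of this commit); it assumes the `Manifest` and `OptJsonWriter` implicits required by `AttributeKey.apply` resolve (sbt.util provides a fallback `OptJsonWriter`):

import sbt.internal.util._

// Keys carry their value type, and the map preserves it across put/get.
val name  = AttributeKey[String]("name")
val count = AttributeKey[Int]("count")

val m: AttributeMap = AttributeMap.empty.put(name, "sbt").put(count, 1)
m.get(name)  // Some("sbt"), typed Option[String]
m.get(count) // Some(1), typed Option[Int]

val tagged = Attributed("payload")(m) // attach the metadata map to a value
tagged.get(count)                     // Some(1)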


@ -0,0 +1,31 @@
package sbt.internal.util
object Classes {
trait Applicative[M[_]] {
def apply[S, T](f: M[S => T], v: M[S]): M[T]
def pure[S](s: => S): M[S]
def map[S, T](f: S => T, v: M[S]): M[T]
}
trait Monad[M[_]] extends Applicative[M] {
def flatten[T](m: M[M[T]]): M[T]
}
implicit val optionMonad: Monad[Option] = new Monad[Option] {
def apply[S, T](f: Option[S => T], v: Option[S]) = (f, v) match {
case (Some(fv), Some(vv)) => Some(fv(vv))
case _ => None
}
def pure[S](s: => S) = Some(s)
def map[S, T](f: S => T, v: Option[S]) = v map f
def flatten[T](m: Option[Option[T]]): Option[T] = m.flatten
}
implicit val listMonad: Monad[List] = new Monad[List] {
def apply[S, T](f: List[S => T], v: List[S]) = for (fv <- f; vv <- v) yield fv(vv)
def pure[S](s: => S) = s :: Nil
def map[S, T](f: S => T, v: List[S]) = v map f
def flatten[T](m: List[List[T]]): List[T] = m.flatten
}
}
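For illustration, a hypothetical sketch of the `Option` instance in action (not part of the commit):

import sbt.internal.util.Classes._

val ap: Applicative[Option] = implicitly[Applicative[Option]]

ap.pure(21)                                  // Some(21)
ap.map((i: Int) => i * 2, Some(21))          // Some(42)
ap.apply(Some((i: Int) => i + 1), Some(41))  // Some(42)
ap.apply(Option.empty[Int => Int], Some(41)) // None: a missing function short-circuits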


@ -0,0 +1,136 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 David MacIver, Mark Harrah
*/
package sbt.internal.util
trait Dag[Node <: Dag[Node]] { self: Node =>
def dependencies: Iterable[Node]
def topologicalSort = Dag.topologicalSort(self)(_.dependencies)
}
object Dag {
import scala.collection.{ mutable, JavaConverters }
import JavaConverters.asScalaSetConverter
def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] =
topologicalSort(root :: Nil)(dependencies)
def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = {
val discovered = new mutable.HashSet[T]
val finished = (new java.util.LinkedHashSet[T]).asScala
def visitAll(nodes: Iterable[T]) = nodes foreach visit
def visit(node: T): Unit = {
if (!discovered(node)) {
discovered(node) = true;
try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c }
finished += node
()
} else if (!finished(node))
throw new Cyclic(node)
}
visitAll(nodes)
finished.toList
}
// doesn't check for cycles
def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] =
topologicalSortUnchecked(node :: Nil)(dependencies)
def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = {
val discovered = new mutable.HashSet[T]
var finished: List[T] = Nil
def visitAll(nodes: Iterable[T]) = nodes foreach visit
def visit(node: T): Unit = {
if (!discovered(node)) {
discovered(node) = true
visitAll(dependencies(node))
finished ::= node
}
}
visitAll(nodes);
finished;
}
final class Cyclic(val value: Any, val all: List[Any], val complete: Boolean)
extends Exception(
"Cyclic reference involving " +
(if (complete) all.mkString("\n ", "\n ", "") else value)
) {
def this(value: Any) = this(value, value :: Nil, false)
override def toString = getMessage
def ::(a: Any): Cyclic =
if (complete)
this
else if (a == value)
new Cyclic(value, all, true)
else
new Cyclic(value, a :: all, false)
}
/** A directed graph with edges labeled positive or negative. */
private[sbt] trait DirectedSignedGraph[Node] {
/**
* Directed edge type that tracks the sign and target (head) vertex.
* The sign can be obtained via [[isNegative]] and the target vertex via [[head]].
*/
type Arrow
/** List of initial nodes. */
def nodes: List[Arrow]
/** Outgoing edges for `n`. */
def dependencies(n: Node): List[Arrow]
/** `true` if the edge `a` is "negative", false if it is "positive". */
def isNegative(a: Arrow): Boolean
/** The target of the directed edge `a`. */
def head(a: Arrow): Node
}
/**
* Traverses a directed graph defined by `graph` looking for a cycle that includes a "negative" edge.
* The directed edges are weighted by the caller as "positive" or "negative".
* If a cycle containing a "negative" edge is detected, its member edges are returned in order.
* Otherwise, the empty list is returned.
*/
private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] = {
import graph._
val finished = new mutable.HashSet[Node]
val visited = new mutable.HashSet[Node]
def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match {
case Nil => Nil
case edge :: tail =>
val node = head(edge)
if (!visited(node)) {
visited += node
visit(dependencies(node), edge :: stack) match {
case Nil =>
finished += node
visit(tail, stack)
case cycle => cycle
}
} else if (!finished(node)) {
// cycle. If a negative edge is involved, it is an error.
val between = edge :: stack.takeWhile(f => head(f) != node)
if (between exists isNegative)
between
else
visit(tail, stack)
} else
visit(tail, stack)
}
visit(graph.nodes, Nil)
}
}
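A hypothetical usage sketch (not part of this commit), using a plain `Map` as the dependency function:

import sbt.internal.util.Dag

// "a" depends on "b" and "c"; "b" depends on "c".
val deps = Map("a" -> List("b", "c"), "b" -> List("c"), "c" -> List.empty[String])

Dag.topologicalSort("a")(deps) // List(c, b, a): every node appears after its dependencies
// A cycle such as a -> b -> a would instead throw Dag.Cyclic.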


@ -0,0 +1,36 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
import Types._
/**
* A minimal heterogeneous list type. For background, see
* http://apocalisp.wordpress.com/2010/07/06/type-level-programming-in-scala-part-6a-heterogeneous-list-basics/
*/
sealed trait HList {
type Wrap[M[_]] <: HList
}
sealed trait HNil extends HList {
type Wrap[M[_]] = HNil
def :+:[G](g: G): G :+: HNil = HCons(g, this)
override def toString = "HNil"
}
object HNil extends HNil
final case class HCons[H, T <: HList](head: H, tail: T) extends HList {
type Wrap[M[_]] = M[H] :+: T#Wrap[M]
def :+:[G](g: G): G :+: H :+: T = HCons(g, this)
override def toString = head + " :+: " + tail.toString
}
object HList {
// contains no type information: not even A
implicit def fromList[A](list: Traversable[A]): HList =
((HNil: HList) /: list)((hl, v) => HCons(v, hl))
}
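A hypothetical usage sketch (not part of the commit):

import sbt.internal.util._
import Types.:+: // H :+: T is the infix alias for HCons[H, T]

val hl: Int :+: String :+: HNil = 1 :+: "two" :+: HNil
hl.head      // 1, statically typed Int
hl.tail.head // "two", statically typed String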


@ -0,0 +1,78 @@
package sbt
package internal
package util
import sjsonnew._
import Types.:+:
trait HListFormats {
implicit val lnilFormat1: JsonFormat[HNil] = forHNil(HNil)
implicit val lnilFormat2: JsonFormat[HNil.type] = forHNil(HNil)
private def forHNil[A <: HNil](hnil: A): JsonFormat[A] = new JsonFormat[A] {
def write[J](x: A, builder: Builder[J]): Unit = {
builder.beginArray()
builder.endArray()
}
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): A = jsOpt match {
case None => hnil
case Some(js) => unbuilder.beginArray(js); unbuilder.endArray(); hnil
}
}
implicit def hconsFormat[H, T <: HList](
implicit hf: JsonFormat[H],
tf: HListJF[T]
): JsonFormat[H :+: T] =
new JsonFormat[H :+: T] {
def write[J](hcons: H :+: T, builder: Builder[J]) = {
builder.beginArray()
hf.write(hcons.head, builder)
tf.write(hcons.tail, builder)
builder.endArray()
}
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
case None => HCons(hf.read(None, unbuilder), tf.read(None, unbuilder))
case Some(js) =>
unbuilder.beginArray(js)
val hcons =
HCons(hf.read(Some(unbuilder.nextElement), unbuilder), tf.read(Some(js), unbuilder))
unbuilder.endArray()
hcons
}
}
trait HListJF[A <: HList] {
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): A
def write[J](obj: A, builder: Builder[J]): Unit
}
implicit def hconsHListJF[H, T <: HList](
implicit hf: JsonFormat[H],
tf: HListJF[T]
): HListJF[H :+: T] =
new HListJF[H :+: T] {
def write[J](hcons: H :+: T, builder: Builder[J]) = {
hf.write(hcons.head, builder)
tf.write(hcons.tail, builder)
}
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
case None => HCons(hf.read(None, unbuilder), tf.read(None, unbuilder))
case Some(js) =>
HCons(hf.read(Some(unbuilder.nextElement), unbuilder), tf.read(Some(js), unbuilder))
}
}
implicit val lnilHListJF1: HListJF[HNil] = hnilHListJF(HNil)
implicit val lnilHListJF2: HListJF[HNil.type] = hnilHListJF(HNil)
implicit def hnilHListJF[A <: HNil](hnil: A): HListJF[A] = new HListJF[A] {
def write[J](hcons: A, builder: Builder[J]) = ()
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = hnil
}
}
object HListFormats extends HListFormats


@ -0,0 +1,49 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
/** A mutable set interface that uses object identity to test for set membership.*/
trait IDSet[T] {
def apply(t: T): Boolean
def contains(t: T): Boolean
def +=(t: T): Unit
def ++=(t: Iterable[T]): Unit
def -=(t: T): Boolean
def all: collection.Iterable[T]
def toList: List[T]
def isEmpty: Boolean
def foreach(f: T => Unit): Unit
def process[S](t: T)(ifSeen: S)(ifNew: => S): S
}
object IDSet {
implicit def toTraversable[T]: IDSet[T] => Traversable[T] = _.all
def apply[T](values: T*): IDSet[T] = apply(values)
def apply[T](values: Iterable[T]): IDSet[T] = {
val s = create[T]
s ++= values
s
}
def create[T]: IDSet[T] = new IDSet[T] {
private[this] val backing = new java.util.IdentityHashMap[T, AnyRef]
private[this] val Dummy: AnyRef = ""
def apply(t: T) = contains(t)
def contains(t: T) = backing.containsKey(t)
def foreach(f: T => Unit) = all foreach f
def +=(t: T) = { backing.put(t, Dummy); () }
def ++=(t: Iterable[T]) = t foreach +=
def -=(t: T) = if (backing.remove(t) eq null) false else true
def all = collection.JavaConversions.collectionAsScalaIterable(backing.keySet)
def toList = all.toList
def isEmpty = backing.isEmpty
def process[S](t: T)(ifSeen: S)(ifNew: => S) =
if (contains(t)) ifSeen else { this += t; ifNew }
override def toString = backing.toString
}
}
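A hypothetical usage sketch (not part of the commit), showing that membership is decided by reference identity rather than equality:

import sbt.internal.util.IDSet

val a = new String("x")
val b = new String("x") // equal to `a`, but a distinct object

val s = IDSet[String](a)
s.contains(a) // true
s.contains(b) // false: `b` is equal but not the same reference
s += b
s.toList.size // 2: both references are now members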


@ -0,0 +1,216 @@
package sbt.internal.util
import java.lang.Runnable
import java.util.concurrent.{ atomic, Executor, LinkedBlockingQueue }
import atomic.{ AtomicBoolean, AtomicInteger }
import Types.{ ConstK, Id }
object EvaluationState extends Enumeration {
val New, Blocked, Ready, Calling, Evaluated = Value
}
abstract class EvaluateSettings[Scope] {
protected val init: Init[Scope]
import init._
protected def executor: Executor
protected def compiledSettings: Seq[Compiled[_]]
import EvaluationState.{ Value => EvaluationState, _ }
private[this] val complete = new LinkedBlockingQueue[Option[Throwable]]
private[this] val static = PMap.empty[ScopedKey, INode]
private[this] val allScopes: Set[Scope] = compiledSettings.map(_.key.scope).toSet
private[this] def getStatic[T](key: ScopedKey[T]): INode[T] =
static get key getOrElse sys.error("Illegal reference to key " + key)
private[this] val transform: Initialize ~> INode = new (Initialize ~> INode) {
def apply[T](i: Initialize[T]): INode[T] = i match {
case k: Keyed[s, T] @unchecked => single(getStatic(k.scopedKey), k.transform)
case a: Apply[k, T] @unchecked =>
new MixedNode[k, T](
a.alist.transform[Initialize, INode](a.inputs, transform),
a.f,
a.alist
)
case b: Bind[s, T] @unchecked => new BindNode[s, T](transform(b.in), x => transform(b.f(x)))
case v: Value[T] @unchecked => constant(v.value)
case v: ValidationCapture[T] @unchecked => strictConstant(v.key)
case t: TransformCapture => strictConstant(t.f)
case o: Optional[s, T] @unchecked =>
o.a match {
case None => constant(() => o.f(None))
case Some(i) => single[s, T](transform(i), x => o.f(Some(x)))
}
case x if x == StaticScopes =>
strictConstant(allScopes.asInstanceOf[T]) // can't convince scalac that StaticScopes => T == Set[Scope]
}
}
private[this] lazy val roots: Seq[INode[_]] = compiledSettings flatMap { cs =>
(cs.settings map { s =>
val t = transform(s.init)
static(s.key) = t
t
}): Seq[INode[_]]
}
private[this] var running = new AtomicInteger
private[this] var cancel = new AtomicBoolean(false)
def run(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = {
assert(running.get() == 0, "Already running")
startWork()
roots.foreach(_.registerIfNew())
workComplete()
complete.take() foreach { ex =>
cancel.set(true)
throw ex
}
getResults(delegates)
}
private[this] def getResults(implicit delegates: Scope => Seq[Scope]) =
(empty /: static.toTypedSeq) {
case (ss, static.TPair(key, node)) =>
if (key.key.isLocal) ss else ss.set(key.scope, key.key, node.get)
}
private[this] val getValue = new (INode ~> Id) { def apply[T](node: INode[T]) = node.get }
private[this] def submitEvaluate(node: INode[_]) = submit(node.evaluate())
private[this] def submitCallComplete[T](node: BindNode[_, T], value: T) =
submit(node.callComplete(value))
private[this] def submit(work: => Unit): Unit = {
startWork()
executor.execute(new Runnable { def run = if (!cancel.get()) run0(work) })
}
private[this] def run0(work: => Unit): Unit = {
try { work } catch { case e: Throwable => complete.put(Some(e)) }
workComplete()
}
private[this] def startWork(): Unit = { running.incrementAndGet(); () }
private[this] def workComplete(): Unit =
if (running.decrementAndGet() == 0)
complete.put(None)
private[this] sealed abstract class INode[T] {
private[this] var state: EvaluationState = New
private[this] var value: T = _
private[this] val blocking = new collection.mutable.ListBuffer[INode[_]]
private[this] var blockedOn: Int = 0
private[this] val calledBy = new collection.mutable.ListBuffer[BindNode[_, T]]
override def toString =
getClass.getName + " (state=" + state + ",blockedOn=" + blockedOn + ",calledBy=" + calledBy.size + ",blocking=" + blocking.size + "): " +
keyString
private[this] def keyString =
(static.toSeq.flatMap {
case (key, value) => if (value eq this) init.showFullKey.show(key) :: Nil else Nil
}).headOption getOrElse "non-static"
final def get: T = synchronized {
assert(value != null, toString + " not evaluated")
value
}
final def doneOrBlock(from: INode[_]): Boolean = synchronized {
val ready = state == Evaluated
if (!ready) blocking += from
registerIfNew()
ready
}
final def isDone: Boolean = synchronized { state == Evaluated }
final def isNew: Boolean = synchronized { state == New }
final def isCalling: Boolean = synchronized { state == Calling }
final def registerIfNew(): Unit = synchronized { if (state == New) register() }
private[this] def register(): Unit = {
assert(state == New, "Already registered and: " + toString)
val deps = dependsOn
blockedOn = deps.size - deps.count(_.doneOrBlock(this))
if (blockedOn == 0)
schedule()
else
state = Blocked
}
final def schedule(): Unit = synchronized {
assert(state == New || state == Blocked, "Invalid state for schedule() call: " + toString)
state = Ready
submitEvaluate(this)
}
final def unblocked(): Unit = synchronized {
assert(state == Blocked, "Invalid state for unblocked() call: " + toString)
blockedOn -= 1
assert(blockedOn >= 0, "Negative blockedOn: " + blockedOn + " for " + toString)
if (blockedOn == 0) schedule()
}
final def evaluate(): Unit = synchronized { evaluate0() }
protected final def makeCall(source: BindNode[_, T], target: INode[T]): Unit = {
assert(state == Ready, "Invalid state for call to makeCall: " + toString)
state = Calling
target.call(source)
}
protected final def setValue(v: T): Unit = {
assert(state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString)
if (v == null) sys.error("Setting value cannot be null: " + keyString)
value = v
state = Evaluated
blocking foreach { _.unblocked() }
blocking.clear()
calledBy foreach { node =>
submitCallComplete(node, value)
}
calledBy.clear()
}
final def call(by: BindNode[_, T]): Unit = synchronized {
registerIfNew()
state match {
case Evaluated => submitCallComplete(by, value)
case _ => calledBy += by
}
()
}
protected def dependsOn: Seq[INode[_]]
protected def evaluate0(): Unit
}
private[this] def strictConstant[T](v: T): INode[T] = constant(() => v)
private[this] def constant[T](f: () => T): INode[T] =
new MixedNode[ConstK[Unit]#l, T]((), _ => f(), AList.empty)
private[this] def single[S, T](in: INode[S], f: S => T): INode[T] =
new MixedNode[({ type l[L[x]] = L[S] })#l, T](in, f, AList.single[S])
private[this] final class BindNode[S, T](in: INode[S], f: S => INode[T]) extends INode[T] {
protected def dependsOn = in :: Nil
protected def evaluate0(): Unit = makeCall(this, f(in.get))
def callComplete(value: T): Unit = synchronized {
assert(isCalling, "Invalid state for callComplete(" + value + "): " + toString)
setValue(value)
}
}
private[this] final class MixedNode[K[L[x]], T](in: K[INode], f: K[Id] => T, alist: AList[K])
extends INode[T] {
protected def dependsOn = alist.toList(in)
protected def evaluate0(): Unit = setValue(f(alist.transform(in, getValue)))
}
}


@ -0,0 +1,59 @@
package sbt.internal.util
import Types._
import Classes.Applicative
/** Heterogeneous list with each element having type M[T] for some type T.*/
sealed trait KList[+M[_]] {
type Transform[N[_]] <: KList[N]
/** Apply the natural transformation `f` to each element. */
def transform[N[_]](f: M ~> N): Transform[N]
/** Folds this list using a function that operates on the homogeneous type of the elements of this list. */
def foldr[B](f: (M[_], B) => B, init: B): B = init // had trouble defining it in KNil
/** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */
def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z]
/** Equivalent to `transform(f) . apply(x => x)`, this is the essence of the iterator at the level of natural transformations.*/
def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]]
/** Discards the heterogeneous type information and constructs a plain List from this KList's elements. */
def toList: List[M[_]]
}
final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KList[M] {
final type Transform[N[_]] = KCons[H, tail.Transform[N], N]
def transform[N[_]](f: M ~> N) = KCons(f(head), tail.transform(f))
def toList: List[M[_]] = head :: tail.toList
def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] = {
val g = (t: tail.Transform[Id]) => (h: H) => f(KCons[H, tail.Transform[Id], Id](h, t))
ap.apply(tail.apply[N, H => Z](g), head)
}
def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] = {
val tt: N[tail.Transform[P]] = tail.traverse[N, P](f)
val g = (t: tail.Transform[P]) => (h: P[H]) => KCons(h, t)
np.apply(np.map(g, tt), f(head))
}
def :^:[A, N[x] >: M[x]](h: N[A]) = KCons(h, this)
override def foldr[B](f: (M[_], B) => B, init: B): B = f(head, tail.foldr(f, init))
}
sealed abstract class KNil extends KList[Nothing] {
final type Transform[N[_]] = KNil
final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil
final def toList = Nil
final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil))
final def traverse[N[_], P[_]](f: Nothing ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] =
np.pure(KNil)
}
case object KNil extends KNil {
def :^:[M[_], H](h: M[H]): KCons[H, KNil, M] = KCons(h, this)
}
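A hypothetical usage sketch (not part of the commit):

import sbt.internal.util._
import Types._

// A heterogeneous list whose elements share the Option constructor.
val kl = Option(1) :^: Option("two") :^: KNil

kl.toList // List(Some(1), Some("two")): element types are erased to Option[_]

// Rewrap every element with an Option ~> List natural transformation.
val asLists = kl.transform(new (Option ~> List) {
  def apply[A](o: Option[A]): List[A] = o.toList
})
asLists.toList // List(List(1), List("two"))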


@ -0,0 +1,122 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
import collection.mutable
trait RMap[K[_], V[_]] {
def apply[T](k: K[T]): V[T]
def get[T](k: K[T]): Option[V[T]]
def contains[T](k: K[T]): Boolean
def toSeq: Seq[(K[_], V[_])]
def toTypedSeq: Seq[TPair[_]] = toSeq.map {
case (k: K[t], v) => TPair[t](k, v.asInstanceOf[V[t]])
}
def keys: Iterable[K[_]]
def values: Iterable[V[_]]
def isEmpty: Boolean
sealed case class TPair[T](key: K[T], value: V[T])
}
trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
def put[T](k: K[T], v: V[T]): IMap[K, V]
def remove[T](k: K[T]): IMap[K, V]
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K, V]
def mapValues[V2[_]](f: V ~> V2): IMap[K, V2]
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l)
: (IMap[K, VL], IMap[K, VR])
}
trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
def update[T](k: K[T], v: V[T]): Unit
def remove[T](k: K[T]): Option[V[T]]
def getOrUpdate[T](k: K[T], make: => V[T]): V[T]
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T]
}
object PMap {
implicit def toFunction[K[_], V[_]](map: PMap[K, V]): K[_] => V[_] = k => map(k)
def empty[K[_], V[_]]: PMap[K, V] = new DelegatingPMap[K, V](new mutable.HashMap)
}
object IMap {
/**
* Only suitable for K that is invariant in its type parameter.
* Option and List keys are not suitable, for example,
* because None <:< Option[String] and None <: Option[Int].
*/
def empty[K[_], V[_]]: IMap[K, V] = new IMap0[K, V](Map.empty)
private[this] class IMap0[K[_], V[_]](backing: Map[K[_], V[_]])
extends AbstractRMap[K, V]
with IMap[K, V] {
def get[T](k: K[T]): Option[V[T]] = (backing get k).asInstanceOf[Option[V[T]]]
def put[T](k: K[T], v: V[T]) = new IMap0[K, V](backing.updated(k, v))
def remove[T](k: K[T]) = new IMap0[K, V](backing - k)
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]) =
put(k, f(this get k getOrElse init))
def mapValues[V2[_]](f: V ~> V2) =
new IMap0[K, V2](backing.mapValues(x => f(x)))
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) = {
val mapped = backing.iterator.map {
case (k, v) =>
f(v) match {
case Left(l) => Left((k, l))
case Right(r) => Right((k, r))
}
}
val (l, r) = Util.separateE[(K[_], VL[_]), (K[_], VR[_])](mapped.toList)
(new IMap0[K, VL](l.toMap), new IMap0[K, VR](r.toMap))
}
def toSeq = backing.toSeq
def keys = backing.keys
def values = backing.values
def isEmpty = backing.isEmpty
override def toString = backing.toString
}
}
abstract class AbstractRMap[K[_], V[_]] extends RMap[K, V] {
def apply[T](k: K[T]): V[T] = get(k).get
def contains[T](k: K[T]): Boolean = get(k).isDefined
}
/**
* Only suitable for K that is invariant in its type parameter.
* Option and List keys are not suitable, for example,
* because None <:< Option[String] and None <: Option[Int].
*/
class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]])
extends AbstractRMap[K, V]
with PMap[K, V] {
def get[T](k: K[T]): Option[V[T]] = cast[T](backing.get(k))
def update[T](k: K[T], v: V[T]): Unit = { backing(k) = v }
def remove[T](k: K[T]) = cast(backing.remove(k))
def getOrUpdate[T](k: K[T], make: => V[T]) = cast[T](backing.getOrElseUpdate(k, make))
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] = {
val v = f(this get k getOrElse init)
update(k, v)
v
}
def toSeq = backing.toSeq
def keys = backing.keys
def values = backing.values
def isEmpty = backing.isEmpty
private[this] def cast[T](v: V[_]): V[T] = v.asInstanceOf[V[T]]
private[this] def cast[T](o: Option[V[_]]): Option[V[T]] = o map cast[T]
override def toString = backing.toString
}
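A hypothetical usage sketch (the `Key` class below is illustrative, not part of the commit); the key's type parameter is carried through to the value:

import sbt.internal.util._
import Types.Id

// An invariant key type that records the type of value it maps to.
final case class Key[A](name: String)

val count = Key[Int]("count")
val label = Key[String]("label")

val m: IMap[Key, Id] = IMap.empty[Key, Id].put(count, 3).put(label, "sbt")
m.get(count) // Some(3): Option[Int]
m.get(label) // Some("sbt"): Option[String]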


@ -0,0 +1,28 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
// Used to emulate ~> literals
trait Param[A[_], B[_]] {
type T
def in: A[T]
def ret(out: B[T]): Unit
def ret: B[T]
}
object Param {
implicit def pToT[A[_], B[_]](p: Param[A, B] => Unit): A ~> B = new (A ~> B) {
def apply[s](a: A[s]): B[s] = {
val v: Param[A, B] { type T = s } = new Param[A, B] {
type T = s
def in = a
private var r: B[T] = _
def ret(b: B[T]): Unit = { r = b }
def ret: B[T] = r
}
p(v)
v.ret
}
}
}
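A hypothetical usage sketch (not part of the commit): a natural transformation written as a plain function over `Param` instead of an anonymous `~>` subclass:

import sbt.internal.util._
import Types._

// Read the input from `p.in` and hand the result back through `p.ret(...)`.
val optToList: Option ~> List = Param.pToT[Option, List](p => p.ret(p.in.toList))

optToList(Some(1)) // List(1)
optToList(None)    // Nil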


@ -0,0 +1,850 @@
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt.internal.util
import scala.language.existentials
import Types._
import sbt.util.Show
sealed trait Settings[Scope] {
def data: Map[Scope, AttributeMap]
def keys(scope: Scope): Set[AttributeKey[_]]
def scopes: Set[Scope]
def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope]
def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T]
def get[T](scope: Scope, key: AttributeKey[T]): Option[T]
def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T]
def set[T](scope: Scope, key: AttributeKey[T], value: T): Settings[Scope]
}
private final class Settings0[Scope](
val data: Map[Scope, AttributeMap],
val delegates: Scope => Seq[Scope]
) extends Settings[Scope] {
def scopes: Set[Scope] = data.keySet
def keys(scope: Scope) = data(scope).keys.toSet
def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] =
data.flatMap { case (scope, map) => map.keys.map(k => f(scope, k)) }.toSeq
def get[T](scope: Scope, key: AttributeKey[T]): Option[T] =
delegates(scope).toStream.flatMap(sc => getDirect(sc, key)).headOption
def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope] =
delegates(scope).toStream.find(sc => getDirect(sc, key).isDefined)
def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T] =
(data get scope).flatMap(_ get key)
def set[T](scope: Scope, key: AttributeKey[T], value: T): Settings[Scope] = {
val map = data getOrElse (scope, AttributeMap.empty)
val newData = data.updated(scope, map.put(key, value))
new Settings0(newData, delegates)
}
}
// delegates should contain the input Scope as the first entry
// this trait is intended to be mixed into an object
trait Init[Scope] {
/** The Show instance used when a detailed String needs to be generated.
* It is typically used when no context is available.
*/
def showFullKey: Show[ScopedKey[_]]
sealed case class ScopedKey[T](scope: Scope, key: AttributeKey[T]) extends KeyedInitialize[T] {
def scopedKey = this
}
type SettingSeq[T] = Seq[Setting[T]]
type ScopedMap = IMap[ScopedKey, SettingSeq]
type CompiledMap = Map[ScopedKey[_], Compiled[_]]
type MapScoped = ScopedKey ~> ScopedKey
type ValidatedRef[T] = Either[Undefined, ScopedKey[T]]
type ValidatedInit[T] = Either[Seq[Undefined], Initialize[T]]
type ValidateRef = ScopedKey ~> ValidatedRef
type ScopeLocal = ScopedKey[_] => Seq[Setting[_]]
type MapConstant = ScopedKey ~> Option
private[sbt] abstract class ValidateKeyRef {
def apply[T](key: ScopedKey[T], selfRefOk: Boolean): ValidatedRef[T]
}
/**
* The result of this initialization is the composition of applied transformations.
* This can be useful when dealing with dynamic Initialize values.
*/
lazy val capturedTransformations: Initialize[Initialize ~> Initialize] =
new TransformCapture(idK[Initialize])
def setting[T](
key: ScopedKey[T],
init: Initialize[T],
pos: SourcePosition = NoPosition
): Setting[T] = new Setting[T](key, init, pos)
def valueStrict[T](value: T): Initialize[T] = pure(() => value)
def value[T](value: => T): Initialize[T] = pure(value _)
def pure[T](value: () => T): Initialize[T] = new Value(value)
def optional[T, U](i: Initialize[T])(f: Option[T] => U): Initialize[U] = new Optional(Some(i), f)
def update[T](key: ScopedKey[T])(f: T => T): Setting[T] =
setting[T](key, map(key)(f), NoPosition)
def bind[S, T](in: Initialize[S])(f: S => Initialize[T]): Initialize[T] = new Bind(f, in)
def map[S, T](in: Initialize[S])(f: S => T): Initialize[T] =
new Apply[({ type l[L[x]] = L[S] })#l, T](f, in, AList.single[S])
def app[K[L[x]], T](inputs: K[Initialize])(f: K[Id] => T)(
implicit alist: AList[K]
): Initialize[T] = new Apply[K, T](f, inputs, alist)
def uniform[S, T](inputs: Seq[Initialize[S]])(f: Seq[S] => T): Initialize[T] =
new Apply[({ type l[L[x]] = List[L[S]] })#l, T](f, inputs.toList, AList.seq[S])
/**
* The result of this initialization is the validated `key`.
* No dependency is introduced on `key`. If `selfRefOk` is true, validation will not fail if the key is referenced by a definition of `key`.
* That is, key := f(validated(key).value) is allowed only if `selfRefOk == true`.
*/
private[sbt] final def validated[T](
key: ScopedKey[T],
selfRefOk: Boolean
): ValidationCapture[T] =
new ValidationCapture(key, selfRefOk)
/**
* Constructs a derived setting that will be automatically defined in every scope where one of its dependencies
is explicitly defined and where the scope matches `filter`.
* A setting initialized with dynamic dependencies is only allowed if `allowDynamic` is true.
* Only the static dependencies are tracked, however. Dependencies on previous values do not introduce a derived setting either.
*/
final def derive[T](
s: Setting[T],
allowDynamic: Boolean = false,
filter: Scope => Boolean = const(true),
trigger: AttributeKey[_] => Boolean = const(true),
default: Boolean = false
): Setting[T] = {
deriveAllowed(s, allowDynamic) foreach sys.error
val d = new DerivedSetting[T](s.key, s.init, s.pos, filter, trigger)
if (default) d.default() else d
}
def deriveAllowed[T](s: Setting[T], allowDynamic: Boolean): Option[String] = s.init match {
case _: Bind[_, _] if !allowDynamic => Some("Cannot derive from dynamic dependencies.")
case _ => None
}
// id is used for equality
private[sbt] final def defaultSetting[T](s: Setting[T]): Setting[T] = s.default()
private[sbt] def defaultSettings(ss: Seq[Setting[_]]): Seq[Setting[_]] =
ss.map(s => defaultSetting(s))
private[this] final val nextID = new java.util.concurrent.atomic.AtomicLong
private[this] final def nextDefaultID(): Long = nextID.incrementAndGet()
def empty(implicit delegates: Scope => Seq[Scope]): Settings[Scope] =
new Settings0(Map.empty, delegates)
def asTransform(s: Settings[Scope]): ScopedKey ~> Id = new (ScopedKey ~> Id) {
def apply[T](k: ScopedKey[T]): T = getValue(s, k)
}
def getValue[T](s: Settings[Scope], k: ScopedKey[T]) =
s.get(k.scope, k.key) getOrElse (throw new InvalidReference(k))
def asFunction[T](s: Settings[Scope]): ScopedKey[T] => T = k => getValue(s, k)
def mapScope(f: Scope => Scope): MapScoped = new MapScoped {
def apply[T](k: ScopedKey[T]): ScopedKey[T] = k.copy(scope = f(k.scope))
}
private final class InvalidReference(val key: ScopedKey[_])
extends RuntimeException(
"Internal settings error: invalid reference to " + showFullKey.show(key)
)
private[this] def applyDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] = {
val (defaults, others) = Util.separate[Setting[_], DefaultSetting[_], Setting[_]](ss) {
case u: DefaultSetting[_] => Left(u); case s => Right(s)
}
defaults.distinct ++ others
}
def compiled(init: Seq[Setting[_]], actual: Boolean = true)(
implicit delegates: Scope => Seq[Scope],
scopeLocal: ScopeLocal,
display: Show[ScopedKey[_]]
): CompiledMap = {
val initDefaults = applyDefaults(init)
// inject derived settings into scopes where their dependencies are directly defined
// and prepend per-scope settings
val derived = deriveAndLocal(initDefaults)
// group by Scope/Key, dropping dead initializations
val sMap: ScopedMap = grouped(derived)
// delegate references to undefined values according to 'delegates'
val dMap: ScopedMap = if (actual) delegate(sMap)(delegates, display) else sMap
// merge Seq[Setting[_]] into Compiled
compile(dMap)
}
def make(init: Seq[Setting[_]])(
implicit delegates: Scope => Seq[Scope],
scopeLocal: ScopeLocal,
display: Show[ScopedKey[_]]
): Settings[Scope] = {
val cMap = compiled(init)(delegates, scopeLocal, display)
// order the initializations. cyclic references are detected here.
val ordered: Seq[Compiled[_]] = sort(cMap)
// evaluation: apply the initializations.
try { applyInits(ordered) } catch {
case rru: RuntimeUndefined =>
throw Uninitialized(cMap.keys.toSeq, delegates, rru.undefined, true)
}
}
def sort(cMap: CompiledMap): Seq[Compiled[_]] =
Dag.topologicalSort(cMap.values)(_.dependencies.map(cMap))
def compile(sMap: ScopedMap): CompiledMap =
sMap.toTypedSeq.map {
case sMap.TPair(k, ss) =>
val deps = ss.flatMap(_.dependencies).toSet
(k, new Compiled(k, deps, ss))
}.toMap
def grouped(init: Seq[Setting[_]]): ScopedMap =
((IMap.empty: ScopedMap) /: init)((m, s) => add(m, s))
def add[T](m: ScopedMap, s: Setting[T]): ScopedMap =
m.mapValue[T](s.key, Nil, ss => append(ss, s))
def append[T](ss: Seq[Setting[T]], s: Setting[T]): Seq[Setting[T]] =
if (s.definitive) s :: Nil else ss :+ s
def addLocal(init: Seq[Setting[_]])(implicit scopeLocal: ScopeLocal): Seq[Setting[_]] =
init.flatMap(_.dependencies flatMap scopeLocal) ++ init
def delegate(sMap: ScopedMap)(
implicit delegates: Scope => Seq[Scope],
display: Show[ScopedKey[_]]
): ScopedMap = {
def refMap(ref: Setting[_], isFirst: Boolean) = new ValidateKeyRef {
def apply[T](k: ScopedKey[T], selfRefOk: Boolean) =
delegateForKey(sMap, k, delegates(k.scope), ref, selfRefOk || !isFirst)
}
type ValidatedSettings[T] = Either[Seq[Undefined], SettingSeq[T]]
val f = new (SettingSeq ~> ValidatedSettings) {
def apply[T](ks: Seq[Setting[T]]) = {
val (undefs, valid) = Util.separate(ks.zipWithIndex) {
case (s, i) => s validateKeyReferenced refMap(s, i == 0)
}
if (undefs.isEmpty) Right(valid) else Left(undefs.flatten)
}
}
type Undefs[_] = Seq[Undefined]
val (undefineds, result) = sMap.mapSeparate[Undefs, SettingSeq](f)
if (undefineds.isEmpty)
result
else
throw Uninitialized(sMap.keys.toSeq, delegates, undefineds.values.flatten.toList, false)
}
private[this] def delegateForKey[T](
sMap: ScopedMap,
k: ScopedKey[T],
scopes: Seq[Scope],
ref: Setting[_],
selfRefOk: Boolean
): Either[Undefined, ScopedKey[T]] = {
val skeys = scopes.iterator.map(x => ScopedKey(x, k.key))
val definedAt = skeys.find(sk => (selfRefOk || ref.key != sk) && (sMap contains sk))
definedAt.toRight(Undefined(ref, k))
}
private[this] def applyInits(ordered: Seq[Compiled[_]])(
implicit delegates: Scope => Seq[Scope]
): Settings[Scope] = {
val x =
java.util.concurrent.Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors)
try {
val eval: EvaluateSettings[Scope] = new EvaluateSettings[Scope] {
override val init: Init.this.type = Init.this
def compiledSettings = ordered
def executor = x
}
eval.run
} finally { x.shutdown() }
}
def showUndefined(u: Undefined, validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope])(
implicit display: Show[ScopedKey[_]]
): String = {
val guessed = guessIntendedScope(validKeys, delegates, u.referencedKey)
val derived = u.defining.isDerived
val refString = display.show(u.defining.key)
val sourceString = if (derived) "" else parenPosString(u.defining)
val guessedString =
if (derived) ""
else guessed.map(g => "\n Did you mean " + display.show(g) + " ?").toList.mkString
val derivedString =
if (derived) ", which is a derived setting that needs this key to be defined in this scope."
else ""
display.show(u.referencedKey) + " from " + refString + sourceString + derivedString + guessedString
}
private[this] def parenPosString(s: Setting[_]): String =
s.positionString match { case None => ""; case Some(s) => " (" + s + ")" }
def guessIntendedScope(
validKeys: Seq[ScopedKey[_]],
delegates: Scope => Seq[Scope],
key: ScopedKey[_]
): Option[ScopedKey[_]] = {
val distances = validKeys.flatMap { validKey =>
refinedDistance(delegates, validKey, key).map(dist => (dist, validKey))
}
distances.sortBy(_._1).map(_._2).headOption
}
def refinedDistance(
delegates: Scope => Seq[Scope],
a: ScopedKey[_],
b: ScopedKey[_]
): Option[Int] =
if (a.key != b.key || a == b) None
else {
val dist = delegates(a.scope).indexOf(b.scope)
if (dist < 0) None else Some(dist)
}
final class Uninitialized(val undefined: Seq[Undefined], override val toString: String)
extends Exception(toString)
final class Undefined private[sbt] (val defining: Setting[_], val referencedKey: ScopedKey[_])
final class RuntimeUndefined(val undefined: Seq[Undefined])
extends RuntimeException("References to undefined settings at runtime.") {
override def getMessage =
super.getMessage + undefined.map { u =>
"\n" + u.defining + " referenced from " + u.referencedKey
}.mkString
}
def Undefined(defining: Setting[_], referencedKey: ScopedKey[_]): Undefined =
new Undefined(defining, referencedKey)
def Uninitialized(
validKeys: Seq[ScopedKey[_]],
delegates: Scope => Seq[Scope],
keys: Seq[Undefined],
runtime: Boolean
)(implicit display: Show[ScopedKey[_]]): Uninitialized = {
assert(keys.nonEmpty)
val suffix = if (keys.length > 1) "s" else ""
val prefix = if (runtime) "Runtime reference" else "Reference"
val keysString =
keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "")
new Uninitialized(
keys,
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ")
}
final class Compiled[T](
val key: ScopedKey[T],
val dependencies: Iterable[ScopedKey[_]],
val settings: Seq[Setting[T]]
) {
override def toString = showFullKey.show(key)
}
final class Flattened(val key: ScopedKey[_], val dependencies: Iterable[ScopedKey[_]])
def flattenLocals(compiled: CompiledMap): Map[ScopedKey[_], Flattened] = {
val locals = compiled flatMap {
case (key, comp) => if (key.key.isLocal) Seq[Compiled[_]](comp) else Nil
}
val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep =>
if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil))
def flatten(
cmap: Map[ScopedKey[_], Flattened],
key: ScopedKey[_],
deps: Iterable[ScopedKey[_]]
): Flattened =
new Flattened(
key,
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil))
val empty = Map.empty[ScopedKey[_], Flattened]
val flattenedLocals = (empty /: ordered) { (cmap, c) =>
cmap.updated(c.key, flatten(cmap, c.key, c.dependencies))
}
compiled flatMap {
case (key, comp) =>
if (key.key.isLocal)
Nil
else
Seq[(ScopedKey[_], Flattened)]((key, flatten(flattenedLocals, key, comp.dependencies)))
}
}
def definedAtString(settings: Seq[Setting[_]]): String = {
val posDefined = settings.flatMap(_.positionString.toList)
if (posDefined.nonEmpty) {
val header =
if (posDefined.size == settings.size) "defined at:"
else
"some of the defining occurrences:"
header + (posDefined.distinct mkString ("\n\t", "\n\t", "\n"))
} else ""
}
/**
* Intersects two scopes, returning the more specific one if they intersect, or None otherwise.
*/
private[sbt] def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific
else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific
else None
private[this] def deriveAndLocal(init: Seq[Setting[_]])(
implicit delegates: Scope => Seq[Scope],
scopeLocal: ScopeLocal
): Seq[Setting[_]] = {
import collection.mutable
final class Derived(val setting: DerivedSetting[_]) {
val dependencies = setting.dependencies.map(_.key)
def triggeredBy = dependencies.filter(setting.trigger)
val inScopes = new mutable.HashSet[Scope]
val outputs = new mutable.ListBuffer[Setting[_]]
}
final class Deriveds(val key: AttributeKey[_], val settings: mutable.ListBuffer[Derived]) {
def dependencies = settings.flatMap(_.dependencies)
// This is mainly for use in the cyclic reference error message
override def toString =
s"Derived settings for ${key.label}, ${definedAtString(settings.map(_.setting))}"
}
// separate `derived` settings from normal settings (`defs`)
val (derived, rawDefs) = Util.separate[Setting[_], Derived, Setting[_]](init) {
case d: DerivedSetting[_] => Left(new Derived(d)); case s => Right(s)
}
val defs = addLocal(rawDefs)(scopeLocal)
// group derived settings by the key they define
val derivsByDef = new mutable.HashMap[AttributeKey[_], Deriveds]
for (s <- derived) {
val key = s.setting.key.key
derivsByDef.getOrElseUpdate(key, new Deriveds(key, new mutable.ListBuffer)).settings += s
}
// index derived settings by triggering key. This maps a key to the list of settings potentially derived from it.
val derivedBy = new mutable.HashMap[AttributeKey[_], mutable.ListBuffer[Derived]]
for (s <- derived; d <- s.triggeredBy)
derivedBy.getOrElseUpdate(d, new mutable.ListBuffer) += s
// Map a DerivedSetting[_] to the `Derived` struct wrapping it. Used to ultimately replace a DerivedSetting with
// the `Setting`s that were actually derived from it: `Derived.outputs`
val derivedToStruct: Map[DerivedSetting[_], Derived] = (derived map { s =>
s.setting -> s
}).toMap
// set of defined scoped keys, used to ensure a derived setting is only added if all dependencies are present
val defined = new mutable.HashSet[ScopedKey[_]]
def addDefs(ss: Seq[Setting[_]]): Unit = { for (s <- ss) defined += s.key }
addDefs(defs)
// true iff the scoped key is in `defined`, taking delegation into account
def isDefined(key: AttributeKey[_], scope: Scope) =
delegates(scope).exists(s => defined.contains(ScopedKey(s, key)))
// true iff all dependencies of derived setting `d` have a value (potentially via delegation) in `scope`
def allDepsDefined(d: Derived, scope: Scope, local: Set[AttributeKey[_]]): Boolean =
d.dependencies.forall(dep => local(dep) || isDefined(dep, scope))
// Returns the list of injectable derived settings and their local settings for `sk`.
// The settings are to be injected under `outputScope` = whichever scope is more specific of:
// * the dependency's (`sk`) scope
// * the DerivedSetting's scope in which it has been declared, `definingScope`
// provided that these two scopes intersect.
// A derived setting is injectable if:
// 1. it has not been previously injected into outputScope
// 2. it applies to outputScope (as determined by its `filter`)
// 3. all of its dependencies are defined for outputScope (allowing for delegation)
// This needs to handle local settings because a derived setting wouldn't be injected if its local setting didn't exist yet.
val deriveFor = (sk: ScopedKey[_]) => {
val derivedForKey: List[Derived] = derivedBy.get(sk.key).toList.flatten
val scope = sk.scope
def localAndDerived(d: Derived): Seq[Setting[_]] = {
def definingScope = d.setting.key.scope
val outputScope = intersect(scope, definingScope)
outputScope collect {
case s if !d.inScopes.contains(s) && d.setting.filter(s) =>
val local = d.dependencies.flatMap(dep => scopeLocal(ScopedKey(s, dep)))
if (allDepsDefined(d, s, local.map(_.key.key).toSet)) {
d.inScopes.add(s)
val out = local :+ d.setting.setScope(s)
d.outputs ++= out
out
} else
Nil
} getOrElse Nil
}
derivedForKey.flatMap(localAndDerived)
}
val processed = new mutable.HashSet[ScopedKey[_]]
// derives settings, transitively so that a derived setting can trigger another
def process(rem: List[Setting[_]]): Unit = rem match {
case s :: ss =>
val sk = s.key
val ds = if (processed.add(sk)) deriveFor(sk) else Nil
addDefs(ds)
process(ds ::: ss)
case Nil =>
}
process(defs.toList)
// Take all the original defs and DerivedSettings along with locals, replace each DerivedSetting with the actual
// settings that were derived.
val allDefs = addLocal(init)(scopeLocal)
allDefs flatMap {
case d: DerivedSetting[_] => (derivedToStruct get d map (_.outputs)).toStream.flatten;
case s => Stream(s)
}
}
sealed trait Initialize[T] {
def dependencies: Seq[ScopedKey[_]]
def apply[S](g: T => S): Initialize[S]
private[sbt] def mapReferenced(g: MapScoped): Initialize[T]
private[sbt] def mapConstant(g: MapConstant): Initialize[T]
private[sbt] def validateReferenced(g: ValidateRef): ValidatedInit[T] =
validateKeyReferenced(new ValidateKeyRef {
def apply[B](key: ScopedKey[B], selfRefOk: Boolean) = g(key)
})
private[sbt] def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[T]
def evaluate(map: Settings[Scope]): T
def zip[S](o: Initialize[S]): Initialize[(T, S)] = zipTupled(o)(idFun)
def zipWith[S, U](o: Initialize[S])(f: (T, S) => U): Initialize[U] = zipTupled(o)(f.tupled)
private[this] def zipTupled[S, U](o: Initialize[S])(f: ((T, S)) => U): Initialize[U] =
new Apply[({ type l[L[x]] = (L[T], L[S]) })#l, U](f, (this, o), AList.tuple2[T, S])
/** A fold on the static attributes of this and nested Initializes. */
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S
}
object Initialize {
implicit def joinInitialize[T](s: Seq[Initialize[T]]): JoinInitSeq[T] = new JoinInitSeq(s)
final class JoinInitSeq[T](s: Seq[Initialize[T]]) {
def joinWith[S](f: Seq[T] => S): Initialize[S] = uniform(s)(f)
def join: Initialize[Seq[T]] = uniform(s)(idFun)
}
def join[T](inits: Seq[Initialize[T]]): Initialize[Seq[T]] = uniform(inits)(idFun)
def joinAny[M[_]](inits: Seq[Initialize[M[T]] forSome { type T }]): Initialize[Seq[M[_]]] =
join(inits.asInstanceOf[Seq[Initialize[M[Any]]]])
.asInstanceOf[Initialize[Seq[M[T] forSome { type T }]]]
}
object SettingsDefinition {
implicit def unwrapSettingsDefinition(d: SettingsDefinition): Seq[Setting[_]] = d.settings
implicit def wrapSettingsDefinition(ss: Seq[Setting[_]]): SettingsDefinition =
new SettingList(ss)
}
sealed trait SettingsDefinition {
def settings: Seq[Setting[_]]
}
final class SettingList(val settings: Seq[Setting[_]]) extends SettingsDefinition
sealed class Setting[T] private[Init] (
val key: ScopedKey[T],
val init: Initialize[T],
val pos: SourcePosition
) extends SettingsDefinition {
def settings = this :: Nil
def definitive: Boolean = !init.dependencies.contains(key)
def dependencies: Seq[ScopedKey[_]] = remove(init.dependencies, key)
def mapReferenced(g: MapScoped): Setting[T] = make(key, init mapReferenced g, pos)
def validateReferenced(g: ValidateRef): Either[Seq[Undefined], Setting[T]] =
(init validateReferenced g).right.map(newI => make(key, newI, pos))
private[sbt] def validateKeyReferenced(g: ValidateKeyRef): Either[Seq[Undefined], Setting[T]] =
(init validateKeyReferenced g).right.map(newI => make(key, newI, pos))
def mapKey(g: MapScoped): Setting[T] = make(g(key), init, pos)
def mapInit(f: (ScopedKey[T], T) => T): Setting[T] = make(key, init(t => f(key, t)), pos)
def mapConstant(g: MapConstant): Setting[T] = make(key, init mapConstant g, pos)
def withPos(pos: SourcePosition) = make(key, init, pos)
def positionString: Option[String] = pos match {
case pos: FilePosition => Some(pos.path + ":" + pos.startLine)
case NoPosition => None
}
private[sbt] def mapInitialize(f: Initialize[T] => Initialize[T]): Setting[T] =
make(key, f(init), pos)
override def toString = "setting(" + key + ") at " + pos
protected[this] def make[B](
key: ScopedKey[B],
init: Initialize[B],
pos: SourcePosition
): Setting[B] = new Setting[B](key, init, pos)
protected[sbt] def isDerived: Boolean = false
private[sbt] def setScope(s: Scope): Setting[T] =
make(key.copy(scope = s), init.mapReferenced(mapScope(const(s))), pos)
/** Turn this setting into a `DefaultSetting` if it's not already, otherwise returns `this` */
private[sbt] def default(id: => Long = nextDefaultID()): DefaultSetting[T] =
DefaultSetting(key, init, pos, id)
}
private[Init] sealed class DerivedSetting[T](
sk: ScopedKey[T],
i: Initialize[T],
p: SourcePosition,
val filter: Scope => Boolean,
val trigger: AttributeKey[_] => Boolean
) extends Setting[T](sk, i, p) {
override def make[B](key: ScopedKey[B], init: Initialize[B], pos: SourcePosition): Setting[B] =
new DerivedSetting[B](key, init, pos, filter, trigger)
protected[sbt] override def isDerived: Boolean = true
override def default(_id: => Long): DefaultSetting[T] =
new DerivedSetting[T](sk, i, p, filter, trigger) with DefaultSetting[T] { val id = _id }
override def toString = "derived " + super.toString
}
// Only keep the first occurrence of this setting and move it to the front so that it has lower precedence than non-defaults.
// This is intended for internal sbt use only, where alternatives like Plugin.globalSettings are not available.
private[Init] sealed trait DefaultSetting[T] extends Setting[T] {
val id: Long
override def make[B](key: ScopedKey[B], init: Initialize[B], pos: SourcePosition): Setting[B] =
super.make(key, init, pos) default id
override final def hashCode = id.hashCode
override final def equals(o: Any): Boolean = o match {
case d: DefaultSetting[_] => d.id == id; case _ => false
}
override def toString = s"default($id) " + super.toString
override def default(id: => Long) = this
}
object DefaultSetting {
def apply[T](sk: ScopedKey[T], i: Initialize[T], p: SourcePosition, _id: Long) =
new Setting[T](sk, i, p) with DefaultSetting[T] { val id = _id }
}
private[this] def handleUndefined[T](vr: ValidatedInit[T]): Initialize[T] = vr match {
case Left(undefs) => throw new RuntimeUndefined(undefs)
case Right(x) => x
}
private[this] lazy val getValidated =
new (ValidatedInit ~> Initialize) { def apply[T](v: ValidatedInit[T]) = handleUndefined[T](v) }
// mainly for reducing generated class count
private[this] def validateKeyReferencedT(g: ValidateKeyRef) =
new (Initialize ~> ValidatedInit) {
def apply[T](i: Initialize[T]) = i validateKeyReferenced g
}
private[this] def mapReferencedT(g: MapScoped) =
new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapReferenced g }
private[this] def mapConstantT(g: MapConstant) =
new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapConstant g }
private[this] def evaluateT(g: Settings[Scope]) =
new (Initialize ~> Id) { def apply[T](i: Initialize[T]) = i evaluate g }
private[this] def deps(ls: Seq[Initialize[_]]): Seq[ScopedKey[_]] = ls.flatMap(_.dependencies)
sealed trait Keyed[S, T] extends Initialize[T] {
def scopedKey: ScopedKey[S]
def transform: S => T
final def dependencies = scopedKey :: Nil
final def apply[Z](g: T => Z): Initialize[Z] = new GetValue(scopedKey, g compose transform)
final def evaluate(ss: Settings[Scope]): T = transform(getValue(ss, scopedKey))
final def mapReferenced(g: MapScoped): Initialize[T] = new GetValue(g(scopedKey), transform)
private[sbt] final def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[T] =
g(scopedKey, false) match {
case Left(un) => Left(un :: Nil)
case Right(nk) => Right(new GetValue(nk, transform))
}
final def mapConstant(g: MapConstant): Initialize[T] = g(scopedKey) match {
case None => this
case Some(const) => new Value(() => transform(const))
}
private[sbt] def processAttributes[B](init: B)(f: (B, AttributeMap) => B): B = init
}
private[this] final class GetValue[S, T](val scopedKey: ScopedKey[S], val transform: S => T)
extends Keyed[S, T]
trait KeyedInitialize[T] extends Keyed[T, T] {
final val transform = idFun[T]
}
private[sbt] final class TransformCapture(val f: Initialize ~> Initialize)
extends Initialize[Initialize ~> Initialize] {
def dependencies = Nil
def apply[Z](g2: (Initialize ~> Initialize) => Z): Initialize[Z] = map(this)(g2)
def evaluate(ss: Settings[Scope]): Initialize ~> Initialize = f
def mapReferenced(g: MapScoped) = new TransformCapture(mapReferencedT(g) ∙ f)
def mapConstant(g: MapConstant) = new TransformCapture(mapConstantT(g) ∙ f)
def validateKeyReferenced(g: ValidateKeyRef) =
Right(new TransformCapture(getValidated ∙ validateKeyReferencedT(g) ∙ f))
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init
}
private[sbt] final class ValidationCapture[T](val key: ScopedKey[T], val selfRefOk: Boolean)
extends Initialize[ScopedKey[T]] {
def dependencies = Nil
def apply[Z](g2: ScopedKey[T] => Z): Initialize[Z] = map(this)(g2)
def evaluate(ss: Settings[Scope]) = key
def mapReferenced(g: MapScoped) = new ValidationCapture(g(key), selfRefOk)
def mapConstant(g: MapConstant) = this
def validateKeyReferenced(g: ValidateKeyRef) = g(key, selfRefOk) match {
case Left(un) => Left(un :: Nil)
case Right(k) => Right(new ValidationCapture(k, selfRefOk))
}
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init
}
private[sbt] final class Bind[S, T](val f: S => Initialize[T], val in: Initialize[S])
extends Initialize[T] {
def dependencies = in.dependencies
def apply[Z](g: T => Z): Initialize[Z] = new Bind[S, Z](s => f(s)(g), in)
def evaluate(ss: Settings[Scope]): T = f(in evaluate ss) evaluate ss
def mapReferenced(g: MapScoped) = new Bind[S, T](s => f(s) mapReferenced g, in mapReferenced g)
def validateKeyReferenced(g: ValidateKeyRef) = (in validateKeyReferenced g).right.map {
validIn =>
new Bind[S, T](s => handleUndefined(f(s) validateKeyReferenced g), validIn)
}
def mapConstant(g: MapConstant) = new Bind[S, T](s => f(s) mapConstant g, in mapConstant g)
private[sbt] def processAttributes[B](init: B)(f: (B, AttributeMap) => B): B =
in.processAttributes(init)(f)
}
private[sbt] final class Optional[S, T](val a: Option[Initialize[S]], val f: Option[S] => T)
extends Initialize[T] {
def dependencies = deps(a.toList)
def apply[Z](g: T => Z): Initialize[Z] = new Optional[S, Z](a, g compose f)
def mapReferenced(g: MapScoped) = new Optional(a map mapReferencedT(g).fn, f)
def validateKeyReferenced(g: ValidateKeyRef) = a match {
case None => Right(this)
case Some(i) => Right(new Optional(i.validateKeyReferenced(g).right.toOption, f))
}
def mapConstant(g: MapConstant): Initialize[T] = new Optional(a map mapConstantT(g).fn, f)
def evaluate(ss: Settings[Scope]): T = f(a.flatMap(i => trapBadRef(evaluateT(ss)(i))))
// A proper solution would be to deprecate `evaluate` (or restrict it to external use) and add a new internal method that returns Either.
private[this] def trapBadRef[A](run: => A): Option[A] =
try Some(run)
catch { case e: InvalidReference => None }
private[sbt] def processAttributes[B](init: B)(f: (B, AttributeMap) => B): B = a match {
case None => init
case Some(i) => i.processAttributes(init)(f)
}
}
private[sbt] final class Value[T](val value: () => T) extends Initialize[T] {
def dependencies = Nil
def mapReferenced(g: MapScoped) = this
def validateKeyReferenced(g: ValidateKeyRef) = Right(this)
def apply[S](g: T => S) = new Value[S](() => g(value()))
def mapConstant(g: MapConstant) = this
def evaluate(map: Settings[Scope]): T = value()
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init
}
private[sbt] final object StaticScopes extends Initialize[Set[Scope]] {
def dependencies = Nil
def mapReferenced(g: MapScoped) = this
def validateKeyReferenced(g: ValidateKeyRef) = Right(this)
def apply[S](g: Set[Scope] => S) = map(this)(g)
def mapConstant(g: MapConstant) = this
def evaluate(map: Settings[Scope]) = map.scopes
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init
}
private[sbt] final class Apply[K[L[x]], T](
val f: K[Id] => T,
val inputs: K[Initialize],
val alist: AList[K]
) extends Initialize[T] {
def dependencies = deps(alist.toList(inputs))
def mapReferenced(g: MapScoped) = mapInputs(mapReferencedT(g))
def apply[S](g: T => S) = new Apply(g compose f, inputs, alist)
def mapConstant(g: MapConstant) = mapInputs(mapConstantT(g))
def mapInputs(g: Initialize ~> Initialize): Initialize[T] =
new Apply(f, alist.transform(inputs, g), alist)
def evaluate(ss: Settings[Scope]) = f(alist.transform(inputs, evaluateT(ss)))
def validateKeyReferenced(g: ValidateKeyRef) = {
val tx = alist.transform(inputs, validateKeyReferencedT(g))
val undefs = alist.toList(tx).flatMap(_.left.toSeq.flatten)
val get = new (ValidatedInit ~> Initialize) {
def apply[B](vr: ValidatedInit[B]) = vr.right.get
}
if (undefs.isEmpty) Right(new Apply(f, alist.transform(tx, get), alist)) else Left(undefs)
}
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S =
(init /: alist.toList(inputs)) { (v, i) =>
i.processAttributes(v)(f)
}
}
private def remove[T](s: Seq[T], v: T) = s filterNot (_ == v)
}

View File

@ -0,0 +1,83 @@
package sbt.internal.util
object Signals {
val CONT = "CONT"
val INT = "INT"
def withHandler[T](handler: () => Unit, signal: String = INT)(action: () => T): T = {
val result =
try {
val signals = new Signals0
signals.withHandler(signal, handler, action)
} catch { case e: LinkageError => Right(action()) }
result match {
case Left(e) => throw e
case Right(v) => v
}
}
/** Helper interface so we can expose internals of signal-isms to others. */
sealed trait Registration {
def remove(): Unit
}
/**
* Register a signal handler that can be removed later.
* NOTE: Does not stack with other signal handlers!!!!
*/
def register(handler: () => Unit, signal: String = INT): Registration =
// TODO - Maybe we can just ignore things if not is-supported.
if (supported(signal)) {
import sun.misc.{ Signal, SignalHandler }
val intSignal = new Signal(signal)
val newHandler = new SignalHandler {
def handle(sig: Signal): Unit = { handler() }
}
val oldHandler = Signal.handle(intSignal, newHandler)
object unregisterNewHandler extends Registration {
override def remove(): Unit = {
Signal.handle(intSignal, oldHandler)
()
}
}
unregisterNewHandler
} else {
// TODO - Maybe we should just throw an exception if we don't support signals...
object NullUnregisterNewHandler extends Registration {
override def remove(): Unit = ()
}
NullUnregisterNewHandler
}
def supported(signal: String): Boolean =
try {
val signals = new Signals0
signals.supported(signal)
} catch { case e: LinkageError => false }
}
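A brief usage sketch for the API above; the handler bodies and the `SignalsSketch` name are illustrative only:
import sbt.internal.util.Signals

object SignalsSketch {
  // Runs the block with a temporary INT handler; the previous handler is restored afterwards.
  val result: Int = Signals.withHandler(() => println("interrupted"))(() => 42)

  // Installs a removable handler when a scoping block is not convenient.
  val registration = Signals.register(() => println("got INT"), Signals.INT)
  def stop(): Unit = registration.remove()
}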
// Must only be referenced using a
// try { } catch { case e: LinkageError => ... }
// block to guard against sun.misc.Signal being unavailable.
private final class Signals0 {
def supported(signal: String): Boolean = {
import sun.misc.Signal
try { new Signal(signal); true } catch { case e: IllegalArgumentException => false }
}
// returns a LinkageError in `action` as Left(t) in order to avoid it being
// incorrectly swallowed as missing Signal/SignalHandler
def withHandler[T](signal: String, handler: () => Unit, action: () => T): Either[Throwable, T] = {
import sun.misc.{ Signal, SignalHandler }
val intSignal = new Signal(signal)
val newHandler = new SignalHandler {
def handle(sig: Signal): Unit = { handler() }
}
val oldHandler = Signal.handle(intSignal, newHandler)
try Right(action())
catch { case e: LinkageError => Left(e) } finally { Signal.handle(intSignal, oldHandler); () }
}
}

View File

@ -0,0 +1,54 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
trait TypeFunctions {
type Id[X] = X
sealed trait Const[A] { type Apply[B] = A }
sealed trait ConstK[A] { type l[L[x]] = A }
sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] }
sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] }
sealed trait P1of2[M[_, _], A] { type Apply[B] = M[A, B]; type Flip[B] = M[B, A] }
final val left = new (Id ~> P1of2[Left, Nothing]#Flip) { def apply[T](t: T) = Left(t) }
final val right = new (Id ~> P1of2[Right, Nothing]#Apply) { def apply[T](t: T) = Right(t) }
final val some = new (Id ~> Some) { def apply[T](t: T) = Some(t) }
final def idFun[T] = (t: T) => t
final def const[A, B](b: B): A => B = _ => b
final def idK[M[_]]: M ~> M = new (M ~> M) { def apply[T](m: M[T]): M[T] = m }
def nestCon[M[_], N[_], G[_]](f: M ~> N): (M ∙ G)#l ~> (N ∙ G)#l =
f.asInstanceOf[(M ∙ G)#l ~> (N ∙ G)#l] // implemented with a cast to avoid extra object+method call. castless version:
/* new ( (M ∙ G)#l ~> (N ∙ G)#l ) {
def apply[T](mg: M[G[T]]): N[G[T]] = f(mg)
} */
implicit def toFn1[A, B](f: A => B): Fn1[A, B] = new Fn1[A, B] {
def ∙[C](g: C => A) = f compose g
}
type Endo[T] = T => T
type ~>|[A[_], B[_]] = A ~> Compose[Option, B]#Apply
}
object TypeFunctions extends TypeFunctions
trait ~>[-A[_], +B[_]] { outer =>
def apply[T](a: A[T]): B[T]
// directly on ~> because of type inference limitations
final def ∙[C[_]](g: C ~> A): C ~> B = new (C ~> B) { def apply[T](c: C[T]) = outer.apply(g(c)) }
final def ∙[C, D](g: C => D)(implicit ev: D <:< A[D]): C => B[D] = i => apply(ev(g(i)))
final def fn[T] = (t: A[T]) => apply[T](t)
}
object ~> {
import TypeFunctions._
val Id: Id ~> Id = new (Id ~> Id) { def apply[T](a: T): T = a }
implicit def tcIdEquals: (Id ~> Id) = Id
}
trait Fn1[A, B] {
def ∙[C](g: C => A): C => B
}

View File

@ -0,0 +1,12 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
object Types extends Types
trait Types extends TypeFunctions {
val :^: = KCons
type :+:[H, T <: HList] = HCons[H, T]
val :+: = HCons
}

View File

@ -0,0 +1,40 @@
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt.internal.util
import java.util.Locale
object Util {
def makeList[T](size: Int, value: T): List[T] = List.fill(size)(value)
def separateE[A, B](ps: Seq[Either[A, B]]): (Seq[A], Seq[B]) =
separate(ps)(Types.idFun)
def separate[T, A, B](ps: Seq[T])(f: T => Either[A, B]): (Seq[A], Seq[B]) = {
val (a, b) = ((Nil: Seq[A], Nil: Seq[B]) /: ps)((xs, y) => prependEither(xs, f(y)))
(a.reverse, b.reverse)
}
def prependEither[A, B](acc: (Seq[A], Seq[B]), next: Either[A, B]): (Seq[A], Seq[B]) =
next match {
case Left(l) => (l +: acc._1, acc._2)
case Right(r) => (acc._1, r +: acc._2)
}
def pairID[A, B] = (a: A, b: B) => (a, b)
private[this] lazy val Hyphen = """-(\p{javaLowerCase})""".r
def hasHyphen(s: String): Boolean = s.indexOf('-') >= 0
def hyphenToCamel(s: String): String =
if (hasHyphen(s)) Hyphen.replaceAllIn(s, _.group(1).toUpperCase(Locale.ENGLISH)) else s
private[this] lazy val Camel = """(\p{javaLowerCase})(\p{javaUpperCase})""".r
def camelToHyphen(s: String): String =
Camel.replaceAllIn(s, m => m.group(1) + "-" + m.group(2).toLowerCase(Locale.ENGLISH))
def quoteIfKeyword(s: String): String = if (ScalaKeywords.values(s)) '`' + s + '`' else s
}
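The hyphen/camel and Either helpers are easiest to read from a couple of illustrative inputs (values chosen arbitrarily):
import sbt.internal.util.Util

object UtilSketch {
  assert(Util.hyphenToCamel("test-only") == "testOnly")
  assert(Util.camelToHyphen("testOnly") == "test-only")
  val (lefts, rights) = Util.separateE(Seq(Left(1), Right("a"), Left(2)))
  assert(lefts == Seq(1, 2) && rights == Seq("a"))
}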

View File

@ -0,0 +1,22 @@
package sbt.util
import sjsonnew.JsonWriter
sealed trait OptJsonWriter[A]
final case class NoJsonWriter[A]() extends OptJsonWriter[A]
final case class SomeJsonWriter[A](value: JsonWriter[A]) extends OptJsonWriter[A]
trait OptJsonWriter0 {
implicit def fallback[A]: NoJsonWriter[A] = NoJsonWriter()
}
object OptJsonWriter extends OptJsonWriter0 {
implicit def lift[A](implicit z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
trait StrictMode0 {
implicit def conflictingFallback1[A]: NoJsonWriter[A] = NoJsonWriter()
implicit def conflictingFallback2[A]: NoJsonWriter[A] = NoJsonWriter()
}
object StrictMode extends StrictMode0 {
implicit def lift[A](implicit z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
}
}

View File

@ -0,0 +1,12 @@
package sbt.util
trait Show[A] {
def show(a: A): String
}
object Show {
def apply[A](f: A => String): Show[A] = new Show[A] { def show(a: A): String = f(a) }
def fromToString[A]: Show[A] = new Show[A] {
def show(a: A): String = a.toString
}
}

View File

@ -0,0 +1,55 @@
/* sbt -- Simple Build Tool
* Copyright 2008 Mark Harrah */
package sbt.internal.util
import org.scalacheck._
import Prop._
import scala.collection.mutable.HashSet
object DagSpecification extends Properties("Dag") {
property("No repeated nodes") = forAll { (dag: TestDag) =>
isSet(dag.topologicalSort)
}
property("Sort contains node") = forAll { (dag: TestDag) =>
dag.topologicalSort.contains(dag)
}
property("Dependencies precede node") = forAll { (dag: TestDag) =>
dependenciesPrecedeNodes(dag.topologicalSort)
}
implicit lazy val arbTestDag: Arbitrary[TestDag] = Arbitrary(Gen.sized(dagGen))
private def dagGen(nodeCount: Int): Gen[TestDag] = {
val nodes = new HashSet[TestDag]
def nonterminalGen(p: Gen.Parameters): Gen[TestDag] = {
val seed = rng.Seed.random()
for {
i <- 0 until nodeCount
nextDeps <- Gen.someOf(nodes).apply(p, seed)
} nodes += new TestDag(i, nextDeps)
for (nextDeps <- Gen.someOf(nodes)) yield new TestDag(nodeCount, nextDeps)
}
Gen.parameterized(nonterminalGen)
}
private def isSet[T](c: Seq[T]) = Set(c: _*).size == c.size
private def dependenciesPrecedeNodes(sort: List[TestDag]) = {
val seen = new HashSet[TestDag]
def iterate(remaining: List[TestDag]): Boolean = {
remaining match {
case Nil => true
case node :: tail =>
if (node.dependencies.forall(seen.contains) && !seen.contains(node)) {
seen += node
iterate(tail)
} else
false
}
}
iterate(sort)
}
}
class TestDag(id: Int, val dependencies: Iterable[TestDag]) extends Dag[TestDag] {
override def toString = id + "->" + dependencies.mkString("[", ",", "]")
}

View File

@ -0,0 +1,28 @@
package sbt
package internal
package util
import scalajson.ast.unsafe._
import sjsonnew._, BasicJsonProtocol._, support.scalajson.unsafe._
import HListFormats._
class HListFormatSpec extends UnitSpec {
val quux = 23 :+: "quux" :+: true :+: HNil
it should "round trip quux" in assertRoundTrip(quux)
it should "round trip hnil" in assertRoundTrip(HNil)
it should "have a flat structure for quux" in assertJsonString(quux, """[23,"quux",true]""")
it should "have a flat structure for hnil" in assertJsonString(HNil, "[]")
def assertRoundTrip[A: JsonWriter: JsonReader](x: A) = {
val jsonString: String = toJsonString(x)
val jValue: JValue = Parser.parseUnsafe(jsonString)
val y: A = Converter.fromJson[A](jValue).get
assert(x === y)
}
def assertJsonString[A: JsonWriter](x: A, s: String) = assert(toJsonString(x) === s)
def toJsonString[A: JsonWriter](x: A): String = CompactPrinter(Converter.toJson(x).get)
}

View File

@ -0,0 +1,32 @@
package sbt.internal.util
import org.scalacheck._
import Prop._
object KeyTest extends Properties("AttributeKey") {
property("equality") = {
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test"), true) &&
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test", "description"), true) &&
compare(AttributeKey[Int]("test", "a"), AttributeKey[Int]("test", "b"), true) &&
compare(AttributeKey[Int]("test"), AttributeKey[Int]("tests"), false) &&
compare(AttributeKey[Int]("test"), AttributeKey[Double]("test"), false) &&
compare(AttributeKey[java.lang.Integer]("test"), AttributeKey[Int]("test"), false) &&
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, String]]("test"), true) &&
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, _]]("test"), false)
}
def compare(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
("a.label: " + a.label) |:
("a.manifest: " + a.manifest) |:
("b.label: " + b.label) |:
("b.manifest: " + b.manifest) |:
("expected equal? " + same) |:
compare0(a, b, same)
def compare0(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
if (same) {
("equality" |: (a == b)) &&
("hash" |: (a.hashCode == b.hashCode))
} else
("equality" |: (a != b))
}

View File

@ -0,0 +1,17 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
// compilation test
object LiteralTest {
def x[A[_], B[_]](f: A ~> B) = f
import Param._
val f = x { (p: Param[Option, List]) =>
p.ret(p.in.toList)
}
val a: List[Int] = f(Some(3))
val b: List[String] = f(Some("aa"))
}

View File

@ -0,0 +1,18 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
import Types._
// compilation test
object PMapTest {
val mp = new DelegatingPMap[Some, Id](new collection.mutable.HashMap)
mp(Some("asdf")) = "a"
mp(Some(3)) = 9
val x = Some(3) :^: Some("asdf") :^: KNil
val y = x.transform[Id](mp)
assert(y.head == 9)
assert(y.tail.head == "a")
assert(y.tail.tail == KNil)
}

View File

@ -0,0 +1,87 @@
package sbt.internal.util
import sbt.util.Show
/** Define our settings system */
// A basic scope indexed by an integer.
final case class Scope(nestIndex: Int, idAtIndex: Int = 0)
// Extend the Init trait.
// (It is done this way because the Scope type parameter is used everywhere in Init.
// Lots of type constructors would become binary, which as you may know requires lots of type lambdas
// when you want a type function with only one parameter.
// That would be a general pain.)
case class SettingsExample() extends Init[Scope] {
// Provides a way of showing a Scope+AttributeKey[_]
val showFullKey: Show[ScopedKey[_]] = Show[ScopedKey[_]]((key: ScopedKey[_]) => {
s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"
})
// A sample delegation function that delegates to a Scope with a lower index.
val delegates: Scope => Seq[Scope] = {
case s @ Scope(index, proj) =>
s +: (if (index <= 0) Nil
else { (if (proj > 0) List(Scope(index)) else Nil) ++: delegates(Scope(index - 1)) })
}
// Not using this feature in this example.
val scopeLocal: ScopeLocal = _ => Nil
// These three functions + a scope (here, Scope) are sufficient for defining our settings system.
}
/** Usage Example **/
case class SettingsUsage(val settingsExample: SettingsExample) {
import settingsExample._
// Define some keys
val a = AttributeKey[Int]("a")
val b = AttributeKey[Int]("b")
// Scope these keys
val a3 = ScopedKey(Scope(3), a)
val a4 = ScopedKey(Scope(4), a)
val a5 = ScopedKey(Scope(5), a)
val b4 = ScopedKey(Scope(4), b)
// Define some settings
val mySettings: Seq[Setting[_]] = Seq(
setting(a3, value(3)),
setting(b4, map(a4)(_ * 3)),
update(a5)(_ + 1)
)
// "compiles" and applies the settings.
// This can be split into multiple steps to access intermediate results if desired.
// The 'inspect' command operates on the output of 'compile', for example.
val applied: Settings[Scope] = make(mySettings)(delegates, scopeLocal, showFullKey)
// Show results.
/* for(i <- 0 to 5; k <- Seq(a, b)) {
println( k.label + i + " = " + applied.get( Scope(i), k) )
}*/
/**
* Output:
* For the None results, we never defined the value and there was no value to delegate to.
* For a3, we explicitly defined it to be 3.
* a4 wasn't defined, so it delegates to a3 according to our delegates function.
* b4 gets the value for a4 (which delegates to a3, so it is 3) and multiplies by 3
* a5 is defined as the previous value of a5 + 1 and
* since no previous value of a5 was defined, it delegates to a4, resulting in 3+1=4.
* b5 isn't defined explicitly, so it delegates to b4 and is therefore equal to 9 as well
* a0 = None
* b0 = None
* a1 = None
* b1 = None
* a2 = None
* b2 = None
* a3 = Some(3)
* b3 = None
* a4 = Some(3)
* b4 = Some(9)
* a5 = Some(4)
* b5 = Some(9)
*/
}
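The commented-out loop above can be spot-checked directly against `applied`; a minimal sketch placed alongside the definitions above, using the values listed in the output comment:
object SettingsUsageSketch {
  val usage = SettingsUsage(SettingsExample())
  import usage._

  assert(applied.get(Scope(3), a) == Some(3)) // defined explicitly
  assert(applied.get(Scope(4), a) == Some(3)) // delegates to Scope(3)
  assert(applied.get(Scope(4), b) == Some(9)) // 3 * 3
  assert(applied.get(Scope(5), a) == Some(4)) // previous value (via delegation) + 1
  assert(applied.get(Scope(0), a) == None)    // nothing to delegate to
}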

View File

@ -0,0 +1,202 @@
package sbt.internal.util
import org.scalacheck._
import Prop._
object SettingsTest extends Properties("settings") {
val settingsExample: SettingsExample = SettingsExample()
import settingsExample._
val settingsUsage = SettingsUsage(settingsExample)
import settingsUsage._
import scala.reflect.Manifest
final val ChainMax = 5000
lazy val chainLengthGen = Gen.choose(1, ChainMax)
property("Basic settings test") = secure(all(tests: _*))
property("Basic chain") = forAll(chainLengthGen) { (i: Int) =>
val abs = math.abs(i)
singleIntTest(chain(abs, value(0)), abs)
}
property("Basic bind chain") = forAll(chainLengthGen) { (i: Int) =>
val abs = math.abs(i)
singleIntTest(chainBind(value(abs)), 0)
}
property("Allows references to completed settings") = forAllNoShrink(30) { allowedReference }
final def allowedReference(intermediate: Int): Prop = {
val top = value(intermediate)
def iterate(init: Initialize[Int]): Initialize[Int] =
bind(init) { t =>
if (t <= 0)
top
else
iterate(value(t - 1))
}
evaluate(setting(chk, iterate(top)) :: Nil); true
}
property("Derived setting chain depending on (prev derived, normal setting)") =
forAllNoShrink(Gen.choose(1, 100).label("numSettings")) { derivedSettings }
final def derivedSettings(nr: Int): Prop = {
val genScopedKeys = {
// We want to generate lists of keys that DO NOT include the "ch" key we use to check things.
val attrKeys = mkAttrKeys[Int](nr).filter(_.forall(_.label != "ch"))
attrKeys map (_ map (ak => ScopedKey(Scope(0), ak)))
}.label("scopedKeys").filter(_.nonEmpty)
forAll(genScopedKeys) { scopedKeys =>
try {
// Note: It's evil to grab `last` IF you haven't verified the set can't be empty.
val last = scopedKeys.last
val derivedSettings: Seq[Setting[Int]] = (
for {
List(scoped0, scoped1) <- chk :: scopedKeys sliding 2
nextInit = if (scoped0 == chk) chk
else
(scoped0 zipWith chk) { (p, _) =>
p + 1
}
} yield derive(setting(scoped1, nextInit))
).toSeq
{
// Note: This causes a cyclic reference error quite frequently.
checkKey(last, Some(nr - 1), evaluate(setting(chk, value(0)) +: derivedSettings)) :| "Not derived?"
} && {
checkKey(last, None, evaluate(derivedSettings)) :| "Should not be derived"
}
} catch {
case t: Throwable =>
// TODO - For debugging only.
t.printStackTrace(System.err)
throw t
}
}
}
private def mkAttrKeys[T](nr: Int)(implicit mf: Manifest[T]): Gen[List[AttributeKey[T]]] = {
import Gen._
val nonEmptyAlphaStr =
nonEmptyListOf(alphaChar).map(_.mkString).suchThat(_.forall(_.isLetter))
(for {
list <- Gen.listOfN(nr, nonEmptyAlphaStr) suchThat (l => l.size == l.distinct.size)
} yield list.map(item => AttributeKey[T](item))).label(s"mkAttrKeys($nr)")
}
property("Derived setting(s) replace DerivedSetting in the Seq[Setting[_]]") =
derivedKeepsPosition
final def derivedKeepsPosition: Prop = {
val a: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("a"))
val b: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("b"))
val prop1 = {
val settings: Seq[Setting[_]] = Seq(
setting(a, value(3)),
setting(b, value(6)),
derive(setting(b, a)),
setting(a, value(5)),
setting(b, value(8))
)
val ev = evaluate(settings)
checkKey(a, Some(5), ev) && checkKey(b, Some(8), ev)
}
val prop2 = {
val settings: Seq[Setting[Int]] = Seq(
setting(a, value(3)),
setting(b, value(6)),
derive(setting(b, a)),
setting(a, value(5))
)
val ev = evaluate(settings)
checkKey(a, Some(5), ev) && checkKey(b, Some(5), ev)
}
prop1 && prop2
}
property(
"DerivedSetting in ThisBuild scopes derived settings under projects thus allowing safe +="
) = forAllNoShrink(Gen.choose(1, 100)) { derivedSettingsScope }
final def derivedSettingsScope(nrProjects: Int): Prop = {
forAll(mkAttrKeys[Int](2)) {
case List(key, derivedKey) =>
val projectKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), key)
val projectDerivedKeys = for { proj <- 1 to nrProjects } yield
ScopedKey(Scope(1, proj), derivedKey)
val globalKey = ScopedKey(Scope(0), key)
val globalDerivedKey = ScopedKey(Scope(0), derivedKey)
// Each project defines an initial value, but the update is defined in globalKey.
// However, the derived Settings that come from this should be scoped in each project.
val settings: Seq[Setting[_]] =
derive(setting(globalDerivedKey, settingsExample.map(globalKey)(_ + 1))) +: projectKeys
.map(pk => setting(pk, value(0)))
val ev = evaluate(settings)
// Also check that the key has no value at the "global" scope
val props = for { pk <- projectDerivedKeys } yield checkKey(pk, Some(1), ev)
checkKey(globalDerivedKey, None, ev) && Prop.all(props: _*)
}
}
// Circular (dynamic) references currently loop infinitely.
// This is the expected behavior (detecting dynamic cycles is expensive),
// but it may be necessary to provide an option to detect them (with a performance hit).
// The commented-out property below would exercise that cycle detection.
// property("Catches circular references") = forAll(chainLengthGen) { checkCircularReferences _ }
final def checkCircularReferences(intermediate: Int): Prop = {
val ccr = new CCR(intermediate)
try { evaluate(setting(chk, ccr.top) :: Nil); false } catch {
case e: java.lang.Exception => true
}
}
def tests =
for (i <- 0 to 5; k <- Seq(a, b)) yield {
val expected = expectedValues(2 * i + (if (k == a) 0 else 1))
checkKey[Int](ScopedKey(Scope(i), k), expected, applied)
}
lazy val expectedValues = None :: None :: None :: None :: None :: None :: Some(3) :: None ::
Some(3) :: Some(9) :: Some(4) :: Some(9) :: Nil
lazy val ch = AttributeKey[Int]("ch")
lazy val chk = ScopedKey(Scope(0), ch)
def chain(i: Int, prev: Initialize[Int]): Initialize[Int] =
if (i <= 0) prev else chain(i - 1, prev(_ + 1))
def chainBind(prev: Initialize[Int]): Initialize[Int] =
bind(prev) { v =>
if (v <= 0) prev else chainBind(value(v - 1))
}
def singleIntTest(i: Initialize[Int], expected: Int) = {
val eval = evaluate(setting(chk, i) :: Nil)
checkKey(chk, Some(expected), eval)
}
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) = {
val value = settings.get(key.scope, key.key)
("Key: " + key) |:
("Value: " + value) |:
("Expected: " + expected) |:
(value == expected)
}
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
try { make(settings)(delegates, scopeLocal, showFullKey) } catch {
case e: Throwable => e.printStackTrace; throw e
}
}
// This setup is a workaround for module synchronization issues
final class CCR(intermediate: Int) {
import SettingsTest.settingsExample._
lazy val top = iterate(value(intermediate), intermediate)
def iterate(init: Initialize[Int], i: Int): Initialize[Int] =
bind(init) { t =>
if (t <= 0)
top
else
iterate(value(t - 1), t - 1)
}
}

View File

@ -0,0 +1,5 @@
package sbt.internal.util
import org.scalatest._
abstract class UnitSpec extends FlatSpec with Matchers

View File

@ -0,0 +1,3 @@
Simple Build Tool: Completion Component
Copyright 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)

View File

@ -0,0 +1,203 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt.internal.util
import jline.console.ConsoleReader
import jline.console.history.{ FileHistory, MemoryHistory }
import java.io.{ File, InputStream, FileInputStream, FileDescriptor, FilterInputStream }
import complete.Parser
import scala.concurrent.duration.Duration
import scala.annotation.tailrec
abstract class JLine extends LineReader {
protected[this] def handleCONT: Boolean
protected[this] def reader: ConsoleReader
protected[this] def injectThreadSleep: Boolean
protected[this] val in: InputStream = JLine.makeInputStream(injectThreadSleep)
def readLine(prompt: String, mask: Option[Char] = None) = JLine.withJLine {
unsynchronizedReadLine(prompt, mask)
}
private[this] def unsynchronizedReadLine(prompt: String, mask: Option[Char]): Option[String] =
readLineWithHistory(prompt, mask) map { x =>
x.trim
}
private[this] def readLineWithHistory(prompt: String, mask: Option[Char]): Option[String] =
reader.getHistory match {
case fh: FileHistory =>
try readLineDirect(prompt, mask)
finally fh.flush()
case _ => readLineDirect(prompt, mask)
}
private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] =
if (handleCONT)
Signals.withHandler(() => resume(), signal = Signals.CONT)(() =>
readLineDirectRaw(prompt, mask))
else
readLineDirectRaw(prompt, mask)
private[this] def readLineDirectRaw(prompt: String, mask: Option[Char]): Option[String] = {
val newprompt = handleMultilinePrompt(prompt)
try {
mask match {
case Some(m) => Option(reader.readLine(newprompt, m))
case None => Option(reader.readLine(newprompt))
}
} catch {
case e: InterruptedException => Option("")
}
}
private[this] def handleMultilinePrompt(prompt: String): String = {
val lines = """\r?\n""".r.split(prompt)
lines.length match {
case 0 | 1 => prompt
case _ =>
// Workaround for regression jline/jline2#205
reader.getOutput.write(lines.init.mkString("\n") + "\n")
lines.last
}
}
private[this] def resume(): Unit = {
jline.TerminalFactory.reset
JLine.terminal.init
reader.drawLine()
reader.flush()
}
}
private[sbt] object JLine {
private[this] val TerminalProperty = "jline.terminal"
fixTerminalProperty()
// translate explicit class names to types in order to support
// older Scala, since it shaded classes but not the system property
private[sbt] def fixTerminalProperty(): Unit = {
val newValue = System.getProperty(TerminalProperty) match {
case "jline.UnixTerminal" => "unix"
case null if System.getProperty("sbt.cygwin") != null => "unix"
case "jline.WindowsTerminal" => "windows"
case "jline.AnsiWindowsTerminal" => "windows"
case "jline.UnsupportedTerminal" => "none"
case x => x
}
if (newValue != null) System.setProperty(TerminalProperty, newValue)
()
}
protected[this] val originalIn = new FileInputStream(FileDescriptor.in)
private[sbt] def makeInputStream(injectThreadSleep: Boolean): InputStream =
if (injectThreadSleep) new InputStreamWrapper(originalIn, Duration("50 ms"))
else originalIn
// When calling this, ensure that enableEcho has been or will be called.
// TerminalFactory.get will initialize the terminal to disable echo.
private def terminal = jline.TerminalFactory.get
private def withTerminal[T](f: jline.Terminal => T): T =
synchronized {
val t = terminal
t.synchronized { f(t) }
}
/**
* For accessing the JLine Terminal object.
* This ensures synchronized access as well as re-enabling echo after getting the Terminal.
*/
def usingTerminal[T](f: jline.Terminal => T): T =
withTerminal { t =>
t.restore
f(t)
}
def createReader(): ConsoleReader = createReader(None, JLine.makeInputStream(true))
def createReader(historyPath: Option[File], in: InputStream): ConsoleReader =
usingTerminal { t =>
val cr = new ConsoleReader(in, System.out)
cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650
cr.setBellEnabled(false)
val h = historyPath match {
case None => new MemoryHistory
case Some(file) => new FileHistory(file)
}
h.setMaxSize(MaxHistorySize)
cr.setHistory(h)
cr
}
def withJLine[T](action: => T): T =
withTerminal { t =>
t.init
try { action } finally { t.restore }
}
def simple(
historyPath: Option[File],
handleCONT: Boolean = HandleCONT,
injectThreadSleep: Boolean = false
): SimpleReader = new SimpleReader(historyPath, handleCONT, injectThreadSleep)
val MaxHistorySize = 500
val HandleCONT =
!java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT)
}
private[sbt] class InputStreamWrapper(is: InputStream, val poll: Duration)
extends FilterInputStream(is) {
@tailrec final override def read(): Int =
if (is.available() != 0) is.read()
else {
Thread.sleep(poll.toMillis)
read()
}
@tailrec final override def read(b: Array[Byte]): Int =
if (is.available() != 0) is.read(b)
else {
Thread.sleep(poll.toMillis)
read(b)
}
@tailrec final override def read(b: Array[Byte], off: Int, len: Int): Int =
if (is.available() != 0) is.read(b, off, len)
else {
Thread.sleep(poll.toMillis)
read(b, off, len)
}
}
trait LineReader {
def readLine(prompt: String, mask: Option[Char] = None): Option[String]
}
final class FullReader(
historyPath: Option[File],
complete: Parser[_],
val handleCONT: Boolean = JLine.HandleCONT,
val injectThreadSleep: Boolean = false
) extends JLine {
protected[this] val reader = {
val cr = JLine.createReader(historyPath, in)
sbt.internal.util.complete.JLineCompletion.installCustomCompletor(cr, complete)
cr
}
}
class SimpleReader private[sbt] (
historyPath: Option[File],
val handleCONT: Boolean,
val injectThreadSleep: Boolean
) extends JLine {
protected[this] val reader = JLine.createReader(historyPath, in)
}
object SimpleReader extends SimpleReader(None, JLine.HandleCONT, false)

View File

@ -0,0 +1,162 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
package complete
/**
* Represents a set of completions.
* It exists instead of implicitly defined operations on top of Set[Completion]
* for laziness.
*/
sealed trait Completions {
def get: Set[Completion]
final def x(o: Completions): Completions = flatMap(_ x o)
final def ++(o: Completions): Completions = Completions(get ++ o.get)
final def +:(o: Completion): Completions = Completions(get + o)
final def filter(f: Completion => Boolean): Completions = Completions(get filter f)
final def filterS(f: String => Boolean): Completions = filter(c => f(c.append))
override def toString = get.mkString("Completions(", ",", ")")
final def flatMap(f: Completion => Completions): Completions =
Completions(get.flatMap(c => f(c).get))
final def map(f: Completion => Completion): Completions = Completions(get map f)
override final def hashCode = get.hashCode
override final def equals(o: Any) = o match {
case c: Completions => get == c.get; case _ => false
}
}
object Completions {
/** Returns a lazy Completions instance using the provided Completion Set. */
def apply(cs: => Set[Completion]): Completions = new Completions {
lazy val get = cs
}
/** Returns a strict Completions instance using the provided Completion Set. */
def strict(cs: Set[Completion]): Completions = apply(cs)
/**
* No suggested completions, not even the empty Completion.
* This typically represents invalid input.
*/
val nil: Completions = strict(Set.empty)
/**
* Only includes an empty Suggestion.
* This typically represents valid input that either has no completions or accepts no further input.
*/
val empty: Completions = strict(Set.empty + Completion.empty)
/** Returns a strict Completions instance containing only the provided Completion.*/
def single(c: Completion): Completions = strict(Set.empty + c)
}
/**
* Represents a completion.
* The abstract members `display` and `append` are best explained with an example.
*
* Assuming space-delimited tokens, processing this:
* am is are w<TAB>
* could produce these Completions:
* Completion { display = "was"; append = "as" }
* Completion { display = "were"; append = "ere" }
* to suggest the tokens "was" and "were".
*
* In this way, two pieces of information are preserved:
* 1) what needs to be appended to the current input if a completion is selected
* 2) the full token being completed, which is useful for presenting a user with choices to select
*/
sealed trait Completion {
/** The proposed suffix to append to the existing input to complete the last token in the input.*/
def append: String
/** The string to present to the user to represent the full token being suggested.*/
def display: String
/** True if this Completion is suggesting the empty string.*/
def isEmpty: Boolean
/** Appends the completions in `o` with the completions in this Completion.*/
def ++(o: Completion): Completion = Completion.concat(this, o)
final def x(o: Completions): Completions =
if (Completion evaluatesRight this) o.map(this ++ _) else Completions.strict(Set.empty + this)
override final lazy val hashCode = Completion.hashCode(this)
override final def equals(o: Any) = o match {
case c: Completion => Completion.equal(this, c); case _ => false
}
}
final class DisplayOnly(val display: String) extends Completion {
def isEmpty = display.isEmpty
def append = ""
override def toString = "{" + display + "}"
}
final class Token(val display: String, val append: String) extends Completion {
def isEmpty = display.isEmpty && append.isEmpty
override final def toString = "[" + display + "]++" + append
}
final class Suggestion(val append: String) extends Completion {
def isEmpty = append.isEmpty
def display = append
override def toString = append
}
object Completion {
def concat(a: Completion, b: Completion): Completion =
(a, b) match {
case (as: Suggestion, bs: Suggestion) => suggestion(as.append + bs.append)
case (at: Token, _) if at.append.isEmpty => b
case _ if a.isEmpty => b
case _ => a
}
def evaluatesRight(a: Completion): Boolean =
a match {
case _: Suggestion => true
case at: Token if at.append.isEmpty => true
case _ => a.isEmpty
}
def equal(a: Completion, b: Completion): Boolean =
(a, b) match {
case (as: Suggestion, bs: Suggestion) => as.append == bs.append
case (ad: DisplayOnly, bd: DisplayOnly) => ad.display == bd.display
case (at: Token, bt: Token) => at.display == bt.display && at.append == bt.append
case _ => false
}
def hashCode(a: Completion): Int =
a match {
case as: Suggestion => (0, as.append).hashCode
case ad: DisplayOnly => (1, ad.display).hashCode
case at: Token => (2, at.display, at.append).hashCode
}
val empty: Completion = suggestion("")
def single(c: Char): Completion = suggestion(c.toString)
// TODO: make strict in 0.13.0 to match DisplayOnly
def displayOnly(value: => String): Completion = new DisplayOnly(value)
// TODO: make strict in 0.13.0 to match Token
def token(prepend: => String, append: => String): Completion =
new Token(prepend + append, append)
/** @since 0.12.1 */
def tokenDisplay(append: String, display: String): Completion = new Token(display, append)
// TODO: make strict in 0.13.0 to match Suggestion
def suggestion(value: => String): Completion = new Suggestion(value)
}
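// Minimal usage sketch (added for illustration, not part of the original sources).
// It mirrors the "was"/"were" example in the Completion documentation above; the
// object name CompletionUsageExample is hypothetical.
private[complete] object CompletionUsageExample {
  // completing the input "w" against the candidate tokens "was" and "were"
  val was: Completion = Completion.tokenDisplay(append = "as", display = "was")
  val were: Completion = Completion.tokenDisplay(append = "ere", display = "were")
  val both: Completions = Completions.strict(Set(was, were))
  // both.get.map(_.display) should be Set("was", "were") (what the user is shown)
  // both.get.map(_.append)  should be Set("as", "ere")   (what gets appended on selection)
}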

View File

@ -0,0 +1,52 @@
package sbt.internal.util
package complete
import java.lang.Character.{ toLowerCase => lower }
/** @author Paul Phillips*/
object EditDistance {
/**
* Translated from the java version at
* http://www.merriampark.com/ld.htm
* which is declared to be public domain.
*/
def levenshtein(
s: String,
t: String,
insertCost: Int = 1,
deleteCost: Int = 1,
subCost: Int = 1,
transposeCost: Int = 1,
matchCost: Int = 0,
caseCost: Int = 1,
transpositions: Boolean = false
): Int = {
val n = s.length
val m = t.length
if (n == 0) return m
if (m == 0) return n
val d = Array.ofDim[Int](n + 1, m + 1)
0 to n foreach (x => d(x)(0) = x)
0 to m foreach (x => d(0)(x) = x)
for (i <- 1 to n; s_i = s(i - 1); j <- 1 to m) {
val t_j = t(j - 1)
val cost = if (s_i == t_j) matchCost else if (lower(s_i) == lower(t_j)) caseCost else subCost
val c1 = d(i - 1)(j) + deleteCost
val c2 = d(i)(j - 1) + insertCost
val c3 = d(i - 1)(j - 1) + cost
d(i)(j) = c1 min c2 min c3
if (transpositions) {
if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
}
}
d(n)(m)
}
}
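// Minimal usage sketch (added for illustration, not part of the original sources):
// scoring a near-miss against a known command name, which is how completion code can
// rank "did you mean" suggestions. The object name EditDistanceExample is hypothetical.
private[complete] object EditDistanceExample {
  // "complie" vs "compile": two single-character edits, or one adjacent transposition
  val plain = EditDistance.levenshtein("complie", "compile") // should be 2
  val swaps = EditDistance.levenshtein("complie", "compile", transpositions = true) // should be 1
}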

View File

@ -0,0 +1,68 @@
package sbt.internal.util
package complete
import java.io.File
import sbt.io.IO
/**
* These sources of examples are used in parsers for user input completion. An example of such a source is the
* [[sbt.internal.util.complete.FileExamples]] class, which provides a list of suggested files to the user as they press the
* TAB key in the console.
*/
trait ExampleSource {
/**
* @return a (possibly lazy) list of completion example strings. These strings are continuations of the user's input. The
* user's input is extended with calls to [[withAddedPrefix]].
*/
def apply(): Iterable[String]
/**
* @param addedPrefix a string that was just typed in by the user.
* @return a new source of only those examples that start with the string typed by the user so far (with the addition of
* the just-added prefix).
*/
def withAddedPrefix(addedPrefix: String): ExampleSource
}
/**
* A convenience example source that wraps any collection of strings into a source of examples.
* @param examples the examples that will be displayed to the user when they press the TAB key.
*/
sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSource {
override def withAddedPrefix(addedPrefix: String): ExampleSource =
FixedSetExamples(examplesWithRemovedPrefix(addedPrefix))
override def apply(): Iterable[String] = examples
private def examplesWithRemovedPrefix(prefix: String) = examples.collect {
case example if example startsWith prefix => example substring prefix.length
}
}
/**
* Provides path completion examples based on files in the base directory.
* @param base the directory within which this class will search for completion examples.
* @param prefix the part of the path already written by the user.
*/
class FileExamples(base: File, prefix: String = "") extends ExampleSource {
override def apply(): Stream[String] = files(base).map(_ substring prefix.length)
override def withAddedPrefix(addedPrefix: String): FileExamples =
new FileExamples(base, prefix + addedPrefix)
protected def files(directory: File): Stream[String] = {
val childPaths = IO.listFiles(directory).toStream
val prefixedDirectChildPaths = childPaths map { IO.relativize(base, _).get } filter {
_ startsWith prefix
}
val dirsToRecurseInto = childPaths filter { _.isDirectory } map { IO.relativize(base, _).get } filter {
dirStartsWithPrefix
}
prefixedDirectChildPaths append dirsToRecurseInto.flatMap(dir => files(new File(base, dir)))
}
private def dirStartsWithPrefix(relativizedPath: String): Boolean =
(relativizedPath startsWith prefix) || (prefix startsWith relativizedPath)
}
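// Minimal usage sketch (added for illustration, not part of the original sources):
// FixedSetExamples narrows its candidates as the user types. The object name is hypothetical.
private[complete] object ExampleSourceUsage {
  val commands: ExampleSource = FixedSetExamples(Set("compile", "console", "clean"))
  // once the user has typed "c", each remaining example is the continuation of that prefix
  val afterC: ExampleSource = commands.withAddedPrefix("c")
  // afterC() should be Set("ompile", "onsole", "lean")
}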

View File

@ -0,0 +1,56 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
package complete
import History.number
import java.io.File
final class History private (
val lines: IndexedSeq[String],
val path: Option[File],
error: String => Unit
) {
private def reversed = lines.reverse
def all: Seq[String] = lines
def size = lines.length
def !! : Option[String] = !-(1)
def apply(i: Int): Option[String] =
if (0 <= i && i < size) Some(lines(i)) else { sys.error("Invalid history index: " + i) }
def !(i: Int): Option[String] = apply(i)
def !(s: String): Option[String] =
number(s) match {
case Some(n) => if (n < 0) !-(-n) else apply(n)
case None => nonEmpty(s) { reversed.find(_.startsWith(s)) }
}
def !-(n: Int): Option[String] = apply(size - n - 1)
def !?(s: String): Option[String] = nonEmpty(s) { reversed.drop(1).find(_.contains(s)) }
private def nonEmpty[T](s: String)(act: => Option[T]): Option[T] =
if (s.isEmpty)
sys.error("No action specified to history command")
else
act
def list(historySize: Int, show: Int): Seq[String] =
lines.toList
.drop(scala.math.max(0, lines.size - historySize))
.zipWithIndex
.map { case (line, number) => " " + number + " " + line }
.takeRight(show max 1)
}
object History {
def apply(lines: Seq[String], path: Option[File], error: String => Unit): History =
new History(lines.toIndexedSeq, path, sys.error)
def number(s: String): Option[Int] =
try { Some(s.toInt) } catch { case e: NumberFormatException => None }
}
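// Minimal usage sketch (added for illustration, not part of the original sources):
// how the different history lookups resolve. The last line stands in for the history
// invocation itself, which the lookups skip. Names and values are hypothetical.
private[complete] object HistoryUsageExample {
  val h = History(Seq("clean", "compile", "test", "!!"), path = None, error = _ => ())
  // h.!!       should be Some("test")    (the command before the current one)
  // h ! 1      should be Some("compile") (by index, as shown by the list command)
  // h ! "cl"   should be Some("clean")   (most recent command starting with "cl")
  // h !? "omp" should be Some("compile") (most recent command containing "omp", excluding the current line)
}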

View File

@ -0,0 +1,83 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
package complete
import sbt.io.IO
object HistoryCommands {
val Start = "!"
// second characters
val Contains = "?"
val Last = "!"
val ListCommands = ":"
def ContainsFull = h(Contains)
def LastFull = h(Last)
def ListFull = h(ListCommands)
def ListN = ListFull + "n"
def ContainsString = ContainsFull + "string"
def StartsWithString = Start + "string"
def Previous = Start + "-n"
def Nth = Start + "n"
private def h(s: String) = Start + s
def plainCommands = Seq(ListFull, Start, LastFull, ContainsFull)
def descriptions = Seq(
LastFull -> "Execute the last command again",
ListFull -> "Show all previous commands",
ListN -> "Show the last n commands",
Nth -> ("Execute the command with index n, as shown by the " + ListFull + " command"),
Previous -> "Execute the nth command before this one",
StartsWithString -> "Execute the most recent command starting with 'string'",
ContainsString -> "Execute the most recent command containing 'string'"
)
def helpString =
"History commands:\n " + (descriptions
.map { case (c, d) => c + " " + d })
.mkString("\n ")
def printHelp(): Unit = println(helpString)
def printHistory(history: complete.History, historySize: Int, show: Int): Unit =
history.list(historySize, show).foreach(println)
import DefaultParsers._
val MaxLines = 500
lazy val num = token(NatBasic, "<integer>")
lazy val last = Last ^^^ { execute(_.!!) }
lazy val list = ListCommands ~> (num ?? Int.MaxValue) map { show => (h: History) =>
{ printHistory(h, MaxLines, show); Some(Nil) }
}
lazy val execStr = flag('?') ~ token(any.+.string, "<string>") map {
case (contains, str) =>
execute(h => if (contains) h !? str else h ! str)
}
lazy val execInt = flag('-') ~ num map {
case (neg, value) =>
execute(h => if (neg) h !- value else h ! value)
}
lazy val help = success((h: History) => { printHelp(); Some(Nil) })
def execute(f: History => Option[String]): History => Option[List[String]] = (h: History) => {
val command = f(h).filterNot(_.startsWith(Start))
val lines = h.lines.toArray
command.foreach(lines(lines.length - 1) = _)
h.path foreach { h =>
IO.writeLines(h, lines)
}
Some(command.toList)
}
val actionParser: Parser[complete.History => Option[List[String]]] =
Start ~> (help | last | execInt | list | execStr) // execStr must come last
}
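// Minimal usage sketch (added for illustration, not part of the original sources):
// the textual forms documented above, run through actionParser. The object name is hypothetical.
private[complete] object HistoryCommandsExample {
  // Right(f), where f(history) resolves "!!" to the command before the current one
  val rerunLast = Parser.parse("!!", HistoryCommands.actionParser)
  // Parser.parse("!:10", HistoryCommands.actionParser) yields a function that lists the last 10 commands
  // Parser.parse("!co", HistoryCommands.actionParser) selects the most recent command starting with "co"
}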

View File

@ -0,0 +1,178 @@
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt.internal.util
package complete
import jline.console.ConsoleReader
import jline.console.completer.{ Completer, CompletionHandler }
import scala.annotation.tailrec
import collection.JavaConversions
object JLineCompletion {
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
installCustomCompletor(reader)(parserAsCompletor(parser))
def installCustomCompletor(reader: ConsoleReader)(
complete: (String, Int) => (Seq[String], Seq[String])
): Unit =
installCustomCompletor(customCompletor(complete), reader)
def installCustomCompletor(
complete: (ConsoleReader, Int) => Boolean,
reader: ConsoleReader
): Unit = {
reader.removeCompleter(DummyCompletor)
reader.addCompleter(DummyCompletor)
reader.setCompletionHandler(new CustomHandler(complete))
}
private[this] final class CustomHandler(completeImpl: (ConsoleReader, Int) => Boolean)
extends CompletionHandler {
private[this] var previous: Option[(String, Int)] = None
private[this] var level: Int = 1
override def complete(
reader: ConsoleReader,
candidates: java.util.List[CharSequence],
position: Int
) = {
val current = Some(bufferSnapshot(reader))
level = if (current == previous) level + 1 else 1
previous = current
try completeImpl(reader, level)
catch {
case e: Exception =>
reader.print("\nException occurred while determining completions.")
e.printStackTrace()
false
}
}
}
// always provides dummy completions so that the custom completion handler gets called
// (ConsoleReader doesn't call the handler if there aren't any completions)
// the custom handler will then throw away the candidates and call the custom function
private[this] final object DummyCompletor extends Completer {
override def complete(
buffer: String,
cursor: Int,
candidates: java.util.List[CharSequence]
): Int = {
candidates.asInstanceOf[java.util.List[String]] add "dummy"
0
}
}
def parserAsCompletor(p: Parser[_]): (String, Int) => (Seq[String], Seq[String]) =
(str, level) => convertCompletions(Parser.completions(p, str, level))
def convertCompletions(c: Completions): (Seq[String], Seq[String]) = {
val cs = c.get
if (cs.isEmpty)
(Nil, "{invalid input}" :: Nil)
else
convertCompletions(cs)
}
def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) = {
val (insert, display) =
((Set.empty[String], Set.empty[String]) /: cs) {
case (t @ (insert, display), comp) =>
if (comp.isEmpty) t else (insert + comp.append, appendNonEmpty(display, comp.display))
}
(insert.toSeq, display.toSeq.sorted)
}
def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add
def customCompletor(
f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean =
(reader, level) => {
val success = complete(beforeCursor(reader), reader => f(reader, level), reader)
reader.flush()
success
}
def bufferSnapshot(reader: ConsoleReader): (String, Int) = {
val b = reader.getCursorBuffer
(b.buffer.toString, b.cursor)
}
def beforeCursor(reader: ConsoleReader): String = {
val b = reader.getCursorBuffer
b.buffer.substring(0, b.cursor)
}
// returns false if there was nothing to insert and nothing to display
def complete(
beforeCursor: String,
completions: String => (Seq[String], Seq[String]),
reader: ConsoleReader
): Boolean = {
val (insert, display) = completions(beforeCursor)
val common = commonPrefix(insert)
if (common.isEmpty)
if (display.isEmpty)
()
else
showCompletions(display, reader)
else
appendCompletion(common, reader)
!(common.isEmpty && display.isEmpty)
}
def appendCompletion(common: String, reader: ConsoleReader): Unit = {
reader.getCursorBuffer.write(common)
reader.redrawLine()
}
/**
* `display` is assumed to be the exact strings requested to be displayed.
* In particular, duplicates should have been removed already.
*/
def showCompletions(display: Seq[String], reader: ConsoleReader): Unit = {
printCompletions(display, reader)
reader.drawLine()
}
def printCompletions(cs: Seq[String], reader: ConsoleReader): Unit = {
val print = shouldPrint(cs, reader)
reader.println()
if (print) printLinesAndColumns(cs, reader)
}
def printLinesAndColumns(cs: Seq[String], reader: ConsoleReader): Unit = {
val (lines, columns) = cs partition hasNewline
for (line <- lines) {
reader.print(line)
if (line.charAt(line.length - 1) != '\n')
reader.println()
}
reader.printColumns(JavaConversions.seqAsJavaList(columns.map(_.trim)))
}
def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0
def shouldPrint(cs: Seq[String], reader: ConsoleReader): Boolean = {
val size = cs.size
(size <= reader.getAutoprintThreshold) ||
confirm("Display all %d possibilities? (y or n) ".format(size), 'y', 'n', reader)
}
def confirm(prompt: String, trueC: Char, falseC: Char, reader: ConsoleReader): Boolean = {
reader.println()
reader.print(prompt)
reader.flush()
reader.readCharacter(trueC, falseC) == trueC
}
def commonPrefix(s: Seq[String]): String = if (s.isEmpty) "" else s reduceLeft commonPrefix
def commonPrefix(a: String, b: String): String = {
val len = scala.math.min(a.length, b.length)
@tailrec def loop(i: Int): Int = if (i >= len) len else if (a(i) != b(i)) i else loop(i + 1)
a.substring(0, loop(0))
}
}
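// Minimal usage sketch (added for illustration, not part of the original sources):
// adapting a Parser into the (insert, display) pair consumed by the handler above.
// The object name is hypothetical.
private[complete] object JLineCompletionExample {
  val commands: Parser[String] =
    Parser.oneOf(Seq("compile", "console", "clean").map(s => Parser.literal(s)))
  val completor: (String, Int) => (Seq[String], Seq[String]) =
    JLineCompletion.parserAsCompletor(commands)
  // completor("co", 1)._1.toSet should be Set("mpile", "nsole"): the suffixes that can
  // still be appended once "co" has been typed ("clean" no longer matches).
}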

View File

@ -0,0 +1,955 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2010, 2011 Mark Harrah
*/
package sbt.internal.util
package complete
import Parser._
import sbt.internal.util.Types.{ left, right, some }
import sbt.internal.util.Util.{ makeList, separate }
/**
* A String parser that provides semi-automatic tab completion.
* A successful parse results in a value of type `T`.
* The methods in this trait are what must be implemented to define a new Parser implementation, but are not typically useful for common usage.
* Instead, most useful methods for combining smaller parsers into larger parsers are implicitly added by the [[RichParser]] type.
*/
sealed trait Parser[+T] {
def derive(i: Char): Parser[T]
def resultEmpty: Result[T]
def result: Option[T]
def completions(level: Int): Completions
def failure: Option[Failure]
def isTokenStart = false
def ifValid[S](p: => Parser[S]): Parser[S]
def valid: Boolean
}
sealed trait RichParser[A] {
/** Apply the original Parser and then apply `next` (in order). The result of both is provided as a pair. */
def ~[B](next: Parser[B]): Parser[(A, B)]
/** Apply the original Parser one or more times and provide the non-empty sequence of results.*/
def + : Parser[Seq[A]]
/** Apply the original Parser zero or more times and provide the (potentially empty) sequence of results.*/
def * : Parser[Seq[A]]
/** Apply the original Parser zero or one times, returning None if it was applied zero times or the result wrapped in Some if it was applied once.*/
def ? : Parser[Option[A]]
/** Apply either the original Parser or `b`.*/
def |[B >: A](b: Parser[B]): Parser[B]
/** Apply either the original Parser or `b`.*/
def ||[B](b: Parser[B]): Parser[Either[A, B]]
/** Apply the original Parser to the input and then apply `f` to the result.*/
def map[B](f: A => B): Parser[B]
/**
* Returns the original parser. This is useful for converting literals to Parsers.
* For example, `'c'.id` or `"asdf".id`
*/
def id: Parser[A]
/** Apply the original Parser, but provide `value` as the result if it succeeds. */
def ^^^[B](value: B): Parser[B]
/** Apply the original Parser, but provide `alt` as the result if it fails.*/
def ??[B >: A](alt: B): Parser[B]
/**
* Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of `next`.
* (The arrow points in the direction of the retained result.)
*/
def <~[B](b: Parser[B]): Parser[A]
/**
* Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of the original parser.
* (The arrow points in the direction of the retained result.)
*/
def ~>[B](b: Parser[B]): Parser[B]
/** Uses the specified message if the original Parser fails.*/
def !!!(msg: String): Parser[A]
/**
* If an exception is thrown by the original Parser,
* capture it and fail locally instead of allowing the exception to propagate up and terminate parsing.
*/
def failOnException: Parser[A]
/**
* Apply the original parser, but only succeed if `o` also succeeds.
* Note that `o` does not need to consume the same amount of input to satisfy this condition.
*/
def &(o: Parser[_]): Parser[A]
/** Explicitly defines the completions for the original Parser.*/
def examples(s: String*): Parser[A]
/** Explicitly defines the completions for the original Parser.*/
def examples(s: Set[String], check: Boolean = false): Parser[A]
/**
* @param exampleSource the source of examples when displaying completions to the user.
* @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can
* prevent lengthy pauses and a bad interactive user experience.
* @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the
* given parser). Invalid examples will be filtered out and only valid suggestions will
* be displayed.
* @return a new parser with a new source of completions.
*/
def examples(
exampleSource: ExampleSource,
maxNumberOfExamples: Int,
removeInvalidExamples: Boolean
): Parser[A]
/**
* @param exampleSource the source of examples when displaying completions to the user.
* @return a new parser with a new source of completions. It displays at most 25 completion examples and does not
* remove invalid examples.
*/
def examples(exampleSource: ExampleSource): Parser[A] =
examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false)
/** Converts a Parser returning a Char sequence to a Parser returning a String.*/
def string(implicit ev: A <:< Seq[Char]): Parser[String]
/**
* Produces a Parser that filters the original parser.
* If 'f' is not true when applied to the output of the original parser, the Parser returned by this method fails.
* The failure message is constructed by applying `msg` to the String that was successfully parsed by the original parser.
*/
def filter(f: A => Boolean, msg: String => String): Parser[A]
/** Applies the original parser, applies `f` to the result to get the next parser, then applies that parser and uses its result for the overall result. */
def flatMap[B](f: A => Parser[B]): Parser[B]
}
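// Minimal usage sketch (added for illustration, not part of the original sources):
// a few of the combinators documented above in use. The names below are hypothetical;
// Space and ID come from the standard Parsers trait.
private[complete] object RichParserExample {
  import DefaultParsers.{ ID, Space }
  // "set" followed by one or more space-separated identifiers
  val setCommand: Parser[Seq[String]] = "set" ~> (Space ~> ID).+
  // Parser.parse("set a b", setCommand) should yield Right(Seq("a", "b"))
  // Parser.parse("set", setCommand) should yield a Left with the failure message
}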
/** Contains Parser implementation helper methods not typically needed for using parsers. */
object Parser extends ParserMain {
sealed abstract class Result[+T] {
def isFailure: Boolean
def isValid: Boolean
def errors: Seq[String]
def or[B >: T](b: => Result[B]): Result[B]
def either[B](b: => Result[B]): Result[Either[T, B]]
def map[B](f: T => B): Result[B]
def flatMap[B](f: T => Result[B]): Result[B]
def &&(b: => Result[_]): Result[T]
def filter(f: T => Boolean, msg: => String): Result[T]
def seq[B](b: => Result[B]): Result[(T, B)] = app(b)((m, n) => (m, n))
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C]
def toEither: Either[() => Seq[String], T]
}
final case class Value[+T](value: T) extends Result[T] {
def isFailure = false
def isValid: Boolean = true
def errors = Nil
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C] = b match {
case fail: Failure => fail
case Value(bv) => Value(f(value, bv))
}
def &&(b: => Result[_]): Result[T] = b match { case f: Failure => f; case _ => this }
def or[B >: T](b: => Result[B]): Result[B] = this
def either[B](b: => Result[B]): Result[Either[T, B]] = Value(Left(value))
def map[B](f: T => B): Result[B] = Value(f(value))
def flatMap[B](f: T => Result[B]): Result[B] = f(value)
def filter(f: T => Boolean, msg: => String): Result[T] = if (f(value)) this else mkFailure(msg)
def toEither = Right(value)
}
final class Failure private[sbt] (mkErrors: => Seq[String], val definitive: Boolean)
extends Result[Nothing] {
lazy val errors: Seq[String] = mkErrors
def isFailure = true
def isValid = false
def map[B](f: Nothing => B) = this
def flatMap[B](f: Nothing => Result[B]) = this
def or[B](b: => Result[B]): Result[B] = b match {
case v: Value[B] => v
case f: Failure => if (definitive) this else this ++ f
}
def either[B](b: => Result[B]): Result[Either[Nothing, B]] = b match {
case Value(v) => Value(Right(v))
case f: Failure => if (definitive) this else this ++ f
}
def filter(f: Nothing => Boolean, msg: => String) = this
def app[B, C](b: => Result[B])(f: (Nothing, B) => C): Result[C] = this
def &&(b: => Result[_]) = this
def toEither = Left(() => errors)
private[sbt] def ++(f: Failure) = mkFailures(errors ++ f.errors)
}
def mkFailures(errors: => Seq[String], definitive: Boolean = false): Failure =
new Failure(errors.distinct, definitive)
def mkFailure(error: => String, definitive: Boolean = false): Failure =
new Failure(error :: Nil, definitive)
def tuple[A, B](a: Option[A], b: Option[B]): Option[(A, B)] =
(a, b) match { case (Some(av), Some(bv)) => Some((av, bv)); case _ => None }
def mapParser[A, B](a: Parser[A], f: A => B): Parser[B] =
a.ifValid {
a.result match {
case Some(av) => success(f(av))
case None => new MapParser(a, f)
}
}
def bindParser[A, B](a: Parser[A], f: A => Parser[B]): Parser[B] =
a.ifValid {
a.result match {
case Some(av) => f(av)
case None => new BindParser(a, f)
}
}
def filterParser[T](
a: Parser[T],
f: T => Boolean,
seen: String,
msg: String => String
): Parser[T] =
a.ifValid {
a.result match {
case Some(av) if f(av) => success(av)
case _ => new Filter(a, f, seen, msg)
}
}
def seqParser[A, B](a: Parser[A], b: Parser[B]): Parser[(A, B)] =
a.ifValid {
b.ifValid {
(a.result, b.result) match {
case (Some(av), Some(bv)) => success((av, bv))
case (Some(av), None) => b map (bv => (av, bv))
case (None, Some(bv)) => a map (av => (av, bv))
case (None, None) => new SeqParser(a, b)
}
}
}
def choiceParser[A, B](a: Parser[A], b: Parser[B]): Parser[Either[A, B]] =
if (a.valid)
if (b.valid) new HetParser(a, b) else a.map(left.fn)
else
b.map(right.fn)
def opt[T](a: Parser[T]): Parser[Option[T]] =
if (a.valid) new Optional(a) else success(None)
def onFailure[T](delegate: Parser[T], msg: String): Parser[T] =
if (delegate.valid) new OnFailure(delegate, msg) else failure(msg)
def trapAndFail[T](delegate: Parser[T]): Parser[T] =
delegate.ifValid(new TrapAndFail(delegate))
def zeroOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 0, Infinite)
def oneOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 1, Infinite)
def repeat[T](p: Parser[T], min: Int = 0, max: UpperBound = Infinite): Parser[Seq[T]] =
repeat(None, p, min, max, Nil)
private[complete] def repeat[T](
partial: Option[Parser[T]],
repeated: Parser[T],
min: Int,
max: UpperBound,
revAcc: List[T]
): Parser[Seq[T]] = {
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
assume(max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
repeated match {
case i: Invalid if min == 0 => invalidButOptional
case i: Invalid => i
case _ =>
repeated.result match {
case Some(value) =>
success(revAcc reverse_::: value :: Nil) // revAcc should be Nil here
case None =>
if (max.isZero) success(revAcc.reverse)
else new Repeat(partial, repeated, min, max, revAcc)
}
}
partial match {
case Some(part) =>
part.ifValid {
part.result match {
case Some(value) => repeat(None, repeated, min, max, value :: revAcc)
case None => checkRepeated(part.map(lv => (lv :: revAcc).reverse))
}
}
case None => checkRepeated(success(Nil))
}
}
def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid(b.ifValid(new And(a, b)))
}
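// Minimal sketch (added for illustration, not part of the original sources): how Result
// values combine; this is the machinery SeqParser, HomParser and friends rely on below.
// The object name is hypothetical.
private[complete] object ResultExample {
  val ok: Result[Int] = Value(1)
  val bad: Result[Int] = mkFailure("boom")
  // (ok seq Value(2)) should be Value((1, 2))
  // (ok or bad)  should be Value(1)
  // (bad or ok)  should be Value(1)  (a non-definitive Failure falls back to the alternative)
}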
trait ParserMain {
/** Provides combinators for Parsers.*/
implicit def richParser[A](a: Parser[A]): RichParser[A] = new RichParser[A] {
def ~[B](b: Parser[B]) = seqParser(a, b)
def ||[B](b: Parser[B]) = choiceParser(a, b)
def |[B >: A](b: Parser[B]) = homParser(a, b)
def ? = opt(a)
def * = zeroOrMore(a)
def + = oneOrMore(a)
def map[B](f: A => B) = mapParser(a, f)
def id = a
def ^^^[B](value: B): Parser[B] = a map (_ => value)
def ??[B >: A](alt: B): Parser[B] = a.? map { _ getOrElse alt }
def <~[B](b: Parser[B]): Parser[A] = (a ~ b) map { case av ~ _ => av }
def ~>[B](b: Parser[B]): Parser[B] = (a ~ b) map { case _ ~ bv => bv }
def !!!(msg: String): Parser[A] = onFailure(a, msg)
def failOnException: Parser[A] = trapAndFail(a)
def unary_- = not(a, "Unexpected: " + a)
def &(o: Parser[_]) = and(a, o)
def -(o: Parser[_]) = and(a, not(o, "Unexpected: " + o))
def examples(s: String*): Parser[A] = examples(s.toSet)
def examples(s: Set[String], check: Boolean = false): Parser[A] =
examples(new FixedSetExamples(s), s.size, check)
def examples(
s: ExampleSource,
maxNumberOfExamples: Int,
removeInvalidExamples: Boolean
): Parser[A] =
Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples)
def filter(f: A => Boolean, msg: String => String): Parser[A] = filterParser(a, f, "", msg)
def string(implicit ev: A <:< Seq[Char]): Parser[String] = map(_.mkString)
def flatMap[B](f: A => Parser[B]) = bindParser(a, f)
}
implicit def literalRichCharParser(c: Char): RichParser[Char] = richParser(c)
implicit def literalRichStringParser(s: String): RichParser[String] = richParser(s)
/**
* Construct a parser that is valid, but has no valid result. This is used as a way
* to provide a definitive Failure when a parser doesn't match empty input. For example,
* in `softFailure(...) | p`, if `p` doesn't match the empty sequence, the failure will come
* from the Parser constructed by the `softFailure` method.
*/
private[sbt] def softFailure(msg: => String, definitive: Boolean = false): Parser[Nothing] =
SoftInvalid(mkFailures(msg :: Nil, definitive))
/**
* Defines a parser that always fails on any input with messages `msgs`.
* If `definitive` is `true`, any failures by later alternatives are discarded.
*/
def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] =
Invalid(mkFailures(msgs, definitive))
/**
* Defines a parser that always fails on any input with message `msg`.
* If `definitive` is `true`, any failures by later alternatives are discarded.
*/
def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] =
invalid(msg :: Nil, definitive)
/** Defines a parser that always succeeds on empty input with the result `value`.*/
def success[T](value: T): Parser[T] = new ValidParser[T] {
override def result = Some(value)
def resultEmpty = Value(value)
def derive(c: Char) = Parser.failure("Expected end of input.")
def completions(level: Int) = Completions.empty
override def toString = "success(" + value + ")"
}
/** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/
implicit def range(r: collection.immutable.NumericRange[Char]): Parser[Char] =
charClass(r contains _).examples(r.map(_.toString): _*)
/** Defines a Parser that parses a single character only if it is contained in `legal`.*/
def chars(legal: String): Parser[Char] = {
val set = legal.toSet
charClass(set, "character in '" + legal + "'") examples (set.map(_.toString))
}
/**
* Defines a Parser that parses a single character only if the predicate `f` returns true for that character.
* If this parser fails, `label` is used as the failure message.
*/
def charClass(f: Char => Boolean, label: String = "<unspecified>"): Parser[Char] =
new CharacterClass(f, label)
/** Presents a single Char `ch` as a Parser that only parses that exact character. */
implicit def literal(ch: Char): Parser[Char] = new ValidParser[Char] {
def result = None
def resultEmpty = mkFailure("Expected '" + ch + "'")
def derive(c: Char) = if (c == ch) success(ch) else new Invalid(resultEmpty)
def completions(level: Int) = Completions.single(Completion.suggestion(ch.toString))
override def toString = "'" + ch + "'"
}
/** Presents a literal String `s` as a Parser that only parses that exact text and provides it as the result.*/
implicit def literal(s: String): Parser[String] = stringLiteral(s, 0)
/** See [[unapply]]. */
object ~ {
/** Convenience for destructuring a tuple that mirrors the `~` combinator.*/
def unapply[A, B](t: (A, B)): Some[(A, B)] = Some(t)
}
/** Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If unsuccessful, an error message is provided in `Left`.*/
def parse[T](str: String, parser: Parser[T]): Either[String, T] =
Parser.result(parser, str).left.map { failures =>
val (msgs, pos) = failures()
ProcessError(str, msgs, pos)
}
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str`.
* If `completions` is true, the available completions for the input are displayed.
* Otherwise, the result of parsing is printed using the result's `toString` method.
* If parsing fails, the error message is displayed.
*
* See also [[sampleParse]] and [[sampleCompletions]].
*/
def sample(str: String, parser: Parser[_], completions: Boolean = false): Unit =
if (completions) sampleCompletions(str, parser) else sampleParse(str, parser)
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str` and the result of parsing is printed using the result's `toString` method.
* If parsing fails, the error message is displayed.
*/
def sampleParse(str: String, parser: Parser[_]): Unit =
parse(str, parser) match {
case Left(msg) => println(msg)
case Right(v) => println(v)
}
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str` and the available completions are displayed on separate lines.
* If parsing fails, the error message is displayed.
*/
def sampleCompletions(str: String, parser: Parser[_], level: Int = 1): Unit =
Parser.completions(parser, str, level).get foreach println
// intended to be temporary pending proper error feedback
def result[T](p: Parser[T], s: String): Either[() => (Seq[String], Int), T] = {
def loop(i: Int, a: Parser[T]): Either[() => (Seq[String], Int), T] =
a match {
case Invalid(f) => Left(() => (f.errors, i))
case _ =>
val ci = i + 1
if (ci >= s.length)
a.resultEmpty.toEither.left.map { msgs0 => () =>
val msgs = msgs0()
val nonEmpty = if (msgs.isEmpty) "Unexpected end of input" :: Nil else msgs
(nonEmpty, ci)
} else
loop(ci, a derive s(ci))
}
loop(-1, p)
}
/** Applies parser `p` to input `s`. */
def apply[T](p: Parser[T])(s: String): Parser[T] =
(p /: s)(derive1)
/** Applies parser `p` to a single character of input. */
def derive1[T](p: Parser[T], c: Char): Parser[T] =
if (p.valid) p.derive(c) else p
/**
* Applies parser `p` to input `s` and returns the completions at verbosity `level`.
* The interpretation of `level` is up to parser definitions, but 0 is the default by convention,
* with increasing positive numbers corresponding to increasing verbosity. Typically no more than
* a few levels are defined.
*/
def completions(p: Parser[_], s: String, level: Int): Completions =
// The x Completions.empty removes any trailing token completions where append.isEmpty
apply(p)(s).completions(level) x Completions.empty
def examples[A](a: Parser[A], completions: Set[String], check: Boolean = false): Parser[A] =
examples(a, new FixedSetExamples(completions), completions.size, check)
/**
* @param a the parser to decorate with a source of examples. All validation and parsing is delegated to this parser;
* only [[Parser.completions]] is modified.
* @param completions the source of examples when displaying completions to the user.
* @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can
* prevent lengthy pauses and a bad interactive user experience.
* @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the given parser). An
* exception is thrown if the example source contains no valid completion suggestions.
* @tparam A the type of values that are returned by the parser.
* @return a new parser with a new source of completions.
*/
def examples[A](
a: Parser[A],
completions: ExampleSource,
maxNumberOfExamples: Int,
removeInvalidExamples: Boolean
): Parser[A] =
if (a.valid) {
a.result match {
case Some(av) => success(av)
case None =>
new ParserWithExamples(a, completions, maxNumberOfExamples, removeInvalidExamples)
}
} else a
def matched(
t: Parser[_],
seen: Vector[Char] = Vector.empty,
partial: Boolean = false
): Parser[String] =
t match {
case i: Invalid => if (partial && seen.nonEmpty) success(seen.mkString) else i
case _ =>
if (t.result.isEmpty)
new MatchedString(t, seen, partial)
else
success(seen.mkString)
}
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, the completions provided by the delegate `t` or a later derivative are appended to
* the prefix String already seen by this parser.
*/
def token[T](t: Parser[T]): Parser[T] = token(t, TokenCompletions.default)
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, no completions are returned if `hide` returns true for the current tab completion level.
* Otherwise, the completions provided by the delegate `t` or a later derivative are appended to the prefix String already seen by this parser.
*/
def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] =
token(t, TokenCompletions.default.hideWhen(hide))
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `description` is displayed for suggestions and no completions are ever performed.
*/
def token[T](t: Parser[T], description: String): Parser[T] =
token(t, TokenCompletions.displayOnly(description))
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `display` is used as the printed suggestion, but the completions from the delegate
* parser `t` are used to complete if unambiguous.
*/
def tokenDisplay[T](t: Parser[T], display: String): Parser[T] =
token(t, TokenCompletions.overrideDisplay(display))
def token[T](t: Parser[T], complete: TokenCompletions): Parser[T] =
mkToken(t, "", complete)
private[sbt] def mkToken[T](t: Parser[T], seen: String, complete: TokenCompletions): Parser[T] =
if (t.valid && !t.isTokenStart)
if (t.result.isEmpty) new TokenStart(t, seen, complete) else t
else
t
def homParser[A](a: Parser[A], b: Parser[A]): Parser[A] = (a, b) match {
case (Invalid(af), Invalid(bf)) => Invalid(af ++ bf)
case (Invalid(_), bv) => bv
case (av, Invalid(_)) => av
case (av, bv) => new HomParser(a, b)
}
def not(p: Parser[_], failMessage: String): Parser[Unit] = p.result match {
case None => new Not(p, failMessage)
case Some(_) => failure(failMessage)
}
def oneOf[T](p: Seq[Parser[T]]): Parser[T] = p.reduceLeft(_ | _)
def seq[T](p: Seq[Parser[T]]): Parser[Seq[T]] = seq0(p, Nil)
def seq0[T](p: Seq[Parser[T]], errors: => Seq[String]): Parser[Seq[T]] = {
val (newErrors, valid) = separate(p) {
case Invalid(f) => Left(f.errors _); case ok => Right(ok)
}
def combinedErrors = errors ++ newErrors.flatMap(_())
if (valid.isEmpty) invalid(combinedErrors) else new ParserSeq(valid, combinedErrors)
}
def stringLiteral(s: String, start: Int): Parser[String] = {
val len = s.length
if (len == 0) sys.error("String literal cannot be empty")
else if (start >= len) success(s)
else new StringLiteral(s, start)
}
}
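// Minimal usage sketch (added for illustration, not part of the original sources):
// querying the completions of a parser directly, which is what the JLine integration
// does when the user presses TAB. The object name is hypothetical.
private[complete] object ParserCompletionsExample {
  val commands: Parser[String] = token("compile".id) | token("console".id) | token("clean".id)
  // completions(commands, "co", 0).get should contain the remaining suffixes for
  // "compile" and "console"; "clean" is no longer reachable once "co" has been typed.
}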
sealed trait ValidParser[T] extends Parser[T] {
final def valid = true
final def failure = None
final def ifValid[S](p: => Parser[S]): Parser[S] = p
}
private final case class Invalid(fail: Failure) extends Parser[Nothing] {
def failure = Some(fail)
def result = None
def resultEmpty = fail
def derive(c: Char) = sys.error("Invalid.")
def completions(level: Int) = Completions.nil
override def toString = fail.errors.mkString("; ")
def valid = false
def ifValid[S](p: => Parser[S]): Parser[S] = this
}
private final case class SoftInvalid(fail: Failure) extends ValidParser[Nothing] {
def result = None
def resultEmpty = fail
def derive(c: Char) = Invalid(fail)
def completions(level: Int) = Completions.nil
override def toString = fail.errors.mkString("; ")
}
private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] {
def result = try { a.result } catch { case e: Exception => None }
def resultEmpty = try { a.resultEmpty } catch { case e: Exception => fail(e) }
def derive(c: Char) = try { trapAndFail(a derive c) } catch {
case e: Exception => Invalid(fail(e))
}
def completions(level: Int) = try { a.completions(level) } catch {
case e: Exception => Completions.nil
}
override def toString = "trap(" + a + ")"
override def isTokenStart = a.isTokenStart
private[this] def fail(e: Exception): Failure = mkFailure(e.toString)
}
private final class OnFailure[A](a: Parser[A], message: String) extends ValidParser[A] {
def result = a.result
def resultEmpty = a.resultEmpty match {
case f: Failure => mkFailure(message); case v: Value[A] => v
}
def derive(c: Char) = onFailure(a derive c, message)
def completions(level: Int) = a.completions(level)
override def toString = "(" + a + " !!! \"" + message + "\" )"
override def isTokenStart = a.isTokenStart
}
private final class SeqParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[(A, B)] {
lazy val result = tuple(a.result, b.result)
lazy val resultEmpty = a.resultEmpty seq b.resultEmpty
def derive(c: Char) = {
val common = a.derive(c) ~ b
a.resultEmpty match {
case Value(av) => common | b.derive(c).map(br => (av, br))
case _: Failure => common
}
}
def completions(level: Int) = a.completions(level) x b.completions(level)
override def toString = "(" + a + " ~ " + b + ")"
}
private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser[A] {
lazy val result = tuple(a.result, b.result) map (_._1)
def derive(c: Char) = (a derive c) | (b derive c)
lazy val resultEmpty = a.resultEmpty or b.resultEmpty
def completions(level: Int) = a.completions(level) ++ b.completions(level)
override def toString = "(" + a + " | " + b + ")"
}
private final class HetParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[Either[A, B]] {
lazy val result = tuple(a.result, b.result) map { case (a, b) => Left(a) }
def derive(c: Char) = (a derive c) || (b derive c)
lazy val resultEmpty = a.resultEmpty either b.resultEmpty
def completions(level: Int) = a.completions(level) ++ b.completions(level)
override def toString = "(" + a + " || " + b + ")"
}
private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String])
extends ValidParser[Seq[T]] {
assert(a.nonEmpty)
lazy val resultEmpty: Result[Seq[T]] = {
val res = a.map(_.resultEmpty)
val (failures, values) = separate(res)(_.toEither)
// if(failures.isEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
if (values.nonEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors)
}
def result = {
val success = a.flatMap(_.result)
if (success.length == a.length) Some(success) else None
}
def completions(level: Int) = a.map(_.completions(level)).reduceLeft(_ ++ _)
def derive(c: Char) = seq0(a.map(_ derive c), errors)
override def toString = "seq(" + a + ")"
}
private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends ValidParser[B] {
lazy val result = a.result flatMap (av => f(av).result)
lazy val resultEmpty = a.resultEmpty flatMap (av => f(av).resultEmpty)
def completions(level: Int) =
a.completions(level) flatMap { c =>
apply(a)(c.append).resultEmpty match {
case _: Failure => Completions.strict(Set.empty + c)
case Value(av) => c x f(av).completions(level)
}
}
def derive(c: Char) = {
val common = a derive c flatMap f
a.resultEmpty match {
case Value(av) => common | derive1(f(av), c)
case _: Failure => common
}
}
override def isTokenStart = a.isTokenStart
override def toString = "bind(" + a + ")"
}
private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser[B] {
lazy val result = a.result map f
lazy val resultEmpty = a.resultEmpty map f
def derive(c: Char) = (a derive c) map f
def completions(level: Int) = a.completions(level)
override def isTokenStart = a.isTokenStart
override def toString = "map(" + a + ")"
}
private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg: String => String)
extends ValidParser[T] {
def filterResult(r: Result[T]) = r.filter(f, msg(seen))
lazy val result = p.result filter f
lazy val resultEmpty = filterResult(p.resultEmpty)
def derive(c: Char) = filterParser(p derive c, f, seen + c, msg)
def completions(level: Int) = p.completions(level) filterS { s =>
filterResult(apply(p)(s).resultEmpty).isValid
}
override def toString = "filter(" + p + ")"
override def isTokenStart = p.isTokenStart
}
private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean)
extends ValidParser[String] {
lazy val seen = seenV.mkString
def derive(c: Char) = matched(delegate derive c, seenV :+ c, partial)
def completions(level: Int) = delegate.completions(level)
def result = if (delegate.result.isDefined) Some(seen) else None
def resultEmpty = delegate.resultEmpty match {
case f: Failure if !partial => f; case _ => Value(seen)
}
override def isTokenStart = delegate.isTokenStart
override def toString = "matched(" + partial + ", " + seen + ", " + delegate + ")"
}
private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions)
extends ValidParser[T] {
def derive(c: Char) = mkToken(delegate derive c, seen + c, complete)
def completions(level: Int) = complete match {
case dc: TokenCompletions.Delegating =>
dc.completions(seen, level, delegate.completions(level))
case fc: TokenCompletions.Fixed => fc.completions(seen, level)
}
def result = delegate.result
def resultEmpty = delegate.resultEmpty
override def isTokenStart = true
override def toString = "token('" + complete + ", " + delegate + ")"
}
private final class And[T](a: Parser[T], b: Parser[_]) extends ValidParser[T] {
lazy val result = tuple(a.result, b.result) map { _._1 }
def derive(c: Char) = (a derive c) & (b derive c)
def completions(level: Int) = a.completions(level).filterS(s => apply(b)(s).resultEmpty.isValid)
lazy val resultEmpty = a.resultEmpty && b.resultEmpty
override def toString = "(%s) && (%s)".format(a, b)
}
private final class Not(delegate: Parser[_], failMessage: String) extends ValidParser[Unit] {
def derive(c: Char) = if (delegate.valid) not(delegate derive c, failMessage) else this
def completions(level: Int) = Completions.empty
def result = None
lazy val resultEmpty = delegate.resultEmpty match {
case f: Failure => Value(())
case v: Value[_] => mkFailure(failMessage)
}
override def toString = " -(%s)".format(delegate)
}
/**
* This class wraps an existing parser (the delegate), and replaces the delegate's completions with examples from
* the given example source.
*
* This class asks the example source for a limited number of examples (to prevent lengthy and expensive
* computations and large amounts of allocated data). It then passes these examples on to the UI.
*
* @param delegate the parser to decorate with completion examples (i.e., completion of user input).
* @param exampleSource the source from which this class will take examples (potentially filter them with the delegate
* parser), and pass them to the UI.
* @param maxNumberOfExamples the maximum number of completions to read from the example source and pass to the UI. This
* limit prevents lengthy example generation and allocation of large amounts of memory.
* @param removeInvalidExamples indicates whether to remove examples that are deemed invalid by the delegate parser.
* @tparam T the type of value produced by the parser.
*/
private final class ParserWithExamples[T](
delegate: Parser[T],
exampleSource: ExampleSource,
maxNumberOfExamples: Int,
removeInvalidExamples: Boolean
) extends ValidParser[T] {
def derive(c: Char) =
examples(delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples)
def result = delegate.result
lazy val resultEmpty = delegate.resultEmpty
def completions(level: Int) = {
if (exampleSource().isEmpty)
if (resultEmpty.isValid) Completions.nil else Completions.empty
else {
val examplesBasedOnTheResult = filteredExamples.take(maxNumberOfExamples).toSet
Completions(examplesBasedOnTheResult.map(ex => Completion.suggestion(ex)))
}
}
override def toString = "examples(" + delegate + ", " + exampleSource().take(2).toList + ")"
private def filteredExamples: Iterable[String] = {
if (removeInvalidExamples)
exampleSource().filter(isExampleValid)
else
exampleSource()
}
private def isExampleValid(example: String): Boolean = {
apply(delegate)(example).resultEmpty.isValid
}
}
private final class StringLiteral(str: String, start: Int) extends ValidParser[String] {
assert(0 <= start && start < str.length)
def failMsg = "Expected '" + str + "'"
def resultEmpty = mkFailure(failMsg)
def result = None
def derive(c: Char) =
if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(resultEmpty)
def completions(level: Int) = Completions.single(Completion.suggestion(str.substring(start)))
override def toString = '"' + str + '"'
}
private final class CharacterClass(f: Char => Boolean, label: String) extends ValidParser[Char] {
def result = None
def resultEmpty = mkFailure("Expected " + label)
def derive(c: Char) = if (f(c)) success(c) else Invalid(resultEmpty)
def completions(level: Int) = Completions.empty
override def toString = "class(" + label + ")"
}
private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[T]] {
def result = delegate.result map some.fn
def resultEmpty = Value(None)
def derive(c: Char) = (delegate derive c).map(some.fn)
def completions(level: Int) = Completion.empty +: delegate.completions(level)
override def toString = delegate.toString + "?"
}
private final class Repeat[T](
partial: Option[Parser[T]],
repeated: Parser[T],
min: Int,
max: UpperBound,
accumulatedReverse: List[T]
) extends ValidParser[Seq[T]] {
assume(0 <= min, "Minimum occurrences must be non-negative")
assume(max >= min, "Minimum occurrences must be less than or equal to the maximum occurrences")
def derive(c: Char) =
partial match {
case Some(part) =>
val partD = repeat(Some(part derive c), repeated, min, max, accumulatedReverse)
part.resultEmpty match {
case Value(pv) => partD | repeatDerive(c, pv :: accumulatedReverse)
case _: Failure => partD
}
case None => repeatDerive(c, accumulatedReverse)
}
def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] =
repeat(Some(repeated derive c), repeated, scala.math.max(0, min - 1), max.decrement, accRev)
def completions(level: Int) = {
def pow(comp: Completions, exp: Completions, n: Int): Completions =
if (n == 1) comp else pow(comp x exp, exp, n - 1)
val repC = repeated.completions(level)
val fin = if (min == 0) Completion.empty +: repC else pow(repC, repC, min)
partial match {
case Some(p) => p.completions(level) x fin
case None => fin
}
}
def result = None
lazy val resultEmpty: Result[Seq[T]] = {
val partialAccumulatedOption =
partial match {
case None => Value(accumulatedReverse)
case Some(partialPattern) => partialPattern.resultEmpty.map(_ :: accumulatedReverse)
}
(partialAccumulatedOption app repeatedParseEmpty)(_ reverse_::: _)
}
private def repeatedParseEmpty: Result[List[T]] = {
if (min == 0)
Value(Nil)
else
// forced determinism
for (value <- repeated.resultEmpty) yield makeList(min, value)
}
override def toString = "repeat(" + min + "," + max + "," + partial + "," + repeated + ")"
}

View File

@ -0,0 +1,305 @@
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt.internal.util
package complete
import Parser._
import java.io.File
import java.net.URI
import java.lang.Character.{
getType,
MATH_SYMBOL,
OTHER_SYMBOL,
DASH_PUNCTUATION,
OTHER_PUNCTUATION,
MODIFIER_SYMBOL,
CURRENCY_SYMBOL
}
/** Provides standard implementations of commonly useful [[Parser]]s. */
trait Parsers {
/** Matches the end of input, providing no useful result on success. */
lazy val EOF = not(any, "Expected EOF")
/** Parses any single character and provides that character as the result. */
lazy val any: Parser[Char] = charClass(_ => true, "any character")
/** Set that contains each digit in a String representation.*/
lazy val DigitSet = Set("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
/** Parses any single digit and provides that digit as a Char as the result.*/
lazy val Digit = charClass(_.isDigit, "digit") examples DigitSet
/** Set containing Chars for hexadecimal digits 0-9 and A-F (but not a-f). */
lazy val HexDigitSet =
Set('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F')
/** Parses a single hexadecimal digit (0-9, a-f, A-F). */
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(
_.toString)
/** Parses a single letter, according to Char.isLetter, into a Char. */
lazy val Letter = charClass(_.isLetter, "letter")
/** Parses the first Char in an sbt identifier, which must be a [[Letter]].*/
def IDStart = Letter
/** Parses an identifier Char other than the first character. This includes letters, digits, dash `-`, and underscore `_`.*/
lazy val IDChar = charClass(isIDChar, "ID character")
/** Parses an identifier String, which must start with [[IDStart]] and contain zero or more [[IDChar]]s after that. */
lazy val ID = identifier(IDStart, IDChar)
/** Parses a single operator Char, as allowed by [[isOpChar]]. */
lazy val OpChar = charClass(isOpChar, "symbol")
/** Parses a non-empty operator String, which consists only of characters allowed by [[OpChar]]. */
lazy val Op = OpChar.+.string
/** Parses either an operator String defined by [[Op]] or a non-symbolic identifier defined by [[ID]]. */
lazy val OpOrID = ID | Op
/** Parses a single, non-symbolic Scala identifier Char. Valid characters are letters, digits, and the underscore character `_`. */
lazy val ScalaIDChar = charClass(isScalaIDChar, "Scala identifier character")
/** Parses a non-symbolic Scala-like identifier. The identifier must start with [[IDStart]] and contain zero or more [[ScalaIDChar]]s after that.*/
lazy val ScalaID = identifier(IDStart, ScalaIDChar)
/** Parses a String that starts with `start` and is followed by zero or more characters parsed by `rep`.*/
def identifier(start: Parser[Char], rep: Parser[Char]): Parser[String] =
start ~ rep.* map { case x ~ xs => (x +: xs).mkString }
def opOrIDSpaced(s: String): Parser[Char] =
if (DefaultParsers.matches(ID, s))
OpChar | SpaceClass
else if (DefaultParsers.matches(Op, s))
IDChar | SpaceClass
else
any
/** Returns true if `c` is an operator character. */
def isOpChar(c: Char) = !isDelimiter(c) && isOpType(getType(c))
def isOpType(cat: Int) = cat match {
case MATH_SYMBOL | OTHER_SYMBOL | DASH_PUNCTUATION | OTHER_PUNCTUATION | MODIFIER_SYMBOL |
CURRENCY_SYMBOL =>
true; case _ => false
}
/** Returns true if `c` is a dash `-`, a letter, digit, or an underscore `_`. */
def isIDChar(c: Char) = isScalaIDChar(c) || c == '-'
/** Returns true if `c` is a letter, digit, or an underscore `_`. */
def isScalaIDChar(c: Char) = c.isLetterOrDigit || c == '_'
def isDelimiter(c: Char) = c match {
case '`' | '\'' | '\"' | /*';' | */ ',' | '.' => true; case _ => false
}
/** Matches a single character that is not a whitespace character. */
lazy val NotSpaceClass = charClass(!_.isWhitespace, "non-whitespace character")
/** Matches a single whitespace character, as determined by Char.isWhitespace.*/
lazy val SpaceClass = charClass(_.isWhitespace, "whitespace character")
/** Matches a non-empty String consisting of non-whitespace characters. */
lazy val NotSpace = NotSpaceClass.+.string
/** Matches a possibly empty String consisting of non-whitespace characters. */
lazy val OptNotSpace = NotSpaceClass.*.string
/**
* Matches a non-empty String consisting of whitespace characters.
* The suggested tab completion is a single, constant space character.
*/
lazy val Space = SpaceClass.+.examples(" ")
/**
* Matches a possibly empty String consisting of whitespace characters.
* The suggested tab completion is a single, constant space character.
*/
lazy val OptSpace = SpaceClass.*.examples(" ")
/** Parses a non-empty String that contains only valid URI characters, as defined by [[URIChar]].*/
lazy val URIClass = URIChar.+.string !!! "Invalid URI"
/** Triple-quotes, as used for verbatim quoting.*/
lazy val VerbatimDQuotes = "\"\"\""
/** Double quote character. */
lazy val DQuoteChar = '\"'
/** Backslash character. */
lazy val BackslashChar = '\\'
/** Matches a single double quote. */
lazy val DQuoteClass = charClass(_ == DQuoteChar, "double-quote character")
/** Matches any character except a double quote or whitespace. */
lazy val NotDQuoteSpaceClass =
charClass({ c: Char =>
(c != DQuoteChar) && !c.isWhitespace
}, "non-double-quote-space character")
/** Matches any character except a double quote or backslash. */
lazy val NotDQuoteBackslashClass =
charClass({ c: Char =>
(c != DQuoteChar) && (c != BackslashChar)
}, "non-double-quote-backslash character")
/** Matches a single character that is valid somewhere in a URI. */
lazy val URIChar = charClass(alphanum) | chars("_-!.~'()*,;:$&+=?/[]@%#")
/** Returns true if `c` is an ASCII letter or digit. */
def alphanum(c: Char) =
('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9')
/**
* @param base the directory used for completion proposals (when the user presses the TAB key). Only paths under this
* directory will be proposed.
* @return the file that was parsed from the input string. The returned path may or may not exist.
*/
def fileParser(base: File): Parser[File] =
OptSpace ~> StringBasic
.examples(new FileExamples(base))
.map(new File(_))
/** Parses a port number. Currently, this accepts any integer and presents a tab completion suggestion of `<port>`. */
lazy val Port = token(IntBasic, "<port>")
/** Parses a signed integer. */
lazy val IntBasic = mapOrFail('-'.? ~ Digit.+)(Function.tupled(toInt))
/** Parses an unsigned integer. */
lazy val NatBasic = mapOrFail(Digit.+)(_.mkString.toInt)
private[this] def toInt(neg: Option[Char], digits: Seq[Char]): Int =
(neg.toSeq ++ digits).mkString.toInt
/** Parses the lower-case values `true` and `false` into their respective Boolean values. */
lazy val Bool = ("true" ^^^ true) | ("false" ^^^ false)
/**
* Parses a potentially quoted String value. The value may be verbatim quoted ([[StringVerbatim]]),
* quoted with interpreted escapes ([[StringEscapable]]), or unquoted ([[NotQuoted]]).
*/
lazy val StringBasic = StringVerbatim | StringEscapable | NotQuoted
/**
* Parses a verbatim quoted String value, discarding the quotes in the result. This kind of quoted text starts with triple quotes `"""`
* and ends at the next triple quotes and may contain any character in between.
*/
lazy val StringVerbatim: Parser[String] = VerbatimDQuotes ~>
any.+.string.filter(!_.contains(VerbatimDQuotes), _ => "Invalid verbatim string") <~
VerbatimDQuotes
/**
* Parses a string value, interpreting escapes and discarding the surrounding quotes in the result.
* See [[EscapeSequence]] for supported escapes.
*/
lazy val StringEscapable: Parser[String] =
(DQuoteChar ~> (NotDQuoteBackslashClass | EscapeSequence).+.string <~ DQuoteChar |
(DQuoteChar ~ DQuoteChar) ^^^ "")
/**
* Parses a single escape sequence into the represented Char.
* Escapes start with a backslash and are followed by `u` for a [[UnicodeEscape]] or by `b`, `t`, `n`, `f`, `r`, `"`, `'`, `\` for standard escapes.
*/
lazy val EscapeSequence: Parser[Char] =
BackslashChar ~> ('b' ^^^ '\b' | 't' ^^^ '\t' | 'n' ^^^ '\n' | 'f' ^^^ '\f' | 'r' ^^^ '\r' |
'\"' ^^^ '\"' | '\'' ^^^ '\'' | '\\' ^^^ '\\' | UnicodeEscape)
/**
* Parses a single unicode escape sequence into the represented Char.
* A unicode escape begins with a backslash, followed by a `u` and 4 hexadecimal digits representing the unicode value.
*/
lazy val UnicodeEscape: Parser[Char] =
("u" ~> repeat(HexDigit, 4, 4)) map { seq =>
Integer.parseInt(seq.mkString, 16).toChar
}
/** Parses an unquoted, non-empty String value that cannot start with a double quote and cannot contain whitespace.*/
lazy val NotQuoted = (NotDQuoteSpaceClass ~ OptNotSpace) map { case (c, s) => c.toString + s }
/**
* Applies `rep` zero or more times, separated by `sep`.
* The result is the (possibly empty) sequence of results from the multiple `rep` applications. The `sep` results are discarded.
*/
def repsep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] =
rep1sep(rep, sep) ?? Nil
/**
* Applies `rep` one or more times, separated by `sep`.
* The result is the non-empty sequence of results from the multiple `rep` applications. The `sep` results are discarded.
*/
def rep1sep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] =
(rep ~ (sep ~> rep).*).map { case (x ~ xs) => x +: xs }
/** Wraps the result of `p` in `Some`.*/
def some[T](p: Parser[T]): Parser[Option[T]] = p map { v =>
Some(v)
}
/**
* Applies `f` to the result of `p`, transforming any exception when evaluating
* `f` into a parse failure with the exception `toString` as the message.
*/
def mapOrFail[S, T](p: Parser[S])(f: S => T): Parser[T] =
p flatMap { s =>
try { success(f(s)) } catch { case e: Exception => failure(e.toString) }
}
/**
* Parses a space-delimited, possibly empty sequence of arguments.
* The arguments may use quotes and escapes according to [[StringBasic]].
*/
def spaceDelimited(display: String): Parser[Seq[String]] =
(token(Space) ~> token(StringBasic, display)).* <~ SpaceClass.*
/** Applies `p`, returning `true` if it succeeds; failure is turned into a result of `false`. */
def flag[T](p: Parser[T]): Parser[Boolean] = (p ^^^ true) ?? false
/**
* Defines a sequence parser where the parser used for each part depends on the previously parsed values.
* `p` is applied to the (possibly empty) sequence of already parsed values to obtain the next parser to use.
* The parsers obtained in this way are separated by `sep`, whose result is discarded and only the sequence
* of values from the parsers returned by `p` is used for the result.
*/
def repeatDep[A](p: Seq[A] => Parser[A], sep: Parser[Any]): Parser[Seq[A]] = {
def loop(acc: Seq[A]): Parser[Seq[A]] = {
val next = (sep ~> p(acc)) flatMap { result =>
loop(acc :+ result)
}
next ?? acc
}
p(Vector()) flatMap { first =>
loop(Seq(first))
}
}
/** Applies String.trim to the result of `p`. */
def trimmed(p: Parser[String]) = p map { _.trim }
/** Parses a URI that is valid according to the single argument java.net.URI constructor. */
lazy val basicUri = mapOrFail(URIClass)(uri => new URI(uri))
/** Parses a URI that is valid according to the single argument java.net.URI constructor, using `ex` as tab completion examples. */
def Uri(ex: Set[URI]) = basicUri examples (ex.map(_.toString))
}
/** Provides standard [[Parser]] implementations. */
object Parsers extends Parsers
/** Provides common [[Parser]] implementations and helper methods.*/
object DefaultParsers extends Parsers with ParserMain {
/** Applies parser `p` to input `s` and returns `true` if the parse was successful. */
def matches(p: Parser[_], s: String): Boolean =
apply(p)(s).resultEmpty.isValid
/** Returns `true` if `s` parses successfully according to [[ID]].*/
def validID(s: String): Boolean = matches(ID, s)
}
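/*
 * A minimal usage sketch of the combinators documented above, assuming the standard
 * `import DefaultParsers._` used elsewhere in this module. The object name and the
 * sample inputs are hypothetical.
 */
object DefaultParsersUsageSketch {
  import DefaultParsers._
  def main(args: Array[String]): Unit = {
    // A comma-separated sequence of signed integers via repsep and IntBasic.
    val ints: Parser[Seq[Int]] = repsep(IntBasic, ',')
    println(parse("1,-2,3", ints)) // expected: Right(Seq(1, -2, 3)) (concrete Seq type may vary)
    // Lower-case boolean literals.
    println(parse("true", Bool)) // expected: Right(true)
    // A quoted string with an interpreted escape: the quotes are dropped and \t becomes a tab.
    println(parse("\"a\\tb\"", StringEscapable)) // expected: Right("a<tab>b")
    // Space-delimited arguments, quoted or unquoted, per StringBasic.
    println(parse(" foo \"bar baz\"", spaceDelimited("<arg>"))) // expected: Right(Seq("foo", "bar baz"))
  }
}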

View File

@ -0,0 +1,31 @@
package sbt.internal.util
package complete
object ProcessError {
def apply(command: String, msgs: Seq[String], index: Int): String = {
val (line, modIndex) = extractLine(command, index)
val point = pointerSpace(command, modIndex)
msgs.mkString("\n") + "\n" + line + "\n" + point + "^"
}
def extractLine(s: String, i: Int): (String, Int) = {
val notNewline = (c: Char) => c != '\n' && c != '\r'
val left = takeRightWhile(s.substring(0, i))(notNewline)
val right = s substring i takeWhile notNewline
(left + right, left.length)
}
def takeRightWhile(s: String)(pred: Char => Boolean): String = {
def loop(i: Int): String =
if (i < 0)
s
else if (pred(s(i)))
loop(i - 1)
else
s.substring(i + 1)
loop(s.length - 1)
}
def pointerSpace(s: String, i: Int): String =
(s take i) map { case '\t' => '\t'; case _ => ' ' } mkString ""
}
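/*
 * A minimal sketch of the pointer-style message ProcessError builds for a failed parse;
 * the command text, the message, and the index are hypothetical.
 */
object ProcessErrorSketch {
  def main(args: Array[String]): Unit =
    println(ProcessError("color blu", Seq("Expected 'blue'"), 6))
  // prints:
  //   Expected 'blue'
  //   color blu
  //         ^
}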

View File

@ -0,0 +1,46 @@
package sbt.internal.util
package complete
import Completion.{ token => ctoken, tokenDisplay }
sealed trait TokenCompletions {
def hideWhen(f: Int => Boolean): TokenCompletions
}
object TokenCompletions {
private[sbt] abstract class Delegating extends TokenCompletions { outer =>
def completions(seen: String, level: Int, delegate: Completions): Completions
final def hideWhen(hide: Int => Boolean): TokenCompletions = new Delegating {
def completions(seen: String, level: Int, delegate: Completions): Completions =
if (hide(level)) Completions.nil else outer.completions(seen, level, delegate)
}
}
private[sbt] abstract class Fixed extends TokenCompletions { outer =>
def completions(seen: String, level: Int): Completions
final def hideWhen(hide: Int => Boolean): TokenCompletions = new Fixed {
def completions(seen: String, level: Int) =
if (hide(level)) Completions.nil else outer.completions(seen, level)
}
}
val default: TokenCompletions = mapDelegateCompletions(
(seen, level, c) => ctoken(seen, c.append))
def displayOnly(msg: String): TokenCompletions = new Fixed {
def completions(seen: String, level: Int) = Completions.single(Completion.displayOnly(msg))
}
def overrideDisplay(msg: String): TokenCompletions =
mapDelegateCompletions((seen, level, c) => tokenDisplay(display = msg, append = c.append))
def fixed(f: (String, Int) => Completions): TokenCompletions = new Fixed {
def completions(seen: String, level: Int) = f(seen, level)
}
def mapDelegateCompletions(f: (String, Int, Completion) => Completion): TokenCompletions =
new Delegating {
def completions(seen: String, level: Int, delegate: Completions) =
Completions(delegate.get.map(c => f(seen, level, c)))
}
}

View File

@ -0,0 +1,83 @@
package sbt.internal.util
package complete
import DefaultParsers._
import TypeString._
/**
* Basic representation of types parsed from Manifest.toString.
* This can only represent the structure of parameterized types.
* All other types are represented by a TypeString with an empty `args`.
*/
private[sbt] final class TypeString(val base: String, val args: List[TypeString]) {
override def toString =
if (base.startsWith(FunctionName))
args.dropRight(1).mkString("(", ",", ")") + " => " + args.last
else if (base.startsWith(TupleName))
args.mkString("(", ",", ")")
else
cleanupTypeName(base) + (if (args.isEmpty) "" else args.mkString("[", ",", "]"))
}
private[sbt] object TypeString {
/** Makes the string representation of a type as returned by Manifest.toString more readable.*/
def cleanup(typeString: String): String =
parse(typeString, typeStringParser) match {
case Right(ts) => ts.toString
case Left(err) => typeString
}
/**
* Makes a fully qualified type name provided by Manifest.toString more readable.
* The argument should be just a name (like scala.Tuple2) and not a full type (like scala.Tuple2[Int,Boolean])
*/
def cleanupTypeName(base: String): String =
dropPrefix(base).replace('$', '.')
/**
* Removes prefixes from a fully qualified type name that are unnecessary in the presence of standard imports for an sbt setting.
* This does not use the compiler and is therefore a conservative approximation.
*/
def dropPrefix(base: String): String =
if (base.startsWith(SbtPrefix))
base.substring(SbtPrefix.length)
else if (base.startsWith(CollectionPrefix)) {
val simple = base.substring(CollectionPrefix.length)
if (ShortenCollection(simple)) simple else base
} else if (base.startsWith(ScalaPrefix))
base.substring(ScalaPrefix.length)
else if (base.startsWith(JavaPrefix))
base.substring(JavaPrefix.length)
else
TypeMap.getOrElse(base, base)
final val CollectionPrefix = "scala.collection."
final val FunctionName = "scala.Function"
final val TupleName = "scala.Tuple"
final val SbtPrefix = "sbt."
final val ScalaPrefix = "scala."
final val JavaPrefix = "java.lang."
/* scala.collection.X -> X */
val ShortenCollection = Set("Seq", "List", "Set", "Map", "Iterable")
val TypeMap = Map(
"java.io.File" -> "File",
"java.net.URL" -> "URL",
"java.net.URI" -> "URI"
)
/**
* A Parser that extracts basic structure from the string representation of a type from Manifest.toString.
* This is rudimentary and essentially only decomposes the string into names and arguments for parameterized types.
*/
lazy val typeStringParser: Parser[TypeString] = {
def isFullScalaIDChar(c: Char) = isScalaIDChar(c) || c == '.' || c == '$'
lazy val fullScalaID =
identifier(IDStart, charClass(isFullScalaIDChar, "Scala identifier character"))
lazy val tpe: Parser[TypeString] =
for (id <- fullScalaID; args <- ('[' ~> rep1sep(tpe, ',') <~ ']').?)
yield new TypeString(id, args.toList.flatten)
tpe
}
}
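/*
 * A minimal sketch of TypeString.cleanup on a Manifest-style type name; under the prefix
 * rules above, the scala.collection and java.lang prefixes are dropped. The object name
 * and the sample input are hypothetical.
 */
object TypeStringSketch {
  def main(args: Array[String]): Unit =
    // expected output: Seq[String]
    println(TypeString.cleanup("scala.collection.Seq[java.lang.String]"))
}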

View File

@ -0,0 +1,60 @@
/* sbt -- Simple Build Tool
* Copyright 2008,2010 Mark Harrah
*/
package sbt.internal.util
package complete
sealed trait UpperBound {
/** True if and only if the given value meets this bound.*/
def >=(min: Int): Boolean
/** True if and only if this bound is one.*/
def isOne: Boolean
/** True if and only if this bound is zero.*/
def isZero: Boolean
/**
* If this bound is zero or Infinite, `decrement` returns this bound.
* Otherwise, this bound is finite and greater than zero and `decrement` returns the bound that is one less than this bound.
*/
def decrement: UpperBound
/** True if and only if this is unbounded.*/
def isInfinite: Boolean
}
/** Represents unbounded. */
case object Infinite extends UpperBound {
/** All finite numbers meet this bound. */
def >=(min: Int) = true
def isOne = false
def isZero = false
def decrement = this
def isInfinite = true
override def toString = "Infinity"
}
/**
* Represents a finite upper bound. The maximum allowed value is `value`, inclusive.
* It must be nonnegative.
*/
final case class Finite(value: Int) extends UpperBound {
assume(value >= 0, "Maximum occurrences must be nonnegative.")
def >=(min: Int) = value >= min
def isOne = value == 1
def isZero = value == 0
def decrement = Finite(scala.math.max(0, value - 1))
def isInfinite = false
override def toString = value.toString
}
object UpperBound {
implicit def intToFinite(i: Int): Finite = Finite(i)
}
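/*
 * A minimal sketch of the two UpperBound cases: a Finite bound decrements toward zero and
 * then stays there, while Infinite is its own decrement and meets every minimum.
 */
object UpperBoundSketch {
  def main(args: Array[String]): Unit = {
    val two: UpperBound = Finite(2)
    assert(two >= 2 && !(two >= 3))
    assert(two.decrement == Finite(1) && Finite(0).decrement == Finite(0))
    assert(Infinite.decrement == Infinite && (Infinite >= Int.MaxValue))
  }
}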

View File

@ -0,0 +1,155 @@
package sbt.internal.util
package complete
object JLineTest {
import DefaultParsers._
val one = "blue" | "green" | "black"
val two = token("color" ~> Space) ~> token(one)
val three = token("color" ~> Space) ~> token(ID.examples("blue", "green", "black"))
val four = token("color" ~> Space) ~> token(ID, "<color name>")
val num = token(NatBasic)
val five = (num ~ token("+" | "-") ~ num) <~ token('=') flatMap {
case a ~ "+" ~ b => token((a + b).toString)
case a ~ "-" ~ b => token((a - b).toString)
}
val parsers = Map("1" -> one, "2" -> two, "3" -> three, "4" -> four, "5" -> five)
def main(args: Array[String]): Unit = {
import jline.TerminalFactory
import jline.console.ConsoleReader
val reader = new ConsoleReader()
TerminalFactory.get.init
val parser = parsers(args(0))
JLineCompletion.installCustomCompletor(reader, parser)
def loop(): Unit = {
val line = reader.readLine("> ")
if (line ne null) {
println("Result: " + apply(parser)(line).resultEmpty)
loop()
}
}
loop()
}
}
import Parser._
import org.scalacheck._
object ParserTest extends Properties("Completing Parser") {
import Parsers._
import DefaultParsers.matches
val nested = (token("a1") ~ token("b2")) ~ "c3"
val nestedDisplay = (token("a1", "<a1>") ~ token("b2", "<b2>")) ~ "c3"
val spacePort = token(Space) ~> Port
def p[T](f: T): T = { println(f); f }
def checkSingle(in: String, expect: Completion)(expectDisplay: Completion = expect) =
(("token '" + in + "'") |: checkOne(in, nested, expect)) &&
(("display '" + in + "'") |: checkOne(in, nestedDisplay, expectDisplay))
def checkOne(in: String, parser: Parser[_], expect: Completion): Prop =
completions(parser, in, 1) == Completions.single(expect)
def checkAll(in: String, parser: Parser[_], expect: Completions): Prop = {
val cs = completions(parser, in, 1)
("completions: " + cs) |: ("Expected: " + expect) |: (cs == expect: Prop)
}
def checkInvalid(in: String) =
(("token '" + in + "'") |: checkInv(in, nested)) &&
(("display '" + in + "'") |: checkInv(in, nestedDisplay))
def checkInv(in: String, parser: Parser[_]): Prop = {
val cs = completions(parser, in, 1)
("completions: " + cs) |: (cs == Completions.nil: Prop)
}
property("nested tokens a") =
checkSingle("", Completion.token("", "a1"))(Completion.displayOnly("<a1>"))
property("nested tokens a1") =
checkSingle("a", Completion.token("a", "1"))(Completion.displayOnly("<a1>"))
property("nested tokens a inv") = checkInvalid("b")
property("nested tokens b") =
checkSingle("a1", Completion.token("", "b2"))(Completion.displayOnly("<b2>"))
property("nested tokens b2") =
checkSingle("a1b", Completion.token("b", "2"))(Completion.displayOnly("<b2>"))
property("nested tokens b inv") = checkInvalid("a1a")
property("nested tokens c") = checkSingle("a1b2", Completion.suggestion("c3"))()
property("nested tokens c3") = checkSingle("a1b2c", Completion.suggestion("3"))()
property("nested tokens c inv") = checkInvalid("a1b2a")
property("suggest space") = checkOne("", spacePort, Completion.token("", " "))
property("suggest port") = checkOne(" ", spacePort, Completion.displayOnly("<port>"))
property("no suggest at end") = checkOne("asdf", "asdf", Completion.suggestion(""))
property("no suggest at token end") = checkOne("asdf", token("asdf"), Completion.suggestion(""))
property("empty suggest for examples") =
checkOne("asdf", any.+.examples("asdf", "qwer"), Completion.suggestion(""))
property("empty suggest for examples token") =
checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestion(""))
val colors = Set("blue", "green", "red")
val base = (seen: Seq[String]) => token(ID examples (colors -- seen))
val sep = token(Space)
val repeat = repeatDep(base, sep)
def completionStrings(ss: Set[String]) = Completions(ss map (Completion.token("", _)))
property("repeatDep no suggestions for bad input") = checkInv(".", repeat)
property("repeatDep suggest all") = checkAll("", repeat, completionStrings(colors))
property("repeatDep suggest remaining two") = {
val first = colors.toSeq.head
checkAll(first + " ", repeat, completionStrings(colors - first))
}
property("repeatDep suggest remaining one") = {
val take = colors.toSeq.take(2)
checkAll(take.mkString("", " ", " "), repeat, completionStrings(colors -- take))
}
property("repeatDep requires at least one token") = !matches(repeat, "")
property("repeatDep accepts one token") = matches(repeat, colors.toSeq.head)
property("repeatDep accepts two tokens") = matches(repeat, colors.toSeq.take(2).mkString(" "))
}
object ParserExample {
val ws = charClass(_.isWhitespace).+
val notws = charClass(!_.isWhitespace).+
val name = token("test")
val options = (ws ~> token("quick" | "failed" | "new")).*
val exampleSet = Set("am", "is", "are", "was", "were")
val include = (ws ~> token(
examples(notws.string, new FixedSetExamples(exampleSet), exampleSet.size, false)
)).*
val t = name ~ options ~ include
// Get completions for some different inputs
println(completions(t, "te", 1))
println(completions(t, "test ", 1))
println(completions(t, "test w", 1))
// Get the parsed result for different inputs
println(apply(t)("te").resultEmpty)
println(apply(t)("test").resultEmpty)
println(apply(t)("test w").resultEmpty)
println(apply(t)("test was were").resultEmpty)
def run(n: Int): Unit = {
val a = 'a'.id
val aq = a.?
val aqn = repeat(aq, min = n, max = n)
val an = repeat(a, min = n, max = n)
val ann = aqn ~ an
def r = apply(ann)("a" * (n * 2)).resultEmpty
println(r.isValid)
}
def run2(n: Int): Unit = {
val ab = "ab".?.*
val r = apply(ab)("a" * n).resultEmpty
println(r)
}
}

View File

@ -0,0 +1,5 @@
package sbt.internal.util
import org.scalatest._
abstract class UnitSpec extends FlatSpec with Matchers

View File

@ -0,0 +1,99 @@
package sbt.internal.util
package complete
import java.io.File
import sbt.io.IO._
class FileExamplesTest extends UnitSpec {
"listing all files in an absolute base directory" should
"produce the entire base directory's contents" in {
val _ = new DirectoryStructure {
fileExamples().toList should contain theSameElementsAs (allRelativizedPaths)
}
}
"listing files with a prefix that matches none" should
"produce an empty list" in {
val _ = new DirectoryStructure(withCompletionPrefix = "z") {
fileExamples().toList shouldBe empty
}
}
"listing single-character prefixed files" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "f") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
}
}
"listing directory-prefixed files" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
}
}
it should "produce sub-dir contents only when appending a file separator to the directory" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator) {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
}
}
"listing files with a sub-path prefix" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
}
}
"completing a full path" should
"produce a list with an empty string" in {
val _ = new DirectoryStructure(withCompletionPrefix = "bazaar") {
fileExamples().toList shouldEqual List("")
}
}
// TODO: Remove DelayedInit - https://github.com/scala/scala/releases/tag/v2.11.0-RC1
class DirectoryStructure(withCompletionPrefix: String = "") extends DelayedInit {
var fileExamples: FileExamples = _
var baseDir: File = _
var childFiles: List[File] = _
var childDirectories: List[File] = _
var nestedFiles: List[File] = _
var nestedDirectories: List[File] = _
def allRelativizedPaths: List[String] =
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories)
.map(relativize(baseDir, _).get)
def prefixedPathsOnly: List[String] =
allRelativizedPaths
.filter(_ startsWith withCompletionPrefix)
.map(_ substring withCompletionPrefix.length)
override def delayedInit(testBody: => Unit): Unit = {
withTemporaryDirectory { tempDir =>
createSampleDirStructure(tempDir)
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
testBody
}
}
private def createSampleDirStructure(tempDir: File): Unit = {
childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar"))
childDirectories = toChildFiles(tempDir, List("moo", "far"))
nestedFiles = toChildFiles(childDirectories(1), List("farfile1", "barfile2"))
nestedDirectories = toChildFiles(childDirectories(1), List("fardir1", "bardir2"))
(childDirectories ++ nestedDirectories).map(_.mkdirs())
(childFiles ++ nestedFiles).map(_.createNewFile())
baseDir = tempDir
}
private def toChildFiles(baseDir: File, files: List[String]): List[File] =
files.map(new File(baseDir, _))
}
}

View File

@ -0,0 +1,25 @@
package sbt.internal.util
package complete
class FixedSetExamplesTest extends UnitSpec {
"adding a prefix" should "produce a smaller set of examples with the prefix removed" in {
val _ = new Examples {
fixedSetExamples.withAddedPrefix("f")() should contain theSameElementsAs
(List("oo", "ool", "u"))
fixedSetExamples.withAddedPrefix("fo")() should contain theSameElementsAs (List("o", "ol"))
fixedSetExamples.withAddedPrefix("b")() should contain theSameElementsAs (List("ar"))
}
}
"without a prefix" should "produce the original set" in {
val _ = new Examples {
fixedSetExamples() shouldBe exampleSet
}
}
trait Examples {
val exampleSet = List("foo", "bar", "fool", "fu")
val fixedSetExamples = FixedSetExamples(exampleSet)
}
}

View File

@ -0,0 +1,107 @@
package sbt.internal.util
package complete
import Completion._
class ParserWithExamplesTest extends UnitSpec {
"listing a limited number of completions" should
"grab only the needed number of elements from the iterable source of examples" in {
val _ = new ParserWithLazyExamples {
parserWithExamples.completions(0)
examples.size shouldEqual maxNumberOfExamples
}
}
"listing only valid completions" should
"use the delegate parser to remove invalid examples" in {
val _ = new ParserWithValidExamples {
val validCompletions = Completions(
Set(
suggestion("blue"),
suggestion("red")
))
parserWithExamples.completions(0) shouldEqual validCompletions
}
}
"listing valid completions in a derived parser" should
"produce only valid examples that start with the character of the derivation" in {
val _ = new ParserWithValidExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue")
))
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
"listing valid and invalid completions" should
"produce the entire source of examples" in {
val _ = new parserWithAllExamples {
val completions = Completions(examples.map(suggestion(_)).toSet)
parserWithExamples.completions(0) shouldEqual completions
}
}
"listing valid and invalid completions in a derived parser" should
"produce only examples that start with the character of the derivation" in {
val _ = new parserWithAllExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue"),
suggestion("lock")
))
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
class ParserWithLazyExamples
extends ParserExample(
GrowableSourceOfExamples(),
maxNumberOfExamples = 5,
removeInvalidExamples = false
)
class ParserWithValidExamples extends ParserExample(removeInvalidExamples = true)
class parserWithAllExamples extends ParserExample(removeInvalidExamples = false)
case class ParserExample(
examples: Iterable[String] = Set("blue", "yellow", "greeen", "block", "red"),
maxNumberOfExamples: Int = 25,
removeInvalidExamples: Boolean
) {
import DefaultParsers._
val colorParser = "blue" | "green" | "black" | "red"
val parserWithExamples: Parser[String] = new ParserWithExamples[String](
colorParser,
FixedSetExamples(examples),
maxNumberOfExamples,
removeInvalidExamples
)
}
case class GrowableSourceOfExamples() extends Iterable[String] {
private var numberOfIteratedElements: Int = 0
override def iterator: Iterator[String] = {
new Iterator[String] {
var currentElement = 0
override def next(): String = {
currentElement += 1
numberOfIteratedElements = Math.max(currentElement, numberOfIteratedElements)
numberOfIteratedElements.toString
}
override def hasNext: Boolean = true
}
}
override def size: Int = numberOfIteratedElements
}
}

View File

@ -0,0 +1,389 @@
package sbt.internal.util
package logic
import scala.annotation.tailrec
import Formula.{ And, True }
/*
Defines a propositional logic with negation as failure and only allows stratified rule sets
(negation must be acyclic) in order to have a unique minimal model.
For example, this is not allowed:
+ p :- not q
+ q :- not p
but this is:
+ p :- q
+ q :- p
as is this:
+ p :- q
+ q :- not r
Some useful links:
+ https://en.wikipedia.org/wiki/Nonmonotonic_logic
+ https://en.wikipedia.org/wiki/Negation_as_failure
+ https://en.wikipedia.org/wiki/Propositional_logic
+ https://en.wikipedia.org/wiki/Stable_model_semantics
+ http://www.w3.org/2005/rules/wg/wiki/negation
*/
/** Disjunction (or) of the list of clauses. */
final case class Clauses(clauses: List[Clause]) {
assert(clauses.nonEmpty, "At least one clause is required.")
}
/** When the `body` Formula succeeds, atoms in `head` are true. */
final case class Clause(body: Formula, head: Set[Atom])
/** A literal is an [[Atom]] or its [[negation|Negated]]. */
sealed abstract class Literal extends Formula {
/** The underlying (positive) atom. */
def atom: Atom
/** Negates this literal.*/
def unary_! : Literal
}
/** A variable with name `label`. */
final case class Atom(label: String) extends Literal {
def atom = this
def unary_! : Negated = Negated(this)
}
/**
* A negated atom, in the sense of negation as failure, not logical negation.
* That is, it is true if `atom` is not known/defined.
*/
final case class Negated(atom: Atom) extends Literal {
def unary_! : Atom = atom
}
/**
* A formula consists of variables, negation, and conjunction (and).
* (Disjunction is not currently included; it is modeled at the level of a sequence of clauses.
* This is less convenient when defining clauses, but is not less powerful.)
*/
sealed abstract class Formula {
/** Constructs a clause that proves `atoms` when this formula is true. */
def proves(atom: Atom, atoms: Atom*): Clause = Clause(this, (atom +: atoms).toSet)
/** Constructs a formula that is true iff this formula and `f` are both true.*/
def &&(f: Formula): Formula = (this, f) match {
case (True, x) => x
case (x, True) => x
case (And(as), And(bs)) => And(as ++ bs)
case (And(as), b: Literal) => And(as + b)
case (a: Literal, And(bs)) => And(bs + a)
case (a: Literal, b: Literal) => And(Set(a, b))
}
}
object Formula {
/** A conjunction of literals. */
final case class And(literals: Set[Literal]) extends Formula {
assert(literals.nonEmpty, "'And' requires at least one literal.")
}
final case object True extends Formula
}
object Logic {
def reduceAll(
clauses: List[Clause],
initialFacts: Set[Literal]
): Either[LogicException, Matched] =
reduce(Clauses(clauses), initialFacts)
/**
* Computes the variables in the unique stable model for the program represented by `clauses` and
* `initialFacts`. `clauses` may not have any negative feedback (that is, negation must be acyclic)
* and `initialFacts` cannot occur in the head of any clause in `clauses`.
* These restrictions ensure that the logic program has a unique minimal model.
*/
def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] = {
val (posSeq, negSeq) = separate(initialFacts.toSeq)
val (pos, neg) = (posSeq.toSet, negSeq.toSet)
val problem =
checkContradictions(pos, neg) orElse
checkOverlap(clauses, pos) orElse
checkAcyclic(clauses)
problem.toLeft(
reduce0(clauses, initialFacts, Matched.empty)
)
}
/**
* Verifies `initialFacts` are not in the head of any `clauses`.
* This avoids the situation where an atom is proved but no clauses prove it.
* This isn't necessarily a problem, but the main sbt use case expects
* a proven atom to have at least one clause satisfied.
*/
private[this] def checkOverlap(
clauses: Clauses,
initialFacts: Set[Atom]
): Option[InitialOverlap] = {
val as = atoms(clauses)
val initialOverlap = initialFacts.filter(as.inHead)
if (initialOverlap.nonEmpty) Some(new InitialOverlap(initialOverlap)) else None
}
private[this] def checkContradictions(
pos: Set[Atom],
neg: Set[Atom]
): Option[InitialContradictions] = {
val contradictions = pos intersect neg
if (contradictions.nonEmpty) Some(new InitialContradictions(contradictions)) else None
}
private[this] def checkAcyclic(clauses: Clauses): Option[CyclicNegation] = {
val deps = dependencyMap(clauses)
val cycle = Dag.findNegativeCycle(graph(deps))
if (cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None
}
private[this] def graph(deps: Map[Atom, Set[Literal]]) = new Dag.DirectedSignedGraph[Atom] {
type Arrow = Literal
def nodes = deps.keys.toList
def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList
def isNegative(b: Literal) = b match {
case Negated(_) => true
case Atom(_) => false
}
def head(b: Literal) = b.atom
}
private[this] def dependencyMap(clauses: Clauses): Map[Atom, Set[Literal]] =
(Map.empty[Atom, Set[Literal]] /: clauses.clauses) {
case (m, Clause(formula, heads)) =>
val deps = literals(formula)
(m /: heads) { (n, head) =>
n.updated(head, n.getOrElse(head, Set.empty) ++ deps)
}
}
sealed abstract class LogicException(override val toString: String)
final class InitialContradictions(val literals: Set[Atom])
extends LogicException(
"Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")
)
final class InitialOverlap(val literals: Set[Atom])
extends LogicException(
"Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")
)
final class CyclicNegation(val cycle: List[Literal])
extends LogicException(
"Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")
)
/** Tracks proven atoms in the reverse order they were proved. */
final class Matched private (val provenSet: Set[Atom], reverseOrdered: List[Atom]) {
def add(atoms: Set[Atom]): Matched = add(atoms.toList)
def add(atoms: List[Atom]): Matched = {
val newOnly = atoms.filterNot(provenSet)
new Matched(provenSet ++ newOnly, newOnly ::: reverseOrdered)
}
def ordered: List[Atom] = reverseOrdered.reverse
override def toString = ordered.map(_.label).mkString("Matched(", ",", ")")
}
object Matched {
val empty = new Matched(Set.empty, Nil)
}
/** Separates a sequence of literals into `(pos, neg)` atom sequences. */
private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) =
Util.separate(lits) {
case a: Atom => Left(a)
case Negated(n) => Right(n)
}
/**
* Finds clauses that have no body and thus prove their head.
* Returns `(<proven atoms>, <remaining unproven clauses>)`.
*/
private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) = {
val (proven, unproven) = c.clauses.partition(_.body == True)
(proven.flatMap(_.head).toSet, unproven)
}
private[this] def keepPositive(lits: Set[Literal]): Set[Atom] =
lits.collect { case a: Atom => a }.toSet
// precondition: factsToProcess contains no contradictions
@tailrec private[this] def reduce0(
clauses: Clauses,
factsToProcess: Set[Literal],
state: Matched
): Matched =
applyAll(clauses, factsToProcess) match {
case None => state // all of the remaining clauses failed on the new facts
case Some(applied) =>
val (proven, unprovenClauses) = findProven(applied)
val processedFacts = state add keepPositive(factsToProcess)
val newlyProven = proven -- processedFacts.provenSet
val newState = processedFacts add newlyProven
if (unprovenClauses.isEmpty)
newState // no remaining clauses, done.
else {
val unproven = Clauses(unprovenClauses)
val nextFacts: Set[Literal] =
if (newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven)
reduce0(unproven, nextFacts, newState)
}
}
/**
* Finds negated atoms under the negation as failure rule and returns them.
* This should be called only after there are no more known atoms to be substituted.
*/
private[this] def inferFailure(clauses: Clauses): Set[Literal] = {
/* At this point, there is at least one clause and one of the following is the case as the
result of the acyclic negation rule:
i. there is at least one variable that occurs in a clause body but not in the head of a
clause
ii. there is at least one variable that occurs in the head of a clause and does not
transitively depend on a negated variable
In either case, each such variable x cannot be proven true and therefore proves 'not x'
(negation as failure, !x in the code).
*/
val allAtoms = atoms(clauses)
val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse)
if (newFacts.nonEmpty)
newFacts
else {
val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty)
val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue)
if (newlyFalse.nonEmpty)
newlyFalse
else // should never happen due to the acyclic negation rule
sys.error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue")
}
}
private[this] def negated(atoms: Set[Atom]): Set[Literal] = atoms.map(a => Negated(a))
/**
* Computes the set of atoms in `clauses` that directly or transitively take a negated atom as input.
* For example, for the following clauses, this method would return `List(a, d)` :
* a :- b, not c
* d :- a
*/
@tailrec
def hasNegatedDependency(
clauses: Seq[Clause],
posDeps: Relation[Atom, Atom],
negDeps: Relation[Atom, Atom]
): List[Atom] =
clauses match {
case Seq() =>
// because cycles between positive literals are allowed, this isn't strictly a topological sort
Dag.topologicalSortUnchecked(negDeps._1s)(posDeps.reverse)
case Clause(formula, head) +: tail =>
// collect direct positive and negative literals and track them in separate graphs
val (pos, neg) = directDeps(formula)
val (newPos, newNeg) = ((posDeps, negDeps) /: head) {
case ((pdeps, ndeps), d) =>
(pdeps + (d, pos), ndeps + (d, neg))
}
hasNegatedDependency(tail, newPos, newNeg)
}
/** Computes the `(positive, negative)` literals in `formula`. */
private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) =
Util.separate(literals(formula).toSeq) {
case Negated(a) => Right(a)
case a: Atom => Left(a)
}
private[this] def literals(formula: Formula): Set[Literal] = formula match {
case And(lits) => lits
case l: Literal => Set(l)
case True => Set.empty
}
/** Computes the atoms in the heads and bodies of the clauses in `cs`. */
def atoms(cs: Clauses): Atoms = cs.clauses.map(c => Atoms(c.head, atoms(c.body))).reduce(_ ++ _)
/** Computes the set of all atoms in `formula`. */
def atoms(formula: Formula): Set[Atom] = formula match {
case And(lits) => lits.map(_.atom)
case Negated(lit) => Set(lit)
case a: Atom => Set(a)
case True => Set()
}
/** Represents the set of atoms in the heads of clauses and in the bodies (formulas) of clauses. */
final case class Atoms(inHead: Set[Atom], inFormula: Set[Atom]) {
/** Concatenates this with `as`. */
def ++(as: Atoms): Atoms = Atoms(inHead ++ as.inHead, inFormula ++ as.inFormula)
/** Atoms that cannot be true because they do not occur in a head. */
def triviallyFalse: Set[Atom] = inFormula -- inHead
}
/**
* Applies known facts to `clause`s, deriving a new, possibly empty list of clauses.
* 1. If a fact is in the body of a clause, the derived clause has that fact removed from the body.
* 2. If the negation of a fact is in a body of a clause, that clause fails and is removed.
* 3. If a fact or its negation is in the head of a clause, the derived clause has that fact (or its negation) removed from the head.
* 4. If a head is empty, the clause proves nothing and is removed.
*
* NOTE: empty bodies do not cause a clause to succeed yet.
* All known facts must be applied before this can be done in order to avoid inconsistencies.
* Precondition: no contradictions in `facts`
* Postcondition: no atom in `facts` is present in the result
* Postcondition: No clauses have an empty head
*/
def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] = {
val newClauses =
if (facts.isEmpty)
cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head
else
cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList)
if (newClauses.isEmpty) None else Some(Clauses(newClauses))
}
def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] = {
val atoms = facts.map(_.atom)
val newHead = c.head -- atoms // 3.
if (newHead.isEmpty) // 4. empty head
None
else
substitute(c.body, facts).map(f => Clause(f, newHead)) // 1, 2
}
/** Derives the formula that results from substituting `facts` into `formula`. */
@tailrec def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match {
case And(lits) =>
def negated(lits: Set[Literal]): Set[Literal] = lits.map(a => !a)
if (lits.exists(negated(facts))) // 2.
None
else {
val newLits = lits -- facts
val newF = if (newLits.isEmpty) True else And(newLits)
Some(newF) // 1.
}
case True => Some(True)
case lit: Literal => // define in terms of And
substitute(And(Set(lit)), facts)
}
}
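/*
 * A minimal sketch reducing a tiny stratified program: with the single clause
 * "A :- B, not C" and the initial fact B, the atom C never appears in a head, so
 * "not C" holds by negation as failure and A is proved. Atom names are hypothetical.
 */
object LogicUsageSketch {
  def main(args: Array[String]): Unit = {
    val A = Atom("A"); val B = Atom("B"); val C = Atom("C")
    val program = (B && !C).proves(A) :: Nil
    // expected: Right(Matched(B,A))
    println(Logic.reduceAll(program, Set(B)))
  }
}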

View File

@ -0,0 +1,118 @@
package sbt.internal.util
package logic
import org.scalacheck._
import Prop.secure
import Logic.{ LogicException, Matched }
object LogicTest extends Properties("Logic") {
import TestClauses._
property("Handles trivial resolution.") = secure(expect(trivial, Set(A)))
property("Handles less trivial resolution.") = secure(expect(lessTrivial, Set(B, A, D)))
property("Handles cycles without negation") = secure(expect(cycles, Set(F, A, B)))
property("Handles basic exclusion.") = secure(expect(excludedPos, Set()))
property("Handles exclusion of head proved by negation.") = secure(expect(excludedNeg, Set()))
// TODO: actually check ordering, probably as part of a check that dependencies are satisfied
property("Properly orders results.") = secure(expect(ordering, Set(B, A, C, E, F)))
property("Detects cyclic negation") = secure(
Logic.reduceAll(badClauses, Set()) match {
case Right(res) => false
case Left(err: Logic.CyclicNegation) => true
case Left(err) => sys.error(s"Expected cyclic error, got: $err")
}
)
def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match {
case Left(err) => false
case Right(res) =>
val actual = res.provenSet
if (actual != expected)
sys.error(s"Expected to prove $expected, but actually proved $actual")
else
true
}
}
object TestClauses {
val A = Atom("A")
val B = Atom("B")
val C = Atom("C")
val D = Atom("D")
val E = Atom("E")
val F = Atom("F")
val G = Atom("G")
val clauses =
A.proves(B) ::
A.proves(F) ::
B.proves(F) ::
F.proves(A) ::
(!C).proves(F) ::
D.proves(C) ::
C.proves(D) ::
Nil
val cycles = Logic.reduceAll(clauses, Set())
val badClauses =
A.proves(D) ::
clauses
val excludedNeg = {
val cs =
(!A).proves(B) ::
Nil
val init =
(!A) ::
(!B) ::
Nil
Logic.reduceAll(cs, init.toSet)
}
val excludedPos = {
val cs =
A.proves(B) ::
Nil
val init =
A ::
(!B) ::
Nil
Logic.reduceAll(cs, init.toSet)
}
val trivial = {
val cs =
Formula.True.proves(A) ::
Nil
Logic.reduceAll(cs, Set.empty)
}
val lessTrivial = {
val cs =
Formula.True.proves(A) ::
Formula.True.proves(B) ::
(A && B && (!C)).proves(D) ::
Nil
Logic.reduceAll(cs, Set())
}
val ordering = {
val cs =
E.proves(F) ::
(C && !D).proves(E) ::
(A && B).proves(C) ::
Nil
Logic.reduceAll(cs, Set(A, B))
}
def all(): Unit = {
println(s"Cycles: $cycles")
println(s"xNeg: $excludedNeg")
println(s"xPos: $excludedPos")
println(s"trivial: $trivial")
println(s"lessTrivial: $lessTrivial")
println(s"ordering: $ordering")
}
}

View File

@ -16,6 +16,7 @@ import sbt.util.Logger
import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.HNil
import sbt.internal.util.HListFormats._
import sbt.util.FileInfo.{ exists, lastModified }
import sbt.util.CacheImplicits._
import sbt.util.Tracked.inputChanged

View File

@ -11,6 +11,7 @@ import sbt.io.syntax._
import sbt.io.IO
import sbt.internal.util.Types.:+:
import sbt.internal.util.HListFormats._
import sbt.util.CacheImplicits._
import sbt.util.Tracked.inputChanged
import sbt.util.{ CacheStoreFactory, FilesInfo, HashFileInfo, ModifiedFileInfo, PlainFileInfo }

View File

@ -3,6 +3,7 @@ package sbt.internal
import sbt.internal.librarymanagement._
import sbt.internal.util.Types._
import sbt.internal.util.{ HList, HNil }
import sbt.internal.util.HListFormats._
import sbt.io.{ Hash, IO }
import sbt.librarymanagement._
import sbt.util.CacheImplicits._

View File

@ -5,6 +5,7 @@ import java.io.File
import sbt.internal.librarymanagement._
import sbt.internal.util.HNil
import sbt.internal.util.Types._
import sbt.internal.util.HListFormats._
import sbt.librarymanagement._
import sbt.librarymanagement.syntax._
import sbt.util.CacheImplicits._

View File

@ -13,23 +13,19 @@ object Dependencies {
// sbt modules
private val ioVersion = "1.0.0-M12"
private val utilVersion = "1.0.0-M25"
private val lmVersion = "1.0.0-X16"
private val zincVersion = "1.0.0-X17"
private val utilVersion = "1.0.0-M26"
private val lmVersion = "1.0.0-X17"
private val zincVersion = "1.0.0-X18"
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion
private val utilApplyMacro = "org.scala-sbt" %% "util-apply-macro" % utilVersion
private val utilCache = "org.scala-sbt" %% "util-cache" % utilVersion
private val utilCollection = "org.scala-sbt" %% "util-collection" % utilVersion
private val utilCompletion = "org.scala-sbt" %% "util-completion" % utilVersion
private val utilControl = "org.scala-sbt" %% "util-control" % utilVersion
private val utilPosition = "org.scala-sbt" %% "util-position" % utilVersion
private val utilLogging = "org.scala-sbt" %% "util-logging" % utilVersion
private val utilLogic = "org.scala-sbt" %% "util-logic" % utilVersion
private val utilCache = "org.scala-sbt" %% "util-cache" % utilVersion
private val utilControl = "org.scala-sbt" %% "util-control" % utilVersion
private val utilRelation = "org.scala-sbt" %% "util-relation" % utilVersion
private val utilScripted = "org.scala-sbt" %% "util-scripted" % utilVersion
private val utilTesting = "org.scala-sbt" %% "util-testing" % utilVersion
private val utilTracking = "org.scala-sbt" %% "util-tracking" % utilVersion
private val utilScripted = "org.scala-sbt" %% "util-scripted" % utilVersion
private val libraryManagement = "org.scala-sbt" %% "librarymanagement" % lmVersion
@ -37,10 +33,10 @@ object Dependencies {
val rawLauncher = "org.scala-sbt" % "launcher" % "1.0.0"
val testInterface = "org.scala-sbt" % "test-interface" % "1.0"
private val compilerInterface = "org.scala-sbt" % "compiler-interface" % zincVersion
private val compilerClasspath = "org.scala-sbt" %% "zinc-classpath" % zincVersion
private val compilerApiInfo = "org.scala-sbt" %% "zinc-apiinfo" % zincVersion
private val compilerBridge = "org.scala-sbt" %% "compiler-bridge" % zincVersion
private val compilerClasspath = "org.scala-sbt" %% "zinc-classpath" % zincVersion
private val compilerInterface = "org.scala-sbt" % "compiler-interface" % zincVersion
private val compilerIvyIntegration = "org.scala-sbt" %% "zinc-ivy-integration" % zincVersion
private val zinc = "org.scala-sbt" %% "zinc" % zincVersion
private val zincCompile = "org.scala-sbt" %% "zinc-compile" % zincVersion
@ -58,51 +54,39 @@ object Dependencies {
lazy val sbtLmPath = getSbtModulePath("sbtlm.path", "sbt/lm")
lazy val sbtZincPath = getSbtModulePath("sbtzinc.path", "sbt/zinc")
def addSbtModule(p: Project,
path: Option[String],
projectName: String,
m: ModuleID,
c: Option[Configuration] = None) =
def addSbtModule(p: Project, path: Option[String], projectName: String, m: ModuleID) =
path match {
case Some(f) =>
p dependsOn c.fold[ClasspathDep[ProjectReference]](ProjectRef(file(f), projectName))(
ProjectRef(file(f), projectName) % _)
case None => p settings (libraryDependencies += c.fold(m)(m % _))
case Some(f) => p dependsOn ProjectRef(file(f), projectName)
case None => p settings (libraryDependencies += m)
}
def addSbtIO(p: Project): Project = addSbtModule(p, sbtIoPath, "io", sbtIO)
def addSbtUtilApplyMacro(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilApplyMacro", utilApplyMacro)
def addSbtUtilCache(p: Project): Project = addSbtModule(p, sbtUtilPath, "utilCache", utilCache)
def addSbtUtilCollection(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilCollection", utilCollection)
def addSbtUtilCompletion(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilComplete", utilCompletion)
def addSbtUtilControl(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilControl", utilControl)
def addSbtUtilPosition(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilPosition", utilPosition)
def addSbtUtilLogging(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilLogging", utilLogging)
def addSbtUtilLogic(p: Project): Project = addSbtModule(p, sbtUtilPath, "utilLogic", utilLogic)
def addSbtUtilCache(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilCache", utilCache)
def addSbtUtilControl(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilControl", utilControl)
def addSbtUtilRelation(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilRelation", utilRelation)
def addSbtUtilScripted(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilScripted", utilScripted)
def addSbtUtilTesting(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilTesting", utilTesting, Some(Test))
def addSbtUtilTracking(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilTracking", utilTracking)
def addSbtUtilScripted(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilScripted", utilScripted)
def addSbtLm(p: Project): Project = addSbtModule(p, sbtLmPath, "lm", libraryManagement)
def addSbtCompilerInterface(p: Project): Project =
addSbtModule(p, sbtZincPath, "compilerInterface", compilerInterface)
def addSbtCompilerClasspath(p: Project): Project =
addSbtModule(p, sbtZincPath, "zincClasspath", compilerClasspath)
def addSbtCompilerApiInfo(p: Project): Project =
addSbtModule(p, sbtZincPath, "zincApiInfo", compilerApiInfo)
def addSbtCompilerBridge(p: Project): Project =
addSbtModule(p, sbtZincPath, "compilerBridge", compilerBridge)
def addSbtCompilerClasspath(p: Project): Project =
addSbtModule(p, sbtZincPath, "zincClasspath", compilerClasspath)
def addSbtCompilerInterface(p: Project): Project =
addSbtModule(p, sbtZincPath, "compilerInterface", compilerInterface)
def addSbtCompilerIvyIntegration(p: Project): Project =
addSbtModule(p, sbtZincPath, "zincIvyIntegration", compilerIvyIntegration)
def addSbtZinc(p: Project): Project = addSbtModule(p, sbtZincPath, "zinc", zinc)
@ -110,6 +94,8 @@ object Dependencies {
addSbtModule(p, sbtZincPath, "zincCompile", zincCompile)
val sjsonNewScalaJson = Def.setting { "com.eed3si9n" %% "sjson-new-scalajson" % contrabandSjsonNewVersion.value }
val jline = "jline" % "jline" % "2.14.4"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.1"
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4"
val specs2 = "org.specs2" %% "specs2" % "2.4.17"

View File

@ -162,13 +162,14 @@ object Util {
def writeScalaKeywords(base: File, keywords: Set[String]): File = {
val init = keywords.map(tn => '"' + tn + '"').mkString("Set(", ", ", ")")
val ObjectName = "ScalaKeywords"
val PackageName = "sbt"
val keywordsSrc =
"""package %s
object %s {
val values = %s
}""".format(PackageName, ObjectName, init)
val out = base / PackageName.replace('.', '/') / (ObjectName + ".scala")
val PackageName = "sbt.internal.util"
val keywordsSrc = s"""
|package $PackageName
|object $ObjectName {
| val values = $init
|}
""".trim.stripMargin
val out = base / PackageName.replace('.', '/') / s"$ObjectName.scala"
IO.write(out, keywordsSrc)
out
}
@ -179,7 +180,8 @@ object %s {
scalaKeywords := getScalaKeywords,
generateKeywords := writeScalaKeywords(sourceManaged.value, scalaKeywords.value),
sourceGenerators += Def.task(Seq(generateKeywords.value)).taskValue
))
)
)
}
object Licensed {

View File

@ -24,6 +24,7 @@ lazy val root = (project in file("."))
type In = IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil
import sbt.util.CacheImplicits._
import sbt.internal.util.HListFormats._
import sbt.internal.AltLibraryManagementCodec._
val f: In => Unit =