Merge pull request #4064 from dwijnand/cleanup/formatting

Cleanup code formatting
Dale Wijnand 2018-04-24 22:18:24 +01:00 committed by GitHub
commit 5f3b8c561c
142 changed files with 3749 additions and 2374 deletions

View File

@ -8,3 +8,11 @@ docstrings = JavaDoc
# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true
# This is more idiomatic Scala.
# http://docs.scala-lang.org/style/indentation.html#methods-with-numerous-arguments
align.openParenCallSite = false
align.openParenDefnSite = false
# For better code clarity
danglingParentheses = true
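
# Note: the rest of this diff applies the settings above. As a made-up illustration
# (renderSummary and its parameters are hypothetical, not from this repository),
# disabling open-paren alignment and enabling dangling parentheses reformats a
# multi-parameter definition roughly as follows:
#
#   // Old style: parameters aligned with the opening parenthesis
#   def renderSummary(title: String,
#                     entries: Seq[String],
#                     verbose: Boolean): String =
#     title + ": " + entries.mkString(", ") + " (verbose=" + verbose + ")"
#
#   // New style: one parameter per line, dangling closing parenthesis
#   def renderSummary(
#       title: String,
#       entries: Seq[String],
#       verbose: Boolean
#   ): String =
#     title + ": " + entries.mkString(", ") + " (verbose=" + verbose + ")"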

View File

@ -37,7 +37,7 @@ def buildLevelSettings: Seq[Setting[_]] =
resolvers += Resolver.mavenLocal,
scalafmtOnCompile := true,
scalafmtOnCompile in Sbt := false,
scalafmtVersion := "1.3.0",
scalafmtVersion := "1.4.0",
))
def commonSettings: Seq[Setting[_]] = Def.settings(

View File

@ -29,8 +29,9 @@ object ContextUtil {
* Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this
* method, the result of this method is `f(<Tree of someValue>)`.
*/
def selectMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
def selectMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
import c.universe._
c.macroApplication match {
case s @ Select(Apply(_, t :: Nil), _) => f(c.Expr[Any](t), s.pos)
@ -211,12 +212,14 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit =
new ChangeOwnerAndModuleClassTraverser(
prev.asInstanceOf[global.Symbol],
next.asInstanceOf[global.Symbol]).traverse(tree.asInstanceOf[global.Tree])
next.asInstanceOf[global.Symbol]
).traverse(tree.asInstanceOf[global.Tree])
// Workaround copied from scala/async: can be removed once https://github.com/scala/scala/pull/3179 is merged.
private[this] class ChangeOwnerAndModuleClassTraverser(oldowner: global.Symbol,
newowner: global.Symbol)
extends global.ChangeOwnerTraverser(oldowner, newowner) {
private[this] class ChangeOwnerAndModuleClassTraverser(
oldowner: global.Symbol,
newowner: global.Symbol
) extends global.ChangeOwnerTraverser(oldowner, newowner) {
override def traverse(tree: global.Tree): Unit = {
tree match {
case _: global.DefTree => change(tree.symbol.moduleClass)
@ -248,7 +251,8 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
* the type constructor `[x] List[x]`.
*/
def extractTC(tcp: AnyRef with Singleton, name: String)(
implicit it: ctx.TypeTag[tcp.type]): ctx.Type = {
implicit it: ctx.TypeTag[tcp.type]
): ctx.Type = {
val itTpe = it.tpe.asInstanceOf[global.Type]
val m = itTpe.nonPrivateMember(global.newTypeName(name))
val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type]
@ -262,8 +266,10 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
* Typically, `f` is a `Select` or `Ident`.
* The wrapper is replaced with the result of `subWrapper(<Type of T>, <Tree of v>, <wrapper Tree>)`
*/
def transformWrappers(t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]): Tree = {
def transformWrappers(
t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]
): Tree = {
// the main tree transformer that replaces calls to InputWrapper.wrap(x) with
// plain Idents that reference the actual input value
object appTransformer extends Transformer {

View File

@ -26,9 +26,10 @@ sealed trait Converted[C <: blackbox.Context with Singleton] {
}
object Converted {
def NotApplicable[C <: blackbox.Context with Singleton] = new NotApplicable[C]
final case class Failure[C <: blackbox.Context with Singleton](position: C#Position,
message: String)
extends Converted[C] {
final case class Failure[C <: blackbox.Context with Singleton](
position: C#Position,
message: String
) extends Converted[C] {
def isSuccess = false
def transform(f: C#Tree => C#Tree): Converted[C] = new Failure(position, message)
}
@ -36,9 +37,10 @@ object Converted {
def isSuccess = false
def transform(f: C#Tree => C#Tree): Converted[C] = this
}
final case class Success[C <: blackbox.Context with Singleton](tree: C#Tree,
finalTransform: C#Tree => C#Tree)
extends Converted[C] {
final case class Success[C <: blackbox.Context with Singleton](
tree: C#Tree,
finalTransform: C#Tree => C#Tree
) extends Converted[C] {
def isSuccess = true
def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform)
}

View File

@ -41,9 +41,11 @@ object Instance {
final val MapName = "map"
final val InstanceTCName = "M"
final class Input[U <: Universe with Singleton](val tpe: U#Type,
val expr: U#Tree,
val local: U#ValDef)
final class Input[U <: Universe with Singleton](
val tpe: U#Type,
val expr: U#Tree,
val local: U#ValDef
)
trait Transform[C <: blackbox.Context with Singleton, N[_]] {
def apply(in: C#Tree): C#Tree
}

View File

@ -13,8 +13,9 @@ import macros._
/** A `TupleBuilder` that uses a KList as the tuple representation.*/
object KListBuilder extends TupleBuilder {
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
new BuilderResult[c.type] {
val ctx: c.type = c
val util = ContextUtil[c.type](c)
@ -47,15 +48,20 @@ object KListBuilder extends TupleBuilder {
case Nil => revBindings.reverse
}
private[this] def makeKList(revInputs: Inputs[c.universe.type],
klist: Tree,
klistType: Type): Tree =
private[this] def makeKList(
revInputs: Inputs[c.universe.type],
klist: Tree,
klistType: Type
): Tree =
revInputs match {
case in :: tail =>
val next = ApplyTree(
TypeApply(Ident(kcons),
TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil),
in.expr :: klist :: Nil)
TypeApply(
Ident(kcons),
TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil
),
in.expr :: klist :: Nil
)
makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil))
case Nil => klist
}

View File

@ -16,8 +16,9 @@ import macros._
* and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs.
*/
object MixedBuilder extends TupleBuilder {
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
val delegate = if (inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder
delegate.make(c)(mt, inputs)
}

View File

@ -35,8 +35,9 @@ trait TupleBuilder {
type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]]
/** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */
def make(c: blackbox.Context)(tcType: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type]
def make(
c: blackbox.Context
)(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type]
}
trait BuilderResult[C <: blackbox.Context with Singleton] {

View File

@ -22,8 +22,9 @@ object TupleNBuilder extends TupleBuilder {
final val MaxInputs = 11
final val TupleMethodName = "tuple"
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
new BuilderResult[c.type] {
val util = ContextUtil[c.type](c)
import c.universe._
@ -34,8 +35,9 @@ object TupleNBuilder extends TupleBuilder {
val ctx: c.type = c
val representationC: PolyType = {
val tcVariable: Symbol = newTCVariable(util.initialOwner)
val tupleTypeArgs = inputs.map(in =>
internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type])
val tupleTypeArgs = inputs.map(
in => internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]
)
val tuple = global.definitions.tupleType(tupleTypeArgs)
internal.polyType(tcVariable :: Nil, tuple.asInstanceOf[Type])
}
@ -47,10 +49,12 @@ object TupleNBuilder extends TupleBuilder {
}
def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1)
def bindTuple(param: ValDef,
revBindings: List[ValDef],
params: List[ValDef],
i: Int): List[ValDef] =
def bindTuple(
param: ValDef,
revBindings: List[ValDef],
params: List[ValDef],
i: Int
): List[ValDef] =
params match {
case (x @ ValDef(mods, name, tpt, _)) :: xs =>
val rhs = select(Ident(param.name), "_" + i.toString)

View File

@ -17,7 +17,9 @@ import Types._
*/
trait AList[K[L[x]]] {
def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[K[P]]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[K[P]]
def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A
def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil)
@ -33,8 +35,11 @@ object AList {
val empty: Empty = new Empty {
def transform[M[_], N[_]](in: Unit, f: M ~> N) = ()
def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(())
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] =
app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[Unit] = np.pure(())
}
type SeqList[T] = AList[λ[L[x] => List[L[T]]]]
@ -42,9 +47,12 @@ object AList {
/** AList for a homogeneous sequence. */
def seq[T]: SeqList[T] = new SeqList[T] {
def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T])
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t))
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A =
(init /: s.reverse)((t, m) => f(m, t))
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = {
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(
implicit ap: Applicative[M]
): M[C] = {
def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
in match {
case Nil => ap.pure(g(Nil))
@ -55,15 +63,20 @@ object AList {
loop(s, f)
}
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ???
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[List[P[T]]] = ???
}
/** AList for the arbitrary arity data structure KList. */
def klist[KL[M[_]] <: KList.Aux[M, KL]]: AList[KL] = new AList[KL] {
def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f)
def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N, P](f)(np)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] =
k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[KL[P]] = k.traverse[N, P](f)(np)
override def toList[M[_]](k: KL[M]) = k.toList
}
@ -73,7 +86,9 @@ object AList {
def single[A]: Single[A] = new Single[A] {
def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a)
def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[P[A]] = f(a)
}
type ASplit[K[L[x]], B[x]] = AList[λ[L[x] => K[(L ∙ B)#l]]]
@ -85,7 +100,9 @@ object AList {
def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] =
base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M, N, B](f))
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] = {
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[Split[P]] = {
val g = nestCon[M, (N ∙ P)#l, B](f)
base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np)
}
@ -101,7 +118,9 @@ object AList {
type T2[M[_]] = (M[A], M[B])
def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2))
def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init))
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T2[P]] = {
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T2[P]] = {
val g = (Tuple2.apply[P[A], P[B]] _).curried
np.apply(np.map(g, f(t._1)), f(t._2))
}
@ -113,7 +132,9 @@ object AList {
type T3[M[_]] = (M[A], M[B], M[C])
def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3))
def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init)))
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] = {
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T3[P]] = {
val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
}
@ -124,8 +145,11 @@ object AList {
def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] {
type T4[M[_]] = (M[A], M[B], M[C], M[D])
def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4))
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] = {
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T4[P]] = {
val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
}
@ -136,8 +160,11 @@ object AList {
def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] {
type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E])
def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5))
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] = {
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T5[P]] = {
val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
}
@ -147,71 +174,213 @@ object AList {
type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l]
def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] {
type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F])
def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] = {
def transform[M[_], N[_]](t: T6[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T6[P]] = {
val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6))
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
)
}
}
sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) }
sealed trait T7K[A, B, C, D, E, F, G] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G])
}
type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l]
def tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] {
type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G])
def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] = {
def transform[M[_], N[_]](t: T7[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T7[P]] = {
val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7))
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
)
}
}
sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) }
sealed trait T8K[A, B, C, D, E, F, G, H] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H])
}
type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l]
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8))
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] =
new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
)
}
}
}
sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) }
sealed trait T9K[A, B, C, D, E, F, G, H, I] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I])
}
type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l]
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9))
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] =
new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
)
}
}
}
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) }
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J])
}
type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l]
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init))))))))))
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] = {
val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10))
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] =
new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T =
f(
t._1,
f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))
)
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T10[P]] = {
val g =
(Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
),
f(t._10)
)
}
}
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) }
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11))
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))))
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] = {
val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11))
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K])
}
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] =
new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) =
(
f(t._1),
f(t._2),
f(t._3),
f(t._4),
f(t._5),
f(t._6),
f(t._7),
f(t._8),
f(t._9),
f(t._10),
f(t._11)
)
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T =
f(
t._1,
f(
t._2,
f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))
)
)
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(
implicit np: Applicative[N]
): N[T11[P]] = {
val g = (Tuple11
.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
),
f(t._10)
),
f(t._11)
)
}
}
}

View File

@ -71,20 +71,26 @@ object AttributeKey {
def apply[T: Manifest: OptJsonWriter](name: String, description: String): AttributeKey[T] =
apply(name, description, Nil)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
rank: Int): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
rank: Int
): AttributeKey[T] =
apply(name, description, Nil, rank)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]]): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
extend: Seq[AttributeKey[_]]
): AttributeKey[T] =
apply(name, description, extend, Int.MaxValue)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int
): AttributeKey[T] =
make(name, Some(description), extend, rank)
private[sbt] def copyWithRank[T](a: AttributeKey[T], rank: Int): AttributeKey[T] =

View File

@ -170,8 +170,10 @@ abstract class EvaluateSettings[Scope] {
}
protected final def setValue(v: T): Unit = {
assert(state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString)
assert(
state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString
)
if (v == null) sys.error("Setting value cannot be null: " + keyString)
value = v
state = Evaluated

View File

@ -357,7 +357,8 @@ trait Init[Scope] {
keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "")
new Uninitialized(
keys,
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ")
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n "
)
}
final class Compiled[T](
@ -374,8 +375,9 @@ trait Init[Scope] {
val locals = compiled flatMap {
case (key, comp) => if (key.key.isLocal) Seq[Compiled[_]](comp) else Nil
}
val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep =>
if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil))
val ordered = Dag.topologicalSort(locals)(
_.dependencies.flatMap(dep => if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil)
)
def flatten(
cmap: Map[ScopedKey[_], Flattened],
key: ScopedKey[_],
@ -383,7 +385,8 @@ trait Init[Scope] {
): Flattened =
new Flattened(
key,
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil))
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil)
)
val empty = Map.empty[ScopedKey[_], Flattened]
@ -415,7 +418,8 @@ trait Init[Scope] {
* Intersects two scopes, returning the more specific one if they intersect, or None otherwise.
*/
private[sbt] def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
implicit delegates: Scope => Seq[Scope]
): Option[Scope] =
if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific
else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific
else None

View File

@ -49,8 +49,9 @@ abstract class JLine extends LineReader {
private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] =
if (handleCONT)
Signals.withHandler(() => resume(), signal = Signals.CONT)(() =>
readLineDirectRaw(prompt, mask))
Signals.withHandler(() => resume(), signal = Signals.CONT)(
() => readLineDirectRaw(prompt, mask)
)
else
readLineDirectRaw(prompt, mask)

View File

@ -91,7 +91,8 @@ object JLineCompletion {
def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add
def customCompletor(
f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean =
f: (String, Int) => (Seq[String], Seq[String])
): (ConsoleReader, Int) => Boolean =
(reader, level) => {
val success = complete(beforeCursor(reader), reader => f(reader, level), reader)
reader.flush()

View File

@ -275,8 +275,10 @@ object Parser extends ParserMain {
revAcc: List[T]
): Parser[Seq[T]] = {
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
assume(max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
assume(
max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")"
)
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
repeated match {
@ -836,10 +838,12 @@ private final class ParserWithExamples[T](
) extends ValidParser[T] {
def derive(c: Char) =
examples(delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples)
examples(
delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples
)
def result = delegate.result

View File

@ -44,7 +44,8 @@ trait Parsers {
/** Parses a single hexadecimal digit (0-9, a-f, A-F). */
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(
_.toString)
_.toString
)
/** Parses a single letter, according to Char.isLetter, into a Char. */
lazy val Letter = charClass(_.isLetter, "letter")

View File

@ -14,7 +14,8 @@ object DefaultParsersSpec extends Properties("DefaultParsers") {
import DefaultParsers.{ ID, isIDChar, matches, validID }
property("∀ s ∈ String: validID(s) == matches(ID, s)") = forAll(
(s: String) => validID(s) == matches(ID, s))
(s: String) => validID(s) == matches(ID, s)
)
property("∀ s ∈ genID: matches(ID, s)") = forAll(genID)(s => matches(ID, s))
property("∀ s ∈ genID: validID(s)") = forAll(genID)(s => validID(s))

View File

@ -27,7 +27,8 @@ class ParserWithExamplesTest extends UnitSpec {
Set(
suggestion("blue"),
suggestion("red")
))
)
)
parserWithExamples.completions(0) shouldEqual validCompletions
}
}
@ -38,7 +39,8 @@ class ParserWithExamplesTest extends UnitSpec {
val derivedCompletions = Completions(
Set(
suggestion("lue")
))
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
@ -58,7 +60,8 @@ class ParserWithExamplesTest extends UnitSpec {
Set(
suggestion("lue"),
suggestion("lock")
))
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}

View File

@ -20,25 +20,30 @@ final class Console(compiler: AnalyzingCompiler) {
def apply(classpath: Seq[File], log: Logger): Try[Unit] =
apply(classpath, Nil, "", "", log)
def apply(classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String,
log: Logger): Try[Unit] =
def apply(
classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String,
log: Logger
): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log)
def apply(classpath: Seq[File],
options: Seq[String],
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
def apply(
classpath: Seq[File],
options: Seq[String],
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String
)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings)
def apply(classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(
implicit log: Logger): Try[Unit] = {
def apply(
classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
def console0() =
compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings)
// TODO: Fix JLine

View File

@ -20,18 +20,24 @@ import sbt.internal.util.ManagedLogger
object Doc {
import RawCompileLike._
def scaladoc(label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler): Gen =
def scaladoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler
): Gen =
scaladoc(label, cacheStoreFactory, compiler, Seq())
def scaladoc(label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler,
fileInputOptions: Seq[String]): Gen =
cached(cacheStoreFactory,
fileInputOptions,
prepare(label + " Scala API documentation", compiler.doc))
def scaladoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler,
fileInputOptions: Seq[String]
): Gen =
cached(
cacheStoreFactory,
fileInputOptions,
prepare(label + " Scala API documentation", compiler.doc)
)
@deprecated("Going away", "1.1.1")
def javadoc(

View File

@ -30,29 +30,37 @@ object DotGraph {
val toString = packageOnly compose fToString(sourceRoots)
apply(relations, outputDirectory, toString, toString)
}
def apply(relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String): Unit = {
def apply(
relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String
): Unit = {
def file(name: String) = new File(outputDir, name)
IO.createDirectory(outputDir)
generateGraph(file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String])
generateGraph(file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString)
generateGraph(
file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String]
)
generateGraph(
file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString
)
}
def generateGraph[K, V](file: File,
graphName: String,
relation: Relation[K, V],
keyToString: K => String,
valueToString: V => String): Unit = {
def generateGraph[K, V](
file: File,
graphName: String,
relation: Relation[K, V],
keyToString: K => String,
valueToString: V => String
): Unit = {
import scala.collection.mutable.{ HashMap, HashSet }
val mappedGraph = new HashMap[String, HashSet[String]]
for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values)

View File

@ -20,13 +20,15 @@ import sbt.protocol.testing._
import sbt.internal.util.ConsoleAppender
private[sbt] object ForkTests {
def apply(runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
config: Execution,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
tag: Tag): Task[TestOutput] = {
def apply(
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
config: Execution,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
tag: Tag
): Task[TestOutput] = {
val opts = processOptions(config, tests, log)
import std.TaskExtra._
@ -43,12 +45,14 @@ private[sbt] object ForkTests {
}
}
private[this] def mainTestTask(runners: Map[TestFramework, Runner],
opts: ProcessedOptions,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
parallel: Boolean): Task[TestOutput] =
private[this] def mainTestTask(
runners: Map[TestFramework, Runner],
opts: ProcessedOptions,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
parallel: Boolean
): Task[TestOutput] =
std.TaskExtra.task {
val server = new ServerSocket(0)
val testListeners = opts.testListeners flatMap {
@ -68,7 +72,8 @@ private[sbt] object ForkTests {
} catch {
case e: java.net.SocketException =>
log.error(
"Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage)
"Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage
)
log.trace(e)
server.close()
return
@ -82,12 +87,14 @@ private[sbt] object ForkTests {
val config = new ForkConfiguration(ConsoleAppender.formatEnabledInEnv, parallel)
os.writeObject(config)
val taskdefs = opts.tests.map(
t =>
new TaskDef(t.name,
forkFingerprint(t.fingerprint),
t.explicitlySpecified,
t.selectors))
val taskdefs = opts.tests.map { t =>
new TaskDef(
t.name,
forkFingerprint(t.fingerprint),
t.explicitlySpecified,
t.selectors
)
}
os.writeObject(taskdefs.toArray)
os.writeInt(runners.size)
@ -117,20 +124,27 @@ private[sbt] object ForkTests {
val acceptorThread = new Thread(Acceptor)
acceptorThread.start()
val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain],
IO.classLocationFile[Framework])
val options = Seq("-classpath",
fullCp mkString File.pathSeparator,
classOf[ForkMain].getCanonicalName,
server.getLocalPort.toString)
val fullCp = classpath ++: Seq(
IO.classLocationFile[ForkMain],
IO.classLocationFile[Framework]
)
val options = Seq(
"-classpath",
fullCp mkString File.pathSeparator,
classOf[ForkMain].getCanonicalName,
server.getLocalPort.toString
)
val ec = Fork.java(fork, options)
val result =
if (ec != 0)
TestOutput(TestResult.Error,
Map(
"Running java with options " + options
.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error),
Iterable.empty)
TestOutput(
TestResult.Error,
Map(
"Running java with options " + options
.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error
),
Iterable.empty
)
else {
// Need to wait acceptor thread to finish its business
acceptorThread.join()
@ -151,11 +165,13 @@ private[sbt] object ForkTests {
case _ => sys.error("Unknown fingerprint type: " + f.getClass)
}
}
private final class React(is: ObjectInputStream,
os: ObjectOutputStream,
log: Logger,
listeners: Seq[TestReportListener],
results: mutable.Map[String, SuiteResult]) {
private final class React(
is: ObjectInputStream,
os: ObjectOutputStream,
log: Logger,
listeners: Seq[TestReportListener],
results: mutable.Map[String, SuiteResult]
) {
import ForkTags._
@annotation.tailrec
def react(): Unit = is.readObject match {

View File

@ -49,9 +49,11 @@ object Package {
}
}
final class Configuration(val sources: Seq[(File, String)],
val jar: File,
val options: Seq[PackageOption])
final class Configuration(
val sources: Seq[(File, String)],
val jar: File,
val options: Seq[PackageOption]
)
def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = {
val manifest = new Manifest
val main = manifest.getMainAttributes
@ -65,9 +67,9 @@ object Package {
}
setVersion(main)
type Inputs = Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil
val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") {
(inChanged,
inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) =>
(inChanged, inputs: Inputs) =>
import exists.format
val sources :+: _ :+: manifest :+: HNil = inputs
inputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) =>
@ -95,11 +97,13 @@ object Package {
val attribVals = Seq(name, version, orgName)
ManifestAttributes(attribKeys zip attribVals: _*)
}
def addImplManifestAttributes(name: String,
version: String,
homepage: Option[java.net.URL],
org: String,
orgName: String): PackageOption = {
def addImplManifestAttributes(
name: String,
version: String,
homepage: Option[java.net.URL],
org: String,
orgName: String
): PackageOption = {
import Attributes.Name._
// The ones in Attributes.Name are deprecated saying:

View File

@ -47,16 +47,19 @@ object RawCompileLike {
def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen =
cached(cacheStoreFactory, Seq(), doCompile)
def cached(cacheStoreFactory: CacheStoreFactory,
fileInputOpts: Seq[String],
doCompile: Gen): Gen =
def cached(
cacheStoreFactory: CacheStoreFactory,
fileInputOpts: Seq[String],
doCompile: Gen
): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
type Inputs =
FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[
String] :+: Int :+: HNil
FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+:
Seq[String] :+: Int :+: HNil
val inputs
: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(
classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
classpath.toSet
) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory make "output") {
(outChanged, outputs: FilesInfo[PlainFileInfo]) =>
@ -92,10 +95,12 @@ object RawCompileLike {
compiler(sources, classpath, outputDirectory, options)
}
def compile(label: String,
cacheStoreFactory: CacheStoreFactory,
instance: ScalaInstance,
cpOptions: ClasspathOptions): Gen =
def compile(
label: String,
cacheStoreFactory: CacheStoreFactory,
instance: ScalaInstance,
cpOptions: ClasspathOptions
): Gen =
cached(cacheStoreFactory, prepare(label + " sources", rawCompile(instance, cpOptions)))
val nop: Gen = (_, _, _, _, _, _) => ()

View File

@ -85,8 +85,10 @@ object Sync {
sys.error("Duplicate mappings:" + dups.mkString)
}
implicit def relationFormat[A, B](implicit af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]): JsonFormat[Relation[A, B]] =
implicit def relationFormat[A, B](
implicit af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]
): JsonFormat[Relation[A, B]] =
new JsonFormat[Relation[A, B]] {
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Relation[A, B] =
jsOpt match {
@ -109,15 +111,18 @@ object Sync {
}
def writeInfo[F <: FileInfo](store: CacheStore,
relation: Relation[File, File],
info: Map[File, F])(implicit infoFormat: JsonFormat[F]): Unit =
def writeInfo[F <: FileInfo](
store: CacheStore,
relation: Relation[File, File],
info: Map[File, F]
)(implicit infoFormat: JsonFormat[F]): Unit =
store.write((relation, info))
type RelationInfo[F] = (Relation[File, File], Map[File, F])
def readInfo[F <: FileInfo](store: CacheStore)(
implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
def readInfo[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
try { readUncaught[F](store)(infoFormat) } catch {
case _: IOException => (Relation.empty[File, File], Map.empty[File, F])
case _: ZipException => (Relation.empty[File, File], Map.empty[File, F])
@ -128,7 +133,8 @@ object Sync {
}
}
private def readUncaught[F <: FileInfo](store: CacheStore)(
implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
private def readUncaught[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
}

View File

@ -31,13 +31,17 @@ trait TestResultLogger {
def run(log: Logger, results: Output, taskName: String): Unit
/** Only allow invocation if certain criteria is met, else use another `TestResultLogger` (defaulting to nothing) . */
final def onlyIf(f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null) =
final def onlyIf(
f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null
) =
TestResultLogger.choose(f, this, otherwise)
/** Allow invocation unless a certain predicate passes, in which case use another `TestResultLogger` (defaulting to nothing) . */
final def unless(f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null) =
final def unless(
f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null
) =
TestResultLogger.choose(f, otherwise, this)
}
@ -69,8 +73,10 @@ object TestResultLogger {
* @param f The `TestResultLogger` to choose if the predicate fails.
*/
def choose(cond: (Output, String) => Boolean, t: TestResultLogger, f: TestResultLogger) =
TestResultLogger((log, results, taskName) =>
(if (cond(results, taskName)) t else f).run(log, results, taskName))
TestResultLogger(
(log, results, taskName) =>
(if (cond(results, taskName)) t else f).run(log, results, taskName)
)
/** Transforms the input to be completely silent when the subject module doesn't contain any tests. */
def silenceWhenNoTests(d: Defaults.Main) =
@ -127,35 +133,39 @@ object TestResultLogger {
results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.isEmpty)
val printStandard = TestResultLogger((log, results, _) => {
val (skippedCount,
errorsCount,
passedCount,
failuresCount,
ignoredCount,
canceledCount,
pendingCount,
val (
skippedCount,
errorsCount,
passedCount,
failuresCount,
ignoredCount,
canceledCount,
pendingCount,
) =
results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) {
case (acc, (_, testEvent)) =>
val (skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc) =
acc
(skippedAcc + testEvent.skippedCount,
errorAcc + testEvent.errorCount,
passedAcc + testEvent.passedCount,
failureAcc + testEvent.failureCount,
ignoredAcc + testEvent.ignoredCount,
canceledAcc + testEvent.canceledCount,
pendingAcc + testEvent.pendingCount,
(
skippedAcc + testEvent.skippedCount,
errorAcc + testEvent.errorCount,
passedAcc + testEvent.passedCount,
failureAcc + testEvent.failureCount,
ignoredAcc + testEvent.ignoredCount,
canceledAcc + testEvent.canceledCount,
pendingAcc + testEvent.pendingCount,
)
}
val totalCount = failuresCount + errorsCount + skippedCount + passedCount
val base =
s"Total $totalCount, Failed $failuresCount, Errors $errorsCount, Passed $passedCount"
val otherCounts = Seq("Skipped" -> skippedCount,
"Ignored" -> ignoredCount,
"Canceled" -> canceledCount,
"Pending" -> pendingCount)
val otherCounts = Seq(
"Skipped" -> skippedCount,
"Ignored" -> ignoredCount,
"Canceled" -> canceledCount,
"Pending" -> pendingCount
)
val extra = otherCounts.filter(_._2 > 0).map { case (label, count) => s", $label $count" }
val postfix = base + extra.mkString
@ -184,6 +194,7 @@ object TestResultLogger {
})
val printNoTests = TestResultLogger(
(log, results, taskName) => log.info("No tests to run for " + taskName))
(log, results, taskName) => log.info("No tests to run for " + taskName)
)
}
}

View File

@ -44,9 +44,11 @@ object Tests {
* @param events The result of each test group (suite) executed during this test run.
* @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated.
*/
final case class Output(overall: TestResult,
events: Map[String, SuiteResult],
summaries: Iterable[Summary])
final case class Output(
overall: TestResult,
events: Map[String, SuiteResult],
summaries: Iterable[Summary]
)
/**
* Summarizes a test run.
@ -138,9 +140,11 @@ object Tests {
val cleanup: Vector[ClassLoader => Unit],
val testListeners: Vector[TestReportListener]
)
private[sbt] def processOptions(config: Execution,
discovered: Vector[TestDefinition],
log: Logger): ProcessedOptions = {
private[sbt] def processOptions(
config: Execution,
discovered: Vector[TestDefinition],
log: Logger
): ProcessedOptions = {
import collection.mutable.{ HashSet, ListBuffer }
val testFilters = new ListBuffer[String => Boolean]
var orderedFilters = Seq[String => Boolean]()
@ -168,7 +172,8 @@ object Tests {
if (undefinedFrameworks.nonEmpty)
log.warn(
"Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks
.mkString("\n\t"))
.mkString("\n\t")
)
def includeTest(test: TestDefinition) =
!excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name))
@ -177,10 +182,12 @@ object Tests {
if (orderedFilters.isEmpty) filtered0
else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct
val uniqueTests = distinctBy(tests)(_.name)
new ProcessedOptions(uniqueTests.toVector,
setup.toVector,
cleanup.toVector,
testListeners.toVector)
new ProcessedOptions(
uniqueTests.toVector,
setup.toVector,
cleanup.toVector,
testListeners.toVector
)
}
private[this] def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] = {
@ -188,33 +195,39 @@ object Tests {
in.filter(t => seen.add(f(t)))
}
def apply(frameworks: Map[TestFramework, Framework],
testLoader: ClassLoader,
runners: Map[TestFramework, Runner],
discovered: Vector[TestDefinition],
config: Execution,
log: ManagedLogger): Task[Output] = {
def apply(
frameworks: Map[TestFramework, Framework],
testLoader: ClassLoader,
runners: Map[TestFramework, Runner],
discovered: Vector[TestDefinition],
config: Execution,
log: ManagedLogger
): Task[Output] = {
val o = processOptions(config, discovered, log)
testTask(testLoader,
frameworks,
runners,
o.tests,
o.setup,
o.cleanup,
log,
o.testListeners,
config)
testTask(
testLoader,
frameworks,
runners,
o.tests,
o.setup,
o.cleanup,
log,
o.testListeners,
config
)
}
def testTask(loader: ClassLoader,
frameworks: Map[TestFramework, Framework],
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
userSetup: Iterable[ClassLoader => Unit],
userCleanup: Iterable[ClassLoader => Unit],
log: ManagedLogger,
testListeners: Vector[TestReportListener],
config: Execution): Task[Output] = {
def testTask(
loader: ClassLoader,
frameworks: Map[TestFramework, Framework],
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
userSetup: Iterable[ClassLoader => Unit],
userCleanup: Iterable[ClassLoader => Unit],
log: ManagedLogger,
testListeners: Vector[TestReportListener],
config: Execution
): Task[Output] = {
def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*)
def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () =>
a(loader)
@ -239,31 +252,43 @@ object Tests {
}
type TestRunnable = (String, TestFunction)
private def createNestedRunnables(loader: ClassLoader,
testFun: TestFunction,
nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] =
private def createNestedRunnables(
loader: ClassLoader,
testFun: TestFunction,
nestedTasks: Seq[TestTask]
): Seq[(String, TestFunction)] =
nestedTasks.view.zipWithIndex map {
case (nt, idx) =>
val testFunDef = testFun.taskDef
(testFunDef.fullyQualifiedName,
TestFramework.createTestFunction(loader,
new TaskDef(testFunDef.fullyQualifiedName + "-" + idx,
testFunDef.fingerprint,
testFunDef.explicitlySpecified,
testFunDef.selectors),
testFun.runner,
nt))
(
testFunDef.fullyQualifiedName,
TestFramework.createTestFunction(
loader,
new TaskDef(
testFunDef.fullyQualifiedName + "-" + idx,
testFunDef.fingerprint,
testFunDef.explicitlySpecified,
testFunDef.selectors
),
testFun.runner,
nt
)
)
}
def makeParallel(loader: ClassLoader,
runnables: Iterable[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] =
def makeParallel(
loader: ClassLoader,
runnables: Iterable[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] =
toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks)
def toTasks(loader: ClassLoader,
runnables: Seq[TestRunnable],
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
def toTasks(
loader: ClassLoader,
runnables: Seq[TestRunnable],
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) }
tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) {
case (sum, e) =>
@ -275,10 +300,12 @@ object Tests {
})
}
def toTask(loader: ClassLoader,
name: String,
fun: TestFunction,
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
def toTask(
loader: ClassLoader,
name: String,
fun: TestFunction,
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val base = task { (name, fun.apply()) }
val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
taggedBase flatMap {
@ -310,8 +337,10 @@ object Tests {
setupTasks: Task[Unit],
): Task[List[(String, SuiteResult)]] = {
@tailrec
def processRunnable(runnableList: List[TestRunnable],
acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] =
def processRunnable(
runnableList: List[TestRunnable],
acc: List[(String, SuiteResult)]
): List[(String, SuiteResult)] =
runnableList match {
case hd :: rst =>
val testFun = hd._2
@ -361,9 +390,11 @@ object Tests {
((TestResult.Passed: TestResult) /: results) { (acc, result) =>
if (severity(acc) < severity(result)) result else acc
}
def discover(frameworks: Seq[Framework],
analysis: CompileAnalysis,
log: Logger): (Seq[TestDefinition], Set[String]) =
def discover(
frameworks: Seq[Framework],
analysis: CompileAnalysis,
log: Logger
): (Seq[TestDefinition], Set[String]) =
discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log)
def allDefs(analysis: CompileAnalysis) = analysis match {
@ -379,9 +410,11 @@ object Tests {
all
}.toSeq
}
def discover(fingerprints: Seq[Fingerprint],
definitions: Seq[Definition],
log: Logger): (Seq[TestDefinition], Set[String]) = {
def discover(
fingerprints: Seq[Fingerprint],
definitions: Seq[Definition],
log: Logger
): (Seq[TestDefinition], Set[String]) = {
val subclasses = fingerprints collect {
case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub)
};
@ -392,9 +425,11 @@ object Tests {
log.debug("Annotation fingerprints: " + annotations)
def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet
def defined(in: Seq[(String, Boolean, Fingerprint)],
names: Set[String],
IsModule: Boolean): Seq[Fingerprint] =
def defined(
in: Seq[(String, Boolean, Fingerprint)],
names: Set[String],
IsModule: Boolean
): Seq[Fingerprint] =
in collect { case (name, IsModule, print) if names(name) => print }
def toFingerprints(d: Discovered): Seq[Fingerprint] =

View File

@ -33,10 +33,12 @@ final class EvalImports(val strings: Seq[(String, Int)], val srcName: String)
* the module from that class loader. `generated` contains the compiled classes and cache files related
* to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`.
*/
final class EvalResult(val tpe: String,
val getValue: ClassLoader => Any,
val generated: Seq[File],
val enclosingModule: String)
final class EvalResult(
val tpe: String,
val getValue: ClassLoader => Any,
val generated: Seq[File],
val enclosingModule: String
)
/**
* The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated,
@ -45,10 +47,12 @@ final class EvalResult(val tpe: String,
* from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`.
* The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`.
*/
final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String,
val valNames: Seq[String]) {
final class EvalDefinitions(
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String,
val valNames: Seq[String]
) {
def values(parent: ClassLoader): Seq[Any] = {
val module = getModule(enclosingModule, loader(parent))
for (n <- valNames) yield module.getClass.getMethod(n).invoke(module)
@ -57,10 +61,12 @@ final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
final class EvalException(msg: String) extends RuntimeException(msg)
// not thread safe, since it reuses a Global instance
final class Eval(optionsNoncp: Seq[String],
classpath: Seq[File],
mkReporter: Settings => Reporter,
backing: Option[File]) {
final class Eval(
optionsNoncp: Seq[String],
classpath: Seq[File],
mkReporter: Settings => Reporter,
backing: Option[File]
) {
def this(mkReporter: Settings => Reporter, backing: Option[File]) =
this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing)
def this() = this(s => new ConsoleReporter(s), None)
@ -96,11 +102,13 @@ final class Eval(optionsNoncp: Seq[String],
private[this] var toUnlinkLater = List[Symbol]()
private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)
def eval(expression: String,
imports: EvalImports = noImports,
tpeName: Option[String] = None,
srcName: String = "<setting>",
line: Int = DefaultStartLine): EvalResult = {
def eval(
expression: String,
imports: EvalImports = noImports,
tpeName: Option[String] = None,
srcName: String = "<setting>",
line: Int = DefaultStartLine
): EvalResult = {
val ev = new EvalType[String] {
def makeUnit = mkUnit(srcName, line, expression)
def unlink = true
@ -120,11 +128,13 @@ final class Eval(optionsNoncp: Seq[String],
val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl))
new EvalResult(i.extra, value, i.generated, i.enclosingModule)
}
def evalDefinitions(definitions: Seq[(String, scala.Range)],
imports: EvalImports,
srcName: String,
file: Option[File],
valTypes: Seq[String]): EvalDefinitions = {
def evalDefinitions(
definitions: Seq[(String, scala.Range)],
imports: EvalImports,
srcName: String,
file: Option[File],
valTypes: Seq[String]
): EvalDefinitions = {
require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.")
val ev = new EvalType[Seq[String]] {
lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions)
@ -151,20 +161,27 @@ final class Eval(optionsNoncp: Seq[String],
new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra)
}
private[this] def evalCommon[T](content: Seq[String],
imports: EvalImports,
tpeName: Option[String],
ev: EvalType[T]): EvalIntermediate[T] = {
private[this] def evalCommon[T](
content: Seq[String],
imports: EvalImports,
tpeName: Option[String],
ev: EvalType[T]
): EvalIntermediate[T] = {
import Eval._
// TODO - We also encode the source of the setting into the hash to avoid conflicts where the exact SAME setting
// is defined in multiple evaluated instances with a backing. This leads to issues with finding a previous
// value on the classpath when compiling.
val hash = Hash.toHex(
Hash(bytes(
stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) ::
seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) :: optBytes(
tpeName)(bytes) ::
bytes(ev.extraHash) :: Nil)))
Hash(
bytes(
stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) ::
seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) :: optBytes(
tpeName
)(bytes) ::
bytes(ev.extraHash) :: Nil
)
)
)
val moduleName = makeModuleName(hash)
lazy val unit = {
@ -192,12 +209,14 @@ final class Eval(optionsNoncp: Seq[String],
// location of the cached type or definition information
private[this] def cacheFile(base: File, moduleName: String): File =
new File(base, moduleName + ".cache")
private[this] def compileAndLoad[T](run: Run,
unit: CompilationUnit,
imports: EvalImports,
backing: Option[File],
moduleName: String,
ev: EvalType[T]): (T, ClassLoader => ClassLoader) = {
private[this] def compileAndLoad[T](
run: Run,
unit: CompilationUnit,
imports: EvalImports,
backing: Option[File],
moduleName: String,
ev: EvalType[T]
): (T, ClassLoader => ClassLoader) = {
global.curRun = run
run.currentUnit = unit
val dir = outputDirectory(backing)
@ -242,18 +261,22 @@ final class Eval(optionsNoncp: Seq[String],
parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))
//wrap tree in object objectName { def WrapValName = <tree> }
def augment(parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
tree: Tree,
tpt: Tree,
objectName: String): Tree = {
def augment(
parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
tree: Tree,
tpt: Tree,
objectName: String
): Tree = {
val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree)
syntheticModule(parser, imports, method :: Nil, objectName)
}
private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
definitions: List[Tree],
objectName: String): Tree = {
private[this] def syntheticModule(
parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
definitions: List[Tree],
objectName: String
): Tree = {
val emptyTypeName = nme.EMPTY.toTypeName
def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
@ -262,8 +285,10 @@ final class Eval(optionsNoncp: Seq[String],
Nil,
List(Nil),
TypeTree(),
Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
Literal(Constant(())))
Block(
List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
Literal(Constant(()))
)
)
def moduleBody = Template(List(gen.scalaAnyRefConstr), noSelfType, emptyInit :: definitions)
@ -301,10 +326,12 @@ final class Eval(optionsNoncp: Seq[String],
private[this] def isTopLevelModule(s: Symbol): Boolean =
s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass
private[this] final class EvalIntermediate[T](val extra: T,
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String)
private[this] final class EvalIntermediate[T](
val extra: T,
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String
)
private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists
// TODO: use the code from Analyzer
@ -318,10 +345,12 @@ final class Eval(optionsNoncp: Seq[String],
(s contains moduleName)
}
private[this] class ParseErrorStrings(val base: String,
val extraBlank: String,
val missingBlank: String,
val extraSemi: String)
private[this] class ParseErrorStrings(
val base: String,
val extraBlank: String,
val missingBlank: String,
val extraSemi: String
)
private[this] def definitionErrorStrings = new ParseErrorStrings(
base = "Error parsing definition.",
extraBlank = " Ensure that there are no blank lines within a definition.",
@ -340,9 +369,11 @@ final class Eval(optionsNoncp: Seq[String],
* Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state
* to catch errors that are common when the content is embedded in a blank-line-delimited format.
*/
private[this] def parse[T](unit: CompilationUnit,
errors: ParseErrorStrings,
f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = {
private[this] def parse[T](
unit: CompilationUnit,
errors: ParseErrorStrings,
f: syntaxAnalyzer.UnitParser => T
): (syntaxAnalyzer.UnitParser, T) = {
val parser = new syntaxAnalyzer.UnitParser(unit)
val tree = f(parser)
@ -443,7 +474,8 @@ final class Eval(optionsNoncp: Seq[String],
*/
private[this] def mkDefsUnit(
srcName: String,
definitions: Seq[(String, scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = {
definitions: Seq[(String, scala.Range)]
): (CompilationUnit, Seq[CompilationUnit]) = {
def fragmentUnit(content: String, lineMap: Array[Int]) =
new CompilationUnit(fragmentSourceFile(srcName, content, lineMap))
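
For illustration only (not part of this commit): the constructors and the eval signature reformatted above are typically driven as follows. The enclosing package/import for Eval is assumed here, and scala-compiler must be on the classpath.

// Hedged sketch: evaluate an expression and load its value, per the signatures above.
val eval = new Eval() // auxiliary constructor: ConsoleReporter, no backing directory
val result: EvalResult = eval.eval("1 + 1", tpeName = Some("Int"))
val value = result.getValue(getClass.getClassLoader) // instantiate the generated module under this parent loader
println(s"${result.tpe} = $value") // expected to print something like: Int = 2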

View File

@ -37,19 +37,21 @@ class CacheIvyTest extends Properties("CacheIvy") {
content = converter.toJsonUnsafe(value)
}
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)(
implicit cache: SingletonCache[T]): U = {
private def testCache[T: JsonFormat, U](
f: (SingletonCache[T], CacheStore) => U
)(implicit cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter)
f(cache, store)
}
private def cachePreservesEquality[T: JsonFormat](m: T,
eq: (T, T) => Prop,
str: T => String): Prop = testCache[T, Prop] {
(cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
private def cachePreservesEquality[T: JsonFormat](
m: T,
eq: (T, T) => Prop,
str: T => String
): Prop = testCache[T, Prop] { (cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
}
implicit val arbConfigRef: Arbitrary[ConfigRef] = Arbitrary(

View File

@ -38,7 +38,8 @@ class EvalTest extends Properties("eval") {
val line = math.abs(l)
val src = "mismatch"
throws(classOf[RuntimeException])(
eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) &&
eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)
) &&
hasErrors(line + 1, src)
}
@ -78,14 +79,17 @@ val p = {
property("explicit import") = forAll(testImport("import math.abs" :: Nil))
property("wildcard import") = forAll(testImport("import math._" :: Nil))
property("comma-separated imports") = forAll(
testImport("import annotation._, math._, meta._" :: Nil))
testImport("import annotation._, math._, meta._" :: Nil)
)
property("multiple imports") = forAll(
testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil))
testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil)
)
private[this] def testImport(imports: Seq[String]): Int => Prop =
i =>
value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs(
i)
i
)
private[this] def local(i: Int) = "{ class ETest(val i: Int); new ETest(" + i + ") }"
val LocalType = "AnyRef{val i: Int}"

View File

@ -21,8 +21,10 @@ object BasicCommandStrings {
val TerminateAction: String = Exit
def helpBrief =
(HelpCommand,
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>').")
(
HelpCommand,
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>')."
)
def helpDetailed = s"""$HelpCommand
Prints a help summary.
@ -131,8 +133,10 @@ $HelpCommand <regular expression>
def Multi = ";"
def MultiBrief =
(Multi + " <command> (" + Multi + " <command>)*",
"Runs the provided semicolon-separated commands.")
(
Multi + " <command> (" + Multi + " <command>)*",
"Runs the provided semicolon-separated commands."
)
def MultiDetailed =
Multi + " command1 " + Multi + """ command2 ...
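
The ';' separator documented by MultiBrief above is also what command aliases expand to; a minimal build.sbt illustration (not part of this commit):

// One alias that expands to several semicolon-separated commands.
addCommandAlias("verify", "; clean; compile; test")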

View File

@ -80,7 +80,8 @@ object BasicCommands {
val h = (Help.empty /: s.definedCommands)(
(a, b) =>
a ++ (try b.help(s)
catch { case NonFatal(_) => Help.empty }))
catch { case NonFatal(_) => Help.empty })
)
val helpCommands = h.detail.keySet
val spacedArg = singleArgument(helpCommands).?
applyEffect(spacedArg)(runHelp(s, h))
@ -95,7 +96,8 @@ object BasicCommands {
def completionsCommand: Command =
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(_ => completionsParser)(
runCompletions(_)(_))
runCompletions(_)(_)
)
@deprecated("No longer public", "1.1.1")
def completionsParser(state: State): Parser[String] = completionsParser
@ -118,8 +120,9 @@ object BasicCommands {
def multiParser(s: State): Parser[List[String]] = {
val nonSemi = token(charClass(_ != ';').+, hide = const(true))
val semi = token(';' ~> OptSpace)
val part = semi flatMap (_ =>
matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace))
val part = semi flatMap (
_ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace)
)
(part map (_.trim)).+ map (_.toList)
}
@ -135,16 +138,19 @@ object BasicCommands {
matched(s.combinedParser | token(any, hide = const(true)))
def ifLast: Command =
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)((s, arg) =>
if (s.remainingCommands.isEmpty) arg :: s else s)
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)(
(s, arg) => if (s.remainingCommands.isEmpty) arg :: s else s
)
def append: Command =
Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser)(
(s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source)))
(s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source))
)
def setOnFailure: Command =
Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)((s, arg) =>
s.copy(onFailure = Some(Exec(arg, s.source))))
Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)(
(s, arg) => s.copy(onFailure = Some(Exec(arg, s.source)))
)
private[sbt] def compatCommands = Seq(
Command.command(Compat.ClearOnFailure) { s =>
@ -154,7 +160,8 @@ object BasicCommands {
Command.arb(
s =>
token(Compat.OnFailure, hide = const(true))
.flatMap(_ => otherCommandParser(s))) { (s, arg) =>
.flatMap(_ => otherCommandParser(s))
) { (s, arg) =>
s.log.warn(Compat.OnFailureDeprecated)
s.copy(onFailure = Some(Exec(arg, s.source)))
},
@ -167,8 +174,9 @@ object BasicCommands {
def clearOnFailure: Command = Command.command(ClearOnFailure)(s => s.copy(onFailure = None))
def stashOnFailure: Command =
Command.command(StashOnFailure)(s =>
s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten))
Command.command(StashOnFailure)(
s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten)
)
def popOnFailure: Command = Command.command(PopOnFailure) { s =>
val stack = s.get(OnFailureStack).getOrElse(Nil)
@ -213,8 +221,9 @@ object BasicCommands {
private[this] def className: Parser[String] = {
val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.")
def single(s: String) = Completions.single(Completion.displayOnly(s))
val compl = TokenCompletions.fixed((seen, _) =>
if (seen.startsWith("-")) Completions.nil else single("<class name>"))
val compl = TokenCompletions.fixed(
(seen, _) => if (seen.startsWith("-")) Completions.nil else single("<class name>")
)
token(base, compl)
}
@ -402,7 +411,8 @@ object BasicCommands {
}
def delegateToAlias(name: String, orElse: Parser[() => State])(
state: State): Parser[() => State] =
state: State
): Parser[() => State] =
aliases(state, (nme, _) => nme == name).headOption match {
case None => orElse
case Some((n, v)) => aliasBody(n, v)(state)

View File

@ -18,11 +18,13 @@ object BasicKeys {
val historyPath = AttributeKey[Option[File]](
"history",
"The location where command line history is persisted.",
40)
40
)
val shellPrompt = AttributeKey[State => String](
"shell-prompt",
"The function that constructs the command prompt from the current build state.",
10000)
10000
)
val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000)
val serverPort =
AttributeKey[Int]("server-port", "The port number used by server command.", 10000)
@ -31,25 +33,32 @@ object BasicKeys {
AttributeKey[String]("serverHost", "The host used by server command.", 10000)
val serverAuthentication =
AttributeKey[Set[ServerAuthentication]]("serverAuthentication",
"Method of authenticating server command.",
10000)
AttributeKey[Set[ServerAuthentication]](
"serverAuthentication",
"Method of authenticating server command.",
10000
)
val serverConnectionType =
AttributeKey[ConnectionType]("serverConnectionType",
"The wire protocol for the server command.",
10000)
AttributeKey[ConnectionType](
"serverConnectionType",
"The wire protocol for the server command.",
10000
)
val fullServerHandlers =
AttributeKey[Seq[ServerHandler]]("fullServerHandlers",
"Combines default server handlers and user-defined handlers.",
10000)
AttributeKey[Seq[ServerHandler]](
"fullServerHandlers",
"Combines default server handlers and user-defined handlers.",
10000
)
val autoStartServer =
AttributeKey[Boolean](
"autoStartServer",
"If true, the sbt server will startup automatically during interactive sessions.",
10000)
10000
)
// Unlike other BasicKeys, this is not used directly as a setting key,
// and severLog / logLevel is used instead.
@ -62,23 +71,28 @@ object BasicKeys {
private[sbt] val interactive = AttributeKey[Boolean](
"interactive",
"True if commands are currently being entered from an interactive environment.",
10)
10
)
private[sbt] val classLoaderCache = AttributeKey[ClassLoaderCache](
"class-loader-cache",
"Caches class loaders based on the classpath entries and last modified times.",
10)
10
)
private[sbt] val OnFailureStack = AttributeKey[List[Option[Exec]]](
"on-failure-stack",
"Stack that remembers on-failure handlers.",
10)
10
)
private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean](
"explicit-global-log-levels",
"True if the global logging levels were explicitly set by the user.",
10)
10
)
private[sbt] val templateResolverInfos = AttributeKey[Seq[TemplateResolverInfo]](
"templateResolverInfos",
"List of template resolver infos.",
1000)
1000
)
}
case class TemplateResolverInfo(module: ModuleID, implementationClass: String)
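
All of the keys above follow the same three-argument AttributeKey pattern (name, description, rank). A hedged sketch of defining and reading such a key from a custom command (the key and command names are invented for illustration):

import sbt._

// Hypothetical key, mirroring the (name, description, rank) calls above.
val lastGreeting =
  AttributeKey[String]("lastGreeting", "Remembers the last greeting printed.", 10000)

val greet = Command.command("greet") { state =>
  val previous = state.get(lastGreeting).getOrElse("<none>") // read the attribute, if present
  state.log.info(s"previous greeting: $previous")
  state.put(lastGreeting, "hello") // returns the updated State
}
// wire it into a build with: commands += greet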

View File

@ -67,18 +67,21 @@ object Command {
new SimpleCommand(name, help, parser, AttributeMap.empty)
def make(name: String, briefHelp: (String, String), detail: String)(
parser: State => Parser[() => State]): Command =
parser: State => Parser[() => State]
): Command =
make(name, Help(name, briefHelp, detail))(parser)
// General command construction
/** Construct a command with the given name, parser and effect. */
def apply[T](name: String, help: Help = Help.empty)(parser: State => Parser[T])(
effect: (State, T) => State): Command =
def apply[T](name: String, help: Help = Help.empty)(
parser: State => Parser[T]
)(effect: (State, T) => State): Command =
make(name, help)(applyEffect(parser)(effect))
def apply[T](name: String, briefHelp: (String, String), detail: String)(
parser: State => Parser[T])(effect: (State, T) => State): Command =
parser: State => Parser[T]
)(effect: (State, T) => State): Command =
apply(name, Help(name, briefHelp, detail))(parser)(effect)
// No-argument command construction
@ -97,18 +100,21 @@ object Command {
make(name, help)(state => token(trimmed(spacedAny(name)) map apply1(f, state)))
def single(name: String, briefHelp: (String, String), detail: String)(
f: (State, String) => State): Command =
f: (State, String) => State
): Command =
single(name, Help(name, briefHelp, detail))(f)
// Multi-argument command construction
/** Construct a multi-argument command with the given name, tab completion display and effect. */
def args(name: String, display: String, help: Help = Help.empty)(
f: (State, Seq[String]) => State): Command =
f: (State, Seq[String]) => State
): Command =
make(name, help)(state => spaceDelimited(display) map apply1(f, state))
def args(name: String, briefHelp: (String, String), detail: String, display: String)(
f: (State, Seq[String]) => State): Command =
f: (State, Seq[String]) => State
): Command =
args(name, display, Help(name, briefHelp, detail))(f)
// create ArbitraryCommand
@ -120,7 +126,8 @@ object Command {
customHelp(parser, const(help))
def arb[T](parser: State => Parser[T], help: Help = Help.empty)(
effect: (State, T) => State): Command =
effect: (State, T) => State
): Command =
custom(applyEffect(parser)(effect), help)
// misc Command object utilities
@ -129,8 +136,9 @@ object Command {
def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = p map (t => () => f(t))
def applyEffect[T](parser: State => Parser[T])(
effect: (State, T) => State): State => Parser[() => State] =
def applyEffect[T](
parser: State => Parser[T]
)(effect: (State, T) => State): State => Parser[() => State] =
s => applyEffect(parser(s))(t => effect(s, t))
def combine(cmds: Seq[Command]): State => Parser[() => State] = {
@ -140,7 +148,8 @@ object Command {
}
private[this] def separateCommands(
cmds: Seq[Command]): (Seq[SimpleCommand], Seq[ArbitraryCommand]) =
cmds: Seq[Command]
): (Seq[SimpleCommand], Seq[ArbitraryCommand]) =
Util.separate(cmds) { case s: SimpleCommand => Left(s); case a: ArbitraryCommand => Right(a) }
private[this] def apply1[A, B, C](f: (A, B) => C, a: A): B => () => C = b => () => f(a, b)
@ -155,13 +164,16 @@ object Command {
}
def simpleParser(
commandMap: Map[String, State => Parser[() => State]]): State => Parser[() => State] =
commandMap: Map[String, State => Parser[() => State]]
): State => Parser[() => State] =
state =>
token(OpOrID examples commandMap.keys.toSet) flatMap (id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
})
token(OpOrID examples commandMap.keys.toSet) flatMap (
id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
}
)
def process(command: String, state: State): State = {
val parser = combine(state.definedCommands)
@ -181,10 +193,12 @@ object Command {
if (suggested.isEmpty) "" else suggested.mkString(" (similar: ", ", ", ")")
}
def suggestions(a: String,
bs: Seq[String],
maxDistance: Int = 3,
maxSuggestions: Int = 3): Seq[String] =
def suggestions(
a: String,
bs: Seq[String],
maxDistance: Int = 3,
maxSuggestions: Int = 3
): Seq[String] =
bs map (b => (b, distance(a, b))) filter (_._2 <= maxDistance) sortBy (_._2) take (maxSuggestions) map (_._1)
def distance(a: String, b: String): Int =
@ -233,9 +247,11 @@ object Help {
def apply(briefHelp: Seq[(String, String)], detailedHelp: Map[String, String]): Help =
apply(briefHelp, detailedHelp, Set.empty[String])
def apply(briefHelp: Seq[(String, String)],
detailedHelp: Map[String, String],
more: Set[String]): Help =
def apply(
briefHelp: Seq[(String, String)],
detailedHelp: Map[String, String],
more: Set[String]
): Help =
new Help0(briefHelp, detailedHelp, more)
def more(name: String, detailedHelp: String): Help =
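
The constructors being reformatted here are the ones plugin authors call directly; a short sketch combining Command.args with an explicit Help entry (names are illustrative, not part of this commit):

import sbt._

val helloHelp =
  Help("hello", ("hello <name>...", "Greets the given names."), "Greets each name passed on the command line.")

val hello = Command.args("hello", "<name>...", helloHelp) { (state, args) =>
  state.log.info("Hello " + (if (args.isEmpty) "world" else args.mkString(", ")))
  state // commands return the (possibly updated) State
}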

View File

@ -12,21 +12,23 @@ import java.io.File
final case class Exit(code: Int) extends xsbti.Exit {
require(code >= 0)
}
final case class Reboot(scalaVersion: String,
argsList: Seq[String],
app: xsbti.ApplicationID,
baseDirectory: File)
extends xsbti.Reboot {
final case class Reboot(
scalaVersion: String,
argsList: Seq[String],
app: xsbti.ApplicationID,
baseDirectory: File
) extends xsbti.Reboot {
def arguments = argsList.toArray
}
final case class ApplicationID(groupID: String,
name: String,
version: String,
mainClass: String,
components: Seq[String],
crossVersionedValue: xsbti.CrossValue,
extra: Seq[File])
extends xsbti.ApplicationID {
final case class ApplicationID(
groupID: String,
name: String,
version: String,
mainClass: String,
components: Seq[String],
crossVersionedValue: xsbti.CrossValue,
extra: Seq[File]
) extends xsbti.ApplicationID {
def mainComponents = components.toArray
def classpathExtra = extra.toArray
def crossVersioned = crossVersionedValue != xsbti.CrossValue.Disabled
@ -35,11 +37,13 @@ object ApplicationID {
def apply(delegate: xsbti.ApplicationID, newVersion: String): ApplicationID =
apply(delegate).copy(version = newVersion)
def apply(delegate: xsbti.ApplicationID): ApplicationID =
ApplicationID(delegate.groupID,
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.crossVersionedValue,
delegate.classpathExtra)
ApplicationID(
delegate.groupID,
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.crossVersionedValue,
delegate.classpathExtra
)
}

View File

@ -287,8 +287,9 @@ object State {
def fail = {
import BasicCommandStrings.Compat.{ FailureWall => CompatFailureWall }
val remaining =
s.remainingCommands.dropWhile(c =>
c.commandLine != FailureWall && c.commandLine != CompatFailureWall)
s.remainingCommands.dropWhile(
c => c.commandLine != FailureWall && c.commandLine != CompatFailureWall
)
if (remaining.isEmpty)
applyOnFailure(s, Nil, exit(ok = false))
else

View File

@ -113,7 +113,8 @@ object Watched {
} catch {
case e: Exception =>
s.log.error(
"Error occurred obtaining files to watch. Terminating continuous execution...")
"Error occurred obtaining files to watch. Terminating continuous execution..."
)
s.handleError(e)
(false, watchState)
}
@ -133,8 +134,10 @@ object Watched {
AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.")
val ContinuousWatchService =
AttributeKey[WatchService]("watch service",
"Internal: tracks watch service for continuous execution.")
AttributeKey[WatchService](
"watch service",
"Internal: tracks watch service for continuous execution."
)
val Configuration =
AttributeKey[Watched]("watched-configuration", "Configures continuous execution.")

View File

@ -40,9 +40,11 @@ private[sbt] object Server {
with TokenFileFormats
object JsonProtocol extends JsonProtocol
def start(connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
log: Logger): ServerInstance =
def start(
connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
log: Logger
): ServerInstance =
new ServerInstance { self =>
import connection._
val running = new AtomicBoolean(false)
@ -67,7 +69,8 @@ private[sbt] object Server {
"socket file absolute path too long; " +
"either switch to another connection type " +
"or define a short \"SBT_GLOBAL_SERVER_DIR\" value. " +
s"Current path: ${path}")
s"Current path: ${path}"
)
tryClient(new UnixDomainSocket(path))
prepareSocketfile()
addServerError(new UnixDomainServerSocket(path))

View File

@ -34,14 +34,18 @@ object ServerHandler {
})
}
final class ServerIntent(val onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]) {
final class ServerIntent(
val onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]
) {
override def toString: String = s"ServerIntent(...)"
}
object ServerIntent {
def apply(onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]): ServerIntent =
def apply(
onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]
): ServerIntent =
new ServerIntent(onRequest, onNotification)
def request(onRequest: PartialFunction[JsonRpcRequestMessage, Unit]): ServerIntent =

View File

@ -18,12 +18,14 @@ import sbt.io.{ AllPassFilter, NothingFilter }
object Append {
@implicitNotFound(
msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}")
msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}"
)
trait Value[A, B] {
def appendValue(a: A, b: B): A
}
@implicitNotFound(
msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}")
msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}"
)
trait Values[A, -B] {
def appendValues(a: A, b: B): A
}
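
These Append.Value / Append.Values instances are what give += and ++= their meaning, which is also what the implicit-not-found messages above refer to. A minimal build.sbt illustration (not part of this commit):

// += resolves an Append.Value[Seq[String], String];
// ++= resolves an Append.Values[Seq[String], Seq[String]].
scalacOptions += "-deprecation"
scalacOptions ++= Seq("-feature", "-unchecked")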

View File

@ -27,11 +27,13 @@ object Def extends Init[Scope] with TaskMacroExtra {
val resolvedScoped = SettingKey[ScopedKey[_]](
"resolved-scoped",
"The ScopedKey for the referencing setting or task.",
KeyRanks.DSetting)
KeyRanks.DSetting
)
private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]](
"task-definition-key",
"Internal: used to map a task back to its ScopedKey.",
Invisible)
Invisible
)
lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None)
@ -56,7 +58,8 @@ object Def extends Init[Scope] with TaskMacroExtra {
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayRelative2(current, ref)
))
)
)
@deprecated("Use showBuildRelativeKey2 which doesn't take the unused multi param", "1.1.1")
def showBuildRelativeKey(
@ -76,7 +79,8 @@ object Def extends Init[Scope] with TaskMacroExtra {
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayBuildRelative(currentBuild, ref)
))
)
)
/**
* Returns a String expression for the given [[Reference]] (BuildRef, [[ProjectRef]], etc)
@ -96,9 +100,11 @@ object Def extends Init[Scope] with TaskMacroExtra {
* Constructs the String of a given [[Reference]] relative to current.
* Note that this no longer takes "multi" parameter, and omits the subproject id at all times.
*/
private[sbt] def displayRelative(current: ProjectRef,
project: Reference,
trailingSlash: Boolean): String = {
private[sbt] def displayRelative(
current: ProjectRef,
project: Reference,
trailingSlash: Boolean
): String = {
val trailing = if (trailingSlash) " /" else ""
project match {
case BuildRef(current.build) => "ThisBuild" + trailing
@ -145,11 +151,14 @@ object Def extends Init[Scope] with TaskMacroExtra {
else None) orElse
s.dependencies
.find(k => k.scope != ThisScope)
.map(k =>
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}")
.map(
k =>
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}"
)
override def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
implicit delegates: Scope => Seq[Scope]
): Option[Scope] =
if (s2 == GlobalScope) Some(s1) // s1 is more specific
else if (s1 == GlobalScope) Some(s2) // s2 is more specific
else super.intersect(s1, s2)
@ -230,7 +239,8 @@ object Def extends Init[Scope] with TaskMacroExtra {
private[sbt] def dummyTask[T](name: String): Task[T] = {
import std.TaskExtra.{ task => newTask, _ }
val base: Task[T] = newTask(
sys.error("Dummy task '" + name + "' did not get converted to a full task.")) named name
sys.error("Dummy task '" + name + "' did not get converted to a full task.")
) named name
base.copy(info = base.info.set(isDummyTask, true))
}
@ -240,13 +250,15 @@ object Def extends Init[Scope] with TaskMacroExtra {
private[sbt] val isDummyTask = AttributeKey[Boolean](
"is-dummy-task",
"Internal: used to identify dummy tasks. sbt injects values for these tasks at the start of task execution.",
Invisible)
Invisible
)
private[sbt] val (stateKey, dummyState) = dummy[State]("state", "Current build state.")
private[sbt] val (streamsManagerKey, dummyStreamsManager) = Def.dummy[std.Streams[ScopedKey[_]]](
"streams-manager",
"Streams manager, which provides streams for different contexts.")
"Streams manager, which provides streams for different contexts."
)
}
// these need to be mixed into the sbt package object

View File

@ -26,6 +26,8 @@ private final class DelegateIndex0(refs: Map[ProjectRef, ProjectDelegates]) exte
case None => Select(conf) :: Zero :: Nil
}
}
private final class ProjectDelegates(val ref: ProjectRef,
val refs: Seq[ScopeAxis[ResolvedReference]],
val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]])
private final class ProjectDelegates(
val ref: ProjectRef,
val refs: Seq[ScopeAxis[ResolvedReference]],
val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]]
)

View File

@ -22,13 +22,15 @@ final class InputTask[T] private (val parser: State => Parser[Task[T]]) {
new InputTask[T](s => Parser(parser(s))(in))
def fullInput(in: String): InputTask[T] =
new InputTask[T](s =>
Parser.parse(in, parser(s)) match {
case Right(v) => Parser.success(v)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
Parser.failure(s"Invalid programmatic input:\n$indented")
})
new InputTask[T](
s =>
Parser.parse(in, parser(s)) match {
case Right(v) => Parser.success(v)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
Parser.failure(s"Invalid programmatic input:\n$indented")
}
)
}
object InputTask {
@ -38,14 +40,18 @@ object InputTask {
import std.FullInstance._
def toTask(in: String): Initialize[Task[T]] = flatten(
(Def.stateKey zipWith i)((sTask, it) =>
sTask map (s =>
Parser.parse(in, it.parser(s)) match {
case Right(t) => Def.value(t)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
sys.error(s"Invalid programmatic input:\n$indented")
}))
(Def.stateKey zipWith i)(
(sTask, it) =>
sTask map (
s =>
Parser.parse(in, it.parser(s)) match {
case Right(t) => Def.value(t)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
sys.error(s"Invalid programmatic input:\n$indented")
}
)
)
)
}
@ -67,12 +73,14 @@ object InputTask {
def free[I, T](p: State => Parser[I])(c: I => Task[T]): InputTask[T] = free(s => p(s) map c)
def separate[I, T](p: State => Parser[I])(
action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
def separate[I, T](
p: State => Parser[I]
)(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
separate(Def value p)(action)
def separate[I, T](p: Initialize[State => Parser[I]])(
action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
def separate[I, T](
p: Initialize[State => Parser[I]]
)(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
p.zipWith(action)((parser, act) => free(parser)(act))
/** Constructs an InputTask that accepts no user input. */
@ -86,8 +94,9 @@ object InputTask {
* a) a Parser constructed using other Settings, but not Tasks
* b) a dynamically constructed Task that uses Settings, Tasks, and the result of parsing.
*/
def createDyn[I, T](p: Initialize[State => Parser[I]])(
action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] =
def createDyn[I, T](
p: Initialize[State => Parser[I]]
)(action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] =
separate(p)(std.FullInstance.flattenFun[I, T](action))
/** A dummy parser that consumes no input and produces nothing useful (unit).*/
@ -103,8 +112,9 @@ object InputTask {
i(Types.const)
@deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0")
def apply[I, T](p: Initialize[State => Parser[I]])(
action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = {
def apply[I, T](
p: Initialize[State => Parser[I]]
)(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = {
val dummyKey = localKey[Task[I]]
val (marker, dummy) = dummyTask[I]
val it = action(TaskKey(dummyKey)) mapConstant subResultForDummy(dummyKey, dummy)
@ -141,9 +151,11 @@ object InputTask {
(key, t)
}
private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]],
value: I,
task: Task[T]): Task[T] = {
private[this] def subForDummy[I, T](
marker: AttributeKey[Option[I]],
value: I,
task: Task[T]
): Task[T] = {
val seen = new java.util.IdentityHashMap[Task[_], Task[_]]
lazy val f: Task ~> Task = new (Task ~> Task) {
def apply[A](t: Task[A]): Task[A] = {
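
The toTask helper reformatted above is the usual way to invoke an input task from a plain task with pre-baked input; a hedged build.sbt sketch (the task key is invented for illustration):

// Note the leading space: the string is handed to the input task's parser
// exactly as if it were the remaining command line.
val runVerbose = taskKey[Unit]("Runs the application with --verbose.")
runVerbose := (Compile / run).toTask(" --verbose").value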

View File

@ -53,11 +53,13 @@ object Previous {
private[sbt] val references = SettingKey[References](
"previous-references",
"Collects all static references to previous values of tasks.",
KeyRanks.Invisible)
KeyRanks.Invisible
)
private[sbt] val cache = TaskKey[Previous](
"previous-cache",
"Caches previous values of tasks read from disk for the duration of a task execution.",
KeyRanks.Invisible)
KeyRanks.Invisible
)
/** Records references to previous task value. This should be completely populated after settings finish loading. */
private[sbt] final class References {
@ -72,9 +74,11 @@ object Previous {
}
/** Persists values of tasks t where there is some task referencing it via t.previous. */
private[sbt] def complete(referenced: References,
results: RMap[Task, Result],
streams: Streams): Unit = {
private[sbt] def complete(
referenced: References,
results: RMap[Task, Result],
streams: Streams
): Unit = {
val map = referenced.getReferences
def impl[T](key: ScopedKey[_], result: T): Unit =
for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) {
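
The references and cache keys above are the plumbing behind the user-facing .previous facility; an illustrative build.sbt sketch (the key name is invented, and the JsonFormat comes from the sjson-new import):

import sjsonnew.BasicJsonProtocol._ // JsonFormat[Int], required by .previous

val buildCount = taskKey[Int]("Counts invocations of this task.")

buildCount := {
  val n = buildCount.previous.getOrElse(0) // Option[Int], read back from the persisted cache
  n + 1
}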

View File

@ -11,12 +11,14 @@ import scala.annotation.implicitNotFound
object Remove {
@implicitNotFound(
msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}"
)
trait Value[A, B] extends Any {
def removeValue(a: A, b: B): A
}
@implicitNotFound(
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}"
)
trait Values[A, -B] extends Any {
def removeValues(a: A, b: B): A
}

View File

@ -13,10 +13,12 @@ import sbt.internal.util.{ AttributeKey, AttributeMap, Dag }
import sbt.io.IO
final case class Scope(project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
extra: ScopeAxis[AttributeMap]) {
final case class Scope(
project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
extra: ScopeAxis[AttributeMap]
) {
def in(project: Reference, config: ConfigKey): Scope =
copy(project = Select(project), config = Select(config))
def in(config: ConfigKey, task: AttributeKey[_]): Scope =
@ -106,17 +108,21 @@ object Scope {
else
IO.directoryURI(current resolve uri)
def resolveReference(current: URI,
rootProject: URI => String,
ref: Reference): ResolvedReference =
def resolveReference(
current: URI,
rootProject: URI => String,
ref: Reference
): ResolvedReference =
ref match {
case br: BuildReference => resolveBuildRef(current, br)
case pr: ProjectReference => resolveProjectRef(current, rootProject, pr)
}
def resolveProjectRef(current: URI,
rootProject: URI => String,
ref: ProjectReference): ProjectRef =
def resolveProjectRef(
current: URI,
rootProject: URI => String,
ref: ProjectReference
): ProjectRef =
ref match {
case LocalRootProject => ProjectRef(current, rootProject(current))
case LocalProject(id) => ProjectRef(current, id)
@ -164,10 +170,12 @@ object Scope {
def displayMasked(scope: Scope, sep: String, mask: ScopeMask, showZeroConfig: Boolean): String =
displayMasked(scope, sep, showProject, mask, showZeroConfig)
def displayMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask): String =
def displayMasked(
scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask
): String =
displayMasked(scope, sep, showProject, mask, false)
/**
@ -177,11 +185,13 @@ object Scope {
* Technically speaking an unspecified configuration axis defaults to
* the scope delegation (first configuration defining the key, then Zero).
*/
def displayMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask,
showZeroConfig: Boolean): String = {
def displayMasked(
scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask,
showZeroConfig: Boolean
): String = {
import scope.{ project, config, task, extra }
val zeroConfig = if (showZeroConfig) "Zero /" else ""
val configPrefix = config.foldStrict(display, zeroConfig, "./")
@ -190,11 +200,13 @@ object Scope {
val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")")
if (scope == GlobalScope) "Global / " + sep + postfix
else
mask.concatShow(appendSpace(projectPrefix(project, showProject)),
appendSpace(configPrefix),
appendSpace(taskPrefix),
sep,
postfix)
mask.concatShow(
appendSpace(projectPrefix(project, showProject)),
appendSpace(configPrefix),
appendSpace(taskPrefix),
sep,
postfix
)
}
private[sbt] def appendSpace(s: String): String =
@ -207,12 +219,16 @@ object Scope {
(!mask.task || a.task == b.task) &&
(!mask.extra || a.extra == b.extra)
def projectPrefix(project: ScopeAxis[Reference],
show: Reference => String = showProject): String =
def projectPrefix(
project: ScopeAxis[Reference],
show: Reference => String = showProject
): String =
project.foldStrict(show, "Zero /", "./")
def projectPrefix012Style(project: ScopeAxis[Reference],
show: Reference => String = showProject): String =
def projectPrefix012Style(
project: ScopeAxis[Reference],
show: Reference => String = showProject
): String =
project.foldStrict(show, "*/", "./")
def showProject = (ref: Reference) => Reference.display(ref) + " /"
@ -332,27 +348,32 @@ object Scope {
}
private[this] def delegateIndex(ref: ProjectRef, confs: Seq[ConfigKey])(
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): ProjectDelegates = {
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]
): ProjectDelegates = {
val refDelegates = withRawBuilds(linearize(Select(ref), false)(projectInherit))
val configs = confs map { c =>
axisDelegates(configInherit, ref, c)
}
new ProjectDelegates(ref, refDelegates, configs.toMap)
}
def axisDelegates[T](direct: (ResolvedReference, T) => Seq[T],
ref: ResolvedReference,
init: T): (T, Seq[ScopeAxis[T]]) =
def axisDelegates[T](
direct: (ResolvedReference, T) => Seq[T],
ref: ResolvedReference,
init: T
): (T, Seq[ScopeAxis[T]]) =
(init, linearize(Select(init))(direct(ref, _)))
def linearize[T](axis: ScopeAxis[T], appendZero: Boolean = true)(
inherit: T => Seq[T]): Seq[ScopeAxis[T]] =
inherit: T => Seq[T]
): Seq[ScopeAxis[T]] =
axis match {
case Select(x) => topologicalSort[T](x, appendZero)(inherit)
case Zero | This => if (appendZero) Zero :: Nil else Nil
}
def topologicalSort[T](node: T, appendZero: Boolean)(
dependencies: T => Seq[T]): Seq[ScopeAxis[T]] = {
dependencies: T => Seq[T]
): Seq[ScopeAxis[T]] = {
val o = Dag.topologicalSortUnchecked(node)(dependencies).map(Select.apply)
if (appendZero) o ::: Zero :: Nil
else o
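
For orientation, the four-axis Scope case class reformatted above composes like this; a hedged sketch (my reading of the surrounding code, not part of this commit):

import sbt._

// Every axis left open (Zero); the display code above renders this scope as "Global / ".
val global = Scope(Zero, Zero, Zero, Zero)

// Pinned to a project and a configuration; the task and extra axes stay open.
val coreCompile =
  Scope(Select(LocalProject("core")), Select(ConfigKey("compile")), Zero, Zero)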

View File

@ -337,8 +337,10 @@ object Scoped {
def transform(f: S => S, source: SourcePosition): Setting[Task[S]] =
set(scopedKey(_ map f), source)
@deprecated("No longer needed with new task syntax and SettingKey inheriting from Initialize.",
"0.13.2")
@deprecated(
"No longer needed with new task syntax and SettingKey inheriting from Initialize.",
"0.13.2"
)
def task: SettingKey[Task[S]] = scopedSetting(scope, key)
def toSettingKey: SettingKey[Task[S]] = scopedSetting(scope, key)
@ -401,8 +403,9 @@ object Scoped {
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = {
import TupleSyntax._
(i, Initialize.joinAny[Task](tasks))((thisTask, deps) =>
thisTask.mapTask(_.dependsOn(deps: _*)))
(i, Initialize.joinAny[Task](tasks))(
(thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*))
)
}
}
@ -429,23 +432,27 @@ object Scoped {
@deprecated(
"Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.",
"0.13.0")
"0.13.0"
)
def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_.result flatMap f)
@deprecated(
"Use the `result` method to create a task that returns the full Result of this task. Then, call `map` on the new task.",
"0.13.0")
"0.13.0"
)
def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_.result map f)
@deprecated(
"Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.",
"0.13.0")
"0.13.0"
)
def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] =
onTask(_.result flatMap (f compose failM))
@deprecated(
"Use the `failure` method to create a task that returns Incomplete when this task fails and then call `map` on the new task.",
"0.13.0")
"0.13.0"
)
def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = onTask(_.result map (f compose failM))
}

View File

@ -31,27 +31,33 @@ object InputWrapper {
private[std] final val WrapPreviousName = "wrapPrevious_\u2603\u2603"
@compileTimeOnly(
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.")
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
)
def wrapTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.")
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting."
)
def wrapInit_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.")
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
)
def wrapInitTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.")
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask."
)
def wrapInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.")
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask."
)
def wrapInitInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask.")
"`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask."
)
def wrapPrevious_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
private[this] def implDetailError =
@ -161,8 +167,9 @@ object InputWrapper {
}
/** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value*/
def previousMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
def previousMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
import c.universe._
c.macroApplication match {
case a @ Apply(Select(Apply(_, t :: Nil), _), _) =>
@ -182,35 +189,42 @@ object InputWrapper {
sealed abstract class MacroTaskValue[T] {
@compileTimeOnly(
"`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting.")
"`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting."
)
def taskValue: Task[T] = macro InputWrapper.taskValueMacroImpl[T]
}
sealed abstract class MacroValue[T] {
@compileTimeOnly(
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.")
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting."
)
def value: T = macro InputWrapper.valueMacroImpl[T]
}
sealed abstract class ParserInput[T] {
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parsed: T = macro ParserInput.parsedMacroImpl[T]
}
sealed abstract class InputEvaluated[T] {
@compileTimeOnly(
"`evaluated` can only be used within an input task macro, such as := or Def.inputTask.")
"`evaluated` can only be used within an input task macro, such as := or Def.inputTask."
)
def evaluated: T = macro InputWrapper.valueMacroImpl[T]
@compileTimeOnly(
"`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask.")
"`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask."
)
def inputTaskValue: InputTask[T] = macro InputWrapper.inputTaskValueMacroImpl[T]
}
sealed abstract class ParserInputTask[T] {
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parsed: Task[T] = macro ParserInput.parsedInputMacroImpl[T]
}
sealed abstract class MacroPrevious[T] {
@compileTimeOnly(
"`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.")
"`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task."
)
def previous(implicit format: sjsonnew.JsonFormat[T]): Option[T] =
macro InputWrapper.previousMacroImpl[T]
}
@ -224,24 +238,29 @@ object ParserInput {
private[std] val WrapInitName = "initParser_\u2603\u2603"
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T =
sys.error("This method is an implementation detail and should not be referenced.")
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def initParser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T =
sys.error("This method is an implementation detail and should not be referenced.")
private[std] def wrap[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrap[T: c.WeakTypeTag](
c: blackbox.Context
)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapName)(ts, pos)
private[std] def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrapInit[T: c.WeakTypeTag](
c: blackbox.Context
)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapInitName)(ts, pos)
private[std] def inputParser[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] =
private[std] def inputParser[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] =
c.universe.reify(t.splice.parser)
def parsedInputMacroImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Task[T]] =
@ -261,8 +280,9 @@ object ParserInput {
wrap[Task[T]](c)(inputParser(c)(e), pos)
}
private def wrapInitInputTask[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree,
pos: c.Position) = {
private def wrapInitInputTask[T: c.WeakTypeTag](
c: blackbox.Context
)(tree: c.Tree, pos: c.Position) = {
val e = c.Expr[Initialize[InputTask[T]]](tree)
wrapInit[Task[T]](c)(c.universe.reify { Def.toIParser(e.splice) }, pos)
}
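
The compileTimeOnly messages above spell out where value, parsed, and evaluated may appear; a minimal build.sbt sketch showing parsed in its legal position inside an input task macro (the key name is invented):

import sbt.complete.DefaultParsers._

val greet = inputKey[Unit]("Greets each name given on the command line.")

greet := {
  val names = spaceDelimited("<name>").parsed // legal: we are inside an input task macro
  names.foreach(n => println(s"Hello, $n"))
}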

View File

@ -14,18 +14,21 @@ import scala.reflect.macros._
import sbt.util.OptJsonWriter
private[sbt] object KeyMacro {
def settingKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[SettingKey[T]] =
def settingKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[SettingKey[T]] =
keyImpl2[T, SettingKey[T]](c) { (name, mf, ojw) =>
c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice, ojw.splice) }
}
def taskKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[TaskKey[T]] =
def taskKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[TaskKey[T]] =
keyImpl[T, TaskKey[T]](c) { (name, mf) =>
c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) }
}
def inputKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[InputKey[T]] =
def inputKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[InputKey[T]] =
keyImpl[T, InputKey[T]](c) { (name, mf) =>
c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) }
}
@ -45,7 +48,8 @@ private[sbt] object KeyMacro {
val enclosingValName = definingValName(
c,
methodName =>
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""")
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`."""
)
c.Expr[String](Literal(Constant(enclosingValName)))
}
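
The definingValName check above is the reason the key macros insist on direct val assignment; a short illustration (names invented):

// Accepted: the macro derives the key's name, "greeting", from the val it is assigned to.
val greeting = settingKey[String]("A friendly greeting.")

// Rejected at compile time: not directly assigned to a val, so there is no name to derive.
// val keys = Seq(settingKey[String]("A friendly greeting."))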

View File

@ -46,11 +46,13 @@ object InitializeConvert extends Convert {
Converted.Success(t)
}
private def failTask[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
private def failTask[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task")
private def failPrevious[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
private def failPrevious[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task's previous value.")
}
@ -59,11 +61,14 @@ object SettingMacro {
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def settingDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
def settingDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
}

View File

@ -56,9 +56,11 @@ object FullInstance
extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance)
with MonadInstance {
type SS = sbt.internal.util.Settings[Scope]
val settingsData = TaskKey[SS]("settings-data",
"Provides access to the project data for the build.",
KeyRanks.DTask)
val settingsData = TaskKey[SS](
"settings-data",
"Provides access to the project data for the build.",
KeyRanks.DTask
)
def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = {
import TupleSyntax._
@ -98,29 +100,35 @@ object TaskMacro {
import LinterDSL.{ Empty => EmptyLinter }
def taskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
def taskMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def taskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
def taskDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
/** Implementation of := macro for settings. */
def settingAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[T]] = {
def settingAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[T]](assign)
}
/** Implementation of := macro for tasks. */
def taskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
def taskAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[Task[T]]](assign)
@ -134,14 +142,16 @@ object TaskMacro {
): c.Expr[Setting[T]] =
ContextUtil.selectMacroImpl[Setting[T]](c)((_, pos) => c.abort(pos, assignMigration))
def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
@deprecated("unused", "") v: c.Expr[Initialize[V]])(
def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") v: c.Expr[Initialize[V]])(
@deprecated("unused", "") a: c.Expr[Append.Value[S, V]]
): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, append1Migration))
def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
@deprecated("unused", "") vs: c.Expr[Initialize[V]])(
def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") vs: c.Expr[Initialize[V]])(
@deprecated("unused", "") a: c.Expr[Append.Values[S, V]]
): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, appendNMigration))
@ -151,14 +161,16 @@ object TaskMacro {
): c.Expr[Setting[Task[T]]] =
ContextUtil.selectMacroImpl[Setting[Task[T]]](c)((_, pos) => c.abort(pos, assignMigration))
def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
@deprecated("unused", "") v: c.Expr[Initialize[Task[V]]])(
def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") v: c.Expr[Initialize[Task[V]]])(
@deprecated("unused", "") a: c.Expr[Append.Value[S, V]]
): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, append1Migration))
def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
@deprecated("unused", "") vs: c.Expr[Initialize[Task[V]]])(
def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") vs: c.Expr[Initialize[Task[V]]])(
@deprecated("unused", "") a: c.Expr[Append.Values[S, V]]
): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, appendNMigration))
@ -166,56 +178,66 @@ object TaskMacro {
// Implementations of <<= macro variations for tasks and settings.
// These just get the source position of the call site.
def itaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
def itaskAssignPosition[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
settingAssignPosition(c)(app)
def taskAssignPositionT[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] =
def taskAssignPositionT[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] =
itaskAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def taskAssignPositionPure[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[T]): c.Expr[Setting[Task[T]]] =
def taskAssignPositionPure[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[T]): c.Expr[Setting[Task[T]]] =
taskAssignPositionT(c)(c.universe.reify { TaskExtra.constant(app.splice) })
def taskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] =
def taskTransformPosition[S: c.WeakTypeTag](
c: blackbox.Context
)(f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] =
c.Expr[Setting[Task[S]]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[S]] =
def settingTransformPosition[S: c.WeakTypeTag](
c: blackbox.Context
)(f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def itaskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[S]] =
def itaskTransformPosition[S: c.WeakTypeTag](
c: blackbox.Context
)(f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[T]] =
settingAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
def settingAssignPosition[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
c.Expr[Setting[T]](transformMacroImpl(c)(app.tree)(AssignInitName))
/** Implementation of := macro for tasks. */
def inputTaskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = {
def inputTaskAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = {
val init = inputTaskMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[InputTask[T]]](assign)
}
/** Implementation of += macro for tasks. */
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(v)
val append = appendMacroImpl(c)(init.tree, a.tree)(Append1InitName)
c.Expr[Setting[Task[T]]](append)
}
/** Implementation of += macro for settings. */
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = {
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = {
import c.universe._
val ttpe = c.weakTypeOf[T]
val typeArgs = ttpe.typeArgs
@ -228,7 +250,8 @@ object TaskMacro {
case Apply(Apply(TypeApply(Select(preT, _), _), _), _) =>
val tree = Apply(
TypeApply(Select(preT, TermName("+=").encodedName), TypeTree(typeArgs.head) :: Nil),
Select(v.tree, TermName("taskValue").encodedName) :: Nil)
Select(v.tree, TermName("taskValue").encodedName) :: Nil
)
c.Expr[Setting[T]](tree)
case x => ContextUtil.unexpectedTree(x)
}
@ -240,55 +263,62 @@ object TaskMacro {
}
/** Implementation of ++= macro for tasks. */
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(vs)
val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName)
c.Expr[Setting[Task[T]]](append)
}
/** Implementation of ++= macro for settings. */
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = {
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[U](c)(vs)
val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName)
c.Expr[Setting[T]](append)
}
/** Implementation of -= macro for tasks. */
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(v)
val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName)
c.Expr[Setting[Task[T]]](remove)
}
/** Implementation of -= macro for settings. */
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = {
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[U](c)(v)
val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName)
c.Expr[Setting[T]](remove)
}
/** Implementation of --= macro for tasks. */
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(vs)
val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName)
c.Expr[Setting[Task[T]]](remove)
}
/** Implementation of --= macro for settings. */
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = {
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[U](c)(vs)
val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName)
c.Expr[Setting[T]](remove)
}
private[this] def appendMacroImpl(c: blackbox.Context)(init: c.Tree, append: c.Tree)(
newName: String): c.Tree = {
private[this] def appendMacroImpl(
c: blackbox.Context
)(init: c.Tree, append: c.Tree)(newName: String): c.Tree = {
import c.universe._
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) =>
@ -303,8 +333,9 @@ object TaskMacro {
}
}
private[this] def removeMacroImpl(c: blackbox.Context)(init: c.Tree, remove: c.Tree)(
newName: String): c.Tree = {
private[this] def removeMacroImpl(
c: blackbox.Context
)(init: c.Tree, remove: c.Tree)(newName: String): c.Tree = {
import c.universe._
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) =>
@ -328,8 +359,10 @@ object TaskMacro {
case Apply(Select(prefix, _), _) => prefix
case x => ContextUtil.unexpectedTree(x)
}
Apply.apply(Select(target, TermName(newName).encodedName),
init :: sourcePosition(c).tree :: Nil)
Apply.apply(
Select(target, TermName(newName).encodedName),
init :: sourcePosition(c).tree :: Nil
)
}
private[this] def sourcePosition(c: blackbox.Context): c.Expr[SourcePosition] = {
@ -347,7 +380,8 @@ object TaskMacro {
private[this] def settingSource(c: blackbox.Context, path: String, name: String): String = {
@tailrec def inEmptyPackage(s: c.Symbol): Boolean = s != c.universe.NoSymbol && (
s.owner == c.mirror.EmptyPackage || s.owner == c.mirror.EmptyPackageClass || inEmptyPackage(
s.owner)
s.owner
)
)
c.internal.enclosingOwner match {
case ec if !ec.isStatic => name
@ -361,16 +395,19 @@ object TaskMacro {
c.Expr[T](Literal(Constant(t)))
}
def inputTaskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
def inputTaskMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
inputTaskMacro0[T](c)(t)
def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] =
def inputTaskDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] =
inputTaskDynMacro0[T](c)(t)
private[this] def inputTaskMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
private[this] def inputTaskMacro0[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
iInitializeMacro(c)(t) { et =>
val pt = iParserMacro(c)(et) { pt =>
iTaskMacro(c)(pt)
@ -379,8 +416,8 @@ object TaskMacro {
}
private[this] def iInitializeMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])(
f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T],
mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = {
f: c.Expr[T] => c.Expr[M[T]]
)(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
@ -388,7 +425,8 @@ object TaskMacro {
Instance
.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder, EmptyLinter)(
Left(cond),
inner)
inner
)
}
private[this] def conditionInputTaskTree(c: blackbox.Context)(t: c.Tree): c.Tree = {
@ -424,25 +462,29 @@ object TaskMacro {
}
private[this] def iParserMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])(
f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T],
mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = {
f: c.Expr[T] => c.Expr[M[T]]
)(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder, LinterDSL.Empty)(
Left(t),
inner)
inner
)
}
private[this] def iTaskMacro[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Task[T]] =
private[this] def iTaskMacro[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance
.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform)
Instance.idTransform
)
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
import c.universe.{ Apply => ApplyTree, _ }
import internal.decorators._
@ -467,7 +509,8 @@ object TaskMacro {
if (result.isDefined) {
c.error(
qual.pos,
"Implementation restriction: a dynamic InputTask can only have a single input parser.")
"Implementation restriction: a dynamic InputTask can only have a single input parser."
)
EmptyTree
} else {
qual.foreach(checkQual)
@ -526,11 +569,13 @@ object PlainTaskMacro {
def taskImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T]
def taskDynImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
}
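The hunks above (and those that follow) all apply one layout rule: when a definition or call no longer fits on a single line, its parameters move to their own lines and the closing parenthesis dangles on its own line instead of the continuation being aligned under the opening parenthesis. A minimal, compilable sketch of the new definition-site layout, using a hypothetical method that is not taken from the diff:

object FormattingSketch {
  // Old layout (removed throughout the diff): continuation parameters aligned
  // under the opening parenthesis of the definition site:
  //   def render(header: String, body: String,
  //              footer: String): String = ...
  // New layout (added throughout the diff): one parameter per line, with a
  // dangling closing parenthesis before the result type:
  def render(
      header: String,
      body: String,
      footer: String
  ): String = List(header, body, footer).mkString("\n")
}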

View File

@ -85,8 +85,10 @@ object SlashSyntaxSpec extends Properties("SlashSyntax") with SlashSyntax {
}
property("Reference? / ConfigKey? / key == key in ThisScope.copy(..)") = {
forAll((r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: Key) =>
expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k))
forAll(
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: Key) =>
expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k)
)
}
// property("Reference? / AttributeKey? / key == key in ThisScope.copy(..)") = {

View File

@ -23,7 +23,8 @@ abstract class BackgroundJobService extends Closeable {
* then you could process.destroy() for example.
*/
def runInBackground(spawningTask: ScopedKey[_], state: State)(
start: (Logger, File) => Unit): JobHandle
start: (Logger, File) => Unit
): JobHandle
/** Same as shutdown. */
def close(): Unit
@ -51,7 +52,8 @@ object BackgroundJobService {
{
val stringIdParser: Parser[Seq[String]] = Space ~> token(
NotSpace examples handles.map(_.id.toString).toSet,
description = "<job id>").+
description = "<job id>"
).+
stringIdParser.map { strings =>
strings.map(Integer.parseInt(_)).flatMap(id => handles.find(_.id == id))
}

View File

@ -17,19 +17,25 @@ object BuildPaths {
val globalBaseDirectory = AttributeKey[File](
"global-base-directory",
"The base directory for global sbt configuration and staging.",
DSetting)
val globalPluginsDirectory = AttributeKey[File]("global-plugins-directory",
"The base directory for global sbt plugins.",
DSetting)
val globalSettingsDirectory = AttributeKey[File]("global-settings-directory",
"The base directory for global sbt settings.",
DSetting)
DSetting
)
val globalPluginsDirectory = AttributeKey[File](
"global-plugins-directory",
"The base directory for global sbt plugins.",
DSetting
)
val globalSettingsDirectory = AttributeKey[File](
"global-settings-directory",
"The base directory for global sbt settings.",
DSetting
)
val stagingDirectory =
AttributeKey[File]("staging-directory", "The directory for staging remote projects.", DSetting)
val dependencyBaseDirectory = AttributeKey[File](
"dependency-base-directory",
"The base directory for caching dependency resolution.",
DSetting)
DSetting
)
val globalZincDirectory =
AttributeKey[File]("global-zinc-directory", "The base directory for Zinc internals.", DSetting)
@ -56,7 +62,8 @@ object BuildPaths {
def getGlobalPluginsDirectory(state: State, globalBase: File): File =
fileSetting(globalPluginsDirectory, GlobalPluginsProperty, defaultGlobalPlugins(globalBase))(
state)
state
)
def getGlobalSettingsDirectory(state: State, globalBase: File): File =
fileSetting(globalSettingsDirectory, GlobalSettingsProperty, globalBase)(state)
@ -70,11 +77,13 @@ object BuildPaths {
fileSetting(globalZincDirectory, GlobalZincProperty, defaultGlobalZinc(globalBase))(state)
private[this] def fileSetting(stateKey: AttributeKey[File], property: String, default: File)(
state: State): File =
state: State
): File =
getFileSetting(stateKey, property, default)(state)
def getFileSetting(stateKey: AttributeKey[File], property: String, default: => File)(
state: State): File =
state: State
): File =
state get stateKey orElse getFileProperty(property) getOrElse default
def getFileProperty(name: String): Option[File] = Option(System.getProperty(name)) flatMap {
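The two helpers above resolve a directory with a fixed precedence: an explicit attribute on the State wins, then a JVM system property, then the computed default. A standalone sketch of that ordering, with illustrative names (resolveDir and its parameters are not from the diff):

import java.io.File

object BuildPathsSketch {
  // Mirrors the precedence in getFileSetting above:
  // state attribute, then -D<property>, then the lazily computed default.
  def resolveDir(fromState: Option[File], propertyName: String, default: => File): File =
    fromState
      .orElse(Option(System.getProperty(propertyName)).map(new File(_)))
      .getOrElse(default)
}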

View File

@ -150,7 +150,8 @@ object Cross {
"configuration. This could result in subprojects cross building against Scala versions that they are " +
"not compatible with. Try issuing cross building command with tasks instead, since sbt will be able " +
"to ensure that cross building is only done using configured project and Scala version combinations " +
"that are configured.")
"that are configured."
)
state.log.debug("Scala versions configuration is:")
projCrossVersions.foreach {
case (project, versions) => state.log.debug(s"$project: $versions")
@ -174,12 +175,14 @@ object Cross {
case (version, projects) if aggCommand.contains(" ") =>
// If the command contains a space, then the `all` command won't work because it doesn't support issuing
// commands with spaces, so revert to running the command on each project one at a time
s"$SwitchCommand $verbose $version" :: projects.map(project =>
s"$project/$aggCommand")
s"$SwitchCommand $verbose $version" :: projects
.map(project => s"$project/$aggCommand")
case (version, projects) =>
// First switch scala version, then use the all command to run the command on each project concurrently
Seq(s"$SwitchCommand $verbose $version",
projects.map(_ + "/" + aggCommand).mkString("all ", " ", ""))
Seq(
s"$SwitchCommand $verbose $version",
projects.map(_ + "/" + aggCommand).mkString("all ", " ", "")
)
}
}
@ -188,8 +191,9 @@ object Cross {
}
def crossRestoreSession: Command =
Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)((s, _) =>
crossRestoreSessionImpl(s))
Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)(
(s, _) => crossRestoreSessionImpl(s)
)
private def crossRestoreSessionImpl(state: State): State = {
restoreCapturedSession(state, Project.extract(state))
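The cross-building hunk above fans one aggregated command out per Scala version: because the `all` command cannot carry commands that contain spaces, such commands are issued one project at a time, while simple commands are batched through `all`. A simplified, self-contained sketch of that branching (switchCommand is a placeholder and the verbose flag is omitted):

object CrossFanOutSketch {
  def fanOut(switchCommand: String, version: String, projects: Seq[String], aggCommand: String): Seq[String] =
    if (aggCommand.contains(" "))
      // `all` cannot take commands with spaces, so run the command on each project separately
      s"$switchCommand $version" +: projects.map(p => s"$p/$aggCommand")
    else
      // switch the Scala version once, then run the command on every project concurrently via `all`
      Seq(s"$switchCommand $version", projects.map(_ + "/" + aggCommand).mkString("all ", " ", ""))
}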

File diff suppressed because it is too large

View File

@ -172,9 +172,11 @@ object EvaluateTask {
val SystemProcessors = Runtime.getRuntime.availableProcessors
def extractedTaskConfig(extracted: Extracted,
structure: BuildStructure,
state: State): EvaluateTaskConfig = {
def extractedTaskConfig(
extracted: Extracted,
structure: BuildStructure,
state: State
): EvaluateTaskConfig = {
val rs = restrictions(extracted, structure)
val canceller = cancelStrategy(extracted, structure, state)
val progress = executeProgress(extracted, structure, state)
@ -193,10 +195,12 @@ object EvaluateTask {
}
def restrictions(extracted: Extracted, structure: BuildStructure): Seq[Tags.Rule] =
getSetting(Keys.concurrentRestrictions,
defaultRestrictions(extracted, structure),
extracted,
structure)
getSetting(
Keys.concurrentRestrictions,
defaultRestrictions(extracted, structure),
extracted,
structure
)
def maxWorkers(extracted: Extracted, structure: BuildStructure): Int =
if (getSetting(Keys.parallelExecution, true, extracted, structure))
@ -207,22 +211,27 @@ object EvaluateTask {
def cancelable(extracted: Extracted, structure: BuildStructure): Boolean =
getSetting(Keys.cancelable, false, extracted, structure)
def cancelStrategy(extracted: Extracted,
structure: BuildStructure,
state: State): TaskCancellationStrategy =
def cancelStrategy(
extracted: Extracted,
structure: BuildStructure,
state: State
): TaskCancellationStrategy =
getSetting(Keys.taskCancelStrategy, { (_: State) =>
TaskCancellationStrategy.Null
}, extracted, structure)(state)
private[sbt] def executeProgress(extracted: Extracted,
structure: BuildStructure,
state: State): ExecuteProgress[Task] = {
private[sbt] def executeProgress(
extracted: Extracted,
structure: BuildStructure,
state: State
): ExecuteProgress[Task] = {
import Types.const
val maker: State => Keys.TaskProgress = getSetting(
Keys.executeProgress,
const(new Keys.TaskProgress(defaultProgress)),
extracted,
structure)
structure
)
maker(state).progress
}
// TODO - Should this pull from Global or from the project itself?
@ -230,15 +239,19 @@ object EvaluateTask {
getSetting(Keys.forcegc in Global, GCUtil.defaultForceGarbageCollection, extracted, structure)
// TODO - Should this pull from Global or from the project itself?
private[sbt] def minForcegcInterval(extracted: Extracted, structure: BuildStructure): Duration =
getSetting(Keys.minForcegcInterval in Global,
GCUtil.defaultMinForcegcInterval,
extracted,
structure)
getSetting(
Keys.minForcegcInterval in Global,
GCUtil.defaultMinForcegcInterval,
extracted,
structure
)
def getSetting[T](key: SettingKey[T],
default: T,
extracted: Extracted,
structure: BuildStructure): T =
def getSetting[T](
key: SettingKey[T],
default: T,
extracted: Extracted,
structure: BuildStructure
): T =
key in extracted.currentRef get structure.data getOrElse default
def injectSettings: Seq[Setting[_]] = Seq(
@ -258,7 +271,8 @@ object EvaluateTask {
val evaluated =
apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config)
val (newS, result) = evaluated getOrElse sys.error(
"Plugin data does not exist for plugin definition at " + pluginDef.root)
"Plugin data does not exist for plugin definition at " + pluginDef.root
)
Project.runUnloadHooks(newS) // discard states
processResult2(result)
}
@ -268,26 +282,32 @@ object EvaluateTask {
* If the task is not defined, None is returned. The provided task key is resolved against the current project `ref`.
* Task execution is configured according to settings defined in the loaded project.
*/
def apply[T](structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef): Option[(State, Result[T])] =
apply[T](structure,
taskKey,
state,
ref,
extractedTaskConfig(Project.extract(state), structure, state))
def apply[T](
structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef
): Option[(State, Result[T])] =
apply[T](
structure,
taskKey,
state,
ref,
extractedTaskConfig(Project.extract(state), structure, state)
)
/**
* Evaluates `taskKey` and returns the new State and the result of the task wrapped in Some.
* If the task is not defined, None is returned. The provided task key is resolved against the current project `ref`.
* `config` configures concurrency and canceling of task execution.
*/
def apply[T](structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef,
config: EvaluateTaskConfig): Option[(State, Result[T])] = {
def apply[T](
structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef,
config: EvaluateTaskConfig
): Option[(State, Result[T])] = {
withStreams(structure, state) { str =>
for ((task, toNode) <- getTask(structure, taskKey, state, str, ref))
yield runTask(task, state, str, structure.index.triggers, config)(toNode)
@ -335,34 +355,41 @@ object EvaluateTask {
try { f(str) } finally { str.close() }
}
def getTask[T](structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
streams: Streams,
ref: ProjectRef): Option[(Task[T], NodeView[Task])] = {
def getTask[T](
structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
streams: Streams,
ref: ProjectRef
): Option[(Task[T], NodeView[Task])] = {
val thisScope = Load.projectScope(ref)
val resolvedScope = Scope.replaceThis(thisScope)(taskKey.scope)
for (t <- structure.data.get(resolvedScope, taskKey.key))
yield (t, nodeView(state, streams, taskKey :: Nil))
}
def nodeView[HL <: HList](state: State,
streams: Streams,
roots: Seq[ScopedKey[_]],
dummies: DummyTaskMap = DummyTaskMap(Nil)): NodeView[Task] =
def nodeView[HL <: HList](
state: State,
streams: Streams,
roots: Seq[ScopedKey[_]],
dummies: DummyTaskMap = DummyTaskMap(Nil)
): NodeView[Task] =
Transform(
(dummyRoots, roots) :: (Def.dummyStreamsManager, streams) :: (dummyState, state) :: dummies)
(dummyRoots, roots) :: (Def.dummyStreamsManager, streams) :: (dummyState, state) :: dummies
)
def runTask[T](
root: Task[T],
state: State,
streams: Streams,
triggers: Triggers[Task],
config: EvaluateTaskConfig)(implicit taskToNode: NodeView[Task]): (State, Result[T]) = {
config: EvaluateTaskConfig
)(implicit taskToNode: NodeView[Task]): (State, Result[T]) = {
import ConcurrentRestrictions.{ completionService, tagged, tagsKey }
val log = state.log
log.debug(
s"Running task... Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}")
s"Running task... Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}"
)
val tags =
tagged[Task[_]](_.info get tagsKey getOrElse Map.empty, Tags.predicate(config.restrictions))
val (service, shutdownThreads) =
@ -383,9 +410,11 @@ object EvaluateTask {
case _ => true
}
def run() = {
val x = new Execute[Task](Execute.config(config.checkCycles, overwriteNode),
triggers,
config.progressReporter)(taskToNode)
val x = new Execute[Task](
Execute.config(config.checkCycles, overwriteNode),
triggers,
config.progressReporter
)(taskToNode)
val (newState, result) =
try {
val results = x.runKeep(root)(service)
@ -410,15 +439,19 @@ object EvaluateTask {
finally strat.onTaskEngineFinish(cancelState)
}
private[this] def storeValuesForPrevious(results: RMap[Task, Result],
state: State,
streams: Streams): Unit =
private[this] def storeValuesForPrevious(
results: RMap[Task, Result],
state: State,
streams: Streams
): Unit =
for (referenced <- Previous.references in Global get Project.structure(state).data)
Previous.complete(referenced, results, streams)
def applyResults[T](results: RMap[Task, Result],
state: State,
root: Task[T]): (State, Result[T]) =
def applyResults[T](
results: RMap[Task, Result],
state: State,
root: Task[T]
): (State, Result[T]) =
(stateTransform(results)(state), results(root))
def stateTransform(results: RMap[Task, Result]): State => State =
Function.chain(

View File

@ -16,9 +16,11 @@ import sbt.util.Show
import std.Transform.DummyTaskMap
import sbt.EvaluateTask.extractedTaskConfig
final case class Extracted(structure: BuildStructure,
session: SessionSettings,
currentRef: ProjectRef)(implicit val showKey: Show[ScopedKey[_]]) {
final case class Extracted(
structure: BuildStructure,
session: SessionSettings,
currentRef: ProjectRef
)(implicit val showKey: Show[ScopedKey[_]]) {
def rootProject = structure.rootProject
lazy val currentUnit = structure units currentRef.build
lazy val currentProject = currentUnit defined currentRef.project
@ -123,7 +125,8 @@ final case class Extracted(structure: BuildStructure,
@deprecated(
"This discards session settings. Migrate to appendWithSession or appendWithoutSession.",
"1.2.0")
"1.2.0"
)
def append(settings: Seq[Setting[_]], state: State): State =
appendWithoutSession(settings, state)
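The deprecation above steers callers to the two explicit variants. A small migration sketch, assuming sbt.Extracted, sbt.State and sbt.Def.Setting are in scope as in the rest of this file:

def migrateAppend(extracted: Extracted, settings: Seq[Def.Setting[_]], state: State): (State, State) = {
  // Keeps the current session settings applied on top of the new ones:
  val withSession = extracted.appendWithSession(settings, state)
  // Matches the old `append`, which discarded session settings:
  val withoutSession = extracted.appendWithoutSession(settings, state)
  (withSession, withoutSession)
}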

View File

@ -69,7 +69,8 @@ final class xMain extends xsbti.AppMain {
val state = StandardMain.initialState(
configuration,
Seq(defaults, early),
runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil)
runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil
)
StandardMain.runManaged(state)
}
}
@ -119,13 +120,17 @@ object StandardMain {
ConsoleOut.systemOutOverwrite(ConsoleOut.overwriteContaining("Resolving "))
def initialGlobalLogging: GlobalLogging =
GlobalLogging.initial(MainAppender.globalDefault(console),
File.createTempFile("sbt", ".log"),
console)
GlobalLogging.initial(
MainAppender.globalDefault(console),
File.createTempFile("sbt", ".log"),
console
)
def initialState(configuration: xsbti.AppConfiguration,
initialDefinitions: Seq[Command],
preCommands: Seq[String]): State = {
def initialState(
configuration: xsbti.AppConfiguration,
initialDefinitions: Seq[Command],
preCommands: Seq[String]
): State = {
// This is to workaround https://github.com/sbt/io/issues/110
sys.props.put("jna.nosys", "true")
@ -277,21 +282,27 @@ object BuiltinCommands {
catch { case _: Exception => None }
def settingsCommand: Command =
showSettingLike(SettingsCommand,
settingsPreamble,
KeyRanks.MainSettingCutoff,
key => !isTask(key.manifest))
showSettingLike(
SettingsCommand,
settingsPreamble,
KeyRanks.MainSettingCutoff,
key => !isTask(key.manifest)
)
def tasks: Command =
showSettingLike(TasksCommand,
tasksPreamble,
KeyRanks.MainTaskCutoff,
key => isTask(key.manifest))
showSettingLike(
TasksCommand,
tasksPreamble,
KeyRanks.MainTaskCutoff,
key => isTask(key.manifest)
)
def showSettingLike(command: String,
preamble: String,
cutoff: Int,
keep: AttributeKey[_] => Boolean): Command =
def showSettingLike(
command: String,
preamble: String,
cutoff: Int,
keep: AttributeKey[_] => Boolean
): Command =
Command(command, settingsBrief(command), settingsDetailed(command))(showSettingParser(keep)) {
case (s: State, (verbosity: Int, selected: Option[String])) =>
if (selected.isEmpty) System.out.println(preamble)
@ -302,8 +313,9 @@ object BuiltinCommands {
if (prominentOnly) System.out.println(moreAvailableMessage(command, selected.isDefined))
s
}
def showSettingParser(keepKeys: AttributeKey[_] => Boolean)(
s: State): Parser[(Int, Option[String])] =
def showSettingParser(
keepKeys: AttributeKey[_] => Boolean
)(s: State): Parser[(Int, Option[String])] =
verbosityParser ~ selectedParser(s, keepKeys).?
def selectedParser(s: State, keepKeys: AttributeKey[_] => Boolean): Parser[String] =
singleArgument(allTaskAndSettingKeys(s).filter(keepKeys).map(_.label).toSet)
@ -338,16 +350,19 @@ object BuiltinCommands {
def sortByRank(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.rank)
def withDescription(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] =
keys.filter(_.description.isDefined)
def isTask(mf: Manifest[_])(implicit taskMF: Manifest[Task[_]],
inputMF: Manifest[InputTask[_]]): Boolean =
def isTask(
mf: Manifest[_]
)(implicit taskMF: Manifest[Task[_]], inputMF: Manifest[InputTask[_]]): Boolean =
mf.runtimeClass == taskMF.runtimeClass || mf.runtimeClass == inputMF.runtimeClass
def topNRanked(n: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).take(n)
def highPass(rankCutoff: Int) =
(keys: Seq[AttributeKey[_]]) => sortByRank(keys).takeWhile(_.rank <= rankCutoff)
def tasksHelp(s: State,
filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]],
arg: Option[String]): String = {
def tasksHelp(
s: State,
filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]],
arg: Option[String]
): String = {
val commandAndDescription = taskDetail(filter(allTaskAndSettingKeys(s)))
arg match {
case Some(selected) => detail(selected, commandAndDescription.toMap)
@ -618,8 +633,9 @@ object BuiltinCommands {
}
def projects: Command =
Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s =>
projectsParser(s).?) {
Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(
s => projectsParser(s).?
) {
case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds)
case (s, None) => showProjects(s); s
}
@ -658,7 +674,8 @@ object BuiltinCommands {
@tailrec
private[this] def doLoadFailed(s: State, loadArg: String): State = {
val result = (SimpleReader.readLine(
"Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? ") getOrElse Quit)
"Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? "
) getOrElse Quit)
.toLowerCase(Locale.ENGLISH)
def matches(s: String) = !result.isEmpty && (s startsWith result)
def retry = loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog
@ -684,8 +701,9 @@ object BuiltinCommands {
Nil
def loadProject: Command =
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)((s, arg) =>
loadProjectCommands(arg) ::: s)
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)(
(s, arg) => loadProjectCommands(arg) ::: s
)
private[this] def loadProjectParser: State => Parser[String] =
_ => matched(Project.loadActionParser)
@ -707,11 +725,13 @@ object BuiltinCommands {
Option(buildProperties.getProperty("sbt.version"))
} else None
sbtVersionOpt.foreach(version =>
if (version != app.id.version()) {
state.log.warn(s"""sbt version mismatch, current: ${app.id
.version()}, in build.properties: "$version", use 'reboot' to use the new value.""")
})
sbtVersionOpt.foreach(
version =>
if (version != app.id.version()) {
state.log.warn(s"""sbt version mismatch, current: ${app.id
.version()}, in build.properties: "$version", use 'reboot' to use the new value.""")
}
)
}
def doLoadProject(s0: State, action: LoadAction.Value): State = {
@ -758,8 +778,10 @@ object BuiltinCommands {
exchange publishEventMessage ConsolePromptEvent(s0)
val exec: Exec = exchange.blockUntilNextExec
val newState = s1
.copy(onFailure = Some(Exec(Shell, None)),
remainingCommands = exec +: Exec(Shell, None) +: s1.remainingCommands)
.copy(
onFailure = Some(Exec(Shell, None)),
remainingCommands = exec +: Exec(Shell, None) +: s1.remainingCommands
)
.setInteractive(true)
exchange publishEventMessage ConsoleUnpromptEvent(exec.source)
if (exec.commandLine.trim.isEmpty) newState

View File

@ -67,7 +67,8 @@ object MainLoop {
throw new xsbti.FullReload(e.arguments.toArray, false)
case NonFatal(e) =>
System.err.println(
"sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file)
"sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file
)
deleteLastLog(logBacking)
throw e
}

View File

@ -45,9 +45,11 @@ object Opts {
val sonatypeSnapshots = Resolver.sonatypeRepo("snapshots")
val sonatypeStaging = MavenRepository(
"sonatype-staging",
"https://oss.sonatype.org/service/local/staging/deploy/maven2")
"https://oss.sonatype.org/service/local/staging/deploy/maven2"
)
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository" asFile)(
Resolver.defaultPatterns)
Resolver.defaultPatterns
)
val sbtSnapshots = Resolver.bintrayRepo("sbt", "maven-snapshots")
val sbtIvySnapshots = Resolver.bintrayIvyRepo("sbt", "ivy-snapshots")
}

View File

@ -25,7 +25,8 @@ private[sbt] object PluginCross {
lazy val pluginSwitch: Command = {
def switchParser(state: State): Parser[(String, String)] = {
lazy val switchArgs = token(NotSpace.examples()) ~ (token(
Space ~> matched(state.combinedParser)) ?? "")
Space ~> matched(state.combinedParser)
) ?? "")
lazy val nextSpaced = spacedFirst(PluginSwitchCommand)
token(PluginSwitchCommand ~ OptSpace) flatMap { _ =>
switchArgs & nextSpaced
@ -58,8 +59,11 @@ private[sbt] object PluginCross {
def crossParser(state: State): Parser[String] =
token(PluginCrossCommand <~ OptSpace) flatMap { _ =>
token(
matched(state.combinedParser &
spacedFirst(PluginCrossCommand)))
matched(
state.combinedParser &
spacedFirst(PluginCrossCommand)
)
)
}
def crossVersions(state: State): List[String] = {
val x = Project.extract(state)

View File

@ -202,10 +202,12 @@ object Plugins extends PluginsFunctions {
_.head subsetOf knowledge0
})
log.debug(
s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}")
s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}"
)
Logic.reduce(
clauses,
(flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match {
(flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet
) match {
case Left(problem) => throw AutoPluginException(problem)
case Right(results) =>
log.debug(s" :: deduced result: ${results}")
@ -234,9 +236,11 @@ object Plugins extends PluginsFunctions {
private[sbt] def topologicalSort(ns: List[AutoPlugin]): List[AutoPlugin] = {
@tailrec
def doSort(found0: List[AutoPlugin],
notFound0: List[AutoPlugin],
limit0: Int): List[AutoPlugin] = {
def doSort(
found0: List[AutoPlugin],
notFound0: List[AutoPlugin],
limit0: Int
): List[AutoPlugin] = {
if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically")
else if (notFound0.isEmpty) found0
else {
@ -252,11 +256,9 @@ object Plugins extends PluginsFunctions {
private[sbt] def translateMessage(e: LogicException) = e match {
case ic: InitialContradictions =>
s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(
ic.literals.toSeq)}"
s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(ic.literals.toSeq)}"
case io: InitialOverlap =>
s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString(
io.literals.toSeq)}"
s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString(io.literals.toSeq)}"
case cn: CyclicNegation =>
s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}"
}
@ -273,9 +275,11 @@ object Plugins extends PluginsFunctions {
throw AutoPluginException(message)
}
private[this] def exclusionConflictError(requested: Plugins,
selected: Seq[AutoPlugin],
conflicting: Seq[AutoPlugin]): Unit = {
private[this] def exclusionConflictError(
requested: Plugins,
selected: Seq[AutoPlugin],
conflicting: Seq[AutoPlugin]
): Unit = {
def listConflicts(ns: Seq[AutoPlugin]) =
(ns map { c =>
val reasons = (if (flatten(requested) contains c) List("requested")
@ -427,8 +431,9 @@ ${listConflicts(conflicting)}""")
val pluginClazz = ap.getClass
existsAutoImportVal(pluginClazz)
.orElse(
catching(classOf[ClassNotFoundException]).opt(
Class.forName(s"${pluginClazz.getName}$autoImport$$", false, loader)))
catching(classOf[ClassNotFoundException])
.opt(Class.forName(s"${pluginClazz.getName}$autoImport$$", false, loader))
)
.isDefined
}

View File

@ -482,7 +482,8 @@ object Project extends ProjectExtra {
val newState = unloaded.copy(attributes = newAttrs)
// TODO: Fix this
onLoad(
updateCurrent(newState) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/ )
updateCurrent(newState) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/
)
}
def orIdentity[T](opt: Option[T => T]): T => T = opt getOrElse idFun
@ -517,8 +518,10 @@ object Project extends ProjectExtra {
val srvLogLevel: Option[Level.Value] = (logLevel in (ref, serverLog)).get(structure.data)
val hs: Option[Seq[ServerHandler]] = get(fullServerHandlers)
val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true))
val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(s.definedCommands,
projectCommand)
val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(
s.definedCommands,
projectCommand
)
val newAttrs =
s.attributes
.setCond(Watched.Configuration, watched)
@ -555,7 +558,8 @@ object Project extends ProjectExtra {
}
}
private[this] def overlappingTargets(
targets: Seq[(ProjectRef, File)]): Map[File, Seq[ProjectRef]] =
targets: Seq[(ProjectRef, File)]
): Map[File, Seq[ProjectRef]] =
targets.groupBy(_._2).filter(_._2.size > 1).mapValues(_.map(_._1))
private[this] def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
@ -588,15 +592,18 @@ object Project extends ProjectExtra {
def delegates(structure: BuildStructure, scope: Scope, key: AttributeKey[_]): Seq[ScopedKey[_]] =
structure.delegates(scope).map(d => ScopedKey(d, key))
def scopedKeyData(structure: BuildStructure,
scope: Scope,
key: AttributeKey[_]): Option[ScopedKeyData[_]] =
def scopedKeyData(
structure: BuildStructure,
scope: Scope,
key: AttributeKey[_]
): Option[ScopedKeyData[_]] =
structure.data.get(scope, key) map { v =>
ScopedKeyData(ScopedKey(scope, key), v)
}
def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])(
implicit display: Show[ScopedKey[_]]): String = {
implicit display: Show[ScopedKey[_]]
): String = {
val scoped = ScopedKey(scope, key)
val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse {
@ -637,20 +644,24 @@ object Project extends ProjectExtra {
val reverse = reverseDependencies(cMap, scoped)
val derivedReverse = reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet
def printDepScopes(baseLabel: String,
derivedLabel: String,
scopes: Iterable[ScopedKey[_]],
derived: Set[ScopedKey[_]]): String = {
def printDepScopes(
baseLabel: String,
derivedLabel: String,
scopes: Iterable[ScopedKey[_]],
derived: Set[ScopedKey[_]]
): String = {
val label = s"$baseLabel${if (derived.isEmpty) "" else s" (D=$derivedLabel)"}"
val prefix: ScopedKey[_] => String =
if (derived.isEmpty) const("") else sk => if (derived(sk)) "D " else " "
printScopes(label, scopes, prefix = prefix)
}
def printScopes(label: String,
scopes: Iterable[ScopedKey[_]],
max: Int = Int.MaxValue,
prefix: ScopedKey[_] => String = const("")) =
def printScopes(
label: String,
scopes: Iterable[ScopedKey[_]],
max: Int = Int.MaxValue,
prefix: ScopedKey[_] => String = const("")
) =
if (scopes.isEmpty) ""
else {
val (limited, more) =
@ -668,23 +679,27 @@ object Project extends ProjectExtra {
printScopes("Related", related, 10)
}
def settingGraph(structure: BuildStructure, basedir: File, scoped: ScopedKey[_])(
implicit display: Show[ScopedKey[_]]): SettingGraph =
implicit display: Show[ScopedKey[_]]
): SettingGraph =
SettingGraph(structure, basedir, scoped, 0)
def graphSettings(structure: BuildStructure, basedir: File)(
implicit display: Show[ScopedKey[_]]): Unit = {
implicit display: Show[ScopedKey[_]]
): Unit = {
def graph(actual: Boolean, name: String) =
graphSettings(structure, actual, name, new File(basedir, name + ".dot"))
graph(true, "actual_dependencies")
graph(false, "declared_dependencies")
}
def graphSettings(structure: BuildStructure, actual: Boolean, graphName: String, file: File)(
implicit display: Show[ScopedKey[_]]): Unit = {
implicit display: Show[ScopedKey[_]]
): Unit = {
val rel = relation(structure, actual)
val keyToString = display.show _
DotGraph.generateGraph(file, graphName, rel, keyToString, keyToString)
}
def relation(structure: BuildStructure, actual: Boolean)(
implicit display: Show[ScopedKey[_]]): Relation[ScopedKey[_], ScopedKey[_]] =
implicit display: Show[ScopedKey[_]]
): Relation[ScopedKey[_], ScopedKey[_]] =
relation(structure.settings, actual)(structure.delegates, structure.scopeLocal, display)
private[sbt] def relation(settings: Seq[Def.Setting[_]], actual: Boolean)(
@ -698,7 +713,8 @@ object Project extends ProjectExtra {
}
def showDefinitions(key: AttributeKey[_], defs: Seq[Scope])(
implicit display: Show[ScopedKey[_]]): String =
implicit display: Show[ScopedKey[_]]
): String =
showKeys(defs.map(scope => ScopedKey(scope, key)))
def showUses(defs: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String =
@ -708,17 +724,21 @@ object Project extends ProjectExtra {
s.map(display.show).sorted.mkString("\n\t", "\n\t", "\n\n")
def definitions(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(
implicit display: Show[ScopedKey[_]]): Seq[Scope] =
implicit display: Show[ScopedKey[_]]
): Seq[Scope] =
relation(structure, actual)(display)._1s.toSeq flatMap { sk =>
if (sk.key == key) sk.scope :: Nil else Nil
}
def usedBy(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(
implicit display: Show[ScopedKey[_]]): Seq[ScopedKey[_]] =
implicit display: Show[ScopedKey[_]]
): Seq[ScopedKey[_]] =
relation(structure, actual)(display).all.toSeq flatMap {
case (a, b) => if (b.key == key) List[ScopedKey[_]](a) else Nil
}
def reverseDependencies(cMap: Map[ScopedKey[_], Flattened],
scoped: ScopedKey[_]): Iterable[ScopedKey[_]] =
def reverseDependencies(
cMap: Map[ScopedKey[_], Flattened],
scoped: ScopedKey[_]
): Iterable[ScopedKey[_]] =
for ((key, compiled) <- cMap; dep <- compiled.dependencies if dep == scoped) yield key
def setAll(extracted: Extracted, settings: Seq[Def.Setting[_]]): SessionSettings =
@ -726,7 +746,8 @@ object Project extends ProjectExtra {
val ExtraBuilds = AttributeKey[List[URI]](
"extra-builds",
"Extra build URIs to load in addition to the ones defined by the project.")
"Extra build URIs to load in addition to the ones defined by the project."
)
def extraBuilds(s: State): List[URI] = getOrNil(s, ExtraBuilds)
def getOrNil[T](s: State, key: AttributeKey[List[T]]): List[T] = s get key getOrElse Nil
def setExtraBuilds(s: State, extra: List[URI]): State = s.put(ExtraBuilds, extra)
@ -812,15 +833,20 @@ object Project extends ProjectExtra {
import TupleSyntax._
(Keys.resolvedScoped, i)(
(scoped, task) =>
tx(task,
(state, value) =>
persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f)))
tx(
task,
(state, value) =>
persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f)
)
)
}
def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] = {
import TupleSyntax._
(i, Keys.resolvedScoped)((t, scoped) =>
tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value)))
(i, Keys.resolvedScoped)(
(t, scoped) =>
tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value))
)
}
}
@ -831,7 +857,8 @@ object Project extends ProjectExtra {
val enclosingValName = std.KeyMacro.definingValName(
c,
methodName =>
s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`""")
s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`"""
)
val name = c.Expr[String](Literal(Constant(enclosingValName)))
reify { Project(name.splice, new File(name.splice)) }
}
@ -840,8 +867,9 @@ object Project extends ProjectExtra {
private[sbt] trait GeneratedRootProject
trait ProjectExtra {
implicit def configDependencyConstructor[T](p: T)(
implicit ev: T => ProjectReference): Constructor =
implicit def configDependencyConstructor[T](
p: T
)(implicit ev: T => ProjectReference): Constructor =
new Constructor(p)
implicit def classpathDependency[T](
@ -854,7 +882,8 @@ trait ProjectExtra {
new Scoped.RichInitializeTask(init)
implicit def richInitializeInputTask[T](
init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] =
init: Initialize[InputTask[T]]
): Scoped.RichInitializeInputTask[T] =
new Scoped.RichInitializeInputTask(init)
implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] =

View File

@ -17,13 +17,15 @@ class RichURI(uri: URI) {
* Note that this method simply passes the individual components of this URI to the URI constructor
* that accepts each component individually. It is thus limited by the implementation restrictions of the relevant methods.
*/
def copy(scheme: String = uri.getScheme,
userInfo: String = uri.getUserInfo,
host: String = uri.getHost,
port: Int = uri.getPort,
path: String = uri.getPath,
query: String = uri.getQuery,
fragment: String = uri.getFragment) =
def copy(
scheme: String = uri.getScheme,
userInfo: String = uri.getUserInfo,
host: String = uri.getHost,
port: Int = uri.getPort,
path: String = uri.getPath,
query: String = uri.getQuery,
fragment: String = uri.getFragment
) =
new URI(scheme, userInfo, host, port, path, query, fragment)
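A brief usage sketch of the copy helper above; it wraps the java.net.URI explicitly, whereas calling code may instead rely on an implicit conversion (the example values are invented):

import java.net.URI

object RichURISketch {
  val original: URI = new URI("http://example.com/docs?page=1")
  // Swap only the scheme; every other component keeps its default taken from the wrapped URI.
  val secured: URI = new RichURI(original).copy(scheme = "https")
}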
/** Returns `true` if the fragment of the URI is defined. */

View File

@ -30,9 +30,11 @@ object ScopeFilter {
* If a task filter is not supplied, global is selected.
* Generally, always specify the project axis.
*/
def apply(projects: ProjectFilter = inProjects(ThisProject),
configurations: ConfigurationFilter = zeroAxis,
tasks: TaskFilter = zeroAxis): ScopeFilter =
def apply(
projects: ProjectFilter = inProjects(ThisProject),
configurations: ConfigurationFilter = zeroAxis,
tasks: TaskFilter = zeroAxis
): ScopeFilter =
new ScopeFilter {
private[sbt] def apply(data: Data): Scope => Boolean = {
val pf = projects(data)
@ -116,27 +118,35 @@ object ScopeFilter {
* Selects Scopes that have a project axis that is aggregated by `ref`, transitively if `transitive` is true.
* If `includeRoot` is true, Scopes with `ref` itself as the project axis value are also selected.
*/
def inAggregates(ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true): ProjectFilter =
byDeps(ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = true,
classpath = false)
def inAggregates(
ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true
): ProjectFilter =
byDeps(
ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = true,
classpath = false
)
/**
* Selects Scopes that have a project axis that is a dependency of `ref`, transitively if `transitive` is true.
* If `includeRoot` is true, Scopes with `ref` itself as the project axis value are also selected.
*/
def inDependencies(ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true): ProjectFilter =
byDeps(ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = false,
classpath = true)
def inDependencies(
ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true
): ProjectFilter =
byDeps(
ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = false,
classpath = true
)
/** Selects Scopes that have a project axis with one of the provided values.*/
def inProjects(projects: ProjectReference*): ProjectFilter =
@ -172,9 +182,11 @@ object ScopeFilter {
* Information provided to Scope filters. These provide project relationships,
* project reference resolution, and the list of all static Scopes.
*/
private final class Data(val units: Map[URI, LoadedBuildUnit],
val resolve: ProjectReference => ProjectRef,
val allScopes: Set[Scope])
private final class Data(
val units: Map[URI, LoadedBuildUnit],
val resolve: ProjectReference => ProjectRef,
val allScopes: Set[Scope]
)
/** Constructs a Data instance from the list of static scopes and the project relationships.*/
private[this] val getData: Initialize[Data] =
@ -195,20 +207,24 @@ object ScopeFilter {
new Data(build.units, resolve, scopes)
}
private[this] def getDependencies(structure: Map[URI, LoadedBuildUnit],
classpath: Boolean,
aggregate: Boolean): ProjectRef => Seq[ProjectRef] =
private[this] def getDependencies(
structure: Map[URI, LoadedBuildUnit],
classpath: Boolean,
aggregate: Boolean
): ProjectRef => Seq[ProjectRef] =
ref =>
Project.getProject(ref, structure).toList flatMap { p =>
(if (classpath) p.dependencies.map(_.project) else Nil) ++
(if (aggregate) p.aggregate else Nil)
}
private[this] def byDeps(ref: ProjectReference,
transitive: Boolean,
includeRoot: Boolean,
aggregate: Boolean,
classpath: Boolean): ProjectFilter =
private[this] def byDeps(
ref: ProjectReference,
transitive: Boolean,
includeRoot: Boolean,
aggregate: Boolean,
classpath: Boolean
): ProjectFilter =
inResolvedProjects { data =>
val resolvedRef = data.resolve(ref)
val direct = getDependencies(data.units, classpath = classpath, aggregate = aggregate)

View File

@ -16,9 +16,11 @@ final case class ScopedKeyData[A](scoped: ScopedKey[A], value: Any) {
def typeName: String = fold(fmtMf("Task[%s]"), fmtMf("InputTask[%s]"), key.manifest.toString)
def settingValue: Option[Any] = fold(const(None), const(None), Some(value))
def description: String =
fold(fmtMf("Task: %s"),
fmtMf("Input task: %s"),
"Setting: %s = %s" format (key.manifest.toString, value.toString))
fold(
fmtMf("Task: %s"),
fmtMf("Input task: %s"),
"Setting: %s = %s" format (key.manifest.toString, value.toString)
)
def fold[T](targ: OptManifest[_] => T, itarg: OptManifest[_] => T, s: => T): T =
key.manifest.runtimeClass match {
case TaskClass => targ(key.manifest.typeArguments.head)

View File

@ -39,7 +39,8 @@ object ScriptedPlugin extends AutoPlugin {
val scriptedBatchExecution =
settingKey[Boolean]("Enables or disables batch execution for scripted.")
val scriptedParallelInstances = settingKey[Int](
"Configures the number of scripted instances for parallel testing, only used in batch mode.")
"Configures the number of scripted instances for parallel testing, only used in batch mode."
)
val scriptedRun = taskKey[Method]("")
val scriptedLaunchOpts =
settingKey[Seq[String]]("options to pass to jvm launching scripted tasks")

View File

@ -28,7 +28,8 @@ object SessionVar {
def emptyMap = Map(IMap.empty)
def persistAndSet[T](key: ScopedKey[Task[T]], state: State, value: T)(
implicit f: JsonFormat[T]): State = {
implicit f: JsonFormat[T]
): State = {
persist(key, state, value)(f)
set(key, state, value)
}
@ -70,7 +71,8 @@ object SessionVar {
get(key, state) orElse read(key, state)(f)
def loadAndSet[T](key: ScopedKey[Task[T]], state: State, setIfUnset: Boolean = true)(
implicit f: JsonFormat[T]): (State, Option[T]) =
implicit f: JsonFormat[T]
): (State, Option[T]) =
get(key, state) match {
case s: Some[T] => (state, s)
case None =>

View File

@ -22,7 +22,8 @@ import BasicCommandStrings._, BasicKeys._
private[sbt] object TemplateCommandUtil {
def templateCommand: Command =
Command(TemplateCommand, templateBrief, templateDetailed)(_ => templateCommandParser)(
runTemplate)
runTemplate
)
private def templateCommandParser: Parser[Seq[String]] =
(token(Space) ~> repsep(StringBasic, token(Space))) | (token(EOF) map (_ => Nil))
@ -35,10 +36,12 @@ private[sbt] object TemplateCommandUtil {
val templateStage = stagingDirectory / "new"
// This moves the target directory to a staging directory
// https://github.com/sbt/sbt/issues/2835
val state = extracted0.appendWithSession(Seq(
Keys.target := templateStage
),
s0)
val state = extracted0.appendWithSession(
Seq(
Keys.target := templateStage
),
s0
)
val infos = (state get templateResolverInfos getOrElse Nil).toList
val log = state.globalLogging.full
val extracted = (Project extract state)
@ -74,18 +77,22 @@ private[sbt] object TemplateCommandUtil {
case None => System.err.println("Template not found for: " + arguments.mkString(" "))
}
private def tryTemplate(info: TemplateResolverInfo,
arguments: List[String],
loader: ClassLoader): Boolean = {
private def tryTemplate(
info: TemplateResolverInfo,
arguments: List[String],
loader: ClassLoader
): Boolean = {
val resultObj = call(info.implementationClass, "isDefined", loader)(
classOf[Array[String]]
)(arguments.toArray)
resultObj.asInstanceOf[Boolean]
}
private def runTemplate(info: TemplateResolverInfo,
arguments: List[String],
loader: ClassLoader): Unit = {
private def runTemplate(
info: TemplateResolverInfo,
arguments: List[String],
loader: ClassLoader
): Unit = {
call(info.implementationClass, "run", loader)(classOf[Array[String]])(arguments.toArray)
()
}

View File

@ -32,44 +32,56 @@ object Act {
token(OptSpace ~> '/' <~ OptSpace).examples("/").map(_ => ())
// this does not take aggregation into account
def scopedKey(index: KeyIndex,
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]): Parser[ScopedKey[_]] =
def scopedKey(
index: KeyIndex,
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]
): Parser[ScopedKey[_]] =
scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key)
// the index should be an aggregated index for proper tab completion
def scopedKeyAggregated(current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
structure: BuildStructure): KeysParser =
for (selected <- scopedKeySelected(structure.index.aggregateKeyIndex,
current,
defaultConfigs,
structure.index.keyMap,
structure.data))
def scopedKeyAggregated(
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
structure: BuildStructure
): KeysParser =
for (selected <- scopedKeySelected(
structure.index.aggregateKeyIndex,
current,
defaultConfigs,
structure.index.keyMap,
structure.data
))
yield Aggregation.aggregate(selected.key, selected.mask, structure.extra)
def scopedKeySelected(index: KeyIndex,
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]): Parser[ParsedKey] =
def scopedKeySelected(
index: KeyIndex,
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]
): Parser[ParsedKey] =
scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices =>
select(choices, data)(showRelativeKey2(current))
}
def scopedKeyFull(index: KeyIndex,
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]]): Parser[Seq[Parser[ParsedKey]]] = {
def scopedKeyFull(
index: KeyIndex,
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]]
): Parser[Seq[Parser[ParsedKey]]] = {
def fullKey =
for {
rawProject <- optProjectRef(index, current)
proj = resolveProject(rawProject, current)
confAmb <- configIdent(index configs proj,
index configIdents proj,
index.fromConfigIdent(proj))
confAmb <- configIdent(
index configs proj,
index configIdents proj,
index.fromConfigIdent(proj)
)
partialMask = ScopeMask(rawProject.isExplicit, confAmb.isExplicit, false, false)
} yield taskKeyExtra(index, defaultConfigs, keyMap, proj, confAmb, partialMask)
@ -78,12 +90,14 @@ object Act {
for {
g <- globalIdent
} yield
taskKeyExtra(index,
defaultConfigs,
keyMap,
None,
ParsedZero,
ScopeMask(true, true, false, false))
taskKeyExtra(
index,
defaultConfigs,
keyMap,
None,
ParsedZero,
ScopeMask(true, true, false, false)
)
globalKey | fullKey
}
@ -109,17 +123,21 @@ object Act {
new ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask)
}
def makeScopedKey(proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]],
extra: ScopeAxis[AttributeMap],
key: AttributeKey[_]): ScopedKey[_] =
def makeScopedKey(
proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]],
extra: ScopeAxis[AttributeMap],
key: AttributeKey[_]
): ScopedKey[_] =
ScopedKey(
Scope(toAxis(proj, Zero), toAxis(conf map ConfigKey.apply, Zero), toAxis(task, Zero), extra),
key)
key
)
def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(
implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] =
implicit show: Show[ScopedKey[_]]
): Parser[ParsedKey] =
seq(allKeys) flatMap { ss =>
val default = ss.headOption match {
case None => noValidKeys
@ -128,7 +146,8 @@ object Act {
selectFromValid(ss filter isValid(data), default)
}
def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(
implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] =
implicit show: Show[ScopedKey[_]]
): Parser[ParsedKey] =
selectByTask(selectByConfig(ss)) match {
case Seq() => default
case Seq(single) => success(single)
@ -179,9 +198,11 @@ object Act {
}
// New configuration parser that's able to parse configuration ident trailed by slash.
private[sbt] def configIdent(confs: Set[String],
idents: Set[String],
fromIdent: String => String): Parser[ParsedAxis[String]] = {
private[sbt] def configIdent(
confs: Set[String],
idents: Set[String],
fromIdent: String => String
): Parser[ParsedAxis[String]] = {
val oldSep: Parser[Char] = ':'
val sep: Parser[Unit] = spacedSlash !!! "Expected '/'"
token(
@ -195,14 +216,17 @@ object Act {
) ?? Omitted
}
def configs(explicit: ParsedAxis[String],
defaultConfigs: Option[ResolvedReference] => Seq[String],
proj: Option[ResolvedReference],
index: KeyIndex): Seq[Option[String]] =
def configs(
explicit: ParsedAxis[String],
defaultConfigs: Option[ResolvedReference] => Seq[String],
proj: Option[ResolvedReference],
index: KeyIndex
): Seq[Option[String]] =
explicit match {
case Omitted =>
None +: defaultConfigurations(proj, index, defaultConfigs).flatMap(
nonEmptyConfig(index, proj))
nonEmptyConfig(index, proj)
)
case ParsedZero | ParsedGlobal => None :: Nil
case pv: ParsedValue[x] => Some(pv.value) :: Nil
}
@ -214,15 +238,19 @@ object Act {
): Seq[String] =
if (index exists proj) defaultConfigs(proj) else Nil
def nonEmptyConfig(index: KeyIndex,
proj: Option[ResolvedReference]): String => Seq[Option[String]] =
def nonEmptyConfig(
index: KeyIndex,
proj: Option[ResolvedReference]
): String => Seq[Option[String]] =
config => if (index.isEmpty(proj, Some(config))) Nil else Some(config) :: Nil
def key(index: KeyIndex,
proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]],
keyMap: Map[String, AttributeKey[_]]): Parser[AttributeKey[_]] = {
def key(
index: KeyIndex,
proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]],
keyMap: Map[String, AttributeKey[_]]
): Parser[AttributeKey[_]] = {
def dropHyphenated(keys: Set[String]): Set[String] = keys.filterNot(Util.hasHyphen)
def keyParser(keys: Set[String]): Parser[AttributeKey[_]] =
token(ID !!! "Expected key" examples dropHyphenated(keys)) flatMap { keyString =>
@ -240,9 +268,11 @@ object Act {
keyParser(keys)
}
def getKey[T](keyMap: Map[String, AttributeKey[_]],
keyString: String,
f: AttributeKey[_] => T): Parser[T] =
def getKey[T](
keyMap: Map[String, AttributeKey[_]],
keyString: String,
f: AttributeKey[_] => T
): Parser[T] =
keyMap.get(keyString) match {
case Some(k) => success(f(k))
case None => failure(Command.invalidValue("key", keyMap.keys)(keyString))
@ -250,8 +280,10 @@ object Act {
val spacedComma = token(OptSpace ~ ',' ~ OptSpace)
def extraAxis(knownKeys: Map[String, AttributeKey[_]],
knownValues: IMap[AttributeKey, Set]): Parser[ScopeAxis[AttributeMap]] = {
def extraAxis(
knownKeys: Map[String, AttributeKey[_]],
knownValues: IMap[AttributeKey, Set]
): Parser[ScopeAxis[AttributeMap]] = {
val extrasP = extrasParser(knownKeys, knownValues)
val extras = token('(', hide = _ == 1 && knownValues.isEmpty) ~> extrasP <~ token(')')
optionalAxis(extras, Zero)
@ -271,7 +303,8 @@ object Act {
(token(
value(keyP)
| ZeroString ^^^ ParsedZero
| ZeroIdent ^^^ ParsedZero) <~ (token("::".id) | spacedSlash)) ?? Omitted
| ZeroIdent ^^^ ParsedZero
) <~ (token("::".id) | spacedSlash)) ?? Omitted
}
def resolveTask(task: ParsedAxis[AttributeKey[_]]): Option[AttributeKey[_]] =
@ -283,8 +316,10 @@ object Act {
def filterStrings(base: Parser[String], valid: Set[String], label: String): Parser[String] =
base.filter(valid, Command.invalidValue(label, valid))
def extrasParser(knownKeys: Map[String, AttributeKey[_]],
knownValues: IMap[AttributeKey, Set]): Parser[AttributeMap] = {
def extrasParser(
knownKeys: Map[String, AttributeKey[_]],
knownValues: IMap[AttributeKey, Set]
): Parser[AttributeMap] = {
val validKeys = knownKeys.filter { case (_, key) => knownValues get key exists (_.nonEmpty) }
if (validKeys.isEmpty)
failure("No valid extra keys.")
@ -292,8 +327,10 @@ object Act {
rep1sep(extraParser(validKeys, knownValues), spacedComma) map AttributeMap.apply
}
def extraParser(knownKeys: Map[String, AttributeKey[_]],
knownValues: IMap[AttributeKey, Set]): Parser[AttributeEntry[_]] = {
def extraParser(
knownKeys: Map[String, AttributeKey[_]],
knownValues: IMap[AttributeKey, Set]
): Parser[AttributeEntry[_]] = {
val keyp = knownIDParser(knownKeys, "Not a valid extra key") <~ token(':' ~ OptSpace)
keyp flatMap {
case key: AttributeKey[t] =>
@ -321,12 +358,15 @@ object Act {
value(resolvedReference(index, currentBuild, trailing))
}
private[sbt] def resolvedReferenceIdent(index: KeyIndex,
currentBuild: URI,
trailing: Parser[_]): Parser[ResolvedReference] = {
private[sbt] def resolvedReferenceIdent(
index: KeyIndex,
currentBuild: URI,
trailing: Parser[_]
): Parser[ResolvedReference] = {
def projectID(uri: URI) =
token(
DQuoteChar ~> examplesStrict(ID, index projects uri, "project ID") <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing)
DQuoteChar ~> examplesStrict(ID, index projects uri, "project ID") <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing
)
def projectRef(uri: URI) = projectID(uri) map { id =>
ProjectRef(uri, id)
}
@ -336,15 +376,18 @@ object Act {
val buildRef = token(
"ProjectRef(" ~> OptSpace ~> "uri(" ~> OptSpace ~> DQuoteChar ~>
resolvedURI <~ DQuoteChar <~ OptSpace <~ ")" <~ spacedComma)
resolvedURI <~ DQuoteChar <~ OptSpace <~ ")" <~ spacedComma
)
buildRef flatMap { uri =>
projectRef(uri)
}
}
def resolvedReference(index: KeyIndex,
currentBuild: URI,
trailing: Parser[_]): Parser[ResolvedReference] = {
def resolvedReference(
index: KeyIndex,
currentBuild: URI,
trailing: Parser[_]
): Parser[ResolvedReference] = {
def projectID(uri: URI) =
token(examplesStrict(ID, index projects uri, "project ID") <~ trailing)
def projectRef(uri: URI) = projectID(uri) map { id =>
@ -363,8 +406,10 @@ object Act {
def optProjectRef(index: KeyIndex, current: ProjectRef): Parser[ParsedAxis[ResolvedReference]] =
projectRef(index, current.build) ?? Omitted
def resolveProject(parsed: ParsedAxis[ResolvedReference],
current: ProjectRef): Option[ResolvedReference] =
def resolveProject(
parsed: ParsedAxis[ResolvedReference],
current: ProjectRef
): Option[ResolvedReference] =
parsed match {
case Omitted => Some(current)
case ParsedZero => None
@ -412,11 +457,13 @@ object Act {
def scopedKeyParser(extracted: Extracted): Parser[ScopedKey[_]] =
scopedKeyParser(extracted.structure, extracted.currentRef)
def scopedKeyParser(structure: BuildStructure, currentRef: ProjectRef): Parser[ScopedKey[_]] =
scopedKey(structure.index.keyIndex,
currentRef,
structure.extra.configurationsForAxis,
structure.index.keyMap,
structure.data)
scopedKey(
structure.index.keyIndex,
currentRef,
structure.extra.configurationsForAxis,
structure.index.keyMap,
structure.data
)
type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T }]
def aggregatedKeyParser(state: State): KeysParser = aggregatedKeyParser(Project extract state)
@ -435,17 +482,21 @@ object Act {
KeyValue(key, value)
}
}
private[this] def anyKeyValues(structure: BuildStructure,
keys: Seq[ScopedKey[_]]): Seq[KeyValue[_]] =
private[this] def anyKeyValues(
structure: BuildStructure,
keys: Seq[ScopedKey[_]]
): Seq[KeyValue[_]] =
keys.flatMap { key =>
getValue(structure.data, key.scope, key.key) map { value =>
KeyValue(key, value)
}
}
private[this] def getValue[T](data: Settings[Scope],
scope: Scope,
key: AttributeKey[T]): Option[T] =
private[this] def getValue[T](
data: Settings[Scope],
scope: Scope,
key: AttributeKey[T]
): Option[T] =
if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(scope, key)
else data.get(scope, key)
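
Curried signatures in this file, such as `select` and `selectFromValid` above, get the same treatment: the implicit parameter list opens at the end of the first line, its parameter sits on its own line, and the dangling parenthesis closes it before the result type. A hypothetical sketch of that layout (illustrative names only):

object CurriedSketch {
  // Mirrors the reformatted select/selectFromValid parsers above:
  // inline explicit list, implicit list broken onto its own lines.
  def render[A](values: Seq[A], separator: String)(
      implicit show: A => String
  ): String =
    values.map(show).mkString(separator)

  // Example usage, supplying the "implicit" conversion explicitly.
  val rendered: String = render(Seq(1, 2, 3), ", ")(_.toString)
}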

View File

@ -102,10 +102,12 @@ object Aggregation {
Complete(start, stop, result, newS)
}
def runTasks[HL <: HList, T](s: State,
ts: Values[Task[T]],
extra: DummyTaskMap,
show: ShowConfig)(implicit display: Show[ScopedKey[_]]): State = {
def runTasks[HL <: HList, T](
s: State,
ts: Values[Task[T]],
extra: DummyTaskMap,
show: ShowConfig
)(implicit display: Show[ScopedKey[_]]): State = {
val complete = timedRun[T](s, ts, extra)
showRun(complete, show)
complete.results match {
@ -226,8 +228,9 @@ object Aggregation {
reverse: Boolean
): Seq[ProjectRef] = {
val resRef = proj.map(p => extra.projectRefFor(extra.resolveRef(p)))
resRef.toList.flatMap(ref =>
if (reverse) extra.aggregates.reverse(ref) else extra.aggregates.forward(ref))
resRef.toList.flatMap(
ref => if (reverse) extra.aggregates.reverse(ref) else extra.aggregates.forward(ref)
)
}
def aggregate[T, Proj](

View File

@ -12,8 +12,10 @@ import sbt.internal.util.Types.idFun
import sbt.internal.util.Dag
import BuildDependencies._
final class BuildDependencies private (val classpath: DependencyMap[ClasspathDep[ProjectRef]],
val aggregate: DependencyMap[ProjectRef]) {
final class BuildDependencies private (
val classpath: DependencyMap[ClasspathDep[ProjectRef]],
val aggregate: DependencyMap[ProjectRef]
) {
def classpathRefs(ref: ProjectRef): Seq[ProjectRef] = classpath(ref) map getID
def classpathTransitiveRefs(ref: ProjectRef): Seq[ProjectRef] = classpathTransitive(ref)
@ -27,8 +29,10 @@ final class BuildDependencies private (val classpath: DependencyMap[ClasspathDep
new BuildDependencies(classpath, aggregate.updated(ref, deps ++ aggregate.getOrElse(ref, Nil)))
}
object BuildDependencies {
def apply(classpath: DependencyMap[ClasspathDep[ProjectRef]],
aggregate: DependencyMap[ProjectRef]): BuildDependencies =
def apply(
classpath: DependencyMap[ClasspathDep[ProjectRef]],
aggregate: DependencyMap[ProjectRef]
): BuildDependencies =
new BuildDependencies(classpath, aggregate)
type DependencyMap[D] = Map[ProjectRef, Seq[D]]
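
Class constructors with several `val` parameters follow the same rule as method signatures in the hunk above: one parameter per line, with the dangling parenthesis (and any `extends` clause) on the closing line. A small hypothetical sketch, not taken from the PR:

object ClassSketch {
  trait Node
  // Constructor parameters are laid out one per line; the closing line
  // carries the dangling parenthesis together with the extends clause.
  final class Edge(
      val from: String,
      val to: String,
      val weight: Double
  ) extends Node
}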

View File

@ -16,20 +16,24 @@ import sbt.internal.util.Types.{ const, idFun }
import sbt.util.Logger
import sbt.librarymanagement.ModuleID
final class MultiHandler[S, T](builtIn: S => Option[T],
root: Option[S => Option[T]],
nonRoots: List[(URI, S => Option[T])],
getURI: S => URI,
log: S => Logger) {
final class MultiHandler[S, T](
builtIn: S => Option[T],
root: Option[S => Option[T]],
nonRoots: List[(URI, S => Option[T])],
getURI: S => URI,
log: S => Logger
) {
def applyFun: S => Option[T] = apply
def apply(info: S): Option[T] =
(baseLoader(info), applyNonRoots(info)) match {
case (None, Nil) => None
case (None, xs @ (_, nr) :: ignored) =>
if (ignored.nonEmpty)
warn("Using first of multiple matching non-root build resolvers for " + getURI(info),
log(info),
xs)
warn(
"Using first of multiple matching non-root build resolvers for " + getURI(info),
log(info),
xs
)
Some(nr)
case (Some(b), xs) =>
if (xs.nonEmpty)
@ -72,28 +76,34 @@ object BuildLoader {
type Loader = LoadInfo => Option[() => BuildUnit]
type TransformAll = PartBuild => PartBuild
final class Components(val resolver: Resolver,
val builder: Builder,
val transformer: Transformer,
val full: Loader,
val transformAll: TransformAll) {
final class Components(
val resolver: Resolver,
val builder: Builder,
val transformer: Transformer,
val full: Loader,
val transformAll: TransformAll
) {
def |(cs: Components): Components =
new Components(resolver | cs.resolver,
builder | cs.builder,
seq(transformer, cs.transformer),
full | cs.full,
transformAll andThen cs.transformAll)
new Components(
resolver | cs.resolver,
builder | cs.builder,
seq(transformer, cs.transformer),
full | cs.full,
transformAll andThen cs.transformAll
)
}
def transform(t: Transformer): Components = components(transformer = t)
def resolve(r: Resolver): Components = components(resolver = r)
def build(b: Builder): Components = components(builder = b)
def full(f: Loader): Components = components(full = f)
def transformAll(t: TransformAll) = components(transformAll = t)
def components(resolver: Resolver = const(None),
builder: Builder = const(None),
transformer: Transformer = _.unit,
full: Loader = const(None),
transformAll: TransformAll = idFun) =
def components(
resolver: Resolver = const(None),
builder: Builder = const(None),
transformer: Transformer = _.unit,
full: Loader = const(None),
transformAll: TransformAll = idFun
) =
new Components(resolver, builder, transformer, full, transformAll)
def seq(a: Transformer, b: Transformer): Transformer = info => b(info.setUnit(a(info)))
@ -103,47 +113,55 @@ object BuildLoader {
def config: LoadBuildConfiguration
def state: State
}
final class ResolveInfo(val uri: URI,
val staging: File,
val config: LoadBuildConfiguration,
val state: State)
extends Info
final class BuildInfo(val uri: URI,
val base: File,
val config: LoadBuildConfiguration,
val state: State)
extends Info
final class TransformInfo(val uri: URI,
val base: File,
val unit: BuildUnit,
val config: LoadBuildConfiguration,
val state: State)
extends Info {
final class ResolveInfo(
val uri: URI,
val staging: File,
val config: LoadBuildConfiguration,
val state: State
) extends Info
final class BuildInfo(
val uri: URI,
val base: File,
val config: LoadBuildConfiguration,
val state: State
) extends Info
final class TransformInfo(
val uri: URI,
val base: File,
val unit: BuildUnit,
val config: LoadBuildConfiguration,
val state: State
) extends Info {
def setUnit(newUnit: BuildUnit): TransformInfo =
new TransformInfo(uri, base, newUnit, config, state)
}
final class LoadInfo(val uri: URI,
val staging: File,
val config: LoadBuildConfiguration,
val state: State,
val components: Components)
extends Info
final class LoadInfo(
val uri: URI,
val staging: File,
val config: LoadBuildConfiguration,
val state: State,
val components: Components
) extends Info
def apply(base: Components,
fail: URI => Nothing,
s: State,
config: LoadBuildConfiguration): BuildLoader = {
def apply(
base: Components,
fail: URI => Nothing,
s: State,
config: LoadBuildConfiguration
): BuildLoader = {
def makeMulti[S <: Info, T](base: S => Option[T]) =
new MultiHandler[S, T](base, None, Nil, _.uri, _.config.log)
new BuildLoader(fail,
s,
config,
makeMulti(base.resolver),
makeMulti(base.builder),
base.transformer,
makeMulti(base.full),
base.transformAll)
new BuildLoader(
fail,
s,
config,
makeMulti(base.resolver),
makeMulti(base.builder),
base.transformer,
makeMulti(base.full),
base.transformAll
)
}
def componentLoader: Loader = (info: LoadInfo) => {

View File

@ -20,14 +20,16 @@ import sbt.internal.util.Attributed.data
import sbt.util.Logger
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
final class BuildStructure(val units: Map[URI, LoadedBuildUnit],
val root: URI,
val settings: Seq[Setting[_]],
val data: Settings[Scope],
val index: StructureIndex,
val streams: State => Streams,
val delegates: Scope => Seq[Scope],
val scopeLocal: ScopeLocal) {
final class BuildStructure(
val units: Map[URI, LoadedBuildUnit],
val root: URI,
val settings: Seq[Setting[_]],
val data: Settings[Scope],
val index: StructureIndex,
val streams: State => Streams,
val delegates: Scope => Seq[Scope],
val scopeLocal: ScopeLocal
) {
val rootProject: URI => String = Load getRootProject units
def allProjects: Seq[ResolvedProject] = units.values.flatMap(_.defined.values).toSeq
def allProjects(build: URI): Seq[ResolvedProject] =
@ -59,11 +61,12 @@ final class StructureIndex(
* @param rootProjects The list of project IDs for the projects considered roots of this build.
* The first root project is used as the default in several situations where a project is not otherwise selected.
*/
final class LoadedBuildUnit(val unit: BuildUnit,
val defined: Map[String, ResolvedProject],
val rootProjects: Seq[String],
val buildSettings: Seq[Setting[_]])
extends BuildUnitBase {
final class LoadedBuildUnit(
val unit: BuildUnit,
val defined: Map[String, ResolvedProject],
val rootProjects: Seq[String],
val buildSettings: Seq[Setting[_]]
) extends BuildUnitBase {
/**
* The project to use as the default when one is not otherwise selected.
@ -72,7 +75,8 @@ final class LoadedBuildUnit(val unit: BuildUnit,
val root = rootProjects match {
case Nil =>
throw new java.lang.AssertionError(
"assertion failed: No root projects defined for build unit " + unit)
"assertion failed: No root projects defined for build unit " + unit
)
case Seq(root, _*) => root
}
@ -157,8 +161,10 @@ case class DetectedAutoPlugin(name: String, value: AutoPlugin, hasAutoImport: Bo
* @param builds The [[BuildDef]]s detected in the build definition.
* This does not include the default [[BuildDef]] that sbt creates if none is defined.
*/
final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin],
val builds: DetectedModules[BuildDef]) {
final class DetectedPlugins(
val autoPlugins: Seq[DetectedAutoPlugin],
val builds: DetectedModules[BuildDef]
) {
/**
* Sequence of import expressions for the build definition.
@ -201,10 +207,12 @@ final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin],
* @param loader The class loader for the build definition project, notably excluding classes used for .sbt files.
* @param detected Auto-detected modules in the build definition.
*/
final class LoadedPlugins(val base: File,
val pluginData: PluginData,
val loader: ClassLoader,
val detected: DetectedPlugins) {
final class LoadedPlugins(
val base: File,
val pluginData: PluginData,
val loader: ClassLoader,
val detected: DetectedPlugins
) {
def fullClasspath: Seq[Attributed[File]] = pluginData.classpath
def classpath = data(fullClasspath)
}
@ -215,10 +223,12 @@ final class LoadedPlugins(val base: File,
* @param localBase The working location of the build on the filesystem.
* For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build.
*/
final class BuildUnit(val uri: URI,
val localBase: File,
val definitions: LoadedDefinitions,
val plugins: LoadedPlugins) {
final class BuildUnit(
val uri: URI,
val localBase: File,
val definitions: LoadedDefinitions,
val plugins: LoadedPlugins
) {
override def toString =
if (uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase + ")")
}
@ -234,11 +244,12 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) {
}
final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit])
sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] }
final class PartBuildUnit(val unit: BuildUnit,
val defined: Map[String, Project],
val rootProjects: Seq[String],
val buildSettings: Seq[Setting[_]])
extends BuildUnitBase {
final class PartBuildUnit(
val unit: BuildUnit,
val defined: Map[String, Project],
val rootProjects: Seq[String],
val buildSettings: Seq[Setting[_]]
) extends BuildUnitBase {
def resolve(f: Project => ResolvedProject): LoadedBuildUnit =
new LoadedBuildUnit(unit, defined mapValues f toMap, rootProjects, buildSettings)
def resolveRefs(f: ProjectReference => ProjectRef): LoadedBuildUnit = resolve(_ resolve f)
@ -251,29 +262,37 @@ object BuildStreams {
final val BuildUnitPath = "$build"
final val StreamsDirectory = "streams"
def mkStreams(units: Map[URI, LoadedBuildUnit],
root: URI,
data: Settings[Scope]): State => Streams = s => {
def mkStreams(
units: Map[URI, LoadedBuildUnit],
root: URI,
data: Settings[Scope]
): State => Streams = s => {
implicit val isoString: sjsonnew.IsoString[JValue] =
sjsonnew.IsoString.iso(sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe)
sjsonnew.IsoString.iso(
sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
)
(s get Keys.stateStreams) getOrElse {
std.Streams(path(units, root, data),
displayFull,
LogManager.construct(data, s),
sjsonnew.support.scalajson.unsafe.Converter)
std.Streams(
path(units, root, data),
displayFull,
LogManager.construct(data, s),
sjsonnew.support.scalajson.unsafe.Converter
)
}
}
def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(
scoped: ScopedKey[_]): File =
scoped: ScopedKey[_]
): File =
resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped))
def resolvePath(base: File, components: Seq[String]): File =
(base /: components)((b, p) => new File(b, p))
def pathComponent[T](axis: ScopeAxis[T], scoped: ScopedKey[_], label: String)(
show: T => String): String =
show: T => String
): String =
axis match {
case Zero => GlobalPath
case This =>
@ -292,10 +311,12 @@ object BuildStreams {
a.entries.toSeq.sortBy(_.key.label).map {
case AttributeEntry(key, value) => key.label + "=" + value.toString
} mkString (" ")
def projectPath(units: Map[URI, LoadedBuildUnit],
root: URI,
scoped: ScopedKey[_],
data: Settings[Scope]): File =
def projectPath(
units: Map[URI, LoadedBuildUnit],
root: URI,
scoped: ScopedKey[_],
data: Settings[Scope]
): File =
scoped.scope.project match {
case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath
case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath

View File

@ -48,10 +48,12 @@ final class BuildUtil[Proj](
refOpt => configurations(projectForAxis(refOpt)).map(_.name)
}
object BuildUtil {
def apply(root: URI,
units: Map[URI, LoadedBuildUnit],
keyIndex: KeyIndex,
data: Settings[Scope]): BuildUtil[ResolvedProject] = {
def apply(
root: URI,
units: Map[URI, LoadedBuildUnit],
keyIndex: KeyIndex,
data: Settings[Scope]
): BuildUtil[ResolvedProject] = {
val getp = (build: URI, project: String) => Load.getProject(units, build, project)
val configs = (_: ResolvedProject).configurations.map(c => ConfigKey(c.name))
val aggregates = aggregationRelation(units)
@ -72,8 +74,9 @@ object BuildUtil {
def checkCycles(units: Map[URI, LoadedBuildUnit]): Unit = {
def getRef(pref: ProjectRef) = units(pref.build).defined(pref.project)
def deps(proj: ResolvedProject)(
base: ResolvedProject => Seq[ProjectRef]): Seq[ResolvedProject] =
def deps(
proj: ResolvedProject
)(base: ResolvedProject => Seq[ProjectRef]): Seq[ResolvedProject] =
Dag.topologicalSort(proj)(p => base(p) map getRef)
// check for cycles
for ((_, lbu) <- units; proj <- lbu.defined.values) {

View File

@ -147,7 +147,8 @@ private[sbt] final class CommandExchange {
server = Some(serverInstance)
case Some(Failure(_: AlreadyRunningException)) =>
s.log.warn(
"sbt server could not start because there's another instance of sbt running on this build.")
"sbt server could not start because there's another instance of sbt running on this build."
)
s.log.warn("Running multiple instances is unsupported")
server = None
firstInstance.set(false)

View File

@ -115,8 +115,10 @@ $LastCommand <key>
val InspectCommand = "inspect"
val inspectBrief =
(s"$InspectCommand [tree|uses|definitions|actual] <key>",
"Prints the value for 'key', the defining scope, delegates, related definitions, and dependencies.")
(
s"$InspectCommand [tree|uses|definitions|actual] <key>",
"Prints the value for 'key', the defining scope, delegates, related definitions, and dependencies."
)
val inspectDetailed = s"""
|$InspectCommand <key>
|

View File

@ -15,7 +15,8 @@ import xsbti.compile.ClasspathOptionsUtil
object ConsoleProject {
def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(
implicit log: Logger): Unit = {
implicit log: Logger
): Unit = {
val extracted = Project extract state
val cpImports = new Imports(extracted, state)
val bindings = ("currentState" -> state) :: ("extracted" -> extracted) :: ("cpHelpers" -> cpImports) :: Nil

View File

@ -113,9 +113,11 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
protected def makeContext(id: Long, spawningTask: ScopedKey[_], state: State): ManagedLogger
def doRunInBackground(spawningTask: ScopedKey[_],
state: State,
start: (Logger, File) => BackgroundJob): JobHandle = {
def doRunInBackground(
spawningTask: ScopedKey[_],
state: State,
start: (Logger, File) => BackgroundJob
): JobHandle = {
val id = nextId.getAndIncrement()
val logger = makeContext(id, spawningTask, state)
val workingDir = serviceTempDir / s"job-$id"
@ -132,7 +134,8 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
}
override def runInBackground(spawningTask: ScopedKey[_], state: State)(
start: (Logger, File) => Unit): JobHandle = {
start: (Logger, File) => Unit
): JobHandle = {
pool.run(this, spawningTask, state)(start)
}
@ -155,7 +158,8 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
case _: DeadHandle @unchecked => () // nothing to stop or wait for
case other =>
sys.error(
s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other")
s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other"
)
}
private def withHandleTry(job: JobHandle)(f: ThreadJobHandle => Try[Unit]): Try[Unit] =
@ -163,8 +167,11 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
case handle: ThreadJobHandle @unchecked => f(handle)
case _: DeadHandle @unchecked => Try(()) // nothing to stop or wait for
case other =>
Try(sys.error(
s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other"))
Try(
sys.error(
s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other"
)
)
}
override def stop(job: JobHandle): Unit =
@ -363,7 +370,8 @@ private[sbt] class BackgroundThreadPool extends java.io.Closeable {
}
def run(manager: AbstractBackgroundJobService, spawningTask: ScopedKey[_], state: State)(
work: (Logger, File) => Unit): JobHandle = {
work: (Logger, File) => Unit
): JobHandle = {
def start(logger: Logger, workingDir: File): BackgroundJob = {
val runnable = new BackgroundRunnable(spawningTask.key.label, { () =>
work(logger, workingDir)

View File

@ -44,9 +44,11 @@ private[sbt] object EvaluateConfigurations {
/**
* This represents the parsed expressions in a build sbt, as well as where they were defined.
*/
private[this] final class ParsedFile(val imports: Seq[(String, Int)],
val definitions: Seq[(String, LineRange)],
val settings: Seq[(String, LineRange)])
private[this] final class ParsedFile(
val imports: Seq[(String, Int)],
val definitions: Seq[(String, LineRange)],
val settings: Seq[(String, LineRange)]
)
/** The keywords we look for when classifying a string as a definition. */
private[this] val DefinitionKeywords = Seq("lazy val ", "def ", "val ")
@ -72,9 +74,11 @@ private[sbt] object EvaluateConfigurations {
*
* Note: This ignores any non-Setting[_] values in the file.
*/
def evaluateConfiguration(eval: Eval,
src: File,
imports: Seq[String]): LazyClassLoaded[Seq[Setting[_]]] =
def evaluateConfiguration(
eval: Eval,
src: File,
imports: Seq[String]
): LazyClassLoaded[Seq[Setting[_]]] =
evaluateConfiguration(eval, src, IO.readLines(src), imports, 0)
/**
@ -83,14 +87,17 @@ private[sbt] object EvaluateConfigurations {
*
* @param builtinImports The set of import statements to add to those parsed in the .sbt file.
*/
private[this] def parseConfiguration(file: File,
lines: Seq[String],
builtinImports: Seq[String],
offset: Int): ParsedFile = {
private[this] def parseConfiguration(
file: File,
lines: Seq[String],
builtinImports: Seq[String],
offset: Int
): ParsedFile = {
val (importStatements, settingsAndDefinitions) = splitExpressions(file, lines)
val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements)
val (definitions, settings) = splitSettingsDefinitions(
addOffsetToRange(offset, settingsAndDefinitions))
addOffsetToRange(offset, settingsAndDefinitions)
)
new ParsedFile(allImports, definitions, settings)
}
@ -104,11 +111,13 @@ private[sbt] object EvaluateConfigurations {
*
* @return Just the Setting[_] instances defined in the .sbt file.
*/
def evaluateConfiguration(eval: Eval,
file: File,
lines: Seq[String],
imports: Seq[String],
offset: Int): LazyClassLoaded[Seq[Setting[_]]] = {
def evaluateConfiguration(
eval: Eval,
file: File,
lines: Seq[String],
imports: Seq[String],
offset: Int
): LazyClassLoaded[Seq[Setting[_]]] = {
val l = evaluateSbtFile(eval, file, lines, imports, offset)
loader =>
l(loader).settings
@ -124,11 +133,13 @@ private[sbt] object EvaluateConfigurations {
* @return A function which can take an sbt classloader and return the raw types/configuration
* which was compiled/parsed for the given file.
*/
private[sbt] def evaluateSbtFile(eval: Eval,
file: File,
lines: Seq[String],
imports: Seq[String],
offset: Int): LazyClassLoaded[LoadedSbtFile] = {
private[sbt] def evaluateSbtFile(
eval: Eval,
file: File,
lines: Seq[String],
imports: Seq[String],
offset: Int
): LazyClassLoaded[LoadedSbtFile] = {
// TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do
// detection for which project project manipulations should be applied.
val name = file.getPath
@ -170,12 +181,14 @@ private[sbt] object EvaluateConfigurations {
case DslEntry.ProjectManipulation(f) => f
}
// TODO -get project manipulations.
new LoadedSbtFile(settings,
projects,
importDefs,
manipulations,
definitions,
allGeneratedFiles)
new LoadedSbtFile(
settings,
projects,
importDefs,
manipulations,
definitions,
allGeneratedFiles
)
}
}
@ -208,19 +221,23 @@ private[sbt] object EvaluateConfigurations {
* @return A method that given an sbt classloader, can return the actual [[sbt.internal.DslEntry]] defined by
* the expression, and the sequence of .class files generated.
*/
private[sbt] def evaluateDslEntry(eval: Eval,
name: String,
imports: Seq[(String, Int)],
expression: String,
range: LineRange): TrackedEvalResult[DslEntry] = {
private[sbt] def evaluateDslEntry(
eval: Eval,
name: String,
imports: Seq[(String, Int)],
expression: String,
range: LineRange
): TrackedEvalResult[DslEntry] = {
// TODO - Should we try to namespace these between.sbt files? IF they hash to the same value, they may actually be
// exactly the same setting, so perhaps we don't care?
val result = try {
eval.eval(expression,
imports = new EvalImports(imports, name),
srcName = name,
tpeName = Some(SettingsDefinitionName),
line = range.start)
eval.eval(
expression,
imports = new EvalImports(imports, name),
srcName = name,
tpeName = Some(SettingsDefinitionName),
line = range.start
)
} catch {
case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage)
}
@ -249,11 +266,13 @@ private[sbt] object EvaluateConfigurations {
*/
// Build DSL now includes non-Setting[_] type settings.
// Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
def evaluateSetting(eval: Eval,
name: String,
imports: Seq[(String, Int)],
expression: String,
range: LineRange): LazyClassLoaded[Seq[Setting[_]]] =
def evaluateSetting(
eval: Eval,
name: String,
imports: Seq[(String, Int)],
expression: String,
range: LineRange
): LazyClassLoaded[Seq[Setting[_]]] =
evaluateDslEntry(eval, name, imports, expression, range).result andThen {
case DslEntry.ProjectSettings(values) => values
case _ => Nil
@ -265,7 +284,8 @@ private[sbt] object EvaluateConfigurations {
*/
private[sbt] def splitExpressions(
file: File,
lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)]) = {
lines: Seq[String]
): (Seq[(String, Int)], Seq[(String, LineRange)]) = {
val split = SbtParser(file, lines)
// TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different
// scala compiler rather than re-parsing.
@ -273,7 +293,8 @@ private[sbt] object EvaluateConfigurations {
}
private[this] def splitSettingsDefinitions(
lines: Seq[(String, LineRange)]): (Seq[(String, LineRange)], Seq[(String, LineRange)]) =
lines: Seq[(String, LineRange)]
): (Seq[(String, LineRange)], Seq[(String, LineRange)]) =
lines partition { case (line, _) => isDefinition(line) }
private[this] def isDefinition(line: String): Boolean = {
@ -282,34 +303,41 @@ private[sbt] object EvaluateConfigurations {
}
private[this] def extractedValTypes: Seq[String] =
Seq(classOf[CompositeProject],
classOf[InputKey[_]],
classOf[TaskKey[_]],
classOf[SettingKey[_]])
.map(_.getName)
Seq(
classOf[CompositeProject],
classOf[InputKey[_]],
classOf[TaskKey[_]],
classOf[SettingKey[_]]
).map(_.getName)
private[this] def evaluateDefinitions(eval: Eval,
name: String,
imports: Seq[(String, Int)],
definitions: Seq[(String, LineRange)],
file: Option[File]): compiler.EvalDefinitions = {
private[this] def evaluateDefinitions(
eval: Eval,
name: String,
imports: Seq[(String, Int)],
definitions: Seq[(String, LineRange)],
file: Option[File]
): compiler.EvalDefinitions = {
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(convertedRanges,
new EvalImports(imports, name),
name,
file,
extractedValTypes)
eval.evalDefinitions(
convertedRanges,
new EvalImports(imports, name),
name,
file,
extractedValTypes
)
}
}
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = {
val pairs = data.scopes flatMap (scope =>
data.data(scope).entries collect {
case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]]))
})
val pairs = data.scopes flatMap (
scope =>
data.data(scope).entries collect {
case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]]))
}
)
pairs.toMap[Task[_], ScopedKey[Task[_]]]
}
@ -326,8 +354,9 @@ object Index {
def stringToKeyMap(settings: Set[AttributeKey[_]]): Map[String, AttributeKey[_]] =
stringToKeyMap0(settings)(_.label)
private[this] def stringToKeyMap0(settings: Set[AttributeKey[_]])(
label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = {
private[this] def stringToKeyMap0(
settings: Set[AttributeKey[_]]
)(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = {
val multiMap = settings.groupBy(label)
val duplicates = multiMap collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } collect {
case (k, xs) if xs.size > 1 => (k, xs)
@ -336,7 +365,8 @@ object Index {
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
else
sys.error(
duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", ""))
duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", "")
)
}
private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
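
Call sites get the same treatment as definitions in this file: when a call such as `eval.eval(...)` or `eval.evalDefinitions(...)` above no longer fits on one line, each argument moves to its own line and the call's closing parenthesis dangles. A hypothetical, self-contained sketch of the call-site shape:

object CallSiteSketch {
  def describe(expression: String, srcName: String, tpeName: Option[String], line: Int): String =
    s"$srcName:$line ($expression: ${tpeName.getOrElse("?")})"

  // Arguments one per line, closing parenthesis on its own line,
  // matching the reformatted eval.eval and evalDefinitions calls above.
  val summary: String = describe(
    expression = "1 + 1",
    srcName = "build.sbt",
    tpeName = Some("Int"),
    line = 1
  )
}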

View File

@ -41,8 +41,10 @@ object GlobalPlugin {
injectInternalClasspath(Runtime, gp.internalClasspath),
injectInternalClasspath(Compile, gp.internalClasspath)
)
private[this] def injectInternalClasspath(config: Configuration,
cp: Seq[Attributed[File]]): Setting[_] =
private[this] def injectInternalClasspath(
config: Configuration,
cp: Seq[Attributed[File]]
): Setting[_] =
internalDependencyClasspath in config ~= { prev =>
(prev ++ cp).distinct
}
@ -50,8 +52,10 @@ object GlobalPlugin {
def build(base: File, s: State, config: LoadBuildConfiguration): (BuildStructure, State) = {
val newInject =
config.injectSettings.copy(global = config.injectSettings.global ++ globalPluginSettings)
val globalConfig = config.copy(injectSettings = newInject,
pluginManagement = config.pluginManagement.forGlobalPlugin)
val globalConfig = config.copy(
injectSettings = newInject,
pluginManagement = config.pluginManagement.forGlobalPlugin
)
val (eval, structure) = Load(base, s, globalConfig)
val session = Load.initialSession(structure, eval)
(structure, Project.setProject(session, structure, s))
@ -73,22 +77,26 @@ object GlobalPlugin {
// If we reference it directly (if it's an executionRoot) then it forces an update, which is not what we want.
val updateReport = Def.taskDyn { Def.task { update.value } }.value
GlobalPluginData(projectID.value,
projectDependencies.value,
depMap,
resolvers.value.toVector,
(fullClasspath in Runtime).value,
(prods ++ intcp).distinct)(updateReport)
GlobalPluginData(
projectID.value,
projectDependencies.value,
depMap,
resolvers.value.toVector,
(fullClasspath in Runtime).value,
(prods ++ intcp).distinct
)(updateReport)
}
val resolvedTaskInit = taskInit mapReferenced Project.mapScope(Scope replaceThis p)
val task = resolvedTaskInit evaluate data
val roots = resolvedTaskInit.dependencies
evaluate(state, structure, task, roots)
}
def evaluate[T](state: State,
structure: BuildStructure,
t: Task[T],
roots: Seq[ScopedKey[_]]): (State, T) = {
def evaluate[T](
state: State,
structure: BuildStructure,
t: Task[T],
roots: Seq[ScopedKey[_]]
): (State, T) = {
import EvaluateTask._
withStreams(structure, state) { str =>
val nv = nodeView(state, str, roots)
@ -105,13 +113,17 @@ object GlobalPlugin {
version := "0.0"
)
}
final case class GlobalPluginData(projectID: ModuleID,
dependencies: Seq[ModuleID],
descriptors: Map[ModuleRevisionId, ModuleDescriptor],
resolvers: Vector[Resolver],
fullClasspath: Classpath,
internalClasspath: Classpath)(val updateReport: UpdateReport)
final case class GlobalPlugin(data: GlobalPluginData,
structure: BuildStructure,
inject: Seq[Setting[_]],
base: File)
final case class GlobalPluginData(
projectID: ModuleID,
dependencies: Seq[ModuleID],
descriptors: Map[ModuleRevisionId, ModuleDescriptor],
resolvers: Vector[Resolver],
fullClasspath: Classpath,
internalClasspath: Classpath
)(val updateReport: UpdateReport)
final case class GlobalPlugin(
data: GlobalPluginData,
structure: BuildStructure,
inject: Seq[Setting[_]],
base: File
)

View File

@ -11,8 +11,10 @@ package internal
import Def.Setting
import java.net.URI
private[sbt] final class GroupedAutoPlugins(val all: Seq[AutoPlugin],
val byBuild: Map[URI, Seq[AutoPlugin]]) {
private[sbt] final class GroupedAutoPlugins(
val all: Seq[AutoPlugin],
val byBuild: Map[URI, Seq[AutoPlugin]]
) {
def globalSettings: Seq[Setting[_]] = all.flatMap(_.globalSettings)
def buildSettings(uri: URI): Seq[Setting[_]] =
byBuild.getOrElse(uri, Nil).flatMap(_.buildSettings)

View File

@ -50,19 +50,23 @@ object IvyConsole {
logLevel in Global := Level.Warn,
showSuccess in Global := false
)
val append = Load.transformSettings(Load.projectScope(currentRef),
currentRef.build,
rootProject,
depSettings)
val append = Load.transformSettings(
Load.projectScope(currentRef),
currentRef.build,
rootProject,
depSettings
)
val newStructure = Load.reapply(session.original ++ append, structure)
val newState = state.copy(remainingCommands = Exec(Keys.consoleQuick.key.label, None) :: Nil)
Project.setProject(session, newStructure, newState)
}
final case class Dependencies(managed: Seq[ModuleID],
resolvers: Seq[Resolver],
unmanaged: Seq[File])
final case class Dependencies(
managed: Seq[ModuleID],
resolvers: Seq[Resolver],
unmanaged: Seq[File]
)
def parseDependencies(args: Seq[String], log: Logger): Dependencies =
(Dependencies(Nil, Nil, Nil) /: args)(parseArgument(log))
def parseArgument(log: Logger)(acc: Dependencies, arg: String): Dependencies =

View File

@ -17,19 +17,25 @@ import sbt.librarymanagement.Configuration
object KeyIndex {
def empty: ExtendableKeyIndex = new KeyIndex0(emptyBuildIndex)
def apply(known: Iterable[ScopedKey[_]],
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]): ExtendableKeyIndex =
def apply(
known: Iterable[ScopedKey[_]],
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]
): ExtendableKeyIndex =
(base(projects, configurations) /: known) { _ add _ }
def aggregate(known: Iterable[ScopedKey[_]],
extra: BuildUtil[_],
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]): ExtendableKeyIndex =
def aggregate(
known: Iterable[ScopedKey[_]],
extra: BuildUtil[_],
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]
): ExtendableKeyIndex =
(base(projects, configurations) /: known) { (index, key) =>
index.addAggregated(key, extra)
}
private[this] def base(projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]): ExtendableKeyIndex = {
private[this] def base(
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]
): ExtendableKeyIndex = {
val data = for {
(uri, ids) <- projects
} yield {
@ -78,23 +84,29 @@ trait KeyIndex {
// TODO, optimize
def isEmpty(proj: Option[ResolvedReference], conf: Option[String]): Boolean =
keys(proj, conf).isEmpty
def isEmpty(proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]): Boolean = keys(proj, conf, task).isEmpty
def isEmpty(
proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]
): Boolean = keys(proj, conf, task).isEmpty
def buildURIs: Set[URI]
def projects(uri: URI): Set[String]
def exists(project: Option[ResolvedReference]): Boolean
def configs(proj: Option[ResolvedReference]): Set[String]
def tasks(proj: Option[ResolvedReference], conf: Option[String]): Set[AttributeKey[_]]
def tasks(proj: Option[ResolvedReference],
conf: Option[String],
key: String): Set[AttributeKey[_]]
def tasks(
proj: Option[ResolvedReference],
conf: Option[String],
key: String
): Set[AttributeKey[_]]
def keys(proj: Option[ResolvedReference]): Set[String]
def keys(proj: Option[ResolvedReference], conf: Option[String]): Set[String]
def keys(proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]): Set[String]
def keys(
proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]
): Set[String]
private[sbt] def configIdents(project: Option[ResolvedReference]): Set[String]
private[sbt] def fromConfigIdent(proj: Option[ResolvedReference])(configIdent: String): String
}
@ -116,11 +128,15 @@ private[sbt] final class AKeyIndex(val data: Relation[Option[AttributeKey[_]], S
* data contains the mapping between a configuration and keys.
* identData contains the mapping between a configuration and its identifier.
*/
private[sbt] final class ConfigIndex(val data: Map[Option[String], AKeyIndex],
val identData: Map[String, String]) {
def add(config: Option[String],
task: Option[AttributeKey[_]],
key: AttributeKey[_]): ConfigIndex = {
private[sbt] final class ConfigIndex(
val data: Map[Option[String], AKeyIndex],
val identData: Map[String, String]
) {
def add(
config: Option[String],
task: Option[AttributeKey[_]],
key: AttributeKey[_]
): ConfigIndex = {
new ConfigIndex(data updated (config, keyIndex(config).add(task, key)), this.identData)
}
@ -141,20 +157,24 @@ private[sbt] final class ConfigIndex(val data: Map[Option[String], AKeyIndex],
configIdentsInverse.getOrElse(ident, Scope.unguessConfigIdent(ident))
}
private[sbt] final class ProjectIndex(val data: Map[Option[String], ConfigIndex]) {
def add(id: Option[String],
config: Option[String],
task: Option[AttributeKey[_]],
key: AttributeKey[_]): ProjectIndex =
def add(
id: Option[String],
config: Option[String],
task: Option[AttributeKey[_]],
key: AttributeKey[_]
): ProjectIndex =
new ProjectIndex(data updated (id, confIndex(id).add(config, task, key)))
def confIndex(id: Option[String]): ConfigIndex = getOr(data, id, emptyConfigIndex)
def projects: Set[String] = keySet(data)
}
private[sbt] final class BuildIndex(val data: Map[Option[URI], ProjectIndex]) {
def add(build: Option[URI],
project: Option[String],
config: Option[String],
task: Option[AttributeKey[_]],
key: AttributeKey[_]): BuildIndex =
def add(
build: Option[URI],
project: Option[String],
config: Option[String],
task: Option[AttributeKey[_]],
key: AttributeKey[_]
): BuildIndex =
new BuildIndex(data updated (build, projectIndex(build).add(project, config, task, key)))
def projectIndex(build: Option[URI]): ProjectIndex = getOr(data, build, emptyProjectIndex)
def builds: Set[URI] = keySet(data)
@ -176,18 +196,22 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn
def tasks(proj: Option[ResolvedReference], conf: Option[String]): Set[AttributeKey[_]] =
keyIndex(proj, conf).tasks
def tasks(proj: Option[ResolvedReference],
conf: Option[String],
key: String): Set[AttributeKey[_]] = keyIndex(proj, conf).tasks(key)
def tasks(
proj: Option[ResolvedReference],
conf: Option[String],
key: String
): Set[AttributeKey[_]] = keyIndex(proj, conf).tasks(key)
def keys(proj: Option[ResolvedReference]): Set[String] =
(Set.empty[String] /: optConfigs(proj)) { (s, c) =>
s ++ keys(proj, c)
}
def keys(proj: Option[ResolvedReference], conf: Option[String]): Set[String] =
keyIndex(proj, conf).allKeys
def keys(proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]): Set[String] = keyIndex(proj, conf).keys(task)
def keys(
proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]
): Set[String] = keyIndex(proj, conf).keys(task)
def keyIndex(proj: Option[ResolvedReference], conf: Option[String]): AKeyIndex =
confIndex(proj).keyIndex(conf)
@ -217,10 +241,12 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn
val (build, project) = parts(scoped.scope.project.toOption)
add1(build, project, scoped.scope.config, scoped.scope.task, scoped.key)
}
private[this] def add1(uri: Option[URI],
id: Option[String],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
key: AttributeKey[_]): ExtendableKeyIndex =
private[this] def add1(
uri: Option[URI],
id: Option[String],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
key: AttributeKey[_]
): ExtendableKeyIndex =
new KeyIndex0(data.add(uri, id, config.toOption.map(_.name), task.toOption, key))
}

View File

@ -309,8 +309,9 @@ private[sbt] object Load {
case _ => None
}
)
ss.map(s =>
s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining)
ss.map(
s => s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining
)
}
def setDefinitionKey[T](tk: Task[T], key: ScopedKey[_]): Task[T] =
@ -559,8 +560,10 @@ private[sbt] object Load {
def checkProjectBase(buildBase: File, projectBase: File): Unit = {
checkDirectory(projectBase)
assert(buildBase == projectBase || IO.relativize(buildBase, projectBase).isDefined,
s"Directory $projectBase is not contained in build root $buildBase")
assert(
buildBase == projectBase || IO.relativize(buildBase, projectBase).isDefined,
s"Directory $projectBase is not contained in build root $buildBase"
)
}
def checkBuildBase(base: File) = checkDirectory(base)
@ -581,8 +584,10 @@ private[sbt] object Load {
}
}
def checkAll(referenced: Map[URI, List[ProjectReference]],
builds: Map[URI, PartBuildUnit]): Unit = {
def checkAll(
referenced: Map[URI, List[ProjectReference]],
builds: Map[URI, PartBuildUnit]
): Unit = {
val rootProject = getRootProject(builds)
for ((uri, refs) <- referenced; ref <- refs) {
val ProjectRef(refURI, refID) = Scope.resolveProjectRef(uri, rootProject, ref)
@ -718,12 +723,15 @@ private[sbt] object Load {
// here on, so the autogenerated build aggregated can be removed from this code. ( I think)
// We may actually want to move it back here and have different flags in loadTransitive...
val hasRoot = loadedProjectsRaw.projects.exists(_.base == normBase) || defsScala.exists(
_.rootProject.isDefined)
_.rootProject.isDefined
)
val (loadedProjects, defaultBuildIfNone, keepClassFiles) =
if (hasRoot)
(loadedProjectsRaw.projects,
BuildDef.defaultEmpty,
loadedProjectsRaw.generatedConfigClassFiles)
(
loadedProjectsRaw.projects,
BuildDef.defaultEmpty,
loadedProjectsRaw.generatedConfigClassFiles
)
else {
val existingIDs = loadedProjectsRaw.projects.map(_.id)
val refs = existingIDs.map(id => ProjectRef(uri, id))
@ -732,9 +740,11 @@ private[sbt] object Load {
val defaultProjects = timed("Load.loadUnit: defaultProjects", log) {
loadProjects(projectsFromBuild(b, normBase), false)
}
(defaultProjects.projects ++ loadedProjectsRaw.projects,
b,
defaultProjects.generatedConfigClassFiles ++ loadedProjectsRaw.generatedConfigClassFiles)
(
defaultProjects.projects ++ loadedProjectsRaw.projects,
b,
defaultProjects.generatedConfigClassFiles ++ loadedProjectsRaw.generatedConfigClassFiles
)
}
// Now we clean stale class files.
// TODO - this may cause issues with multiple sbt clients, but that should be deprecated pending sbt-server anyway
@ -907,7 +917,8 @@ private[sbt] object Load {
discover(AddSettings.defaultSbtFiles, buildBase) match {
case DiscoveredProjects(Some(root), discovered, files, generated) =>
log.debug(
s"[Loading] Found root project ${root.id} w/ remaining ${discovered.map(_.id).mkString(",")}")
s"[Loading] Found root project ${root.id} w/ remaining ${discovered.map(_.id).mkString(",")}"
)
val (finalRoot, projectLevelExtra) =
timed(s"Load.loadTransitive: finalizeProject($root)", log) {
finalizeProject(root, files, true)
@ -957,18 +968,22 @@ private[sbt] object Load {
}
val result = root +: (acc ++ otherProjects.projects)
log.debug(
s"[Loading] Done in ${buildBase}, returning: ${result.map(_.id).mkString("(", ", ", ")")}")
s"[Loading] Done in ${buildBase}, returning: ${result.map(_.id).mkString("(", ", ", ")")}"
)
LoadedProjects(result, generated ++ otherGenerated ++ generatedConfigClassFiles)
}
case Nil =>
log.debug(
s"[Loading] Done in ${buildBase}, returning: ${acc.map(_.id).mkString("(", ", ", ")")}")
s"[Loading] Done in ${buildBase}, returning: ${acc.map(_.id).mkString("(", ", ", ")")}"
)
LoadedProjects(acc, generatedConfigClassFiles)
}
}
private[this] def translateAutoPluginException(e: AutoPluginException,
project: Project): AutoPluginException =
private[this] def translateAutoPluginException(
e: AutoPluginException,
project: Project
): AutoPluginException =
e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n")
/**
@ -1157,7 +1172,8 @@ private[sbt] object Load {
injectSettings = config.injectSettings.copy(
global = autoPluginSettings ++ config.injectSettings.global,
project = config.pluginManagement.inject ++ config.injectSettings.project
))
)
)
def activateGlobalPlugin(config: LoadBuildConfiguration): LoadBuildConfiguration =
config.globalPlugin match {
@ -1266,8 +1282,9 @@ private[sbt] object Load {
def initialSession(structure: BuildStructure, rootEval: () => Eval, s: State): SessionSettings = {
val session = s get Keys.sessionSettings
val currentProject = session map (_.currentProject) getOrElse Map.empty
val currentBuild = session map (_.currentBuild) filter (uri =>
structure.units.keys exists (uri ==)) getOrElse structure.root
val currentBuild = session map (_.currentBuild) filter (
uri => structure.units.keys exists (uri ==)
) getOrElse structure.root
new SessionSettings(
currentBuild,
projectMap(structure, currentProject),
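
Single-lambda arguments are wrapped the same way in this file (see the reformatted `ss.map` and the `filter` in `initialSession` above): the lambda is parenthesized, its parameter goes on its own line, and the closing parenthesis dangles. A hypothetical sketch of that shape:

object LambdaSketch {
  // The lambda argument is enclosed in parentheses with its parameter
  // on a separate line, mirroring the reformatted map/filter calls above.
  val labels: Seq[String] = Seq(1, 2, 3).map(
    n => s"item-$n"
  )
}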

View File

@ -111,11 +111,13 @@ object LogManager {
}
// to change from global being the default to overriding, switch the order of state.get and data.get
def getOr[T](key: AttributeKey[T],
data: Settings[Scope],
scope: Scope,
state: State,
default: T): T =
def getOr[T](
key: AttributeKey[T],
data: Settings[Scope],
scope: Scope,
state: State,
default: T
): T =
data.get(scope, key) orElse state.get(key) getOrElse default
// This is the main function that is used to generate the logger for tasks.
@ -205,7 +207,8 @@ object LogManager {
val consoleOpt = consoleLocally(state, console)
LogExchange.bindLoggerAppenders(
loggerName,
(consoleOpt.toList map { _ -> screenLevel }) ::: (relay -> backingLevel) :: Nil)
(consoleOpt.toList map { _ -> screenLevel }) ::: (relay -> backingLevel) :: Nil
)
log
}

View File

@ -94,10 +94,12 @@ object PluginDiscovery {
* Discovers the names of top-level modules listed in resources named `resourceName` as per [[binaryModuleNames]] or
* available as analyzed source and extending from any of `subclasses` as per [[sourceModuleNames]].
*/
def binarySourceModuleNames(classpath: Seq[Attributed[File]],
loader: ClassLoader,
resourceName: String,
subclasses: String*): Seq[String] =
def binarySourceModuleNames(
classpath: Seq[Attributed[File]],
loader: ClassLoader,
resourceName: String,
subclasses: String*
): Seq[String] =
(
binaryModuleNames(data(classpath), loader, resourceName) ++
(analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*)))
@ -120,9 +122,11 @@ object PluginDiscovery {
* `classpath` and `loader` are both required to ensure that `loader`
* doesn't bring in any resources outside of the intended `classpath`, such as from parent loaders.
*/
def binaryModuleNames(classpath: Seq[File],
loader: ClassLoader,
resourceName: String): Seq[String] = {
def binaryModuleNames(
classpath: Seq[File],
loader: ClassLoader,
resourceName: String
): Seq[String] = {
import collection.JavaConverters._
loader.getResources(resourceName).asScala.toSeq.filter(onClasspath(classpath)) flatMap { u =>
IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty)
@ -136,7 +140,8 @@ object PluginDiscovery {
private[sbt] def binarySourceModules[T](
data: PluginData,
loader: ClassLoader,
resourceName: String)(implicit classTag: reflect.ClassTag[T]): DetectedModules[T] = {
resourceName: String
)(implicit classTag: reflect.ClassTag[T]): DetectedModules[T] = {
val classpath = data.classpath
val namesAndValues =
if (classpath.isEmpty) Nil
@ -148,9 +153,11 @@ object PluginDiscovery {
new DetectedModules(namesAndValues)
}
private[this] def loadModules[T: reflect.ClassTag](data: PluginData,
names: Seq[String],
loader: ClassLoader): Seq[(String, T)] =
private[this] def loadModules[T: reflect.ClassTag](
data: PluginData,
names: Seq[String],
loader: ClassLoader
): Seq[(String, T)] =
try ModuleUtilities.getCheckedObjects[T](names, loader)
catch {
case e: ExceptionInInitializerError =>
@ -170,7 +177,8 @@ object PluginDiscovery {
if (evictedStrings.isEmpty) ""
else
"\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString(
"\n\t")
"\n\t"
)
throw new IncompatiblePluginsException(msgBase + msgExtra, t)
}
}

View File

@ -15,17 +15,21 @@ import sbt.librarymanagement.ModuleID
import java.net.{ URI, URL, URLClassLoader }
final case class PluginManagement(overrides: Set[ModuleID],
applyOverrides: Set[ModuleID],
loader: PluginClassLoader,
initialLoader: ClassLoader,
context: Context) {
final case class PluginManagement(
overrides: Set[ModuleID],
applyOverrides: Set[ModuleID],
loader: PluginClassLoader,
initialLoader: ClassLoader,
context: Context
) {
def shift: PluginManagement =
PluginManagement(Set.empty,
overrides,
new PluginClassLoader(initialLoader),
initialLoader,
context)
PluginManagement(
Set.empty,
overrides,
new PluginClassLoader(initialLoader),
initialLoader,
context
)
def addOverrides(os: Set[ModuleID]): PluginManagement =
copy(overrides = overrides ++ os)
@ -49,11 +53,13 @@ object PluginManagement {
val emptyContext: Context = Context(false, 0)
def apply(initialLoader: ClassLoader): PluginManagement =
PluginManagement(Set.empty,
Set.empty,
new PluginClassLoader(initialLoader),
initialLoader,
emptyContext)
PluginManagement(
Set.empty,
Set.empty,
new PluginClassLoader(initialLoader),
initialLoader,
emptyContext
)
def extractOverrides(classpath: Classpath): Set[ModuleID] =
classpath flatMap { _.metadata get Keys.moduleID.key map keepOverrideInfo } toSet;

View File

@ -48,8 +48,10 @@ private[sbt] class PluginsDebug(
activePrefix + debugDeactivated(notFoundKey, deactivated)
}
private[this] def debugDeactivated(notFoundKey: String,
deactivated: Seq[EnableDeactivated]): String = {
private[this] def debugDeactivated(
notFoundKey: String,
deactivated: Seq[EnableDeactivated]
): String = {
val (impossible, possible) = Util.separate(deactivated) {
case pi: PluginImpossible => Left(pi)
case pr: PluginRequirements => Right(pr)
@ -154,11 +156,13 @@ private[sbt] object PluginsDebug {
val perBuild: Map[URI, Set[AutoPlugin]] =
structure.units.mapValues(unit => availableAutoPlugins(unit).toSet)
val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList
lazy val context = Context(currentProject.plugins,
currentProject.autoPlugins,
Plugins.deducer(pluginsThisBuild),
pluginsThisBuild,
s.log)
lazy val context = Context(
currentProject.plugins,
currentProject.autoPlugins,
Plugins.deducer(pluginsThisBuild),
pluginsThisBuild,
s.log
)
lazy val debug = PluginsDebug(context.available)
if (!pluginsThisBuild.contains(plugin)) {
val availableInBuilds: List[URI] = perBuild.toList.filter(_._2(plugin)).map(_._1)
@ -222,10 +226,11 @@ private[sbt] object PluginsDebug {
sealed abstract class EnableDeactivated extends PluginEnable
/** Describes a [[plugin]] that cannot be activated in a [[context]] due to [[contradictions]] in requirements. */
final case class PluginImpossible(plugin: AutoPlugin,
context: Context,
contradictions: Set[AutoPlugin])
extends EnableDeactivated
final case class PluginImpossible(
plugin: AutoPlugin,
context: Context,
contradictions: Set[AutoPlugin]
) extends EnableDeactivated
/**
* Describes the requirements for activating [[plugin]] in [[context]].
@ -256,9 +261,11 @@ private[sbt] object PluginsDebug {
* affecting the other plugin. If empty, a direct exclusion is required.
* @param newlySelected If false, this plugin was selected in the original context.
*/
final case class DeactivatePlugin(plugin: AutoPlugin,
removeOneOf: Set[AutoPlugin],
newlySelected: Boolean)
final case class DeactivatePlugin(
plugin: AutoPlugin,
removeOneOf: Set[AutoPlugin],
newlySelected: Boolean
)
/** Determines how to enable [[AutoPlugin]] in [[Context]]. */
def pluginEnable(context: Context, plugin: AutoPlugin): PluginEnable =
@ -344,13 +351,15 @@ private[sbt] object PluginsDebug {
DeactivatePlugin(d, removeToDeactivate, newlySelected)
}
PluginRequirements(plugin,
context,
blockingExcludes,
addToExistingPlugins,
extraPlugins,
willRemove,
deactivate)
PluginRequirements(
plugin,
context,
blockingExcludes,
addToExistingPlugins,
extraPlugins,
willRemove,
deactivate
)
}
}
@ -376,13 +385,15 @@ private[sbt] object PluginsDebug {
/** String representation of [[PluginEnable]], intended for end users. */
def explainPluginEnable(ps: PluginEnable): String =
ps match {
case PluginRequirements(plugin,
_,
blockingExcludes,
enablingPlugins,
extraEnabledPlugins,
toBeRemoved,
deactivate) =>
case PluginRequirements(
plugin,
_,
blockingExcludes,
enablingPlugins,
extraEnabledPlugins,
toBeRemoved,
deactivate
) =>
def indent(str: String) = if (str.isEmpty) "" else s"\t$str"
def note(str: String) = if (str.isEmpty) "" else s"Note: $str"
val parts =
@ -490,8 +501,9 @@ private[sbt] object PluginsDebug {
s"$s1 $s2 $s3"
}
private[this] def pluginImpossibleN(plugin: AutoPlugin)(
contradictions: List[AutoPlugin]): String = {
private[this] def pluginImpossibleN(
plugin: AutoPlugin
)(contradictions: List[AutoPlugin]): String = {
val s1 = s"There is no way to enable plugin ${plugin.label}."
val s2 = s"It (or its dependencies) requires these plugins to be both present and absent:"
val s3 = s"Please report the problem to the plugin's author."

View File

@ -49,7 +49,8 @@ final class ProjectNavigation(s: State) {
setProject(uri, to)
else
fail(
s"Invalid project name '$to' in build $uri (type 'projects' to list available projects).")
s"Invalid project name '$to' in build $uri (type 'projects' to list available projects)."
)
def changeBuild(newBuild: URI): State =
if (structure.units contains newBuild)

View File

@ -11,10 +11,12 @@ package internal
import sbt.internal.util.AttributeKey
object Resolve {
def apply(index: BuildUtil[_],
current: ScopeAxis[Reference],
key: AttributeKey[_],
mask: ScopeMask): Scope => Scope = {
def apply(
index: BuildUtil[_],
current: ScopeAxis[Reference],
key: AttributeKey[_],
mask: ScopeMask
): Scope => Scope = {
val rs =
resolveProject(current, mask) _ ::
resolveExtra(mask) _ ::
@ -39,7 +41,8 @@ object Resolve {
else scope.copy(extra = Zero)
def resolveConfig[P](index: BuildUtil[P], key: AttributeKey[_], mask: ScopeMask)(
scope: Scope): Scope =
scope: Scope
): Scope =
if (mask.config)
scope
else {

View File

@ -25,7 +25,8 @@ object Script {
lazy val command =
Command.command(Name) { state =>
val scriptArg = state.remainingCommands.headOption map { _.commandLine } getOrElse sys.error(
"No script file specified")
"No script file specified"
)
val scriptFile = new File(scriptArg).getAbsoluteFile
val hash = Hash.halve(Hash.toHex(Hash(scriptFile.getAbsolutePath)))
val base = new File(CommandUtil.bootDirectory(state), hash)
@ -51,14 +52,18 @@ object Script {
}
val scriptAsSource = sources in Compile := script :: Nil
val asScript = scalacOptions ++= Seq("-Xscript", script.getName.stripSuffix(".scala"))
val scriptSettings = Seq(asScript,
scriptAsSource,
logLevel in Global := Level.Warn,
showSuccess in Global := false)
val append = Load.transformSettings(Load.projectScope(currentRef),
currentRef.build,
rootProject,
scriptSettings ++ embeddedSettings)
val scriptSettings = Seq(
asScript,
scriptAsSource,
logLevel in Global := Level.Warn,
showSuccess in Global := false
)
val append = Load.transformSettings(
Load.projectScope(currentRef),
currentRef.build,
rootProject,
scriptSettings ++ embeddedSettings
)
val newStructure = Load.reapply(session.original ++ append, structure)
val arguments = state.remainingCommands.drop(1).map(e => s""""${e.commandLine}"""")

View File

@ -40,8 +40,10 @@ final case class SessionSettings(
currentEval: () => Eval
) {
assert(currentProject contains currentBuild,
s"Current build ($currentBuild) not associated with a current project.")
assert(
currentProject contains currentBuild,
s"Current build ($currentBuild) not associated with a current project."
)
/**
   * Modify the current state.
@ -52,9 +54,11 @@ final case class SessionSettings(
* @return A new SessionSettings object
*/
def setCurrent(build: URI, project: String, eval: () => Eval): SessionSettings =
copy(currentBuild = build,
currentProject = currentProject.updated(build, project),
currentEval = eval)
copy(
currentBuild = build,
currentProject = currentProject.updated(build, project),
currentEval = eval
)
/**
* @return The current ProjectRef with which we scope settings.
@ -147,7 +151,8 @@ object SessionSettings {
val oldSettings = (oldState get Keys.sessionSettings).toList.flatMap(_.append).flatMap(_._2)
if (newSession.append.isEmpty && oldSettings.nonEmpty)
oldState.log.warn(
"Discarding " + pluralize(oldSettings.size, " session setting") + ". Use 'session save' to persist session settings.")
"Discarding " + pluralize(oldSettings.size, " session setting") + ". Use 'session save' to persist session settings."
)
}
def removeRanges[T](in: Seq[T], ranges: Seq[(Int, Int)]): Seq[T] = {
@ -197,10 +202,12 @@ object SessionSettings {
reapply(newSession.copy(original = newSession.mergeSettings, append = Map.empty), s)
}
def writeSettings(pref: ProjectRef,
settings: List[SessionSetting],
original: Seq[Setting[_]],
structure: BuildStructure): (Seq[SessionSetting], Seq[Setting[_]]) = {
def writeSettings(
pref: ProjectRef,
settings: List[SessionSetting],
original: Seq[Setting[_]],
structure: BuildStructure
): (Seq[SessionSetting], Seq[Setting[_]]) = {
val project =
Project.getProject(pref, structure).getOrElse(sys.error("Invalid project reference " + pref))
val writeTo: File = BuildPaths
@ -224,9 +231,10 @@ object SessionSettings {
val RangePosition(_, r @ LineRange(start, end)) = s.pos
settings find (_._1.key == s.key) match {
case Some(ss @ (ns, newLines)) if !ns.init.dependencies.contains(ns.key) =>
val shifted = ns withPos RangePosition(path,
LineRange(start - offs,
start - offs + newLines.size))
val shifted = ns withPos RangePosition(
path,
LineRange(start - offs, start - offs + newLines.size)
)
(offs + end - start - newLines.size, shifted :: olds, ss +: repl)
case _ =>
val shifted = s withPos RangePosition(path, r shift -offs)
@ -324,9 +332,11 @@ save, save-all
lazy val parser =
token(Space) ~>
(token("list-all" ^^^ new Print(true)) | token("list" ^^^ new Print(false)) | token(
"clear" ^^^ new Clear(false)) |
"clear" ^^^ new Clear(false)
) |
token("save-all" ^^^ new Save(true)) | token("save" ^^^ new Save(false)) | token(
"clear-all" ^^^ new Clear(true)) |
"clear-all" ^^^ new Clear(true)
) |
remove)
lazy val remove = token("remove") ~> token(Space) ~> natSelect.map(ranges => new Remove(ranges))

View File

@ -24,9 +24,11 @@ import DefaultParsers._
* The verbose summary will typically use more vertical space and show full details,
* while the quiet summary will be a couple of lines and truncate information.
*/
private[sbt] class SetResult(val session: SessionSettings,
val verboseSummary: String,
val quietSummary: String)
private[sbt] class SetResult(
val session: SessionSettings,
val verboseSummary: String,
val quietSummary: String
)
/** Defines methods for implementing the `set` command.*/
private[sbt] object SettingCompletions {
@ -41,9 +43,12 @@ private[sbt] object SettingCompletions {
val r = relation(extracted.structure, true)
val allDefs = Def
.flattenLocals(
Def.compiled(extracted.structure.settings, true)(structure.delegates,
structure.scopeLocal,
implicitly[Show[ScopedKey[_]]]))
Def.compiled(extracted.structure.settings, true)(
structure.delegates,
structure.scopeLocal,
implicitly[Show[ScopedKey[_]]]
)
)
.keys
val projectScope = Load.projectScope(currentRef)
def resolve(s: Setting[_]): Seq[Setting[_]] =
@ -72,9 +77,11 @@ private[sbt] object SettingCompletions {
val append =
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList)))
val r = relation(newSession.mergeSettings, true)(structure.delegates,
structure.scopeLocal,
implicitly)
val r = relation(newSession.mergeSettings, true)(
structure.delegates,
structure.scopeLocal,
implicitly
)
setResult(newSession, r, append)
}
@ -149,9 +156,11 @@ private[sbt] object SettingCompletions {
}
/** Parser for a Scope+AttributeKey (ScopedKey). */
def scopedKeyParser(keyMap: Map[String, AttributeKey[_]],
settings: Settings[Scope],
context: ResolvedProject): Parser[ScopedKey[_]] = {
def scopedKeyParser(
keyMap: Map[String, AttributeKey[_]],
settings: Settings[Scope],
context: ResolvedProject
): Parser[ScopedKey[_]] = {
val cutoff = KeyRanks.MainCutoff
val keyCompletions = fixedCompletions { (seen, level) =>
completeKey(seen, keyMap, level, cutoff, 10).toSet
@ -186,9 +195,11 @@ private[sbt] object SettingCompletions {
* The completions are restricted to be more useful. Currently, this parser will suggest
* only known axis values for configurations and tasks and only in that order.
*/
def scopeParser(key: AttributeKey[_],
settings: Settings[Scope],
context: ResolvedProject): Parser[Scope] = {
def scopeParser(
key: AttributeKey[_],
settings: Settings[Scope],
context: ResolvedProject
): Parser[Scope] = {
val data = settings.data
val allScopes = data.keys.toSeq
val definedScopes = data.toSeq flatMap {
@ -277,11 +288,13 @@ private[sbt] object SettingCompletions {
completeDescribed(seen, true, applicable)(assignDescription)
}
def completeKey(seen: String,
keys: Map[String, AttributeKey[_]],
level: Int,
prominentCutoff: Int,
detailLimit: Int): Seq[Completion] =
def completeKey(
seen: String,
keys: Map[String, AttributeKey[_]],
level: Int,
prominentCutoff: Int,
detailLimit: Int
): Seq[Completion] =
completeSelectDescribed(seen, level, keys, detailLimit)(_.description) {
case (_, v) => v.rank <= prominentCutoff
}
@ -290,13 +303,15 @@ private[sbt] object SettingCompletions {
seen: String,
level: Int,
definedChoices: Set[String],
allChoices: Map[String, T])(description: T => Option[String]): Seq[Completion] =
allChoices: Map[String, T]
)(description: T => Option[String]): Seq[Completion] =
completeSelectDescribed(seen, level, allChoices, 10)(description) {
case (k, _) => definedChoices(k)
}
def completeSelectDescribed[T](seen: String, level: Int, all: Map[String, T], detailLimit: Int)(
description: T => Option[String])(prominent: (String, T) => Boolean): Seq[Completion] = {
description: T => Option[String]
)(prominent: (String, T) => Boolean): Seq[Completion] = {
val applicable = all.toSeq.filter { case (k, _) => k startsWith seen }
val prominentOnly = applicable filter { case (k, v) => prominent(k, v) }
@ -306,7 +321,8 @@ private[sbt] object SettingCompletions {
completeDescribed(seen, showDescriptions, showKeys)(s => description(s).toList.mkString)
}
def completeDescribed[T](seen: String, showDescriptions: Boolean, in: Seq[(String, T)])(
description: T => String): Seq[Completion] = {
description: T => String
): Seq[Completion] = {
def appendString(id: String): String = id.stripPrefix(seen) + " "
if (in.isEmpty)
Nil
@ -337,7 +353,8 @@ private[sbt] object SettingCompletions {
def keyType[S](key: AttributeKey[_])(
onSetting: Manifest[_] => S,
onTask: Manifest[_] => S,
onInput: Manifest[_] => S)(implicit tm: Manifest[Task[_]], im: Manifest[InputTask[_]]): S = {
onInput: Manifest[_] => S
)(implicit tm: Manifest[Task[_]], im: Manifest[InputTask[_]]): S = {
def argTpe = key.manifest.typeArguments.head
val TaskClass = tm.runtimeClass
val InputTaskClass = im.runtimeClass
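
The definitions above also show the curried style used for methods with a trailing (often implicit) parameter list: the first list is broken one parameter per line and closed with a dangling ')', while the implicit list stays on the same line as the result type. A small, self-contained sketch of that shape, with hypothetical names, might read:

object CurriedDefSketch {
  // Hypothetical method: explicit list broken vertically, implicit list kept
  // on the same line as the result type, mirroring the layout above.
  def describe[T](
      label: String,
      value: T
  )(implicit ord: Ordering[T]): String =
    s"$label -> ${ord.max(value, value)}"

  def main(args: Array[String]): Unit = println(describe("answer", 42))
}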

View File

@ -19,9 +19,11 @@ import sbt.io.IO
object SettingGraph {
def apply(structure: BuildStructure, basedir: File, scoped: ScopedKey[_], generation: Int)(
implicit display: Show[ScopedKey[_]]): SettingGraph = {
implicit display: Show[ScopedKey[_]]
): SettingGraph = {
val cMap = flattenLocals(
compiled(structure.settings, false)(structure.delegates, structure.scopeLocal, display))
compiled(structure.settings, false)(structure.delegates, structure.scopeLocal, display)
)
def loop(scoped: ScopedKey[_], generation: Int): SettingGraph = {
val key = scoped.key
val scope = scoped.scope
@ -34,14 +36,16 @@ object SettingGraph {
// val related = cMap.keys.filter(k => k.key == key && k.scope != scope)
// val reverse = reverseDependencies(cMap, scoped)
SettingGraph(display.show(scoped),
definedIn,
Project.scopedKeyData(structure, scope, key),
key.description,
basedir,
depends map { (x: ScopedKey[_]) =>
loop(x, generation + 1)
})
SettingGraph(
display.show(scoped),
definedIn,
Project.scopedKeyData(structure, scope, key),
key.description,
basedir,
depends map { (x: ScopedKey[_]) =>
loop(x, generation + 1)
}
)
}
loop(scoped, generation)
}

View File

@ -21,60 +21,81 @@ trait TaskSequential {
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(List(unitTask(task0)), last)
def sequential[A0, A1, B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[A0, A1, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(List(unitTask(task0), unitTask(task1)), last)
def sequential[A0, A1, A2, B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[A0, A1, A2, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(List(unitTask(task0), unitTask(task1), unitTask(task2)), last)
def sequential[A0, A1, A2, A3, B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[A0, A1, A2, A3, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(List(unitTask(task0), unitTask(task1), unitTask(task2), unitTask(task3)), last)
def sequential[A0, A1, A2, A3, A4, B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[A0, A1, A2, A3, A4, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(unitTask(task0), unitTask(task1), unitTask(task2), unitTask(task3), unitTask(task4)),
last)
def sequential[A0, A1, A2, A3, A4, A5, B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
sequential(List(unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5)),
last)
def sequential[A0, A1, A2, A3, A4, A5, A6, B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
sequential(List(unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5),
unitTask(task6)),
last)
last
)
def sequential[A0, A1, A2, A3, A4, A5, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5)
),
last
)
def sequential[A0, A1, A2, A3, A4, A5, A6, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5),
unitTask(task6)
),
last
)
def sequential[A0, A1, A2, A3, A4, A5, A6, A7, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
@ -84,16 +105,21 @@ trait TaskSequential {
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
sequential(List(unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5),
unitTask(task6),
unitTask(task7)),
last)
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5),
unitTask(task6),
unitTask(task7)
),
last
)
def sequential[A0, A1, A2, A3, A4, A5, A6, A7, A8, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
@ -104,17 +130,20 @@ trait TaskSequential {
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5),
unitTask(task6),
unitTask(task7),
unitTask(task8)),
List(
unitTask(task0),
unitTask(task1),
unitTask(task2),
unitTask(task3),
unitTask(task4),
unitTask(task5),
unitTask(task6),
unitTask(task7),
unitTask(task8)
),
last
)
def sequential[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, B](
@ -128,7 +157,8 @@ trait TaskSequential {
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -156,7 +186,8 @@ trait TaskSequential {
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -186,7 +217,8 @@ trait TaskSequential {
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -218,7 +250,8 @@ trait TaskSequential {
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -252,7 +285,8 @@ trait TaskSequential {
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -288,7 +322,8 @@ trait TaskSequential {
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -326,7 +361,8 @@ trait TaskSequential {
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -366,7 +402,8 @@ trait TaskSequential {
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -433,45 +470,49 @@ trait TaskSequential {
),
last
)
def sequential[A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[
A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
B
](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -496,47 +537,51 @@ trait TaskSequential {
),
last
)
def sequential[A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
A19,
B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
task19: Initialize[Task[A19]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[
A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
A19,
B
](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
task19: Initialize[Task[A19]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -562,49 +607,53 @@ trait TaskSequential {
),
last
)
def sequential[A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
A19,
A20,
B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
task19: Initialize[Task[A19]],
task20: Initialize[Task[A20]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[
A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
A19,
A20,
B
](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
task19: Initialize[Task[A19]],
task20: Initialize[Task[A20]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -631,51 +680,55 @@ trait TaskSequential {
),
last
)
def sequential[A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
A19,
A20,
A21,
B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
task19: Initialize[Task[A19]],
task20: Initialize[Task[A20]],
task21: Initialize[Task[A21]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[
A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
A18,
A19,
A20,
A21,
B
](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
task18: Initialize[Task[A18]],
task19: Initialize[Task[A19]],
task20: Initialize[Task[A20]],
task21: Initialize[Task[A21]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),
@ -704,8 +757,10 @@ trait TaskSequential {
last
)
def sequential[B](tasks: Seq[Initialize[Task[Unit]]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[B](
tasks: Seq[Initialize[Task[Unit]]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
tasks.toList match {
case Nil => Def.task { last.value }
case x :: xs =>
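
The TaskSequential overloads above additionally show how long type-parameter lists are handled: each type parameter gets its own line inside the brackets, and the value parameters follow the same vertical layout. A reduced-arity sketch of the same shape, with hypothetical names, could be:

object ArityOverloadSketch {
  // Hypothetical three-argument analogue of the higher-arity overloads above.
  def tupled[
      A0,
      A1,
      A2
  ](
      a0: A0,
      a1: A1,
      a2: A2
  ): (A0, A1, A2) = (a0, a1, a2)

  def main(args: Array[String]): Unit = println(tupled(1, "two", 3.0))
}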

View File

@ -52,10 +52,12 @@ private[sbt] final class TaskTimings(shutdown: Boolean) extends ExecuteProgress[
if (!shutdown)
start = System.nanoTime
}
def registered(state: Unit,
task: Task[_],
allDeps: Iterable[Task[_]],
pendingDeps: Iterable[Task[_]]) = {
def registered(
state: Unit,
task: Task[_],
allDeps: Iterable[Task[_]],
pendingDeps: Iterable[Task[_]]
) = {
pendingDeps foreach { t =>
if (transformNode(t).isEmpty) anonOwners.put(t, task)
}

View File

@ -85,7 +85,8 @@ private[sbt] object SbtParser {
val reporter = reporters.get(fileName)
if (reporter == null) {
scalacGlobalInitReporter.getOrElse(
sys.error(s"Sbt forgot to initialize `scalacGlobalInitReporter`."))
sys.error(s"Sbt forgot to initialize `scalacGlobalInitReporter`.")
)
} else reporter
}
@ -139,9 +140,11 @@ private[sbt] object SbtParser {
* The reporter id must be unique per parsing session.
* @return
*/
private[sbt] def parse(code: String,
filePath: String,
reporterId0: Option[String]): (Seq[Tree], String) = {
private[sbt] def parse(
code: String,
filePath: String,
reporterId0: Option[String]
): (Seq[Tree], String) = {
import defaultGlobalForParser._
val reporterId = reporterId0.getOrElse(s"$filePath-${Random.nextInt}")
val reporter = globalReporter.getOrCreateReporter(reporterId)
@ -204,7 +207,8 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed
private def splitExpressions(
file: File,
lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = {
lines: Seq[String]
): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = {
import sbt.internal.parser.MissingBracketHandler.findMissingText
val indexedLines = lines.toIndexedSeq
@ -224,7 +228,8 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed
// Issue errors
val positionLine = badTree.pos.line
throw new MessageOnlyException(
s"""[$fileName]:$positionLine: Pattern matching in val statements is not supported""".stripMargin)
s"""[$fileName]:$positionLine: Pattern matching in val statements is not supported""".stripMargin
)
}
val (imports: Seq[Tree], statements: Seq[Tree]) = parsedTrees partition {
@ -262,9 +267,9 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed
}
val stmtTreeLineRange = statements flatMap convertStatement
val importsLineRange = importsToLineRanges(content, imports)
(importsLineRange,
stmtTreeLineRange.map { case (stmt, _, lr) => (stmt, lr) },
stmtTreeLineRange.map { case (stmt, tree, _) => (stmt, tree) })
(importsLineRange, stmtTreeLineRange.map { case (stmt, _, lr) => (stmt, lr) }, stmtTreeLineRange.map {
case (stmt, tree, _) => (stmt, tree)
})
}
/**
@ -300,8 +305,10 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed
* @param importsInOneLine - imports in line
* @return - text
*/
private def extractLine(modifiedContent: String,
importsInOneLine: Seq[((Int, Int), Int)]): String = {
private def extractLine(
modifiedContent: String,
importsInOneLine: Seq[((Int, Int), Int)]
): String = {
val (begin, end) = importsInOneLine.foldLeft((Int.MaxValue, Int.MinValue)) {
case ((min, max), ((start, end), _)) =>
(min.min(start), max.max(end))
@ -333,7 +340,8 @@ private[sbt] object MissingBracketHandler {
positionLine: Int,
fileName: String,
originalException: Throwable,
reporterId: Option[String] = Some(Random.nextInt.toString)): String = {
reporterId: Option[String] = Some(Random.nextInt.toString)
): String = {
findClosingBracketIndex(content, positionEnd) match {
case Some(index) =>
val text = content.substring(positionEnd, index + 1)
@ -342,16 +350,19 @@ private[sbt] object MissingBracketHandler {
case Success(_) =>
text
case Failure(_) =>
findMissingText(content,
index + 1,
positionLine,
fileName,
originalException,
reporterId)
findMissingText(
content,
index + 1,
positionLine,
fileName,
originalException,
reporterId
)
}
case _ =>
throw new MessageOnlyException(
s"""[$fileName]:$positionLine: ${originalException.getMessage}""".stripMargin)
s"""[$fileName]:$positionLine: ${originalException.getMessage}""".stripMargin
)
}
}

Some files were not shown because too many files have changed in this diff.