mirror of https://github.com/sbt/sbt.git
Improve if check and prohibit value inside anon
This commit does the following things:

* Removes the boolean from the instance context passed to the linter.
* Prohibits the use of `value` inside anonymous functions.
* Improves the previous check of `value` inside `if` expressions.

The improvements were made possible by fixing an oversight in the traverser. As a result, several task implementations have been rewritten, because both checks now turn them into compilation failures. Note that the new check prohibiting the use of `value` inside anonymous functions ignores all functions whose parameters have been synthesized by scalac (which can happen in a number of different scenarios, such as for comprehensions). Other scripted tests have also been fixed.

Running `.value` inside an anonymous function yields the following error:

```
[error] /data/rw/code/scala/sbt/main-settings/src/test/scala/sbt/std/TaskPosSpec.scala:50:24: The evaluation of `foo` inside an anonymous function is prohibited.
[error]
[error] Problem: Task invocations inside anonymous functions are evaluated independently of whether the anonymous function is invoked or not.
[error]
[error] Solution:
[error]   1. Make `foo` evaluation explicit outside of the function body if you don't care about its evaluation.
[error]   2. Use a dynamic task to evaluate `foo` and pass that value as a parameter to an anonymous function.
[error]
[error]   val anon = () => foo.value + " "
[error]                    ^
```
This commit is contained in:
parent 41dce9e568
commit ca3acc4e52
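To make the new rule concrete, here is a minimal sketch, adapted from the new `TaskPosSpec` cases in the diff below, of the pattern the anonymous-function check rejects and of the two fixes suggested in the error message. The `foo` key, the `condition` flag, and the `bad`/`fix1`/`fix2` names are illustrative only and not part of the commit.

```
import sbt._
import sbt.Def._

val foo = taskKey[String]("")
var condition = true

// Rejected: `foo.value` sits inside the body of an anonymous function,
// but the task is evaluated whether or not that function is ever called.
val bad = Def.task[String] {
  val anon = () => foo.value + " "
  if (condition) anon() else ""
}

// Fix 1: evaluate the task outside the function body and close over the result.
val fix1 = Def.task[String] {
  val fooResult = foo.value
  val anon = () => fooResult + " "
  if (condition) anon() else ""
}

// Fix 2: use a dynamic task and pass the evaluated value to the function.
val fix2 = Def.taskDyn[String] {
  val anon = (value: String) => value + " "
  if (condition) Def.task(anon(foo.value)) else Def.task("")
}
```

Fix 1 makes the task evaluation explicit and unconditional; fix 2 moves the decision into a dynamic task so `foo` is only evaluated on the branch that needs it.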
@@ -188,7 +188,7 @@ object Instance {
}

// applies the transformation
linter.runLinter(c)(tree, t.isLeft)
linter.runLinter(c)(tree)
val tx = util.transformWrappers(tree, (n, tpe, t, replace) => sub(n, tpe, t, replace))
// resetting attributes must be: a) local b) done here and not wider or else there are obscure errors
val tr = makeApp(inner(tx))
@@ -3,14 +3,11 @@ package sbt.internal.util.appmacro
import scala.reflect.macros.blackbox

trait LinterDSL {
def runLinter(ctx: blackbox.Context)(tree: ctx.Tree, isApplicableFromContext: => Boolean): Unit
def runLinter(ctx: blackbox.Context)(tree: ctx.Tree): Unit
}

object LinterDSL {
object Empty extends LinterDSL {
override def runLinter(ctx: blackbox.Context)(
tree: ctx.Tree,
isApplicableFromContext: => Boolean
): Unit = ()
override def runLinter(ctx: blackbox.Context)(tree: ctx.Tree): Unit = ()
}
}
@@ -1,85 +1,206 @@
package sbt.std

import sbt.internal.util.ConsoleAppender
import sbt.internal.util.appmacro.LinterDSL
import sbt.internal.util.appmacro.{ Convert, Converted, LinterDSL }

import scala.collection.mutable.{ HashSet => MutableSet }
import scala.io.AnsiColor
import scala.reflect.internal.util.Position
import scala.reflect.macros.blackbox

object TaskLinterDSL extends LinterDSL {
override def runLinter(ctx: blackbox.Context)(
tree: ctx.Tree,
isApplicableFromContext: => Boolean
): Unit = {
abstract class BaseTaskLinterDSL extends LinterDSL {
def isDynamicTask: Boolean
def convert: Convert

override def runLinter(ctx: blackbox.Context)(tree: ctx.Tree): Unit = {
import ctx.universe._
val isTask = FullConvert.asPredicate(ctx)
object traverser extends Traverser {
val isTask = convert.asPredicate(ctx)
class traverser extends Traverser {
private val unchecked = symbolOf[sbt.unchecked].asClass
private val uncheckedWrappers = MutableSet.empty[Tree]
var insideIf: Boolean = false
override def traverse(tree: ctx.universe.Tree): Unit = {
tree match {
case If(condition, thenp, elsep) =>
super.traverse(condition)
insideIf = true
super.traverse(thenp)
super.traverse(elsep)
insideIf = false
case s @ Apply(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) =>
val shouldIgnore = uncheckedWrappers.contains(s)
val wrapperName = nme.decodedName.toString
if (insideIf && !shouldIgnore && isTask(wrapperName, tpe.tpe, qual)) {
val qualName = qual.symbol.name.decodedName.toString
ctx.error(s.pos, TaskLinterDSLFeedback.useOfValueInsideIfExpression(qualName))
}
// Don't remove, makes the whole analysis work
case Typed(expr, tt) =>
super.traverse(expr)
super.traverse(tt)
case Typed(expr, tt: TypeTree) if tt.original != null =>
tt.original match {
case Annotated(annot, arg) =>
Option(annot.tpe) match {
case Some(AnnotatedType(annotations, _)) =>
val symAnnotations = annotations.map(_.tree.tpe.typeSymbol)
val isUnchecked = symAnnotations.contains(unchecked)
if (isUnchecked) {
// Unwrap the expression behind the typed that sbt macro adds
val toAdd = arg match {
case Typed(`expr`, _) => `expr`
case `tree` => `tree`
}
uncheckedWrappers.add(toAdd)
}
case _ =>
var insideAnon: Boolean = false

def handleUncheckedAnnotation(exprAtUseSite: Tree, tt: TypeTree): Unit = {
tt.original match {
case Annotated(annot, arg) =>
Option(annot.tpe) match {
case Some(AnnotatedType(annotations, _)) =>
val tpeAnnotations = annotations.flatMap(ann => Option(ann.tree.tpe).toList)
val symAnnotations = tpeAnnotations.map(_.typeSymbol)
val isUnchecked = symAnnotations.contains(unchecked)
if (isUnchecked) {
// Use expression at use site, arg contains the old expr
// Referential equality between the two doesn't hold
val removedSbtWrapper = exprAtUseSite match {
case Typed(t, _) => t
case _ => exprAtUseSite
}
uncheckedWrappers.add(removedSbtWrapper)
}
case _ =>
}
super.traverse(expr)
case tree => super.traverse(tree)
case _ =>
}
}

override def traverse(tree: ctx.universe.Tree): Unit = {
tree match {
case s @ Apply(TypeApply(Select(selectQual, nme), tpe :: Nil), qual :: Nil) =>
val shouldIgnore = uncheckedWrappers.contains(s)
val wrapperName = nme.decodedName.toString
if (!shouldIgnore && isTask(wrapperName, tpe.tpe, qual)) {
val qualName =
if (qual.symbol != null) qual.symbol.name.decodedName.toString
else s.pos.lineContent
if (insideIf && !isDynamicTask) {
// Error on the use of value inside the if of a regular task (dyn task is ok)
ctx.error(s.pos, TaskLinterDSLFeedback.useOfValueInsideIfExpression(qualName))
}
if (insideAnon) {
// Error on the use of anonymous functions in any task or dynamic task
ctx.error(s.pos, TaskLinterDSLFeedback.useOfValueInsideAnon(qualName))
}
} else traverse(selectQual)
traverse(qual)
case If(condition, thenp, elsep) =>
traverse(condition)
insideIf = true
traverse(thenp)
traverse(elsep)
insideIf = false
case Typed(expr, tpt: TypeTree) if tpt.original != null =>
handleUncheckedAnnotation(expr, tpt)
traverse(expr)
traverse(tpt)
case f @ Function(vparams, body) =>
super.traverseTrees(vparams)
if (!vparams.exists(_.mods.hasFlag(Flag.SYNTHETIC))) {
insideAnon = true
traverse(body)
insideAnon = false
} else traverse(body)
case _ => super.traverse(tree)
}
}
}
if (!isApplicableFromContext) ()
else traverser.traverse(tree)
(new traverser).traverse(tree)
}
}

object TaskLinterDSL extends BaseTaskLinterDSL {
override val isDynamicTask: Boolean = false
override def convert: Convert = FullConvert
}

object OnlyTaskLinterDSL extends BaseTaskLinterDSL {
override val isDynamicTask: Boolean = false
override def convert: Convert = TaskConvert
}

object TaskDynLinterDSL extends BaseTaskLinterDSL {
override val isDynamicTask: Boolean = true
override def convert: Convert = FullConvert
}

object OnlyTaskDynLinterDSL extends BaseTaskLinterDSL {
override val isDynamicTask: Boolean = true
override def convert: Convert = TaskConvert
}

object TaskLinterDSLFeedback {
private final val startBold = if (ConsoleAppender.formatEnabled) AnsiColor.BOLD else ""
private final val startRed = if (ConsoleAppender.formatEnabled) AnsiColor.RED else ""
private final val startGreen = if (ConsoleAppender.formatEnabled) AnsiColor.GREEN else ""
private final val reset = if (ConsoleAppender.formatEnabled) AnsiColor.RESET else ""

def useOfValueInsideIfExpression(offendingValue: String) =
s"""${startBold}The evaluation of `${offendingValue}` happens always inside a regular task.$reset
private final val ProblemHeader = s"${startRed}Problem${reset}"
private final val SolutionHeader = s"${startGreen}Solution${reset}"

def useOfValueInsideAnon(task: String) =
s"""${startBold}The evaluation of `$task` inside an anonymous function is prohibited.$reset
|
|${startRed}PROBLEM${reset}: `${offendingValue}` is inside the if expression of a regular task.
|${ProblemHeader}: Task invocations inside anonymous functions are evaluated independently of whether the anonymous function is invoked or not.
|
|${SolutionHeader}:
| 1. Make `$task` evaluation explicit outside of the function body if you don't care about its evaluation.
| 2. Use a dynamic task to evaluate `$task` and pass that value as a parameter to an anonymous function.
""".stripMargin

def useOfValueInsideIfExpression(task: String) =
s"""${startBold}The evaluation of `$task` happens always inside a regular task.$reset
|
|${ProblemHeader}: `$task` is inside the if expression of a regular task.
| Regular tasks always evaluate task inside the bodies of if expressions.
|
|${startGreen}SOLUTION${reset}:
|${SolutionHeader}:
| 1. If you only want to evaluate it when the if predicate is true or false, use a dynamic task.
| 2. Otherwise, make the static evaluation explicit by evaluating `${offendingValue}` outside the if expression.
| 2. Otherwise, make the static evaluation explicit by evaluating `$task` outside the if expression.
""".stripMargin

/* If(
Ident(TermName("condition")),
Typed(
Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
TermName("wrapInitTask_$u2603$u2603")),
List(TypeTree())),
List(Ident(TermName("foo")))),
TypeTree()),
TypeTree().setOriginal(
Annotated(
Apply(Select(New(Ident(TypeName("unchecked"))), termNames.CONSTRUCTOR), List()),
Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
TermName("wrapInitTask_$u2603$u2603")),
List(TypeTree())),
List(Ident(TermName("foo")))),
TypeTree())
))
),
Typed(
Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
TermName("wrapInitTask_$u2603$u2603")),
List(TypeTree())),
List(Ident(TermName("bar")))),
TypeTree()),
TypeTree().setOriginal(
Annotated(
Apply(Select(New(Ident(TypeName("unchecked"))), termNames.CONSTRUCTOR), List()),
Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
TermName("wrapInitTask_$u2603$u2603")),
List(TypeTree())),
List(Ident(TermName("bar")))),
TypeTree())
))
)
)*/

/* Block(
List(
ValDef(
Modifiers(),
TermName("anon"),
TypeTree(),
Function(
List(),
Apply(
Select(
Typed(
Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
TermName("wrapInitTask_$u2603$u2603")),
List(TypeTree())),
List(Ident(TermName("fooNeg")))),
TypeTree()
),
TermName("$plus")
),
List(Literal(Constant("")))
)
)
)),
If(
Ident(TermName("condition")),
Apply(Select(Ident(TermName("anon")), TermName("apply")), List()),
Apply(Select(Ident(TermName("anon")), TermName("apply")), List())
)
)*/
}
@@ -99,7 +99,7 @@ object TaskMacro {

def taskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, EmptyLinter)(
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
@@ -503,13 +503,13 @@ object TaskMacro {
object PlainTaskMacro {
def task[T](t: T): Task[T] = macro taskImpl[T]
def taskImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, TaskLinterDSL)(
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])

def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T]
def taskDynImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, LinterDSL.Empty)(
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
}
@@ -38,4 +38,32 @@ class TaskPosSpec {
else bar.value: @unchecked
}
}

locally {
// This is fix 1 for appearance of tasks inside anons
import sbt._
import sbt.Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val fooResult = foo.value
val anon = () => fooResult + " "
if (condition) anon()
else ""
}
}

locally {
// This is fix 2 for appearance of tasks inside anons
import sbt._
import sbt.Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.taskDyn[String] {
val anon1 = (value: String) => value + " "
if (condition) {
Def.task(anon1(foo.value))
} else Def.task("")
}
}
}
@@ -11,12 +11,15 @@ class TaskNegSpec extends FunSuite {
baseCompileOptions: String = s"-cp $toolboxClasspath")(code: String) = {
val errorMessage = intercept[ToolBoxError] {
eval(code, s"$compileOptions $baseCompileOptions")
println("SUCCESS")
}.getMessage
println(errorMessage)
val userMessage =
s"""
|FOUND: $errorMessage
|EXPECTED: $errorSnippet
""".stripMargin
println(userMessage)
assert(errorMessage.contains(errorSnippet), userMessage)
}
@@ -40,7 +43,28 @@ class TaskNegSpec extends FunSuite {
}
}

test("Fail on task invocation inside inside if of task returned by dynamic task") {
test("Fail on task invocation inside `if` if it is used inside a regular task") {
val fooNegError = TaskLinterDSLFeedback.useOfValueInsideIfExpression("fooNeg")
val barNegError = TaskLinterDSLFeedback.useOfValueInsideIfExpression("barNeg")
expectError(List(fooNegError, barNegError).mkString("\n")) {
"""
|import sbt._
|import sbt.Def._
|
|val fooNeg = taskKey[String]("")
|val barNeg = taskKey[String]("")
|var condition = true
|def bi(s: String) = s + " "
|
|val bazNeg = Def.task[String] {
| if (condition) "" + fooNeg.value
| else bi(barNeg.value)
|}
""".stripMargin
}
}

test("Fail on task invocation inside inside `if` of task returned by dynamic task") {
expectError(TaskLinterDSLFeedback.useOfValueInsideIfExpression("fooNeg")) {
"""
|import sbt._
@@ -63,7 +87,7 @@ class TaskNegSpec extends FunSuite {
}
}

test("Fail on task invocation inside inside else of task returned by dynamic task") {
test("Fail on task invocation inside else of task returned by dynamic task") {
expectError(TaskLinterDSLFeedback.useOfValueInsideIfExpression("barNeg")) {
"""
|import sbt._
@@ -84,4 +108,64 @@ class TaskNegSpec extends FunSuite {
""".stripMargin
}
}

test("Fail on task invocation inside anonymous function returned by regular task") {
val fooNegError = TaskLinterDSLFeedback.useOfValueInsideAnon("fooNeg")
expectError(fooNegError) {
"""
|import sbt._
|import sbt.Def._
|
|val fooNeg = taskKey[String]("")
|val barNeg = taskKey[String]("")
|var condition = true
|
|val bazNeg = Def.task[String] {
| val anon = () => fooNeg.value
| if (condition) anon()
| else anon()
|}
""".stripMargin
}
}

test("Fail on task invocation inside complex anonymous function returned by regular task") {
val fooNegError = TaskLinterDSLFeedback.useOfValueInsideAnon("fooNeg")
expectError(fooNegError) {
"""
|import sbt._
|import sbt.Def._
|
|val fooNeg = taskKey[String]("")
|var condition = true
|
|val bazNeg = Def.task[String] {
| val anon = () => fooNeg.value + ""
| if (condition) anon()
| else anon()
|}
""".stripMargin
}
}

test("Fail on task invocation inside anonymous function returned by dynamic task") {
val fooNegError = TaskLinterDSLFeedback.useOfValueInsideAnon("fooNeg")
expectError(fooNegError) {
"""
|import sbt._
|import sbt.Def._
|
|val fooNeg = taskKey[String]("")
|val barNeg = taskKey[String]("")
|var condition = true
|
|val bazNeg = Def.taskDyn[String] {
| if (condition) {
| val anon = () => fooNeg.value
| Def.task(anon())
| } else Def.task("")
|}
""".stripMargin
}
}
}
@@ -337,7 +337,8 @@ object Defaults extends BuildCommon {
val srcs = unmanagedSources.value
val f = (includeFilter in unmanagedSources).value
val excl = (excludeFilter in unmanagedSources).value
if (sourcesInBase.value) (srcs +++ baseDirectory.value * (f -- excl)).get else srcs
val baseDir = baseDirectory.value
if (sourcesInBase.value) (srcs +++ baseDir * (f -- excl)).get else srcs
}
)
@@ -405,11 +406,12 @@ object Defaults extends BuildCommon {
bootIvyConfiguration.value,
scalaCompilerBridgeSource.value
)(appConfiguration.value, streams.value.log)
val classLoaderCache = state.value.classLoaderCache
if (java.lang.Boolean.getBoolean("sbt.disable.interface.classloader.cache")) compilers
else {
compilers.withScalac(
compilers.scalac match {
case x: AnalyzingCompiler => x.withClassLoaderCache(state.value.classLoaderCache)
case x: AnalyzingCompiler => x.withClassLoaderCache(classLoaderCache)
case x => x
}
)
@@ -426,8 +428,9 @@ object Defaults extends BuildCommon {
compileAnalysisFilename := {
// Here, if the user wants cross-scala-versioning, we also append it
// to the analysis cache, so we keep the scala versions separated.
val binVersion = scalaBinaryVersion.value
val extra =
if (crossPaths.value) s"_${scalaBinaryVersion.value}"
if (crossPaths.value) s"_$binVersion"
else ""
s"inc_compile${extra}.zip"
},
@@ -553,11 +556,11 @@ object Defaults extends BuildCommon {
def file(id: String) = files(id).headOption getOrElse sys.error(s"Missing ${id}.jar")
val allFiles = toolReport.modules.flatMap(_.artifacts.map(_._2))
val libraryJar = file(ScalaArtifacts.LibraryID)
val binVersion = scalaBinaryVersion.value
val compilerJar =
if (ScalaInstance.isDotty(scalaVersion.value))
file(ScalaArtifacts.dottyID(scalaBinaryVersion.value))
else
file(ScalaArtifacts.CompilerID)
file(ScalaArtifacts.dottyID(binVersion))
else file(ScalaArtifacts.CompilerID)
val otherJars = allFiles.filterNot(x => x == libraryJar || x == compilerJar)
new ScalaInstance(scalaVersion.value,
makeClassLoader(state.value)(libraryJar :: compilerJar :: otherJars.toList),
@@ -590,9 +593,11 @@ object Defaults extends BuildCommon {
data(fullClasspath.value),
scalaInstance.value,
IO.createUniqueDirectory(taskTemporaryDirectory.value)),
loadedTestFrameworks := testFrameworks.value
.flatMap(f => f.create(testLoader.value, streams.value.log).map(x => (f, x)).toIterable)
.toMap,
loadedTestFrameworks := {
val loader = testLoader.value
val log = streams.value.log
testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x)).toIterable).toMap
},
definedTests := detectTests.value,
definedTestNames := (definedTests map (_.map(_.name).distinct) storeAs definedTestNames triggeredBy compile).value,
testFilter in testQuick := testQuickFilter.value,
@@ -1193,8 +1198,9 @@ object Defaults extends BuildCommon {
inTask(key)(
Seq(
apiMappings ++= {
if (autoAPIMappings.value)
APIMappings.extract(dependencyClasspath.value, streams.value.log).toMap
val dependencyCp = dependencyClasspath.value
val log = streams.value.log
if (autoAPIMappings.value) APIMappings.extract(dependencyCp, log).toMap
else Map.empty[File, URL]
},
fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"),
@@ -1393,9 +1399,9 @@ object Defaults extends BuildCommon {
PomExtraDependencyAttributes.ScalaVersionKey -> scalaV)
.withCrossVersion(Disabled())

def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.task {
if (sbtPlugin.value) PluginDiscovery.discoverSourceAll(compile.value)
else PluginDiscovery.emptyDiscoveredNames
def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.taskDyn {
if (sbtPlugin.value) Def.task(PluginDiscovery.discoverSourceAll(compile.value))
else Def.task(PluginDiscovery.emptyDiscoveredNames)
}

def copyResourcesTask =
@@ -1686,7 +1692,8 @@ object Classpaths {
bootResolvers.value match {
case Some(repos) if overrideBuildResolvers.value => proj +: repos
case _ =>
val base = if (sbtPlugin.value) sbtResolver.value +: sbtPluginReleases +: rs else rs
val sbtResolverValue = sbtResolver.value
val base = if (sbtPlugin.value) sbtResolverValue +: sbtPluginReleases +: rs else rs
proj +: base
}
}).value,
@@ -1769,14 +1776,18 @@ object Classpaths {
else "release",
logging = ivyLoggingLevel.value),
deliverConfiguration := deliverLocalConfiguration.value,
publishConfiguration := publishConfig(
packagedArtifacts.in(publish).value,
if (publishMavenStyle.value) None else Some(deliver.value),
resolverName = getPublishTo(publishTo.value).name,
checksums = checksums.in(publish).value,
logging = ivyLoggingLevel.value,
overwrite = isSnapshot.value
),
publishConfiguration := {
// TODO(jvican): I think this is a bug.
val delivered = deliver.value
publishConfig(
packagedArtifacts.in(publish).value,
if (publishMavenStyle.value) None else Some(delivered),
resolverName = getPublishTo(publishTo.value).name,
checksums = checksums.in(publish).value,
logging = ivyLoggingLevel.value,
overwrite = isSnapshot.value
)
},
publishLocalConfiguration := publishConfig(
packagedArtifacts.in(publishLocal).value,
Some(deliverLocal.value),
@@ -1795,8 +1806,11 @@ object Classpaths {
ivySbt := ivySbt0.value,
ivyModule := { val is = ivySbt.value; new is.Module(moduleSettings.value) },
transitiveUpdate := transitiveUpdateTask.value,
updateCacheName := "update_cache" + (if (crossPaths.value) s"_${scalaBinaryVersion.value}"
else ""),
updateCacheName := {
val binVersion = scalaBinaryVersion.value
val suffix = if (crossPaths.value) s"_$binVersion" else ""
s"update_cache$suffix"
},
evictionWarningOptions in update := EvictionWarningOptions.default,
dependencyPositions := dependencyPositionsTask.value,
unresolvedWarningConfiguration in update := UnresolvedWarningConfiguration(
@@ -1839,17 +1853,19 @@ object Classpaths {
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
val ivy = ivyScala.value
val st = state.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
IvyActions.updateClassifiers(
is,
GetClassifiersConfiguration(mod,
excludes,
c.withArtifactFilter(c.artifactFilter.invert),
ivyScala.value,
ivy,
srcTypes,
docTypes),
uwConfig,
LogicalClock(state.value.hashCode),
LogicalClock(st.hashCode),
Some(depDir),
Vector.empty,
s.log
@@ -1871,12 +1887,13 @@ object Classpaths {
val pluginAdjust =
if (sbtPlugin.value) dependency.withConfigurations(Some(Provided.name)) +: base
else base
val sbtOrg = scalaOrganization.value
val version = scalaVersion.value
if (scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value)
pluginAdjust
else {
val version = scalaVersion.value
val isDotty = ScalaInstance.isDotty(version)
ScalaArtifacts.toolDependencies(scalaOrganization.value, version, isDotty) ++ pluginAdjust
ScalaArtifacts.toolDependencies(sbtOrg, version, isDotty) ++ pluginAdjust
}
}
)
@@ -1979,11 +1996,14 @@ object Classpaths {
val app = appConfiguration.value
val srcTypes = sourceArtifactTypes.value
val docTypes = docArtifactTypes.value
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val log = s.log
val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
val ivy = ivyScala.value
val st = state.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
val noExplicitCheck = ivyScala.value.map(_.withCheckExplicit(false))
val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
IvyActions.transitiveScratch(
is,
"sbt",
@@ -1994,9 +2014,9 @@ object Classpaths {
srcTypes,
docTypes),
uwConfig,
LogicalClock(state.value.hashCode),
LogicalClock(st.hashCode),
Some(depDir),
s.log
log
)
}
} tag (Tags.Update, Tags.Network)).value
@@ -29,12 +29,14 @@ def addDep(projectName: String) =
val checkApiMappings = taskKey[Unit]("Verifies that the API mappings are collected as expected.")

def expectedMappings = Def.task {
val version = scalaVersion.value
val binVersion = scalaBinaryVersion.value
val ms = update.value.configuration(Compile.name).get.modules.flatMap { mod =>
mod.artifacts.flatMap { case (a,f) =>
val n = a.name.stripSuffix("_" + scalaBinaryVersion.value)
val n = a.name.stripSuffix("_" + binVersion)
n match {
case "a" | "b" | "c" => (f, apiBase(n)) :: Nil
case "scala-library" => (f, scalaLibraryBase(scalaVersion.value)) :: Nil
case "scala-library" => (f, scalaLibraryBase(version)) :: Nil
case _ => Nil
}
}
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -6,10 +6,11 @@ logLevel := Level.Debug
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -10,6 +10,7 @@ lazy val root = (project in file(".")).
version := "1.0-SNAPSHOT",
autoScalaLibrary := false,
checkIvyXml := {
val resolverConverter = updateOptions.value.resolverConverter
ivySbt.value.withIvy(streams.value.log) { ivy =>
val cacheDir = ivy.getSettings.getDefaultRepositoryCacheBasedir
val xmlFile =
@@ -18,7 +19,7 @@ lazy val root = (project in file(".")).
if(lines.isEmpty) sys.error(s"Unable to read $xmlFile, could not resolve geronimo...")
// Note: We do not do this if the maven plugin is enabled, because there is no rewrite of ivy.xml, extra attributes
// are handled in a different mechanism. This is a hacky mechanism to detect that.
val isMavenResolver = updateOptions.value.resolverConverter != PartialFunction.empty
val isMavenResolver = resolverConverter != PartialFunction.empty
if(!isMavenResolver) assert(lines contains "xmlns:e", s"Failed to appropriately modify ivy.xml file for sbt extra attributes!\n$lines")

val xmlFile2 = cacheDir / "com.example" / "example-child" / "ivy-1.0-SNAPSHOT.xml"
@@ -42,8 +42,9 @@ lazy val dependent = project.
)

TaskKey[Unit]("dumpResolvers") := {
streams.value.log.info(s" -- dependent/fullResolvers -- ")
val log = streams.value.log
log.info(s" -- dependent/fullResolvers -- ")
(fullResolvers in dependent).value foreach { r =>
streams.value.log.info(s" * ${r}")
log.info(s" * ${r}")
}
}
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -6,10 +6,11 @@ crossTarget in Compile := target.value
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {
@@ -10,8 +10,9 @@ lazy val root = (project in file(".")).
fork in Test := true,
check := {
val nbProc = java.lang.Runtime.getRuntime().availableProcessors()
val log = streams.value.log
if( nbProc < 4 ) {
streams.value.log.warn("With fewer than 4 processors this test is meaningless")
log.warn("With fewer than 4 processors this test is meaningless")
} else {
// we've got at least 4 processors, we'll check the upper end but also 3 and 4 as the upper might not
// be reached if the system is under heavy load.
@@ -2,16 +2,24 @@ scalaVersion in ThisBuild := "2.11.8"
concurrentRestrictions in Global := Seq(Tags.limitAll(4))
libraryDependencies += "org.specs2" %% "specs2-core" % "3.8.4" % Test
inConfig(Test)(Seq(
testGrouping := definedTests.value.map { test => new Tests.Group(test.name, Seq(test), Tests.SubProcess(
ForkOptions(
javaHome = javaHome.value,
outputStrategy = outputStrategy.value,
bootJars = Vector(),
workingDirectory = Some(baseDirectory.value),
runJVMOptions = javaOptions.value.toVector,
connectInput = connectInput.value,
envVars = envVars.value
)
))},
testGrouping := {
val home = javaHome.value
val strategy = outputStrategy.value
val baseDir = baseDirectory.value
val options = javaOptions.value
val connect = connectInput.value
val vars = envVars.value
definedTests.value.map { test => new Tests.Group(test.name, Seq(test), Tests.SubProcess(
ForkOptions(
javaHome = home,
outputStrategy = strategy,
bootJars = Vector(),
workingDirectory = Some(baseDir),
runJVMOptions = options.toVector,
connectInput = connect,
envVars = vars
)
))}
},
TaskKey[Unit]("test-failure") := test.failure.value
))
@@ -1,7 +1,9 @@
testOptions in Test +=
testOptions in Test += {
val baseDir = baseDirectory.value
Tests.Setup { () =>
IO.touch(baseDirectory.value / "setup")
IO.touch(baseDir / "setup")
}
}

testOptions in Test += {
val t = baseDirectory.value / "tested"
@@ -11,4 +13,4 @@ testOptions in Test += {
IO.delete(t)
IO.touch(c)
}
}
}