Merge pull request #3216 from scalacenter/macro-usability-improvements

Add first round of DSL checks to sbt
This commit is contained in:
eugene yokota 2017-05-25 14:30:40 -04:00 committed by GitHub
commit 239280f137
39 changed files with 726 additions and 182 deletions

1
.gitignore vendored
View File

@ -1,2 +1,3 @@
target/
__pycache__
toolbox.classpath

View File

@ -16,7 +16,7 @@ matrix:
env:
matrix:
- SBT_CMD=";test:compile;scalafmtCheck;safeUnitTests;otherUnitTests"
- SBT_CMD=";test:compile;scalafmt::test;test:scalafmt::test;mainSettingsProj/test;safeUnitTests;otherUnitTests"
# - SBT_CMD="mimaReportBinaryIssues"
- SBT_CMD="scripted actions/*"
- SBT_CMD="scripted apiinfo/* compiler-project/* ivy-deps-management/*"

View File

@ -54,7 +54,7 @@ def commonSettings: Seq[Setting[_]] =
import com.typesafe.tools.mima.core._, ProblemFilters._
Seq()
},
*/
*/
fork in compile := true,
fork in run := true
) flatMap (_.settings)
@ -71,6 +71,7 @@ def testedBaseSettings: Seq[Setting[_]] =
lazy val sbtRoot: Project = (project in file("."))
.enablePlugins(ScriptedPlugin) // , SiteScaladocPlugin, GhpagesPlugin)
.enablePlugins(ScalafmtPlugin)
.configs(Sxr.sxrConf)
.aggregateSeq(nonRoots)
.settings(
@ -107,6 +108,7 @@ lazy val bundledLauncherProj =
Release.launcherSettings(sbtLaunchJar)
)
.enablePlugins(SbtLauncherPlugin)
.enablePlugins(ScalafmtPlugin)
.settings(
name := "sbt-launch",
moduleName := "sbt-launch",
@ -125,6 +127,7 @@ lazy val bundledLauncherProj =
// Runner for uniform test interface
lazy val testingProj = (project in file("testing"))
.enablePlugins(ContrabandPlugin, JsonCodecPlugin)
.enablePlugins(ScalafmtPlugin)
.dependsOn(testAgentProj)
.settings(
baseSettings,
@ -138,17 +141,20 @@ lazy val testingProj = (project in file("testing"))
.configure(addSbtIO, addSbtCompilerClasspath, addSbtUtilLogging)
// Testing agent for running tests in a separate process.
lazy val testAgentProj = (project in file("testing") / "agent").settings(
minimalSettings,
crossScalaVersions := Seq(baseScalaVersion),
crossPaths := false,
autoScalaLibrary := false,
name := "Test Agent",
libraryDependencies += testInterface
)
lazy val testAgentProj = (project in file("testing") / "agent")
.enablePlugins(ScalafmtPlugin)
.settings(
minimalSettings,
crossScalaVersions := Seq(baseScalaVersion),
crossPaths := false,
autoScalaLibrary := false,
name := "Test Agent",
libraryDependencies += testInterface
)
// Basic task engine
lazy val taskProj = (project in file("tasks"))
.enablePlugins(ScalafmtPlugin)
.settings(
testedBaseSettings,
name := "Tasks"
@ -157,6 +163,7 @@ lazy val taskProj = (project in file("tasks"))
// Standard task system. This provides map, flatMap, join, and more on top of the basic task model.
lazy val stdTaskProj = (project in file("tasks-standard"))
.enablePlugins(ScalafmtPlugin)
.dependsOn(taskProj % "compile;test->test")
.settings(
testedBaseSettings,
@ -167,7 +174,7 @@ lazy val stdTaskProj = (project in file("tasks-standard"))
// Embedded Scala code runner
lazy val runProj = (project in file("run"))
.enablePlugins(ContrabandPlugin)
.enablePlugins(ContrabandPlugin, ScalafmtPlugin)
.settings(
testedBaseSettings,
name := "Run",
@ -178,6 +185,7 @@ lazy val runProj = (project in file("run"))
.configure(addSbtIO, addSbtUtilLogging, addSbtCompilerClasspath)
lazy val scriptedSbtProj = (project in scriptedPath / "sbt")
.enablePlugins(ScalafmtPlugin)
.dependsOn(commandProj)
.settings(
baseSettings,
@ -187,6 +195,7 @@ lazy val scriptedSbtProj = (project in scriptedPath / "sbt")
.configure(addSbtIO, addSbtUtilLogging, addSbtCompilerInterface, addSbtUtilScripted)
lazy val scriptedPluginProj = (project in scriptedPath / "plugin")
.enablePlugins(ScalafmtPlugin)
.dependsOn(sbtProj)
.settings(
baseSettings,
@ -196,6 +205,7 @@ lazy val scriptedPluginProj = (project in scriptedPath / "plugin")
// Implementation and support code for defining actions.
lazy val actionsProj = (project in file("main-actions"))
.enablePlugins(ScalafmtPlugin)
.dependsOn(runProj, stdTaskProj, taskProj, testingProj)
.settings(
testedBaseSettings,
@ -217,7 +227,7 @@ lazy val actionsProj = (project in file("main-actions"))
)
lazy val protocolProj = (project in file("protocol"))
.enablePlugins(ContrabandPlugin, JsonCodecPlugin)
.enablePlugins(ContrabandPlugin, JsonCodecPlugin, ScalafmtPlugin)
.settings(
testedBaseSettings,
name := "Protocol",
@ -231,7 +241,7 @@ lazy val protocolProj = (project in file("protocol"))
// General command support and core commands not specific to a build system
lazy val commandProj = (project in file("main-command"))
.enablePlugins(ContrabandPlugin, JsonCodecPlugin)
.enablePlugins(ContrabandPlugin, JsonCodecPlugin, ScalafmtPlugin)
.dependsOn(protocolProj)
.settings(
testedBaseSettings,
@ -252,6 +262,7 @@ lazy val commandProj = (project in file("main-command"))
// The core macro project defines the main logic of the DSL, abstracted
// away from several sbt implementators (tasks, settings, et cetera).
lazy val coreMacrosProj = (project in file("core-macros"))
.enablePlugins(ScalafmtPlugin)
.settings(
commonSettings,
name := "Core Macros",
@ -259,12 +270,34 @@ lazy val coreMacrosProj = (project in file("core-macros"))
)
.configure(addSbtUtilCollection)
/* Write all the compile-time dependencies of the spores macro to a file,
 * in order to read it from the created Toolbox to run the neg tests. */
lazy val generateToolboxClasspath = Def.task {
  // Use the platform separator (':' on Unix, ';' on Windows) so the generated
  // classpath is valid when the Toolbox compiles the neg-test snippets.
  val pathSep = java.io.File.pathSeparator
  val classpathAttributes = (dependencyClasspath in Compile).value
  val dependenciesClasspath =
    classpathAttributes.map(_.data.getAbsolutePath).mkString(pathSep)
  val scalaBinVersion = (scalaBinaryVersion in Compile).value
  val targetDir = (target in Compile).value
  val compiledClassesDir = targetDir / s"scala-$scalaBinVersion/classes"
  val testClassesDir = targetDir / s"scala-$scalaBinVersion/test-classes"
  val classpath = s"$compiledClassesDir$pathSep$testClassesDir$pathSep$dependenciesClasspath"
  val resourceDir = (resourceDirectory in Compile).value
  // `mkdirs` (not `mkdir`) also creates missing parent directories.
  resourceDir.mkdirs()
  val toolboxTestClasspath = resourceDir / "toolbox.classpath"
  IO.write(toolboxTestClasspath, classpath)
  streams.value.log.success("Wrote the classpath for the macro neg test suite.")
  // The generated file is the task's single resource.
  List(toolboxTestClasspath.getAbsoluteFile)
}
// Fixes scope=Scope for Setting (core defined in collectionProj) to define the settings system used in build definitions
lazy val mainSettingsProj = (project in file("main-settings"))
.enablePlugins(ScalafmtPlugin)
.dependsOn(commandProj, stdTaskProj, coreMacrosProj)
.settings(
testedBaseSettings,
name := "Main Settings"
name := "Main Settings",
resourceGenerators in Compile += generateToolboxClasspath.taskValue
)
.configure(
addSbtUtilCache,
@ -279,7 +312,7 @@ lazy val mainSettingsProj = (project in file("main-settings"))
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main"))
.enablePlugins(ContrabandPlugin)
.enablePlugins(ContrabandPlugin, ScalafmtPlugin)
.dependsOn(actionsProj, mainSettingsProj, runProj, commandProj)
.settings(
testedBaseSettings,
@ -300,6 +333,7 @@ lazy val mainProj = (project in file("main"))
// technically, we need a dependency on all of mainProj's dependencies, but we don't do that since this is strictly an integration project
// with the sole purpose of providing certain identifiers without qualification (with a package object)
lazy val sbtProj = (project in file("sbt"))
.enablePlugins(ScalafmtPlugin)
.dependsOn(mainProj, scriptedSbtProj % "test->test")
.settings(
baseSettings,
@ -445,13 +479,6 @@ def customCommands: Seq[Setting[_]] = Seq(
otherUnitTests := {
test.all(otherProjects).value
},
commands += Command.command("scalafmtCheck") { state =>
sys.process.Process("git diff --name-only --exit-code").! match {
case 0 => // ok
case x => sys.error("git diff detected! Did you compile before committing?")
}
state
},
commands += Command.command("release-sbt-local") { state =>
"clean" ::
"so compile" ::

View File

@ -109,12 +109,13 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
def illegalReference(defs: collection.Set[Symbol], sym: Symbol): Boolean =
sym != null && sym != NoSymbol && defs.contains(sym)
type PropertyChecker = (String, Type, Tree) => Boolean
/**
* A function that checks the provided tree for illegal references to M instances defined in the
* expression passed to the macro and for illegal dereferencing of M instances.
*/
def checkReferences(defs: collection.Set[Symbol],
isWrapper: (String, Type, Tree) => Boolean): Tree => Unit = {
def checkReferences(defs: collection.Set[Symbol], isWrapper: PropertyChecker): Tree => Unit = {
case s @ ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) =>
if (isWrapper(nme.decodedName.toString, tpe.tpe, qual))
ctx.error(s.pos, DynamicDependencyError)

View File

@ -81,7 +81,12 @@ object Instance {
c: blackbox.Context,
i: Instance with Singleton,
convert: Convert,
builder: TupleBuilder)(t: Either[c.Expr[T], c.Expr[i.M[T]]], inner: Transform[c.type, N])(
builder: TupleBuilder,
linter: LinterDSL
)(
t: Either[c.Expr[T], c.Expr[i.M[T]]],
inner: Transform[c.type, N]
)(
implicit tt: c.WeakTypeTag[T],
nt: c.WeakTypeTag[N[T]],
it: c.TypeTag[i.type]
@ -183,6 +188,7 @@ object Instance {
}
// applies the transformation
linter.runLinter(c)(tree)
val tx = util.transformWrappers(tree, (n, tpe, t, replace) => sub(n, tpe, t, replace))
// resetting attributes must be: a) local b) done here and not wider or else there are obscure errors
val tr = makeApp(inner(tx))

View File

@ -0,0 +1,13 @@
package sbt.internal.util.appmacro
import scala.reflect.macros.blackbox
/**
 * Hook invoked by the task/setting macros to inspect the macro argument tree
 * and report DSL misuse before the tree is transformed.
 */
trait LinterDSL {
  /** Runs the lint checks on `tree`, reporting any problems through `ctx`. */
  def runLinter(ctx: blackbox.Context)(tree: ctx.Tree): Unit
}
object LinterDSL {
  /** A no-op linter, used by macro call sites where no DSL checks apply. */
  object Empty extends LinterDSL {
    override def runLinter(ctx: blackbox.Context)(tree: ctx.Tree): Unit = ()
  }
}

View File

@ -2,9 +2,16 @@ package sbt
package std
import Def.Initialize
import sbt.internal.util.Types.{ idFun, Id }
import sbt.internal.util.Types.{ Id, idFun }
import sbt.internal.util.AList
import sbt.internal.util.appmacro.{ Convert, Converted, Instance, MixedBuilder, MonadInstance }
import sbt.internal.util.appmacro.{
Convert,
Converted,
Instance,
LinterDSL,
MixedBuilder,
MonadInstance
}
object InitializeInstance extends MonadInstance {
type M[x] = Initialize[x]
@ -41,15 +48,16 @@ object InitializeConvert extends Convert {
}
object SettingMacro {
  // Settings are evaluated eagerly by the settings system, so none of the
  // task-oriented lint checks apply here: use the no-op linter.
  import LinterDSL.{ Empty => EmptyLinter }

  /** Implements `Def.setting`: lifts the expression `t` into an `Initialize[T]`. */
  def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
      t: c.Expr[T]): c.Expr[Initialize[T]] =
    Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
      Left(t),
      Instance.idTransform[c.type])

  /** Implements `Def.settingDyn`: flattens a dynamically computed `Initialize[T]`. */
  def settingDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
      t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
    Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
      Right(t),
      Instance.idTransform[c.type])
}

View File

@ -0,0 +1,206 @@
package sbt.std
import sbt.internal.util.ConsoleAppender
import sbt.internal.util.appmacro.{ Convert, Converted, LinterDSL }
import scala.collection.mutable.{ HashSet => MutableSet }
import scala.io.AnsiColor
import scala.reflect.internal.util.Position
import scala.reflect.macros.blackbox
/**
 * Base implementation of the DSL lint checks run by the task macros.
 *
 * Traverses the macro argument tree and reports two kinds of misuse of task
 * wrappers (the applications produced by `.value` expansion):
 *
 *  1. Evaluation inside an `if` branch of a regular (non-dynamic) task —
 *     regular tasks evaluate such values regardless of which branch is taken.
 *  2. Evaluation inside a user-written anonymous function — the value is
 *     computed whether or not the function is ever invoked.
 *
 * Use sites annotated with `@sbtUnchecked` (e.g. `foo.value: @sbtUnchecked`)
 * are exempt from these checks.
 */
abstract class BaseTaskLinterDSL extends LinterDSL {
  /** True when linting a dynamic task, where conditional `.value` use is legal. */
  def isDynamicTask: Boolean

  /** Converter whose predicate recognizes task-wrapper applications. */
  def convert: Convert

  override def runLinter(ctx: blackbox.Context)(tree: ctx.Tree): Unit = {
    import ctx.universe._
    val isTask = convert.asPredicate(ctx)
    class LinterTraverser extends Traverser {
      private val unchecked = symbolOf[sbt.sbtUnchecked].asClass
      // Wrapper trees exempted from linting via `@sbtUnchecked`.
      private val uncheckedWrappers = MutableSet.empty[Tree]
      var insideIf: Boolean = false
      var insideAnon: Boolean = false

      /**
       * If the ascribed type tree `tt` carries an `@sbtUnchecked` annotation,
       * records the (unwrapped) expression at the use site as exempt.
       */
      def handleUncheckedAnnotation(exprAtUseSite: Tree, tt: TypeTree): Unit = {
        tt.original match {
          case Annotated(annot, arg) =>
            Option(annot.tpe) match {
              case Some(AnnotatedType(annotations, _)) =>
                val tpeAnnotations = annotations.flatMap(ann => Option(ann.tree.tpe).toList)
                val symAnnotations = tpeAnnotations.map(_.typeSymbol)
                val isUnchecked = symAnnotations.contains(unchecked)
                if (isUnchecked) {
                  // Use the expression at the use site; `arg` contains the old
                  // expression and referential equality between them doesn't hold.
                  val removedSbtWrapper = exprAtUseSite match {
                    case Typed(t, _) => t
                    case _           => exprAtUseSite
                  }
                  uncheckedWrappers.add(removedSbtWrapper)
                }
              case _ =>
            }
          case _ =>
        }
      }

      override def traverse(tree: ctx.universe.Tree): Unit = {
        tree match {
          case s @ Apply(TypeApply(Select(selectQual, nme), tpe :: Nil), qual :: Nil) =>
            val shouldIgnore = uncheckedWrappers.contains(s)
            val wrapperName = nme.decodedName.toString
            if (!shouldIgnore && isTask(wrapperName, tpe.tpe, qual)) {
              val qualName =
                if (qual.symbol != null) qual.symbol.name.decodedName.toString
                else s.pos.lineContent
              if (insideIf && !isDynamicTask) {
                // Error on the use of value inside the if of a regular task (dyn task is ok).
                ctx.error(s.pos, TaskLinterDSLFeedback.useOfValueInsideIfExpression(qualName))
              }
              if (insideAnon) {
                // Error on the use of anonymous functions in any task or dynamic task.
                ctx.error(s.pos, TaskLinterDSLFeedback.useOfValueInsideAnon(qualName))
              }
            } else traverse(selectQual)
            traverse(qual)
          case If(condition, thenp, elsep) =>
            traverse(condition)
            // Save and restore the flag: resetting it to `false` unconditionally
            // would silence the check for the rest of an *enclosing* `if` once a
            // nested `if` has been traversed.
            val enclosingInsideIf = insideIf
            insideIf = true
            traverse(thenp)
            traverse(elsep)
            insideIf = enclosingInsideIf
          case Typed(expr, tpt: TypeTree) if tpt.original != null =>
            handleUncheckedAnnotation(expr, tpt)
            traverse(expr)
            traverse(tpt)
          case Function(vparams, body) =>
            super.traverseTrees(vparams)
            // A synthetic parameter means the lambda was produced by the compiler
            // or a previous expansion rather than written by the user.
            if (!vparams.exists(_.mods.hasFlag(Flag.SYNTHETIC))) {
              // Same save/restore rationale as for `insideIf` above.
              val enclosingInsideAnon = insideAnon
              insideAnon = true
              traverse(body)
              insideAnon = enclosingInsideAnon
            } else traverse(body)
          case _ => super.traverse(tree)
        }
      }
    }
    (new LinterTraverser).traverse(tree)
  }
}
/** Linter for regular tasks built through the full (settings + tasks) converter. */
object TaskLinterDSL extends BaseTaskLinterDSL {
  override val isDynamicTask: Boolean = false
  override def convert: Convert = FullConvert
}
/** Linter for regular tasks built through the task-only converter. */
object OnlyTaskLinterDSL extends BaseTaskLinterDSL {
  override val isDynamicTask: Boolean = false
  override def convert: Convert = TaskConvert
}
/** Linter for dynamic tasks (conditional `.value` use allowed) using the full converter. */
object TaskDynLinterDSL extends BaseTaskLinterDSL {
  override val isDynamicTask: Boolean = true
  override def convert: Convert = FullConvert
}
/** Linter for dynamic tasks (conditional `.value` use allowed) using the task-only converter. */
object OnlyTaskDynLinterDSL extends BaseTaskLinterDSL {
  override val isDynamicTask: Boolean = true
  override def convert: Convert = TaskConvert
}
/**
 * User-facing error messages produced by the task linter.
 * Messages are colorized only when `ConsoleAppender.formatEnabled` reports
 * that the console supports ANSI escapes.
 */
object TaskLinterDSLFeedback {
  private final val startBold = if (ConsoleAppender.formatEnabled) AnsiColor.BOLD else ""
  private final val startRed = if (ConsoleAppender.formatEnabled) AnsiColor.RED else ""
  private final val startGreen = if (ConsoleAppender.formatEnabled) AnsiColor.GREEN else ""
  private final val reset = if (ConsoleAppender.formatEnabled) AnsiColor.RESET else ""

  private final val ProblemHeader = s"${startRed}Problem${reset}"
  private final val SolutionHeader = s"${startGreen}Solution${reset}"

  /** Error shown when a task's `.value` is evaluated inside an anonymous function. */
  def useOfValueInsideAnon(task: String) =
    s"""${startBold}The evaluation of `$task` inside an anonymous function is prohibited.$reset
       |
       |${ProblemHeader}: Task invocations inside anonymous functions are evaluated independently of whether the anonymous function is invoked or not.
       |
       |${SolutionHeader}:
       |  1. Make `$task` evaluation explicit outside of the function body if you don't care about its evaluation.
       |  2. Use a dynamic task to evaluate `$task` and pass that value as a parameter to an anonymous function.
    """.stripMargin

  /** Error shown when a task's `.value` appears inside an `if` of a regular task. */
  def useOfValueInsideIfExpression(task: String) =
    s"""${startBold}The evaluation of `$task` happens always inside a regular task.$reset
       |
       |${ProblemHeader}: `$task` is inside the if expression of a regular task.
       |  Regular tasks always evaluate task inside the bodies of if expressions.
       |
       |${SolutionHeader}:
       |  1. If you only want to evaluate it when the if predicate is true or false, use a dynamic task.
       |  2. Otherwise, make the static evaluation explicit by evaluating `$task` outside the if expression.
    """.stripMargin

  // NOTE(review): the two commented blocks below are raw compiler AST dumps kept
  // as reference for the tree shapes the linter matches: an `if` whose branches
  // carry `@unchecked`-annotated task wrappers, and a task wrapper captured
  // inside an anonymous function. They are intentionally preserved verbatim.
  /* If(
        Ident(TermName("condition")),
        Typed(
          Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
                                       TermName("wrapInitTask_$u2603$u2603")),
                                List(TypeTree())),
                      List(Ident(TermName("foo")))),
                TypeTree()),
          TypeTree().setOriginal(
            Annotated(
              Apply(Select(New(Ident(TypeName("unchecked"))), termNames.CONSTRUCTOR), List()),
              Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
                                           TermName("wrapInitTask_$u2603$u2603")),
                                    List(TypeTree())),
                          List(Ident(TermName("foo")))),
                    TypeTree())
            ))
        ),
        Typed(
          Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
                                       TermName("wrapInitTask_$u2603$u2603")),
                                List(TypeTree())),
                      List(Ident(TermName("bar")))),
                TypeTree()),
          TypeTree().setOriginal(
            Annotated(
              Apply(Select(New(Ident(TypeName("unchecked"))), termNames.CONSTRUCTOR), List()),
              Typed(Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
                                           TermName("wrapInitTask_$u2603$u2603")),
                                    List(TypeTree())),
                          List(Ident(TermName("bar")))),
                    TypeTree())
            ))
        )
      )*/
  /* Block(
       List(
         ValDef(
           Modifiers(),
           TermName("anon"),
           TypeTree(),
           Function(
             List(),
             Apply(
               Select(
                 Typed(
                   Apply(TypeApply(Select(Ident(sbt.std.InputWrapper),
                                          TermName("wrapInitTask_$u2603$u2603")),
                                   List(TypeTree())),
                         List(Ident(TermName("fooNeg")))),
                   TypeTree()
                 ),
                 TermName("$plus")
               ),
               List(Literal(Constant("")))
             )
           )
         )),
       If(
         Ident(TermName("condition")),
         Apply(Select(Ident(TermName("anon")), TermName("apply")), List()),
         Apply(Select(Ident(TermName("anon")), TermName("apply")), List())
       )
     )*/
}

View File

@ -2,8 +2,15 @@ package sbt
package std
import Def.{ Initialize, Setting }
import sbt.internal.util.Types.{ const, idFun, Id }
import sbt.internal.util.appmacro.{ ContextUtil, Converted, Instance, MixedBuilder, MonadInstance }
import sbt.internal.util.Types.{ Id, const, idFun }
import sbt.internal.util.appmacro.{
ContextUtil,
Converted,
Instance,
LinterDSL,
MixedBuilder,
MonadInstance
}
import Instance.Transform
import sbt.internal.util.complete.{ DefaultParsers, Parser }
import sbt.internal.util.{ AList, LinePosition, NoPosition, SourcePosition }
@ -82,15 +89,17 @@ object TaskMacro {
"""`<<=` operator is deprecated. Use `key := { x.value }` or `key ~= (old => { newValue })`.
|See http://www.scala-sbt.org/0.13/docs/Migrating-from-sbt-012x.html""".stripMargin
import LinterDSL.{ Empty => EmptyLinter }
def taskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder)(
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
def taskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder)(
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
@ -356,7 +365,9 @@ object TaskMacro {
}
val cond = c.Expr[T](conditionInputTaskTree(c)(t.tree))
Instance
.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder)(Left(cond), inner)
.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder, EmptyLinter)(
Left(cond),
inner)
}
private[this] def conditionInputTaskTree(c: blackbox.Context)(t: c.Tree): c.Tree = {
@ -397,13 +408,17 @@ object TaskMacro {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder)(Left(t), inner)
Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder, LinterDSL.Empty)(
Left(t),
inner)
}
private[this] def iTaskMacro[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Task[T]] =
Instance
.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t), Instance.idTransform)
.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform)
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
@ -488,13 +503,13 @@ object TaskMacro {
object PlainTaskMacro {
def task[T](t: T): Task[T] = macro taskImpl[T]
def taskImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder)(
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T]
def taskDynImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder)(
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
}

View File

@ -0,0 +1,13 @@
package sbt
import scala.annotation.Annotation
/** An annotation to designate that the annotated entity
 * should not be considered for additional sbt compiler checks.
 * These checks ensure that the DSL is predictable and prevents
 * users from doing dangerous things at the cost of a stricter
 * code structure.
 *
 * Example: `myTask.value: @sbtUnchecked` silences the task linter
 * for that particular use site.
 *
 * @since 1.0.0
 */
class sbtUnchecked extends Annotation

View File

@ -0,0 +1,69 @@
package sbt.std
/**
 * Positive compile-time fixtures for the task linter DSL: every block below
 * must compile *without* triggering a lint error. The exact code shapes are
 * significant — do not restructure them, or the coverage changes.
 */
class TaskPosSpec {
  // Dynamic tasks can have task invocations inside if branches
  locally {
    import sbt._
    import sbt.Def._
    val foo = taskKey[String]("")
    val bar = taskKey[String]("")
    var condition = true
    val baz = Def.taskDyn[String] {
      if (condition) foo
      else bar
    }
  }
  // Dynamic settings can have setting invocations inside if branches
  locally {
    import sbt._
    import sbt.Def._
    val foo = settingKey[String]("")
    val bar = settingKey[String]("")
    var condition = true
    val baz = Def.settingDyn[String] {
      if (condition) foo
      else bar
    }
  }
  // Regular tasks may evaluate values in if branches when explicitly
  // marked with `@sbtUnchecked`
  locally {
    import sbt._
    import sbt.Def._
    val foo = taskKey[String]("")
    val bar = taskKey[String]("")
    var condition = true
    val baz = Def.task[String] {
      if (condition) foo.value: @sbtUnchecked
      else bar.value: @sbtUnchecked
    }
  }
  locally {
    // This is fix 1 for appearance of tasks inside anons
    import sbt._
    import sbt.Def._
    val foo = taskKey[String]("")
    var condition = true
    val baz = Def.task[String] {
      val fooResult = foo.value
      val anon = () => fooResult + " "
      if (condition) anon()
      else ""
    }
  }
  locally {
    // This is fix 2 for appearance of tasks inside anons
    import sbt._
    import sbt.Def._
    val foo = taskKey[String]("")
    var condition = true
    val baz = Def.taskDyn[String] {
      val anon1 = (value: String) => value + " "
      if (condition) {
        Def.task(anon1(foo.value))
      } else Def.task("")
    }
  }
}

View File

@ -0,0 +1,30 @@
/*
* This file has been copy-pasted from spores.
* https://github.com/scalacenter/spores/blob/master/core/src/test/scala/scala/spores/TestUtil.scala
*/
package sbt.std
import scala.reflect._
object TestUtil {
  import tools.reflect.ToolBox

  /**
   * Compiles and evaluates `code` in a fresh Toolbox with the given compiler
   * options. Compilation failures surface as `ToolBoxError`.
   */
  def eval(code: String, compileOptions: String = ""): Any = {
    val tb = mkToolbox(compileOptions)
    tb.eval(tb.parse(code))
  }

  /** Creates a Toolbox over the runtime mirror with the given compiler options. */
  def mkToolbox(compileOptions: String = ""): ToolBox[_ <: scala.reflect.api.Universe] = {
    val m = scala.reflect.runtime.currentMirror
    m.mkToolBox(options = compileOptions)
  }

  /**
   * Classpath for the Toolbox, read from the `toolbox.classpath` resource
   * generated by the build. Fails with a descriptive error when the resource
   * is absent instead of a bare NPE.
   */
  lazy val toolboxClasspath: String = {
    val resource = getClass.getClassLoader.getResource("toolbox.classpath")
    if (resource == null)
      sys.error("Missing `toolbox.classpath` resource; run the build's resource generator first.")
    val classpathFile = scala.io.Source.fromFile(resource.toURI)
    // Close the Source so the underlying file handle is released.
    try classpathFile.getLines.mkString
    finally classpathFile.close()
  }
}

View File

@ -1,8 +1,8 @@
package sbt
package std
package sbt.std
import sbt.internal.util.complete
import sbt.internal.util.complete.DefaultParsers
import sbt.{ Def, InputTask, Task }
/*object UseTask
{
@ -20,6 +20,7 @@ import sbt.internal.util.complete.DefaultParsers
}*/
object Assign {
import java.io.File
import Def.{ Initialize, inputKey, macroValueT, parserToInput, settingKey, taskKey }
// import UseTask.{x,y,z,a,set,plain}

View File

@ -0,0 +1,171 @@
package sbt.std.neg
import org.scalatest.FunSuite
import sbt.std.TaskLinterDSLFeedback
import sbt.std.TestUtil._
/**
 * Negative compile-time tests for the task linter DSL: each snippet is
 * compiled with a Toolbox and must FAIL with the expected linter message.
 */
class TaskNegSpec extends FunSuite {
  import tools.reflect.ToolBoxError

  /**
   * Compiles `code` with the Toolbox and asserts that compilation fails with
   * an error message containing `errorSnippet`. Both the found and expected
   * messages are included in the assertion failure to ease debugging.
   * (Leftover debug `println`s from the original were removed; the unreachable
   * "SUCCESS" print in particular could never run when the snippet failed.)
   */
  def expectError(errorSnippet: String,
                  compileOptions: String = "-Xfatal-warnings",
                  baseCompileOptions: String = s"-cp $toolboxClasspath")(code: String) = {
    val errorMessage = intercept[ToolBoxError] {
      eval(code, s"$compileOptions $baseCompileOptions")
    }.getMessage
    val userMessage =
      s"""
         |FOUND: $errorMessage
         |EXPECTED: $errorSnippet
      """.stripMargin
    assert(errorMessage.contains(errorSnippet), userMessage)
  }

  test("Fail on task invocation inside if it is used inside a regular task") {
    val fooNegError = TaskLinterDSLFeedback.useOfValueInsideIfExpression("fooNeg")
    val barNegError = TaskLinterDSLFeedback.useOfValueInsideIfExpression("barNeg")
    expectError(List(fooNegError, barNegError).mkString("\n")) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |val barNeg = taskKey[String]("")
        |var condition = true
        |
        |val bazNeg = Def.task[String] {
        |  if (condition) fooNeg.value
        |  else barNeg.value
        |}
      """.stripMargin
    }
  }

  test("Fail on task invocation inside `if` if it is used inside a regular task") {
    val fooNegError = TaskLinterDSLFeedback.useOfValueInsideIfExpression("fooNeg")
    val barNegError = TaskLinterDSLFeedback.useOfValueInsideIfExpression("barNeg")
    expectError(List(fooNegError, barNegError).mkString("\n")) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |val barNeg = taskKey[String]("")
        |var condition = true
        |def bi(s: String) = s + " "
        |
        |val bazNeg = Def.task[String] {
        |  if (condition) "" + fooNeg.value
        |  else bi(barNeg.value)
        |}
      """.stripMargin
    }
  }

  // Name fixed: the original read "inside inside `if`".
  test("Fail on task invocation inside `if` of task returned by dynamic task") {
    expectError(TaskLinterDSLFeedback.useOfValueInsideIfExpression("fooNeg")) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |val barNeg = taskKey[String]("")
        |var condition = true
        |
        |val bazNeg = Def.taskDyn[String] {
        |  if (condition) {
        |    Def.task {
        |      if (condition) {
        |        fooNeg.value
        |      } else ""
        |    }
        |  } else Def.task("")
        |}
      """.stripMargin
    }
  }

  test("Fail on task invocation inside else of task returned by dynamic task") {
    expectError(TaskLinterDSLFeedback.useOfValueInsideIfExpression("barNeg")) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |val barNeg = taskKey[String]("")
        |var condition = true
        |
        |val bazNeg = Def.taskDyn[String] {
        |  if (condition) {
        |    Def.task {
        |      if (condition) ""
        |      else barNeg.value
        |    }
        |  } else Def.task("")
        |}
      """.stripMargin
    }
  }

  test("Fail on task invocation inside anonymous function returned by regular task") {
    val fooNegError = TaskLinterDSLFeedback.useOfValueInsideAnon("fooNeg")
    expectError(fooNegError) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |val barNeg = taskKey[String]("")
        |var condition = true
        |
        |val bazNeg = Def.task[String] {
        |  val anon = () => fooNeg.value
        |  if (condition) anon()
        |  else anon()
        |}
      """.stripMargin
    }
  }

  test("Fail on task invocation inside complex anonymous function returned by regular task") {
    val fooNegError = TaskLinterDSLFeedback.useOfValueInsideAnon("fooNeg")
    expectError(fooNegError) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |var condition = true
        |
        |val bazNeg = Def.task[String] {
        |  val anon = () => fooNeg.value + ""
        |  if (condition) anon()
        |  else anon()
        |}
      """.stripMargin
    }
  }

  test("Fail on task invocation inside anonymous function returned by dynamic task") {
    val fooNegError = TaskLinterDSLFeedback.useOfValueInsideAnon("fooNeg")
    expectError(fooNegError) {
      """
        |import sbt._
        |import sbt.Def._
        |
        |val fooNeg = taskKey[String]("")
        |val barNeg = taskKey[String]("")
        |var condition = true
        |
        |val bazNeg = Def.taskDyn[String] {
        |  if (condition) {
        |    val anon = () => fooNeg.value
        |    Def.task(anon())
        |  } else Def.task("")
        |}
      """.stripMargin
    }
  }
}

View File

@ -337,7 +337,8 @@ object Defaults extends BuildCommon {
val srcs = unmanagedSources.value
val f = (includeFilter in unmanagedSources).value
val excl = (excludeFilter in unmanagedSources).value
if (sourcesInBase.value) (srcs +++ baseDirectory.value * (f -- excl)).get else srcs
val baseDir = baseDirectory.value
if (sourcesInBase.value) (srcs +++ baseDir * (f -- excl)).get else srcs
}
)
@ -405,11 +406,12 @@ object Defaults extends BuildCommon {
bootIvyConfiguration.value,
scalaCompilerBridgeSource.value
)(appConfiguration.value, streams.value.log)
val classLoaderCache = state.value.classLoaderCache
if (java.lang.Boolean.getBoolean("sbt.disable.interface.classloader.cache")) compilers
else {
compilers.withScalac(
compilers.scalac match {
case x: AnalyzingCompiler => x.withClassLoaderCache(state.value.classLoaderCache)
case x: AnalyzingCompiler => x.withClassLoaderCache(classLoaderCache)
case x => x
}
)
@ -426,8 +428,9 @@ object Defaults extends BuildCommon {
compileAnalysisFilename := {
// Here, if the user wants cross-scala-versioning, we also append it
// to the analysis cache, so we keep the scala versions separated.
val binVersion = scalaBinaryVersion.value
val extra =
if (crossPaths.value) s"_${scalaBinaryVersion.value}"
if (crossPaths.value) s"_$binVersion"
else ""
s"inc_compile${extra}.zip"
},
@ -553,11 +556,11 @@ object Defaults extends BuildCommon {
def file(id: String) = files(id).headOption getOrElse sys.error(s"Missing ${id}.jar")
val allFiles = toolReport.modules.flatMap(_.artifacts.map(_._2))
val libraryJar = file(ScalaArtifacts.LibraryID)
val binVersion = scalaBinaryVersion.value
val compilerJar =
if (ScalaInstance.isDotty(scalaVersion.value))
file(ScalaArtifacts.dottyID(scalaBinaryVersion.value))
else
file(ScalaArtifacts.CompilerID)
file(ScalaArtifacts.dottyID(binVersion))
else file(ScalaArtifacts.CompilerID)
val otherJars = allFiles.filterNot(x => x == libraryJar || x == compilerJar)
new ScalaInstance(scalaVersion.value,
makeClassLoader(state.value)(libraryJar :: compilerJar :: otherJars.toList),
@ -590,9 +593,11 @@ object Defaults extends BuildCommon {
data(fullClasspath.value),
scalaInstance.value,
IO.createUniqueDirectory(taskTemporaryDirectory.value)),
loadedTestFrameworks := testFrameworks.value
.flatMap(f => f.create(testLoader.value, streams.value.log).map(x => (f, x)).toIterable)
.toMap,
loadedTestFrameworks := {
val loader = testLoader.value
val log = streams.value.log
testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x)).toIterable).toMap
},
definedTests := detectTests.value,
definedTestNames := (definedTests map (_.map(_.name).distinct) storeAs definedTestNames triggeredBy compile).value,
testFilter in testQuick := testQuickFilter.value,
@ -1193,8 +1198,9 @@ object Defaults extends BuildCommon {
inTask(key)(
Seq(
apiMappings ++= {
if (autoAPIMappings.value)
APIMappings.extract(dependencyClasspath.value, streams.value.log).toMap
val dependencyCp = dependencyClasspath.value
val log = streams.value.log
if (autoAPIMappings.value) APIMappings.extract(dependencyCp, log).toMap
else Map.empty[File, URL]
},
fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"),
@ -1393,9 +1399,9 @@ object Defaults extends BuildCommon {
PomExtraDependencyAttributes.ScalaVersionKey -> scalaV)
.withCrossVersion(Disabled())
def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.task {
if (sbtPlugin.value) PluginDiscovery.discoverSourceAll(compile.value)
else PluginDiscovery.emptyDiscoveredNames
def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.taskDyn {
if (sbtPlugin.value) Def.task(PluginDiscovery.discoverSourceAll(compile.value))
else Def.task(PluginDiscovery.emptyDiscoveredNames)
}
def copyResourcesTask =
@ -1686,7 +1692,8 @@ object Classpaths {
bootResolvers.value match {
case Some(repos) if overrideBuildResolvers.value => proj +: repos
case _ =>
val base = if (sbtPlugin.value) sbtResolver.value +: sbtPluginReleases +: rs else rs
val sbtResolverValue = sbtResolver.value
val base = if (sbtPlugin.value) sbtResolverValue +: sbtPluginReleases +: rs else rs
proj +: base
}
}).value,
@ -1769,14 +1776,18 @@ object Classpaths {
else "release",
logging = ivyLoggingLevel.value),
deliverConfiguration := deliverLocalConfiguration.value,
publishConfiguration := publishConfig(
packagedArtifacts.in(publish).value,
if (publishMavenStyle.value) None else Some(deliver.value),
resolverName = getPublishTo(publishTo.value).name,
checksums = checksums.in(publish).value,
logging = ivyLoggingLevel.value,
overwrite = isSnapshot.value
),
publishConfiguration := {
// TODO(jvican): I think this is a bug.
val delivered = deliver.value
publishConfig(
packagedArtifacts.in(publish).value,
if (publishMavenStyle.value) None else Some(delivered),
resolverName = getPublishTo(publishTo.value).name,
checksums = checksums.in(publish).value,
logging = ivyLoggingLevel.value,
overwrite = isSnapshot.value
)
},
publishLocalConfiguration := publishConfig(
packagedArtifacts.in(publishLocal).value,
Some(deliverLocal.value),
@ -1795,8 +1806,11 @@ object Classpaths {
ivySbt := ivySbt0.value,
ivyModule := { val is = ivySbt.value; new is.Module(moduleSettings.value) },
transitiveUpdate := transitiveUpdateTask.value,
updateCacheName := "update_cache" + (if (crossPaths.value) s"_${scalaBinaryVersion.value}"
else ""),
updateCacheName := {
val binVersion = scalaBinaryVersion.value
val suffix = if (crossPaths.value) s"_$binVersion" else ""
s"update_cache$suffix"
},
evictionWarningOptions in update := EvictionWarningOptions.default,
dependencyPositions := dependencyPositionsTask.value,
unresolvedWarningConfiguration in update := UnresolvedWarningConfiguration(
@ -1839,17 +1853,19 @@ object Classpaths {
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
val ivy = ivyScala.value
val st = state.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
IvyActions.updateClassifiers(
is,
GetClassifiersConfiguration(mod,
excludes,
c.withArtifactFilter(c.artifactFilter.invert),
ivyScala.value,
ivy,
srcTypes,
docTypes),
uwConfig,
LogicalClock(state.value.hashCode),
LogicalClock(st.hashCode),
Some(depDir),
Vector.empty,
s.log
@ -1867,15 +1883,17 @@ object Classpaths {
// Override the default to handle mixing in the sbtPlugin + scala dependencies.
allDependencies := {
val base = projectDependencies.value ++ libraryDependencies.value
val dependency = sbtDependency.value
val pluginAdjust =
if (sbtPlugin.value) sbtDependency.value.withConfigurations(Some(Provided.name)) +: base
if (sbtPlugin.value) dependency.withConfigurations(Some(Provided.name)) +: base
else base
val sbtOrg = scalaOrganization.value
val version = scalaVersion.value
if (scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value)
pluginAdjust
else {
val version = scalaVersion.value
val isDotty = ScalaInstance.isDotty(version)
ScalaArtifacts.toolDependencies(scalaOrganization.value, version, isDotty) ++ pluginAdjust
ScalaArtifacts.toolDependencies(sbtOrg, version, isDotty) ++ pluginAdjust
}
}
)
@ -1978,11 +1996,14 @@ object Classpaths {
val app = appConfiguration.value
val srcTypes = sourceArtifactTypes.value
val docTypes = docArtifactTypes.value
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val log = s.log
val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
val ivy = ivyScala.value
val st = state.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
val noExplicitCheck = ivyScala.value.map(_.withCheckExplicit(false))
val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
IvyActions.transitiveScratch(
is,
"sbt",
@ -1993,9 +2014,9 @@ object Classpaths {
srcTypes,
docTypes),
uwConfig,
LogicalClock(state.value.hashCode),
LogicalClock(st.hashCode),
Some(depDir),
s.log
log
)
}
} tag (Tags.Update, Tags.Network)).value
@ -2124,11 +2145,11 @@ object Classpaths {
}
}
val evictionOptions = {
val evictionOptions = Def.taskDyn {
if (executionRoots.value.exists(_.key == evicted.key))
EvictionWarningOptions.empty
else (evictionWarningOptions in update).value
}
Def.task(EvictionWarningOptions.empty)
else Def.task((evictionWarningOptions in update).value)
}.value
LibraryManagement.cachedUpdate(
s.cacheStoreFactory.sub(updateCacheName.value),

View File

@ -17,4 +17,4 @@ object CorePlugin extends AutoPlugin {
Defaults.coreDefaultSettings
override lazy val globalSettings: Seq[Setting[_]] =
Defaults.globalSbtCore
}
}

View File

@ -2,4 +2,4 @@ package sbt.internal.parser
import org.specs2.mutable._
trait AbstractSpec extends Specification with SplitExpression
trait AbstractSpec extends Specification with SplitExpression

View File

@ -1,67 +0,0 @@
import org.scalafmt.cli.Cli
import org.scalafmt.sbt.ScalafmtPlugin
import sbt._
import sbt.Keys._
import sbt.internal.inc.Analysis
// Taken from https://github.com/akka/alpakka/blob/master/project/AutomateScalafmtPlugin.scala
object AutomateScalafmtPlugin extends AutoPlugin {
object autoImport {
def automateScalafmtFor(configurations: Configuration*): Seq[Setting[_]] =
configurations.flatMap { c =>
inConfig(c)(
Seq(
compileInputs.in(compile) := {
scalafmtInc.value
compileInputs.in(compile).value
},
sourceDirectories.in(scalafmtInc) := Seq(scalaSource.value),
scalafmtInc := {
val cache = streams.value.cacheStoreFactory / "scalafmt"
val include = includeFilter.in(scalafmtInc).value
val exclude = excludeFilter.in(scalafmtInc).value
val sources =
sourceDirectories
.in(scalafmtInc)
.value
.descendantsExcept(include, exclude)
.get
.toSet
def format(handler: Set[File] => Unit, msg: String) = {
def update(handler: Set[File] => Unit, msg: String)(in: ChangeReport[File],
out: ChangeReport[File]) = {
val label = Reference.display(thisProjectRef.value)
val files = in.modified -- in.removed
Analysis
.counted("Scala source", "", "s", files.size)
.foreach(count => streams.value.log.info(s"$msg $count in $label ..."))
handler(files)
files
}
FileFunction.cached(cache, FilesInfo.hash, FilesInfo.exists)(update(handler, msg))(
sources
)
}
def formattingHandler(files: Set[File]) =
if (files.nonEmpty) {
val filesArg = files.map(_.getAbsolutePath).mkString(",")
Cli.main(Array("--quiet", "-i", "-f", filesArg))
}
format(formattingHandler, "Formatting")
format(_ => (), "Reformatted") // Recalculate the cache
}
)
)
}
}
private val scalafmtInc = taskKey[Unit]("Incrementally format modified sources")
override def requires = ScalafmtPlugin
override def trigger = allRequirements
override def projectSettings =
(includeFilter.in(scalafmtInc) := "*.scala") +: autoImport.automateScalafmtFor(Compile, Test)
}

View File

@ -110,6 +110,7 @@ object Dependencies {
val sjsonNewScalaJson = "com.eed3si9n" %% "sjson-new-scalajson" % "0.7.0"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.1"
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4"
val specs2 = "org.specs2" %% "specs2" % "2.4.17"
val junit = "junit" % "junit" % "4.11"

View File

@ -6,7 +6,7 @@ import com.typesafe.sbt.site.SiteScaladocPlugin.autoImport._
import com.typesafe.sbt.sbtghpages.GhpagesPlugin.autoImport._
import com.typesafe.sbt.SbtGit, SbtGit.{ git, GitKeys }
import Sxr.{ sxr, sxrConf }
*/
*/
object Docs {
def settings: Seq[Setting[_]] = Nil
@ -52,5 +52,5 @@ object Docs {
IO.copy(toCopy)
repo
}
*/
*/
}

View File

@ -10,7 +10,8 @@ object NightlyPlugin extends AutoPlugin {
val includeTestDependencies = settingKey[Boolean]("Doesn't declare test dependencies.")
def testDependencies = libraryDependencies ++= (
if (includeTestDependencies.value) Seq(scalaCheck % Test, specs2 % Test, junit % Test)
if (includeTestDependencies.value)
Seq(scalaCheck % Test, specs2 % Test, junit % Test, scalatest % Test)
else Seq()
)
}

View File

@ -5,7 +5,8 @@ scalacOptions ++= Seq("-feature", "-language:postfixOps")
// addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.0")
// addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.9.2")
// addSbtPlugin("com.typesafe.sbt" % "sbt-javaversioncheck" % "0.1.0")
addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "0.7.0-RC1")
//addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "0.7.0-RC1")
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "0.3")
// addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.2.0")
addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.4.0")
addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.3.0-M5")

View File

@ -29,12 +29,14 @@ def addDep(projectName: String) =
val checkApiMappings = taskKey[Unit]("Verifies that the API mappings are collected as expected.")
def expectedMappings = Def.task {
val version = scalaVersion.value
val binVersion = scalaBinaryVersion.value
val ms = update.value.configuration(Compile.name).get.modules.flatMap { mod =>
mod.artifacts.flatMap { case (a,f) =>
val n = a.name.stripSuffix("_" + scalaBinaryVersion.value)
val n = a.name.stripSuffix("_" + binVersion)
n match {
case "a" | "b" | "c" => (f, apiBase(n)) :: Nil
case "scala-library" => (f, scalaLibraryBase(scalaVersion.value)) :: Nil
case "scala-library" => (f, scalaLibraryBase(version)) :: Nil
case _ => Nil
}
}

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -6,10 +6,11 @@ logLevel := Level.Debug
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -10,6 +10,7 @@ lazy val root = (project in file(".")).
version := "1.0-SNAPSHOT",
autoScalaLibrary := false,
checkIvyXml := {
val resolverConverter = updateOptions.value.resolverConverter
ivySbt.value.withIvy(streams.value.log) { ivy =>
val cacheDir = ivy.getSettings.getDefaultRepositoryCacheBasedir
val xmlFile =
@ -18,7 +19,7 @@ lazy val root = (project in file(".")).
if(lines.isEmpty) sys.error(s"Unable to read $xmlFile, could not resolve geronimo...")
// Note: We do not do this if the maven plugin is enabled, because there is no rewrite of ivy.xml, extra attributes
// are handled in a different mechanism. This is a hacky mechanism to detect that.
val isMavenResolver = updateOptions.value.resolverConverter != PartialFunction.empty
val isMavenResolver = resolverConverter != PartialFunction.empty
if(!isMavenResolver) assert(lines contains "xmlns:e", s"Failed to appropriately modify ivy.xml file for sbt extra attributes!\n$lines")
val xmlFile2 = cacheDir / "com.example" / "example-child" / "ivy-1.0-SNAPSHOT.xml"

View File

@ -42,8 +42,9 @@ lazy val dependent = project.
)
TaskKey[Unit]("dumpResolvers") := {
streams.value.log.info(s" -- dependent/fullResolvers -- ")
val log = streams.value.log
log.info(s" -- dependent/fullResolvers -- ")
(fullResolvers in dependent).value foreach { r =>
streams.value.log.info(s" * ${r}")
log.info(s" * ${r}")
}
}

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -4,10 +4,11 @@ import complete.DefaultParsers._
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -6,10 +6,11 @@ crossTarget in Compile := target.value
// Reset compiler iterations, necessary because tests run in batch mode
val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
val log = streams.value.log
CompileState.previousIterations = {
val previousAnalysis = (previousCompile in Compile).value.analysis
if (previousAnalysis.isEmpty) {
streams.value.log.info("No previous analysis detected")
log.info("No previous analysis detected")
0
} else {
previousAnalysis.get match {

View File

@ -3,11 +3,12 @@ import xsbti.Maybe
import xsbti.compile.{PreviousResult, CompileAnalysis, MiniSetup}
previousCompile in Compile := {
val previous = (previousCompile in Compile).value
if (!CompileState.isNew) {
val res = new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
CompileState.isNew = true
res
} else (previousCompile in Compile).value
} else previous
}
/* Performs checks related to compilations:

View File

@ -6,11 +6,12 @@ logLevel := Level.Debug
// Reset compile status because scripted tests are run in batch mode
previousCompile in Compile := {
val previous = (previousCompile in Compile).value
if (!CompileState.isNew) {
val res = new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
CompileState.isNew = true
res
} else (previousCompile in Compile).value
} else previous
}
// disable sbt's heuristic which recompiles everything in case

View File

@ -10,8 +10,9 @@ lazy val root = (project in file(".")).
fork in Test := true,
check := {
val nbProc = java.lang.Runtime.getRuntime().availableProcessors()
val log = streams.value.log
if( nbProc < 4 ) {
streams.value.log.warn("With fewer than 4 processors this test is meaningless")
log.warn("With fewer than 4 processors this test is meaningless")
} else {
// we've got at least 4 processors, we'll check the upper end but also 3 and 4 as the upper might not
// be reached if the system is under heavy load.

View File

@ -2,16 +2,24 @@ scalaVersion in ThisBuild := "2.11.8"
concurrentRestrictions in Global := Seq(Tags.limitAll(4))
libraryDependencies += "org.specs2" %% "specs2-core" % "3.8.4" % Test
inConfig(Test)(Seq(
testGrouping := definedTests.value.map { test => new Tests.Group(test.name, Seq(test), Tests.SubProcess(
ForkOptions(
javaHome = javaHome.value,
outputStrategy = outputStrategy.value,
bootJars = Vector(),
workingDirectory = Some(baseDirectory.value),
runJVMOptions = javaOptions.value.toVector,
connectInput = connectInput.value,
envVars = envVars.value
)
))},
testGrouping := {
val home = javaHome.value
val strategy = outputStrategy.value
val baseDir = baseDirectory.value
val options = javaOptions.value
val connect = connectInput.value
val vars = envVars.value
definedTests.value.map { test => new Tests.Group(test.name, Seq(test), Tests.SubProcess(
ForkOptions(
javaHome = home,
outputStrategy = strategy,
bootJars = Vector(),
workingDirectory = Some(baseDir),
runJVMOptions = options.toVector,
connectInput = connect,
envVars = vars
)
))}
},
TaskKey[Unit]("test-failure") := test.failure.value
))

View File

@ -1,7 +1,9 @@
testOptions in Test +=
testOptions in Test += {
val baseDir = baseDirectory.value
Tests.Setup { () =>
IO.touch(baseDirectory.value / "setup")
IO.touch(baseDir / "setup")
}
}
testOptions in Test += {
val t = baseDirectory.value / "tested"
@ -11,4 +13,4 @@ testOptions in Test += {
IO.delete(t)
IO.touch(c)
}
}
}