Merge branch '1.0.0' into wip/scalafmt

Eugene Yokota 2017-08-13 21:46:37 -04:00
commit c16a3b6ffa
42 changed files with 162 additions and 218 deletions

View File

@ -8,11 +8,3 @@ docstrings = JavaDoc
# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true
# This works around sequence wildcard (`_*`) turning into `_ *`
spaces.beforeSeqWildcard = true
# Vertical alignment only => for pattern matching
align.tokens.add = [
{ code = "=>", owner = "Case" }
]
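
To make the intent concrete, here is a hypothetical Scala fragment (not from this commit) formatted the way these settings describe:

import scala.collection.{ immutable, mutable }        // space kept inside import braces

def describe(xs: Seq[Int]): String = xs match {
  case Seq()             => "empty"                    // case arrows aligned vertically
  case Seq(x)            => s"one: $x"
  case Seq(_, rest @ _*) => s"many: ${rest.size}"      // `_*` kept intact, not split into `_ *`
}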

View File

@ -16,9 +16,7 @@ matrix:
env:
matrix:
# drop scalafmt on the 1.0.0 branch to dogfood 1.0.0-RC2 before there is a sbt 1.0 of new-sbt-scalafmt
# - SBT_CMD=";mimaReportBinaryIssues;test:compile;scalafmt::test;test:scalafmt::test;mainSettingsProj/test;safeUnitTests;otherUnitTests"
- SBT_CMD=";mimaReportBinaryIssues;test:compile;mainSettingsProj/test;safeUnitTests;otherUnitTests"
- SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests"
- SBT_CMD="scripted actions/*"
- SBT_CMD="scripted apiinfo/* compiler-project/* ivy-deps-management/*"
- SBT_CMD="scripted dependency-management/*1of4"

View File

@ -32,9 +32,8 @@ def buildLevelSettings: Seq[Setting[_]] =
homepage := Some(url("https://github.com/sbt/sbt")),
scmInfo := Some(ScmInfo(url("https://github.com/sbt/sbt"), "git@github.com:sbt/sbt.git")),
resolvers += Resolver.mavenLocal,
// scalafmtOnCompile := true,
// scalafmtVersion 1.0.0-RC3 has regression
// scalafmtVersion := "0.6.8"
scalafmtOnCompile := true,
scalafmtVersion := "1.1.0",
))
def commonSettings: Seq[Setting[_]] =
@ -66,8 +65,9 @@ def testedBaseSettings: Seq[Setting[_]] =
baseSettings ++ testDependencies
val mimaSettings = Def settings (
mimaPreviousArtifacts := Set(organization.value % moduleName.value % "1.0.0-RC3"
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
mimaPreviousArtifacts := Set(
organization.value % moduleName.value % "1.0.0-RC3"
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
)
)
@ -367,7 +367,8 @@ lazy val mainProj = (project in file("main"))
addSbtLmCore,
addSbtLmIvy,
addSbtCompilerInterface,
addSbtZincCompile)
addSbtZincCompile
)
// Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object
// technically, we need a dependency on all of mainProj's dependencies, but we don't do that since this is strictly an integration project
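
As a rough sketch of that pattern (hypothetical names, not sbt's actual package object), the integration project exposes a single package object that re-exports subsystem members at the top level:

// file: internal/ManagedLogger.scala  (hypothetical subsystem definition)
package mylib.internal
final class ManagedLogger { def info(msg: String): Unit = println(msg) }

// file: package.scala  (the integration package object)
package object mylib {
  type Logger = mylib.internal.ManagedLogger             // alias visible via `import mylib._`
  def newLogger(): Logger = new mylib.internal.ManagedLogger
}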

View File

@ -82,7 +82,6 @@ object AttributeKey {
rank: Int): AttributeKey[T] =
make(name, Some(description), extend, rank)
private[sbt] def copyWithRank[T](a: AttributeKey[T], rank: Int): AttributeKey[T] =
make(a.label, a.description, a.extend, rank)(a.manifest, a.optJsonWriter)

View File

@ -19,7 +19,7 @@ abstract class JLine extends LineReader {
JLine.makeInputStream(injectThreadSleep && !Util.isNonCygwinWindows)
}
def readLine(prompt: String, mask: Option[Char] = None) =
def readLine(prompt: String, mask: Option[Char] = None) =
try {
JLine.withJLine {
unsynchronizedReadLine(prompt, mask)

View File

@ -24,8 +24,7 @@ object TokenCompletions {
}
}
val default: TokenCompletions = mapDelegateCompletions(
(seen, level, c) => ctoken(seen, c.append))
val default: TokenCompletions = mapDelegateCompletions((seen, level, c) => ctoken(seen, c.append))
def displayOnly(msg: String): TokenCompletions = new Fixed {
def completions(seen: String, level: Int) = Completions.single(Completion.displayOnly(msg))

View File

@ -100,8 +100,7 @@ object Tests {
* If None, the arguments will apply to all test frameworks.
* @param args The list of arguments to pass to the selected framework(s).
*/
final case class Argument(framework: Option[TestFramework], args: List[String])
extends TestOption
final case class Argument(framework: Option[TestFramework], args: List[String]) extends TestOption
/**
* Configures test execution.
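
For reference, a typical way Tests.Argument is used in a build definition (the ScalaTest flag is only an illustrative choice):

// pass "-oD" (show durations) to ScalaTest only; a framework of None would target every framework
testOptions in Test += Tests.Argument(Some(TestFrameworks.ScalaTest), List("-oD"))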

View File

@ -78,8 +78,7 @@ object Watched {
} catch {
case e: Exception =>
val log = s.log
log.error(
"Error occurred obtaining files to watch. Terminating continuous execution...")
log.error("Error occurred obtaining files to watch. Terminating continuous execution...")
State.handleException(e, s, log)
(false, watchState)
}

View File

@ -48,9 +48,9 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
case Some(src) if src.channelName != name =>
askUserThread match {
case Some(x) =>
// keep listening while network-origin command is running
// make sure to test Windows and Cygwin, if you uncomment
// shutdown()
// keep listening while network-origin command is running
// make sure to test Windows and Cygwin, if you uncomment
// shutdown()
case _ =>
}
case _ =>

View File

@ -8,8 +8,8 @@ object Remove {
trait Value[A, B] extends Any {
def removeValue(a: A, b: B): A
}
@implicitNotFound(msg =
"No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
@implicitNotFound(
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
trait Values[A, -B] extends Any {
def removeValues(a: A, b: B): A
}
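
For orientation, these typeclasses are what the `-=`/`--=` setting operators look up, which is when the message above appears; a minimal, hypothetical use:

// removing entries from a Seq-valued key resolves an implicit Remove.Values instance
libraryDependencies --= Seq("org.example" %% "unwanted-lib" % "0.1.0")   // hypothetical module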

View File

@ -49,25 +49,40 @@ object InputWrapper {
private[this] def implDetailError =
sys.error("This method is an implementation detail and should not be referenced.")
private[std] def wrapTask[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrapTask[T: c.WeakTypeTag](c: blackbox.Context)(
ts: c.Expr[Any],
pos: c.Position
): c.Expr[T] =
wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapTaskName)(ts, pos)
private[std] def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(
ts: c.Expr[Any],
pos: c.Position
): c.Expr[T] =
wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInitName)(ts, pos)
private[std] def wrapInitTask[T: c.WeakTypeTag](
c: blackbox.Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
private[std] def wrapInitTask[T: c.WeakTypeTag](c: blackbox.Context)(
ts: c.Expr[Any],
pos: c.Position
): c.Expr[T] =
wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInitTaskName)(ts, pos)
private[std] def wrapInitInputTask[T: c.WeakTypeTag](
c: blackbox.Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
private[std] def wrapInitInputTask[T: c.WeakTypeTag](c: blackbox.Context)(
ts: c.Expr[Any],
pos: c.Position
): c.Expr[T] =
wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInitInputName)(ts, pos)
private[std] def wrapInputTask[T: c.WeakTypeTag](
c: blackbox.Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
private[std] def wrapInputTask[T: c.WeakTypeTag](c: blackbox.Context)(
ts: c.Expr[Any],
pos: c.Position
): c.Expr[T] =
wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInputName)(ts, pos)
private[std] def wrapPrevious[T: c.WeakTypeTag](
c: blackbox.Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[Option[T]] =
private[std] def wrapPrevious[T: c.WeakTypeTag](c: blackbox.Context)(
ts: c.Expr[Any],
pos: c.Position
): c.Expr[Option[T]] =
wrapImpl[Option[T], InputWrapper.type](c, InputWrapper, WrapPreviousName)(ts, pos)
/**
@ -79,8 +94,8 @@ object InputWrapper {
def wrapImpl[T: c.WeakTypeTag, S <: AnyRef with Singleton](
c: blackbox.Context,
s: S,
wrapName: String)(ts: c.Expr[Any], pos: c.Position)(
implicit it: c.TypeTag[s.type]): c.Expr[T] = {
wrapName: String
)(ts: c.Expr[Any], pos: c.Position)(implicit it: c.TypeTag[s.type]): c.Expr[T] = {
import c.universe.{ Apply => ApplyTree, _ }
import internal.decorators._
val util = new ContextUtil[c.type](c)
@ -260,8 +275,7 @@ object ParserInput {
wrap[T](c)(c.universe.reify { Def.toSParser(e.splice) }, pos)
}
private def wrapInitParser[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree,
pos: c.Position) = {
private def wrapInitParser[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree, pos: c.Position) = {
val e = c.Expr[Initialize[Parser[T]]](tree)
val es = c.universe.reify { Def.toISParser(e.splice) }
wrapInit[T](c)(es, pos)

View File

@ -49,8 +49,7 @@ object InitializeConvert extends Convert {
object SettingMacro {
import LinterDSL.{ Empty => EmptyLinter }
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[T]] =
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform[c.type])

View File

@ -179,8 +179,7 @@ object TaskMacro {
f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[T]): c.Expr[Setting[T]] =
def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[T]] =
settingAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
@ -301,8 +300,7 @@ object TaskMacro {
}
}
private[this] def transformMacroImpl(c: blackbox.Context)(init: c.Tree)(
newName: String): c.Tree = {
private[this] def transformMacroImpl(c: blackbox.Context)(init: c.Tree)(newName: String): c.Tree = {
import c.universe._
val target =
c.macroApplication match {

View File

@ -58,9 +58,9 @@ object BuildPaths {
fileSetting(globalSettingsDirectory, GlobalSettingsProperty, globalBase)(state)
def getDependencyDirectory(state: State, globalBase: File): File =
fileSetting(dependencyBaseDirectory,
DependencyBaseProperty,
defaultDependencyBase(globalBase))(state)
fileSetting(dependencyBaseDirectory, DependencyBaseProperty, defaultDependencyBase(globalBase))(
state
)
def getZincDirectory(state: State, globalBase: File): File =
fileSetting(globalZincDirectory, GlobalZincProperty, defaultGlobalZinc(globalBase))(state)

View File

@ -12,14 +12,7 @@ import java.util.concurrent.{ TimeUnit, Callable }
import Keys._
import org.apache.ivy.core.module.{ descriptor, id }, descriptor.ModuleDescriptor,
id.ModuleRevisionId
import Project.{
inConfig,
inScope,
inTask,
richInitialize,
richInitializeTask,
richTaskSessionVar
}
import Project.{ inConfig, inScope, inTask, richInitialize, richInitializeTask, richTaskSessionVar }
import sbt.internal._
import sbt.internal.CommandStrings.ExportStream
import sbt.internal.inc.ZincUtil
@ -419,10 +412,7 @@ object Defaults extends BuildCommon {
derive(scalaBinaryVersion := binaryScalaVersion(scalaVersion.value))
))
def makeCrossSources(scalaSrcDir: File,
javaSrcDir: File,
sv: String,
cross: Boolean): Seq[File] = {
def makeCrossSources(scalaSrcDir: File, javaSrcDir: File, sv: String, cross: Boolean): Seq[File] = {
if (cross)
Seq(scalaSrcDir.getParentFile / s"${scalaSrcDir.name}-$sv", scalaSrcDir, javaSrcDir)
else
@ -1070,8 +1060,7 @@ object Defaults extends BuildCommon {
case None => scope :: Nil
}
def packageTaskSettings(key: TaskKey[File],
mappingsTask: Initialize[Task[Seq[(File, String)]]]) =
def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) =
inTask(key)(
Seq(
key in TaskZero := packageTask.value,
@ -2125,12 +2114,13 @@ object Classpaths {
LibraryManagement.transitiveScratch(
lm,
"sbt",
GetClassifiersConfiguration(mod,
excludes.toVector,
c.withArtifactFilter(c.artifactFilter.map(af =>
af.withInverted(!af.inverted))),
srcTypes.toVector,
docTypes.toVector),
GetClassifiersConfiguration(
mod,
excludes.toVector,
c.withArtifactFilter(c.artifactFilter.map(af => af.withInverted(!af.inverted))),
srcTypes.toVector,
docTypes.toVector
),
uwConfig,
log
) match {
@ -2171,11 +2161,11 @@ object Classpaths {
val s = streams.value
val skp = (skip in publish).value
val ref = thisProjectRef.value
if (skp) Def.task { s.log.debug(s"Skipping publish* for ${ref.project}") }
else Def.task {
val cfg = config.value
IvyActions.publish(ivyModule.value, config.value, s.log)
}
if (skp) Def.task { s.log.debug(s"Skipping publish* for ${ref.project}") } else
Def.task {
val cfg = config.value
IvyActions.publish(ivyModule.value, config.value, s.log)
}
} tag (Tags.Publish, Tags.Network)
val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] =

View File

@ -114,10 +114,8 @@ final case class Extracted(structure: BuildStructure,
display.show(ScopedKey(scope, key)) + " is undefined.")
def append(settings: Seq[Setting[_]], state: State): State = {
val appendSettings = Load.transformSettings(Load.projectScope(currentRef),
currentRef.build,
rootProject,
settings)
val appendSettings =
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
val newStructure = Load.reapply(session.original ++ appendSettings, structure)
Project.setProject(session, newStructure, state)
}

View File

@ -47,9 +47,9 @@ private[sbt] object PluginCross {
state.log.info(s"Setting `sbtVersion in pluginCrossBuild` to $version")
val add = List(sbtVersion in GlobalScope in pluginCrossBuild :== version) ++
List(scalaVersion := scalaVersionSetting.value) ++
inScope(GlobalScope.copy(project = Select(currentRef)))(Seq(
scalaVersion := scalaVersionSetting.value
))
inScope(GlobalScope.copy(project = Select(currentRef)))(
Seq(scalaVersion := scalaVersionSetting.value)
)
val cleared = session.mergeSettings.filterNot(crossExclude)
val newStructure = Load.reapply(cleared ++ add, structure)
Project.setProject(session, newStructure, command :: state)

View File

@ -206,7 +206,8 @@ object Plugins extends PluginsFunctions {
val selectedPlugins = selectedAtoms map { a =>
byAtomMap.getOrElse(
a,
throw AutoPluginException(s"${a} was not found in atom map."))
throw AutoPluginException(s"${a} was not found in atom map.")
)
}
val forbidden: Set[AutoPlugin] =
(selectedPlugins flatMap { Plugins.asExclusions }).toSet

View File

@ -26,8 +26,7 @@ object SessionVar {
set(key, state, value)
}
def persist[T](key: ScopedKey[Task[T]], state: State, value: T)(
implicit f: JsonFormat[T]): Unit =
def persist[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit f: JsonFormat[T]): Unit =
Project.structure(state).streams(state).use(key)(s => s.getOutput(DefaultDataID).write(value))
def clear(s: State): State = s.put(sessionVars, SessionVar.emptyMap)
@ -41,14 +40,11 @@ object SessionVar {
def orEmpty(opt: Option[Map]) = opt getOrElse emptyMap
def transform[S](task: Task[S], f: (State, S) => State): Task[S] = {
val g = (s: S, map: AttributeMap) =>
map.put(Keys.transformState, (state: State) => f(state, s))
val g = (s: S, map: AttributeMap) => map.put(Keys.transformState, (state: State) => f(state, s))
task.copy(info = task.info.postTransform(g))
}
def resolveContext[T](key: ScopedKey[Task[T]],
context: Scope,
state: State): ScopedKey[Task[T]] = {
def resolveContext[T](key: ScopedKey[Task[T]], context: Scope, state: State): ScopedKey[Task[T]] = {
val subScope = Scope.replaceThis(context)(key.scope)
val scope = Project.structure(state).data.definingScope(subScope, key.key) getOrElse subScope
ScopedKey(scope, key.key)

View File

@ -159,11 +159,14 @@ object Act {
case ParsedZero => None :: Nil
case pv: ParsedValue[x] => Some(pv.value) :: Nil
}
def defaultConfigurations(
proj: Option[ResolvedReference],
index: KeyIndex,
defaultConfigs: Option[ResolvedReference] => Seq[String]): Seq[String] =
defaultConfigs: Option[ResolvedReference] => Seq[String]
): Seq[String] =
if (index exists proj) defaultConfigs(proj) else Nil
def nonEmptyConfig(index: KeyIndex,
proj: Option[ResolvedReference]): String => Seq[Option[String]] =
config => if (index.isEmpty(proj, Some(config))) Nil else Some(config) :: Nil

View File

@ -204,8 +204,7 @@ object Aggregation {
if (other.nonEmpty) {
val inputStrings = inputTasks.map(_.key).mkString("Input task(s):\n\t", "\n\t", "\n")
val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n")
failure(
s"Cannot mix input tasks with plain tasks/settings. $inputStrings $otherStrings")
failure(s"Cannot mix input tasks with plain tasks/settings. $inputStrings $otherStrings")
} else
applyDynamicTasks(s, structure, maps(inputTasks)(castToAny), show)
} else {

View File

@ -138,8 +138,8 @@ $LastCommand <key>
""".stripMargin.trim
val SetCommand = "set"
val setBrief = (s"$SetCommand [every] <setting>",
"Evaluates a Setting and applies it to the current project.")
val setBrief =
(s"$SetCommand [every] <setting>", "Evaluates a Setting and applies it to the current project.")
val setDetailed =
s"""$SetCommand [every] <setting-expression>
@ -275,8 +275,7 @@ $ProjectsCommand remove <URI>+
def LoadProjectImpl = "loadp"
def LoadProject = "reload"
def LoadProjectBrief =
(LoadProject,
"(Re)loads the current project or changes to plugins project or returns from it.")
(LoadProject, "(Re)loads the current project or changes to plugins project or returns from it.")
def LoadProjectDetailed =
s"""$LoadProject

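As an illustration of the `set [every]` command described above (hypothetical settings, typed at the sbt prompt):

> set scalacOptions += "-deprecation"   // applies to the current project
> set every logLevel := Level.Debug     // `every` applies the setting across all scopes
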
View File

@ -38,9 +38,8 @@ object KeyIndex {
concat(_.tasks(proj, conf, key))
def keys(proj: Option[ResolvedReference]) = concat(_.keys(proj))
def keys(proj: Option[ResolvedReference], conf: Option[String]) = concat(_.keys(proj, conf))
def keys(proj: Option[ResolvedReference],
conf: Option[String],
task: Option[AttributeKey[_]]) = concat(_.keys(proj, conf, task))
def keys(proj: Option[ResolvedReference], conf: Option[String], task: Option[AttributeKey[_]]) =
concat(_.keys(proj, conf, task))
def concat[T](f: KeyIndex => Set[T]): Set[T] =
(Set.empty[T] /: indices)((s, k) => s ++ f(k))
}

View File

@ -742,9 +742,8 @@ private[sbt] object Load {
val defs = if (defsScala.isEmpty) defaultBuildIfNone :: Nil else defsScala
// HERE we pull out the defined vals from memoSettings and unify them all so
// we can use them later.
val valDefinitions = memoSettings.values.foldLeft(DefinedSbtValues.empty) {
(prev, sbtFile) =>
prev.zip(sbtFile.definitions)
val valDefinitions = memoSettings.values.foldLeft(DefinedSbtValues.empty) { (prev, sbtFile) =>
prev.zip(sbtFile.definitions)
}
val loadedDefs = new LoadedDefinitions(
defDir,

View File

@ -167,8 +167,7 @@ object LogManager {
def command(useFormat: Boolean) =
if (useFormat) BLUE + commandBase + RESET else s"'$commandBase'"
context =>
Some(
"Stack trace suppressed: run %s for the full output.".format(command(context.useFormat)))
Some("Stack trace suppressed: run %s for the full output.".format(command(context.useFormat)))
}
def unwrapStreamsKey(key: ScopedKey[_]): ScopedKey[_] = key.scope.task match {

View File

@ -63,10 +63,8 @@ private[sbt] object SettingCompletions {
settings: Seq[Def.Setting[_]],
arg: String): SetResult = {
import extracted._
val append = Load.transformSettings(Load.projectScope(currentRef),
currentRef.build,
rootProject,
settings)
val append =
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList)))
val r = relation(newSession.mergeSettings, true)(structure.delegates,
structure.scopeLocal,

View File

@ -382,43 +382,27 @@ trait TaskSequential {
),
last
)
def sequential[A0,
A1,
A2,
A3,
A4,
A5,
A6,
A7,
A8,
A9,
A10,
A11,
A12,
A13,
A14,
A15,
A16,
A17,
B](task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
last: Initialize[Task[B]]): Initialize[Task[B]] =
def sequential[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, B](
task0: Initialize[Task[A0]],
task1: Initialize[Task[A1]],
task2: Initialize[Task[A2]],
task3: Initialize[Task[A3]],
task4: Initialize[Task[A4]],
task5: Initialize[Task[A5]],
task6: Initialize[Task[A6]],
task7: Initialize[Task[A7]],
task8: Initialize[Task[A8]],
task9: Initialize[Task[A9]],
task10: Initialize[Task[A10]],
task11: Initialize[Task[A11]],
task12: Initialize[Task[A12]],
task13: Initialize[Task[A13]],
task14: Initialize[Task[A14]],
task15: Initialize[Task[A15]],
task16: Initialize[Task[A16]],
task17: Initialize[Task[A17]],
last: Initialize[Task[B]]
): Initialize[Task[B]] =
sequential(
List(
unitTask(task0),

View File

@ -185,8 +185,7 @@ sealed trait ParsedSbtFileExpressions {
* @param file The file we're parsing (may be a dummy file)
* @param lines The parsed "lines" of the file, where each string is a line.
*/
private[sbt] case class SbtParser(file: File, lines: Seq[String])
extends ParsedSbtFileExpressions {
private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends ParsedSbtFileExpressions {
//settingsTrees,modifiedContent needed for "session save"
// TODO - We should look into splitting out "definitions" vs. "settings" here instead of further string lookups, since we have the
// parsed trees.
@ -265,8 +264,7 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String])
* @param imports - trees
* @return imports per line
*/
private def importsToLineRanges(modifiedContent: String,
imports: Seq[Tree]): Seq[(String, Int)] = {
private def importsToLineRanges(modifiedContent: String, imports: Seq[Tree]): Seq[(String, Int)] = {
val toLineRange = imports map convertImport(modifiedContent)
val groupedByLineNumber = toLineRange.groupBy { case (_, lineNumber) => lineNumber }
val mergedImports = groupedByLineNumber.map {

View File

@ -76,8 +76,7 @@ final class NetworkChannel(val name: String, connection: Socket, structure: Buil
}
private def onExecCommand(cmd: ExecCommand) =
append(
Exec(cmd.commandLine, cmd.execId orElse Some(Exec.newExecId), Some(CommandSource(name))))
append(Exec(cmd.commandLine, cmd.execId orElse Some(Exec.newExecId), Some(CommandSource(name))))
private def onSettingQuery(req: SettingQuery) =
StandardMain.exchange publishEventMessage SettingQuery.handleSettingQuery(req, structure)

View File

@ -87,9 +87,7 @@ object ParseKey extends Properties("Key parser test") {
structureGen: Gen[Structure]): Gen[StructureKeyMask] =
for (mask <- maskGen; structure <- structureGen; key <- genKey(structure))
yield new StructureKeyMask(structure, key, mask)
final class StructureKeyMask(val structure: Structure,
val key: ScopedKey[_],
val mask: ScopeMask)
final class StructureKeyMask(val structure: Structure, val key: ScopedKey[_], val mask: ScopeMask)
def resolve(structure: Structure, key: ScopedKey[_], mask: ScopeMask): ScopedKey[_] =
ScopedKey(Resolve(structure.extra, Select(structure.current), key.key, mask)(key.scope),

View File

@ -184,8 +184,7 @@ abstract class TestBuild {
project <- oneOf(build.projects)
cAxis <- oneOrGlobal(project.configurations map toConfigKey)
tAxis <- oneOrGlobal(env.tasks map getKey)
pAxis <- orGlobal(
frequency((1, BuildRef(build.uri)), (3, ProjectRef(build.uri, project.id))))
pAxis <- orGlobal(frequency((1, BuildRef(build.uri)), (3, ProjectRef(build.uri, project.id))))
} yield Scope(pAxis, cAxis, tAxis, Zero)
def orGlobal[T](gen: Gen[T]): Gen[ScopeAxis[T]] =
@ -265,9 +264,7 @@ abstract class TestBuild {
maxDeps: Gen[Int],
count: Gen[Int]): Gen[Seq[Config]] =
genAcyclicDirect[Config, String](maxDeps, genName, count)((key, deps) => new Config(key, deps))
def genTasks(implicit genName: Gen[String],
maxDeps: Gen[Int],
count: Gen[Int]): Gen[Seq[Taskk]] =
def genTasks(implicit genName: Gen[String], maxDeps: Gen[Int], count: Gen[Int]): Gen[Seq[Taskk]] =
genAcyclicDirect[Taskk, String](maxDeps, genName, count)((key, deps) =>
new Taskk(AttributeKey[String](key), deps))
@ -286,8 +283,7 @@ abstract class TestBuild {
genAcyclic(maxDeps, keys.distinct)(make)
}
}
def genAcyclic[A, T](maxDeps: Gen[Int], keys: List[T])(
make: T => Gen[Seq[A] => A]): Gen[Seq[A]] =
def genAcyclic[A, T](maxDeps: Gen[Int], keys: List[T])(make: T => Gen[Seq[A] => A]): Gen[Seq[A]] =
genAcyclic(maxDeps, keys, Nil) flatMap { pairs =>
sequence(pairs.map { case (key, deps) => mapMake(key, deps, make) }) flatMap { inputs =>
val made = new collection.mutable.HashMap[T, A]

View File

@ -26,8 +26,7 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec
}
}
private def runTestOnFiles(
expectedResultAndMap: File => Seq[(List[String], Seq[SessionSetting])])
private def runTestOnFiles(expectedResultAndMap: File => Seq[(List[String], Seq[SessionSetting])])
: MatchResult[GenTraversableOnce[File]] = {
val allFiles = rootDir

View File

@ -78,7 +78,8 @@ object Dependencies {
def addSbtUtilScripted(p: Project): Project =
addSbtModule(p, sbtUtilPath, "utilScripted", utilScripted)
def addSbtLmCore(p: Project): Project = addSbtModule(p, sbtLmPath, "lmCore", libraryManagementCore)
def addSbtLmCore(p: Project): Project =
addSbtModule(p, sbtLmPath, "lmCore", libraryManagementCore)
def addSbtLmIvy(p: Project): Project = addSbtModule(p, sbtLmPath, "lmIvy", libraryManagementIvy)
def addSbtCompilerInterface(p: Project): Project =
@ -95,7 +96,9 @@ object Dependencies {
def addSbtZincCompile(p: Project): Project =
addSbtModule(p, sbtZincPath, "zincCompile", zincCompile)
val sjsonNewScalaJson = Def.setting { "com.eed3si9n" %% "sjson-new-scalajson" % contrabandSjsonNewVersion.value }
val sjsonNewScalaJson = Def.setting {
"com.eed3si9n" %% "sjson-new-scalajson" % contrabandSjsonNewVersion.value
}
val jline = "jline" % "jline" % "2.14.4"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.1"

View File

@ -19,7 +19,6 @@ object PublishBinPlugin extends AutoPlugin {
override def projectSettings = Def settings (
publishLocalBin := Classpaths.publishTask(publishLocalBinConfig, deliverLocal).value,
publishLocalBinConfig := {
val _ = deliverLocal.value
Classpaths.publishConfig(
@ -31,9 +30,9 @@ object PublishBinPlugin extends AutoPlugin {
(checksums in publishLocalBin).value.toVector,
resolverName = "local",
logging = ivyLoggingLevel.value,
overwrite = isSnapshot.value)
overwrite = isSnapshot.value
)
},
packagedArtifacts in publishLocalBin := Classpaths.packaged(Seq(packageBin in Compile)).value
)

View File

@ -105,8 +105,7 @@ object Util {
val timestamp = formatter.format(new Date)
val content = versionLine(version) + "\ntimestamp=" + timestamp
val f = dir / "xsbt.version.properties"
if (!f.exists || f.lastModified < lastCompilationTime(analysis) || !containsVersion(f,
version)) {
if (!f.exists || f.lastModified < lastCompilationTime(analysis) || !containsVersion(f, version)) {
s.log.info("Writing version information to " + f + " :\n" + content)
IO.write(f, content)
}

View File

@ -10,4 +10,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.17")
addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.1")
addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.3.0")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0-M1")
// addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.3")
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.10")

View File

@ -19,9 +19,7 @@ object OutputStrategy {
* the `error` level. The output is buffered until the process completes, at which point
* the logger flushes it (to the screen, for example).
*/
final class BufferedOutput private (val logger: Logger)
extends OutputStrategy
with Serializable {
final class BufferedOutput private (val logger: Logger) extends OutputStrategy with Serializable {
override def equals(o: Any): Boolean = o match {
case x: BufferedOutput => (this.logger == x.logger)
case _ => false

View File

@ -24,23 +24,24 @@ final class SbtHandler(directory: File,
type State = Option[SbtInstance]
def initialState = None
def apply(command: String,
arguments: List[String],
i: Option[SbtInstance]): Option[SbtInstance] = onSbtInstance(i) { (process, server) =>
send((command :: arguments.map(escape)).mkString(" "), server)
receive(command + " failed", server)
}
def onSbtInstance(i: Option[SbtInstance])(
f: (Process, IPC.Server) => Unit): Option[SbtInstance] = i match {
case Some(ai @ SbtInstance(process, server)) if server.isClosed =>
finish(i)
onNewSbtInstance(f)
case Some(SbtInstance(process, server)) =>
f(process, server)
i
case None =>
onNewSbtInstance(f)
}
def apply(command: String, arguments: List[String], i: Option[SbtInstance]): Option[SbtInstance] =
onSbtInstance(i) { (process, server) =>
send((command :: arguments.map(escape)).mkString(" "), server)
receive(command + " failed", server)
}
def onSbtInstance(i: Option[SbtInstance])(f: (Process, IPC.Server) => Unit): Option[SbtInstance] =
i match {
case Some(ai @ SbtInstance(process, server)) if server.isClosed =>
finish(i)
onNewSbtInstance(f)
case Some(SbtInstance(process, server)) =>
f(process, server)
i
case None =>
onNewSbtInstance(f)
}
private[this] def onNewSbtInstance(f: (Process, IPC.Server) => Unit): Option[SbtInstance] = {
val server = IPC.unmanagedServer
val p = try newRemote(server)

View File

@ -65,8 +65,7 @@ final class ScriptedTests(resourceBaseDirectory: File,
}
}
private def createScriptedHandlers(testDir: File,
buffered: Logger): Map[Char, StatementHandler] = {
private def createScriptedHandlers(testDir: File, buffered: Logger): Map[Char, StatementHandler] = {
val fileHandler = new FileCommands(testDir)
val sbtHandler = new SbtHandler(testDir, launcher, buffered, launchOpts)
Map('$' -> fileHandler, '>' -> sbtHandler, '#' -> CommentHandler)

View File

@ -26,14 +26,7 @@ import sbt.io.syntax._
import sbt.internal.util.ManagedLogger
import sjsonnew.{ IsoString, SupportConverter }
import sbt.util.{
CacheStoreFactory,
DirectoryStoreFactory,
Input,
Output,
PlainInput,
PlainOutput
}
import sbt.util.{ CacheStoreFactory, DirectoryStoreFactory, Input, Output, PlainInput, PlainOutput }
// no longer specific to Tasks, so 'TaskStreams' should be renamed
/**

View File

@ -222,11 +222,11 @@ trait TaskExtra {
IO.readLines(s.readText(key(in), sid))
}
}
implicit def processToTask(p: ProcessBuilder)(
implicit streams: Task[TaskStreams[_]]): Task[Int] = streams map { s =>
val pio = TaskExtra.processIO(s)
(p run pio).exitValue
}
implicit def processToTask(p: ProcessBuilder)(implicit streams: Task[TaskStreams[_]]): Task[Int] =
streams map { s =>
val pio = TaskExtra.processIO(s)
(p run pio).exitValue
}
}
object TaskExtra extends TaskExtra {
def processIO(s: TaskStreams[_]): ProcessIO = {

View File

@ -27,11 +27,10 @@ object TaskRunnerForkTest extends Properties("TaskRunner Fork") {
def inner(i: Int) = List.range(0, b).map(j => task(j).named(j.toString)).join
tryRun(List.range(0, a).map(inner).join, false, workers)
}
property("fork and reduce") = forAll(TaskListGen, MaxWorkersGen) {
(m: List[Int], workers: Int) =>
m.nonEmpty ==> {
val expected = m.sum
checkResult(tryRun(m.tasks.reduced(_ + _), false, workers), expected)
}
property("fork and reduce") = forAll(TaskListGen, MaxWorkersGen) { (m: List[Int], workers: Int) =>
m.nonEmpty ==> {
val expected = m.sum
checkResult(tryRun(m.tasks.reduced(_ + _), false, workers), expected)
}
}
}