Remove all warnings from mainProj

This commit is contained in:
Dale Wijnand 2017-12-14 13:41:40 +00:00
parent f50260218d
commit a90832b593
No known key found for this signature in database
GPG Key ID: 4F256E3D151DF5EF
39 changed files with 502 additions and 446 deletions

View File

@ -421,17 +421,17 @@ lazy val mainProj = (project in file("main"))
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// Changed the signature of NetworkChannel ctor. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.server.NetworkChannel.*"),
// ctor for ConfigIndex. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.ConfigIndex.*"),
// Changed signature or removed something in the internal package
exclude[DirectMissingMethodProblem]("sbt.internal.*"),
// New and changed methods on KeyIndex. internal.
exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"),
exclude[DirectMissingMethodProblem]("sbt.internal.KeyIndex.*"),
// Removed unused val. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.RelayAppender.jsonFormat"),
// Removed unused def. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.Load.isProjectThis"),
// Changed signature or removed private[sbt] methods
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"),
exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"),
)
)
.configure(

View File

@ -324,6 +324,8 @@ object Scoped {
"0.13.2")
def task: SettingKey[Task[S]] = scopedSetting(scope, key)
def toSettingKey: SettingKey[Task[S]] = scopedSetting(scope, key)
def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key)
def ? : Initialize[Task[Option[S]]] = Def.optional(scopedKey) {

View File

@ -72,8 +72,7 @@ object Cross {
} & spacedFirst(CrossCommand)
}
private def crossRestoreSessionParser(state: State): Parser[String] =
token(CrossRestoreSessionCommand)
private def crossRestoreSessionParser: Parser[String] = token(CrossRestoreSessionCommand)
private[sbt] def requireSession[T](p: State => Parser[T]): State => Parser[T] =
s => if (s get sessionSettings isEmpty) failure("No project loaded") else p(s)
@ -189,9 +188,10 @@ object Cross {
}
def crossRestoreSession: Command =
Command.arb(crossRestoreSessionParser, crossRestoreSessionHelp)(crossRestoreSessionImpl)
Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)((s, _) =>
crossRestoreSessionImpl(s))
private def crossRestoreSessionImpl(state: State, arg: String): State = {
private def crossRestoreSessionImpl(state: State): State = {
restoreCapturedSession(state, Project.extract(state))
}

View File

@ -670,7 +670,6 @@ object Defaults extends BuildCommon {
(testGrouping in test).value,
(testExecution in test).value,
(fullClasspath in test).value,
(javaHome in test).value,
testForkedParallel.value,
(javaOptions in test).value
)
@ -828,7 +827,6 @@ object Defaults extends BuildCommon {
testGrouping.value,
newConfig,
fullClasspath.value,
javaHome.value,
testForkedParallel.value,
javaOptions.value
)
@ -855,20 +853,20 @@ object Defaults extends BuildCommon {
}
}
private[sbt] def allTestGroupsTask(s: TaskStreams,
frameworks: Map[TestFramework, Framework],
loader: ClassLoader,
groups: Seq[Tests.Group],
config: Tests.Execution,
cp: Classpath,
javaHome: Option[File]): Initialize[Task[Tests.Output]] = {
private[sbt] def allTestGroupsTask(
s: TaskStreams,
frameworks: Map[TestFramework, Framework],
loader: ClassLoader,
groups: Seq[Tests.Group],
config: Tests.Execution,
cp: Classpath,
): Initialize[Task[Tests.Output]] = {
allTestGroupsTask(s,
frameworks,
loader,
groups,
config,
cp,
javaHome,
forkedParallelExecution = false,
javaOptions = Nil)
}
@ -880,7 +878,6 @@ object Defaults extends BuildCommon {
groups: Seq[Tests.Group],
config: Tests.Execution,
cp: Classpath,
javaHome: Option[File],
forkedParallelExecution: Boolean): Initialize[Task[Tests.Output]] = {
allTestGroupsTask(s,
frameworks,
@ -888,7 +885,6 @@ object Defaults extends BuildCommon {
groups,
config,
cp,
javaHome,
forkedParallelExecution,
javaOptions = Nil)
}
@ -899,12 +895,11 @@ object Defaults extends BuildCommon {
groups: Seq[Tests.Group],
config: Tests.Execution,
cp: Classpath,
javaHome: Option[File],
forkedParallelExecution: Boolean,
javaOptions: Seq[String]): Initialize[Task[Tests.Output]] = {
val runners = createTestRunners(frameworks, loader, config)
val groupTasks = groups map {
case Tests.Group(name, tests, runPolicy) =>
case Tests.Group(_, tests, runPolicy) =>
runPolicy match {
case Tests.SubProcess(opts) =>
s.log.debug(s"javaOptions: ${opts.runJVMOptions}")
@ -1606,7 +1601,11 @@ object Defaults extends BuildCommon {
val sv = (sbtVersion in pluginCrossBuild).value
val scalaV = (scalaVersion in pluginCrossBuild).value
val binVersion = (scalaBinaryVersion in pluginCrossBuild).value
val cross = if (id.crossVersioned) CrossVersion.binary else Disabled()
val cross = id.crossVersionedValue match {
case CrossValue.Disabled => Disabled()
case CrossValue.Full => CrossVersion.full
case CrossValue.Binary => CrossVersion.binary
}
val base = ModuleID(id.groupID, id.name, sv).withCrossVersion(cross)
CrossVersion(scalaV, binVersion)(base).withCrossVersion(Disabled())
}
@ -1699,7 +1698,7 @@ object Classpaths {
}
def packaged(pkgTasks: Seq[TaskKey[File]]): Initialize[Task[Map[Artifact, File]]] =
enabledOnly(packagedArtifact.task, pkgTasks) apply (_.join.map(_.toMap))
enabledOnly(packagedArtifact.toSettingKey, pkgTasks) apply (_.join.map(_.toMap))
def artifactDefs(pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[Artifact]] =
enabledOnly(artifact, pkgTasks)
@ -1709,8 +1708,10 @@ object Classpaths {
case (a, true) => a
})
def forallIn[T](key: Scoped.ScopingSetting[SettingKey[T]],
pkgTasks: Seq[TaskKey[_]]): Initialize[Seq[T]] =
def forallIn[T](
key: Scoped.ScopingSetting[SettingKey[T]], // should be just SettingKey[T] (mea culpa)
pkgTasks: Seq[TaskKey[_]],
): Initialize[Seq[T]] =
pkgTasks.map(pkg => key in pkg.scope in pkg).join
private[this] def publishGlobalDefaults =
@ -1740,9 +1741,9 @@ object Classpaths {
deliver := deliverTask(makeIvyXmlConfiguration).value,
deliverLocal := deliverTask(makeIvyXmlLocalConfiguration).value,
makeIvyXml := deliverTask(makeIvyXmlConfiguration).value,
publish := publishTask(publishConfiguration, deliver).value,
publishLocal := publishTask(publishLocalConfiguration, deliverLocal).value,
publishM2 := publishTask(publishM2Configuration, deliverLocal).value
publish := publishTask(publishConfiguration).value,
publishLocal := publishTask(publishLocalConfiguration).value,
publishM2 := publishTask(publishM2Configuration).value
)
private[this] def baseGlobalDefaults =
@ -1816,7 +1817,7 @@ object Classpaths {
appResolvers.value,
useJCenter.value) match {
case (Some(delegated), Seq(), _, _) => delegated
case (_, rs, Some(ars), uj) => ars ++ rs
case (_, rs, Some(ars), _) => ars ++ rs
case (_, rs, _, uj) => Resolver.combineDefaultResolvers(rs.toVector, uj, mavenCentral = true)
}),
appResolvers := {
@ -2027,7 +2028,6 @@ object Classpaths {
val docTypes = docArtifactTypes.value
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val scalaModule = scalaModuleInfo.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
lm.updateClassifiers(
GetClassifiersConfiguration(
@ -2058,7 +2058,6 @@ object Classpaths {
// Override the default to handle mixing in the sbtPlugin + scala dependencies.
allDependencies := {
val base = projectDependencies.value ++ libraryDependencies.value
val dependency = sbtDependency.value
val isPlugin = sbtPlugin.value
val sbtdeps =
(sbtDependency in pluginCrossBuild).value.withConfigurations(Some(Provided.name))
@ -2177,9 +2176,6 @@ object Classpaths {
val log = s.log
val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
val ivy = scalaModuleInfo.value
val st = state.value
withExcludes(out, mod.classifiers, lock(app)) {
excludes =>
// val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
@ -2196,7 +2192,7 @@ object Classpaths {
uwConfig,
log
) match {
case Left(uw) => ???
case Left(_) => ???
case Right(ur) => ur
}
}
@ -2227,16 +2223,20 @@ object Classpaths {
IvyActions.deliver(ivyModule.value, config.value, streams.value.log)
}
def publishTask(config: TaskKey[PublishConfiguration],
deliverKey: TaskKey[_]): Initialize[Task[Unit]] =
@deprecated("Use variant without delivery key", "1.1.1")
def publishTask(
config: TaskKey[PublishConfiguration],
deliverKey: TaskKey[_],
): Initialize[Task[Unit]] =
publishTask(config)
def publishTask(config: TaskKey[PublishConfiguration]): Initialize[Task[Unit]] =
Def.taskDyn {
val s = streams.value
val skp = (skip in publish).value
val ref = thisProjectRef.value
if (skp) Def.task { s.log.debug(s"Skipping publish* for ${ref.project}") } else
Def.task {
IvyActions.publish(ivyModule.value, config.value, s.log)
}
Def.task { IvyActions.publish(ivyModule.value, config.value, s.log) }
} tag (Tags.Publish, Tags.Network)
val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] =
@ -2403,7 +2403,7 @@ object Classpaths {
s.init.evaluate(empty) map { _ -> s.pos }
}: _*)
} catch {
case NonFatal(e) => Map()
case NonFatal(_) => Map()
}
val outCacheStore = cacheStoreFactory make "output_dsp"
@ -2708,14 +2708,16 @@ object Classpaths {
data: Settings[Scope],
deps: BuildDependencies): Initialize[Task[Classpath]] =
Def.value {
interDependencies(projectRef,
deps,
conf,
conf,
data,
TrackLevel.TrackAlways,
true,
unmanagedLibs0)
interDependencies(
projectRef,
deps,
conf,
conf,
data,
TrackLevel.TrackAlways,
true,
(dep, conf, data, _) => unmanagedLibs(dep, conf, data),
)
}
private[sbt] def internalDependenciesImplTask(projectRef: ProjectRef,
conf: Configuration,
@ -2820,20 +2822,19 @@ object Classpaths {
case TrackLevel.TrackIfMissing => getClasspath(exportedProductJarsIfMissing, dep, conf, data)
case TrackLevel.TrackAlways => getClasspath(exportedProductJars, dep, conf, data)
}
private[sbt] def unmanagedLibs0(dep: ResolvedReference,
conf: String,
data: Settings[Scope],
track: TrackLevel): Task[Classpath] =
unmanagedLibs(dep, conf, data)
def unmanagedLibs(dep: ResolvedReference, conf: String, data: Settings[Scope]): Task[Classpath] =
getClasspath(unmanagedJars, dep, conf, data)
def getClasspath(key: TaskKey[Classpath],
dep: ResolvedReference,
conf: String,
data: Settings[Scope]): Task[Classpath] =
(key in (dep, ConfigKey(conf))) get data getOrElse constant(Nil)
def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration =
flatten(defaultConfiguration in p get data) getOrElse Configurations.Default
def flatten[T](o: Option[Option[T]]): Option[T] = o flatMap idFun
val sbtIvySnapshots: URLRepository = Resolver.sbtIvyRepo("snapshots")
@ -2866,7 +2867,7 @@ object Classpaths {
up.filter(configurationFilter(config.name) && artifactFilter(`type` = jarTypes))
.toSeq
.map {
case (conf, module, art, file) =>
case (_, module, art, file) =>
Attributed(file)(
AttributeMap.empty
.put(artifact.key, art)
@ -3126,13 +3127,16 @@ trait BuildExtra extends BuildCommon with DefExtra {
file.value,
managedScalaInstance.value)
def externalPom(file: Initialize[File] = inBase("pom.xml"),
iScala: Initialize[Option[ScalaModuleInfo]] = scalaModuleInfo)
: Setting[Task[ModuleSettings]] =
moduleSettings := PomConfiguration(ivyValidate.value,
scalaModuleInfo.value,
file.value,
managedScalaInstance.value)
def externalPom(
file: Initialize[File] = inBase("pom.xml"),
iScala: Initialize[Option[ScalaModuleInfo]] = scalaModuleInfo,
): Setting[Task[ModuleSettings]] =
moduleSettings := PomConfiguration(
ivyValidate.value,
iScala.value,
file.value,
managedScalaInstance.value,
)
def runInputTask(config: Configuration,
mainClass: String,
@ -3161,7 +3165,10 @@ trait BuildExtra extends BuildCommon with DefExtra {
config: Configuration,
mainClass: String,
baseArguments: String*): Vector[Setting[_]] = {
// Use Def.inputTask with the `Def.spaceDelimited()` parser
// TODO: Re-write to avoid InputTask.apply which is deprecated
// I tried "Def.spaceDelimited().parsed" (after importing Def.parserToInput)
// but it broke actions/run-task
// Maybe it needs to be defined inside a Def.inputTask?
def inputTask[T](f: TaskKey[Seq[String]] => Initialize[Task[T]]): Initialize[InputTask[T]] =
InputTask.apply(Def.value((s: State) => Def.spaceDelimited()))(f)
@ -3216,7 +3223,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
trait DefExtra {
private[this] val ts: TaskSequential = new TaskSequential {}
implicit def toTaskSequential(d: Def.type): TaskSequential = ts
implicit def toTaskSequential(@deprecated("unused", "") d: Def.type): TaskSequential = ts
}
trait BuildCommon {
@ -3224,7 +3231,7 @@ trait BuildCommon {
/**
* Allows a String to be used where a `NameFilter` is expected.
* Asterisks (`*`) in the string are interpreted as wildcards.
* All other characters must match exactly. See [[sbt.GlobFilter]].
* All other characters must match exactly. See [[sbt.io.GlobFilter]].
*/
implicit def globFilter(expression: String): NameFilter = GlobFilter(expression)

View File

@ -8,7 +8,7 @@
package sbt
import sbt.internal.{ Load, BuildStructure, TaskTimings, TaskName, GCUtil }
import sbt.internal.util.{ Attributed, ErrorHandling, HList, RMap, Signals, Types }
import sbt.internal.util.{ Attributed, ConsoleAppender, ErrorHandling, HList, RMap, Signals, Types }
import sbt.util.{ Logger, Show }
import sbt.librarymanagement.{ Resolver, UpdateReport }
@ -247,7 +247,11 @@ object EvaluateTask {
(executionRoots in Global) ::= dummyRoots
)
def evalPluginDef(log: Logger)(pluginDef: BuildStructure, state: State): PluginData = {
@deprecated("Use variant which doesn't take a logger", "1.1.1")
def evalPluginDef(log: Logger)(pluginDef: BuildStructure, state: State): PluginData =
evalPluginDef(pluginDef, state)
def evalPluginDef(pluginDef: BuildStructure, state: State): PluginData = {
val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root))
val pluginKey = pluginData
val config = extractedTaskConfig(Project.extract(state), pluginDef, state)
@ -256,7 +260,7 @@ object EvaluateTask {
val (newS, result) = evaluated getOrElse sys.error(
"Plugin data does not exist for plugin definition at " + pluginDef.root)
Project.runUnloadHooks(newS) // discard states
processResult(result, log)
processResult2(result)
}
/**
@ -296,8 +300,8 @@ object EvaluateTask {
def logIncomplete(result: Incomplete, state: State, streams: Streams): Unit = {
val all = Incomplete linearize result
val keyed = for (Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) <- all)
yield (key, msg, ex)
val keyed =
all collect { case Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) => (key, msg, ex) }
import ExceptionCategory._
for ((key, msg, Some(ex)) <- keyed) {
@ -312,7 +316,7 @@ object EvaluateTask {
for ((key, msg, ex) <- keyed if (msg.isDefined || ex.isDefined)) {
val msgString = (msg.toList ++ ex.toList.map(ErrorHandling.reducedToString)).mkString("\n\t")
val log = getStreams(key, streams).log
val display = contextDisplay(state, log.ansiCodesSupported)
val display = contextDisplay(state, ConsoleAppender.formatEnabledInEnv)
log.error("(" + display.show(key) + ") " + msgString)
}
}
@ -433,12 +437,21 @@ object EvaluateTask {
case in @ Incomplete(Some(node: Task[_]), _, _, _, _) => in.copy(node = transformNode(node))
case i => i
}
type AnyCyclic = Execute[({ type A[_] <: AnyRef })#A]#CyclicException[_]
def convertCyclicInc: Incomplete => Incomplete = {
case in @ Incomplete(_, _, _, _, Some(c: AnyCyclic)) =>
case in @ Incomplete(
_,
_,
_,
_,
Some(c: Execute[({ type A[_] <: AnyRef })#A @unchecked]#CyclicException[_])
) =>
in.copy(directCause = Some(new RuntimeException(convertCyclic(c))))
case i => i
}
def convertCyclic(c: AnyCyclic): String =
(c.caller, c.target) match {
case (caller: Task[_], target: Task[_]) =>
@ -448,7 +461,7 @@ object EvaluateTask {
}
def liftAnonymous: Incomplete => Incomplete = {
case i @ Incomplete(node, tpe, None, causes, None) =>
case i @ Incomplete(_, _, None, causes, None) =>
causes.find(inc => inc.node.isEmpty && (inc.message.isDefined || inc.directCause.isDefined)) match {
case Some(lift) => i.copy(directCause = lift.directCause, message = lift.message)
case None => i
@ -456,12 +469,19 @@ object EvaluateTask {
case i => i
}
@deprecated("Use processResult2 which doesn't take the unused log param", "1.1.1")
def processResult[T](result: Result[T], log: Logger, show: Boolean = false): T =
onResult(result, log) { v =>
processResult2(result, show)
def processResult2[T](result: Result[T], show: Boolean = false): T =
onResult(result) { v =>
if (show) println("Result: " + v); v
}
def onResult[T, S](result: Result[T], log: Logger)(f: T => S): S =
@deprecated("Use variant that doesn't take log", "1.1.1")
def onResult[T, S](result: Result[T], log: Logger)(f: T => S): S = onResult(result)(f)
def onResult[T, S](result: Result[T])(f: T => S): S =
result match {
case Value(v) => f(v)
case Inc(inc) => throw inc

View File

@ -54,12 +54,12 @@ final case class Extracted(structure: BuildStructure,
* See `runAggregated` for that.
*/
def runTask[T](key: TaskKey[T], state: State): (State, T) = {
val rkey = resolve(key.scopedKey)
val rkey = resolve(key)
val config = extractedTaskConfig(this, structure, state)
val value: Option[(State, Result[T])] =
EvaluateTask(structure, key.scopedKey, state, currentRef, config)
val (newS, result) = getOrError(rkey.scope, rkey.key, value)
(newS, EvaluateTask.processResult(result, newS.log))
(newS, EvaluateTask.processResult2(result))
}
/**
@ -72,22 +72,22 @@ final case class Extracted(structure: BuildStructure,
* This method requests execution of only the given task and does not aggregate execution.
*/
def runInputTask[T](key: InputKey[T], input: String, state: State): (State, T) = {
val scopedKey = ScopedKey(
val key2 = Scoped.scopedSetting(
Scope.resolveScope(Load.projectScope(currentRef), currentRef.build, rootProject)(key.scope),
key.key
)
val rkey = resolve(scopedKey)
val inputTask = get(Scoped.scopedSetting(rkey.scope, rkey.key))
val rkey = resolve(key2)
val inputTask = get(rkey)
val task = Parser.parse(input, inputTask.parser(state)) match {
case Right(t) => t
case Left(msg) => sys.error(s"Invalid programmatic input:\n$msg")
}
val config = extractedTaskConfig(this, structure, state)
EvaluateTask.withStreams(structure, state) { str =>
val nv = EvaluateTask.nodeView(state, str, rkey :: Nil)
val nv = EvaluateTask.nodeView(state, str, rkey.scopedKey :: Nil)
val (newS, result) =
EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(nv)
(newS, EvaluateTask.processResult(result, newS.log))
(newS, EvaluateTask.processResult2(result))
}
}
@ -98,27 +98,29 @@ final case class Extracted(structure: BuildStructure,
* Other axes are resolved to `Zero` if unspecified.
*/
def runAggregated[T](key: TaskKey[T], state: State): State = {
val rkey = resolve(key.scopedKey)
val rkey = resolve(key)
val keys = Aggregation.aggregate(rkey, ScopeMask(), structure.extra)
val tasks = Act.keyValues(structure)(keys)
Aggregation.runTasks(state,
structure,
tasks,
DummyTaskMap(Nil),
show = Aggregation.defaultShow(state, false))(showKey)
Aggregation.runTasks(
state,
tasks,
DummyTaskMap(Nil),
show = Aggregation.defaultShow(state, false),
)(showKey)
}
private[this] def resolve[T](key: ScopedKey[T]): ScopedKey[T] =
Project.mapScope(Scope.resolveScope(GlobalScope, currentRef.build, rootProject))(key.scopedKey)
private[this] def resolve[K <: Scoped.ScopingSetting[K] with Scoped](key: K): K =
key in Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope)
private def getOrError[T](scope: Scope, key: AttributeKey[_], value: Option[T])(
implicit display: Show[ScopedKey[_]]): T =
implicit display: Show[ScopedKey[_]]
): T =
value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
private def getOrError[T](scope: Scope, key: AttributeKey[T])(
implicit display: Show[ScopedKey[_]]): T =
structure.data.get(scope, key) getOrElse sys.error(
display.show(ScopedKey(scope, key)) + " is undefined.")
implicit display: Show[ScopedKey[_]]
): T =
getOrError(scope, key, structure.data.get(scope, key))(display)
def append(settings: Seq[Setting[_]], state: State): State = {
val appendSettings =

View File

@ -273,9 +273,9 @@ object BuiltinCommands {
case _ => si.actualVersion
}
private[this] def quiet[T](t: => T): Option[T] = try { Some(t) } catch {
case e: Exception => None
}
private[this] def quiet[T](t: => T): Option[T] =
try Some(t)
catch { case _: Exception => None }
def settingsCommand: Command =
showSettingLike(SettingsCommand,
@ -400,7 +400,7 @@ object BuiltinCommands {
// For correct behavior, we also need to re-inject a settings logger, as we'll be re-evaluating settings
val loggerInject = LogManager.settingsLogger(s)
val withLogger = newSession.appendRaw(loggerInject :: Nil)
val show = Project.showContextKey(newSession, structure)
val show = Project.showContextKey2(newSession)
val newStructure = Load.reapply(withLogger.mergeSettings, structure)(show)
Project.setProject(newSession, newStructure, s)
}
@ -424,19 +424,27 @@ object BuiltinCommands {
)(cl)
val setResult =
if (all) SettingCompletions.setAll(extracted, settings)
else SettingCompletions.setThis(s, extracted, settings, arg)
else SettingCompletions.setThis(extracted, settings, arg)
s.log.info(setResult.quietSummary)
s.log.debug(setResult.verboseSummary)
reapply(setResult.session, structure, s)
}
@deprecated("Use variant that doesn't take a State", "1.1.1")
def setThis(
s: State,
extracted: Extracted,
settings: Seq[Def.Setting[_]],
arg: String
): SetResult =
SettingCompletions.setThis(s, extracted, settings, arg)
setThis(extracted, settings, arg)
def setThis(
extracted: Extracted,
settings: Seq[Def.Setting[_]],
arg: String
): SetResult =
SettingCompletions.setThis(extracted, settings, arg)
def inspect: Command = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
case (s, (option, sk)) =>
@ -448,10 +456,10 @@ object BuiltinCommands {
Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) {
case (s, (pattern, Some(sks))) =>
val (str, _, display) = extractLast(s)
Output.lastGrep(sks, str.streams(s), pattern, printLast(s))(display)
Output.lastGrep(sks, str.streams(s), pattern, printLast)(display)
keepLastLog(s)
case (s, (pattern, None)) =>
for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast(s))
for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast)
keepLastLog(s)
}
@ -493,7 +501,7 @@ object BuiltinCommands {
lastOnly_keys <- keysParser
kvs = Act.keyValues(structure)(lastOnly_keys._2)
f <- if (lastOnly_keys._1) success(() => s)
else Aggregation.evaluatingParser(s, structure, show)(kvs)
else Aggregation.evaluatingParser(s, show)(kvs)
} yield
() => {
def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream))
@ -516,7 +524,7 @@ object BuiltinCommands {
def last: Command = Command(LastCommand, lastBrief, lastDetailed)(aggregatedKeyValueParser) {
case (s, Some(sks)) => lastImpl(s, sks, None)
case (s, None) =>
for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast(s))
for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast)
keepLastLog(s)
}
@ -525,7 +533,7 @@ object BuiltinCommands {
private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = {
val (str, _, display) = extractLast(s)
Output.last(sks, str.streams(s), printLast(s), sid)(display)
Output.last(sks, str.streams(s), printLast, sid)(display)
keepLastLog(s)
}
@ -550,7 +558,10 @@ object BuiltinCommands {
*/
def isLastOnly(s: State): Boolean = s.history.previous.forall(_.commandLine == Shell)
def printLast(s: State): Seq[String] => Unit = _ foreach println
@deprecated("Use variant that doesn't take the state", "1.1.1")
def printLast(s: State): Seq[String] => Unit = printLast
def printLast: Seq[String] => Unit = _ foreach println
def autoImports(extracted: Extracted): EvalImports =
new EvalImports(imports(extracted), "<auto-imports>")
@ -620,7 +631,7 @@ object BuiltinCommands {
val extraUpdated = Project.updateExtraBuilds(s, f)
try doLoadProject(extraUpdated, LoadAction.Current)
catch {
case e: Exception =>
case _: Exception =>
s.log.error("Project loading failed: reverting to previous state.")
Project.setExtraBuilds(s, original)
}

View File

@ -16,7 +16,7 @@ import sbt.internal.Load
import sbt.internal.CommandStrings._
import Cross.{ spacedFirst, requireSession }
import sbt.librarymanagement.VersionNumber
import Project.{ inScope }
import Project.inScope
/**
* Module responsible for plugin cross building.
@ -24,8 +24,7 @@ import Project.{ inScope }
private[sbt] object PluginCross {
lazy val pluginSwitch: Command = {
def switchParser(state: State): Parser[(String, String)] = {
val knownVersions = Nil
lazy val switchArgs = token(NotSpace.examples(knownVersions: _*)) ~ (token(
lazy val switchArgs = token(NotSpace.examples()) ~ (token(
Space ~> matched(state.combinedParser)) ?? "")
lazy val nextSpaced = spacedFirst(PluginSwitchCommand)
token(PluginSwitchCommand ~ OptSpace) flatMap { _ =>

View File

@ -111,7 +111,7 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
def extraProjects: Seq[Project] = Nil
/** The [[Project]]s to add to the current build based on an existing project. */
def derivedProjects(proj: ProjectDefinition[_]): Seq[Project] = Nil
def derivedProjects(@deprecated("unused", "") proj: ProjectDefinition[_]): Seq[Project] = Nil
private[sbt] def unary_! : Exclude = Exclude(this)
@ -224,20 +224,19 @@ object Plugins extends PluginsFunctions {
_.label
})
}
val retval = topologicalSort(selectedPlugins, log)
val retval = topologicalSort(selectedPlugins)
// log.debug(s" :: sorted deduced result: ${retval.toString}")
retval
}
}
}
}
private[sbt] def topologicalSort(ns: List[AutoPlugin], log: Logger): List[AutoPlugin] = {
// log.debug(s"sorting: ns: ${ns.toString}")
private[sbt] def topologicalSort(ns: List[AutoPlugin]): List[AutoPlugin] = {
@tailrec
def doSort(found0: List[AutoPlugin],
notFound0: List[AutoPlugin],
limit0: Int): List[AutoPlugin] = {
// log.debug(s" :: sorting:: found: ${found0.toString} not found ${notFound0.toString}")
if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically")
else if (notFound0.isEmpty) found0
else {
@ -250,6 +249,7 @@ object Plugins extends PluginsFunctions {
val (roots, nonRoots) = ns partition (_.isRoot)
doSort(roots, nonRoots, ns.size * ns.size + 1)
}
private[sbt] def translateMessage(e: LogicException) = e match {
case ic: InitialContradictions =>
s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(
@ -260,6 +260,7 @@ object Plugins extends PluginsFunctions {
case cn: CyclicNegation =>
s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}"
}
private[this] def literalsString(lits: Seq[Literal]): String =
lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString (", ")
@ -271,6 +272,7 @@ object Plugins extends PluginsFunctions {
val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}"
throw AutoPluginException(message)
}
private[this] def exclusionConflictError(requested: Plugins,
selected: Seq[AutoPlugin],
conflicting: Seq[AutoPlugin]): Unit = {
@ -360,14 +362,14 @@ ${listConflicts(conflicting)}""")
// This would handle things like !!p or !(p && z)
case Exclude(n) => hasInclude(n, p)
case And(ns) => ns.forall(n => hasExclude(n, p))
case b: Basic => false
case _: Basic => false
case Empty => false
}
private[sbt] def hasInclude(n: Plugins, p: AutoPlugin): Boolean = n match {
case `p` => true
case Exclude(n) => hasExclude(n, p)
case And(ns) => ns.forall(n => hasInclude(n, p))
case b: Basic => false
case _: Basic => false
case Empty => false
}
private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match {

View File

@ -276,13 +276,20 @@ object Project extends ProjectExtra {
showContextKey(state, None)
def showContextKey(state: State, keyNameColor: Option[String]): Show[ScopedKey[_]] =
if (isProjectLoaded(state)) showContextKey(session(state), structure(state), keyNameColor)
if (isProjectLoaded(state)) showContextKey2(session(state), keyNameColor)
else Def.showFullKey
@deprecated("Use showContextKey2 which doesn't take the unused structure param", "1.1.1")
def showContextKey(
session: SessionSettings,
structure: BuildStructure,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
showContextKey2(session, keyNameColor)
def showContextKey2(
session: SessionSettings,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
Def.showRelativeKey2(session.current, keyNameColor)
@ -402,7 +409,7 @@ object Project extends ProjectExtra {
def extract(state: State): Extracted = extract(session(state), structure(state))
private[sbt] def extract(se: SessionSettings, st: BuildStructure): Extracted =
Extracted(st, se, se.current)(showContextKey(se, st))
Extracted(st, se, se.current)(showContextKey2(se))
def getProjectForReference(ref: Reference, structure: BuildStructure): Option[ResolvedProject] =
ref match { case pr: ProjectRef => getProject(pr, structure); case _ => None }

View File

@ -104,7 +104,7 @@ object ScopeFilter {
/** Selects all scopes that apply to a single project. Zero and build-level scopes are excluded. */
def inAnyProject: ProjectFilter =
selectAxis(const { case p: ProjectRef => true; case _ => false })
selectAxis(const { case _: ProjectRef => true; case _ => false })
/** Accepts all values for the task axis except Zero. */
def inAnyTask: TaskFilter = selectAny[AttributeKey[_]]

View File

@ -63,7 +63,7 @@ object SessionVar {
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
Project.structure(state).streams(state).use(key) { s =>
try { Some(s.getInput(key, DefaultDataID).read[T]) } catch { case NonFatal(e) => None }
try { Some(s.getInput(key, DefaultDataID).read[T]) } catch { case NonFatal(_) => None }
}
def load[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =

View File

@ -21,9 +21,10 @@ import BasicCommandStrings._, BasicKeys._
private[sbt] object TemplateCommandUtil {
def templateCommand: Command =
Command(TemplateCommand, templateBrief, templateDetailed)(templateCommandParser)(runTemplate)
Command(TemplateCommand, templateBrief, templateDetailed)(_ => templateCommandParser)(
runTemplate)
private def templateCommandParser(state: State): Parser[Seq[String]] =
private def templateCommandParser: Parser[Seq[String]] =
(token(Space) ~> repsep(StringBasic, token(Space))) | (token(EOF) map (_ => Nil))
private def runTemplate(state: State, inputArg: Seq[String]): State = {

View File

@ -100,7 +100,7 @@ object Act {
conf <- configs(confAmb, defaultConfigs, proj, index)
} yield
for {
taskAmb <- taskAxis(conf, index.tasks(proj, conf), keyMap)
taskAmb <- taskAxis(index.tasks(proj, conf), keyMap)
task = resolveTask(taskAmb)
key <- key(index, proj, conf, task, keyMap)
extra <- extraAxis(keyMap, IMap.empty)
@ -161,6 +161,7 @@ object Act {
def examples(p: Parser[String], exs: Set[String], label: String): Parser[String] =
p !!! ("Expected " + label) examples exs
def examplesStrict(p: Parser[String], exs: Set[String], label: String): Parser[String] =
filterStrings(examples(p, exs, label), exs, label)
@ -168,6 +169,7 @@ object Act {
p.? map { opt =>
toAxis(opt, ifNone)
}
def toAxis[T](opt: Option[T], ifNone: ScopeAxis[T]): ScopeAxis[T] =
opt match { case Some(t) => Select(t); case None => ifNone }
@ -231,8 +233,8 @@ object Act {
// This queries the key index so tab completion will list the build-level keys.
val buildKeys: Set[String] =
proj match {
case Some(ProjectRef(uri, id)) => index.keys(Some(BuildRef(uri)), conf, task)
case _ => Set()
case Some(ProjectRef(uri, _)) => index.keys(Some(BuildRef(uri)), conf, task)
case _ => Set()
}
val keys: Set[String] = index.keys(proj, conf, task) ++ buildKeys
keyParser(keys)
@ -255,9 +257,10 @@ object Act {
optionalAxis(extras, Zero)
}
def taskAxis(d: Option[String],
tasks: Set[AttributeKey[_]],
allKnown: Map[String, AttributeKey[_]]): Parser[ParsedAxis[AttributeKey[_]]] = {
def taskAxis(
tasks: Set[AttributeKey[_]],
allKnown: Map[String, AttributeKey[_]],
): Parser[ParsedAxis[AttributeKey[_]]] = {
val taskSeq = tasks.toSeq
def taskKeys(f: AttributeKey[_] => String): Seq[(String, AttributeKey[_])] =
taskSeq.map(key => (f(key), key))
@ -380,7 +383,7 @@ object Act {
def evaluate(kvs: Seq[ScopedKey[_]]): Parser[() => State] = {
val preparedPairs = anyKeyValues(structure, kvs)
val showConfig = Aggregation.defaultShow(state, showTasks = action == ShowAction)
evaluatingParser(state, structure, showConfig)(preparedPairs) map { evaluate => () =>
evaluatingParser(state, showConfig)(preparedPairs) map { evaluate => () =>
{
val keyStrings = preparedPairs.map(pp => showKey.show(pp.key)).mkString(", ")
state.log.debug("Evaluating tasks: " + keyStrings)

View File

@ -61,11 +61,10 @@ object Aggregation {
def applyTasks[T](
s: State,
structure: BuildStructure,
ps: Values[Parser[Task[T]]],
show: ShowConfig
)(implicit display: Show[ScopedKey[_]]): Parser[() => State] =
Command.applyEffect(seqParser(ps))(ts => runTasks(s, structure, ts, DummyTaskMap(Nil), show))
Command.applyEffect(seqParser(ps))(ts => runTasks(s, ts, DummyTaskMap(Nil), show))
private def showRun[T](complete: Complete[T], show: ShowConfig)(
implicit display: Show[ScopedKey[_]]
@ -104,7 +103,6 @@ object Aggregation {
}
def runTasks[HL <: HList, T](s: State,
structure: BuildStructure,
ts: Values[Task[T]],
extra: DummyTaskMap,
show: ShowConfig)(implicit display: Show[ScopedKey[_]]): State = {
@ -128,33 +126,26 @@ object Aggregation {
key in currentRef get structure.data getOrElse true
if (get(showSuccess)) {
if (get(showTiming)) {
val msg = timingString(start, stop, "", structure.data, currentRef, log)
val msg = timingString(start, stop, structure.data, currentRef)
if (success) log.success(msg) else log.error(msg)
} else if (success)
log.success("")
}
}
private def timingString(
startTime: Long,
endTime: Long,
s: String,
data: Settings[Scope],
currentRef: ProjectRef,
log: Logger
): String = {
val format = timingFormat in currentRef get data getOrElse defaultFormat
timing(format, startTime, endTime, "", log)
timing(format, startTime, endTime)
}
def timing(
format: java.text.DateFormat,
startTime: Long,
endTime: Long,
s: String,
log: Logger
): String = {
val ss = if (s.isEmpty) "" else s + " "
def timing(format: java.text.DateFormat, startTime: Long, endTime: Long): String = {
val nowString = format.format(new java.util.Date(endTime))
"Total " + ss + "time: " + (endTime - startTime + 500) / 1000 + " s, completed " + nowString
"Total time: " + (endTime - startTime + 500) / 1000 + " s, completed " + nowString
}
def defaultFormat: DateFormat = {
@ -164,20 +155,19 @@ object Aggregation {
def applyDynamicTasks[I](
s: State,
structure: BuildStructure,
inputs: Values[InputTask[I]],
show: ShowConfig
)(implicit display: Show[ScopedKey[_]]): Parser[() => State] = {
val parsers = for (KeyValue(k, it) <- inputs)
yield it.parser(s).map(v => KeyValue(k, v))
Command.applyEffect(seq(parsers)) { roots =>
runTasks(s, structure, roots, DummyTaskMap(Nil), show)
runTasks(s, roots, DummyTaskMap(Nil), show)
}
}
def evaluatingParser(s: State, structure: BuildStructure, show: ShowConfig)(
keys: Seq[KeyValue[_]]
)(implicit display: Show[ScopedKey[_]]): Parser[() => State] = {
def evaluatingParser(s: State, show: ShowConfig)(keys: Seq[KeyValue[_]])(
implicit display: Show[ScopedKey[_]]
): Parser[() => State] = {
// to make the call sites clearer
def separate[L](in: Seq[KeyValue[_]])(
@ -210,12 +200,12 @@ object Aggregation {
val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n")
failure(s"Cannot mix input tasks with plain tasks/settings. $inputStrings $otherStrings")
} else
applyDynamicTasks(s, structure, maps(inputTasks)(castToAny), show)
applyDynamicTasks(s, maps(inputTasks)(castToAny), show)
} else {
val base =
if (tasks.isEmpty) success(() => s)
else
applyTasks(s, structure, maps(tasks)(x => success(castToAny(x))), show)
applyTasks(s, maps(tasks)(x => success(castToAny(x))), show)
base.map { res => () =>
val newState = res()
if (show.settingValues && settings.nonEmpty) printSettings(settings, show.print)

View File

@ -16,7 +16,7 @@ import sbt.internal.util.Attributed
import sbt.internal.inc.ReflectUtilities
trait BuildDef {
def projectDefinitions(baseDirectory: File): Seq[Project] = projects
def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects
def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq
// TODO: Should we grab the build core settings here or in a plugin?
def settings: Seq[Setting[_]] = Defaults.buildCore

View File

@ -172,9 +172,7 @@ final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin],
private[this] lazy val (autoPluginAutoImports, topLevelAutoPluginAutoImports) =
autoPlugins
.flatMap {
case DetectedAutoPlugin(name, ap, hasAutoImport) =>
if (hasAutoImport) Some(name)
else None
case DetectedAutoPlugin(name, _, hasAutoImport) => if (hasAutoImport) Some(name) else None
}
.partition(nonTopLevelPlugin)

View File

@ -36,9 +36,9 @@ import sbt.util.{ Level, Logger, LogExchange }
* this exchange, which could serve command request from either of the channel.
*/
private[sbt] final class CommandExchange {
private val autoStartServer = sys.props.get("sbt.server.autostart") map {
_.toLowerCase == "true"
} getOrElse true
private val autoStartServer =
sys.props get "sbt.server.autostart" forall (_.toLowerCase == "true")
private val lock = new AnyRef {}
private var server: Option[ServerInstance] = None
private var consoleChannel: Option[ConsoleChannel] = None
@ -83,7 +83,6 @@ private[sbt] final class CommandExchange {
else s
}
private def newChannelName: String = s"channel-${nextChannelId.incrementAndGet()}"
private def newNetworkName: String = s"network-${nextChannelId.incrementAndGet()}"
/**
@ -192,42 +191,24 @@ private[sbt] final class CommandExchange {
val params = toLogMessageParams(entry)
channels collect {
case c: ConsoleChannel =>
if (broadcastStringMessage) {
if (broadcastStringMessage || (entry.channelName forall (_ == c.name)))
c.publishEvent(event)
} else {
if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) {
c.publishEvent(event)
}
}
case c: NetworkChannel =>
try {
// Note that language server's LogMessageParams does not hold the execid,
// so this is weaker than the StringMessage. We might want to double-send
// in case we have a better client that can utilize the knowledge.
import sbt.internal.langserver.codec.JsonProtocol._
if (broadcastStringMessage) {
if (broadcastStringMessage || (entry.channelName contains c.name))
c.langNotify("window/logMessage", params)
} else {
if (entry.channelName == Some(c.name)) {
c.langNotify("window/logMessage", params)
}
}
} catch {
case _: IOException =>
toDel += c
}
} catch { case _: IOException => toDel += c }
}
case _ =>
channels collect {
case c: ConsoleChannel =>
c.publishEvent(event)
channels foreach {
case c: ConsoleChannel => c.publishEvent(event)
case c: NetworkChannel =>
try {
c.publishEvent(event)
} catch {
case _: IOException =>
toDel += c
}
try c.publishEvent(event)
catch { case _: IOException => toDel += c }
}
}
toDel.toList match {
@ -283,6 +264,11 @@ private[sbt] final class CommandExchange {
// fanout publishEvent
def publishEventMessage(event: EventMessage): Unit = {
val toDel: ListBuffer[CommandChannel] = ListBuffer.empty
def tryTo(x: => Unit, c: CommandChannel): Unit =
try x
catch { case _: IOException => toDel += c }
event match {
// Special treatment for ConsolePromptEvent since it's hand coded without codec.
case entry: ConsolePromptEvent =>
@ -296,32 +282,17 @@ private[sbt] final class CommandExchange {
case entry: ExecStatusEvent =>
channels collect {
case c: ConsoleChannel =>
if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) {
c.publishEventMessage(event)
}
if (entry.channelName forall (_ == c.name)) c.publishEventMessage(event)
case c: NetworkChannel =>
try {
if (entry.channelName == Some(c.name)) {
c.publishEventMessage(event)
}
} catch {
case e: IOException =>
toDel += c
}
if (entry.channelName contains c.name) tryTo(c.publishEventMessage(event), c)
}
case _ =>
channels collect {
case c: ConsoleChannel =>
c.publishEventMessage(event)
case c: NetworkChannel =>
try {
c.publishEventMessage(event)
} catch {
case _: IOException =>
toDel += c
}
case c: ConsoleChannel => c.publishEventMessage(event)
case c: NetworkChannel => tryTo(c.publishEventMessage(event), c)
}
}
toDel.toList match {
case Nil => // do nothing
case xs =>

View File

@ -36,12 +36,15 @@ private[sbt] abstract class BackgroundJob {
}
def shutdown(): Unit
// this should be true on construction and stay true until
// the job is complete
def isRunning(): Boolean
// called after stop or on spontaneous exit, closing the result
// removes the listener
def onStop(listener: () => Unit)(implicit ex: ExecutionContext): Closeable
// do we need this or is the spawning task good enough?
// def tags: SomeType
}
@ -57,8 +60,8 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
private val serviceTempDir = IO.createTemporaryDirectory
// hooks for sending start/stop events
protected def onAddJob(job: JobHandle): Unit = {}
protected def onRemoveJob(job: JobHandle): Unit = {}
protected def onAddJob(@deprecated("unused", "") job: JobHandle): Unit = ()
protected def onRemoveJob(@deprecated("unused", "") job: JobHandle): Unit = ()
// this mutable state could conceptually go on State except
// that then every task that runs a background job would have

View File

@ -8,11 +8,18 @@
package sbt
package internal
import sbt.internal.util.{ complete, AttributeEntry, AttributeKey, LineRange, MessageOnlyException, RangePosition, Settings }
import sbt.internal.util.{
AttributeEntry,
AttributeKey,
LineRange,
MessageOnlyException,
RangePosition,
Settings
}
import java.io.File
import compiler.{ Eval, EvalImports }
import complete.DefaultParsers.validID
import sbt.internal.util.complete.DefaultParsers.validID
import Def.{ ScopedKey, Setting }
import Scope.GlobalScope
import sbt.internal.parser.SbtParser
@ -37,7 +44,9 @@ private[sbt] object EvaluateConfigurations {
/**
* This represents the parsed expressions in a build sbt, as well as where they were defined.
*/
private[this] final class ParsedFile(val imports: Seq[(String, Int)], val definitions: Seq[(String, LineRange)], val settings: Seq[(String, LineRange)])
private[this] final class ParsedFile(val imports: Seq[(String, Int)],
val definitions: Seq[(String, LineRange)],
val settings: Seq[(String, LineRange)])
/** The keywords we look for when classifying a string as a definition. */
private[this] val DefinitionKeywords = Seq("lazy val ", "def ", "val ")
@ -48,18 +57,24 @@ private[sbt] object EvaluateConfigurations {
* return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has
* raw sbt-types that can be accessed and used.
*/
def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] =
{
val loadFiles = srcs.sortBy(_.getName) map { src => evaluateSbtFile(eval, src, IO.readLines(src), imports, 0) }
loader => (LoadedSbtFile.empty /: loadFiles) { (loaded, load) => loaded merge load(loader) }
def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] = {
val loadFiles = srcs.sortBy(_.getName) map { src =>
evaluateSbtFile(eval, src, IO.readLines(src), imports, 0)
}
loader =>
(LoadedSbtFile.empty /: loadFiles) { (loaded, load) =>
loaded merge load(loader)
}
}
/**
* Reads a given .sbt file and evaluates it into a sequence of setting values.
*
* Note: This ignores any non-Setting[_] values in the file.
*/
def evaluateConfiguration(eval: Eval, src: File, imports: Seq[String]): LazyClassLoaded[Seq[Setting[_]]] =
def evaluateConfiguration(eval: Eval,
src: File,
imports: Seq[String]): LazyClassLoaded[Seq[Setting[_]]] =
evaluateConfiguration(eval, src, IO.readLines(src), imports, 0)
/**
@ -68,13 +83,16 @@ private[sbt] object EvaluateConfigurations {
*
* @param builtinImports The set of import statements to add to those parsed in the .sbt file.
*/
private[this] def parseConfiguration(file: File, lines: Seq[String], builtinImports: Seq[String], offset: Int): ParsedFile =
{
val (importStatements, settingsAndDefinitions) = splitExpressions(file, lines)
val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements)
val (definitions, settings) = splitSettingsDefinitions(addOffsetToRange(offset, settingsAndDefinitions))
new ParsedFile(allImports, definitions, settings)
}
private[this] def parseConfiguration(file: File,
lines: Seq[String],
builtinImports: Seq[String],
offset: Int): ParsedFile = {
val (importStatements, settingsAndDefinitions) = splitExpressions(file, lines)
val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements)
val (definitions, settings) = splitSettingsDefinitions(
addOffsetToRange(offset, settingsAndDefinitions))
new ParsedFile(allImports, definitions, settings)
}
/**
* Evaluates a parsed sbt configuration file.
@ -86,11 +104,15 @@ private[sbt] object EvaluateConfigurations {
*
* @return Just the Setting[_] instances defined in the .sbt file.
*/
def evaluateConfiguration(eval: Eval, file: File, lines: Seq[String], imports: Seq[String], offset: Int): LazyClassLoaded[Seq[Setting[_]]] =
{
val l = evaluateSbtFile(eval, file, lines, imports, offset)
loader => l(loader).settings
}
def evaluateConfiguration(eval: Eval,
file: File,
lines: Seq[String],
imports: Seq[String],
offset: Int): LazyClassLoaded[Seq[Setting[_]]] = {
val l = evaluateSbtFile(eval, file, lines, imports, offset)
loader =>
l(loader).settings
}
/**
* Evaluates a parsed sbt configuration file.
@ -102,27 +124,33 @@ private[sbt] object EvaluateConfigurations {
* @return A function which can take an sbt classloader and return the raw types/configuration
* which was compiled/parsed for the given file.
*/
private[sbt] def evaluateSbtFile(eval: Eval, file: File, lines: Seq[String], imports: Seq[String], offset: Int): LazyClassLoaded[LoadedSbtFile] =
{
// TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do
// detection for which project project manipulations should be applied.
val name = file.getPath
val parsed = parseConfiguration(file, lines, imports, offset)
val (importDefs, definitions) =
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty) else {
val definitions = evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil)
(imp, DefinedSbtValues(definitions))
}
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports
val dslEntries = parsed.settings map {
case (dslExpression, range) =>
evaluateDslEntry(eval, name, allImports, dslExpression, range)
private[sbt] def evaluateSbtFile(eval: Eval,
file: File,
lines: Seq[String],
imports: Seq[String],
offset: Int): LazyClassLoaded[LoadedSbtFile] = {
// TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do
// detection for which project project manipulations should be applied.
val name = file.getPath
val parsed = parseConfiguration(file, lines, imports, offset)
val (importDefs, definitions) =
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty)
else {
val definitions =
evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil)
(imp, DefinedSbtValues(definitions))
}
eval.unlinkDeferred()
// Tracks all the files we generated from evaluating the sbt file.
val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated))
loader => {
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports
val dslEntries = parsed.settings map {
case (dslExpression, range) =>
evaluateDslEntry(eval, name, allImports, dslExpression, range)
}
eval.unlinkDeferred()
// Tracks all the files we generated from evaluating the sbt file.
val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated))
loader =>
{
val projects =
definitions.values(loader).collect {
case p: Project => resolveBase(file.getParentFile, p)
@ -140,9 +168,14 @@ private[sbt] object EvaluateConfigurations {
case DslEntry.ProjectManipulation(f) => f
}
// TODO -get project manipulations.
new LoadedSbtFile(settings, projects, importDefs, manipulations, definitions, allGeneratedFiles)
new LoadedSbtFile(settings,
projects,
importDefs,
manipulations,
definitions,
allGeneratedFiles)
}
}
}
/** move a project to be relative to this file after we've evaluated it. */
private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base))
@ -173,11 +206,19 @@ private[sbt] object EvaluateConfigurations {
* @return A method that given an sbt classloader, can return the actual [[sbt.internal.DslEntry]] defined by
* the expression, and the sequence of .class files generated.
*/
private[sbt] def evaluateDslEntry(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): TrackedEvalResult[DslEntry] = {
private[sbt] def evaluateDslEntry(eval: Eval,
name: String,
imports: Seq[(String, Int)],
expression: String,
range: LineRange): TrackedEvalResult[DslEntry] = {
// TODO - Should we try to namespace these between.sbt files? IF they hash to the same value, they may actually be
// exactly the same setting, so perhaps we don't care?
val result = try {
eval.eval(expression, imports = new EvalImports(imports, name), srcName = name, tpeName = Some(SettingsDefinitionName), line = range.start)
eval.eval(expression,
imports = new EvalImports(imports, name),
srcName = name,
tpeName = Some(SettingsDefinitionName),
line = range.start)
} catch {
case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage)
}
@ -206,7 +247,11 @@ private[sbt] object EvaluateConfigurations {
*/
// Build DSL now includes non-Setting[_] type settings.
// Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
def evaluateSetting(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): LazyClassLoaded[Seq[Setting[_]]] =
def evaluateSetting(eval: Eval,
name: String,
imports: Seq[(String, Int)],
expression: String,
range: LineRange): LazyClassLoaded[Seq[Setting[_]]] =
evaluateDslEntry(eval, name, imports, expression, range).result andThen {
case DslEntry.ProjectSettings(values) => values
case _ => Nil
@ -216,44 +261,59 @@ private[sbt] object EvaluateConfigurations {
* Splits a set of lines into (imports, expressions). That is,
* anything on the right of the tuple is a scala expression (definition or setting).
*/
private[sbt] def splitExpressions(file: File, lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)]) =
{
val split = SbtParser(file, lines)
// TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different
// scala compiler rather than re-parsing.
(split.imports, split.settings)
}
private[sbt] def splitExpressions(
file: File,
lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)]) = {
val split = SbtParser(file, lines)
// TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different
// scala compiler rather than re-parsing.
(split.imports, split.settings)
}
private[this] def splitSettingsDefinitions(lines: Seq[(String, LineRange)]): (Seq[(String, LineRange)], Seq[(String, LineRange)]) =
lines partition { case (line, range) => isDefinition(line) }
private[this] def splitSettingsDefinitions(
lines: Seq[(String, LineRange)]): (Seq[(String, LineRange)], Seq[(String, LineRange)]) =
lines partition { case (line, _) => isDefinition(line) }
private[this] def isDefinition(line: String): Boolean =
{
val trimmed = line.trim
DefinitionKeywords.exists(trimmed startsWith _)
}
private[this] def isDefinition(line: String): Boolean = {
val trimmed = line.trim
DefinitionKeywords.exists(trimmed startsWith _)
}
private[this] def extractedValTypes: Seq[String] =
Seq(classOf[Project], classOf[InputKey[_]], classOf[TaskKey[_]], classOf[SettingKey[_]]).map(_.getName)
Seq(classOf[Project], classOf[InputKey[_]], classOf[TaskKey[_]], classOf[SettingKey[_]])
.map(_.getName)
private[this] def evaluateDefinitions(eval: Eval, name: String, imports: Seq[(String, Int)], definitions: Seq[(String, LineRange)], file: Option[File]): compiler.EvalDefinitions =
{
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(convertedRanges, new EvalImports(imports, name), name, file, extractedValTypes)
}
private[this] def evaluateDefinitions(eval: Eval,
name: String,
imports: Seq[(String, Int)],
definitions: Seq[(String, LineRange)],
file: Option[File]): compiler.EvalDefinitions = {
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(convertedRanges,
new EvalImports(imports, name),
name,
file,
extractedValTypes)
}
}
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] =
{
// AttributeEntry + the checked type test 'value: Task[_]' ensures that the cast is correct.
// (scalac couldn't determine that 'key' is of type AttributeKey[Task[_]] on its own and a type match still required the cast)
val pairs = for (scope <- data.scopes; AttributeEntry(key, value: Task[_]) <- data.data(scope).entries) yield (value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]])) // unclear why this cast is needed even with a type test in the above filter
pairs.toMap[Task[_], ScopedKey[Task[_]]]
}
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = {
val pairs = data.scopes flatMap (scope =>
data.data(scope).entries collect {
case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]]))
})
pairs.toMap[Task[_], ScopedKey[Task[_]]]
}
def allKeys(settings: Seq[Setting[_]]): Set[ScopedKey[_]] =
settings.flatMap(s => if (s.key.key.isLocal) Nil else s.key +: s.dependencies).filter(!_.key.isLocal).toSet
settings
.flatMap(s => if (s.key.key.isLocal) Nil else s.key +: s.dependencies)
.filter(!_.key.isLocal)
.toSet
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[_]] =
settings.data.values.flatMap(_.keys).toSet[AttributeKey[_]]
@ -261,30 +321,36 @@ object Index {
def stringToKeyMap(settings: Set[AttributeKey[_]]): Map[String, AttributeKey[_]] =
stringToKeyMap0(settings)(_.label)
private[this] def stringToKeyMap0(settings: Set[AttributeKey[_]])(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] =
{
val multiMap = settings.groupBy(label)
val duplicates = multiMap collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } collect { case (k, xs) if xs.size > 1 => (k, xs) }
if (duplicates.isEmpty)
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
else
sys.error(duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", ""))
private[this] def stringToKeyMap0(settings: Set[AttributeKey[_]])(
label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = {
val multiMap = settings.groupBy(label)
val duplicates = multiMap collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } collect {
case (k, xs) if xs.size > 1 => (k, xs)
}
if (duplicates.isEmpty)
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
else
sys.error(
duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", ""))
}
private[this]type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
def triggers(ss: Settings[Scope]): Triggers[Task] =
{
val runBefore = new TriggerMap
val triggeredBy = new TriggerMap
for ((_, amap) <- ss.data; AttributeEntry(_, value: Task[_]) <- amap.entries) {
val as = value.info.attributes
update(runBefore, value, as get Keys.runBefore)
update(triggeredBy, value, as get Keys.triggeredBy)
def triggers(ss: Settings[Scope]): Triggers[Task] = {
val runBefore = new TriggerMap
val triggeredBy = new TriggerMap
ss.data.values foreach (
_.entries foreach {
case AttributeEntry(_, value: Task[_]) =>
val as = value.info.attributes
update(runBefore, value, as get Keys.runBefore)
update(triggeredBy, value, as get Keys.triggeredBy)
case _ => ()
}
val onComplete = Keys.onComplete in GlobalScope get ss getOrElse { () => () }
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
}
)
val onComplete = Keys.onComplete in GlobalScope get ss getOrElse (() => ())
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
}
private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit =
for (tasks <- tasksOpt; task <- tasks)

View File

@ -94,7 +94,7 @@ object GlobalPlugin {
val nv = nodeView(state, str, roots)
val config = EvaluateTask.extractedTaskConfig(Project.extract(state), structure, state)
val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(nv)
(newS, processResult(result, newS.log))
(newS, processResult2(result))
}
}
val globalPluginSettings = Project.inScope(Scope.GlobalScope in LocalRootProject)(

View File

@ -36,18 +36,12 @@ private[sbt] object LibraryManagement {
): UpdateReport = {
/* Resolve the module settings from the inputs. */
def resolve(inputs: UpdateInputs): UpdateReport = {
def resolve: UpdateReport = {
import sbt.util.ShowLines._
log.info(s"Updating $label...")
val reportOrUnresolved: Either[UnresolvedWarning, UpdateReport] =
//try {
lm.update(module, updateConfig, uwConfig, log)
// } catch {
// case e: Throwable =>
// e.printStackTrace
// throw e
// }
val report = reportOrUnresolved match {
case Right(report0) => report0
case Left(unresolvedWarning) =>
@ -95,12 +89,12 @@ private[sbt] object LibraryManagement {
import sbt.librarymanagement.LibraryManagementCodec._
val cachedResolve = Tracked.lastOutput[UpdateInputs, UpdateReport](cache) {
case (_, Some(out)) if upToDate(inChanged, out) => markAsCached(out)
case _ => resolve(updateInputs)
case _ => resolve
}
import scala.util.control.Exception.catching
catching(classOf[NullPointerException], classOf[OutOfMemoryError])
.withApply { t =>
val resolvedAgain = resolve(updateInputs)
val resolvedAgain = resolve
val culprit = t.getClass.getSimpleName
log.warn(s"Update task caching failed due to $culprit.")
log.warn("Report the following output to sbt:")

View File

@ -61,7 +61,7 @@ private[sbt] object Load {
val globalBase = getGlobalBase(state)
val base = baseDirectory.getCanonicalFile
val rawConfig = defaultPreGlobal(state, base, globalBase, log)
val config0 = defaultWithGlobal(state, base, rawConfig, globalBase, log)
val config0 = defaultWithGlobal(state, base, rawConfig, globalBase)
val config =
if (isPlugin) enableSbtPlugin(config0) else config0.copy(extraBuilds = topLevelExtras)
(base, config)
@ -109,7 +109,7 @@ private[sbt] object Load {
javaHome = None,
scalac
)
val evalPluginDef = EvaluateTask.evalPluginDef(log) _
val evalPluginDef: (BuildStructure, State) => PluginData = EvaluateTask.evalPluginDef _
val delegates = defaultDelegates
val pluginMgmt = PluginManagement(loader)
val inject = InjectSettings(injectGlobal(state), Nil, const(Nil))
@ -145,7 +145,6 @@ private[sbt] object Load {
base: File,
rawConfig: LoadBuildConfiguration,
globalBase: File,
log: Logger
): LoadBuildConfiguration = {
val globalPluginsDir = getGlobalPluginsDirectory(state, globalBase)
val withGlobal = loadGlobal(state, base, globalPluginsDir, rawConfig)

View File

@ -15,6 +15,7 @@ import Keys.{ logLevel, logManager, persistLogLevel, persistTraceLevel, sLog, tr
import scala.Console.{ BLUE, RESET }
import sbt.internal.util.{
AttributeKey,
ConsoleAppender,
ConsoleOut,
Settings,
SuppressedTraceContext,
@ -105,7 +106,7 @@ object LogManager {
def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[_]): ManagedLogger = {
val console = screen(task, state)
LogManager.backgroundLog(data, state, task, console, relay(()), extra(task).toList)
LogManager.backgroundLog(data, state, task, console, relay(()))
}
}
@ -191,7 +192,6 @@ object LogManager {
console: Appender,
/* TODO: backed: Appender,*/
relay: Appender,
extra: List[Appender]
): ManagedLogger = {
val scope = task.scope
val screenLevel = getOr(logLevel.key, data, scope, state, Level.Info)
@ -253,7 +253,7 @@ object LogManager {
private[this] def slog: Logger =
Option(ref.get) getOrElse sys.error("Settings logger used after project was loaded.")
override val ansiCodesSupported = slog.ansiCodesSupported
override val ansiCodesSupported = ConsoleAppender.formatEnabledInEnv
override def trace(t: => Throwable) = slog.trace(t)
override def success(message: => String) = slog.success(message)
override def log(level: Level.Value, message: => String) = slog.log(level, message)

View File

@ -57,7 +57,7 @@ private[sbt] class PluginsDebug(
if (possible.nonEmpty) {
val explained = possible.map(explainPluginEnable)
val possibleString =
if (explained.size > 1)
if (explained.lengthCompare(1) > 0)
explained.zipWithIndex
.map { case (s, i) => s"$i. $s" }
.mkString(s"Multiple plugins are available that can provide $notFoundKey:\n", "\n", "")
@ -111,7 +111,7 @@ private[sbt] class PluginsDebug(
}
private[this] def multi(strs: Seq[String]): String =
strs.mkString(if (strs.size > 4) "\n\t" else ", ")
strs.mkString(if (strs.lengthCompare(4) > 0) "\n\t" else ", ")
}
private[sbt] object PluginsDebug {
@ -377,7 +377,7 @@ private[sbt] object PluginsDebug {
def explainPluginEnable(ps: PluginEnable): String =
ps match {
case PluginRequirements(plugin,
context,
_,
blockingExcludes,
enablingPlugins,
extraEnabledPlugins,
@ -393,9 +393,8 @@ private[sbt] object PluginsDebug {
note(willRemove(plugin, toBeRemoved.toList)) ::
Nil
parts.filterNot(_.isEmpty).mkString("\n")
case PluginImpossible(plugin, context, contradictions) =>
pluginImpossible(plugin, contradictions)
case PluginActivated(plugin, context) => s"Plugin ${plugin.label} already activated."
case PluginImpossible(plugin, _, contradictions) => pluginImpossible(plugin, contradictions)
case PluginActivated(plugin, _) => s"Plugin ${plugin.label} already activated."
}
/**

View File

@ -26,7 +26,7 @@ class RelayAppender(name: String)
val level = ConsoleAppender.toLevel(event.getLevel)
val message = event.getMessage
message match {
case o: ObjectMessage => appendEvent(level, o.getParameter)
case o: ObjectMessage => appendEvent(o.getParameter)
case p: ParameterizedMessage => appendLog(level, p.getFormattedMessage)
case r: RingBufferLogEvent => appendLog(level, r.getFormattedMessage)
case _ => appendLog(level, message.toString)
@ -35,7 +35,7 @@ class RelayAppender(name: String)
def appendLog(level: Level.Value, message: => String): Unit = {
exchange.publishEventMessage(LogEvent(level.toString, message))
}
def appendEvent(level: Level.Value, event: AnyRef): Unit =
def appendEvent(event: AnyRef): Unit =
event match {
case x: StringEvent => {
import JsonProtocol._

View File

@ -15,7 +15,7 @@ import sbt.librarymanagement.Configuration
import Project._
import Def.{ ScopedKey, Setting }
import Scope.Global
import Types.{ const, idFun }
import Types.idFun
import complete._
import DefaultParsers._
@ -64,11 +64,10 @@ private[sbt] object SettingCompletions {
setResult(session, r, redefined)
}
/** Implementation of the `set` command that will reload the current project with `settings` appended to the current settings. */
def setThis(s: State,
extracted: Extracted,
settings: Seq[Def.Setting[_]],
arg: String): SetResult = {
/** Implementation of the `set` command that will reload the current project with `settings`
* appended to the current settings.
*/
def setThis(extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String): SetResult = {
import extracted._
val append =
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
@ -82,16 +81,19 @@ private[sbt] object SettingCompletions {
private[this] def setResult(
session: SessionSettings,
r: Relation[ScopedKey[_], ScopedKey[_]],
redefined: Seq[Setting[_]])(implicit show: Show[ScopedKey[_]]): SetResult = {
redefined: Seq[Setting[_]],
)(implicit show: Show[ScopedKey[_]]): SetResult = {
val redefinedKeys = redefined.map(_.key).toSet
val affectedKeys = redefinedKeys.flatMap(r.reverse)
def summary(verbose: Boolean): String = setSummary(redefinedKeys, affectedKeys, verbose)
new SetResult(session, summary(true), summary(false))
}
private[this] def setSummary(redefined: Set[ScopedKey[_]],
affected: Set[ScopedKey[_]],
verbose: Boolean)(implicit display: Show[ScopedKey[_]]): String = {
private[this] def setSummary(
redefined: Set[ScopedKey[_]],
affected: Set[ScopedKey[_]],
verbose: Boolean,
)(implicit display: Show[ScopedKey[_]]): String = {
val QuietLimit = 3
def strings(in: Set[ScopedKey[_]]): Seq[String] = in.toSeq.map(sk => display.show(sk)).sorted
def lines(in: Seq[String]): (String, Boolean) =
@ -129,17 +131,17 @@ private[sbt] object SettingCompletions {
* when there are fewer choices or tab is pressed multiple times.
* The last part of the completion will generate a template for the value or function literal that will initialize the setting or task.
*/
def settingParser(settings: Settings[Scope],
rawKeyMap: Map[String, AttributeKey[_]],
context: ResolvedProject): Parser[String] = {
val keyMap
: Map[String, AttributeKey[_]] = rawKeyMap.map { case (k, v) => (keyScalaID(k), v) }.toMap
def inputScopedKey(pred: AttributeKey[_] => Boolean): Parser[ScopedKey[_]] =
scopedKeyParser(keyMap.filter { case (_, k) => pred(k) }, settings, context)
def settingParser(
settings: Settings[Scope],
rawKeyMap: Map[String, AttributeKey[_]],
context: ResolvedProject,
): Parser[String] = {
val keyMap: Map[String, AttributeKey[_]] =
rawKeyMap.map { case (k, v) => (keyScalaID(k), v) }.toMap
val full = for {
defineKey <- scopedKeyParser(keyMap, settings, context)
a <- assign(defineKey)
_ <- valueParser(defineKey, a, inputScopedKey(keyFilter(defineKey.key)))
_ <- valueParser(defineKey, a)
} yield
() // parser is currently only for completion and the parsed data structures are not used
@ -167,9 +169,7 @@ private[sbt] object SettingCompletions {
* Parser for the initialization expression for the assignment method `assign` on the key `sk`.
* `scopedKeyP` is used to parse and complete the input keys for an initialization that depends on other keys.
*/
def valueParser(sk: ScopedKey[_],
assign: Assign.Value,
scopedKeyP: Parser[ScopedKey[_]]): Parser[Seq[ScopedKey[_]]] = {
def valueParser(sk: ScopedKey[_], assign: Assign.Value): Parser[Seq[ScopedKey[_]]] = {
val fullTypeString = keyTypeString(sk.key)
val typeString = if (assignNoAppend(assign)) fullTypeString else "..."
if (assign == Assign.Update) {
@ -181,14 +181,6 @@ private[sbt] object SettingCompletions {
}
}
/**
* For a setting definition `definingKey <<= (..., in, ...) { ... }`,
* `keyFilter(definingKey)(in)` returns true when `in` is an allowed input for `definingKey` based on whether they are settings or not.
* For example, if `definingKey` is for a setting, `in` may only be a setting itself.
*/
def keyFilter(definingKey: AttributeKey[_]): AttributeKey[_] => Boolean =
if (isSetting(definingKey)) isSetting _ else isTaskOrSetting _
/**
* Parser for a Scope for a `key` given the current project `context` and evaluated `settings`.
* The completions are restricted to be more useful. Currently, this parser will suggest
@ -202,17 +194,20 @@ private[sbt] object SettingCompletions {
val definedScopes = data.toSeq flatMap {
case (scope, attrs) => if (attrs contains key) scope :: Nil else Nil
}
scope(key, allScopes, definedScopes, context)
scope(allScopes, definedScopes, context)
}
private[this] def scope(key: AttributeKey[_],
allScopes: Seq[Scope],
definedScopes: Seq[Scope],
context: ResolvedProject): Parser[Scope] = {
def axisParser[T](axis: Scope => ScopeAxis[T],
name: T => String,
description: T => Option[String],
label: String): Parser[ScopeAxis[T]] = {
private[this] def scope(
allScopes: Seq[Scope],
definedScopes: Seq[Scope],
context: ResolvedProject,
): Parser[Scope] = {
def axisParser[T](
axis: Scope => ScopeAxis[T],
name: T => String,
description: T => Option[String],
label: String,
): Parser[ScopeAxis[T]] = {
def getChoice(s: Scope): Seq[(String, T)] = axis(s) match {
case Select(t) => (name(t), t) :: Nil
case _ => Nil
@ -220,19 +215,23 @@ private[sbt] object SettingCompletions {
def getChoices(scopes: Seq[Scope]): Map[String, T] = scopes.flatMap(getChoice).toMap
val definedChoices: Set[String] =
definedScopes.flatMap(s => axis(s).toOption.map(name)).toSet
val fullChoices: Map[String, T] = getChoices(allScopes.toSeq)
val fullChoices: Map[String, T] = getChoices(allScopes)
val completions = fixedCompletions { (seen, level) =>
completeScope(seen, level, definedChoices, fullChoices)(description).toSet
}
Act.optionalAxis(inParser ~> token(Space) ~> token(scalaID(fullChoices, label), completions),
This)
Act.optionalAxis(
inParser ~> token(Space) ~> token(scalaID(fullChoices, label), completions),
This,
)
}
val configurations: Map[String, Configuration] =
context.configurations.map(c => (configScalaID(c.name), c)).toMap
val configParser = axisParser[ConfigKey](_.config,
c => configScalaID(c.name),
ck => configurations.get(ck.name).map(_.description),
"configuration")
val configParser = axisParser[ConfigKey](
_.config,
c => configScalaID(c.name),
ck => configurations.get(ck.name).map(_.description),
"configuration",
)
val taskParser =
axisParser[AttributeKey[_]](_.task, k => keyScalaID(k.label), _.description, "task")
val nonGlobal = (configParser ~ taskParser) map { case (c, t) => Scope(This, c, t, Zero) }
@ -242,8 +241,8 @@ private[sbt] object SettingCompletions {
/** Parser for the assignment method (such as `:=`) for defining `key`. */
def assign(key: ScopedKey[_]): Parser[Assign.Value] = {
val completions = fixedCompletions { (seen, level) =>
completeAssign(seen, level, key).toSet
val completions = fixedCompletions { (seen, _) =>
completeAssign(seen, key).toSet
}
val identifier = Act.filterStrings(Op, Assign.values.map(_.toString), "assignment method") map Assign.withName
token(Space) ~> token(optionallyQuoted(identifier), completions)
@ -267,7 +266,7 @@ private[sbt] object SettingCompletions {
* Completions for an assignment method for `key` given the tab completion `level` and existing partial string `seen`.
* This will filter possible assignment methods based on the underlying type of `key`, so that only `<<=` is shown for input tasks, for example.
*/
def completeAssign(seen: String, level: Int, key: ScopedKey[_]): Seq[Completion] = {
def completeAssign(seen: String, key: ScopedKey[_]): Seq[Completion] = {
val allowed: Iterable[Assign.Value] =
if (appendable(key.key)) Assign.values
else assignNoAppend
@ -284,7 +283,7 @@ private[sbt] object SettingCompletions {
prominentCutoff: Int,
detailLimit: Int): Seq[Completion] =
completeSelectDescribed(seen, level, keys, detailLimit)(_.description) {
case (k, v) => v.rank <= prominentCutoff
case (_, v) => v.rank <= prominentCutoff
}
def completeScope[T](
@ -293,17 +292,17 @@ private[sbt] object SettingCompletions {
definedChoices: Set[String],
allChoices: Map[String, T])(description: T => Option[String]): Seq[Completion] =
completeSelectDescribed(seen, level, allChoices, 10)(description) {
case (k, v) => definedChoices(k)
case (k, _) => definedChoices(k)
}
def completeSelectDescribed[T](seen: String, level: Int, all: Map[String, T], detailLimit: Int)(
description: T => Option[String])(prominent: (String, T) => Boolean): Seq[Completion] = {
val applicable = all.toSeq.filter { case (k, v) => k startsWith seen }
val applicable = all.toSeq.filter { case (k, _) => k startsWith seen }
val prominentOnly = applicable filter { case (k, v) => prominent(k, v) }
val showAll = (level >= 3) || (level == 2 && prominentOnly.size <= detailLimit) || prominentOnly.isEmpty
val showAll = (level >= 3) || (level == 2 && prominentOnly.lengthCompare(detailLimit) <= 0) || prominentOnly.isEmpty
val showKeys = if (showAll) applicable else prominentOnly
val showDescriptions = (level >= 2) || (showKeys.size <= detailLimit)
val showDescriptions = (level >= 2) || showKeys.lengthCompare(detailLimit) <= 0
completeDescribed(seen, showDescriptions, showKeys)(s => description(s).toList.mkString)
}
def completeDescribed[T](seen: String, showDescriptions: Boolean, in: Seq[(String, T)])(
@ -315,14 +314,11 @@ private[sbt] object SettingCompletions {
val withDescriptions = in map { case (id, key) => (id, description(key)) }
val padded = CommandUtil.aligned("", " ", withDescriptions)
(padded, in).zipped.map {
case (line, (id, key)) =>
case (line, (id, _)) =>
Completion.tokenDisplay(append = appendString(id), display = line + "\n")
}
} else
in map {
case (id, key) =>
Completion.tokenDisplay(display = id, append = appendString(id))
}
in map { case (id, _) => Completion.tokenDisplay(display = id, append = appendString(id)) }
}
/**
@ -364,18 +360,6 @@ private[sbt] object SettingCompletions {
keyType(key)(mfToString, mfToString, mfToString)
}
/** True if the `key` represents an input task, false if it represents a task or setting. */
def isInputTask(key: AttributeKey[_]): Boolean =
keyType(key)(const(false), const(false), const(true))
/** True if the `key` represents a setting, false if it represents a task or an input task.*/
def isSetting(key: AttributeKey[_]): Boolean =
keyType(key)(const(true), const(false), const(false))
/** True if the `key` represents a setting or task, false if it is for an input task. */
def isTaskOrSetting(key: AttributeKey[_]): Boolean =
keyType(key)(const(true), const(true), const(false))
/** True if the `key` represents a setting or task that may be appended using an assignment method such as `+=`. */
def appendable(key: AttributeKey[_]): Boolean = {
val underlying = keyUnderlyingType(key).runtimeClass

View File

@ -99,7 +99,7 @@ object Graph {
val withBar = childLines.zipWithIndex flatMap {
case ((line, withBar), pos) if pos < (cs.size - 1) =>
(line +: withBar) map { insertBar(_, 2 * (level + 1)) }
case ((line, withBar), pos) if withBar.lastOption.getOrElse(line).trim != "" =>
case ((line, withBar), _) if withBar.lastOption.getOrElse(line).trim != "" =>
(line +: withBar) ++ Vector(twoSpaces * (level + 1))
case ((line, withBar), _) => line +: withBar
}

View File

@ -81,7 +81,7 @@ private[sbt] final class TaskTimings(shutdown: Boolean) extends ExecuteProgress[
println(s"Total time: $total $unit")
import collection.JavaConverters._
def sumTimes(in: Seq[(Task[_], Long)]) = in.map(_._2).sum
val timingsByName = timings.asScala.toSeq.groupBy { case (t, time) => mappedName(t) } mapValues (sumTimes)
val timingsByName = timings.asScala.toSeq.groupBy { case (t, _) => mappedName(t) } mapValues (sumTimes)
val times = timingsByName.toSeq
.sortBy(_._2)
.reverse

View File

@ -277,7 +277,7 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed
modifiedContent: String,
imports: Seq[Tree]
): Seq[(String, Int)] = {
val toLineRange = imports map convertImport(modifiedContent)
val toLineRange = imports map convertImport
val groupedByLineNumber = toLineRange.groupBy { case (_, lineNumber) => lineNumber }
val mergedImports = groupedByLineNumber.map {
case (l, seq) => (l, extractLine(modifiedContent, seq))
@ -286,12 +286,10 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed
}
/**
*
* @param modifiedContent - modifiedContent
* @param t - tree
* @return ((start,end),lineNumber)
* @return ((start, end), lineNumber)
*/
private def convertImport(modifiedContent: String)(t: Tree): ((Int, Int), Int) = {
private def convertImport(t: Tree): ((Int, Int), Int) = {
val lineNumber = t.pos.line - 1
((t.pos.start, t.pos.end), lineNumber)
}

View File

@ -57,10 +57,7 @@ private[sbt] object SbtRefactorings {
commands.flatMap {
case (_, command) =>
val map = toTreeStringMap(command)
map.flatMap {
case (name, statement) =>
treesToReplacements(split, name, command)
}
map.flatMap { case (name, _) => treesToReplacements(split, name, command) }
}
private def treesToReplacements(split: SbtParser, name: String, command: Seq[String]) =

View File

@ -227,7 +227,7 @@ private[sbt] object Definition {
updateCache(StandardMain.cache)(cacheFile, useBinary)
}
private[sbt] def getAnalyses(log: Logger): Future[Seq[Analysis]] = {
private[sbt] def getAnalyses: Future[Seq[Analysis]] = {
import scalacache.modes.scalaFuture._
import scala.concurrent.ExecutionContext.Implicits.global
StandardMain.cache
@ -260,7 +260,7 @@ private[sbt] object Definition {
val LspDefinitionLogHead = "lsp-definition"
import sjsonnew.support.scalajson.unsafe.CompactPrinter
log.debug(s"$LspDefinitionLogHead json request: ${CompactPrinter(jsonDefinition)}")
lazy val analyses = getAnalyses(log)
lazy val analyses = getAnalyses
val definition = getDefinition(jsonDefinition)
definition
.flatMap { definition =>

View File

@ -47,7 +47,7 @@ object Delegates extends Properties("delegates") {
}
}
property("Initial scope present with all combinations of Global axes") = allAxes(
globalCombinations)
(s, ds, _) => globalCombinations(s, ds))
property("initial scope first") = forAll { (keys: Keys) =>
allDelegates(keys) { (scope, ds) =>
@ -66,6 +66,7 @@ object Delegates extends Properties("delegates") {
all(f(s, ds, _.project), f(s, ds, _.config), f(s, ds, _.task), f(s, ds, _.extra))
}
}
def allDelegates(keys: Keys)(f: (Scope, Seq[Scope]) => Prop): Prop =
all(keys.scopes map { scope =>
val delegates = keys.env.delegates(scope)
@ -73,16 +74,20 @@ object Delegates extends Properties("delegates") {
("Delegates:\n\t" + delegates.map(scope => Scope.display(scope, "_")).mkString("\n\t")) |:
f(scope, delegates)
}: _*)
def alwaysZero(s: Scope, ds: Seq[Scope], axis: Scope => ScopeAxis[_]): Prop =
(axis(s) != Zero) ||
all(ds map { d =>
(axis(d) == Zero): Prop
}: _*)
def globalCombinations(s: Scope, ds: Seq[Scope], axis: Scope => ScopeAxis[_]): Prop = {
val mods = List[Scope => Scope](_.copy(project = Zero),
_.copy(config = Zero),
_.copy(task = Zero),
_.copy(extra = Zero))
def globalCombinations(s: Scope, ds: Seq[Scope]): Prop = {
val mods = List[Scope => Scope](
_.copy(project = Zero),
_.copy(config = Zero),
_.copy(task = Zero),
_.copy(extra = Zero),
)
val modAndIdent = mods.map(_ :: idFun[Scope] :: Nil)
def loop(cur: Scope, acc: List[Scope], rem: List[Seq[Scope => Scope]]): Seq[Scope] =

View File

@ -55,9 +55,9 @@ object ParseKey extends Properties("Key parser test") {
("Mask: " + mask) |:
("Current: " + structure.current) |:
parse(structure, string) {
case Left(err) => false
case Right(sk) if hasZeroConfig => true
case Right(sk) => sk.scope.project == Select(structure.current)
case Left(_) => false
case Right(_) if hasZeroConfig => true
case Right(sk) => sk.scope.project == Select(structure.current)
}
}
@ -70,7 +70,7 @@ object ParseKey extends Properties("Key parser test") {
("Key: " + displayPedantic(key)) |:
("Mask: " + mask) |:
parse(structure, string) {
case Left(err) => false
case Left(_) => false
case Right(sk) => sk.scope.task == Zero
}
}
@ -88,7 +88,7 @@ object ParseKey extends Properties("Key parser test") {
("Expected configuration: " + resolvedConfig.map(_.name)) |:
parse(structure, string) {
case Right(sk) => (sk.scope.config == resolvedConfig) || (sk.scope == Scope.GlobalScope)
case Left(err) => false
case Left(_) => false
}
}
@ -117,7 +117,7 @@ object ParseKey extends Properties("Key parser test") {
("Expected: " + displayFull(expected)) |:
("Mask: " + mask) |:
parse(structure, s) {
case Left(err) => false
case Left(_) => false
case Right(sk) =>
(s"${sk}.key == ${expected}.key: ${sk.key == expected.key}") |:
(s"${sk.scope} == ${expected.scope}: ${Scope.equal(sk.scope, expected.scope, mask)}") |:

View File

@ -39,18 +39,18 @@ object PluginsTest extends Specification {
}
"throw an AutoPluginException on conflicting requirements" in {
deducePlugin(S, log) must throwAn[AutoPluginException](
message = """Contradiction in enabled plugins:
- requested: sbt.AI\$S
- enabled: sbt.AI\$S, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B, sbt.AI\$A
- conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$S""")
message = s"""Contradiction in enabled plugins:
- requested: sbt.AI\\$$S
- enabled: sbt.AI\\$$S, sbt.AI\\$$Q, sbt.AI\\$$R, sbt.AI\\$$B, sbt.AI\\$$A
- conflict: sbt.AI\\$$R is enabled by sbt.AI\\$$Q; excluded by sbt.AI\\$$S""")
}
"generates a detailed report on conflicting requirements" in {
deducePlugin(T && U, log) must throwAn[AutoPluginException](message =
"""Contradiction in enabled plugins:
- requested: sbt.AI\$T && sbt.AI\$U
- enabled: sbt.AI\$U, sbt.AI\$T, sbt.AI\$A, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B
- conflict: sbt.AI\$Q is enabled by sbt.AI\$A && sbt.AI\$B; required by sbt.AI\$T, sbt.AI\$R; excluded by sbt.AI\$U
- conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$T""")
deducePlugin(T && U, log) must throwAn[AutoPluginException](
message = s"""Contradiction in enabled plugins:
- requested: sbt.AI\\$$T && sbt.AI\\$$U
- enabled: sbt.AI\\$$U, sbt.AI\\$$T, sbt.AI\\$$A, sbt.AI\\$$Q, sbt.AI\\$$R, sbt.AI\\$$B
- conflict: sbt.AI\\$$Q is enabled by sbt.AI\\$$A && sbt.AI\\$$B; required by sbt.AI\\$$T, sbt.AI\\$$R; excluded by sbt.AI\\$$U
- conflict: sbt.AI\\$$R is enabled by sbt.AI\\$$Q; excluded by sbt.AI\\$$T""")
}
}
}

View File

@ -77,8 +77,7 @@ class ErrorSpec extends AbstractSpec {
case exception: MessageOnlyException =>
val error = exception.getMessage
"""(\d+)""".r.findFirstIn(error) match {
case Some(x) =>
true
case Some(_) => true
case None =>
println(s"Number not found in $error")
false

View File

@ -122,7 +122,7 @@ object SettingQueryTest extends org.specs2.mutable.Specification {
.put(globalBaseDirectory, globalDirFile)
val config0 = defaultPreGlobal(state, baseFile, globalDirFile, state.log)
val config = defaultWithGlobal(state, baseFile, config0, globalDirFile, state.log)
val config = defaultWithGlobal(state, baseFile, config0, globalDirFile)
val buildUnit: BuildUnit = {
val loadedPlugins: LoadedPlugins =

View File

@ -1,7 +1,6 @@
object A {
def main(args: Array[String]) =
{
assert(args(0).toInt == args(1).toInt)
assert(java.lang.Boolean.getBoolean("sbt.check.forked"))
}
def main(args: Array[String]) = {
assert(args(0).toInt == args(1).toInt)
assert(java.lang.Boolean.getBoolean("sbt.check.forked"))
}
}

View File

@ -11,7 +11,7 @@ object Common {
val UpdateK1 = Command.command("UpdateK1") { st: State =>
val ex = Project extract st
import ex._
val session2 = BuiltinCommands.setThis(st, ex, Seq(k1 := {}), """k1 := {
val session2 = BuiltinCommands.setThis(ex, Seq(k1 := {}), """k1 := {
|//
|//
|}""".stripMargin).session
@ -24,7 +24,7 @@ object Common {
val UpdateK3 = Command.command("UpdateK3") { st: State =>
val ex = Project extract st
import ex._
val session2 = BuiltinCommands.setThis(st, ex, Seq(k3 := {}), """k3 := {
val session2 = BuiltinCommands.setThis(ex, Seq(k3 := {}), """k3 := {
|//
|//
|}""".stripMargin).session