Split buildfile parsing to a subproject

This commit is contained in:
Eugene Yokota 2022-08-10 10:07:14 -04:00
parent 66fa46a912
commit 81a05430bd
116 changed files with 2494 additions and 1761 deletions

145
build.sbt
View File

@ -668,7 +668,6 @@ lazy val actionsProj = (project in file("main-actions"))
name := "Actions",
libraryDependencies += sjsonNewScalaJson.value,
libraryDependencies += jline3Terminal,
libraryDependencies += eval,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// Removed unused private[sbt] nested class
@ -816,6 +815,7 @@ lazy val mainSettingsProj = (project in file("main-settings"))
commandProj,
stdTaskProj,
coreMacrosProj,
logicProj,
utilLogging,
utilCache,
utilRelation,
@ -889,11 +889,27 @@ lazy val zincLmIntegrationProj = (project in file("zinc-lm-integration"))
)
.configure(addSbtZincCompileCore, addSbtLmCore, addSbtLmIvyTest)
// New subproject housing build-file parsing/evaluation, split out of the main
// project by this commit; it compiles against the Scala compiler itself
// (scalaCompiler dependency) and only depends on mainSettingsProj.
lazy val buildFileProj = (project in file("buildfile"))
  .dependsOn(
    mainSettingsProj,
  )
  .settings(
    testedBaseSettings,
    name := "build file",
    libraryDependencies ++= Seq(scalaCompiler),
  )
  .configure(
    addSbtIO,
    addSbtLmCore,
    addSbtLmIvy,
    addSbtCompilerInterface,
    addSbtZincCompile
  )
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main"))
.enablePlugins(ContrabandPlugin)
.dependsOn(
logicProj,
actionsProj,
mainSettingsProj,
runProj,
@ -925,128 +941,8 @@ lazy val mainProj = (project in file("main"))
Test / testOptions += Tests
.Argument(TestFrameworks.ScalaCheck, "-minSuccessfulTests", "1000"),
SettingKey[Boolean]("usePipelining") := false,
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// New and changed methods on KeyIndex. internal.
exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"),
// internal
exclude[IncompatibleMethTypeProblem]("sbt.internal.*"),
// Changed signature or removed private[sbt] methods
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"),
exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.StandardMain.shutdownHook"),
exclude[DirectMissingMethodProblem]("sbt.nio.Keys.compileBinaryFileInputs"),
exclude[DirectMissingMethodProblem]("sbt.nio.Keys.compileSourceFileInputs"),
exclude[MissingClassProblem]("sbt.internal.ResourceLoaderImpl"),
exclude[IncompatibleSignatureProblem]("sbt.internal.ConfigIndex.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.Inspect.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.ProjectIndex.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.BuildIndex.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.server.BuildServerReporter.*"),
exclude[VirtualStaticMemberProblem]("sbt.internal.server.LanguageServerProtocol.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.librarymanagement.IvyXml.*"),
exclude[IncompatibleSignatureProblem]("sbt.ScriptedPlugin.*Settings"),
exclude[IncompatibleSignatureProblem]("sbt.plugins.SbtPlugin.*Settings"),
// Removed private internal classes
exclude[MissingClassProblem]("sbt.internal.ReverseLookupClassLoaderHolder$BottomClassLoader"),
exclude[MissingClassProblem](
"sbt.internal.ReverseLookupClassLoaderHolder$ReverseLookupClassLoader$ResourceLoader"
),
exclude[MissingClassProblem]("sbt.internal.ReverseLookupClassLoaderHolder$ClassLoadingLock"),
exclude[MissingClassProblem](
"sbt.internal.ReverseLookupClassLoaderHolder$ReverseLookupClassLoader"
),
exclude[MissingClassProblem]("sbt.internal.LayeredClassLoaderImpl"),
exclude[MissingClassProblem]("sbt.internal.FileManagement"),
exclude[MissingClassProblem]("sbt.internal.FileManagement$"),
exclude[MissingClassProblem]("sbt.internal.FileManagement$CopiedFileTreeRepository"),
exclude[MissingClassProblem]("sbt.internal.server.LanguageServerReporter*"),
exclude[MissingClassProblem]("sbt.internal.ExternalHooks"),
exclude[MissingClassProblem]("sbt.internal.ExternalHooks$"),
// false positives
exclude[DirectMissingMethodProblem]("sbt.plugins.IvyPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.JUnitXmlReportPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.Giter8TemplatePlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.JvmPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.SbtPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.ResolvedClasspathDependency.apply"),
exclude[DirectMissingMethodProblem]("sbt.ClasspathDependency.apply"),
exclude[IncompatibleSignatureProblem]("sbt.plugins.SemanticdbPlugin.globalSettings"),
// File -> Source
exclude[DirectMissingMethodProblem]("sbt.Defaults.cleanFilesTask"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.resourceConfigPaths"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.sourceConfigPaths"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.configPaths"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.paths"),
exclude[IncompatibleSignatureProblem]("sbt.Keys.csrPublications"),
exclude[IncompatibleSignatureProblem](
"sbt.coursierint.CoursierArtifactsTasks.coursierPublicationsTask"
),
exclude[IncompatibleSignatureProblem](
"sbt.coursierint.CoursierArtifactsTasks.coursierPublicationsTask"
),
exclude[IncompatibleSignatureProblem]("sbt.coursierint.LMCoursier.coursierConfiguration"),
exclude[IncompatibleSignatureProblem]("sbt.coursierint.LMCoursier.publicationsSetting"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inThisBuild"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inConfig"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inTask"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inScope"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inThisBuild"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inConfig"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inTask"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inScope"),
exclude[MissingTypesProblem]("sbt.internal.Load*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.Load*"),
exclude[MissingTypesProblem]("sbt.internal.server.NetworkChannel"),
// IvyConfiguration was replaced by InlineIvyConfiguration in the generic
// signature, this does not break compatibility regardless of what
// cast a compiler might have inserted based on the old signature
// since we're returning the same values as before.
exclude[IncompatibleSignatureProblem]("sbt.Classpaths.mkIvyConfiguration"),
exclude[IncompatibleMethTypeProblem]("sbt.internal.server.Definition*"),
exclude[IncompatibleTemplateDefProblem]("sbt.internal.server.LanguageServerProtocol"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.warnInsecureProtocol"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.warnInsecureProtocolInModules"),
exclude[MissingClassProblem]("sbt.internal.ExternalHooks*"),
// This seems to be a mima problem. The older constructor still exists but
// mima seems to incorrectly miss the secondary constructor that provides
// the binary compatible version.
exclude[IncompatibleMethTypeProblem]("sbt.internal.server.NetworkChannel.this"),
exclude[IncompatibleSignatureProblem]("sbt.internal.DeprecatedContinuous.taskDefinitions"),
exclude[MissingClassProblem]("sbt.internal.SettingsGraph*"),
// Tasks include non-Files, but it's ok
exclude[IncompatibleSignatureProblem]("sbt.Defaults.outputConfigPaths"),
// private[sbt]
exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedProducts"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedJarProducts"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedDependencies0"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependenciesImplTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependencyJarsImplTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.interDependencies"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.productsTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.jarProductsTask"),
exclude[DirectMissingMethodProblem]("sbt.StandardMain.cache"),
// internal logging apis,
exclude[IncompatibleSignatureProblem]("sbt.internal.LogManager*"),
exclude[MissingTypesProblem]("sbt.internal.RelayAppender"),
exclude[MissingClassProblem]("sbt.internal.TaskProgress$ProgressThread"),
// internal implementation
exclude[MissingClassProblem](
"sbt.internal.XMainConfiguration$ModifiedConfiguration$ModifiedAppProvider$ModifiedScalaProvider$"
),
// internal impl
exclude[IncompatibleSignatureProblem]("sbt.internal.Act.configIdent"),
exclude[IncompatibleSignatureProblem]("sbt.internal.Act.taskAxis"),
// private[sbt] method, used to call the correct sourcePositionMapper
exclude[DirectMissingMethodProblem]("sbt.Defaults.foldMappers"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.toAbsoluteSourceMapper"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.earlyArtifactPathSetting"),
exclude[MissingClassProblem]("sbt.internal.server.BuildServerReporter$"),
exclude[IncompatibleTemplateDefProblem]("sbt.internal.server.BuildServerReporter"),
exclude[MissingClassProblem]("sbt.internal.CustomHttp*"),
)
// mimaSettings,
// mimaBinaryIssueFilters ++= Vector(),
)
.configure(
addSbtIO,
@ -1362,6 +1258,7 @@ def allProjects =
sbtProj,
bundledLauncherProj,
sbtClientProj,
buildFileProj,
) ++ lowerUtilProjects
// These need to be cross published to 2.12 and 2.13 for Zinc

View File

@ -0,0 +1,438 @@
package sbt
package internal
import dotty.tools.dotc.ast
import dotty.tools.dotc.ast.{ tpd, untpd }
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.config.ScalaSettings
import dotty.tools.dotc.core.Contexts.{ atPhase, Context }
import dotty.tools.dotc.core.{ Flags, Names, Phases, Symbols, Types }
import dotty.tools.dotc.Driver
import dotty.tools.dotc.parsing.Parsers.Parser
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.Run
import dotty.tools.dotc.util.SourceFile
import dotty.tools.io.{ PlainDirectory, Directory, VirtualDirectory, VirtualFile }
import dotty.tools.repl.AbstractFileClassLoader
import java.net.URLClassLoader
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths, StandardOpenOption }
import java.security.MessageDigest
import scala.collection.JavaConverters.*
import scala.quoted.*
import sbt.io.Hash
/**
* - nonCpOptions - non-classpath options
* - classpath - classpath used for evaluation
* - backingDir - directory to save `*.class` files
* - mkReporter - an optional factory method to create a reporter
*/
class Eval(
    nonCpOptions: Seq[String],
    classpath: Seq[Path],
    backingDir: Option[Path],
    mkReporter: Option[() => Reporter]
):
  import Eval.*

  // Ensure the class-file cache directory exists up front.
  backingDir.foreach { dir =>
    Files.createDirectories(dir)
  }

  // NOTE(review): ':' is the Unix path-list separator; Windows would need
  // java.io.File.pathSeparator — confirm where this is expected to run.
  private val classpathString = classpath.map(_.toString).mkString(":")

  // Compiler output target: the backing directory when caching, otherwise in-memory.
  private val outputDir =
    backingDir match
      case Some(dir) => PlainDirectory(Directory(dir.toString))
      case None      => VirtualDirectory("output")

  private lazy val driver: EvalDriver = new EvalDriver

  // Diagnostics sink: caller-supplied factory, or a store-only (silent) reporter.
  private lazy val reporter = mkReporter match
    case Some(fn) => fn()
    case None     => EvalReporter.store

  /** Configures a dotty [[Driver]] whose context targets `outputDir` and `reporter`. */
  final class EvalDriver extends Driver:
    import dotty.tools.dotc.config.Settings.Setting._
    val compileCtx0 = initCtx.fresh
    // "dummy.scala" satisfies setup()'s expectation of at least one source argument.
    val options = nonCpOptions ++ Seq("-classpath", classpathString, "dummy.scala")
    val compileCtx1 = setup(options.toArray, compileCtx0) match
      case Some((_, ctx)) => ctx
      case _              => sys.error(s"initialization failed for $options")
    val compileCtx2 = compileCtx1.fresh
      .setSetting(
        compileCtx1.settings.outputDir,
        outputDir
      )
      .setReporter(reporter)
    val compileCtx = compileCtx2
    val compiler = newCompiler(using compileCtx)
  end EvalDriver

  /** Evaluates `expression` with no imports, optionally ascribing the result type `tpeName`. */
  def eval(expression: String, tpeName: Option[String]): EvalResult =
    eval(expression, noImports, tpeName, "<setting>", Eval.DefaultStartLine)

  /** Evaluates `expression` with no imports, letting the compiler infer the result type. */
  def evalInfer(expression: String): EvalResult =
    eval(expression, noImports, None, "<setting>", Eval.DefaultStartLine)

  /** Evaluates `expression` under `imports`, letting the compiler infer the result type. */
  def evalInfer(expression: String, imports: EvalImports): EvalResult =
    eval(expression, imports, None, "<setting>", Eval.DefaultStartLine)

  /**
   * Wraps `expression` in a synthetic module, compiles it, and returns an [[EvalResult]]
   * carrying the result type, the generated files, and a deferred value accessor.
   *
   * @param srcName reported source-file name, used for error positions
   * @param line    first line of the expression in the original file
   *                (NOTE(review): currently unused — position mapping relies on the
   *                synthetic header length instead; confirm this is intended)
   */
  def eval(
      expression: String,
      imports: EvalImports,
      tpeName: Option[String],
      srcName: String,
      line: Int
  ): EvalResult =
    val ev = new EvalType[String]:
      override def makeSource(moduleName: String): SourceFile =
        val returnType = tpeName match
          case Some(tpe) => s": $tpe"
          case _         => ""
        // Wrap the expression as `object <module> { def $sbtdef = { <expr> } }`.
        val header =
          imports.strings.mkString("\n") +
            s"""
|object $moduleName {
| def $WrapValName${returnType} = {""".stripMargin
        val contents = s"""$header
|$expression
| }
|}
|""".stripMargin
        val startLine = header.linesIterator.toList.size
        EvalSourceFile(srcName, startLine, contents)
      override def extract(run: Run, unit: CompilationUnit)(using ctx: Context): String =
        // Read the typed tree just after typer to recover the wrapper method's type.
        // NOTE(review): unlike evalDefinitions' extract, this does not run under
        // run.runContext — confirm the ambient ctx is the intended one.
        atPhase(Phases.typerPhase.next) {
          (new TypeExtractor).getType(unit.tpdTree)
        }
      override def read(file: Path): String =
        String(Files.readAllBytes(file), StandardCharsets.UTF_8)
      override def write(value: String, file: Path): Unit =
        Files.write(
          file,
          value.getBytes(StandardCharsets.UTF_8),
          StandardOpenOption.CREATE,
          StandardOpenOption.TRUNCATE_EXISTING
        )
      override def extraHash: String = ""
    val inter = evalCommon[String](expression :: Nil, imports, tpeName, ev)
    val valueFn = (cl: ClassLoader) => getValue[Any](inter.enclosingModule, inter.loader(cl))
    EvalResult(
      tpe = inter.extra,
      getValue = valueFn,
      generated = inter.generated,
    )
  end eval

  /** Evaluates `definitions` with no extra hash contribution. */
  def evalDefinitions(
      definitions: Seq[(String, scala.Range)],
      imports: EvalImports,
      srcName: String,
      valTypes: Seq[String],
  ): EvalDefinitions =
    evalDefinitions(definitions, imports, srcName, valTypes, "")

  /**
   * Compiles `definitions` wrapped in a synthetic module and returns an
   * [[EvalDefinitions]] listing the vals whose types match `valTypes`.
   *
   * @param extraHash extra input folded into the cache-key hash
   */
  def evalDefinitions(
      definitions: Seq[(String, scala.Range)],
      imports: EvalImports,
      srcName: String,
      valTypes: Seq[String],
      extraHash: String,
  ): EvalDefinitions =
    require(definitions.nonEmpty, "definitions to evaluate cannot be empty.")
    val extraHash0 = extraHash
    val ev = new EvalType[Seq[String]]:
      override def makeSource(moduleName: String): SourceFile =
        // Wrap the definitions as `object <module> { <defs> }`.
        val header =
          imports.strings.mkString("\n") +
            s"""
|object $moduleName {""".stripMargin
        val contents =
          s"""$header
|${definitions.map(_._1).mkString("\n")}
|}
|""".stripMargin
        val startLine = header.linesIterator.toList.size
        EvalSourceFile(srcName, startLine, contents)
      override def extract(run: Run, unit: CompilationUnit)(using ctx: Context): Seq[String] =
        atPhase(Phases.typerPhase.next) {
          (new ValExtractor(valTypes.toSet)).getVals(unit.tpdTree)
        }(using run.runContext)
      override def read(file: Path): Seq[String] =
        new String(Files.readAllBytes(file), StandardCharsets.UTF_8).linesIterator.toList
      override def write(value: Seq[String], file: Path): Unit =
        Files.write(
          file,
          value.mkString("\n").getBytes(StandardCharsets.UTF_8),
          StandardOpenOption.CREATE,
          StandardOpenOption.TRUNCATE_EXISTING
        )
      override def extraHash: String = extraHash0
    val inter = evalCommon[Seq[String]](definitions.map(_._1), imports, tpeName = Some(""), ev)
    EvalDefinitions(inter.loader, inter.generated, inter.enclosingModule, inter.extra.reverse)
  end evalDefinitions

  /**
   * Shared compile-or-load path: hashes the inputs to derive a stable module name,
   * reuses cached class files from `backingDir` on a hit, otherwise compiles via
   * [[compileAndLoad]].
   */
  private[this] def evalCommon[A](
      content: Seq[String],
      imports: EvalImports,
      tpeName: Option[String],
      ev: EvalType[A],
  ): EvalIntermediate[A] =
    import Eval.*
    // This is a hot path.
    val digester = MessageDigest.getInstance("SHA")
    content.foreach { c =>
      digester.update(bytes(c))
    }
    tpeName.foreach { tpe =>
      digester.update(bytes(tpe))
    }
    digester.update(bytes(ev.extraHash))
    val d = digester.digest()
    val hash = Hash.toHex(d)
    val moduleName = makeModuleName(hash)
    // NOTE(review): `imports` is not folded into the hash — confirm that changed
    // imports cannot produce a stale cache hit.
    val (extra, loader) = backingDir match
      case Some(backing) if classExists(backing, moduleName) =>
        // Cache hit: load classes from the backing dir, extra info from the cache file.
        val loader = (parent: ClassLoader) =>
          (new URLClassLoader(Array(backing.toUri.toURL), parent): ClassLoader)
        val extra = ev.read(cacheFile(backing, moduleName))
        (extra, loader)
      case _ => compileAndLoad(ev, moduleName)
    val generatedFiles = getGeneratedFiles(moduleName)
    EvalIntermediate(
      extra = extra,
      loader = loader,
      generated = generatedFiles,
      enclosingModule = moduleName,
    )

  // location of the cached type or definition information
  private[this] def cacheFile(base: Path, moduleName: String): Path =
    base.resolve(moduleName + ".cache")

  /** Compiles the synthetic source; writes the extracted info to the cache file if caching. */
  private[this] def compileAndLoad[A](
      ev: EvalType[A],
      moduleName: String,
  ): (A, ClassLoader => ClassLoader) =
    given rootCtx: Context = driver.compileCtx
    val run = driver.compiler.newRun
    val source = ev.makeSource(moduleName)
    run.compileSources(source :: Nil)
    checkError("an error in expression")
    // NOTE(review): assumes at least one compilation unit survives — `.head`
    // throws on an empty list; confirm checkError always fires first otherwise.
    val unit = run.units.head
    val extra: A = ev.extract(run, unit)
    backingDir.foreach { backing =>
      ev.write(extra, cacheFile(backing, moduleName))
    }
    val loader = (parent: ClassLoader) => AbstractFileClassLoader(outputDir, parent)
    (extra, loader)

  /** Bundles the products of [[evalCommon]] for the public result types to consume. */
  private[this] final class EvalIntermediate[A](
      val extra: A,
      val loader: ClassLoader => ClassLoader,
      val generated: Seq[Path],
      val enclosingModule: String,
  )

  private[this] def classExists(dir: Path, name: String): Boolean =
    Files.exists(dir.resolve(s"$name.class"))

  // All files in backingDir whose name contains the module name (classes + cache file).
  // NOTE(review): the Files.list stream is never closed — consider try-finally/Using.
  private[this] def getGeneratedFiles(moduleName: String): Seq[Path] =
    backingDir match
      case Some(dir) =>
        asScala(
          Files
            .list(dir)
            .filter(!Files.isDirectory(_))
            .filter(_.getFileName.toString.contains(moduleName))
            .iterator
        ).toList
      case None => Nil

  // Module name derived from the content hash, e.g. "$Wrap0123456789".
  private[this] def makeModuleName(hash: String): String = "$Wrap" + hash.take(10)

  /** Throws [[EvalException]] with the first reported error, if the reporter has any. */
  private[this] def checkError(label: String)(using ctx: Context): Unit =
    if ctx.reporter.hasErrors then
      throw new EvalException(label + ": " + ctx.reporter.allErrors.head.toString)
    else ()
end Eval
object Eval:
  private[sbt] val DefaultStartLine = 0
  lazy val noImports = EvalImports(Nil)

  /** An Eval over the current JVM classpath, with no class-file caching and a silent reporter. */
  def apply(): Eval =
    new Eval(Nil, currentClasspath, None, None)

  def apply(mkReporter: () => Reporter): Eval =
    new Eval(Nil, currentClasspath, None, Some(mkReporter))

  def apply(
      backingDir: Path,
      mkReporter: () => Reporter,
  ): Eval =
    new Eval(Nil, currentClasspath, Some(backingDir), Some(mkReporter))

  def apply(
      nonCpOptions: Seq[String],
      backingDir: Path,
      mkReporter: () => Reporter,
  ): Eval =
    new Eval(nonCpOptions, currentClasspath, Some(backingDir), Some(mkReporter))

  /** Macro entry point: compiles and evaluates `expression`, ascribing and casting to `A`. */
  inline def apply[A](expression: String): A = ${ evalImpl[A]('{ expression }) }

  private def thisClassLoader = this.getClass.getClassLoader

  // Macro implementation backing `apply[A](expression)`.
  def evalImpl[A: Type](expression: Expr[String])(using qctx: Quotes): Expr[A] =
    import quotes.reflect._
    val sym = TypeRepr.of[A].typeSymbol
    val fullName = Expr(sym.fullName)
    '{
      Eval().eval($expression, Some($fullName)).getValue(thisClassLoader).asInstanceOf[A]
    }

  // NOTE(review): casting the system class loader to URLClassLoader fails on
  // JDK 9+, where the application loader is no longer a URLClassLoader —
  // confirm which runtimes this targets.
  def currentClasspath: Seq[Path] =
    val cl = ClassLoader.getSystemClassLoader()
    val urls = cl.asInstanceOf[URLClassLoader].getURLs().toList
    urls.map(_.getFile).map(Paths.get(_))

  def bytes(s: String): Array[Byte] = s.getBytes("UTF-8")

  /** The name of the synthetic val in the synthetic module that an expression is assigned to. */
  private[sbt] final val WrapValName = "$sbtdef"

  // used to map the position offset
  class EvalSourceFile(name: String, startLine: Int, contents: String)
      extends SourceFile(
        new VirtualFile(name, contents.getBytes(StandardCharsets.UTF_8)),
        scala.io.Codec.UTF8
      ):
    // Shift reported positions by the synthetic header length so diagnostics
    // point at the user's original lines rather than the wrapper.
    override def lineToOffset(line: Int): Int = super.lineToOffset((line + startLine) max 0)
    override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) - startLine
  end EvalSourceFile

  /** Strategy interface parameterizing [[Eval]] over expression vs. definition evaluation. */
  trait EvalType[A]:
    /** Builds the synthetic wrapper source for module `moduleName`. */
    def makeSource(moduleName: String): SourceFile
    /** Extracts additional information after the compilation unit is evaluated. */
    def extract(run: Run, unit: CompilationUnit)(using ctx: Context): A
    /** Deserializes the extra information for unchanged inputs from a cache file. */
    def read(file: Path): A
    /**
     * Serializes the extra information to a cache file, where it can be `read` back if inputs
     * haven't changed.
     */
    def write(value: A, file: Path): Unit
    /** Extra information to include in the hash'd object name to help avoid collisions. */
    def extraHash: String
  end EvalType

  /** Traverses a typed tree to find the declared type of the synthetic `$sbtdef` method. */
  class TypeExtractor extends tpd.TreeTraverser:
    private[this] var result = ""
    def getType(t: tpd.Tree)(using ctx: Context): String =
      result = ""
      this((), t)
      result
    override def traverse(tree: tpd.Tree)(using ctx: Context): Unit =
      tree match
        case tpd.DefDef(name, _, tpt, _) if name.toString == WrapValName =>
          result = tpt.typeOpt.show
        case t: tpd.Template   => this((), t.body)
        case t: tpd.PackageDef => this((), t.stats)
        case t: tpd.TypeDef    => this((), t.rhs)
        case _                 => ()
  end TypeExtractor

  /**
   * Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of
   * one of `types`.
   */
  class ValExtractor(tpes: Set[String]) extends tpd.TreeTraverser:
    private[this] var vals = List[String]()
    def getVals(t: tpd.Tree)(using ctx: Context): List[String] =
      vals = Nil
      traverse(t)
      vals
    // True when any base class of `tpe` has a fully-qualified name in the requested set.
    def isAcceptableType(tpe: Types.Type)(using ctx: Context): Boolean =
      tpe.baseClasses.exists { sym =>
        tpes.contains(sym.fullName.toString)
      }
    def isTopLevelModule(sym: Symbols.Symbol)(using ctx: Context): Boolean =
      (sym is Flags.Module) && (sym.owner is Flags.ModuleClass)
    override def traverse(tree: tpd.Tree)(using ctx: Context): Unit =
      tree match
        case tpd.ValDef(name, tpt, _)
            if isTopLevelModule(tree.symbol.owner) && isAcceptableType(tpt.tpe) =>
          vals ::= name.mangledString
        case t: tpd.Template   => this((), t.body)
        case t: tpd.PackageDef => this((), t.stats)
        case t: tpd.TypeDef    => this((), t.rhs)
        case _                 => ()
  end ValExtractor

  /**
   * Gets the value of the expression wrapped in module `objectName`, which is accessible via
   * `loader`. The module name should not include the trailing `$`.
   */
  def getValue[A](objectName: String, loader: ClassLoader): A =
    val module = getModule(objectName, loader)
    val accessor = module.getClass.getMethod(WrapValName)
    val value = accessor.invoke(module)
    value.asInstanceOf[A]

  /**
   * Gets the top-level module `moduleName` from the provided class `loader`. The module name should
   * not include the trailing `$`.
   */
  def getModule(moduleName: String, loader: ClassLoader): Any =
    val clazz = Class.forName(moduleName + "$", true, loader)
    clazz.getField("MODULE$").get(null)
end Eval
/**
 * The result of evaluating a single expression: the result type's name (`tpe`),
 * a deferred accessor that loads and invokes the synthetic wrapper given a parent
 * class loader (`getValue`), and the generated class/cache files (`generated`).
 */
final class EvalResult(
    val tpe: String,
    val getValue: ClassLoader => Any,
    val generated: Seq[Path],
)
/**
 * The result of evaluating a group of Scala definitions. The definitions are wrapped in an
 * auto-generated, top-level module named `enclosingModule`. `generated` contains the compiled
 * classes and cache files related to the definitions. A new class loader containing the module may
 * be obtained from `loader` by passing the parent class loader providing the classes from the
 * classpath that the definitions were compiled against. The list of vals with the requested types
 * is `valNames`. The values for these may be obtained by providing the parent class loader to
 * `values` as is done with `loader`.
 */
final class EvalDefinitions(
    val loader: ClassLoader => ClassLoader,
    val generated: Seq[Path],
    val enclosingModule: String,
    val valNames: Seq[String]
):
  /** Loads `enclosingModule` via `loader(parent)` and reflectively reads each val in `valNames`. */
  def values(parent: ClassLoader): Seq[Any] =
    val module = Eval.getModule(enclosingModule, loader(parent))
    valNames.map(valName => module.getClass.getMethod(valName).invoke(module))
end EvalDefinitions
/** Signals compiler errors raised while evaluating an expression or definitions (see `checkError`). */
final class EvalException(msg: String) extends RuntimeException(msg)
/** Import statements (one per element) prepended verbatim to the synthetic wrapper source. */
final class EvalImports(val strings: Seq[String])

View File

@ -0,0 +1,19 @@
package sbt
package internal
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.reporting.ConsoleReporter
import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.reporting.StoreReporter
/** Base type for reporters used by `Eval` to collect Scala 3 compiler diagnostics. */
abstract class EvalReporter extends Reporter

object EvalReporter:
  /** A reporter that prints diagnostics to the console. */
  def console: EvalReporter = ForwardingReporter(ConsoleReporter())
  /** A reporter that silently accumulates diagnostics (dotty's StoreReporter). */
  def store: EvalReporter = ForwardingReporter(StoreReporter())
end EvalReporter
/** Adapts any dotty [[Reporter]] to [[EvalReporter]] by delegating each diagnostic unchanged. */
class ForwardingReporter(delegate: Reporter) extends EvalReporter:
  def doReport(dia: Diagnostic)(using Context): Unit = delegate.doReport(dia)
end ForwardingReporter

View File

@ -18,15 +18,16 @@ import sbt.internal.util.{
}
import java.io.File
import compiler.{ Eval, EvalImports }
import java.nio.file.Path
import sbt.internal.util.complete.DefaultParsers.validID
import Def.{ ScopedKey, Setting }
import Scope.GlobalScope
import sbt.SlashSyntax0._
// import Scope.GlobalScope
// import sbt.SlashSyntax0._
import sbt.internal.parser.SbtParser
import sbt.io.IO
import scala.collection.JavaConverters._
import xsbti.VirtualFile
import xsbti.VirtualFileRef
/**
* This file is responsible for compiling the .sbt files used to configure sbt builds.
@ -39,9 +40,12 @@ import scala.collection.JavaConverters._
*/
private[sbt] object EvaluateConfigurations {
type LazyClassLoaded[T] = ClassLoader => T
type LazyClassLoaded[A] = ClassLoader => A
private[sbt] case class TrackedEvalResult[T](generated: Seq[File], result: LazyClassLoaded[T])
private[sbt] case class TrackedEvalResult[A](
generated: Seq[Path],
result: LazyClassLoaded[A]
)
/**
* This represents the parsed expressions in a build sbt, as well as where they were defined.
@ -61,9 +65,13 @@ private[sbt] object EvaluateConfigurations {
* return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has
* raw sbt-types that can be accessed and used.
*/
def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] = {
val loadFiles = srcs.sortBy(_.getName) map { src =>
evaluateSbtFile(eval, src, IO.readLines(src), imports, 0)
def apply(
eval: Eval,
srcs: Seq[VirtualFile],
imports: Seq[String],
): LazyClassLoaded[LoadedSbtFile] = {
val loadFiles = srcs.sortBy(_.name) map { src =>
evaluateSbtFile(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0)
}
loader =>
loadFiles.foldLeft(LoadedSbtFile.empty) { (loaded, load) =>
@ -78,10 +86,10 @@ private[sbt] object EvaluateConfigurations {
*/
def evaluateConfiguration(
eval: Eval,
src: File,
src: VirtualFile,
imports: Seq[String]
): LazyClassLoaded[Seq[Setting[_]]] =
evaluateConfiguration(eval, src, IO.readLines(src), imports, 0)
evaluateConfiguration(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0)
/**
* Parses a sequence of build.sbt lines into a [[ParsedFile]]. The result contains
@ -90,7 +98,7 @@ private[sbt] object EvaluateConfigurations {
* @param builtinImports The set of import statements to add to those parsed in the .sbt file.
*/
private[this] def parseConfiguration(
file: File,
file: VirtualFileRef,
lines: Seq[String],
builtinImports: Seq[String],
offset: Int
@ -115,7 +123,7 @@ private[sbt] object EvaluateConfigurations {
*/
def evaluateConfiguration(
eval: Eval,
file: File,
file: VirtualFileRef,
lines: Seq[String],
imports: Seq[String],
offset: Int
@ -136,36 +144,40 @@ private[sbt] object EvaluateConfigurations {
*/
private[sbt] def evaluateSbtFile(
eval: Eval,
file: File,
file: VirtualFileRef,
lines: Seq[String],
imports: Seq[String],
offset: Int
): LazyClassLoaded[LoadedSbtFile] = {
// TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do
// detection for which project project manipulations should be applied.
val name = file.getPath
val name = file.id
val parsed = parseConfiguration(file, lines, imports, offset)
val (importDefs, definitions) =
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty)
else {
val definitions =
evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil)
val imp = BuildUtilLite.importAllRoot(definitions.enclosingModule :: Nil)
(imp, DefinedSbtValues(definitions))
}
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports
val dslEntries = parsed.settings map { case (dslExpression, range) =>
evaluateDslEntry(eval, name, allImports, dslExpression, range)
}
eval.unlinkDeferred()
// TODO:
// eval.unlinkDeferred()
// Tracks all the files we generated from evaluating the sbt file.
val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated))
val allGeneratedFiles: Seq[Path] = (definitions.generated ++ dslEntries.flatMap(_.generated))
loader => {
val projects = {
val compositeProjects = definitions.values(loader).collect { case p: CompositeProject =>
p
}
CompositeProject.expand(compositeProjects).map(resolveBase(file.getParentFile, _))
// todo: resolveBase?
CompositeProject.expand(compositeProjects) // .map(resolveBase(file.getParentFile, _))
}
val (settingsRaw, manipulationsRaw) =
dslEntries map (_.result apply loader) partition {
@ -192,7 +204,8 @@ private[sbt] object EvaluateConfigurations {
}
/** move a project to be relative to this file after we've evaluated it. */
private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base))
private[this] def resolveBase(f: File, p: Project) =
p.copy(base = IO.resolve(f, p.base))
def addOffset(offset: Int, lines: Seq[(String, Int)]): Seq[(String, Int)] =
lines.map { case (s, i) => (s, i + offset) }
@ -234,13 +247,13 @@ private[sbt] object EvaluateConfigurations {
try {
eval.eval(
expression,
imports = new EvalImports(imports, name),
imports = new EvalImports(imports.map(_._1)), // name
srcName = name,
tpeName = Some(SettingsDefinitionName),
line = range.start
)
} catch {
case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage)
case e: EvalException => throw new MessageOnlyException(e.getMessage)
}
// TODO - keep track of configuration classes defined.
TrackedEvalResult(
@ -284,14 +297,13 @@ private[sbt] object EvaluateConfigurations {
* anything on the right of the tuple is a scala expression (definition or setting).
*/
private[sbt] def splitExpressions(
file: File,
file: VirtualFileRef,
lines: Seq[String]
): (Seq[(String, Int)], Seq[(String, LineRange)]) = {
): (Seq[(String, Int)], Seq[(String, LineRange)]) =
val split = SbtParser(file, lines)
// TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different
// scala compiler rather than re-parsing.
(split.imports, split.settings)
}
private[this] def splitSettingsDefinitions(
lines: Seq[(String, LineRange)]
@ -316,19 +328,33 @@ private[sbt] object EvaluateConfigurations {
name: String,
imports: Seq[(String, Int)],
definitions: Seq[(String, LineRange)],
file: Option[File]
): compiler.EvalDefinitions = {
file: Option[VirtualFileRef],
): EvalDefinitions = {
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(
convertedRanges,
new EvalImports(imports, name),
new EvalImports(imports.map(_._1)), // name
name,
file,
// file,
extractedValTypes
)
}
}
/** String helpers that render Scala `import` statements for generated build sources. */
object BuildUtilLite:
  /** Prefix `_root_.` to package-qualified names so they resolve absolutely. */
  def rootedName(s: String): String =
    if s.contains('.') then "_root_." + s else s

  /** Render the names as a single `import a, b, c` statement; empty input yields no statement. */
  def importNames(names: Seq[String]): Seq[String] =
    names match
      case Seq() => Nil
      case ns    => ns.mkString("import ", ", ", "") :: Nil

  /** Prepend `_root_` and import just the names. */
  def importNamesRoot(names: Seq[String]): Seq[String] =
    importNames(names.map(rootedName))

  /** Wildcard import `._` for all values. */
  def importAll(values: Seq[String]): Seq[String] =
    importNames(values.map(value => value + "._"))

  /** Wildcard import of each value, anchored at `_root_` when package-qualified. */
  def importAllRoot(values: Seq[String]): Seq[String] =
    importAll(values.map(rootedName))
end BuildUtilLite
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = {
@ -360,39 +386,41 @@ object Index {
private[this] def stringToKeyMap0(
settings: Set[AttributeKey[_]]
)(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = {
val multiMap = settings.groupBy(label)
val duplicates = multiMap.iterator
.collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) }
.collect {
case (k, xs) if xs.size > 1 => (k, xs)
}
.toVector
if (duplicates.isEmpty)
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
else
sys.error(
duplicates map { case (k, tps) =>
"'" + k + "' (" + tps.mkString(", ") + ")"
} mkString ("Some keys were defined with the same name but different types: ", ", ", "")
)
// val multiMap = settings.groupBy(label)
// val duplicates = multiMap.iterator
// .collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) }
// .collect {
// case (k, xs) if xs.size > 1 => (k, xs)
// }
// .toVector
// if (duplicates.isEmpty)
// multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
// else
// sys.error(
// duplicates map { case (k, tps) =>
// "'" + k + "' (" + tps.mkString(", ") + ")"
// } mkString ("Some keys were defined with the same name but different types: ", ", ", "")
// )
???
}
private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
def triggers(ss: Settings[Scope]): Triggers[Task] = {
val runBefore = new TriggerMap
val triggeredBy = new TriggerMap
ss.data.values foreach (
_.entries foreach {
case AttributeEntry(_, value: Task[_]) =>
val as = value.info.attributes
update(runBefore, value, as get Keys.runBefore)
update(triggeredBy, value, as get Keys.triggeredBy)
case _ => ()
}
)
val onComplete = (GlobalScope / Keys.onComplete) get ss getOrElse (() => ())
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
// val runBefore = new TriggerMap
// val triggeredBy = new TriggerMap
// ss.data.values foreach (
// _.entries foreach {
// case AttributeEntry(_, value: Task[_]) =>
// val as = value.info.attributes
// update(runBefore, value, as get Keys.runBefore)
// update(triggeredBy, value, as get Keys.triggeredBy)
// case _ => ()
// }
// )
// val onComplete = (GlobalScope / Keys.onComplete) get ss getOrElse (() => ())
// new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
???
}
private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit =

View File

@ -10,6 +10,7 @@ package internal
import Def.Setting
import java.io.File
import java.nio.file.Path
/**
* Represents the exported contents of a .sbt file. Currently, that includes the list of settings,
@ -23,7 +24,7 @@ private[sbt] final class LoadedSbtFile(
// TODO - we may want to expose a simpler interface on top of here for the set command,
// rather than what we have now...
val definitions: DefinedSbtValues,
val generatedFiles: Seq[File]
val generatedFiles: Seq[Path]
) {
// We still use merge for now. We track originating sbt file in an alternative manner.
def merge(o: LoadedSbtFile): LoadedSbtFile =
@ -44,7 +45,7 @@ private[sbt] final class LoadedSbtFile(
* Represents the `val`/`lazy val` definitions defined within a build.sbt file
* which we can reference in other settings.
*/
private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinitions]) {
private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[EvalDefinitions]) {
def values(parent: ClassLoader): Seq[Any] =
sbtFiles flatMap (_ values parent)
@ -63,12 +64,12 @@ private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinit
v <- file.valNames
} yield s"import ${m}.`${v}`"
}
def generated: Seq[File] =
sbtFiles flatMap (_.generated)
def generated: Seq[Path] =
sbtFiles.flatMap(_.generated)
// Returns a classpath for the generated .sbt files.
def classpath: Seq[File] =
generated.map(_.getParentFile).distinct
def classpath: Seq[Path] =
generated.map(_.getParent()).distinct
/**
* Joins the defines of this build.sbt with another.
@ -81,7 +82,7 @@ private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinit
private[sbt] object DefinedSbtValues {
/** Construct a DefinedSbtValues object directly from the underlying representation. */
def apply(eval: compiler.EvalDefinitions): DefinedSbtValues =
def apply(eval: EvalDefinitions): DefinedSbtValues =
new DefinedSbtValues(Seq(eval))
/** Construct an empty value object. */

View File

@ -0,0 +1,304 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
package parser
import sbt.internal.util.{ LineRange, MessageOnlyException }
import java.io.File
import java.nio.charset.StandardCharsets
import java.util.concurrent.ConcurrentHashMap
import sbt.internal.parser.SbtParser._
import scala.compat.Platform.EOL
import dotty.tools.dotc.ast.Trees.Lazy
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.ast.untpd.Tree
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.Driver
import dotty.tools.dotc.util.NoSourcePosition
import dotty.tools.dotc.util.SourceFile
import dotty.tools.dotc.util.SourcePosition
import dotty.tools.io.VirtualDirectory
import dotty.tools.io.VirtualFile
import dotty.tools.dotc.parsing.*
import dotty.tools.dotc.reporting.ConsoleReporter
import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.reporting.StoreReporter
import scala.util.Random
import scala.util.{ Failure, Success }
import xsbti.VirtualFileRef
import dotty.tools.dotc.printing.Printer
import dotty.tools.dotc.config.Printers
private[sbt] object SbtParser:
  // Line separator used when joining the raw lines of a .sbt file back into one string.
  val END_OF_LINE_CHAR = '\n'
  val END_OF_LINE = String.valueOf(END_OF_LINE_CHAR)
  private[parser] val NOT_FOUND_INDEX = -1
  // Placeholder file reference used by callers that parse in-memory snippets.
  private[sbt] val FAKE_FILE = VirtualFileRef.of("fake") // new File("fake")
  // Exact scalac message emitted when an unparenthesized XML group trips the parser;
  // matched verbatim in throwParserErrorsIfAny to append XmlErrorMessage.
  private[parser] val XML_ERROR = "';' expected but 'val' found."

  private val XmlErrorMessage =
    """Probably problem with parsing xml group, please add parens or semicolons:
      |Replace:
      |val xmlGroup = <a/><b/>
      |with:
      |val xmlGroup = (<a/><b/>)
      |or
      |val xmlGroup = <a/><b/>;
      """.stripMargin

  // Classpath handed to the compiler context: the location of the Scala library
  // (resolved via a stdlib type, Product).
  private final val defaultClasspath =
    sbt.io.Path.makeString(sbt.io.IO.classLocationPath[Product].toFile :: Nil)

  /**
   * Provides the previous error reporting functionality in
   * [[scala.tools.reflect.ToolBox]].
   *
   * This parser is a wrapper around a collection of reporters that are
   * indexed by a unique key. This is used to ensure that the reports of
   * one parser don't collide with other ones in concurrent settings.
   *
   * This parser is a sign that this whole parser should be rewritten.
   * There are exceptions everywhere and the logic to work around
   * the scalac parser bug heavily relies on them and it's tied
   * to the test suite. Ideally, we only want to throw exceptions
   * when we know for a fact that the user-provided snippet doesn't
   * parse.
   */
  private[sbt] class UniqueParserReporter extends Reporter {

    // One StoreReporter per reporter id (source path), so concurrent parses don't mix messages.
    private val reporters = new ConcurrentHashMap[String, StoreReporter]()

    // Routes a diagnostic to the reporter registered for its source path.
    // sys.error's if the diagnostic carries no position.
    override def doReport(dia: Diagnostic)(using Context): Unit =
      import scala.jdk.OptionConverters.*
      val sourcePath = dia.position.asScala.getOrElse(sys.error("missing position")).source.path
      val reporter = getReporter(sourcePath)
      reporter.doReport(dia)

    override def report(dia: Diagnostic)(using Context): Unit =
      import scala.jdk.OptionConverters.*
      val sourcePath = dia.position.asScala.getOrElse(sys.error("missing position")).source.path
      val reporter = getReporter(sourcePath)
      reporter.report(dia)

    // True if any registered reporter saw an error (checked in parallel over the map).
    override def hasErrors: Boolean = {
      var result = false
      reporters.forEachValue(100, r => if (r.hasErrors) result = true)
      result
    }

    def createReporter(uniqueFileName: String): StoreReporter =
      val r = new StoreReporter(null)
      reporters.put(uniqueFileName, r)
      r

    def getOrCreateReporter(uniqueFileName: String): StoreReporter = {
      val r = reporters.get(uniqueFileName)
      if (r == null) createReporter(uniqueFileName)
      else r
    }

    // Falls back to scalacGlobalInitReporter for paths never registered;
    // fails loudly if that global was never initialized.
    private def getReporter(fileName: String) = {
      val reporter = reporters.get(fileName)
      if (reporter == null) {
        scalacGlobalInitReporter.getOrElse(
          sys.error(s"sbt forgot to initialize `scalacGlobalInitReporter`.")
        )
      } else reporter
    }

    // Converts any buffered errors into a single MessageOnlyException whose text lists
    // "[fileName]:line: msg" per error; appends the XML hint when XML_ERROR is present.
    def throwParserErrorsIfAny(reporter: StoreReporter, fileName: String)(using
        context: Context
    ): Unit =
      if reporter.hasErrors then {
        val seq = reporter.pendingMessages.map { info =>
          s"""[$fileName]:${info.pos.line}: ${info.msg}"""
        }
        val errorMessage = seq.mkString(EOL)
        val error: String =
          if (errorMessage.contains(XML_ERROR))
            s"$errorMessage\n${SbtParser.XmlErrorMessage}"
          else errorMessage
        throw new MessageOnlyException(error)
      } else ()
  }

  // NOTE(review): mutable global, set externally — confirm who initializes this and when.
  private[sbt] var scalacGlobalInitReporter: Option[ConsoleReporter] = None

  // Shared reporter and compiler driver reused across all parsing sessions.
  private[sbt] val globalReporter = UniqueParserReporter()
  private[sbt] val defaultGlobalForParser = ParseDriver()

  // One-time setup of a dotty Driver: context with the default classpath, an in-memory
  // output directory, and the global multiplexing reporter.
  private[sbt] final class ParseDriver extends Driver:
    import dotty.tools.dotc.config.Settings.Setting._
    val compileCtx0 = initCtx.fresh
    // "dummy.scala" satisfies the driver's requirement for at least one source argument.
    val options = List("-classpath", s"$defaultClasspath", "dummy.scala")
    val compileCtx1 = setup(options.toArray, compileCtx0) match
      case Some((_, ctx)) => ctx
      case _              => sys.error(s"initialization failed for $options")
    val outputDir = VirtualDirectory("output")
    val compileCtx2 = compileCtx1.fresh
      .setSetting(
        compileCtx1.settings.outputDir,
        outputDir
      )
      .setReporter(globalReporter)
    val compileCtx = compileCtx2
    val compiler = newCompiler(using compileCtx)
  end ParseDriver

  /**
   * Parse code reusing the same [[Run]] instance.
   *
   * @param code The code to be parsed.
   * @param filePath The file name where the code comes from.
   * @param reporterId0 The reporter id is the key used to get the pertinent
   *                    reporter. Given that the parsing reuses a global
   *                    instance, this reporter id makes sure that every parsing
   *                    session gets its own errors in a concurrent setting.
   *                    The reporter id must be unique per parsing session.
   * @return the parsed top-level trees, the reporter id actually used, and the
   *         synthetic wrapper SourceFile the trees' spans refer to.
   */
  private[sbt] def parse(
      code: String,
      filePath: String,
      reporterId0: Option[String]
  ): (List[untpd.Tree], String, SourceFile) =
    import defaultGlobalForParser.*
    given ctx: Context = compileCtx
    // Random suffix keeps concurrent sessions on the same file from sharing a reporter.
    val reporterId = reporterId0.getOrElse(s"$filePath-${Random.nextInt}")
    val reporter = globalReporter.getOrCreateReporter(reporterId)
    reporter.removeBufferedMessages
    // Wrap the snippet in an object so arbitrary statements parse as template members.
    val moduleName = "SyntheticModule"
    val wrapCode = s"""object $moduleName {
                      |$code
                      |}""".stripMargin
    val wrapperFile = SourceFile(
      VirtualFile(reporterId, wrapCode.getBytes(StandardCharsets.UTF_8)),
      scala.io.Codec.UTF8
    )
    val parser = Parsers.Parser(wrapperFile)
    val t = parser.parse()
    // Unwrap PackageDef -> ModuleDef -> Template to recover the user's statements.
    // Any other tree shape throws a MatchError (no fallback case). The inner match only
    // distinguishes the container (List vs Lazy) — element types are erased.
    val parsedTrees = t match
      case untpd.PackageDef(_, List(untpd.ModuleDef(_, untpd.Template(_, _, _, trees)))) =>
        trees match
          case ts: List[untpd.Tree]       => ts
          case ts: Lazy[List[untpd.Tree]] => ts.complete
    globalReporter.throwParserErrorsIfAny(reporter, filePath)
    (parsedTrees, reporterId, wrapperFile)
end SbtParser
/**
 * Warms up the shared parser infrastructure on a background daemon thread so the
 * first real parse does not pay the full `ParseDriver` initialization cost.
 * Constructing an instance starts the thread as a side effect.
 */
private class SbtParserInit {
  // Fully construct the Thread before starting it. The previous version called
  // start() inside the anonymous subclass initializer, which publishes a
  // partially-constructed `this` to the newly started thread.
  private val initThread: Thread = new Thread("sbt-parser-init-thread") {
    override def run(): Unit = {
      // Touching the val forces SbtParser object initialization (and its ParseDriver).
      val _ = SbtParser.defaultGlobalForParser
    }
  }
  initThread.setDaemon(true)
  initThread.start()
}
/**
* This method solely exists to add scaladoc to members in SbtParser which
* are defined using pattern matching.
*/
sealed trait ParsedSbtFileExpressions:
  /** The parsed import expressions, each paired with the line number it starts on. */
  def imports: Seq[(String, Int)]

  /**
   * The parsed definitions and/or sbt build settings, each paired with the
   * range of lines it spans.
   */
  def settings: Seq[(String, LineRange)]

  /**
   * The parsed definitions/settings, each paired with its underlying Scala
   * syntax tree (needed, e.g., for "session save").
   */
  def settingsTrees: Seq[(String, Tree)]
end ParsedSbtFileExpressions
/**
* An initial parser/splitter of .sbt files.
*
* This class is responsible for chunking a `.sbt` file into expression ranges
* which we can then compile using the Scala compiler.
*
* Example:
*
* {{{
* val parser = SbtParser(myFile, IO.readLines(myFile))
* // All import statements
* val imports = parser.imports
* // All other statements (val x =, or raw settings)
* val settings = parser.settings
* }}}
*
* @param file The file we're parsing (may be a dummy file)
* @param lines The parsed "lines" of the file, where each string is a line.
*/
private[sbt] case class SbtParser(path: VirtualFileRef, lines: Seq[String])
    extends ParsedSbtFileExpressions:
  // settingsTrees,modifiedContent needed for "session save"
  // TODO - We should look into splitting out "definitions" vs. "settings" here instead of further string lookups, since we have the
  // parsed trees.
  // Parsing happens eagerly at construction; a parse error surfaces as an
  // exception thrown from this initializer.
  val (imports, settings, settingsTrees) = splitExpressions(path, lines)

  import SbtParser.defaultGlobalForParser.*

  /**
   * Chunks the file into (imports with start lines, settings with line ranges,
   * settings with their trees) by parsing the whole content in one pass and
   * partitioning the resulting top-level trees.
   */
  private def splitExpressions(
      path: VirtualFileRef,
      lines: Seq[String]
  ): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = {
    // import sbt.internal.parser.MissingBracketHandler.findMissingText
    val indexedLines = lines.toIndexedSeq
    val content = indexedLines.mkString(END_OF_LINE)
    val fileName = path.id
    val (parsedTrees, reporterId, sourceFile) = parse(content, fileName, None)
    given ctx: Context = compileCtx
    // Imports are handled separately from all other statements (vals, settings, ...).
    val (imports: Seq[untpd.Tree], statements: Seq[untpd.Tree]) =
      parsedTrees.partition {
        case _: untpd.Import => true
        case _               => false
      }
    // Recovers a statement's source text and line range from its span.
    // Statements without a span (synthetic trees) are dropped.
    def convertStatement(tree: untpd.Tree)(using ctx: Context): Option[(String, Tree, LineRange)] =
      if tree.span.exists then
        // not sure why I need to reconstruct the position myself
        val pos = SourcePosition(sourceFile, tree.span)
        val statement = String(pos.linesSlice).trim()
        val lines = pos.lines
        val wrapperLineOffset = 0
        Some(
          (
            statement,
            tree,
            LineRange(lines.start + wrapperLineOffset, lines.end + wrapperLineOffset)
          )
        )
      else None
    val stmtTreeLineRange = statements.flatMap(convertStatement)
    val importsLineRange = importsToLineRanges(sourceFile, imports)
    // The (text, range) and (text, tree) views are two projections of the same list.
    (
      importsLineRange,
      stmtTreeLineRange.map { case (stmt, _, lr) =>
        (stmt, lr)
      },
      stmtTreeLineRange.map { case (stmt, tree, _) =>
        (stmt, tree)
      }
    )
  }

  /** Pairs each import's source text with the line it starts on. */
  private def importsToLineRanges(
      sourceFile: SourceFile,
      imports: Seq[Tree]
  )(using context: Context): Seq[(String, Int)] =
    imports.map { tree =>
      // not sure why I need to reconstruct the position myself
      val pos = SourcePosition(sourceFile, tree.span)
      val content = String(pos.linesSlice).trim()
      val wrapperLineOffset = 0
      (content, pos.line + wrapperLineOffset)
    }
end SbtParser

View File

@ -9,6 +9,8 @@ package sbt
package internal
package parser
/*
private[sbt] object SbtRefactorings {
import sbt.internal.parser.SbtParser.{ END_OF_LINE, FAKE_FILE }
@ -18,14 +20,14 @@ private[sbt] object SbtRefactorings {
val reverseOrderingInt = Ordering[Int].reverse
/**
* Refactoring a `.sbt` file so that the new settings are used instead of any existing settings.
* @param configFile SbtConfigFile with the lines of an sbt file as a List[String] where each string is one line
* @param commands A List of settings (space separate) that should be inserted into the current file.
* If the settings replaces a value, it will replace the original line in the .sbt file.
* If in the `.sbt` file we have multiply value for one settings -
* the first will be replaced and the other will be removed.
* @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file.
*/
* Refactoring a `.sbt` file so that the new settings are used instead of any existing settings.
* @param configFile SbtConfigFile with the lines of an sbt file as a List[String] where each string is one line
* @param commands A List of settings (space separate) that should be inserted into the current file.
* If the settings replaces a value, it will replace the original line in the .sbt file.
* If in the `.sbt` file we have multiply value for one settings -
* the first will be replaced and the other will be removed.
* @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file.
*/
def applySessionSettings(
configFile: SbtConfigFile,
commands: Seq[SessionSetting]
@ -43,12 +45,11 @@ private[sbt] object SbtRefactorings {
modifiedContent: String,
sortedRecordedCommands: Seq[(Int, String, String)]
) = {
sortedRecordedCommands.foldLeft(modifiedContent) {
case (acc, (from, old, replacement)) =>
val before = acc.substring(0, from)
val after = acc.substring(from + old.length, acc.length)
val afterLast = emptyStringForEmptyString(after)
before + replacement + afterLast
sortedRecordedCommands.foldLeft(modifiedContent) { case (acc, (from, old, replacement)) =>
val before = acc.substring(0, from)
val after = acc.substring(from + old.length, acc.length)
val afterLast = emptyStringForEmptyString(after)
before + replacement + afterLast
}
}
@ -58,32 +59,29 @@ private[sbt] object SbtRefactorings {
}
private def recordCommands(commands: Seq[SessionSetting], split: SbtParser) =
commands.flatMap {
case (_, command) =>
val map = toTreeStringMap(command)
map.flatMap { case (name, _) => treesToReplacements(split, name, command) }
commands.flatMap { case (_, command) =>
val map = toTreeStringMap(command)
map.flatMap { case (name, _) => treesToReplacements(split, name, command) }
}
private def treesToReplacements(split: SbtParser, name: String, command: Seq[String]) =
split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) {
case (acc, (st, tree)) =>
val treeName = extractSettingName(tree)
if (name == treeName) {
val replacement =
if (acc.isEmpty) command.mkString(END_OF_LINE)
else emptyString
(tree.pos.start, st, replacement) +: acc
} else {
acc
}
split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) { case (acc, (st, tree)) =>
val treeName = extractSettingName(tree)
if (name == treeName) {
val replacement =
if (acc.isEmpty) command.mkString(END_OF_LINE)
else emptyString
(tree.pos.start, st, replacement) +: acc
} else {
acc
}
}
private def toTreeStringMap(command: Seq[String]) = {
val split = SbtParser(FAKE_FILE, command)
val trees = split.settingsTrees
val seq = trees.map {
case (statement, tree) =>
(extractSettingName(tree), statement)
val seq = trees.map { case (statement, tree) =>
(extractSettingName(tree), statement)
}
seq.toMap
}
@ -98,3 +96,4 @@ private[sbt] object SbtRefactorings {
}
}
*/

View File

@ -0,0 +1,39 @@
package sbt.internal
import sbt.internal.parser.SbtParser
import sbt.internal.util.LineRange
import xsbti.VirtualFileRef
object SbtParserTest extends verify.BasicTestSuite:
  // Sample build file exercising a one-line import, a multi-line import,
  // a plain `val`, and a multi-line settings expression.
  lazy val testCode: String = """import keys.*
import com.{
keys
}
val x = 1
lazy val foo = project
.settings(x := y)
"""

  test("imports with their lines") {
    val ref = VirtualFileRef.of("vfile")
    val p = SbtParser(ref, testCode.linesIterator.toList)
    assert(
      p.imports == List(
        "import keys.*" -> 1,
        """import com.{
keys
}""" -> 2
      )
    )
  }

  // Renamed from "imports with their lines2": this test verifies the parsed
  // settings and their line ranges, not the imports.
  test("settings with their line ranges") {
    val ref = VirtualFileRef.of("vfile")
    val p = SbtParser(ref, testCode.linesIterator.toList)
    assert(p.settings.size == 2)
    assert(p.settings(0) == ("""val x = 1""" -> LineRange(6, 6)))
    assert(p.settings(1) == ("""lazy val foo = project
.settings(x := y)""" -> LineRange(7, 8)))
  }
end SbtParserTest

View File

@ -10,36 +10,36 @@ package internal
package parser
abstract class CheckIfParsedSpec(
implicit val splitter: SplitExpressions.SplitExpression =
EvaluateConfigurations.splitExpressions
val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
) extends AbstractSpec {
test(s"${this.getClass.getName} should parse sbt file") {
files foreach {
case (content, description, nonEmptyImports, nonEmptyStatements) =>
println(s"""${getClass.getSimpleName}: "$description" """)
val (imports, statements) = split(content)
assert(
nonEmptyStatements == statements.nonEmpty,
s"""$description
files foreach { case (content, description, nonEmptyImports, nonEmptyStatements) =>
println(s"""${getClass.getSimpleName}: "$description" """)
val (imports, statements) = split(content)(splitter)
assert(
nonEmptyStatements == statements.nonEmpty,
s"""$description
|***${shouldContains(nonEmptyStatements)} statements***
|$content """.stripMargin
)
assert(
nonEmptyImports == imports.nonEmpty,
s"""$description
)
assert(
nonEmptyImports == imports.nonEmpty,
s"""$description
|***${shouldContains(nonEmptyImports)} imports***
|$content """.stripMargin
)
)
}
}
private def shouldContains(b: Boolean): String =
s"""Should ${if (b) {
"contain"
} else {
"not contain"
}}"""
s"""Should ${
if (b) {
"contain"
} else {
"not contain"
}
}"""
protected def files: Seq[(String, String, Boolean, Boolean)]

View File

@ -25,7 +25,8 @@ object CommentedXmlSpec extends CheckIfParsedSpec {
false,
true
),
("""
(
"""
|val scmpom = taskKey[xml.NodeBuffer]("Node buffer")
|
|scmpom := <scm>
@ -44,8 +45,13 @@ object CommentedXmlSpec extends CheckIfParsedSpec {
|
|publishMavenStyle := true
|
""".stripMargin, "Wrong Commented xml ", false, true),
("""
""".stripMargin,
"Wrong Commented xml ",
false,
true
),
(
"""
|val scmpom = taskKey[xml.NodeBuffer]("Node buffer")
|
|scmpom := <scm>
@ -64,14 +70,28 @@ object CommentedXmlSpec extends CheckIfParsedSpec {
|
|publishMavenStyle := true
|
""".stripMargin, "Commented xml ", false, true),
("""
""".stripMargin,
"Commented xml ",
false,
true
),
(
"""
|import sbt._
|
|// </a
""".stripMargin, "Xml in comment", true, false),
("""
""".stripMargin,
"Xml in comment",
true,
false
),
(
"""
|// a/>
""".stripMargin, "Xml in comment2", false, false)
""".stripMargin,
"Xml in comment2",
false,
false
)
)
}

View File

@ -9,6 +9,7 @@ package sbt.internal.parser
import sbt.internal.util.MessageOnlyException
/*
object EmbeddedXmlSpec extends CheckIfParsedSpec {
test("File with xml content should Handle last xml part") {
@ -36,6 +37,7 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
try {
split(buildSbt)
sys.error("expected MessageOnlyException")
} catch {
case exception: MessageOnlyException =>
val index = buildSbt.linesIterator.indexWhere(line => line.contains(errorLine)) + 1
@ -47,13 +49,24 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
}
protected val files = Seq(
("""
(
"""
|val p = <a/>
""".stripMargin, "Xml modified closing tag at end of file", false, true),
("""
""".stripMargin,
"Xml modified closing tag at end of file",
false,
true
),
(
"""
|val p = <a></a>
""".stripMargin, "Xml at end of file", false, true),
("""|
""".stripMargin,
"Xml at end of file",
false,
true
),
(
"""|
|
|name := "play-html-compressor"
|
@ -89,8 +102,13 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
|
|val tra = "</scm>"
|
""".stripMargin, "Xml in string", false, true),
("""|
""".stripMargin,
"Xml in string",
false,
true
),
(
"""|
|
|name := "play-html-compressor"
|
@ -119,7 +137,11 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
|<aa/>
| </a></xml:group>
|
| """.stripMargin, "Xml with attributes", false, true),
| """.stripMargin,
"Xml with attributes",
false,
true
),
(
"""
|scalaVersion := "2.10.2"
@ -151,3 +173,4 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
)
}
*/

View File

@ -12,48 +12,54 @@ package parser
import java.io.File
import sbt.internal.util.MessageOnlyException
import scala.io.Source
import sbt.internal.inc.PlainVirtualFileConverter
object ErrorSpec extends AbstractSpec {
implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
val converter = PlainVirtualFileConverter.converter
// implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
test("Parser should contains file name and line number") {
val rootPath = getClass.getResource("/error-format/").getPath
println(s"Reading files from: $rootPath")
new File(rootPath).listFiles foreach { file =>
print(s"Processing ${file.getName}: ")
val vf = converter.toVirtualFile(file.toPath())
val buildSbt = Source.fromFile(file).getLines().mkString("\n")
try {
SbtParser(file, buildSbt.linesIterator.toSeq)
SbtParser(vf, buildSbt.linesIterator.toSeq)
} catch {
case exp: MessageOnlyException =>
val message = exp.getMessage
println(s"${exp.getMessage}")
assert(message.contains(file.getName))
}
containsLineNumber(buildSbt)
// todo:
// containsLineNumber(buildSbt)
}
}
test("it should handle wrong parsing") {
intercept[MessageOnlyException] {
val buildSbt =
"""
|libraryDependencies ++= Seq("a" % "b" % "2") map {
|(dependency) =>{
| dependency
| } /* */ //
|}
""".stripMargin
MissingBracketHandler.findMissingText(
buildSbt,
buildSbt.length,
2,
"fake.txt",
new MessageOnlyException("fake")
)
()
}
}
// test("it should handle wrong parsing") {
// intercept[MessageOnlyException] {
// val buildSbt =
// """
// |libraryDependencies ++= Seq("a" % "b" % "2") map {
// |(dependency) =>{
// | dependency
// | } /* */ //
// |}
// """.stripMargin
// MissingBracketHandler.findMissingText(
// buildSbt,
// buildSbt.length,
// 2,
// "fake.txt",
// new MessageOnlyException("fake")
// )
// ()
// }
// }
test("it should handle xml error") {
try {
@ -63,11 +69,12 @@ object ErrorSpec extends AbstractSpec {
|val s = '
""".stripMargin
SbtParser(SbtParser.FAKE_FILE, buildSbt.linesIterator.toSeq)
// sys.error("not supposed to reach here")
} catch {
case exp: MessageOnlyException =>
val message = exp.getMessage
println(s"${exp.getMessage}")
assert(message.contains(SbtParser.FAKE_FILE.getName))
assert(message.contains(SbtParser.FAKE_FILE.id()))
}
}

View File

@ -12,8 +12,10 @@ package parser
import java.io.File
import scala.io.Source
import sbt.internal.inc.PlainVirtualFileConverter
object NewFormatSpec extends AbstractSpec {
val converter = PlainVirtualFileConverter.converter
implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
test("New Format should handle lines") {
@ -22,11 +24,15 @@ object NewFormatSpec extends AbstractSpec {
val allFiles = new File(rootPath).listFiles.toList
allFiles foreach { path =>
println(s"$path")
val vf = converter.toVirtualFile(path.toPath())
val lines = Source.fromFile(path).getLines().toList
val (_, statements) = splitter(path, lines)
assert(statements.nonEmpty, s"""
val (_, statements) = splitter(vf, lines)
assert(
statements.nonEmpty,
s"""
|***should contains statements***
|$lines """.stripMargin)
|$lines """.stripMargin
)
}
}
}

View File

@ -9,6 +9,7 @@ package sbt
package internal
package parser
/*
import java.io.{ File, FilenameFilter }
import scala.io.Source
@ -39,12 +40,11 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec
.toList
allFiles foreach { file =>
val originalLines = Source.fromFile(file).getLines().toList
expectedResultAndMap(file) foreach {
case (expectedResultList, commands) =>
val resultList = SbtRefactorings.applySessionSettings((file, originalLines), commands)
val expected = SbtParser(file, expectedResultList)
val result = SbtParser(file, resultList._2)
assert(result.settings == expected.settings)
expectedResultAndMap(file) foreach { case (expectedResultList, commands) =>
val resultList = SbtRefactorings.applySessionSettings((file, originalLines), commands)
val expected = SbtParser(file, expectedResultList)
val result = SbtParser(file, resultList._2)
assert(result.settings == expected.settings)
}
}
}
@ -76,3 +76,4 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec
class SessionSettingsSpec extends AbstractSessionSettingsSpec("session-settings")
class SessionSettingsQuickSpec extends AbstractSessionSettingsSpec("session-settings-quick")
*/

View File

@ -9,10 +9,10 @@ package sbt
package internal
package parser
import java.io.File
import sbt.internal.util.LineRange
import xsbti.VirtualFileRef
object SplitExpressions {
type SplitExpression = (File, Seq[String]) => (Seq[(String, Int)], Seq[(String, LineRange)])
}
object SplitExpressions:
type SplitExpression =
(VirtualFileRef, Seq[String]) => (Seq[(String, Int)], Seq[(String, LineRange)])
end SplitExpressions

View File

@ -12,20 +12,21 @@ package parser
import java.io.File
import sbt.internal.util.LineRange
import xsbti.VirtualFileRef
trait SplitExpression {
def split(s: String, file: File = new File("noFile"))(
implicit splitter: SplitExpressions.SplitExpression
def split(s: String, file: VirtualFileRef = VirtualFileRef.of("noFile"))(
splitter: SplitExpressions.SplitExpression
): (Seq[(String, Int)], Seq[(String, LineRange)]) = splitter(file, s.split("\n").toSeq)
}
trait SplitExpressionsBehavior extends SplitExpression { this: verify.BasicTestSuite =>
def newExpressionsSplitter(implicit splitter: SplitExpressions.SplitExpression) = {
def newExpressionsSplitter(splitter: SplitExpressions.SplitExpression) = {
test("parse a two settings without intervening blank line") {
val (imports, settings) = split("""version := "1.0"
scalaVersion := "2.10.4"""")
scalaVersion := "2.10.4"""")(splitter)
assert(imports.isEmpty)
assert(settings.size == 2)
@ -34,7 +35,7 @@ scalaVersion := "2.10.4"""")
test("parse a setting and val without intervening blank line") {
val (imports, settings) =
split("""version := "1.0"
lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")
lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")(splitter)
assert(imports.isEmpty)
assert(settings.size == 2)
@ -46,11 +47,10 @@ lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")
import foo.Bar
version := "1.0"
""".stripMargin
)
)(splitter)
assert(imports.size == 2)
assert(settingsAndDefs.size == 1)
}
}
}

View File

@ -9,6 +9,6 @@ package sbt
package internal
package parser
object SplitExpressionsTest extends verify.BasicTestSuite with SplitExpressionsBehavior {
object SplitExpressionsTest extends verify.BasicTestSuite with SplitExpressionsBehavior:
newExpressionsSplitter(EvaluateConfigurations.splitExpressions)
}
end SplitExpressionsTest

View File

@ -99,7 +99,7 @@ object BasicIO {
in.close()
}
def inheritInput(connect: Boolean) = { p: JProcessBuilder => if (connect) InheritInput(p) else false }
def inheritInput(connect: Boolean) = { (p: JProcessBuilder) => if (connect) InheritInput(p) else false }
}
private[sbt] object ExitCodes {
def ignoreFirst: (Int, Int) => Int = (a, b) => b

View File

@ -0,0 +1,22 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
// A classpath dependency of one project on another, optionally carrying a
// configuration string. The per-case `extends` clauses pin the type parameter,
// so matching on the case refines PR and each accessor returns the right type.
enum ClasspathDep[PR <: ProjectReference]:
  // Dependency whose target has been resolved to a concrete ProjectRef.
  case ResolvedClasspathDependency(project0: ProjectRef, configuration0: Option[String])
      extends ClasspathDep[ProjectRef]
  // Dependency on a possibly-unresolved ProjectReference.
  case ClasspathDependency(project0: ProjectReference, configuration0: Option[String])
      extends ClasspathDep[ProjectReference]

  /** The project this dependency points at. */
  def project: PR = this match
    case dep: ResolvedClasspathDependency => dep.project0
    case dep: ClasspathDependency         => dep.project0

  /** The configuration mapping for this dependency, if one was given. */
  def configuration: Option[String] = this match
    case dep: ResolvedClasspathDependency => dep.configuration0
    case dep: ClasspathDependency         => dep.configuration0

View File

@ -0,0 +1,12 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
/**
 * Two-valued trigger flag for plugins: `AllRequirements` or `NoTrigger`.
 * Case order (and hence ordinals) is unchanged: AllRequirements = 0, NoTrigger = 1.
 */
enum PluginTrigger:
  case AllRequirements, NoTrigger
end PluginTrigger

View File

@ -83,7 +83,8 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
/**
* This AutoPlugin requires the plugins the Plugins matcher returned by this method. See [[trigger]].
*/
def requires: Plugins = plugins.JvmPlugin
def requires: Plugins = ???
// plugins.JvmPlugin
val label: String = getClass.getName.stripSuffix("$")
@ -317,7 +318,7 @@ object Plugins extends PluginsFunctions {
${listConflicts(conflicting)}""")
}
private[sbt] final object Empty extends Plugins {
private[sbt] object Empty extends Plugins {
def &&(o: Basic): Plugins = o
override def toString = "<none>"
}

View File

@ -0,0 +1,236 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
import java.io.File
import sbt.librarymanagement.Configuration
import sbt.Def.{ Flattened, Initialize, ScopedKey, Setting }
import sbt.internal.util.Dag
import sbt.internal.util.complete.DefaultParsers
sealed trait ProjectDefinition[PR <: ProjectReference] {

  /**
   * Uniquely identifies this project within a build; also how the project is
   * referred to on the command line and in key scopes.
   */
  def id: String

  /** This project's base directory. */
  def base: File

  /**
   * Groups of related tasks defined for this project. Listing a configuration
   * here matters mainly when it extends another: a setting missing in one
   * configuration is looked up in the configurations it extends.
   */
  def configurations: Seq[Configuration]

  /**
   * The settings explicitly declared on this project, excluding any settings
   * appended automatically (as configured by `auto`).
   */
  def settings: Seq[Setting[_]]

  /**
   * Projects aggregated by this one: running a task here also runs it on each
   * of these.
   */
  def aggregate: Seq[PR]

  /** Projects this project depends on for its classpath. */
  def dependencies: Seq[ClasspathDep[PR]]

  /** All projects this one points at: aggregated plus classpath dependencies. */
  def uses: Seq[PR] = aggregate ++ dependencies.map(dep => dep.project)
  def referenced: Seq[PR] = uses

  /**
   * The [[Plugins]] declared for this project; plugins use this label to decide
   * which settings, if any, to contribute.
   */
  def plugins: Plugins

  /** Whether this project was written by hand or generated synthetically. */
  def projectOrigin: ProjectOrigin

  /** The [[AutoPlugin]]s enabled here; only available on a loaded Project. */
  private[sbt] def autoPlugins: Seq[AutoPlugin]

  // Identity is (runtime class, id, base); hashCode folds the same three parts.
  override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode
  override final def equals(o: Any) = o match {
    case that: ProjectDefinition[_] =>
      that.getClass == this.getClass && that.id == id && that.base == base
    case _ => false
  }

  override def toString = {
    // Optional sections appear only when non-empty, in a fixed order.
    val optionalFields: List[String] =
      ifNonEmpty("aggregate", aggregate) :::
        ifNonEmpty("dependencies", dependencies) :::
        ifNonEmpty("configurations", configurations)
    val trailing: List[String] =
      s"plugins: List($plugins)" :: ifNonEmpty("autoPlugins", autoPlugins.map(_.label))
    val fields = s"id $id" :: s"base: $base" :: (optionalFields ::: trailing)
    s"Project(${fields.mkString(", ")})"
  }

  // Renders "label: collection" as a one-element list, or nothing when empty.
  private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] =
    if (ts.nonEmpty) List(s"$label: $ts") else Nil
}
/**
 * A project-like value that expands to zero or more concrete [[Project]]s
 * (for example, the `crossProject` idiom expands to one project per platform).
 */
trait CompositeProject:
  /** The concrete projects this composite expands to. */
  def componentProjects: Seq[Project]
end CompositeProject
private[sbt] object CompositeProject {

  /**
   * Expand user defined projects with the component projects of `compositeProjects`.
   *
   * If two projects with the same id appear in the user defined projects and
   * in `compositeProjects.componentProjects`, the user defined project wins.
   * This is necessary for backward compatibility with the idioms:
   * {{{
   *   lazy val foo = crossProject
   *   lazy val fooJS = foo.js.settings(...)
   *   lazy val fooJVM = foo.jvm.settings(...)
   * }}}
   * and the rarer:
   * {{{
   *   lazy val fooJS = foo.js.settings(...)
   *   lazy val foo = crossProject
   *   lazy val fooJVM = foo.jvm.settings(...)
   * }}}
   */
  def expand(compositeProjects: Seq[CompositeProject]): Seq[Project] = {
    // Projects the user wrote down directly; these take precedence by id.
    val explicit = compositeProjects.collect { case p: Project => p }
    compositeProjects
      .flatMap(_.componentProjects)
      .map(component => explicit.find(_.id == component.id).getOrElse(component))
      .distinct
  }
}
sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject:

  /** A plain Project expands to just itself. */
  override def componentProjects: Seq[Project] = Seq(this)

  /** Adds new configurations directly to this project. To override an existing configuration, use `overrideConfigs`. */
  def configs(cs: Configuration*): Project =
    copy(configurations = configurations ++ cs)

  /** Adds classpath dependencies on internal or external projects. */
  def dependsOn(deps: ClasspathDep[ProjectReference]*): Project =
    copy(dependencies = dependencies ++ deps)

  /**
   * Adds projects to be aggregated. When a user requests a task to run on this project from the command line,
   * the task will also be run in aggregated projects.
   */
  def aggregate(refs: ProjectReference*): Project =
    copy(aggregate = (aggregate: Seq[ProjectReference]) ++ refs)

  /** Appends settings to the current settings sequence for this project. */
  def settings(ss: Def.SettingsDefinition*): Project =
    copy(settings = (settings: Seq[Def.Setting[_]]) ++ Def.settings(ss: _*))

  /**
   * Sets the [[AutoPlugin]]s of this project.
   * A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to enable on a project.
   */
  def enablePlugins(ns: Plugins*): Project =
    setPlugins(ns.foldLeft(plugins)((acc, n) => Plugins.and(acc, n)))

  /** Disable the given plugins on this project. */
  def disablePlugins(ps: AutoPlugin*): Project =
    setPlugins(Plugins.and(plugins, Plugins.And(ps.toList.map(p => Plugins.Exclude(p)))))

  // Replaces the plugin expression wholesale; enable/disable above compose onto it.
  private[sbt] def setPlugins(ns: Plugins): Project = copy(plugins = ns)

  /** Definitively set the [[AutoPlugin]]s for this project. */
  private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = copy(autoPlugins = autos)

  /** Definitively set the [[ProjectOrigin]] for this project. */
  private[sbt] def setProjectOrigin(origin: ProjectOrigin): Project = copy(projectOrigin = origin)

  // Internal copy: every field defaults to its current value so callers override only
  // what changes. Delegates to Project.unresolved, which re-validates the project ID.
  private[sbt] def copy(
      id: String = id,
      base: File = base,
      aggregate: Seq[ProjectReference] = aggregate,
      dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies,
      settings: Seq[Setting[_]] = settings,
      configurations: Seq[Configuration] = configurations,
      plugins: Plugins = plugins,
      autoPlugins: Seq[AutoPlugin] = autoPlugins,
      projectOrigin: ProjectOrigin = projectOrigin,
  ): Project =
    Project.unresolved(
      id = id,
      base = base,
      aggregate = aggregate,
      dependencies = dependencies,
      settings = settings,
      configurations = configurations,
      plugins = plugins,
      autoPlugins = autoPlugins,
      origin = projectOrigin,
    )
end Project
object Project:

  // Shared vals-backed implementation of ProjectDefinition used by `unresolved`.
  private abstract class ProjectDef[PR <: ProjectReference](
      val id: String,
      val base: File,
      val aggregate: Seq[PR],
      val dependencies: Seq[ClasspathDep[PR]],
      val settings: Seq[Def.Setting[_]],
      val configurations: Seq[Configuration],
      val plugins: Plugins,
      val autoPlugins: Seq[AutoPlugin],
      val projectOrigin: ProjectOrigin
  ) extends ProjectDefinition[PR] {
    // checks for cyclic references here instead of having to do it in Scope.delegates
    Dag.topologicalSort(configurations)(_.extendsConfigs)
  }

  // Builds an unresolved Project, failing fast on an invalid project ID.
  private def unresolved(
      id: String,
      base: File,
      aggregate: Seq[ProjectReference],
      dependencies: Seq[ClasspathDep[ProjectReference]],
      settings: Seq[Def.Setting[_]],
      configurations: Seq[Configuration],
      plugins: Plugins,
      autoPlugins: Seq[AutoPlugin],
      origin: ProjectOrigin
  ): Project = {
    for (errMsg <- validProjectID(id)) sys.error("Invalid project ID: " + errMsg)
    new ProjectDef[ProjectReference](
      id,
      base,
      aggregate,
      dependencies,
      settings,
      configurations,
      plugins,
      autoPlugins,
      origin
    ) with Project
  }

  /** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not. */
  def validProjectID(id: String): Option[String] =
    DefaultParsers.parse(id, DefaultParsers.ID) match
      case Left(msg) => Some(msg)
      case Right(_)  => None
end Project
/** A [[ProjectDefinition]] whose project references have been resolved to concrete [[ProjectRef]]s. */
sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] {

  /** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]]. */
  def autoPlugins: Seq[AutoPlugin]
}

View File

@ -0,0 +1,11 @@
package sbt
/**
 * Indicate whether the project was created organically, synthesized by a plugin,
 * or is a "generic root" project supplied by sbt when a project doesn't exist for `file(".")`.
 */
enum ProjectOrigin:
  /** Written directly by the user in the build definition. */
  case Organic
  /** NOTE(review): presumably an extra project added by a plugin — confirm against call sites. */
  case ExtraProject
  /** NOTE(review): presumably a project derived/synthesized from another — confirm against call sites. */
  case DerivedProject
  /** The synthetic root project sbt supplies when none is defined for `file(".")`. */
  case GenericRoot

View File

@ -10,12 +10,38 @@ package sbt
import java.io.File
import java.net.URI
import sbt.internal.util.AttributeKey
import sbt.io.IO
import sbt.librarymanagement.Configuration
import sbt.SlashSyntax.{ RichConfiguration, RichScope }
import scala.annotation.nowarn
// in all of these, the URI must be resolved and normalized before it is definitive
/** Identifies a project or build. */
sealed trait Reference
sealed trait Reference:
  // Lifts this reference into a scope-axis selection.
  private[sbt] def asScopeAxis: ScopeAxis[this.type] =
    Select(this)
  // A Scope selecting this reference on the project axis, leaving the other axes `This`.
  private[sbt] def asScope: Scope =
    Scope(asScopeAxis, This, This, This)
  // Slash syntax defined directly on references, e.g. `ref / Compile / name`.
  // NOTE(review): @nowarn presumably silences deprecation warnings from the legacy
  // `in` methods used internally — confirm against the Scoped/Scope definitions.
  @nowarn
  def /(c: ConfigKey): RichConfiguration = RichConfiguration(asScope in c)
  @nowarn
  def /(c: Configuration): RichConfiguration = RichConfiguration(asScope in c)
  // This is for handling `Zero / Zero / name`.
  @nowarn
  def /(configAxis: ScopeAxis[ConfigKey]): RichConfiguration =
    new RichConfiguration(asScope.copy(config = configAxis))
  @nowarn
  final def /[K](key: Scoped.ScopingSetting[K]): K = key.in(asScope)
  @nowarn
  final def /(key: AttributeKey[_]): RichScope = new RichScope(asScope in key)
end Reference
/** A fully resolved, unique identifier for a project or build. */
sealed trait ResolvedReference extends Reference
@ -24,7 +50,7 @@ sealed trait ResolvedReference extends Reference
sealed trait BuildReference extends Reference
/** Identifies the build for the current context. */
final case object ThisBuild extends BuildReference
case object ThisBuild extends BuildReference
/** Uniquely identifies a build by a URI. */
final case class BuildRef(build: URI) extends BuildReference with ResolvedReference
@ -44,10 +70,10 @@ final case class LocalProject(project: String) extends ProjectReference
final case class RootProject(build: URI) extends ProjectReference
/** Identifies the root project in the current build context. */
final case object LocalRootProject extends ProjectReference
case object LocalRootProject extends ProjectReference
/** Identifies the project for the current context. */
final case object ThisProject extends ProjectReference
case object ThisProject extends ProjectReference
object ProjectRef {
def apply(base: File, id: String): ProjectRef = ProjectRef(IO toURI base, id)

View File

@ -43,6 +43,7 @@ case object Zero extends ScopeAxis[Nothing]
final case class Select[S](s: S) extends ScopeAxis[S] {
override def isSelect = true
}
object ScopeAxis {
def fromOption[T](o: Option[T]): ScopeAxis[T] = o match {
case Some(v) => Select(v)

View File

@ -36,8 +36,9 @@ trait SlashSyntax {
// implicit def sbtSlashSyntaxRichReference(r: Reference): RichReference = Select(r)
given sbtSlashSyntaxRichReference: Conversion[Reference, RichReference] =
(r: Reference) => Select(r)
// Implement in Reference directly
// given sbtSlashSyntaxRichReference: Conversion[Reference, RichReference] =
// (r: Reference) => Select(r)
given sbtSlashSyntaxRichProject[A](using Conversion[A, Reference]): Conversion[A, RichReference] =
(a: A) => Select(a: Reference)

View File

@ -221,9 +221,8 @@ object RemoteCache {
remoteCacheId := {
val inputs = (unmanagedSources / inputFileStamps).value
val cp = (externalDependencyClasspath / outputFileStamps).?.value.getOrElse(Nil)
val extraInc = (extraIncOptions.value) flatMap {
case (k, v) =>
Vector(k, v)
val extraInc = (extraIncOptions.value) flatMap { case (k, v) =>
Vector(k, v)
}
combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc)
},
@ -262,54 +261,51 @@ object RemoteCache {
val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value
val nonPom = artifacts.filterNot(isPomArtifact).toVector
val copyResources = getResourceFilePaths.value
m.withModule(log) {
case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(r.name)
if (resolver eq null) sys.error(s"undefined resolver '${r.name}'")
val cross = CrossVersion(p, smi)
val crossf: String => String = cross.getOrElse(identity _)
var found = false
ids foreach {
id: String =>
val v = toVersion(id)
val modId = p.withRevision(v).withName(crossf(p.name))
val ivyId = IvySbt.toID(modId)
if (found) ()
else {
val rawa = nonPom map { _.artifact }
val seqa = CrossVersion.substituteCross(rawa, cross)
val as = seqa map { a =>
val extra = a.classifier match {
case Some(c) => Map("e:classifier" -> c)
case None => Map.empty
}
new DefaultArtifact(ivyId, null, a.name, a.`type`, a.extension, extra.asJava)
}
pullFromMavenRepo0(as, resolver, log) match {
case Right(xs0) =>
val jars = xs0.distinct
nonPom.foreach { art =>
val classifier = art.artifact.classifier
findJar(classifier, v, jars) match {
case Some(jar) =>
extractJar(art, jar, copyResources)
log.info(s"remote cache artifact extracted for $p $classifier")
case None =>
log.info(s"remote cache artifact not found for $p $classifier")
}
}
found = true
case Left(e) =>
val classifier = seqa.map(_.classifier).mkString(" ")
log.info(s"remote cache artifact not found for $p $classifier")
log.debug(e.getMessage)
}
m.withModule(log) { case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(r.name)
if (resolver eq null) sys.error(s"undefined resolver '${r.name}'")
val cross = CrossVersion(p, smi)
val crossf: String => String = cross.getOrElse(identity _)
var found = false
ids foreach { (id: String) =>
val v = toVersion(id)
val modId = p.withRevision(v).withName(crossf(p.name))
val ivyId = IvySbt.toID(modId)
if (found) ()
else {
val rawa = nonPom map { _.artifact }
val seqa = CrossVersion.substituteCross(rawa, cross)
val as = seqa map { a =>
val extra = a.classifier match {
case Some(c) => Map("e:classifier" -> c)
case None => Map.empty
}
new DefaultArtifact(ivyId, null, a.name, a.`type`, a.extension, extra.asJava)
}
pullFromMavenRepo0(as, resolver, log) match {
case Right(xs0) =>
val jars = xs0.distinct
nonPom.foreach { art =>
val classifier = art.artifact.classifier
findJar(classifier, v, jars) match {
case Some(jar) =>
extractJar(art, jar, copyResources)
log.info(s"remote cache artifact extracted for $p $classifier")
case None =>
log.info(s"remote cache artifact not found for $p $classifier")
}
}
found = true
case Left(e) =>
log.info(s"remote cache not found for ${v}")
log.debug(e.getMessage)
}
}
()
}
()
}
},
)
@ -439,10 +435,10 @@ object RemoteCache {
}
private def extractTestResult(output: File, testResult: File): Unit = {
//val expandedTestResult = output / "META-INF" / "succeeded_tests"
//if (expandedTestResult.exists) {
// val expandedTestResult = output / "META-INF" / "succeeded_tests"
// if (expandedTestResult.exists) {
// IO.move(expandedTestResult, testResult)
//}
// }
}
private def defaultArtifactTasks: Seq[TaskKey[File]] =
@ -453,13 +449,13 @@ object RemoteCache {
pkgTasks: Seq[TaskKey[File]]
): Def.Initialize[Seq[A]] =
(Classpaths.forallIn(key, pkgTasks) zipWith
Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect {
case (a, true) => a
Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) =>
a
})
private def extractHash(inputs: Seq[(Path, FileStamp)]): Vector[String] =
inputs.toVector map {
case (_, stamp0) => toOption(stamp0.stamp.getHash).getOrElse("cafe")
inputs.toVector map { case (_, stamp0) =>
toOption(stamp0.stamp.getHash).getOrElse("cafe")
}
private def combineHash(vs: Vector[String]): String = {

View File

@ -62,7 +62,7 @@ object ScriptedPlugin extends AutoPlugin {
override lazy val projectSettings: Seq[Setting[_]] = Seq(
ivyConfigurations ++= Seq(ScriptedConf, ScriptedLaunchConf),
scriptedSbt := (pluginCrossBuild / sbtVersion).value,
sbtLauncher := getJars(ScriptedLaunchConf).map(_.get.head).value,
sbtLauncher := getJars(ScriptedLaunchConf).map(_.get().head).value,
sbtTestDirectory := sourceDirectory.value / "sbt-test",
libraryDependencies ++= (CrossVersion.partialVersion(scriptedSbt.value) match {
case Some((0, 13)) =>
@ -103,7 +103,7 @@ object ScriptedPlugin extends AutoPlugin {
private[sbt] def scriptedTestsTask: Initialize[Task[AnyRef]] =
Def.task {
val cp = scriptedClasspath.value.get.map(_.toPath)
val cp = scriptedClasspath.value.get().map(_.toPath)
val loader = ClasspathUtil.toLoader(cp, scalaInstance.value.loader)
try {
ModuleUtilities.getObject("sbt.scriptedtest.ScriptedTests", loader)
@ -124,7 +124,7 @@ object ScriptedPlugin extends AutoPlugin {
val scriptedFiles: NameFilter =
("test": NameFilter) | "test.script" | "pending" | "pending.script"
val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get map {
val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get() map {
(f: File) =>
val p = f.getParentFile
(p.getParentFile.getName, p.getName)

View File

@ -70,7 +70,6 @@ import scala.util.control.NonFatal
* For now Continuous extends DeprecatedContinuous to minimize the number of deprecation warnings
* produced by this file. In sbt 2.0, the DeprecatedContinuous mixin should be eliminated and
* the deprecated apis should no longer be supported.
*
*/
private[sbt] object Continuous extends DeprecatedContinuous {
private type Event = FileEvent[FileAttributes]
@ -316,8 +315,8 @@ private[sbt] object Continuous extends DeprecatedContinuous {
isCommand: Boolean,
commands: Seq[String],
fileStampCache: FileStamp.Cache
)(
implicit extracted: Extracted
)(implicit
extracted: Extracted
): Callbacks = {
val project = extracted.currentRef
val beforeCommand = () => configs.foreach(_.watchSettings.beforeCommand())
@ -356,10 +355,9 @@ private[sbt] object Continuous extends DeprecatedContinuous {
): (Watch.Action, String, Int, State) => State = {
configs.flatMap(_.watchSettings.onTermination).distinct match {
case Seq(head, tail @ _*) =>
tail.foldLeft(head) {
case (onTermination, configOnTermination) =>
(action, cmd, count, state) =>
configOnTermination(action, cmd, count, onTermination(action, cmd, count, state))
tail.foldLeft(head) { case (onTermination, configOnTermination) =>
(action, cmd, count, state) =>
configOnTermination(action, cmd, count, onTermination(action, cmd, count, state))
}
case _ =>
if (isCommand) Watch.defaultCommandOnTermination else Watch.defaultTaskOnTermination
@ -602,9 +600,8 @@ private[sbt] object Continuous extends DeprecatedContinuous {
else Update(event)
)
}
acceptedConfigParameters.flatMap {
case (_, _, callback) =>
watchEvent.map(e => e -> callback(count, e))
acceptedConfigParameters.flatMap { case (_, _, callback) =>
watchEvent.map(e => e -> callback(count, e))
}
} else Nil
}
@ -626,39 +623,41 @@ private[sbt] object Continuous extends DeprecatedContinuous {
}
}
((count: Int) => {
val interrupted = new AtomicBoolean(false)
def getEvent: Option[(Watch.Event, Watch.Action)] = {
val events =
try antiEntropyMonitor.poll(Duration.Inf)
catch { case _: InterruptedException => interrupted.set(true); Nil }
val actions = events.flatMap(onEvent(count, _))
if (actions.exists(_._2 != Watch.Ignore)) {
val builder = new StringBuilder
val min = actions.minBy {
case (e, a) =>
(
(count: Int) => {
val interrupted = new AtomicBoolean(false)
def getEvent: Option[(Watch.Event, Watch.Action)] = {
val events =
try antiEntropyMonitor.poll(Duration.Inf)
catch { case _: InterruptedException => interrupted.set(true); Nil }
val actions = events.flatMap(onEvent(count, _))
if (actions.exists(_._2 != Watch.Ignore)) {
val builder = new StringBuilder
val min = actions.minBy { case (e, a) =>
if (builder.nonEmpty) builder.append(", ")
val path = e.path
builder.append(path)
builder.append(" -> ")
builder.append(a.toString)
a
}
logger.debug(s"Received file event actions: $builder. Returning: $min")
if (min._2 == Watch.Trigger) onTrigger(count, min._1)
if (min._2 == Watch.ShowOptions) None else Some(min)
} else None
}
}
logger.debug(s"Received file event actions: $builder. Returning: $min")
if (min._2 == Watch.Trigger) onTrigger(count, min._1)
if (min._2 == Watch.ShowOptions) None else Some(min)
} else None
}
@tailrec def impl(): Option[(Watch.Event, Watch.Action)] = getEvent match {
case None =>
if (interrupted.get || Thread.interrupted) None
else impl()
case r => r
}
@tailrec def impl(): Option[(Watch.Event, Watch.Action)] = getEvent match {
case None =>
if (interrupted.get || Thread.interrupted) None
else impl()
case r => r
}
impl()
}, () => monitor.close())
impl()
},
() => monitor.close()
)
}
private[this] class WatchExecutor(name: String) extends AutoCloseable {
@ -718,10 +717,12 @@ private[sbt] object Continuous extends DeprecatedContinuous {
thread.joinFor(1.second)
}
def result: Try[R] =
try queue.take match {
case Right(r) => Success(r)
case Left(_) => Failure(new NullPointerException)
} catch { case t: InterruptedException => Failure(t) }
try
queue.take match {
case Right(r) => Success(r)
case Left(_) => Failure(new NullPointerException)
}
catch { case t: InterruptedException => Failure(t) }
}
}
@ -773,13 +774,12 @@ private[sbt] object Continuous extends DeprecatedContinuous {
val default: String => Watch.Action =
string => parse(inputStream(string), systemInBuilder, fullParser)
val alt = alternative
.map {
case (key, handler) =>
val is = extracted.runTask(key, state)._2
() => handler(is)
.map { case (key, handler) =>
val is = extracted.runTask(key, state)._2
() => handler(is)
}
.getOrElse(() => Watch.Ignore)
string: String =>
(string: String) =>
((if (string.nonEmpty) default(string) else Watch.Ignore) :: alt() :: Nil).min
}
executor => {
@ -923,8 +923,8 @@ private[sbt] object Continuous extends DeprecatedContinuous {
* @param key the [[ScopedKey]] instance that sets the [[Scope]] for the settings we're extracting
* @param extracted the [[Extracted]] instance for the build
*/
private final class WatchSettings private[Continuous] (val key: ScopedKey[_])(
implicit extracted: Extracted
private final class WatchSettings private[Continuous] (val key: ScopedKey[_])(implicit
extracted: Extracted
) {
val antiEntropy: FiniteDuration =
key.get(watchAntiEntropy).getOrElse(Watch.defaultAntiEntropy)
@ -1015,10 +1015,15 @@ private[sbt] object Continuous extends DeprecatedContinuous {
extra = scope.extra.toOption.isDefined
)
Scope
.displayMasked(scope, " ", (_: Reference) match {
case p: ProjectRef => s"${p.project.trim} /"
case _ => "Global /"
}, mask)
.displayMasked(
scope,
" ",
(_: Reference) match {
case p: ProjectRef => s"${p.project.trim} /"
case _ => "Global /"
},
mask
)
.dropRight(3) // delete trailing "/"
.trim
}

View File

@ -20,7 +20,7 @@ private[sbt] case class GraphModuleId(organization: String, name: String, versio
private[sbt] object GraphModuleId {
import sjsonnew.BasicJsonProtocol.StringJsonFormat
implicit val graphModuleIdIso = LList.iso[GraphModuleId, String :*: String :*: String :*: LNil](
{ m: GraphModuleId =>
{ (m: GraphModuleId) =>
("organization", m.organization) :*: ("name", m.name) :*: ("version", m.version) :*: LNil
},
{ case (_, organization) :*: (_, name) :*: (_, version) :*: LNil =>
@ -44,10 +44,14 @@ private[sbt] case class Module(
private[sbt] object Module {
import sjsonnew.BasicJsonProtocol._
implicit val moduleIso = LList.iso[Module, GraphModuleId :*: Option[String] :*: String :*: Option[
String
] :*: Option[File] :*: Option[String] :*: LNil](
{ m: Module =>
implicit val moduleIso = LList.iso[
Module,
GraphModuleId :*: Option[String] :*: String :*:
Option[
String
] :*: Option[File] :*: Option[String] :*: LNil
](
{ (m: Module) =>
("id", m.id) :*: ("license", m.license) :*: ("extraInfo", m.extraInfo) :*:
("evictedByVersion", m.evictedByVersion) :*: (
"jarFile",
@ -99,7 +103,7 @@ private[sbt] object ModuleGraph {
import BasicJsonProtocol._
implicit val moduleGraphIso = LList.iso[ModuleGraph, Vector[Module] :*: Vector[Edge] :*: LNil](
{ g: ModuleGraph =>
{ (g: ModuleGraph) =>
("nodes", g.nodes.toVector) :*: ("edges", g.edges.toVector) :*: LNil
},
{ case (_, nodes: Vector[Module]) :*: (_, edges: Vector[Edge]) :*: LNil =>

View File

@ -86,8 +86,8 @@ private[sbt] object Settings {
val taskKey = TaskKey(sk.key) in sk.scope
// We create a previous reference so that clean automatically works without the
// user having to explicitly call previous anywhere.
val init = Previous.runtime(taskKey).zip(taskKey) {
case (_, t) => t.map(implicitly[ToSeqPath[T]].apply)
val init = Previous.runtime(taskKey).zip(taskKey) { case (_, t) =>
t.map(implicitly[ToSeqPath[T]].apply)
}
val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key)
addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) ::
@ -223,14 +223,13 @@ private[sbt] object Settings {
val seen = ConcurrentHashMap.newKeySet[Path]
val prevMap = new ConcurrentHashMap[Path, FileStamp]()
previous.foreach { case (k, v) => prevMap.put(k, v); () }
current.foreach {
case (path, currentStamp) =>
if (seen.add(path)) {
prevMap.remove(path) match {
case null => createdBuilder += path
case old => (if (old != currentStamp) modifiedBuilder else unmodifiedBuilder) += path
}
current.foreach { case (path, currentStamp) =>
if (seen.add(path)) {
prevMap.remove(path) match {
case null => createdBuilder += path
case old => (if (old != currentStamp) modifiedBuilder else unmodifiedBuilder) += path
}
}
}
prevMap.forEach((p, _) => deletedBuilder += p)
val unmodified = unmodifiedBuilder.result()

View File

@ -74,7 +74,8 @@ object SemanticdbPlugin extends AutoPlugin {
if (enabled)
Def.setting {
semanticdbOptions.?.all(ancestorConfigs(config)).value.flatten.flatten
} else Def.setting { Nil }
}
else Def.setting { Nil }
}.value,
scalacOptions ++= {
if (semanticdbEnabled.value)

View File

@ -12,9 +12,12 @@ import sbt.librarymanagement.Configuration
private[sbt] trait BuildSyntax {
import scala.language.experimental.macros
def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T]
def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T]
def inputKey[T](description: String): InputKey[T] = macro std.KeyMacro.inputKeyImpl[T]
def settingKey[A](description: String): SettingKey[A] = ???
// macro std.KeyMacro.settingKeyImpl[T]
def taskKey[A](description: String): TaskKey[A] = ???
// macro std.KeyMacro.taskKeyImpl[T]
def inputKey[A](description: String): InputKey[A] = ???
// macro std.KeyMacro.inputKeyImpl[T]
def enablePlugins(ps: AutoPlugin*): DslEntry = DslEntry.DslEnablePlugins(ps)
def disablePlugins(ps: AutoPlugin*): DslEntry = DslEntry.DslDisablePlugins(ps)

View File

@ -117,8 +117,8 @@ object Cross {
)(command: String): (Seq[ProjectRef], String) = {
import extracted._
import DefaultParsers._
val parser = (OpOrID <~ charClass(_ == '/', "/")) ~ any.* map {
case seg1 ~ cmd => (seg1, cmd.mkString)
val parser = (OpOrID <~ charClass(_ == '/', "/")) ~ any.* map { case seg1 ~ cmd =>
(seg1, cmd.mkString)
}
Parser.parse(command, parser) match {
case Right((seg1, cmd)) =>
@ -157,8 +157,8 @@ object Cross {
"that are configured."
)
state.log.debug("Scala versions configuration is:")
projCrossVersions.foreach {
case (project, versions) => state.log.debug(s"$project: $versions")
projCrossVersions.foreach { case (project, versions) =>
state.log.debug(s"$project: $versions")
}
}
@ -180,41 +180,40 @@ object Cross {
.groupBy(_._1)
.mapValues(_.map(_._2).toSet)
val commandsByVersion = keysByVersion.toSeq
.flatMap {
case (v, keys) =>
val projects = keys.flatMap(project)
keys.toSeq.flatMap { k =>
project(k).filter(projects.contains).flatMap { p =>
if (p == extracted.currentRef || !projects.contains(extracted.currentRef)) {
val parts = project(k).map(_.project) ++ k.scope.config.toOption.map {
case ConfigKey(n) => n.head.toUpper + n.tail
.flatMap { case (v, keys) =>
val projects = keys.flatMap(project)
keys.toSeq.flatMap { k =>
project(k).filter(projects.contains).flatMap { p =>
if (p == extracted.currentRef || !projects.contains(extracted.currentRef)) {
val parts =
project(k).map(_.project) ++ k.scope.config.toOption.map { case ConfigKey(n) =>
n.head.toUpper + n.tail
} ++ k.scope.task.toOption.map(_.label) ++ Some(k.key.label)
Some(v -> parts.mkString("", "/", fullArgs))
} else None
}
Some(v -> parts.mkString("", "/", fullArgs))
} else None
}
}
}
.groupBy(_._1)
.mapValues(_.map(_._2))
.toSeq
.sortBy(_._1)
commandsByVersion.flatMap {
case (v, commands) =>
commands match {
case Seq(c) => Seq(s"$SwitchCommand $verbose $v $c")
case Seq() => Nil // should be unreachable
case multi if fullArgs.isEmpty =>
Seq(s"$SwitchCommand $verbose $v all ${multi.mkString(" ")}")
case multi => Seq(s"$SwitchCommand $verbose $v") ++ multi
}
commandsByVersion.flatMap { case (v, commands) =>
commands match {
case Seq(c) => Seq(s"$SwitchCommand $verbose $v $c")
case Seq() => Nil // should be unreachable
case multi if fullArgs.isEmpty =>
Seq(s"$SwitchCommand $verbose $v all ${multi.mkString(" ")}")
case multi => Seq(s"$SwitchCommand $verbose $v") ++ multi
}
}
}
allCommands.toList ::: CrossRestoreSessionCommand :: captureCurrentSession(state, extracted)
}
def crossRestoreSession: Command =
Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)(
(s, _) => crossRestoreSessionImpl(s)
Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)((s, _) =>
crossRestoreSessionImpl(s)
)
private def crossRestoreSessionImpl(state: State): State = {
@ -288,9 +287,8 @@ object Cross {
excluded: Seq[(ResolvedReference, Seq[ScalaVersion])]
) = {
instance.foreach {
case (home, instance) =>
state.log.info(s"Using Scala home $home with actual version ${instance.actualVersion}")
instance.foreach { case (home, instance) =>
state.log.info(s"Using Scala home $home with actual version ${instance.actualVersion}")
}
if (switch.version.force) {
state.log.info(s"Forcing Scala version to $version on all projects.")

File diff suppressed because it is too large Load Diff

View File

@ -269,13 +269,13 @@ object EvaluateTask {
val progress = tp.progress
override def initial(): Unit = progress.initial()
override def afterRegistered(
task: Task[_],
allDeps: Iterable[Task[_]],
pendingDeps: Iterable[Task[_]]
task: Task[Any],
allDeps: Iterable[Task[Any]],
pendingDeps: Iterable[Task[Any]]
): Unit =
progress.afterRegistered(task, allDeps, pendingDeps)
override def afterReady(task: Task[_]): Unit = progress.afterReady(task)
override def beforeWork(task: Task[_]): Unit = progress.beforeWork(task)
override def afterReady(task: Task[Any]): Unit = progress.afterReady(task)
override def beforeWork(task: Task[Any]): Unit = progress.beforeWork(task)
override def afterWork[A](task: Task[A], result: Either[Task[A], Result[A]]): Unit =
progress.afterWork(task, result)
override def afterCompleted[A](task: Task[A], result: Result[A]): Unit =
@ -379,7 +379,7 @@ object EvaluateTask {
): Option[(State, Result[T])] = {
withStreams(structure, state) { str =>
for ((task, toNode) <- getTask(structure, taskKey, state, str, ref))
yield runTask(task, state, str, structure.index.triggers, config)(toNode)
yield runTask(task, state, str, structure.index.triggers, config)(using toNode)
}
}
@ -442,7 +442,7 @@ object EvaluateTask {
for (t <- structure.data.get(resolvedScope, taskKey.key))
yield (t, nodeView(state, streams, taskKey :: Nil))
}
def nodeView[HL <: HList](
def nodeView(
state: State,
streams: Streams,
roots: Seq[ScopedKey[_]],
@ -470,7 +470,7 @@ object EvaluateTask {
streams: Streams,
triggers: Triggers[Task],
config: EvaluateTaskConfig
)(implicit taskToNode: NodeView[Task]): (State, Result[T]) = {
)(using taskToNode: NodeView[Task]): (State, Result[T]) = {
import ConcurrentRestrictions.{ cancellableCompletionService, tagged, tagsKey }
val log = state.log
@ -480,9 +480,9 @@ object EvaluateTask {
def tagMap(t: Task[_]): Tags.TagMap =
t.info.get(tagsKey).getOrElse(Map.empty)
val tags =
tagged[Task[_]](tagMap, Tags.predicate(config.restrictions))
tagged[Task[Any]](tagMap, Tags.predicate(config.restrictions))
val (service, shutdownThreads) =
cancellableCompletionService[Task[_], Completed](
cancellableCompletionService[Task[Any], Completed](
tags,
(s: String) => log.warn(s),
(t: Task[_]) => tagMap(t).contains(Tags.Sentinel)
@ -509,14 +509,16 @@ object EvaluateTask {
Execute.config(config.checkCycles, overwriteNode),
triggers,
config.progressReporter
)(taskToNode)
)
val (newState, result) =
try {
val results = x.runKeep(root)(service)
given strategy: x.Strategy = service
val results = x.runKeep(root)
storeValuesForPrevious(results, state, streams)
applyResults(results, state, root)
} catch { case inc: Incomplete => (state, Inc(inc)) }
finally shutdown()
} catch {
case inc: Incomplete => (state, Result.Inc(inc))
} finally shutdown()
val replaced = transformInc(result)
logIncResult(replaced, state, streams)
(newState, replaced)
@ -560,9 +562,9 @@ object EvaluateTask {
def stateTransform(results: RMap[Task, Result]): State => State =
Function.chain(
results.toTypedSeq flatMap {
case results.TPair(_, Value(KeyValue(_, st: StateTransform))) => Some(st.transform)
case results.TPair(Task(info, _), Value(v)) => info.post(v) get transformState
case _ => Nil
case results.TPair(_, Result.Value(KeyValue(_, st: StateTransform))) => Some(st.transform)
case results.TPair(Task(info, _), Result.Value(v)) => info.post(v) get transformState
case _ => Nil
}
)

View File

@ -89,7 +89,7 @@ final case class Extracted(
EvaluateTask.withStreams(structure, state) { str =>
val nv = EvaluateTask.nodeView(state, str, rkey.scopedKey :: Nil)
val (newS, result) =
EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(nv)
EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(using nv)
(newS, EvaluateTask.processResult2(result))
}
}
@ -100,7 +100,7 @@ final case class Extracted(
* The project axis is what determines where aggregation starts, so ensure this is set to what you want.
* Other axes are resolved to `Zero` if unspecified.
*/
def runAggregated[T](key: TaskKey[T], state: State): State = {
def runAggregated[A1](key: TaskKey[A1], state: State): State =
val rkey = resolve(key)
val keys = Aggregation.aggregate(rkey, ScopeMask(), structure.extra)
val tasks = Act.keyValues(structure)(keys)
@ -109,8 +109,7 @@ final case class Extracted(
tasks,
DummyTaskMap(Nil),
show = Aggregation.defaultShow(state, false),
)(showKey)
}
)
@nowarn
private[this] def resolve[K <: Scoped.ScopingSetting[K] with Scoped](key: K): K =

View File

@ -338,7 +338,7 @@ object BuiltinCommands {
eval,
last,
lastGrep,
export,
exportCommand,
boot,
initialize,
act,
@ -586,29 +586,28 @@ object BuiltinCommands {
Project.setProject(newSession, newStructure, s)
}
def set: Command = Command(SetCommand, setBrief, setDetailed)(setParser) {
case (s, (all, arg)) =>
val extracted = Project extract s
import extracted._
val dslVals = extracted.currentUnit.unit.definitions.dslDefinitions
// TODO - This is possibly inefficient (or stupid). We should try to only attach the
// classloader + imports NEEDED to compile the set command, rather than
// just ALL of them.
val ims = (imports(extracted) ++ dslVals.imports.map(i => (i, -1)))
val cl = dslVals.classloader(currentLoader)
val settings = EvaluateConfigurations.evaluateSetting(
session.currentEval(),
"<set>",
ims,
arg,
LineRange(0, 0)
)(cl)
val setResult =
if (all) SettingCompletions.setAll(extracted, settings)
else SettingCompletions.setThis(extracted, settings, arg)
s.log.info(setResult.quietSummary)
s.log.debug(setResult.verboseSummary)
reapply(setResult.session, structure, s)
def set: Command = Command(SetCommand, setBrief, setDetailed)(setParser) { case (s, (all, arg)) =>
val extracted = Project extract s
import extracted._
val dslVals = extracted.currentUnit.unit.definitions.dslDefinitions
// TODO - This is possibly inefficient (or stupid). We should try to only attach the
// classloader + imports NEEDED to compile the set command, rather than
// just ALL of them.
val ims = (imports(extracted) ++ dslVals.imports.map(i => (i, -1)))
val cl = dslVals.classloader(currentLoader)
val settings = EvaluateConfigurations.evaluateSetting(
session.currentEval(),
"<set>",
ims,
arg,
LineRange(0, 0)
)(cl)
val setResult =
if (all) SettingCompletions.setAll(extracted, settings)
else SettingCompletions.setThis(extracted, settings, arg)
s.log.info(setResult.quietSummary)
s.log.debug(setResult.verboseSummary)
reapply(setResult.session, structure, s)
}
@deprecated("Use variant that doesn't take a State", "1.1.1")
@ -693,18 +692,20 @@ object BuiltinCommands {
for {
lastOnly_keys <- keysParser
kvs = Act.keyValues(structure)(lastOnly_keys._2)
f <- if (lastOnly_keys._1) success(() => s)
else Aggregation.evaluatingParser(s, show)(kvs)
f <-
if (lastOnly_keys._1) success(() => s)
else Aggregation.evaluatingParser(s, show)(kvs)
} yield () => {
def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream))
val newS = try f()
catch {
case NonFatal(e) =>
try export0(s)
finally {
throw e
}
}
val newS =
try f()
catch {
case NonFatal(e) =>
try export0(s)
finally {
throw e
}
}
export0(newS)
}
}
@ -722,7 +723,7 @@ object BuiltinCommands {
keepLastLog(s)
}
def export: Command =
def exportCommand: Command =
Command(ExportCommand, exportBrief, exportDetailed)(exportParser)((_, f) => f())
private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = {
@ -808,8 +809,8 @@ object BuiltinCommands {
}
def projects: Command =
Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(
s => projectsParser(s).?
Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s =>
projectsParser(s).?
) {
case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds)
case (s, None) => showProjects(s); s
@ -863,10 +864,13 @@ object BuiltinCommands {
@tailrec
private[this] def doLoadFailed(s: State, loadArg: String): State = {
s.log.warn("Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? (default: r)")
val result = try ITerminal.get.withRawInput(System.in.read) match {
case -1 => 'q'.toInt
case b => b
} catch { case _: ClosedChannelException => 'q' }
val result =
try
ITerminal.get.withRawInput(System.in.read) match {
case -1 => 'q'.toInt
case b => b
}
catch { case _: ClosedChannelException => 'q' }
def retry: State = loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog
def ignoreMsg: String =
if (Project.isProjectLoaded(s)) "using previously loaded project" else "no project loaded"
@ -890,8 +894,8 @@ object BuiltinCommands {
Nil
def loadProject: Command =
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)(
(s, arg) => loadProjectCommands(arg) ::: s
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)((s, arg) =>
loadProjectCommands(arg) ::: s
)
private[this] def loadProjectParser: State => Parser[String] =
@ -1002,13 +1006,14 @@ object BuiltinCommands {
def clearCaches: Command = {
val help = Help.more(ClearCaches, ClearCachesDetailed)
val f: State => State = registerCompilerCache _ andThen (_.initializeClassLoaderCache) andThen addCacheStoreFactoryFactory
val f: State => State =
registerCompilerCache _ andThen (_.initializeClassLoaderCache) andThen addCacheStoreFactoryFactory
Command.command(ClearCaches, help)(f)
}
private[sbt] def waitCmd: Command =
Command.arb(
_ => ContinuousCommands.waitWatch.examples() ~> " ".examples() ~> matched(any.*).examples()
Command.arb(_ =>
ContinuousCommands.waitWatch.examples() ~> " ".examples() ~> matched(any.*).examples()
) { (s0, channel) =>
val exchange = StandardMain.exchange
exchange.channelForName(channel) match {
@ -1118,8 +1123,7 @@ object BuiltinCommands {
val line = s"sbt.version=$sbtVersion"
IO.writeLines(buildProps, line :: buildPropsLines)
state.log info s"Updated file $buildProps: set sbt.version to $sbtVersion"
} else
state.log warn warnMsg
} else state.log warn warnMsg
} catch {
case _: IOException => state.log warn warnMsg
}

View File

@ -59,76 +59,6 @@ import language.experimental.macros
import scala.concurrent.TimeoutException
import scala.concurrent.duration.FiniteDuration
sealed trait ProjectDefinition[PR <: ProjectReference] {
/**
* The project ID is used to uniquely identify a project within a build.
* It is used to refer to a project from the command line and in the scope of keys.
*/
def id: String
/** The base directory for the project. */
def base: File
/**
* The configurations for this project. These are groups of related tasks and the main reason
* to list them here is when one configuration extends another. In this case, a setting lookup
* in one configuration will fall back to the configurations it extends configuration if the setting doesn't exist.
*/
def configurations: Seq[Configuration]
/**
* The explicitly defined sequence of settings that configure this project.
* These do not include the automatically appended settings as configured by `auto`.
*/
def settings: Seq[Setting[_]]
/**
* The references to projects that are aggregated by this project.
* When a task is run on this project, it will also be run on aggregated projects.
*/
def aggregate: Seq[PR]
/** The references to projects that are classpath dependencies of this project. */
def dependencies: Seq[ClasspathDep[PR]]
/** The references to projects that are aggregate and classpath dependencies of this project. */
def uses: Seq[PR] = aggregate ++ dependencies.map(_.project)
def referenced: Seq[PR] = uses
/**
* The defined [[Plugins]] associated with this project.
* A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project.
*/
def plugins: Plugins
/** Indicates whether the project was created organically, or was generated synthetically. */
def projectOrigin: ProjectOrigin
/** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */
private[sbt] def autoPlugins: Seq[AutoPlugin]
override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode
override final def equals(o: Any) = o match {
case p: ProjectDefinition[_] => p.getClass == this.getClass && p.id == id && p.base == base
case _ => false
}
override def toString = {
val agg = ifNonEmpty("aggregate", aggregate)
val dep = ifNonEmpty("dependencies", dependencies)
val conf = ifNonEmpty("configurations", configurations)
val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.label))
val fields =
s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"plugins: List($plugins)" :: autos)
s"Project(${fields.mkString(", ")})"
}
private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] =
if (ts.isEmpty) Nil else s"$label: $ts" :: Nil
}
trait CompositeProject {
def componentProjects: Seq[Project]
}
@ -605,12 +535,15 @@ object Project extends ProjectExtra {
def fillTaskAxis(scoped: ScopedKey[_]): ScopedKey[_] =
ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
def mapScope(f: Scope => Scope) = λ[ScopedKey ~> ScopedKey](k => ScopedKey(f(k.scope), k.key))
def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] =
[a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key)
def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] =
val f = mapScope(g)
ss.map(_ mapKey f mapReferenced f)
}
ss.map { setting =>
setting.mapKey(f).mapReferenced(f)
}
def transformRef(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
val f = mapScope(g)
ss.map(_ mapReferenced f)

View File

@ -188,7 +188,7 @@ object ScopeFilter {
* Information provided to Scope filters. These provide project relationships,
* project reference resolution, and the list of all static Scopes.
*/
private final class Data(
private[sbt] final class Data(
val units: Map[URI, LoadedBuildUnit],
val resolve: ProjectReference => ProjectRef,
val allScopes: Set[Scope]

View File

@ -62,10 +62,11 @@ object ScriptedRun {
val clazz = scriptedTests.getClass
if (batchExecution)
try new RunInParallelV2(
scriptedTests,
clazz.getMethod("runInParallel", fCls, bCls, asCls, fCls, sCls, asCls, lfCls, iCls)
)
try
new RunInParallelV2(
scriptedTests,
clazz.getMethod("runInParallel", fCls, bCls, asCls, fCls, sCls, asCls, lfCls, iCls)
)
catch {
case _: NoSuchMethodException =>
new RunInParallelV1(
@ -74,10 +75,11 @@ object ScriptedRun {
)
}
else
try new RunV2(
scriptedTests,
clazz.getMethod("run", fCls, bCls, asCls, fCls, sCls, asCls, lfCls)
)
try
new RunV2(
scriptedTests,
clazz.getMethod("run", fCls, bCls, asCls, fCls, sCls, asCls, lfCls)
)
catch {
case _: NoSuchMethodException =>
new RunV1(scriptedTests, clazz.getMethod("run", fCls, bCls, asCls, fCls, asCls, lfCls))

View File

@ -65,7 +65,7 @@ object SessionVar {
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
Project.structure(state).streams(state).use(key) { s =>
try {
Some(s.getInput(key, DefaultDataID).read[T])
Some(s.getInput(key, DefaultDataID).read[T]())
} catch { case NonFatal(_) => None }
}

View File

@ -131,7 +131,7 @@ private[sbt] object TemplateCommandUtil {
val templatesBaseDirectory = new File(globalBase, "templates")
val templateId = s"${info.module.organization}_${info.module.name}_${info.module.revision}"
val templateDirectory = new File(templatesBaseDirectory, templateId)
def jars = (templateDirectory ** -DirectoryFilter).get
def jars = (templateDirectory ** -DirectoryFilter).get()
if (!(info.module.revision endsWith "-SNAPSHOT") && jars.nonEmpty) jars.toList.map(_.toPath)
else {
IO.createDirectory(templateDirectory)

View File

@ -106,9 +106,8 @@ object CoursierArtifactsTasks {
)
}
val sbtArtifactsPublication = sbtArtifacts.collect {
case Some((config, artifact)) =>
config -> artifactPublication(artifact)
val sbtArtifactsPublication = sbtArtifacts.collect { case Some((config, artifact)) =>
config -> artifactPublication(artifact)
}
val stdArtifactsSet = sbtArtifacts.flatMap(_.map { case (_, a) => a }.toSeq).toSet

View File

@ -19,9 +19,10 @@ import lmcoursier.definitions.{
Reconciliation,
Strict => CStrict,
}
import lmcoursier._
import lmcoursier.syntax._
import lmcoursier.*
import lmcoursier.syntax.*
import lmcoursier.credentials.Credentials
import lmcoursier.syntax.*
import Keys._
import sbt.internal.util.Util
import sbt.librarymanagement._

View File

@ -20,18 +20,18 @@ import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, IMap, Set
import sbt.util.Show
import scala.collection.mutable
final class ParsedKey(val key: ScopedKey[_], val mask: ScopeMask, val separaters: Seq[String]) {
def this(key: ScopedKey[_], mask: ScopeMask) = this(key, mask, Nil)
final class ParsedKey[+A](val key: ScopedKey[A], val mask: ScopeMask, val separaters: Seq[String]):
def this(key: ScopedKey[A], mask: ScopeMask) = this(key, mask, Nil)
override def equals(o: Any): Boolean =
this.eq(o.asInstanceOf[AnyRef]) || (o match {
case x: ParsedKey => (this.key == x.key) && (this.mask == x.mask)
case _ => false
case x: ParsedKey[_] => (this.key == x.key) && (this.mask == x.mask)
case _ => false
})
override def hashCode: Int = {
37 * (37 * (37 * (17 + "sbt.internal.ParsedKey".##) + this.key.##)) + this.mask.##
}
}
end ParsedKey
object Act {
val ZeroString = "*"
@ -54,7 +54,7 @@ object Act {
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]
): Parser[ScopedKey[_]] =
): Parser[ScopedKey[Any]] =
scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key)
// the index should be an aggregated index for proper tab completion
@ -63,13 +63,15 @@ object Act {
defaultConfigs: Option[ResolvedReference] => Seq[String],
structure: BuildStructure
): KeysParser =
for (selected <- scopedKeySelected(
structure.index.aggregateKeyIndex,
current,
defaultConfigs,
structure.index.keyMap,
structure.data
))
for (
selected <- scopedKeySelected(
structure.index.aggregateKeyIndex,
current,
defaultConfigs,
structure.index.keyMap,
structure.data
)
)
yield Aggregation.aggregate(selected.key, selected.mask, structure.extra)
def scopedKeyAggregatedSep(
@ -77,16 +79,16 @@ object Act {
defaultConfigs: Option[ResolvedReference] => Seq[String],
structure: BuildStructure
): KeysParserSep =
for (selected <- scopedKeySelected(
structure.index.aggregateKeyIndex,
current,
defaultConfigs,
structure.index.keyMap,
structure.data
))
yield Aggregation
.aggregate(selected.key, selected.mask, structure.extra)
.map(k => k -> selected.separaters)
for selected <- scopedKeySelected(
structure.index.aggregateKeyIndex,
current,
defaultConfigs,
structure.index.keyMap,
structure.data
)
yield Aggregation
.aggregate(selected.key, selected.mask, structure.extra)
.map(k => k -> selected.separaters)
def scopedKeySelected(
index: KeyIndex,
@ -94,7 +96,7 @@ object Act {
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]
): Parser[ParsedKey] =
): Parser[ParsedKey[Any]] =
scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices =>
select(choices, data)(showRelativeKey2(current))
}
@ -104,7 +106,7 @@ object Act {
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[_]]
): Parser[Seq[Parser[ParsedKey]]] = {
): Parser[Seq[Parser[ParsedKey[Any]]]] = {
val confParserCache
: mutable.Map[Option[sbt.ResolvedReference], Parser[(ParsedAxis[String], Seq[String])]] =
mutable.Map.empty
@ -149,7 +151,7 @@ object Act {
confAmb: ParsedAxis[String],
baseMask: ScopeMask,
baseSeps: Seq[String]
): Seq[Parser[ParsedKey]] =
): Seq[Parser[ParsedKey[Any]]] =
for {
conf <- configs(confAmb, defaultConfigs, proj, index)
} yield for {
@ -161,7 +163,7 @@ object Act {
} yield {
val mask = baseMask.copy(task = taskAmb.isExplicit, extra = true)
val seps = baseSeps ++ taskSeps
new ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask, seps)
ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask, seps)
}
def makeScopedKey(
@ -176,9 +178,9 @@ object Act {
key
)
def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(
implicit show: Show[ScopedKey[_]]
): Parser[ParsedKey] =
def select(allKeys: Seq[Parser[ParsedKey[_]]], data: Settings[Scope])(implicit
show: Show[ScopedKey[_]]
): Parser[ParsedKey[Any]] =
seq(allKeys) flatMap { ss =>
val default = ss.headOption match {
case None => noValidKeys
@ -186,16 +188,16 @@ object Act {
}
selectFromValid(ss filter isValid(data), default)
}
def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(
implicit show: Show[ScopedKey[_]]
): Parser[ParsedKey] =
def selectFromValid(ss: Seq[ParsedKey[_]], default: Parser[ParsedKey[_]])(implicit
show: Show[ScopedKey[_]]
): Parser[ParsedKey[Any]] =
selectByTask(selectByConfig(ss)) match {
case Seq() => default
case Seq(single) => success(single)
case multi => failure("Ambiguous keys: " + showAmbiguous(keys(multi)))
}
private[this] def keys(ss: Seq[ParsedKey]): Seq[ScopedKey[_]] = ss.map(_.key)
def selectByConfig(ss: Seq[ParsedKey]): Seq[ParsedKey] =
private[this] def keys(ss: Seq[ParsedKey[_]]): Seq[ScopedKey[_]] = ss.map(_.key)
def selectByConfig(ss: Seq[ParsedKey[_]]): Seq[ParsedKey[Any]] =
ss match {
case Seq() => Nil
case Seq(x, tail @ _*) => // select the first configuration containing a valid key
@ -204,7 +206,7 @@ object Act {
case xs => x +: xs
}
}
def selectByTask(ss: Seq[ParsedKey]): Seq[ParsedKey] = {
def selectByTask(ss: Seq[ParsedKey[_]]): Seq[ParsedKey[Any]] = {
val (selects, zeros) = ss.partition(_.key.scope.task.isSelect)
if (zeros.nonEmpty) zeros else selects
}
@ -214,7 +216,7 @@ object Act {
def showAmbiguous(keys: Seq[ScopedKey[_]])(implicit show: Show[ScopedKey[_]]): String =
keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = {
def isValid(data: Settings[Scope])(parsed: ParsedKey[_]): Boolean = {
val key = parsed.key
data.definingScope(key.scope, key.key) == Some(key.scope)
}
@ -235,7 +237,9 @@ object Act {
def config(confs: Set[String]): Parser[ParsedAxis[String]] = {
val sep = ':' !!! "Expected ':' (if selecting a configuration)"
token((ZeroString ^^^ ParsedZero | value(examples(ID, confs, "configuration"))) <~ sep) ?? Omitted
token(
(ZeroString ^^^ ParsedZero | value(examples(ID, confs, "configuration"))) <~ sep
) ?? Omitted
}
// New configuration parser that's able to parse configuration ident trailed by slash.
@ -330,7 +334,8 @@ object Act {
knownValues: IMap[AttributeKey, Set]
): Parser[ScopeAxis[AttributeMap]] = {
val extrasP = extrasParser(knownKeys, knownValues)
val extras = token('(', hide = _ == 1 && knownValues.isEmpty) ~> extrasP <~ token(')')
val extras =
token('(', hide = (x: Int) => x == 1 && knownValues.isEmpty) ~> extrasP <~ token(')')
optionalAxis(extras, Zero)
}
@ -383,12 +388,11 @@ object Act {
knownValues: IMap[AttributeKey, Set]
): Parser[AttributeEntry[_]] = {
val keyp = knownIDParser(knownKeys, "Not a valid extra key") <~ token(':' ~ OptSpace)
keyp flatMap {
case key: AttributeKey[t] =>
val valueMap: Map[String, t] = knownValues(key).map(v => (v.toString, v)).toMap
knownIDParser(valueMap, "extra value") map { value =>
AttributeEntry(key, value)
}
keyp flatMap { case key: AttributeKey[t] =>
val valueMap: Map[String, t] = knownValues(key).map(v => (v.toString, v)).toMap
knownIDParser(valueMap, "extra value") map { value =>
AttributeEntry(key, value)
}
}
}
def knownIDParser[T](knownKeys: Map[String, T], label: String): Parser[T] =
@ -416,7 +420,11 @@ object Act {
): Parser[ResolvedReference] = {
def projectID(uri: URI) =
token(
DQuoteChar ~> examplesStrict(ID, index projects uri, "project ID") <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing
DQuoteChar ~> examplesStrict(
ID,
index projects uri,
"project ID"
) <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing
)
def projectRef(uri: URI) = projectID(uri) map { id =>
ProjectRef(uri, id)
@ -536,8 +544,8 @@ object Act {
structure.data
)
type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T }]
type KeysParserSep = Parser[Seq[(ScopedKey[T], Seq[String])] forSome { type T }]
type KeysParser = Parser[Seq[ScopedKey[Any]]]
type KeysParserSep = Parser[Seq[(ScopedKey[Any], Seq[String])]]
def aggregatedKeyParser(state: State): KeysParser = aggregatedKeyParser(Project extract state)
def aggregatedKeyParser(extracted: Extracted): KeysParser =

View File

@ -75,7 +75,9 @@ object Aggregation {
import complete._
val log = state.log
val extracted = Project.extract(state)
val success = results match { case Value(_) => true; case Inc(_) => false }
val success = results match
case Result.Value(_) => true
case Result.Inc(_) => false
results.toEither.right.foreach { r =>
if (show.taskValues) printSettings(r, show.print)
}
@ -100,25 +102,23 @@ object Aggregation {
val start = System.currentTimeMillis
val (newS, result) = withStreams(structure, s) { str =>
val transform = nodeView(s, str, roots, extra)
runTask(toRun, s, str, structure.index.triggers, config)(transform)
runTask(toRun, s, str, structure.index.triggers, config)(using transform)
}
val stop = System.currentTimeMillis
Complete(start, stop, result, newS)
}
def runTasks[HL <: HList, T](
def runTasks[A1](
s: State,
ts: Values[Task[T]],
ts: Values[Task[A1]],
extra: DummyTaskMap,
show: ShowConfig
)(implicit display: Show[ScopedKey[_]]): State = {
val complete = timedRun[T](s, ts, extra)
)(using display: Show[ScopedKey[_]]): State =
val complete = timedRun[A1](s, ts, extra)
showRun(complete, show)
complete.results match {
case Inc(i) => complete.state.handleError(i)
case Value(_) => complete.state
}
}
complete.results match
case Result.Inc(i) => complete.state.handleError(i)
case Result.Value(_) => complete.state
def printSuccess(
start: Long,
@ -163,7 +163,9 @@ object Aggregation {
val mins = f"${total % 3600 / 60}%02d"
val secs = f"${total % 60}%02d"
s" ($maybeHours$mins:$secs)"
}) s"Total time: $totalString, completed $nowString"
})
s"Total time: $totalString, completed $nowString"
}
def defaultFormat: DateFormat = {

View File

@ -55,11 +55,10 @@ final class MultiHandler[S, T](
def setRoot(resolver: S => Option[T]) =
new MultiHandler(builtIn, Some(resolver), nonRoots, getURI, log)
def applyNonRoots(info: S): List[(URI, T)] =
nonRoots flatMap {
case (definingURI, loader) =>
loader(info) map { unit =>
(definingURI, unit)
}
nonRoots flatMap { case (definingURI, loader) =>
loader(info) map { unit =>
(definingURI, unit)
}
}
private[this] def warn(baseMessage: String, log: Logger, matching: Seq[(URI, T)]): Unit = {
@ -183,7 +182,8 @@ object BuildLoader {
}
}
/** Defines the responsible for loading builds.
/**
* Defines the responsible for loading builds.
*
* @param fail A reporter for failures.
* @param state The state.

View File

@ -110,7 +110,7 @@ final class LoadedBuildUnit(
)
)
/** The base directory of the build unit (not the build definition).*/
/** The base directory of the build unit (not the build definition). */
def localBase = unit.localBase
/**
@ -211,8 +211,8 @@ final class DetectedPlugins(
private[this] lazy val (autoPluginAutoImports, topLevelAutoPluginAutoImports) =
autoPlugins
.flatMap {
case DetectedAutoPlugin(name, _, hasAutoImport) => if (hasAutoImport) Some(name) else None
.flatMap { case DetectedAutoPlugin(name, _, hasAutoImport) =>
if (hasAutoImport) Some(name) else None
}
.partition(nonTopLevelPlugin)
@ -271,8 +271,8 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) {
BuildUtil.checkCycles(units)
def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] =
units.iterator.flatMap {
case (build, unit) => unit.projects.map(p => ProjectRef(build, p.id) -> p)
units.iterator.flatMap { case (build, unit) =>
unit.projects.map(p => ProjectRef(build, p.id) -> p)
}.toIndexedSeq
def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] =
@ -377,7 +377,7 @@ object BuildStreams {
// The Previous.scopedKeyAttribute is an implementation detail that allows us to get a
// more specific cache directory for a task stream.
case AttributeEntry(key, _) if key == Previous.scopedKeyAttribute => Nil
case AttributeEntry(key, value) => s"${key.label}=$value" :: Nil
case AttributeEntry(key, value) => s"${key.label}=$value" :: Nil
}
.mkString(" ")
@ -388,8 +388,8 @@ object BuildStreams {
data: Settings[Scope]
): File =
scoped.scope.project match {
case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath
case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath
case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath
case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath
case Select(pr @ ProjectRef(uri, id)) => refTarget(pr, units(uri).defined(id).base, data)
case Select(pr) =>
sys.error("Unresolved project reference (" + pr + ") in " + displayFull(scoped))

View File

@ -113,7 +113,8 @@ object BuildUtil {
def aggregationRelation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = {
val depPairs =
for {
(uri, unit) <- units.toIterable // don't lose this toIterable, doing so breaks actions/cross-multiproject & actions/update-state-fail
(uri, unit) <-
units.toIterable // don't lose this toIterable, doing so breaks actions/cross-multiproject & actions/update-state-fail
project <- unit.projects
ref = ProjectRef(uri, project.id)
agg <- project.aggregate

View File

@ -327,27 +327,33 @@ private[sbt] object ClasspathImpl {
for {
ac <- applicableConfigs
} // add all configurations in this project
visited add (p -> ac.name)
val masterConfs = names(getConfigurations(projectRef, data).toVector)
visited add (p -> ac.name)
val masterConfs = names(getConfigurations(projectRef, data).toVector)
for {
ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)
} {
val configurations = getConfigurations(dep, data)
val mapping =
mapped(confMapping, masterConfs, names(configurations.toVector), "compile", "*->compile")
// map master configuration 'c' and all extended configurations to the appropriate dependency configuration
for {
ac <- applicableConfigs
depConfName <- mapping(ac.name)
ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)
} {
val configurations = getConfigurations(dep, data)
val mapping =
mapped(
confMapping,
masterConfs,
names(configurations.toVector),
"compile",
"*->compile"
)
// map master configuration 'c' and all extended configurations to the appropriate dependency configuration
for {
depConf <- confOpt(configurations, depConfName)
} if (!visited((dep, depConfName))) {
visit(dep, depConf)
ac <- applicableConfigs
depConfName <- mapping(ac.name)
} {
for {
depConf <- confOpt(configurations, depConfName)
} if (!visited((dep, depConfName))) {
visit(dep, depConf)
}
}
}
}
}
visit(projectRef, conf)
visited.toSeq

View File

@ -58,7 +58,7 @@ private[sbt] object Clean {
case f if f.isDirectory => Glob(f, AnyPath)
case f => f.toGlob
} ++ (scope / cleanKeepGlobs).value
p: Path => excludes.exists(_.matches(p))
(p: Path) => excludes.exists(_.matches(p))
}
private[this] def cleanDelete(scope: Scope): Def.Initialize[Task[Path => Unit]] = Def.task {
// Don't use a regular logger because the logger actually writes to the target directory.

View File

@ -92,21 +92,22 @@ private[sbt] final class CommandExchange {
case s @ Seq(_, _) => Some(s.min)
case s => s.headOption
}
try Option(deadline match {
case Some(d: Deadline) =>
commandQueue.poll(d.timeLeft.toMillis + 1, TimeUnit.MILLISECONDS) match {
case null if idleDeadline.fold(false)(_.isOverdue) =>
state.foreach { s =>
s.get(BasicKeys.serverIdleTimeout) match {
case Some(Some(d)) => s.log.info(s"sbt idle timeout of $d expired")
case _ =>
try
Option(deadline match {
case Some(d: Deadline) =>
commandQueue.poll(d.timeLeft.toMillis + 1, TimeUnit.MILLISECONDS) match {
case null if idleDeadline.fold(false)(_.isOverdue) =>
state.foreach { s =>
s.get(BasicKeys.serverIdleTimeout) match {
case Some(Some(d)) => s.log.info(s"sbt idle timeout of $d expired")
case _ =>
}
}
}
Exec(TerminateAction, Some(CommandSource(ConsoleChannel.defaultName)))
case x => x
}
case _ => commandQueue.take
})
Exec(TerminateAction, Some(CommandSource(ConsoleChannel.defaultName)))
case x => x
}
case _ => commandQueue.take
})
catch { case _: InterruptedException => None }
}
poll match {
@ -134,10 +135,13 @@ private[sbt] final class CommandExchange {
}
}
// Do not manually run GC until the user has been idling for at least the min gc interval.
impl(interval match {
case d: FiniteDuration => Some(d.fromNow)
case _ => None
}, idleDeadline)
impl(
interval match {
case d: FiniteDuration => Some(d.fromNow)
case _ => None
},
idleDeadline
)
}
private def addConsoleChannel(): Unit =
@ -210,7 +214,9 @@ private[sbt] final class CommandExchange {
if (server.isEmpty && firstInstance.get) {
val h = Hash.halfHashString(IO.toURI(portfile).toString)
val serverDir =
sys.env get "SBT_GLOBAL_SERVER_DIR" map file getOrElse BuildPaths.getGlobalBase(s) / "server"
sys.env get "SBT_GLOBAL_SERVER_DIR" map file getOrElse BuildPaths.getGlobalBase(
s
) / "server"
val tokenfile = serverDir / h / "token.json"
val socketfile = serverDir / h / "sock"
val pipeName = "sbt-server-" + h
@ -290,7 +296,7 @@ private[sbt] final class CommandExchange {
// interrupt and kill the thread
server.foreach(_.shutdown())
server = None
EvaluateTask.onShutdown
EvaluateTask.onShutdown()
}
// This is an interface to directly respond events.

View File

@ -12,7 +12,7 @@ import sbt.io.Path
object CommandStrings {
/** The prefix used to identify a request to execute the remaining input on source changes.*/
/** The prefix used to identify a request to execute the remaining input on source changes. */
val AboutCommand = "about"
val TasksCommand = "tasks"
val SettingsCommand = "settings"

View File

@ -432,7 +432,7 @@ private[sbt] object CrossJava {
val base: File = Path.userHome / ".sdkman" / "candidates" / "java"
def candidates(): Vector[String] = wrapNull(base.list())
def javaHomes: Vector[(String, File)] =
candidates.collect {
candidates().collect {
case dir if dir.contains("-") =>
CrossJava.parseSdkmanString(dir) match {
case Success(v) => Some(v.toString -> (base / dir))

View File

@ -182,7 +182,7 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
while (jobSet.nonEmpty && !deadline.isOverdue) {
jobSet.headOption.foreach {
case handle: ThreadJobHandle @unchecked =>
if (handle.job.isRunning) {
if (handle.job.isRunning()) {
handle.job.shutdown()
handle.job.awaitTerminationTry(10.seconds)
}
@ -451,11 +451,12 @@ private[sbt] class BackgroundThreadPool extends java.io.Closeable {
) extends BackgroundRunnable(taskName, body) {
override def awaitTermination(duration: Duration): Unit = {
try super.awaitTermination(duration)
finally loader.foreach {
case ac: AutoCloseable => ac.close()
case cp: ClasspathFilter => cp.close()
case _ =>
}
finally
loader.foreach {
case ac: AutoCloseable => ac.close()
case cp: ClasspathFilter => cp.close()
case _ =>
}
}
}

View File

@ -105,7 +105,7 @@ object GlobalPlugin {
withStreams(structure, state) { str =>
val nv = nodeView(state, str, roots)
val config = EvaluateTask.extractedTaskConfig(Project.extract(state), structure, state)
val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(nv)
val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(using nv)
(newS, processResult2(result))
}
}

View File

@ -100,10 +100,12 @@ private[sbt] object InstallSbtn {
try {
val result = new Array[Byte](1024 * 1024)
var bytesRead = -1
do {
def impl(): Unit = {
bytesRead = inputStream.read(result)
if (bytesRead > 0) os.write(result, 0, bytesRead)
} while (bytesRead > 0)
}
impl()
while bytesRead > 0 do impl()
} finally os.close()
} finally inputStream.close()
private[this] def getShell(term: Terminal): String = {
@ -139,12 +141,13 @@ private[sbt] object InstallSbtn {
setCompletions: Path => String,
): Unit = {
val bin = baseDirectory.resolve("bin")
val export = setPath(bin)
val exp = setPath(bin)
val completions = baseDirectory.resolve("completions")
val sourceCompletions = setCompletions(completions)
val contents = try IO.read(configFile)
catch { case _: IOException => "" }
if (!contents.contains(export)) {
val contents =
try IO.read(configFile)
catch { case _: IOException => "" }
if (!contents.contains(exp)) {
term.printStream.print(s"Add $bin to PATH in $configFile? y/n (y default): ")
term.printStream.flush()
term.inputStream.read() match {
@ -153,11 +156,12 @@ private[sbt] object InstallSbtn {
term.printStream.println(c.toChar)
// put the export at the bottom so that the ~/.sbt/1.0/bin/sbtn is least preferred
// but still on the path
IO.write(configFile, s"$contents\n$export")
IO.write(configFile, s"$contents\n$exp")
}
}
val newContents = try IO.read(configFile)
catch { case _: IOException => "" }
val newContents =
try IO.read(configFile)
catch { case _: IOException => "" }
if (!newContents.contains(sourceCompletions)) {
term.printStream.print(s"Add tab completions to $configFile? y/n (y default): ")
term.printStream.flush()

View File

@ -17,18 +17,17 @@ private[sbt] object InternalDependencies {
val projectDependencies = buildDependencies.value.classpath.get(ref).toSeq.flatten
val applicableConfigs = allConfigs + "*"
((ref -> allConfigs) +:
projectDependencies.flatMap {
case ResolvedClasspathDependency(p, rawConfigs) =>
val configs = rawConfigs.getOrElse("*->compile").split(";").flatMap { config =>
config.split("->") match {
case Array(n, c) if applicableConfigs.contains(n) => Some(c)
case Array(n) if applicableConfigs.contains(n) =>
// "test" is equivalent to "compile->test"
Some("compile")
case _ => None
}
projectDependencies.flatMap { case ResolvedClasspathDependency(p, rawConfigs) =>
val configs = rawConfigs.getOrElse("*->compile").split(";").flatMap { config =>
config.split("->") match {
case Array(n, c) if applicableConfigs.contains(n) => Some(c)
case Array(n) if applicableConfigs.contains(n) =>
// "test" is equivalent to "compile->test"
Some("compile")
case _ => None
}
if (configs.isEmpty) None else Some(p -> configs.toSet)
}
if (configs.isEmpty) None else Some(p -> configs.toSet)
}).distinct
}
}

View File

@ -33,9 +33,12 @@ object IvyConsole {
final val Name = "ivy-console"
lazy val command =
Command.command(Name) { state =>
val Dependencies(managed, repos, unmanaged) = parseDependencies(state.remainingCommands map {
_.commandLine
}, state.log)
val Dependencies(managed, repos, unmanaged) = parseDependencies(
state.remainingCommands map {
_.commandLine
},
state.log
)
val base = new File(CommandUtil.bootDirectory(state), Name)
IO.createDirectory(base)

View File

@ -24,7 +24,6 @@ import scala.collection.JavaConverters._
* If the top layer needs to load a class from the bottom layer via java reflection, we facilitate
* that with the `ReverseLookupClassLoader`.
*
*
* This holder caches the ReverseLookupClassLoader, which is the top loader in this hierarchy. The
* checkout method will get the RevereLookupClassLoader from the cache or make a new one if
* none is available. It will only cache at most one so if multiple concurrently tasks have the

View File

@ -97,13 +97,12 @@ object LintUnused {
if (size == 1) buffer.append("there's a key that's not used by any other settings/tasks:")
else buffer.append(s"there are $size keys that are not used by any other settings/tasks:")
buffer.append(" ")
result foreach {
case (_, str, positions) =>
buffer.append(s"* $str")
positions foreach {
case pos: FilePosition => buffer.append(s" +- ${pos.path}:${pos.startLine}")
case _ => ()
}
result foreach { case (_, str, positions) =>
buffer.append(s"* $str")
positions foreach {
case pos: FilePosition => buffer.append(s" +- ${pos.path}:${pos.startLine}")
case _ => ()
}
}
buffer.append(" ")
buffer.append(

View File

@ -135,7 +135,7 @@ object PluginDiscovery {
}
}
/** Returns `true` if `url` is an entry in `classpath`.*/
/** Returns `true` if `url` is an entry in `classpath`. */
def onClasspath(classpath: Seq[File])(url: URL): Boolean =
IO.urlAsFile(url) exists (classpath.contains _)

View File

@ -447,7 +447,7 @@ private[sbt] object PluginsDebug {
private[this] def excludedPluginsError(transitive: Boolean)(dependencies: List[AutoPlugin]) =
s"Required ${transitiveString(transitive)}dependencies were excluded:\n\t${labels(dependencies)
.mkString("\n\t")}"
.mkString("\n\t")}"
private[this] def transitiveString(transitive: Boolean) =
if (transitive) "(transitive) " else ""

View File

@ -42,8 +42,7 @@ object Resolve {
def resolveConfig[P](index: BuildUtil[P], key: AttributeKey[_], mask: ScopeMask)(
scope: Scope,
): Scope =
if (mask.config)
scope
if (mask.config) scope
else {
val (resolvedRef, proj) = scope.project match {
case Zero | This => (None, index.thisRootProject)

View File

@ -76,8 +76,7 @@ object Script {
def blocks(file: File): Seq[Block] = {
val lines = IO.readLines(file).toIndexedSeq
def blocks(b: Block, acc: List[Block]): List[Block] =
if (b.lines.isEmpty)
acc.reverse
if (b.lines.isEmpty) acc.reverse
else {
val (dropped, blockToEnd) = b.lines.span { line =>
!line.startsWith(BlockStart)

Some files were not shown because too many files have changed in this diff Show More