diff --git a/build.sbt b/build.sbt index cc100d090..e7a669496 100644 --- a/build.sbt +++ b/build.sbt @@ -668,7 +668,6 @@ lazy val actionsProj = (project in file("main-actions")) name := "Actions", libraryDependencies += sjsonNewScalaJson.value, libraryDependencies += jline3Terminal, - libraryDependencies += eval, mimaSettings, mimaBinaryIssueFilters ++= Seq( // Removed unused private[sbt] nested class @@ -816,6 +815,7 @@ lazy val mainSettingsProj = (project in file("main-settings")) commandProj, stdTaskProj, coreMacrosProj, + logicProj, utilLogging, utilCache, utilRelation, @@ -889,11 +889,27 @@ lazy val zincLmIntegrationProj = (project in file("zinc-lm-integration")) ) .configure(addSbtZincCompileCore, addSbtLmCore, addSbtLmIvyTest) +lazy val buildFileProj = (project in file("buildfile")) + .dependsOn( + mainSettingsProj, + ) + .settings( + testedBaseSettings, + name := "build file", + libraryDependencies ++= Seq(scalaCompiler), + ) + .configure( + addSbtIO, + addSbtLmCore, + addSbtLmIvy, + addSbtCompilerInterface, + addSbtZincCompile + ) + // The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions. lazy val mainProj = (project in file("main")) .enablePlugins(ContrabandPlugin) .dependsOn( - logicProj, actionsProj, mainSettingsProj, runProj, @@ -925,128 +941,8 @@ lazy val mainProj = (project in file("main")) Test / testOptions += Tests .Argument(TestFrameworks.ScalaCheck, "-minSuccessfulTests", "1000"), SettingKey[Boolean]("usePipelining") := false, - mimaSettings, - mimaBinaryIssueFilters ++= Vector( - // New and changed methods on KeyIndex. internal. - exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"), - // internal - exclude[IncompatibleMethTypeProblem]("sbt.internal.*"), - // Changed signature or removed private[sbt] methods - exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"), - exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"), - exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"), - exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"), - exclude[DirectMissingMethodProblem]("sbt.StandardMain.shutdownHook"), - exclude[DirectMissingMethodProblem]("sbt.nio.Keys.compileBinaryFileInputs"), - exclude[DirectMissingMethodProblem]("sbt.nio.Keys.compileSourceFileInputs"), - exclude[MissingClassProblem]("sbt.internal.ResourceLoaderImpl"), - exclude[IncompatibleSignatureProblem]("sbt.internal.ConfigIndex.*"), - exclude[IncompatibleSignatureProblem]("sbt.internal.Inspect.*"), - exclude[IncompatibleSignatureProblem]("sbt.internal.ProjectIndex.*"), - exclude[IncompatibleSignatureProblem]("sbt.internal.BuildIndex.*"), - exclude[IncompatibleSignatureProblem]("sbt.internal.server.BuildServerReporter.*"), - exclude[VirtualStaticMemberProblem]("sbt.internal.server.LanguageServerProtocol.*"), - exclude[IncompatibleSignatureProblem]("sbt.internal.librarymanagement.IvyXml.*"), - exclude[IncompatibleSignatureProblem]("sbt.ScriptedPlugin.*Settings"), - exclude[IncompatibleSignatureProblem]("sbt.plugins.SbtPlugin.*Settings"), - // Removed private internal classes - exclude[MissingClassProblem]("sbt.internal.ReverseLookupClassLoaderHolder$BottomClassLoader"), - exclude[MissingClassProblem]( - "sbt.internal.ReverseLookupClassLoaderHolder$ReverseLookupClassLoader$ResourceLoader" - ), - exclude[MissingClassProblem]("sbt.internal.ReverseLookupClassLoaderHolder$ClassLoadingLock"), - exclude[MissingClassProblem]( - "sbt.internal.ReverseLookupClassLoaderHolder$ReverseLookupClassLoader" - ), - exclude[MissingClassProblem]("sbt.internal.LayeredClassLoaderImpl"), - exclude[MissingClassProblem]("sbt.internal.FileManagement"), - exclude[MissingClassProblem]("sbt.internal.FileManagement$"), - exclude[MissingClassProblem]("sbt.internal.FileManagement$CopiedFileTreeRepository"), - exclude[MissingClassProblem]("sbt.internal.server.LanguageServerReporter*"), - exclude[MissingClassProblem]("sbt.internal.ExternalHooks"), - exclude[MissingClassProblem]("sbt.internal.ExternalHooks$"), - // false positives - exclude[DirectMissingMethodProblem]("sbt.plugins.IvyPlugin.requires"), - exclude[DirectMissingMethodProblem]("sbt.plugins.JUnitXmlReportPlugin.requires"), - exclude[DirectMissingMethodProblem]("sbt.plugins.Giter8TemplatePlugin.requires"), - exclude[DirectMissingMethodProblem]("sbt.plugins.JvmPlugin.requires"), - exclude[DirectMissingMethodProblem]("sbt.plugins.SbtPlugin.requires"), - exclude[DirectMissingMethodProblem]("sbt.ResolvedClasspathDependency.apply"), - exclude[DirectMissingMethodProblem]("sbt.ClasspathDependency.apply"), - exclude[IncompatibleSignatureProblem]("sbt.plugins.SemanticdbPlugin.globalSettings"), - // File -> Source - exclude[DirectMissingMethodProblem]("sbt.Defaults.cleanFilesTask"), - exclude[IncompatibleSignatureProblem]("sbt.Defaults.resourceConfigPaths"), - exclude[IncompatibleSignatureProblem]("sbt.Defaults.sourceConfigPaths"), - exclude[IncompatibleSignatureProblem]("sbt.Defaults.configPaths"), - exclude[IncompatibleSignatureProblem]("sbt.Defaults.paths"), - exclude[IncompatibleSignatureProblem]("sbt.Keys.csrPublications"), - exclude[IncompatibleSignatureProblem]( - "sbt.coursierint.CoursierArtifactsTasks.coursierPublicationsTask" - ), - exclude[IncompatibleSignatureProblem]( - "sbt.coursierint.CoursierArtifactsTasks.coursierPublicationsTask" - ), - exclude[IncompatibleSignatureProblem]("sbt.coursierint.LMCoursier.coursierConfiguration"), - exclude[IncompatibleSignatureProblem]("sbt.coursierint.LMCoursier.publicationsSetting"), - exclude[IncompatibleSignatureProblem]("sbt.Project.inThisBuild"), - exclude[IncompatibleSignatureProblem]("sbt.Project.inConfig"), - exclude[IncompatibleSignatureProblem]("sbt.Project.inTask"), - exclude[IncompatibleSignatureProblem]("sbt.Project.inScope"), - exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inThisBuild"), - exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inConfig"), - exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inTask"), - exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inScope"), - exclude[MissingTypesProblem]("sbt.internal.Load*"), - exclude[IncompatibleSignatureProblem]("sbt.internal.Load*"), - exclude[MissingTypesProblem]("sbt.internal.server.NetworkChannel"), - // IvyConfiguration was replaced by InlineIvyConfiguration in the generic - // signature, this does not break compatibility regardless of what - // cast a compiler might have inserted based on the old signature - // since we're returning the same values as before. - exclude[IncompatibleSignatureProblem]("sbt.Classpaths.mkIvyConfiguration"), - exclude[IncompatibleMethTypeProblem]("sbt.internal.server.Definition*"), - exclude[IncompatibleTemplateDefProblem]("sbt.internal.server.LanguageServerProtocol"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.warnInsecureProtocol"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.warnInsecureProtocolInModules"), - exclude[MissingClassProblem]("sbt.internal.ExternalHooks*"), - // This seems to be a mima problem. The older constructor still exists but - // mima seems to incorrectly miss the secondary constructor that provides - // the binary compatible version. - exclude[IncompatibleMethTypeProblem]("sbt.internal.server.NetworkChannel.this"), - exclude[IncompatibleSignatureProblem]("sbt.internal.DeprecatedContinuous.taskDefinitions"), - exclude[MissingClassProblem]("sbt.internal.SettingsGraph*"), - // Tasks include non-Files, but it's ok - exclude[IncompatibleSignatureProblem]("sbt.Defaults.outputConfigPaths"), - // private[sbt] - exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedProducts"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedJarProducts"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedDependencies0"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependenciesImplTask"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependencyJarsImplTask"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.interDependencies"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.productsTask"), - exclude[DirectMissingMethodProblem]("sbt.Classpaths.jarProductsTask"), - exclude[DirectMissingMethodProblem]("sbt.StandardMain.cache"), - // internal logging apis, - exclude[IncompatibleSignatureProblem]("sbt.internal.LogManager*"), - exclude[MissingTypesProblem]("sbt.internal.RelayAppender"), - exclude[MissingClassProblem]("sbt.internal.TaskProgress$ProgressThread"), - // internal implementation - exclude[MissingClassProblem]( - "sbt.internal.XMainConfiguration$ModifiedConfiguration$ModifiedAppProvider$ModifiedScalaProvider$" - ), - // internal impl - exclude[IncompatibleSignatureProblem]("sbt.internal.Act.configIdent"), - exclude[IncompatibleSignatureProblem]("sbt.internal.Act.taskAxis"), - // private[sbt] method, used to call the correct sourcePositionMapper - exclude[DirectMissingMethodProblem]("sbt.Defaults.foldMappers"), - exclude[DirectMissingMethodProblem]("sbt.Defaults.toAbsoluteSourceMapper"), - exclude[DirectMissingMethodProblem]("sbt.Defaults.earlyArtifactPathSetting"), - exclude[MissingClassProblem]("sbt.internal.server.BuildServerReporter$"), - exclude[IncompatibleTemplateDefProblem]("sbt.internal.server.BuildServerReporter"), - exclude[MissingClassProblem]("sbt.internal.CustomHttp*"), - ) + // mimaSettings, + // mimaBinaryIssueFilters ++= Vector(), ) .configure( addSbtIO, @@ -1362,6 +1258,7 @@ def allProjects = sbtProj, bundledLauncherProj, sbtClientProj, + buildFileProj, ) ++ lowerUtilProjects // These need to be cross published to 2.12 and 2.13 for Zinc diff --git a/main/src/main/scala/sbt/internal/DslEntry.scala b/buildfile/src/main/scala/sbt/internal/DslEntry.scala similarity index 100% rename from main/src/main/scala/sbt/internal/DslEntry.scala rename to buildfile/src/main/scala/sbt/internal/DslEntry.scala diff --git a/buildfile/src/main/scala/sbt/internal/Eval.scala b/buildfile/src/main/scala/sbt/internal/Eval.scala new file mode 100644 index 000000000..93aff343f --- /dev/null +++ b/buildfile/src/main/scala/sbt/internal/Eval.scala @@ -0,0 +1,438 @@ +package sbt +package internal + +import dotty.tools.dotc.ast +import dotty.tools.dotc.ast.{ tpd, untpd } +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.config.ScalaSettings +import dotty.tools.dotc.core.Contexts.{ atPhase, Context } +import dotty.tools.dotc.core.{ Flags, Names, Phases, Symbols, Types } +import dotty.tools.dotc.Driver +import dotty.tools.dotc.parsing.Parsers.Parser +import dotty.tools.dotc.reporting.Reporter +import dotty.tools.dotc.Run +import dotty.tools.dotc.util.SourceFile +import dotty.tools.io.{ PlainDirectory, Directory, VirtualDirectory, VirtualFile } +import dotty.tools.repl.AbstractFileClassLoader +import java.net.URLClassLoader +import java.nio.charset.StandardCharsets +import java.nio.file.{ Files, Path, Paths, StandardOpenOption } +import java.security.MessageDigest +import scala.collection.JavaConverters.* +import scala.quoted.* +import sbt.io.Hash + +/** + * - nonCpOptions - non-classpath options + * - classpath - classpath used for evaluation + * - backingDir - directory to save `*.class` files + * - mkReporter - an optional factory method to create a reporter + */ +class Eval( + nonCpOptions: Seq[String], + classpath: Seq[Path], + backingDir: Option[Path], + mkReporter: Option[() => Reporter] +): + import Eval.* + + backingDir.foreach { dir => + Files.createDirectories(dir) + } + private val classpathString = classpath.map(_.toString).mkString(":") + private val outputDir = + backingDir match + case Some(dir) => PlainDirectory(Directory(dir.toString)) + case None => VirtualDirectory("output") + private lazy val driver: EvalDriver = new EvalDriver + private lazy val reporter = mkReporter match + case Some(fn) => fn() + case None => EvalReporter.store + + final class EvalDriver extends Driver: + import dotty.tools.dotc.config.Settings.Setting._ + val compileCtx0 = initCtx.fresh + val options = nonCpOptions ++ Seq("-classpath", classpathString, "dummy.scala") + val compileCtx1 = setup(options.toArray, compileCtx0) match + case Some((_, ctx)) => ctx + case _ => sys.error(s"initialization failed for $options") + val compileCtx2 = compileCtx1.fresh + .setSetting( + compileCtx1.settings.outputDir, + outputDir + ) + .setReporter(reporter) + val compileCtx = compileCtx2 + val compiler = newCompiler(using compileCtx) + end EvalDriver + + def eval(expression: String, tpeName: Option[String]): EvalResult = + eval(expression, noImports, tpeName, "", Eval.DefaultStartLine) + + def evalInfer(expression: String): EvalResult = + eval(expression, noImports, None, "", Eval.DefaultStartLine) + + def evalInfer(expression: String, imports: EvalImports): EvalResult = + eval(expression, imports, None, "", Eval.DefaultStartLine) + + def eval( + expression: String, + imports: EvalImports, + tpeName: Option[String], + srcName: String, + line: Int + ): EvalResult = + val ev = new EvalType[String]: + override def makeSource(moduleName: String): SourceFile = + val returnType = tpeName match + case Some(tpe) => s": $tpe" + case _ => "" + val header = + imports.strings.mkString("\n") + + s""" + |object $moduleName { + | def $WrapValName${returnType} = {""".stripMargin + val contents = s"""$header + |$expression + | } + |} + |""".stripMargin + val startLine = header.linesIterator.toList.size + EvalSourceFile(srcName, startLine, contents) + + override def extract(run: Run, unit: CompilationUnit)(using ctx: Context): String = + atPhase(Phases.typerPhase.next) { + (new TypeExtractor).getType(unit.tpdTree) + } + + override def read(file: Path): String = + String(Files.readAllBytes(file), StandardCharsets.UTF_8) + + override def write(value: String, file: Path): Unit = + Files.write( + file, + value.getBytes(StandardCharsets.UTF_8), + StandardOpenOption.CREATE, + StandardOpenOption.TRUNCATE_EXISTING + ) + + override def extraHash: String = "" + + val inter = evalCommon[String](expression :: Nil, imports, tpeName, ev) + val valueFn = (cl: ClassLoader) => getValue[Any](inter.enclosingModule, inter.loader(cl)) + EvalResult( + tpe = inter.extra, + getValue = valueFn, + generated = inter.generated, + ) + end eval + + def evalDefinitions( + definitions: Seq[(String, scala.Range)], + imports: EvalImports, + srcName: String, + valTypes: Seq[String], + ): EvalDefinitions = + evalDefinitions(definitions, imports, srcName, valTypes, "") + + def evalDefinitions( + definitions: Seq[(String, scala.Range)], + imports: EvalImports, + srcName: String, + valTypes: Seq[String], + extraHash: String, + ): EvalDefinitions = + require(definitions.nonEmpty, "definitions to evaluate cannot be empty.") + val extraHash0 = extraHash + val ev = new EvalType[Seq[String]]: + override def makeSource(moduleName: String): SourceFile = + val header = + imports.strings.mkString("\n") + + s""" + |object $moduleName {""".stripMargin + val contents = + s"""$header + |${definitions.map(_._1).mkString("\n")} + |} + |""".stripMargin + val startLine = header.linesIterator.toList.size + EvalSourceFile(srcName, startLine, contents) + + override def extract(run: Run, unit: CompilationUnit)(using ctx: Context): Seq[String] = + atPhase(Phases.typerPhase.next) { + (new ValExtractor(valTypes.toSet)).getVals(unit.tpdTree) + }(using run.runContext) + + override def read(file: Path): Seq[String] = + new String(Files.readAllBytes(file), StandardCharsets.UTF_8).linesIterator.toList + + override def write(value: Seq[String], file: Path): Unit = + Files.write( + file, + value.mkString("\n").getBytes(StandardCharsets.UTF_8), + StandardOpenOption.CREATE, + StandardOpenOption.TRUNCATE_EXISTING + ) + + override def extraHash: String = extraHash0 + + val inter = evalCommon[Seq[String]](definitions.map(_._1), imports, tpeName = Some(""), ev) + EvalDefinitions(inter.loader, inter.generated, inter.enclosingModule, inter.extra.reverse) + + end evalDefinitions + + private[this] def evalCommon[A]( + content: Seq[String], + imports: EvalImports, + tpeName: Option[String], + ev: EvalType[A], + ): EvalIntermediate[A] = + import Eval.* + // This is a hot path. + val digester = MessageDigest.getInstance("SHA") + content.foreach { c => + digester.update(bytes(c)) + } + tpeName.foreach { tpe => + digester.update(bytes(tpe)) + } + digester.update(bytes(ev.extraHash)) + val d = digester.digest() + val hash = Hash.toHex(d) + val moduleName = makeModuleName(hash) + val (extra, loader) = backingDir match + case Some(backing) if classExists(backing, moduleName) => + val loader = (parent: ClassLoader) => + (new URLClassLoader(Array(backing.toUri.toURL), parent): ClassLoader) + val extra = ev.read(cacheFile(backing, moduleName)) + (extra, loader) + case _ => compileAndLoad(ev, moduleName) + val generatedFiles = getGeneratedFiles(moduleName) + EvalIntermediate( + extra = extra, + loader = loader, + generated = generatedFiles, + enclosingModule = moduleName, + ) + + // location of the cached type or definition information + private[this] def cacheFile(base: Path, moduleName: String): Path = + base.resolve(moduleName + ".cache") + + private[this] def compileAndLoad[A]( + ev: EvalType[A], + moduleName: String, + ): (A, ClassLoader => ClassLoader) = + given rootCtx: Context = driver.compileCtx + val run = driver.compiler.newRun + val source = ev.makeSource(moduleName) + run.compileSources(source :: Nil) + checkError("an error in expression") + val unit = run.units.head + val extra: A = ev.extract(run, unit) + backingDir.foreach { backing => + ev.write(extra, cacheFile(backing, moduleName)) + } + val loader = (parent: ClassLoader) => AbstractFileClassLoader(outputDir, parent) + (extra, loader) + + private[this] final class EvalIntermediate[A]( + val extra: A, + val loader: ClassLoader => ClassLoader, + val generated: Seq[Path], + val enclosingModule: String, + ) + + private[this] def classExists(dir: Path, name: String): Boolean = + Files.exists(dir.resolve(s"$name.class")) + + private[this] def getGeneratedFiles(moduleName: String): Seq[Path] = + backingDir match + case Some(dir) => + asScala( + Files + .list(dir) + .filter(!Files.isDirectory(_)) + .filter(_.getFileName.toString.contains(moduleName)) + .iterator + ).toList + case None => Nil + + private[this] def makeModuleName(hash: String): String = "$Wrap" + hash.take(10) + + private[this] def checkError(label: String)(using ctx: Context): Unit = + if ctx.reporter.hasErrors then + throw new EvalException(label + ": " + ctx.reporter.allErrors.head.toString) + else () +end Eval + +object Eval: + private[sbt] val DefaultStartLine = 0 + + lazy val noImports = EvalImports(Nil) + + def apply(): Eval = + new Eval(Nil, currentClasspath, None, None) + + def apply(mkReporter: () => Reporter): Eval = + new Eval(Nil, currentClasspath, None, Some(mkReporter)) + + def apply( + backingDir: Path, + mkReporter: () => Reporter, + ): Eval = + new Eval(Nil, currentClasspath, Some(backingDir), Some(mkReporter)) + + def apply( + nonCpOptions: Seq[String], + backingDir: Path, + mkReporter: () => Reporter, + ): Eval = + new Eval(nonCpOptions, currentClasspath, Some(backingDir), Some(mkReporter)) + + inline def apply[A](expression: String): A = ${ evalImpl[A]('{ expression }) } + private def thisClassLoader = this.getClass.getClassLoader + def evalImpl[A: Type](expression: Expr[String])(using qctx: Quotes): Expr[A] = + import quotes.reflect._ + val sym = TypeRepr.of[A].typeSymbol + val fullName = Expr(sym.fullName) + '{ + Eval().eval($expression, Some($fullName)).getValue(thisClassLoader).asInstanceOf[A] + } + + def currentClasspath: Seq[Path] = + val cl = ClassLoader.getSystemClassLoader() + val urls = cl.asInstanceOf[URLClassLoader].getURLs().toList + urls.map(_.getFile).map(Paths.get(_)) + + def bytes(s: String): Array[Byte] = s.getBytes("UTF-8") + + /** The name of the synthetic val in the synthetic module that an expression is assigned to. */ + private[sbt] final val WrapValName = "$sbtdef" + + // used to map the position offset + class EvalSourceFile(name: String, startLine: Int, contents: String) + extends SourceFile( + new VirtualFile(name, contents.getBytes(StandardCharsets.UTF_8)), + scala.io.Codec.UTF8 + ): + override def lineToOffset(line: Int): Int = super.lineToOffset((line + startLine) max 0) + override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) - startLine + end EvalSourceFile + + trait EvalType[A]: + def makeSource(moduleName: String): SourceFile + + /** Extracts additional information after the compilation unit is evaluated. */ + def extract(run: Run, unit: CompilationUnit)(using ctx: Context): A + + /** Deserializes the extra information for unchanged inputs from a cache file. */ + def read(file: Path): A + + /** + * Serializes the extra information to a cache file, where it can be `read` back if inputs + * haven't changed. + */ + def write(value: A, file: Path): Unit + + /** Extra information to include in the hash'd object name to help avoid collisions. */ + def extraHash: String + end EvalType + + class TypeExtractor extends tpd.TreeTraverser: + private[this] var result = "" + def getType(t: tpd.Tree)(using ctx: Context): String = + result = "" + this((), t) + result + override def traverse(tree: tpd.Tree)(using ctx: Context): Unit = + tree match + case tpd.DefDef(name, _, tpt, _) if name.toString == WrapValName => + result = tpt.typeOpt.show + case t: tpd.Template => this((), t.body) + case t: tpd.PackageDef => this((), t.stats) + case t: tpd.TypeDef => this((), t.rhs) + case _ => () + end TypeExtractor + + /** + * Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of + * one of `types`. + */ + class ValExtractor(tpes: Set[String]) extends tpd.TreeTraverser: + private[this] var vals = List[String]() + + def getVals(t: tpd.Tree)(using ctx: Context): List[String] = + vals = Nil + traverse(t) + vals + + def isAcceptableType(tpe: Types.Type)(using ctx: Context): Boolean = + tpe.baseClasses.exists { sym => + tpes.contains(sym.fullName.toString) + } + + def isTopLevelModule(sym: Symbols.Symbol)(using ctx: Context): Boolean = + (sym is Flags.Module) && (sym.owner is Flags.ModuleClass) + + override def traverse(tree: tpd.Tree)(using ctx: Context): Unit = + tree match + case tpd.ValDef(name, tpt, _) + if isTopLevelModule(tree.symbol.owner) && isAcceptableType(tpt.tpe) => + vals ::= name.mangledString + case t: tpd.Template => this((), t.body) + case t: tpd.PackageDef => this((), t.stats) + case t: tpd.TypeDef => this((), t.rhs) + case _ => () + end ValExtractor + + /** + * Gets the value of the expression wrapped in module `objectName`, which is accessible via + * `loader`. The module name should not include the trailing `$`. + */ + def getValue[A](objectName: String, loader: ClassLoader): A = + val module = getModule(objectName, loader) + val accessor = module.getClass.getMethod(WrapValName) + val value = accessor.invoke(module) + value.asInstanceOf[A] + + /** + * Gets the top-level module `moduleName` from the provided class `loader`. The module name should + * not include the trailing `$`. + */ + def getModule(moduleName: String, loader: ClassLoader): Any = + val clazz = Class.forName(moduleName + "$", true, loader) + clazz.getField("MODULE$").get(null) +end Eval + +final class EvalResult( + val tpe: String, + val getValue: ClassLoader => Any, + val generated: Seq[Path], +) + +/** + * The result of evaluating a group of Scala definitions. The definitions are wrapped in an + * auto-generated, top-level module named `enclosingModule`. `generated` contains the compiled + * classes and cache files related to the definitions. A new class loader containing the module may + * be obtained from `loader` by passing the parent class loader providing the classes from the + * classpath that the definitions were compiled against. The list of vals with the requested types + * is `valNames`. The values for these may be obtained by providing the parent class loader to + * `values` as is done with `loader`. + */ +final class EvalDefinitions( + val loader: ClassLoader => ClassLoader, + val generated: Seq[Path], + val enclosingModule: String, + val valNames: Seq[String] +): + def values(parent: ClassLoader): Seq[Any] = { + val module = Eval.getModule(enclosingModule, loader(parent)) + for n <- valNames + yield module.getClass.getMethod(n).invoke(module) + } +end EvalDefinitions + +final class EvalException(msg: String) extends RuntimeException(msg) + +final class EvalImports(val strings: Seq[String]) diff --git a/buildfile/src/main/scala/sbt/internal/EvalReporter.scala b/buildfile/src/main/scala/sbt/internal/EvalReporter.scala new file mode 100644 index 000000000..d75890805 --- /dev/null +++ b/buildfile/src/main/scala/sbt/internal/EvalReporter.scala @@ -0,0 +1,19 @@ +package sbt +package internal + +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.reporting.ConsoleReporter +import dotty.tools.dotc.reporting.Diagnostic +import dotty.tools.dotc.reporting.Reporter +import dotty.tools.dotc.reporting.StoreReporter + +abstract class EvalReporter extends Reporter + +object EvalReporter: + def console: EvalReporter = ForwardingReporter(ConsoleReporter()) + def store: EvalReporter = ForwardingReporter(StoreReporter()) +end EvalReporter + +class ForwardingReporter(delegate: Reporter) extends EvalReporter: + def doReport(dia: Diagnostic)(using Context): Unit = delegate.doReport(dia) +end ForwardingReporter diff --git a/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala b/buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala similarity index 77% rename from main/src/main/scala/sbt/internal/EvaluateConfigurations.scala rename to buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala index acc6d0ab5..889d68f5f 100644 --- a/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala +++ b/buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala @@ -18,15 +18,16 @@ import sbt.internal.util.{ } import java.io.File -import compiler.{ Eval, EvalImports } +import java.nio.file.Path import sbt.internal.util.complete.DefaultParsers.validID import Def.{ ScopedKey, Setting } -import Scope.GlobalScope -import sbt.SlashSyntax0._ +// import Scope.GlobalScope +// import sbt.SlashSyntax0._ import sbt.internal.parser.SbtParser - import sbt.io.IO import scala.collection.JavaConverters._ +import xsbti.VirtualFile +import xsbti.VirtualFileRef /** * This file is responsible for compiling the .sbt files used to configure sbt builds. @@ -39,9 +40,12 @@ import scala.collection.JavaConverters._ */ private[sbt] object EvaluateConfigurations { - type LazyClassLoaded[T] = ClassLoader => T + type LazyClassLoaded[A] = ClassLoader => A - private[sbt] case class TrackedEvalResult[T](generated: Seq[File], result: LazyClassLoaded[T]) + private[sbt] case class TrackedEvalResult[A]( + generated: Seq[Path], + result: LazyClassLoaded[A] + ) /** * This represents the parsed expressions in a build sbt, as well as where they were defined. @@ -61,9 +65,13 @@ private[sbt] object EvaluateConfigurations { * return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has * raw sbt-types that can be accessed and used. */ - def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] = { - val loadFiles = srcs.sortBy(_.getName) map { src => - evaluateSbtFile(eval, src, IO.readLines(src), imports, 0) + def apply( + eval: Eval, + srcs: Seq[VirtualFile], + imports: Seq[String], + ): LazyClassLoaded[LoadedSbtFile] = { + val loadFiles = srcs.sortBy(_.name) map { src => + evaluateSbtFile(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0) } loader => loadFiles.foldLeft(LoadedSbtFile.empty) { (loaded, load) => @@ -78,10 +86,10 @@ private[sbt] object EvaluateConfigurations { */ def evaluateConfiguration( eval: Eval, - src: File, + src: VirtualFile, imports: Seq[String] ): LazyClassLoaded[Seq[Setting[_]]] = - evaluateConfiguration(eval, src, IO.readLines(src), imports, 0) + evaluateConfiguration(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0) /** * Parses a sequence of build.sbt lines into a [[ParsedFile]]. The result contains @@ -90,7 +98,7 @@ private[sbt] object EvaluateConfigurations { * @param builtinImports The set of import statements to add to those parsed in the .sbt file. */ private[this] def parseConfiguration( - file: File, + file: VirtualFileRef, lines: Seq[String], builtinImports: Seq[String], offset: Int @@ -115,7 +123,7 @@ private[sbt] object EvaluateConfigurations { */ def evaluateConfiguration( eval: Eval, - file: File, + file: VirtualFileRef, lines: Seq[String], imports: Seq[String], offset: Int @@ -136,36 +144,40 @@ private[sbt] object EvaluateConfigurations { */ private[sbt] def evaluateSbtFile( eval: Eval, - file: File, + file: VirtualFileRef, lines: Seq[String], imports: Seq[String], offset: Int ): LazyClassLoaded[LoadedSbtFile] = { // TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do // detection for which project project manipulations should be applied. - val name = file.getPath + val name = file.id val parsed = parseConfiguration(file, lines, imports, offset) val (importDefs, definitions) = if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty) else { val definitions = evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file)) - val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil) + val imp = BuildUtilLite.importAllRoot(definitions.enclosingModule :: Nil) (imp, DefinedSbtValues(definitions)) } val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports val dslEntries = parsed.settings map { case (dslExpression, range) => evaluateDslEntry(eval, name, allImports, dslExpression, range) } - eval.unlinkDeferred() + + // TODO: + // eval.unlinkDeferred() + // Tracks all the files we generated from evaluating the sbt file. - val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated)) + val allGeneratedFiles: Seq[Path] = (definitions.generated ++ dslEntries.flatMap(_.generated)) loader => { val projects = { val compositeProjects = definitions.values(loader).collect { case p: CompositeProject => p } - CompositeProject.expand(compositeProjects).map(resolveBase(file.getParentFile, _)) + // todo: resolveBase? + CompositeProject.expand(compositeProjects) // .map(resolveBase(file.getParentFile, _)) } val (settingsRaw, manipulationsRaw) = dslEntries map (_.result apply loader) partition { @@ -192,7 +204,8 @@ private[sbt] object EvaluateConfigurations { } /** move a project to be relative to this file after we've evaluated it. */ - private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base)) + private[this] def resolveBase(f: File, p: Project) = + p.copy(base = IO.resolve(f, p.base)) def addOffset(offset: Int, lines: Seq[(String, Int)]): Seq[(String, Int)] = lines.map { case (s, i) => (s, i + offset) } @@ -234,13 +247,13 @@ private[sbt] object EvaluateConfigurations { try { eval.eval( expression, - imports = new EvalImports(imports, name), + imports = new EvalImports(imports.map(_._1)), // name srcName = name, tpeName = Some(SettingsDefinitionName), line = range.start ) } catch { - case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage) + case e: EvalException => throw new MessageOnlyException(e.getMessage) } // TODO - keep track of configuration classes defined. TrackedEvalResult( @@ -284,14 +297,13 @@ private[sbt] object EvaluateConfigurations { * anything on the right of the tuple is a scala expression (definition or setting). */ private[sbt] def splitExpressions( - file: File, + file: VirtualFileRef, lines: Seq[String] - ): (Seq[(String, Int)], Seq[(String, LineRange)]) = { + ): (Seq[(String, Int)], Seq[(String, LineRange)]) = val split = SbtParser(file, lines) // TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different // scala compiler rather than re-parsing. (split.imports, split.settings) - } private[this] def splitSettingsDefinitions( lines: Seq[(String, LineRange)] @@ -316,19 +328,33 @@ private[sbt] object EvaluateConfigurations { name: String, imports: Seq[(String, Int)], definitions: Seq[(String, LineRange)], - file: Option[File] - ): compiler.EvalDefinitions = { + file: Option[VirtualFileRef], + ): EvalDefinitions = { val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) } eval.evalDefinitions( convertedRanges, - new EvalImports(imports, name), + new EvalImports(imports.map(_._1)), // name name, - file, + // file, extractedValTypes ) } } +object BuildUtilLite: + /** Import just the names. */ + def importNames(names: Seq[String]): Seq[String] = + if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil + + /** Prepend `_root_` and import just the names. */ + def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName) + + /** Wildcard import `._` for all values. */ + def importAll(values: Seq[String]): Seq[String] = importNames(values map { _ + "._" }) + def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName) + def rootedName(s: String): String = if (s contains '.') "_root_." + s else s +end BuildUtilLite + object Index { def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = { @@ -360,39 +386,41 @@ object Index { private[this] def stringToKeyMap0( settings: Set[AttributeKey[_]] )(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = { - val multiMap = settings.groupBy(label) - val duplicates = multiMap.iterator - .collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } - .collect { - case (k, xs) if xs.size > 1 => (k, xs) - } - .toVector - if (duplicates.isEmpty) - multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap - else - sys.error( - duplicates map { case (k, tps) => - "'" + k + "' (" + tps.mkString(", ") + ")" - } mkString ("Some keys were defined with the same name but different types: ", ", ", "") - ) + // val multiMap = settings.groupBy(label) + // val duplicates = multiMap.iterator + // .collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } + // .collect { + // case (k, xs) if xs.size > 1 => (k, xs) + // } + // .toVector + // if (duplicates.isEmpty) + // multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap + // else + // sys.error( + // duplicates map { case (k, tps) => + // "'" + k + "' (" + tps.mkString(", ") + ")" + // } mkString ("Some keys were defined with the same name but different types: ", ", ", "") + // ) + ??? } private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]] def triggers(ss: Settings[Scope]): Triggers[Task] = { - val runBefore = new TriggerMap - val triggeredBy = new TriggerMap - ss.data.values foreach ( - _.entries foreach { - case AttributeEntry(_, value: Task[_]) => - val as = value.info.attributes - update(runBefore, value, as get Keys.runBefore) - update(triggeredBy, value, as get Keys.triggeredBy) - case _ => () - } - ) - val onComplete = (GlobalScope / Keys.onComplete) get ss getOrElse (() => ()) - new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map }) + // val runBefore = new TriggerMap + // val triggeredBy = new TriggerMap + // ss.data.values foreach ( + // _.entries foreach { + // case AttributeEntry(_, value: Task[_]) => + // val as = value.info.attributes + // update(runBefore, value, as get Keys.runBefore) + // update(triggeredBy, value, as get Keys.triggeredBy) + // case _ => () + // } + // ) + // val onComplete = (GlobalScope / Keys.onComplete) get ss getOrElse (() => ()) + // new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map }) + ??? } private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit = diff --git a/main/src/main/scala/sbt/internal/LoadedSbtFile.scala b/buildfile/src/main/scala/sbt/internal/LoadedSbtFile.scala similarity index 88% rename from main/src/main/scala/sbt/internal/LoadedSbtFile.scala rename to buildfile/src/main/scala/sbt/internal/LoadedSbtFile.scala index 7719c75c0..6c5fdda66 100644 --- a/main/src/main/scala/sbt/internal/LoadedSbtFile.scala +++ b/buildfile/src/main/scala/sbt/internal/LoadedSbtFile.scala @@ -10,6 +10,7 @@ package internal import Def.Setting import java.io.File +import java.nio.file.Path /** * Represents the exported contents of a .sbt file. Currently, that includes the list of settings, @@ -23,7 +24,7 @@ private[sbt] final class LoadedSbtFile( // TODO - we may want to expose a simpler interface on top of here for the set command, // rather than what we have now... val definitions: DefinedSbtValues, - val generatedFiles: Seq[File] + val generatedFiles: Seq[Path] ) { // We still use merge for now. We track originating sbt file in an alternative manner. def merge(o: LoadedSbtFile): LoadedSbtFile = @@ -44,7 +45,7 @@ private[sbt] final class LoadedSbtFile( * Represents the `val`/`lazy val` definitions defined within a build.sbt file * which we can reference in other settings. */ -private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinitions]) { +private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[EvalDefinitions]) { def values(parent: ClassLoader): Seq[Any] = sbtFiles flatMap (_ values parent) @@ -63,12 +64,12 @@ private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinit v <- file.valNames } yield s"import ${m}.`${v}`" } - def generated: Seq[File] = - sbtFiles flatMap (_.generated) + def generated: Seq[Path] = + sbtFiles.flatMap(_.generated) // Returns a classpath for the generated .sbt files. - def classpath: Seq[File] = - generated.map(_.getParentFile).distinct + def classpath: Seq[Path] = + generated.map(_.getParent()).distinct /** * Joins the defines of this build.sbt with another. @@ -81,7 +82,7 @@ private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinit private[sbt] object DefinedSbtValues { /** Construct a DefinedSbtValues object directly from the underlying representation. */ - def apply(eval: compiler.EvalDefinitions): DefinedSbtValues = + def apply(eval: EvalDefinitions): DefinedSbtValues = new DefinedSbtValues(Seq(eval)) /** Construct an empty value object. */ diff --git a/buildfile/src/main/scala/sbt/internal/parser/SbtParser.scala b/buildfile/src/main/scala/sbt/internal/parser/SbtParser.scala new file mode 100644 index 000000000..e78e6623c --- /dev/null +++ b/buildfile/src/main/scala/sbt/internal/parser/SbtParser.scala @@ -0,0 +1,304 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt +package internal +package parser + +import sbt.internal.util.{ LineRange, MessageOnlyException } +import java.io.File +import java.nio.charset.StandardCharsets +import java.util.concurrent.ConcurrentHashMap +import sbt.internal.parser.SbtParser._ +import scala.compat.Platform.EOL +import dotty.tools.dotc.ast.Trees.Lazy +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.ast.untpd.Tree +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.Driver +import dotty.tools.dotc.util.NoSourcePosition +import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.util.SourcePosition +import dotty.tools.io.VirtualDirectory +import dotty.tools.io.VirtualFile +import dotty.tools.dotc.parsing.* +import dotty.tools.dotc.reporting.ConsoleReporter +import dotty.tools.dotc.reporting.Diagnostic +import dotty.tools.dotc.reporting.Reporter +import dotty.tools.dotc.reporting.StoreReporter +import scala.util.Random +import scala.util.{ Failure, Success } +import xsbti.VirtualFileRef +import dotty.tools.dotc.printing.Printer +import dotty.tools.dotc.config.Printers + +private[sbt] object SbtParser: + val END_OF_LINE_CHAR = '\n' + val END_OF_LINE = String.valueOf(END_OF_LINE_CHAR) + private[parser] val NOT_FOUND_INDEX = -1 + private[sbt] val FAKE_FILE = VirtualFileRef.of("fake") // new File("fake") + private[parser] val XML_ERROR = "';' expected but 'val' found." + + private val XmlErrorMessage = + """Probably problem with parsing xml group, please add parens or semicolons: + |Replace: + |val xmlGroup = + |with: + |val xmlGroup = () + |or + |val xmlGroup = ; + """.stripMargin + + private final val defaultClasspath = + sbt.io.Path.makeString(sbt.io.IO.classLocationPath[Product].toFile :: Nil) + + /** + * Provides the previous error reporting functionality in + * [[scala.tools.reflect.ToolBox]]. + * + * This parser is a wrapper around a collection of reporters that are + * indexed by a unique key. This is used to ensure that the reports of + * one parser don't collide with other ones in concurrent settings. + * + * This parser is a sign that this whole parser should be rewritten. + * There are exceptions everywhere and the logic to work around + * the scalac parser bug heavily relies on them and it's tied + * to the test suite. Ideally, we only want to throw exceptions + * when we know for a fact that the user-provided snippet doesn't + * parse. + */ + private[sbt] class UniqueParserReporter extends Reporter { + + private val reporters = new ConcurrentHashMap[String, StoreReporter]() + + override def doReport(dia: Diagnostic)(using Context): Unit = + import scala.jdk.OptionConverters.* + val sourcePath = dia.position.asScala.getOrElse(sys.error("missing position")).source.path + val reporter = getReporter(sourcePath) + reporter.doReport(dia) + override def report(dia: Diagnostic)(using Context): Unit = + import scala.jdk.OptionConverters.* + val sourcePath = dia.position.asScala.getOrElse(sys.error("missing position")).source.path + val reporter = getReporter(sourcePath) + reporter.report(dia) + + override def hasErrors: Boolean = { + var result = false + reporters.forEachValue(100, r => if (r.hasErrors) result = true) + result + } + + def createReporter(uniqueFileName: String): StoreReporter = + val r = new StoreReporter(null) + reporters.put(uniqueFileName, r) + r + + def getOrCreateReporter(uniqueFileName: String): StoreReporter = { + val r = reporters.get(uniqueFileName) + if (r == null) createReporter(uniqueFileName) + else r + } + + private def getReporter(fileName: String) = { + val reporter = reporters.get(fileName) + if (reporter == null) { + scalacGlobalInitReporter.getOrElse( + sys.error(s"sbt forgot to initialize `scalacGlobalInitReporter`.") + ) + } else reporter + } + + def throwParserErrorsIfAny(reporter: StoreReporter, fileName: String)(using + context: Context + ): Unit = + if reporter.hasErrors then { + val seq = reporter.pendingMessages.map { info => + s"""[$fileName]:${info.pos.line}: ${info.msg}""" + } + val errorMessage = seq.mkString(EOL) + val error: String = + if (errorMessage.contains(XML_ERROR)) + s"$errorMessage\n${SbtParser.XmlErrorMessage}" + else errorMessage + throw new MessageOnlyException(error) + } else () + } + + private[sbt] var scalacGlobalInitReporter: Option[ConsoleReporter] = None + + private[sbt] val globalReporter = UniqueParserReporter() + private[sbt] val defaultGlobalForParser = ParseDriver() + private[sbt] final class ParseDriver extends Driver: + import dotty.tools.dotc.config.Settings.Setting._ + val compileCtx0 = initCtx.fresh + val options = List("-classpath", s"$defaultClasspath", "dummy.scala") + val compileCtx1 = setup(options.toArray, compileCtx0) match + case Some((_, ctx)) => ctx + case _ => sys.error(s"initialization failed for $options") + val outputDir = VirtualDirectory("output") + val compileCtx2 = compileCtx1.fresh + .setSetting( + compileCtx1.settings.outputDir, + outputDir + ) + .setReporter(globalReporter) + val compileCtx = compileCtx2 + val compiler = newCompiler(using compileCtx) + end ParseDriver + + /** + * Parse code reusing the same [[Run]] instance. + * + * @param code The code to be parsed. + * @param filePath The file name where the code comes from. + * @param reporterId0 The reporter id is the key used to get the pertinent + * reporter. Given that the parsing reuses a global + * instance, this reporter id makes sure that every parsing + * session gets its own errors in a concurrent setting. + * The reporter id must be unique per parsing session. + * @return + */ + private[sbt] def parse( + code: String, + filePath: String, + reporterId0: Option[String] + ): (List[untpd.Tree], String, SourceFile) = + import defaultGlobalForParser.* + given ctx: Context = compileCtx + val reporterId = reporterId0.getOrElse(s"$filePath-${Random.nextInt}") + val reporter = globalReporter.getOrCreateReporter(reporterId) + reporter.removeBufferedMessages + val moduleName = "SyntheticModule" + val wrapCode = s"""object $moduleName { + |$code + |}""".stripMargin + val wrapperFile = SourceFile( + VirtualFile(reporterId, wrapCode.getBytes(StandardCharsets.UTF_8)), + scala.io.Codec.UTF8 + ) + val parser = Parsers.Parser(wrapperFile) + val t = parser.parse() + val parsedTrees = t match + case untpd.PackageDef(_, List(untpd.ModuleDef(_, untpd.Template(_, _, _, trees)))) => + trees match + case ts: List[untpd.Tree] => ts + case ts: Lazy[List[untpd.Tree]] => ts.complete + globalReporter.throwParserErrorsIfAny(reporter, filePath) + (parsedTrees, reporterId, wrapperFile) +end SbtParser + +private class SbtParserInit { + new Thread("sbt-parser-init-thread") { + setDaemon(true) + start() + override def run(): Unit = { + val _ = SbtParser.defaultGlobalForParser + } + } +} + +/** + * This method solely exists to add scaladoc to members in SbtParser which + * are defined using pattern matching. + */ +sealed trait ParsedSbtFileExpressions: + /** The set of parsed import expressions. */ + def imports: Seq[(String, Int)] + + /** The set of parsed definitions and/or sbt build settings. */ + def settings: Seq[(String, LineRange)] + + /** The set of scala tree's for parsed definitions/settings and the underlying string representation.. */ + def settingsTrees: Seq[(String, Tree)] +end ParsedSbtFileExpressions + +/** + * An initial parser/splitter of .sbt files. + * + * This class is responsible for chunking a `.sbt` file into expression ranges + * which we can then compile using the Scala compiler. + * + * Example: + * + * {{{ + * val parser = SbtParser(myFile, IO.readLines(myFile)) + * // All import statements + * val imports = parser.imports + * // All other statements (val x =, or raw settings) + * val settings = parser.settings + * }}} + * + * @param file The file we're parsing (may be a dummy file) + * @param lines The parsed "lines" of the file, where each string is a line. + */ +private[sbt] case class SbtParser(path: VirtualFileRef, lines: Seq[String]) + extends ParsedSbtFileExpressions: + // settingsTrees,modifiedContent needed for "session save" + // TODO - We should look into splitting out "definitions" vs. "settings" here instead of further string lookups, since we have the + // parsed trees. + val (imports, settings, settingsTrees) = splitExpressions(path, lines) + + import SbtParser.defaultGlobalForParser.* + + private def splitExpressions( + path: VirtualFileRef, + lines: Seq[String] + ): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = { + // import sbt.internal.parser.MissingBracketHandler.findMissingText + val indexedLines = lines.toIndexedSeq + val content = indexedLines.mkString(END_OF_LINE) + val fileName = path.id + val (parsedTrees, reporterId, sourceFile) = parse(content, fileName, None) + given ctx: Context = compileCtx + + val (imports: Seq[untpd.Tree], statements: Seq[untpd.Tree]) = + parsedTrees.partition { + case _: untpd.Import => true + case _ => false + } + + def convertStatement(tree: untpd.Tree)(using ctx: Context): Option[(String, Tree, LineRange)] = + if tree.span.exists then + // not sure why I need to reconstruct the position myself + val pos = SourcePosition(sourceFile, tree.span) + val statement = String(pos.linesSlice).trim() + val lines = pos.lines + val wrapperLineOffset = 0 + Some( + ( + statement, + tree, + LineRange(lines.start + wrapperLineOffset, lines.end + wrapperLineOffset) + ) + ) + else None + val stmtTreeLineRange = statements.flatMap(convertStatement) + val importsLineRange = importsToLineRanges(sourceFile, imports) + ( + importsLineRange, + stmtTreeLineRange.map { case (stmt, _, lr) => + (stmt, lr) + }, + stmtTreeLineRange.map { case (stmt, tree, _) => + (stmt, tree) + } + ) + } + + private def importsToLineRanges( + sourceFile: SourceFile, + imports: Seq[Tree] + )(using context: Context): Seq[(String, Int)] = + imports.map { tree => + // not sure why I need to reconstruct the position myself + val pos = SourcePosition(sourceFile, tree.span) + val content = String(pos.linesSlice).trim() + val wrapperLineOffset = 0 + (content, pos.line + wrapperLineOffset) + } +end SbtParser diff --git a/main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala b/buildfile/src/main/scala/sbt/internal/parser/SbtRefactorings.scala similarity index 53% rename from main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala rename to buildfile/src/main/scala/sbt/internal/parser/SbtRefactorings.scala index 647e5d012..42758c57d 100644 --- a/main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala +++ b/buildfile/src/main/scala/sbt/internal/parser/SbtRefactorings.scala @@ -9,6 +9,8 @@ package sbt package internal package parser +/* + private[sbt] object SbtRefactorings { import sbt.internal.parser.SbtParser.{ END_OF_LINE, FAKE_FILE } @@ -18,14 +20,14 @@ private[sbt] object SbtRefactorings { val reverseOrderingInt = Ordering[Int].reverse /** - * Refactoring a `.sbt` file so that the new settings are used instead of any existing settings. - * @param configFile SbtConfigFile with the lines of an sbt file as a List[String] where each string is one line - * @param commands A List of settings (space separate) that should be inserted into the current file. - * If the settings replaces a value, it will replace the original line in the .sbt file. - * If in the `.sbt` file we have multiply value for one settings - - * the first will be replaced and the other will be removed. - * @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file. - */ + * Refactoring a `.sbt` file so that the new settings are used instead of any existing settings. + * @param configFile SbtConfigFile with the lines of an sbt file as a List[String] where each string is one line + * @param commands A List of settings (space separate) that should be inserted into the current file. + * If the settings replaces a value, it will replace the original line in the .sbt file. + * If in the `.sbt` file we have multiply value for one settings - + * the first will be replaced and the other will be removed. + * @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file. + */ def applySessionSettings( configFile: SbtConfigFile, commands: Seq[SessionSetting] @@ -43,12 +45,11 @@ private[sbt] object SbtRefactorings { modifiedContent: String, sortedRecordedCommands: Seq[(Int, String, String)] ) = { - sortedRecordedCommands.foldLeft(modifiedContent) { - case (acc, (from, old, replacement)) => - val before = acc.substring(0, from) - val after = acc.substring(from + old.length, acc.length) - val afterLast = emptyStringForEmptyString(after) - before + replacement + afterLast + sortedRecordedCommands.foldLeft(modifiedContent) { case (acc, (from, old, replacement)) => + val before = acc.substring(0, from) + val after = acc.substring(from + old.length, acc.length) + val afterLast = emptyStringForEmptyString(after) + before + replacement + afterLast } } @@ -58,32 +59,29 @@ private[sbt] object SbtRefactorings { } private def recordCommands(commands: Seq[SessionSetting], split: SbtParser) = - commands.flatMap { - case (_, command) => - val map = toTreeStringMap(command) - map.flatMap { case (name, _) => treesToReplacements(split, name, command) } + commands.flatMap { case (_, command) => + val map = toTreeStringMap(command) + map.flatMap { case (name, _) => treesToReplacements(split, name, command) } } private def treesToReplacements(split: SbtParser, name: String, command: Seq[String]) = - split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) { - case (acc, (st, tree)) => - val treeName = extractSettingName(tree) - if (name == treeName) { - val replacement = - if (acc.isEmpty) command.mkString(END_OF_LINE) - else emptyString - (tree.pos.start, st, replacement) +: acc - } else { - acc - } + split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) { case (acc, (st, tree)) => + val treeName = extractSettingName(tree) + if (name == treeName) { + val replacement = + if (acc.isEmpty) command.mkString(END_OF_LINE) + else emptyString + (tree.pos.start, st, replacement) +: acc + } else { + acc + } } private def toTreeStringMap(command: Seq[String]) = { val split = SbtParser(FAKE_FILE, command) val trees = split.settingsTrees - val seq = trees.map { - case (statement, tree) => - (extractSettingName(tree), statement) + val seq = trees.map { case (statement, tree) => + (extractSettingName(tree), statement) } seq.toMap } @@ -98,3 +96,4 @@ private[sbt] object SbtRefactorings { } } + */ diff --git a/main/src/test/resources/error-format/1.sbt.txt b/buildfile/src/test/resources/error-format/1.sbt.txt similarity index 100% rename from main/src/test/resources/error-format/1.sbt.txt rename to buildfile/src/test/resources/error-format/1.sbt.txt diff --git a/main/src/test/resources/error-format/2.sbt.txt b/buildfile/src/test/resources/error-format/2.sbt.txt similarity index 100% rename from main/src/test/resources/error-format/2.sbt.txt rename to buildfile/src/test/resources/error-format/2.sbt.txt diff --git a/main/src/test/resources/error-format/3.sbt.txt b/buildfile/src/test/resources/error-format/3.sbt.txt similarity index 100% rename from main/src/test/resources/error-format/3.sbt.txt rename to buildfile/src/test/resources/error-format/3.sbt.txt diff --git a/main/src/test/resources/error-format/4.sbt.txt b/buildfile/src/test/resources/error-format/4.sbt.txt similarity index 100% rename from main/src/test/resources/error-format/4.sbt.txt rename to buildfile/src/test/resources/error-format/4.sbt.txt diff --git a/main/src/test/resources/new-format/1.sbt.txt b/buildfile/src/test/resources/new-format/1.sbt.txt similarity index 100% rename from main/src/test/resources/new-format/1.sbt.txt rename to buildfile/src/test/resources/new-format/1.sbt.txt diff --git a/main/src/test/resources/new-format/2.sbt.txt b/buildfile/src/test/resources/new-format/2.sbt.txt similarity index 100% rename from main/src/test/resources/new-format/2.sbt.txt rename to buildfile/src/test/resources/new-format/2.sbt.txt diff --git a/main/src/test/resources/new-format/3.sbt.txt b/buildfile/src/test/resources/new-format/3.sbt.txt similarity index 100% rename from main/src/test/resources/new-format/3.sbt.txt rename to buildfile/src/test/resources/new-format/3.sbt.txt diff --git a/main/src/test/resources/session-settings-quick/3.sbt.txt b/buildfile/src/test/resources/session-settings-quick/3.sbt.txt similarity index 100% rename from main/src/test/resources/session-settings-quick/3.sbt.txt rename to buildfile/src/test/resources/session-settings-quick/3.sbt.txt diff --git a/main/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set b/buildfile/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set similarity index 100% rename from main/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set rename to buildfile/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set diff --git a/main/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set.result b/buildfile/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set.result similarity index 100% rename from main/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set.result rename to buildfile/src/test/resources/session-settings-quick/3.sbt.txt_1/1.set.result diff --git a/main/src/test/resources/session-settings/1.sbt.txt b/buildfile/src/test/resources/session-settings/1.sbt.txt similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt rename to buildfile/src/test/resources/session-settings/1.sbt.txt diff --git a/main/src/test/resources/session-settings/1.sbt.txt_1/1.set b/buildfile/src/test/resources/session-settings/1.sbt.txt_1/1.set similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt_1/1.set rename to buildfile/src/test/resources/session-settings/1.sbt.txt_1/1.set diff --git a/main/src/test/resources/session-settings/1.sbt.txt_1/1.set.result b/buildfile/src/test/resources/session-settings/1.sbt.txt_1/1.set.result similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt_1/1.set.result rename to buildfile/src/test/resources/session-settings/1.sbt.txt_1/1.set.result diff --git a/main/src/test/resources/session-settings/1.sbt.txt_1/2.set b/buildfile/src/test/resources/session-settings/1.sbt.txt_1/2.set similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt_1/2.set rename to buildfile/src/test/resources/session-settings/1.sbt.txt_1/2.set diff --git a/main/src/test/resources/session-settings/1.sbt.txt_1/2.set.result b/buildfile/src/test/resources/session-settings/1.sbt.txt_1/2.set.result similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt_1/2.set.result rename to buildfile/src/test/resources/session-settings/1.sbt.txt_1/2.set.result diff --git a/main/src/test/resources/session-settings/1.sbt.txt_1/3.set b/buildfile/src/test/resources/session-settings/1.sbt.txt_1/3.set similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt_1/3.set rename to buildfile/src/test/resources/session-settings/1.sbt.txt_1/3.set diff --git a/main/src/test/resources/session-settings/1.sbt.txt_1/3.set.result b/buildfile/src/test/resources/session-settings/1.sbt.txt_1/3.set.result similarity index 100% rename from main/src/test/resources/session-settings/1.sbt.txt_1/3.set.result rename to buildfile/src/test/resources/session-settings/1.sbt.txt_1/3.set.result diff --git a/main/src/test/resources/session-settings/2.sbt.txt b/buildfile/src/test/resources/session-settings/2.sbt.txt similarity index 100% rename from main/src/test/resources/session-settings/2.sbt.txt rename to buildfile/src/test/resources/session-settings/2.sbt.txt diff --git a/main/src/test/resources/session-settings/2.sbt.txt_1/1.set b/buildfile/src/test/resources/session-settings/2.sbt.txt_1/1.set similarity index 100% rename from main/src/test/resources/session-settings/2.sbt.txt_1/1.set rename to buildfile/src/test/resources/session-settings/2.sbt.txt_1/1.set diff --git a/main/src/test/resources/session-settings/2.sbt.txt_1/1.set.result b/buildfile/src/test/resources/session-settings/2.sbt.txt_1/1.set.result similarity index 100% rename from main/src/test/resources/session-settings/2.sbt.txt_1/1.set.result rename to buildfile/src/test/resources/session-settings/2.sbt.txt_1/1.set.result diff --git a/main/src/test/resources/session-settings/3.sbt.txt b/buildfile/src/test/resources/session-settings/3.sbt.txt similarity index 100% rename from main/src/test/resources/session-settings/3.sbt.txt rename to buildfile/src/test/resources/session-settings/3.sbt.txt diff --git a/main/src/test/resources/session-settings/3.sbt.txt_1/1.set b/buildfile/src/test/resources/session-settings/3.sbt.txt_1/1.set similarity index 100% rename from main/src/test/resources/session-settings/3.sbt.txt_1/1.set rename to buildfile/src/test/resources/session-settings/3.sbt.txt_1/1.set diff --git a/main/src/test/resources/session-settings/3.sbt.txt_1/1.set.result b/buildfile/src/test/resources/session-settings/3.sbt.txt_1/1.set.result similarity index 100% rename from main/src/test/resources/session-settings/3.sbt.txt_1/1.set.result rename to buildfile/src/test/resources/session-settings/3.sbt.txt_1/1.set.result diff --git a/main/src/test/resources/session-settings/4.sbt.txt b/buildfile/src/test/resources/session-settings/4.sbt.txt similarity index 100% rename from main/src/test/resources/session-settings/4.sbt.txt rename to buildfile/src/test/resources/session-settings/4.sbt.txt diff --git a/main/src/test/resources/session-settings/4.sbt.txt_1/1.set b/buildfile/src/test/resources/session-settings/4.sbt.txt_1/1.set similarity index 100% rename from main/src/test/resources/session-settings/4.sbt.txt_1/1.set rename to buildfile/src/test/resources/session-settings/4.sbt.txt_1/1.set diff --git a/main/src/test/resources/session-settings/4.sbt.txt_1/1.set.result b/buildfile/src/test/resources/session-settings/4.sbt.txt_1/1.set.result similarity index 100% rename from main/src/test/resources/session-settings/4.sbt.txt_1/1.set.result rename to buildfile/src/test/resources/session-settings/4.sbt.txt_1/1.set.result diff --git a/buildfile/src/test/scala/sbt/internal/SbtParserTest.scala b/buildfile/src/test/scala/sbt/internal/SbtParserTest.scala new file mode 100644 index 000000000..883c334c1 --- /dev/null +++ b/buildfile/src/test/scala/sbt/internal/SbtParserTest.scala @@ -0,0 +1,39 @@ +package sbt.internal + +import sbt.internal.parser.SbtParser +import sbt.internal.util.LineRange +import xsbti.VirtualFileRef + +object SbtParserTest extends verify.BasicTestSuite: + lazy val testCode: String = """import keys.* +import com.{ + keys +} + +val x = 1 +lazy val foo = project + .settings(x := y) +""" + + test("imports with their lines") { + val ref = VirtualFileRef.of("vfile") + val p = SbtParser(ref, testCode.linesIterator.toList) + assert( + p.imports == List( + "import keys.*" -> 1, + """import com.{ + keys +}""" -> 2 + ) + ) + } + + test("imports with their lines2") { + val ref = VirtualFileRef.of("vfile") + val p = SbtParser(ref, testCode.linesIterator.toList) + assert(p.settings.size == 2) + assert(p.settings(0) == ("""val x = 1""" -> LineRange(6, 6))) + assert(p.settings(1) == ("""lazy val foo = project + .settings(x := y)""" -> LineRange(7, 8))) + } +end SbtParserTest diff --git a/main/src/test/scala/sbt/internal/parser/AbstractSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/AbstractSpec.scala similarity index 100% rename from main/src/test/scala/sbt/internal/parser/AbstractSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/AbstractSpec.scala diff --git a/main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala similarity index 51% rename from main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala index e13e15eed..1561b5beb 100644 --- a/main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala @@ -10,36 +10,36 @@ package internal package parser abstract class CheckIfParsedSpec( - implicit val splitter: SplitExpressions.SplitExpression = - EvaluateConfigurations.splitExpressions + val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions ) extends AbstractSpec { test(s"${this.getClass.getName} should parse sbt file") { - files foreach { - case (content, description, nonEmptyImports, nonEmptyStatements) => - println(s"""${getClass.getSimpleName}: "$description" """) - val (imports, statements) = split(content) - assert( - nonEmptyStatements == statements.nonEmpty, - s"""$description + files foreach { case (content, description, nonEmptyImports, nonEmptyStatements) => + println(s"""${getClass.getSimpleName}: "$description" """) + val (imports, statements) = split(content)(splitter) + assert( + nonEmptyStatements == statements.nonEmpty, + s"""$description |***${shouldContains(nonEmptyStatements)} statements*** |$content """.stripMargin - ) - assert( - nonEmptyImports == imports.nonEmpty, - s"""$description + ) + assert( + nonEmptyImports == imports.nonEmpty, + s"""$description |***${shouldContains(nonEmptyImports)} imports*** |$content """.stripMargin - ) + ) } } private def shouldContains(b: Boolean): String = - s"""Should ${if (b) { - "contain" - } else { - "not contain" - }}""" + s"""Should ${ + if (b) { + "contain" + } else { + "not contain" + } + }""" protected def files: Seq[(String, String, Boolean, Boolean)] diff --git a/main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala similarity index 82% rename from main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala index 06f348b3e..47858534a 100644 --- a/main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala @@ -25,7 +25,8 @@ object CommentedXmlSpec extends CheckIfParsedSpec { false, true ), - (""" + ( + """ |val scmpom = taskKey[xml.NodeBuffer]("Node buffer") | |scmpom := @@ -44,8 +45,13 @@ object CommentedXmlSpec extends CheckIfParsedSpec { | |publishMavenStyle := true | - """.stripMargin, "Wrong Commented xml ", false, true), - (""" + """.stripMargin, + "Wrong Commented xml ", + false, + true + ), + ( + """ |val scmpom = taskKey[xml.NodeBuffer]("Node buffer") | |scmpom := @@ -64,14 +70,28 @@ object CommentedXmlSpec extends CheckIfParsedSpec { | |publishMavenStyle := true | - """.stripMargin, "Commented xml ", false, true), - (""" + """.stripMargin, + "Commented xml ", + false, + true + ), + ( + """ |import sbt._ | |// - """.stripMargin, "Xml in comment2", false, false) + """.stripMargin, + "Xml in comment2", + false, + false + ) ) } diff --git a/main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala similarity index 90% rename from main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala index 2af64a80c..01d1a86e0 100644 --- a/main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala @@ -9,6 +9,7 @@ package sbt.internal.parser import sbt.internal.util.MessageOnlyException +/* object EmbeddedXmlSpec extends CheckIfParsedSpec { test("File with xml content should Handle last xml part") { @@ -36,6 +37,7 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec { try { split(buildSbt) + sys.error("expected MessageOnlyException") } catch { case exception: MessageOnlyException => val index = buildSbt.linesIterator.indexWhere(line => line.contains(errorLine)) + 1 @@ -47,13 +49,24 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec { } protected val files = Seq( - (""" + ( + """ |val p = - """.stripMargin, "Xml modified closing tag at end of file", false, true), - (""" + """.stripMargin, + "Xml modified closing tag at end of file", + false, + true + ), + ( + """ |val p = - """.stripMargin, "Xml at end of file", false, true), - ("""| + """.stripMargin, + "Xml at end of file", + false, + true + ), + ( + """| | |name := "play-html-compressor" | @@ -89,8 +102,13 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec { | |val tra = "" | - """.stripMargin, "Xml in string", false, true), - ("""| + """.stripMargin, + "Xml in string", + false, + true + ), + ( + """| | |name := "play-html-compressor" | @@ -119,7 +137,11 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec { | | | - | """.stripMargin, "Xml with attributes", false, true), + | """.stripMargin, + "Xml with attributes", + false, + true + ), ( """ |scalaVersion := "2.10.2" @@ -151,3 +173,4 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec { ) } + */ diff --git a/main/src/test/scala/sbt/internal/parser/ErrorSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/ErrorSpec.scala similarity index 62% rename from main/src/test/scala/sbt/internal/parser/ErrorSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/ErrorSpec.scala index 693e1b72a..9659d437e 100644 --- a/main/src/test/scala/sbt/internal/parser/ErrorSpec.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/ErrorSpec.scala @@ -12,48 +12,54 @@ package parser import java.io.File import sbt.internal.util.MessageOnlyException import scala.io.Source +import sbt.internal.inc.PlainVirtualFileConverter object ErrorSpec extends AbstractSpec { - implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions + + val converter = PlainVirtualFileConverter.converter + // implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions test("Parser should contains file name and line number") { val rootPath = getClass.getResource("/error-format/").getPath println(s"Reading files from: $rootPath") new File(rootPath).listFiles foreach { file => print(s"Processing ${file.getName}: ") + + val vf = converter.toVirtualFile(file.toPath()) val buildSbt = Source.fromFile(file).getLines().mkString("\n") try { - SbtParser(file, buildSbt.linesIterator.toSeq) + SbtParser(vf, buildSbt.linesIterator.toSeq) } catch { case exp: MessageOnlyException => val message = exp.getMessage println(s"${exp.getMessage}") assert(message.contains(file.getName)) } - containsLineNumber(buildSbt) + // todo: + // containsLineNumber(buildSbt) } } - test("it should handle wrong parsing") { - intercept[MessageOnlyException] { - val buildSbt = - """ - |libraryDependencies ++= Seq("a" % "b" % "2") map { - |(dependency) =>{ - | dependency - | } /* */ // - |} - """.stripMargin - MissingBracketHandler.findMissingText( - buildSbt, - buildSbt.length, - 2, - "fake.txt", - new MessageOnlyException("fake") - ) - () - } - } + // test("it should handle wrong parsing") { + // intercept[MessageOnlyException] { + // val buildSbt = + // """ + // |libraryDependencies ++= Seq("a" % "b" % "2") map { + // |(dependency) =>{ + // | dependency + // | } /* */ // + // |} + // """.stripMargin + // MissingBracketHandler.findMissingText( + // buildSbt, + // buildSbt.length, + // 2, + // "fake.txt", + // new MessageOnlyException("fake") + // ) + // () + // } + // } test("it should handle xml error") { try { @@ -63,11 +69,12 @@ object ErrorSpec extends AbstractSpec { |val s = ' """.stripMargin SbtParser(SbtParser.FAKE_FILE, buildSbt.linesIterator.toSeq) + // sys.error("not supposed to reach here") } catch { case exp: MessageOnlyException => val message = exp.getMessage println(s"${exp.getMessage}") - assert(message.contains(SbtParser.FAKE_FILE.getName)) + assert(message.contains(SbtParser.FAKE_FILE.id())) } } diff --git a/main/src/test/scala/sbt/internal/parser/NewFormatSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/NewFormatSpec.scala similarity index 71% rename from main/src/test/scala/sbt/internal/parser/NewFormatSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/NewFormatSpec.scala index b8253a55e..94f7d366f 100644 --- a/main/src/test/scala/sbt/internal/parser/NewFormatSpec.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/NewFormatSpec.scala @@ -12,8 +12,10 @@ package parser import java.io.File import scala.io.Source +import sbt.internal.inc.PlainVirtualFileConverter object NewFormatSpec extends AbstractSpec { + val converter = PlainVirtualFileConverter.converter implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions test("New Format should handle lines") { @@ -22,11 +24,15 @@ object NewFormatSpec extends AbstractSpec { val allFiles = new File(rootPath).listFiles.toList allFiles foreach { path => println(s"$path") + val vf = converter.toVirtualFile(path.toPath()) val lines = Source.fromFile(path).getLines().toList - val (_, statements) = splitter(path, lines) - assert(statements.nonEmpty, s""" + val (_, statements) = splitter(vf, lines) + assert( + statements.nonEmpty, + s""" |***should contains statements*** - |$lines """.stripMargin) + |$lines """.stripMargin + ) } } } diff --git a/main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala b/buildfile/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala similarity index 85% rename from main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala rename to buildfile/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala index 61e26dc4c..0adaa8f86 100644 --- a/main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala @@ -9,6 +9,7 @@ package sbt package internal package parser +/* import java.io.{ File, FilenameFilter } import scala.io.Source @@ -39,12 +40,11 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec .toList allFiles foreach { file => val originalLines = Source.fromFile(file).getLines().toList - expectedResultAndMap(file) foreach { - case (expectedResultList, commands) => - val resultList = SbtRefactorings.applySessionSettings((file, originalLines), commands) - val expected = SbtParser(file, expectedResultList) - val result = SbtParser(file, resultList._2) - assert(result.settings == expected.settings) + expectedResultAndMap(file) foreach { case (expectedResultList, commands) => + val resultList = SbtRefactorings.applySessionSettings((file, originalLines), commands) + val expected = SbtParser(file, expectedResultList) + val result = SbtParser(file, resultList._2) + assert(result.settings == expected.settings) } } } @@ -76,3 +76,4 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec class SessionSettingsSpec extends AbstractSessionSettingsSpec("session-settings") class SessionSettingsQuickSpec extends AbstractSessionSettingsSpec("session-settings-quick") + */ diff --git a/main/src/test/scala/sbt/internal/parser/SplitExpressions.scala b/buildfile/src/test/scala/sbt/internal/parser/SplitExpressions.scala similarity index 55% rename from main/src/test/scala/sbt/internal/parser/SplitExpressions.scala rename to buildfile/src/test/scala/sbt/internal/parser/SplitExpressions.scala index fd8733a7a..38ec6e991 100644 --- a/main/src/test/scala/sbt/internal/parser/SplitExpressions.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/SplitExpressions.scala @@ -9,10 +9,10 @@ package sbt package internal package parser -import java.io.File - import sbt.internal.util.LineRange +import xsbti.VirtualFileRef -object SplitExpressions { - type SplitExpression = (File, Seq[String]) => (Seq[(String, Int)], Seq[(String, LineRange)]) -} +object SplitExpressions: + type SplitExpression = + (VirtualFileRef, Seq[String]) => (Seq[(String, Int)], Seq[(String, LineRange)]) +end SplitExpressions diff --git a/main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala b/buildfile/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala similarity index 80% rename from main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala rename to buildfile/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala index a8f652149..69ce3b89c 100644 --- a/main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala @@ -12,20 +12,21 @@ package parser import java.io.File import sbt.internal.util.LineRange +import xsbti.VirtualFileRef trait SplitExpression { - def split(s: String, file: File = new File("noFile"))( - implicit splitter: SplitExpressions.SplitExpression + def split(s: String, file: VirtualFileRef = VirtualFileRef.of("noFile"))( + splitter: SplitExpressions.SplitExpression ): (Seq[(String, Int)], Seq[(String, LineRange)]) = splitter(file, s.split("\n").toSeq) } trait SplitExpressionsBehavior extends SplitExpression { this: verify.BasicTestSuite => - def newExpressionsSplitter(implicit splitter: SplitExpressions.SplitExpression) = { + def newExpressionsSplitter(splitter: SplitExpressions.SplitExpression) = { test("parse a two settings without intervening blank line") { val (imports, settings) = split("""version := "1.0" -scalaVersion := "2.10.4"""") +scalaVersion := "2.10.4"""")(splitter) assert(imports.isEmpty) assert(settings.size == 2) @@ -34,7 +35,7 @@ scalaVersion := "2.10.4"""") test("parse a setting and val without intervening blank line") { val (imports, settings) = split("""version := "1.0" -lazy val root = (project in file(".")).enablePlugins­(PlayScala)""") +lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")(splitter) assert(imports.isEmpty) assert(settings.size == 2) @@ -46,11 +47,10 @@ lazy val root = (project in file(".")).enablePlugins­(PlayScala)""") import foo.Bar version := "1.0" """.stripMargin - ) + )(splitter) assert(imports.size == 2) assert(settingsAndDefs.size == 1) } - } } diff --git a/main/src/test/scala/sbt/internal/parser/SplitExpressionsTest.scala b/buildfile/src/test/scala/sbt/internal/parser/SplitExpressionsTest.scala similarity index 86% rename from main/src/test/scala/sbt/internal/parser/SplitExpressionsTest.scala rename to buildfile/src/test/scala/sbt/internal/parser/SplitExpressionsTest.scala index 794a4edba..e202ec4d5 100644 --- a/main/src/test/scala/sbt/internal/parser/SplitExpressionsTest.scala +++ b/buildfile/src/test/scala/sbt/internal/parser/SplitExpressionsTest.scala @@ -9,6 +9,6 @@ package sbt package internal package parser -object SplitExpressionsTest extends verify.BasicTestSuite with SplitExpressionsBehavior { +object SplitExpressionsTest extends verify.BasicTestSuite with SplitExpressionsBehavior: newExpressionsSplitter(EvaluateConfigurations.splitExpressions) -} +end SplitExpressionsTest diff --git a/launcher-package/integration-test/src/test/scala/ProcessImpl.scala b/launcher-package/integration-test/src/test/scala/ProcessImpl.scala index 7c8e4bc01..dc67e3c44 100644 --- a/launcher-package/integration-test/src/test/scala/ProcessImpl.scala +++ b/launcher-package/integration-test/src/test/scala/ProcessImpl.scala @@ -99,7 +99,7 @@ object BasicIO { in.close() } - def inheritInput(connect: Boolean) = { p: JProcessBuilder => if (connect) InheritInput(p) else false } + def inheritInput(connect: Boolean) = { (p: JProcessBuilder) => if (connect) InheritInput(p) else false } } private[sbt] object ExitCodes { def ignoreFirst: (Int, Int) => Int = (a, b) => b diff --git a/main-settings/src/main/scala/sbt/ClasspathDep.scala b/main-settings/src/main/scala/sbt/ClasspathDep.scala new file mode 100644 index 000000000..15ff4c382 --- /dev/null +++ b/main-settings/src/main/scala/sbt/ClasspathDep.scala @@ -0,0 +1,22 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt + +enum ClasspathDep[PR <: ProjectReference]: + case ResolvedClasspathDependency(project0: ProjectRef, configuration0: Option[String]) + extends ClasspathDep[ProjectRef] + case ClasspathDependency(project0: ProjectReference, configuration0: Option[String]) + extends ClasspathDep[ProjectReference] + + def project: PR = this match + case dep: ResolvedClasspathDependency => dep.project0 + case dep: ClasspathDependency => dep.project0 + + def configuration: Option[String] = this match + case dep: ResolvedClasspathDependency => dep.configuration0 + case dep: ClasspathDependency => dep.configuration0 diff --git a/main-settings/src/main/scala/sbt/PluginTrigger.scala b/main-settings/src/main/scala/sbt/PluginTrigger.scala new file mode 100644 index 000000000..41701218c --- /dev/null +++ b/main-settings/src/main/scala/sbt/PluginTrigger.scala @@ -0,0 +1,12 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt + +enum PluginTrigger: + case AllRequirements + case NoTrigger diff --git a/main/src/main/scala/sbt/Plugins.scala b/main-settings/src/main/scala/sbt/Plugins.scala similarity index 99% rename from main/src/main/scala/sbt/Plugins.scala rename to main-settings/src/main/scala/sbt/Plugins.scala index b2c77b9aa..d2ce84e76 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main-settings/src/main/scala/sbt/Plugins.scala @@ -83,7 +83,8 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions { /** * This AutoPlugin requires the plugins the Plugins matcher returned by this method. See [[trigger]]. */ - def requires: Plugins = plugins.JvmPlugin + def requires: Plugins = ??? + // plugins.JvmPlugin val label: String = getClass.getName.stripSuffix("$") @@ -317,7 +318,7 @@ object Plugins extends PluginsFunctions { ${listConflicts(conflicting)}""") } - private[sbt] final object Empty extends Plugins { + private[sbt] object Empty extends Plugins { def &&(o: Basic): Plugins = o override def toString = "" } diff --git a/main-settings/src/main/scala/sbt/Project.scala b/main-settings/src/main/scala/sbt/Project.scala new file mode 100644 index 000000000..81f0a9c6c --- /dev/null +++ b/main-settings/src/main/scala/sbt/Project.scala @@ -0,0 +1,236 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt + +import java.io.File +import sbt.librarymanagement.Configuration +import sbt.Def.{ Flattened, Initialize, ScopedKey, Setting } +import sbt.internal.util.Dag +import sbt.internal.util.complete.DefaultParsers + +sealed trait ProjectDefinition[PR <: ProjectReference] { + + /** + * The project ID is used to uniquely identify a project within a build. + * It is used to refer to a project from the command line and in the scope of keys. + */ + def id: String + + /** The base directory for the project. */ + def base: File + + /** + * The configurations for this project. These are groups of related tasks and the main reason + * to list them here is when one configuration extends another. In this case, a setting lookup + * in one configuration will fall back to the configurations it extends configuration if the setting doesn't exist. + */ + def configurations: Seq[Configuration] + + /** + * The explicitly defined sequence of settings that configure this project. + * These do not include the automatically appended settings as configured by `auto`. + */ + def settings: Seq[Setting[_]] + + /** + * The references to projects that are aggregated by this project. + * When a task is run on this project, it will also be run on aggregated projects. + */ + def aggregate: Seq[PR] + + /** The references to projects that are classpath dependencies of this project. */ + def dependencies: Seq[ClasspathDep[PR]] + + /** The references to projects that are aggregate and classpath dependencies of this project. */ + def uses: Seq[PR] = aggregate ++ dependencies.map(_.project) + def referenced: Seq[PR] = uses + + /** + * The defined [[Plugins]] associated with this project. + * A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. + */ + def plugins: Plugins + + /** Indicates whether the project was created organically, or was generated synthetically. */ + def projectOrigin: ProjectOrigin + + /** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */ + private[sbt] def autoPlugins: Seq[AutoPlugin] + + override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode + + override final def equals(o: Any) = o match { + case p: ProjectDefinition[_] => p.getClass == this.getClass && p.id == id && p.base == base + case _ => false + } + + override def toString = { + val agg = ifNonEmpty("aggregate", aggregate) + val dep = ifNonEmpty("dependencies", dependencies) + val conf = ifNonEmpty("configurations", configurations) + val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.label)) + val fields = + s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"plugins: List($plugins)" :: autos) + s"Project(${fields.mkString(", ")})" + } + + private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] = + if (ts.isEmpty) Nil else s"$label: $ts" :: Nil +} + +trait CompositeProject: + def componentProjects: Seq[Project] +end CompositeProject + +private[sbt] object CompositeProject { + + /** + * Expand user defined projects with the component projects of `compositeProjects`. + * + * If two projects with the same id appear in the user defined projects and + * in `compositeProjects.componentProjects`, the user defined project wins. + * This is necessary for backward compatibility with the idioms: + * {{{ + * lazy val foo = crossProject + * lazy val fooJS = foo.js.settings(...) + * lazy val fooJVM = foo.jvm.settings(...) + * }}} + * and the rarer: + * {{{ + * lazy val fooJS = foo.js.settings(...) + * lazy val foo = crossProject + * lazy val fooJVM = foo.jvm.settings(...) + * }}} + */ + def expand(compositeProjects: Seq[CompositeProject]): Seq[Project] = { + val userProjects = compositeProjects.collect { case p: Project => p } + for (p <- compositeProjects.flatMap(_.componentProjects)) yield { + userProjects.find(_.id == p.id) match { + case Some(userProject) => userProject + case None => p + } + } + }.distinct +} + +sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject: + override def componentProjects: Seq[Project] = this :: Nil + + /** Adds new configurations directly to this project. To override an existing configuration, use `overrideConfigs`. */ + def configs(cs: Configuration*): Project = copy(configurations = configurations ++ cs) + + /** Adds classpath dependencies on internal or external projects. */ + def dependsOn(deps: ClasspathDep[ProjectReference]*): Project = + copy(dependencies = dependencies ++ deps) + + /** + * Adds projects to be aggregated. When a user requests a task to run on this project from the command line, + * the task will also be run in aggregated projects. + */ + def aggregate(refs: ProjectReference*): Project = + copy(aggregate = (aggregate: Seq[ProjectReference]) ++ refs) + + /** Appends settings to the current settings sequence for this project. */ + def settings(ss: Def.SettingsDefinition*): Project = + copy(settings = (settings: Seq[Def.Setting[_]]) ++ Def.settings(ss: _*)) + + /** + * Sets the [[AutoPlugin]]s of this project. + * A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to enable on a project. + */ + def enablePlugins(ns: Plugins*): Project = + setPlugins(ns.foldLeft(plugins)(Plugins.and)) + + /** Disable the given plugins on this project. */ + def disablePlugins(ps: AutoPlugin*): Project = + setPlugins(Plugins.and(plugins, Plugins.And(ps.map(p => Plugins.Exclude(p)).toList))) + + private[sbt] def setPlugins(ns: Plugins): Project = copy(plugins = ns) + + /** Definitively set the [[AutoPlugin]]s for this project. */ + private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = copy(autoPlugins = autos) + + /** Definitively set the [[ProjectOrigin]] for this project. */ + private[sbt] def setProjectOrigin(origin: ProjectOrigin): Project = copy(projectOrigin = origin) + + private[sbt] def copy( + id: String = id, + base: File = base, + aggregate: Seq[ProjectReference] = aggregate, + dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies, + settings: Seq[Setting[_]] = settings, + configurations: Seq[Configuration] = configurations, + plugins: Plugins = plugins, + autoPlugins: Seq[AutoPlugin] = autoPlugins, + projectOrigin: ProjectOrigin = projectOrigin, + ): Project = + Project.unresolved( + id, + base, + aggregate = aggregate, + dependencies = dependencies, + settings = settings, + configurations, + plugins, + autoPlugins, + projectOrigin + ) +end Project + +object Project: + private abstract class ProjectDef[PR <: ProjectReference]( + val id: String, + val base: File, + val aggregate: Seq[PR], + val dependencies: Seq[ClasspathDep[PR]], + val settings: Seq[Def.Setting[_]], + val configurations: Seq[Configuration], + val plugins: Plugins, + val autoPlugins: Seq[AutoPlugin], + val projectOrigin: ProjectOrigin + ) extends ProjectDefinition[PR] { + // checks for cyclic references here instead of having to do it in Scope.delegates + Dag.topologicalSort(configurations)(_.extendsConfigs) + } + + private def unresolved( + id: String, + base: File, + aggregate: Seq[ProjectReference], + dependencies: Seq[ClasspathDep[ProjectReference]], + settings: Seq[Def.Setting[_]], + configurations: Seq[Configuration], + plugins: Plugins, + autoPlugins: Seq[AutoPlugin], + origin: ProjectOrigin + ): Project = { + validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg)) + new ProjectDef[ProjectReference]( + id, + base, + aggregate, + dependencies, + settings, + configurations, + plugins, + autoPlugins, + origin + ) with Project + } + + /** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not. */ + def validProjectID(id: String): Option[String] = + DefaultParsers.parse(id, DefaultParsers.ID).left.toOption +end Project + +sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] { + + /** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]]. */ + def autoPlugins: Seq[AutoPlugin] + +} diff --git a/main-settings/src/main/scala/sbt/ProjectOrigin.scala b/main-settings/src/main/scala/sbt/ProjectOrigin.scala new file mode 100644 index 000000000..1245625a6 --- /dev/null +++ b/main-settings/src/main/scala/sbt/ProjectOrigin.scala @@ -0,0 +1,11 @@ +package sbt + +/** + * Indicate whether the project was created organically, synthesized by a plugin, + * or is a "generic root" project supplied by sbt when a project doesn't exist for `file(".")`. + */ +enum ProjectOrigin: + case Organic + case ExtraProject + case DerivedProject + case GenericRoot diff --git a/main-settings/src/main/scala/sbt/Reference.scala b/main-settings/src/main/scala/sbt/Reference.scala index 7d24067cf..c880ade31 100644 --- a/main-settings/src/main/scala/sbt/Reference.scala +++ b/main-settings/src/main/scala/sbt/Reference.scala @@ -10,12 +10,38 @@ package sbt import java.io.File import java.net.URI +import sbt.internal.util.AttributeKey import sbt.io.IO +import sbt.librarymanagement.Configuration +import sbt.SlashSyntax.{ RichConfiguration, RichScope } +import scala.annotation.nowarn // in all of these, the URI must be resolved and normalized before it is definitive /** Identifies a project or build. */ -sealed trait Reference +sealed trait Reference: + private[sbt] def asScopeAxis: ScopeAxis[this.type] = + Select(this) + private[sbt] def asScope: Scope = + Scope(asScopeAxis, This, This, This) + + @nowarn + def /(c: ConfigKey): RichConfiguration = RichConfiguration(asScope in c) + + @nowarn + def /(c: Configuration): RichConfiguration = RichConfiguration(asScope in c) + + // This is for handling `Zero / Zero / name`. + @nowarn + def /(configAxis: ScopeAxis[ConfigKey]): RichConfiguration = + new RichConfiguration(asScope.copy(config = configAxis)) + + @nowarn + final def /[K](key: Scoped.ScopingSetting[K]): K = key.in(asScope) + + @nowarn + final def /(key: AttributeKey[_]): RichScope = new RichScope(asScope in key) +end Reference /** A fully resolved, unique identifier for a project or build. */ sealed trait ResolvedReference extends Reference @@ -24,7 +50,7 @@ sealed trait ResolvedReference extends Reference sealed trait BuildReference extends Reference /** Identifies the build for the current context. */ -final case object ThisBuild extends BuildReference +case object ThisBuild extends BuildReference /** Uniquely identifies a build by a URI. */ final case class BuildRef(build: URI) extends BuildReference with ResolvedReference @@ -44,10 +70,10 @@ final case class LocalProject(project: String) extends ProjectReference final case class RootProject(build: URI) extends ProjectReference /** Identifies the root project in the current build context. */ -final case object LocalRootProject extends ProjectReference +case object LocalRootProject extends ProjectReference /** Identifies the project for the current context. */ -final case object ThisProject extends ProjectReference +case object ThisProject extends ProjectReference object ProjectRef { def apply(base: File, id: String): ProjectRef = ProjectRef(IO toURI base, id) diff --git a/main-settings/src/main/scala/sbt/ScopeAxis.scala b/main-settings/src/main/scala/sbt/ScopeAxis.scala index 6b494a73b..eaded5d28 100644 --- a/main-settings/src/main/scala/sbt/ScopeAxis.scala +++ b/main-settings/src/main/scala/sbt/ScopeAxis.scala @@ -43,6 +43,7 @@ case object Zero extends ScopeAxis[Nothing] final case class Select[S](s: S) extends ScopeAxis[S] { override def isSelect = true } + object ScopeAxis { def fromOption[T](o: Option[T]): ScopeAxis[T] = o match { case Some(v) => Select(v) diff --git a/main-settings/src/main/scala/sbt/SlashSyntax.scala b/main-settings/src/main/scala/sbt/SlashSyntax.scala index e4721ad70..845ecea47 100644 --- a/main-settings/src/main/scala/sbt/SlashSyntax.scala +++ b/main-settings/src/main/scala/sbt/SlashSyntax.scala @@ -36,8 +36,9 @@ trait SlashSyntax { // implicit def sbtSlashSyntaxRichReference(r: Reference): RichReference = Select(r) - given sbtSlashSyntaxRichReference: Conversion[Reference, RichReference] = - (r: Reference) => Select(r) + // Implement in Reference directly + // given sbtSlashSyntaxRichReference: Conversion[Reference, RichReference] = + // (r: Reference) => Select(r) given sbtSlashSyntaxRichProject[A](using Conversion[A, Reference]): Conversion[A, RichReference] = (a: A) => Select(a: Reference) diff --git a/main/src/main/scala/sbt/internal/RemoteCache.scala b/main/src/main/scala-2/sbt/RemoteCache.scala similarity index 86% rename from main/src/main/scala/sbt/internal/RemoteCache.scala rename to main/src/main/scala-2/sbt/RemoteCache.scala index c3838243b..1f3d51045 100644 --- a/main/src/main/scala/sbt/internal/RemoteCache.scala +++ b/main/src/main/scala-2/sbt/RemoteCache.scala @@ -221,9 +221,8 @@ object RemoteCache { remoteCacheId := { val inputs = (unmanagedSources / inputFileStamps).value val cp = (externalDependencyClasspath / outputFileStamps).?.value.getOrElse(Nil) - val extraInc = (extraIncOptions.value) flatMap { - case (k, v) => - Vector(k, v) + val extraInc = (extraIncOptions.value) flatMap { case (k, v) => + Vector(k, v) } combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc) }, @@ -262,54 +261,51 @@ object RemoteCache { val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value val nonPom = artifacts.filterNot(isPomArtifact).toVector val copyResources = getResourceFilePaths.value - m.withModule(log) { - case (ivy, md, _) => - val resolver = ivy.getSettings.getResolver(r.name) - if (resolver eq null) sys.error(s"undefined resolver '${r.name}'") - val cross = CrossVersion(p, smi) - val crossf: String => String = cross.getOrElse(identity _) - var found = false - ids foreach { - id: String => - val v = toVersion(id) - val modId = p.withRevision(v).withName(crossf(p.name)) - val ivyId = IvySbt.toID(modId) - if (found) () - else { - val rawa = nonPom map { _.artifact } - val seqa = CrossVersion.substituteCross(rawa, cross) - val as = seqa map { a => - val extra = a.classifier match { - case Some(c) => Map("e:classifier" -> c) - case None => Map.empty - } - new DefaultArtifact(ivyId, null, a.name, a.`type`, a.extension, extra.asJava) - } - pullFromMavenRepo0(as, resolver, log) match { - case Right(xs0) => - val jars = xs0.distinct - - nonPom.foreach { art => - val classifier = art.artifact.classifier - - findJar(classifier, v, jars) match { - case Some(jar) => - extractJar(art, jar, copyResources) - log.info(s"remote cache artifact extracted for $p $classifier") - - case None => - log.info(s"remote cache artifact not found for $p $classifier") - } - } - found = true - case Left(e) => - val classifier = seqa.map(_.classifier).mkString(" ") - log.info(s"remote cache artifact not found for $p $classifier") - log.debug(e.getMessage) - } + m.withModule(log) { case (ivy, md, _) => + val resolver = ivy.getSettings.getResolver(r.name) + if (resolver eq null) sys.error(s"undefined resolver '${r.name}'") + val cross = CrossVersion(p, smi) + val crossf: String => String = cross.getOrElse(identity _) + var found = false + ids foreach { (id: String) => + val v = toVersion(id) + val modId = p.withRevision(v).withName(crossf(p.name)) + val ivyId = IvySbt.toID(modId) + if (found) () + else { + val rawa = nonPom map { _.artifact } + val seqa = CrossVersion.substituteCross(rawa, cross) + val as = seqa map { a => + val extra = a.classifier match { + case Some(c) => Map("e:classifier" -> c) + case None => Map.empty } + new DefaultArtifact(ivyId, null, a.name, a.`type`, a.extension, extra.asJava) + } + pullFromMavenRepo0(as, resolver, log) match { + case Right(xs0) => + val jars = xs0.distinct + + nonPom.foreach { art => + val classifier = art.artifact.classifier + + findJar(classifier, v, jars) match { + case Some(jar) => + extractJar(art, jar, copyResources) + log.info(s"remote cache artifact extracted for $p $classifier") + + case None => + log.info(s"remote cache artifact not found for $p $classifier") + } + } + found = true + case Left(e) => + log.info(s"remote cache not found for ${v}") + log.debug(e.getMessage) + } } - () + } + () } }, ) @@ -439,10 +435,10 @@ object RemoteCache { } private def extractTestResult(output: File, testResult: File): Unit = { - //val expandedTestResult = output / "META-INF" / "succeeded_tests" - //if (expandedTestResult.exists) { + // val expandedTestResult = output / "META-INF" / "succeeded_tests" + // if (expandedTestResult.exists) { // IO.move(expandedTestResult, testResult) - //} + // } } private def defaultArtifactTasks: Seq[TaskKey[File]] = @@ -453,13 +449,13 @@ object RemoteCache { pkgTasks: Seq[TaskKey[File]] ): Def.Initialize[Seq[A]] = (Classpaths.forallIn(key, pkgTasks) zipWith - Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { - case (a, true) => a + Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) => + a }) private def extractHash(inputs: Seq[(Path, FileStamp)]): Vector[String] = - inputs.toVector map { - case (_, stamp0) => toOption(stamp0.stamp.getHash).getOrElse("cafe") + inputs.toVector map { case (_, stamp0) => + toOption(stamp0.stamp.getHash).getOrElse("cafe") } private def combineHash(vs: Vector[String]): String = { diff --git a/main/src/main/scala/sbt/ScriptedPlugin.scala b/main/src/main/scala-2/sbt/ScriptedPlugin.scala similarity index 97% rename from main/src/main/scala/sbt/ScriptedPlugin.scala rename to main/src/main/scala-2/sbt/ScriptedPlugin.scala index 7a564d166..45890dc15 100644 --- a/main/src/main/scala/sbt/ScriptedPlugin.scala +++ b/main/src/main/scala-2/sbt/ScriptedPlugin.scala @@ -62,7 +62,7 @@ object ScriptedPlugin extends AutoPlugin { override lazy val projectSettings: Seq[Setting[_]] = Seq( ivyConfigurations ++= Seq(ScriptedConf, ScriptedLaunchConf), scriptedSbt := (pluginCrossBuild / sbtVersion).value, - sbtLauncher := getJars(ScriptedLaunchConf).map(_.get.head).value, + sbtLauncher := getJars(ScriptedLaunchConf).map(_.get().head).value, sbtTestDirectory := sourceDirectory.value / "sbt-test", libraryDependencies ++= (CrossVersion.partialVersion(scriptedSbt.value) match { case Some((0, 13)) => @@ -103,7 +103,7 @@ object ScriptedPlugin extends AutoPlugin { private[sbt] def scriptedTestsTask: Initialize[Task[AnyRef]] = Def.task { - val cp = scriptedClasspath.value.get.map(_.toPath) + val cp = scriptedClasspath.value.get().map(_.toPath) val loader = ClasspathUtil.toLoader(cp, scalaInstance.value.loader) try { ModuleUtilities.getObject("sbt.scriptedtest.ScriptedTests", loader) @@ -124,7 +124,7 @@ object ScriptedPlugin extends AutoPlugin { val scriptedFiles: NameFilter = ("test": NameFilter) | "test.script" | "pending" | "pending.script" - val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get map { + val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get() map { (f: File) => val p = f.getParentFile (p.getParentFile.getName, p.getName) diff --git a/main/src/main/scala/sbt/internal/Continuous.scala b/main/src/main/scala-2/sbt/internal/Continuous.scala similarity index 96% rename from main/src/main/scala/sbt/internal/Continuous.scala rename to main/src/main/scala-2/sbt/internal/Continuous.scala index ea1b5134f..260e31804 100644 --- a/main/src/main/scala/sbt/internal/Continuous.scala +++ b/main/src/main/scala-2/sbt/internal/Continuous.scala @@ -70,7 +70,6 @@ import scala.util.control.NonFatal * For now Continuous extends DeprecatedContinuous to minimize the number of deprecation warnings * produced by this file. In sbt 2.0, the DeprecatedContinuous mixin should be eliminated and * the deprecated apis should no longer be supported. - * */ private[sbt] object Continuous extends DeprecatedContinuous { private type Event = FileEvent[FileAttributes] @@ -316,8 +315,8 @@ private[sbt] object Continuous extends DeprecatedContinuous { isCommand: Boolean, commands: Seq[String], fileStampCache: FileStamp.Cache - )( - implicit extracted: Extracted + )(implicit + extracted: Extracted ): Callbacks = { val project = extracted.currentRef val beforeCommand = () => configs.foreach(_.watchSettings.beforeCommand()) @@ -356,10 +355,9 @@ private[sbt] object Continuous extends DeprecatedContinuous { ): (Watch.Action, String, Int, State) => State = { configs.flatMap(_.watchSettings.onTermination).distinct match { case Seq(head, tail @ _*) => - tail.foldLeft(head) { - case (onTermination, configOnTermination) => - (action, cmd, count, state) => - configOnTermination(action, cmd, count, onTermination(action, cmd, count, state)) + tail.foldLeft(head) { case (onTermination, configOnTermination) => + (action, cmd, count, state) => + configOnTermination(action, cmd, count, onTermination(action, cmd, count, state)) } case _ => if (isCommand) Watch.defaultCommandOnTermination else Watch.defaultTaskOnTermination @@ -602,9 +600,8 @@ private[sbt] object Continuous extends DeprecatedContinuous { else Update(event) ) } - acceptedConfigParameters.flatMap { - case (_, _, callback) => - watchEvent.map(e => e -> callback(count, e)) + acceptedConfigParameters.flatMap { case (_, _, callback) => + watchEvent.map(e => e -> callback(count, e)) } } else Nil } @@ -626,39 +623,41 @@ private[sbt] object Continuous extends DeprecatedContinuous { } } - ((count: Int) => { - val interrupted = new AtomicBoolean(false) - def getEvent: Option[(Watch.Event, Watch.Action)] = { - val events = - try antiEntropyMonitor.poll(Duration.Inf) - catch { case _: InterruptedException => interrupted.set(true); Nil } - val actions = events.flatMap(onEvent(count, _)) - if (actions.exists(_._2 != Watch.Ignore)) { - val builder = new StringBuilder - val min = actions.minBy { - case (e, a) => + ( + (count: Int) => { + val interrupted = new AtomicBoolean(false) + def getEvent: Option[(Watch.Event, Watch.Action)] = { + val events = + try antiEntropyMonitor.poll(Duration.Inf) + catch { case _: InterruptedException => interrupted.set(true); Nil } + val actions = events.flatMap(onEvent(count, _)) + if (actions.exists(_._2 != Watch.Ignore)) { + val builder = new StringBuilder + val min = actions.minBy { case (e, a) => if (builder.nonEmpty) builder.append(", ") val path = e.path builder.append(path) builder.append(" -> ") builder.append(a.toString) a - } - logger.debug(s"Received file event actions: $builder. Returning: $min") - if (min._2 == Watch.Trigger) onTrigger(count, min._1) - if (min._2 == Watch.ShowOptions) None else Some(min) - } else None - } + } + logger.debug(s"Received file event actions: $builder. Returning: $min") + if (min._2 == Watch.Trigger) onTrigger(count, min._1) + if (min._2 == Watch.ShowOptions) None else Some(min) + } else None + } - @tailrec def impl(): Option[(Watch.Event, Watch.Action)] = getEvent match { - case None => - if (interrupted.get || Thread.interrupted) None - else impl() - case r => r - } + @tailrec def impl(): Option[(Watch.Event, Watch.Action)] = getEvent match { + case None => + if (interrupted.get || Thread.interrupted) None + else impl() + case r => r + } - impl() - }, () => monitor.close()) + impl() + }, + () => monitor.close() + ) } private[this] class WatchExecutor(name: String) extends AutoCloseable { @@ -718,10 +717,12 @@ private[sbt] object Continuous extends DeprecatedContinuous { thread.joinFor(1.second) } def result: Try[R] = - try queue.take match { - case Right(r) => Success(r) - case Left(_) => Failure(new NullPointerException) - } catch { case t: InterruptedException => Failure(t) } + try + queue.take match { + case Right(r) => Success(r) + case Left(_) => Failure(new NullPointerException) + } + catch { case t: InterruptedException => Failure(t) } } } @@ -773,13 +774,12 @@ private[sbt] object Continuous extends DeprecatedContinuous { val default: String => Watch.Action = string => parse(inputStream(string), systemInBuilder, fullParser) val alt = alternative - .map { - case (key, handler) => - val is = extracted.runTask(key, state)._2 - () => handler(is) + .map { case (key, handler) => + val is = extracted.runTask(key, state)._2 + () => handler(is) } .getOrElse(() => Watch.Ignore) - string: String => + (string: String) => ((if (string.nonEmpty) default(string) else Watch.Ignore) :: alt() :: Nil).min } executor => { @@ -923,8 +923,8 @@ private[sbt] object Continuous extends DeprecatedContinuous { * @param key the [[ScopedKey]] instance that sets the [[Scope]] for the settings we're extracting * @param extracted the [[Extracted]] instance for the build */ - private final class WatchSettings private[Continuous] (val key: ScopedKey[_])( - implicit extracted: Extracted + private final class WatchSettings private[Continuous] (val key: ScopedKey[_])(implicit + extracted: Extracted ) { val antiEntropy: FiniteDuration = key.get(watchAntiEntropy).getOrElse(Watch.defaultAntiEntropy) @@ -1015,10 +1015,15 @@ private[sbt] object Continuous extends DeprecatedContinuous { extra = scope.extra.toOption.isDefined ) Scope - .displayMasked(scope, " ", (_: Reference) match { - case p: ProjectRef => s"${p.project.trim} /" - case _ => "Global /" - }, mask) + .displayMasked( + scope, + " ", + (_: Reference) match { + case p: ProjectRef => s"${p.project.trim} /" + case _ => "Global /" + }, + mask + ) .dropRight(3) // delete trailing "/" .trim } diff --git a/main/src/main/scala/sbt/internal/DeprecatedContinuous.scala b/main/src/main/scala-2/sbt/internal/DeprecatedContinuous.scala similarity index 100% rename from main/src/main/scala/sbt/internal/DeprecatedContinuous.scala rename to main/src/main/scala-2/sbt/internal/DeprecatedContinuous.scala diff --git a/main/src/main/scala/sbt/internal/SettingCompletions.scala b/main/src/main/scala-2/sbt/internal/SettingCompletions.scala similarity index 100% rename from main/src/main/scala/sbt/internal/SettingCompletions.scala rename to main/src/main/scala-2/sbt/internal/SettingCompletions.scala diff --git a/main/src/main/scala/sbt/internal/graph/model.scala b/main/src/main/scala-2/sbt/internal/graph/model.scala similarity index 92% rename from main/src/main/scala/sbt/internal/graph/model.scala rename to main/src/main/scala-2/sbt/internal/graph/model.scala index b8e9efd4e..4a20ad9bf 100644 --- a/main/src/main/scala/sbt/internal/graph/model.scala +++ b/main/src/main/scala-2/sbt/internal/graph/model.scala @@ -20,7 +20,7 @@ private[sbt] case class GraphModuleId(organization: String, name: String, versio private[sbt] object GraphModuleId { import sjsonnew.BasicJsonProtocol.StringJsonFormat implicit val graphModuleIdIso = LList.iso[GraphModuleId, String :*: String :*: String :*: LNil]( - { m: GraphModuleId => + { (m: GraphModuleId) => ("organization", m.organization) :*: ("name", m.name) :*: ("version", m.version) :*: LNil }, { case (_, organization) :*: (_, name) :*: (_, version) :*: LNil => @@ -44,10 +44,14 @@ private[sbt] case class Module( private[sbt] object Module { import sjsonnew.BasicJsonProtocol._ - implicit val moduleIso = LList.iso[Module, GraphModuleId :*: Option[String] :*: String :*: Option[ - String - ] :*: Option[File] :*: Option[String] :*: LNil]( - { m: Module => + implicit val moduleIso = LList.iso[ + Module, + GraphModuleId :*: Option[String] :*: String :*: + Option[ + String + ] :*: Option[File] :*: Option[String] :*: LNil + ]( + { (m: Module) => ("id", m.id) :*: ("license", m.license) :*: ("extraInfo", m.extraInfo) :*: ("evictedByVersion", m.evictedByVersion) :*: ( "jarFile", @@ -99,7 +103,7 @@ private[sbt] object ModuleGraph { import BasicJsonProtocol._ implicit val moduleGraphIso = LList.iso[ModuleGraph, Vector[Module] :*: Vector[Edge] :*: LNil]( - { g: ModuleGraph => + { (g: ModuleGraph) => ("nodes", g.nodes.toVector) :*: ("edges", g.edges.toVector) :*: LNil }, { case (_, nodes: Vector[Module]) :*: (_, edges: Vector[Edge]) :*: LNil => diff --git a/main/src/main/scala/sbt/internal/server/BuildServerEvalReporter.scala b/main/src/main/scala-2/sbt/internal/server/BuildServerEvalReporter.scala similarity index 100% rename from main/src/main/scala/sbt/internal/server/BuildServerEvalReporter.scala rename to main/src/main/scala-2/sbt/internal/server/BuildServerEvalReporter.scala diff --git a/main/src/main/scala/sbt/internal/nio/CheckBuildSources.scala b/main/src/main/scala-2/sbt/nio/CheckBuildSources.scala similarity index 100% rename from main/src/main/scala/sbt/internal/nio/CheckBuildSources.scala rename to main/src/main/scala-2/sbt/nio/CheckBuildSources.scala diff --git a/main/src/main/scala/sbt/nio/Settings.scala b/main/src/main/scala-2/sbt/nio/Settings.scala similarity index 97% rename from main/src/main/scala/sbt/nio/Settings.scala rename to main/src/main/scala-2/sbt/nio/Settings.scala index cef86648f..00d327ca2 100644 --- a/main/src/main/scala/sbt/nio/Settings.scala +++ b/main/src/main/scala-2/sbt/nio/Settings.scala @@ -86,8 +86,8 @@ private[sbt] object Settings { val taskKey = TaskKey(sk.key) in sk.scope // We create a previous reference so that clean automatically works without the // user having to explicitly call previous anywhere. - val init = Previous.runtime(taskKey).zip(taskKey) { - case (_, t) => t.map(implicitly[ToSeqPath[T]].apply) + val init = Previous.runtime(taskKey).zip(taskKey) { case (_, t) => + t.map(implicitly[ToSeqPath[T]].apply) } val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key) addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) :: @@ -223,14 +223,13 @@ private[sbt] object Settings { val seen = ConcurrentHashMap.newKeySet[Path] val prevMap = new ConcurrentHashMap[Path, FileStamp]() previous.foreach { case (k, v) => prevMap.put(k, v); () } - current.foreach { - case (path, currentStamp) => - if (seen.add(path)) { - prevMap.remove(path) match { - case null => createdBuilder += path - case old => (if (old != currentStamp) modifiedBuilder else unmodifiedBuilder) += path - } + current.foreach { case (path, currentStamp) => + if (seen.add(path)) { + prevMap.remove(path) match { + case null => createdBuilder += path + case old => (if (old != currentStamp) modifiedBuilder else unmodifiedBuilder) += path } + } } prevMap.forEach((p, _) => deletedBuilder += p) val unmodified = unmodifiedBuilder.result() diff --git a/main/src/main/scala/sbt/nio/Watch.scala b/main/src/main/scala-2/sbt/nio/Watch.scala similarity index 100% rename from main/src/main/scala/sbt/nio/Watch.scala rename to main/src/main/scala-2/sbt/nio/Watch.scala diff --git a/main/src/main/scala/sbt/plugins/DependencyTreeSettings.scala b/main/src/main/scala-2/sbt/pluigins/DependencyTreeSettings.scala similarity index 100% rename from main/src/main/scala/sbt/plugins/DependencyTreeSettings.scala rename to main/src/main/scala-2/sbt/pluigins/DependencyTreeSettings.scala diff --git a/main/src/main/scala/sbt/plugins/SemanticdbPlugin.scala b/main/src/main/scala-2/sbt/pluigins/SemanticdbPlugin.scala similarity index 98% rename from main/src/main/scala/sbt/plugins/SemanticdbPlugin.scala rename to main/src/main/scala-2/sbt/pluigins/SemanticdbPlugin.scala index b7a558d6d..9c1f777b2 100644 --- a/main/src/main/scala/sbt/plugins/SemanticdbPlugin.scala +++ b/main/src/main/scala-2/sbt/pluigins/SemanticdbPlugin.scala @@ -74,7 +74,8 @@ object SemanticdbPlugin extends AutoPlugin { if (enabled) Def.setting { semanticdbOptions.?.all(ancestorConfigs(config)).value.flatten.flatten - } else Def.setting { Nil } + } + else Def.setting { Nil } }.value, scalacOptions ++= { if (semanticdbEnabled.value) diff --git a/main/src/main/scala/sbt/BuildSyntax.scala b/main/src/main/scala/sbt/BuildSyntax.scala index b2038c4a3..63b7bdcf6 100644 --- a/main/src/main/scala/sbt/BuildSyntax.scala +++ b/main/src/main/scala/sbt/BuildSyntax.scala @@ -12,9 +12,12 @@ import sbt.librarymanagement.Configuration private[sbt] trait BuildSyntax { import scala.language.experimental.macros - def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T] - def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T] - def inputKey[T](description: String): InputKey[T] = macro std.KeyMacro.inputKeyImpl[T] + def settingKey[A](description: String): SettingKey[A] = ??? + // macro std.KeyMacro.settingKeyImpl[T] + def taskKey[A](description: String): TaskKey[A] = ??? + // macro std.KeyMacro.taskKeyImpl[T] + def inputKey[A](description: String): InputKey[A] = ??? + // macro std.KeyMacro.inputKeyImpl[T] def enablePlugins(ps: AutoPlugin*): DslEntry = DslEntry.DslEnablePlugins(ps) def disablePlugins(ps: AutoPlugin*): DslEntry = DslEntry.DslDisablePlugins(ps) diff --git a/main/src/main/scala/sbt/Cross.scala b/main/src/main/scala/sbt/Cross.scala index b6f8447d4..6b8ac12bf 100644 --- a/main/src/main/scala/sbt/Cross.scala +++ b/main/src/main/scala/sbt/Cross.scala @@ -117,8 +117,8 @@ object Cross { )(command: String): (Seq[ProjectRef], String) = { import extracted._ import DefaultParsers._ - val parser = (OpOrID <~ charClass(_ == '/', "/")) ~ any.* map { - case seg1 ~ cmd => (seg1, cmd.mkString) + val parser = (OpOrID <~ charClass(_ == '/', "/")) ~ any.* map { case seg1 ~ cmd => + (seg1, cmd.mkString) } Parser.parse(command, parser) match { case Right((seg1, cmd)) => @@ -157,8 +157,8 @@ object Cross { "that are configured." ) state.log.debug("Scala versions configuration is:") - projCrossVersions.foreach { - case (project, versions) => state.log.debug(s"$project: $versions") + projCrossVersions.foreach { case (project, versions) => + state.log.debug(s"$project: $versions") } } @@ -180,41 +180,40 @@ object Cross { .groupBy(_._1) .mapValues(_.map(_._2).toSet) val commandsByVersion = keysByVersion.toSeq - .flatMap { - case (v, keys) => - val projects = keys.flatMap(project) - keys.toSeq.flatMap { k => - project(k).filter(projects.contains).flatMap { p => - if (p == extracted.currentRef || !projects.contains(extracted.currentRef)) { - val parts = project(k).map(_.project) ++ k.scope.config.toOption.map { - case ConfigKey(n) => n.head.toUpper + n.tail + .flatMap { case (v, keys) => + val projects = keys.flatMap(project) + keys.toSeq.flatMap { k => + project(k).filter(projects.contains).flatMap { p => + if (p == extracted.currentRef || !projects.contains(extracted.currentRef)) { + val parts = + project(k).map(_.project) ++ k.scope.config.toOption.map { case ConfigKey(n) => + n.head.toUpper + n.tail } ++ k.scope.task.toOption.map(_.label) ++ Some(k.key.label) - Some(v -> parts.mkString("", "/", fullArgs)) - } else None - } + Some(v -> parts.mkString("", "/", fullArgs)) + } else None } + } } .groupBy(_._1) .mapValues(_.map(_._2)) .toSeq .sortBy(_._1) - commandsByVersion.flatMap { - case (v, commands) => - commands match { - case Seq(c) => Seq(s"$SwitchCommand $verbose $v $c") - case Seq() => Nil // should be unreachable - case multi if fullArgs.isEmpty => - Seq(s"$SwitchCommand $verbose $v all ${multi.mkString(" ")}") - case multi => Seq(s"$SwitchCommand $verbose $v") ++ multi - } + commandsByVersion.flatMap { case (v, commands) => + commands match { + case Seq(c) => Seq(s"$SwitchCommand $verbose $v $c") + case Seq() => Nil // should be unreachable + case multi if fullArgs.isEmpty => + Seq(s"$SwitchCommand $verbose $v all ${multi.mkString(" ")}") + case multi => Seq(s"$SwitchCommand $verbose $v") ++ multi + } } } allCommands.toList ::: CrossRestoreSessionCommand :: captureCurrentSession(state, extracted) } def crossRestoreSession: Command = - Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)( - (s, _) => crossRestoreSessionImpl(s) + Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)((s, _) => + crossRestoreSessionImpl(s) ) private def crossRestoreSessionImpl(state: State): State = { @@ -288,9 +287,8 @@ object Cross { excluded: Seq[(ResolvedReference, Seq[ScalaVersion])] ) = { - instance.foreach { - case (home, instance) => - state.log.info(s"Using Scala home $home with actual version ${instance.actualVersion}") + instance.foreach { case (home, instance) => + state.log.info(s"Using Scala home $home with actual version ${instance.actualVersion}") } if (switch.version.force) { state.log.info(s"Forcing Scala version to $version on all projects.") diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index d86fd410e..6f8836056 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -254,7 +254,12 @@ object Defaults extends BuildCommon { buildDependencies := Classpaths.constructBuildDependencies.value, version :== "0.1.0-SNAPSHOT", versionScheme :== None, - classpathTypes :== Set("jar", "bundle", "maven-plugin", "test-jar") ++ CustomPomParser.JarPackagings, + classpathTypes :== Set( + "jar", + "bundle", + "maven-plugin", + "test-jar" + ) ++ CustomPomParser.JarPackagings, artifactClassifier :== None, checksums := Classpaths.bootChecksums(appConfiguration.value), conflictManager := ConflictManager.default, @@ -286,7 +291,7 @@ object Defaults extends BuildCommon { trapExit :== true, connectInput :== false, cancelable :== true, - taskCancelStrategy := { state: State => + taskCancelStrategy := { (state: State) => if (cancelable.value) TaskCancellationStrategy.Signal else TaskCancellationStrategy.Null }, @@ -653,7 +658,9 @@ object Defaults extends BuildCommon { }, semanticdbTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "meta"), compileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "zinc"), - earlyCompileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "early-zinc"), + earlyCompileAnalysisTargetRoot := crossTarget.value / (prefix( + configuration.value.name + ) + "early-zinc"), doc / target := crossTarget.value / (prefix(configuration.value.name) + "api") ) @@ -884,135 +891,141 @@ object Defaults extends BuildCommon { ) ) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq( - compileOutputs := { - import scala.collection.JavaConverters._ - val c = fileConverter.value - val classFiles = - manipulateBytecode.value.analysis.readStamps.getAllProductStamps.keySet.asScala - (classFiles.toSeq map { x => - c.toPath(x) - }) :+ compileAnalysisFile.value.toPath - }, - compileOutputs := compileOutputs.triggeredBy(compile).value, - tastyFiles := Def.taskIf { - if (ScalaArtifacts.isScala3(scalaVersion.value)) { - val _ = compile.value - val tastyFiles = classDirectory.value.**("*.tasty").get - tastyFiles.map(_.getAbsoluteFile) - } else Nil - }.value, - clean := (compileOutputs / clean).value, - earlyOutputPing := Def.promise[Boolean], - compileProgress := { - val s = streams.value - val promise = earlyOutputPing.value - val mn = moduleName.value - val c = configuration.value - new CompileProgress { - override def afterEarlyOutput(isSuccess: Boolean): Unit = { - if (isSuccess) s.log.debug(s"[$mn / $c] early output is success") - else s.log.debug(s"[$mn / $c] early output can't be made because of macros") - promise.complete(Value(isSuccess)) - } - } - }, - compileEarly := compileEarlyTask.value, - compile := compileTask.value, - compileScalaBackend := compileScalaBackendTask.value, - compileJava := compileJavaTask.value, - compileSplit := { - // conditional task - if (incOptions.value.pipelining) compileJava.value - else compileScalaBackend.value - }, - internalDependencyConfigurations := InternalDependencies.configurations.value, - manipulateBytecode := compileSplit.value, - compileIncremental := compileIncrementalTask.tag(Tags.Compile, Tags.CPU).value, - printWarnings := printWarningsTask.value, - compileAnalysisFilename := { - // Here, if the user wants cross-scala-versioning, we also append it - // to the analysis cache, so we keep the scala versions separated. - val binVersion = scalaBinaryVersion.value - val extra = - if (crossPaths.value) s"_$binVersion" - else "" - s"inc_compile$extra.zip" - }, - earlyCompileAnalysisFile := { - earlyCompileAnalysisTargetRoot.value / compileAnalysisFilename.value - }, - compileAnalysisFile := { - compileAnalysisTargetRoot.value / compileAnalysisFilename.value - }, - externalHooks := IncOptions.defaultExternal, - incOptions := { - val old = incOptions.value - old - .withAuxiliaryClassFiles(auxiliaryClassFiles.value.toArray) - .withExternalHooks(externalHooks.value) - .withClassfileManagerType( - Option( - TransactionalManagerType - .of( // https://github.com/sbt/sbt/issues/1673 - crossTarget.value / s"${prefix(configuration.value.name)}classes.bak", - streams.value.log - ): ClassFileManagerType - ).toOptional - ) - .withPipelining(usePipelining.value) - }, - scalacOptions := { - val old = scalacOptions.value - val converter = fileConverter.value - if (exportPipelining.value) - Vector("-Ypickle-java", "-Ypickle-write", converter.toPath(earlyOutput.value).toString) ++ old - else old - }, - scalacOptions := { - val old = scalacOptions.value - if (sbtPlugin.value && VersionNumber(scalaVersion.value) - .matchesSemVer(SemanticSelector("=2.12 >=2.12.13"))) - old ++ Seq("-Wconf:cat=unused-nowarn:s", "-Xsource:3") - else old - }, - persistJarClasspath :== true, - classpathEntryDefinesClassVF := { - (if (persistJarClasspath.value) classpathDefinesClassCache.value - else VirtualFileValueCache.definesClassCache(fileConverter.value)).get - }, - compileIncSetup := compileIncSetupTask.value, - console := consoleTask.value, - collectAnalyses := Definition.collectAnalysesTask.map(_ => ()).value, - consoleQuick := consoleQuickTask.value, - discoveredMainClasses := (compile map discoverMainClasses storeAs discoveredMainClasses xtriggeredBy compile).value, - discoveredSbtPlugins := discoverSbtPluginNames.value, - // This fork options, scoped to the configuration is used for tests - forkOptions := forkOptionsTask.value, - selectMainClass := mainClass.value orElse askForMainClass(discoveredMainClasses.value), - run / mainClass := (run / selectMainClass).value, - mainClass := { - val logWarning = state.value.currentCommand.forall(!_.commandLine.split(" ").exists { - case "run" | "runMain" => true - case r => - r.split("/") match { - case Array(parts @ _*) => - parts.lastOption match { - case Some("run" | "runMain") => true - case _ => false - } + compileOutputs := { + import scala.collection.JavaConverters._ + val c = fileConverter.value + val classFiles = + manipulateBytecode.value.analysis.readStamps.getAllProductStamps.keySet.asScala + (classFiles.toSeq map { x => + c.toPath(x) + }) :+ compileAnalysisFile.value.toPath + }, + compileOutputs := compileOutputs.triggeredBy(compile).value, + tastyFiles := Def.taskIf { + if (ScalaArtifacts.isScala3(scalaVersion.value)) { + val _ = compile.value + val tastyFiles = classDirectory.value.**("*.tasty").get + tastyFiles.map(_.getAbsoluteFile) + } else Nil + }.value, + clean := (compileOutputs / clean).value, + earlyOutputPing := Def.promise[Boolean], + compileProgress := { + val s = streams.value + val promise = earlyOutputPing.value + val mn = moduleName.value + val c = configuration.value + new CompileProgress { + override def afterEarlyOutput(isSuccess: Boolean): Unit = { + if (isSuccess) s.log.debug(s"[$mn / $c] early output is success") + else s.log.debug(s"[$mn / $c] early output can't be made because of macros") + promise.complete(Value(isSuccess)) } - }) - pickMainClassOrWarn(discoveredMainClasses.value, streams.value.log, logWarning) - }, - runMain := foregroundRunMainTask.evaluated, - run := foregroundRunTask.evaluated, - fgRun := runTask(fullClasspath, (run / mainClass), (run / runner)).evaluated, - fgRunMain := runMainTask(fullClasspath, (run / runner)).evaluated, - copyResources := copyResourcesTask.value, - // note that we use the same runner and mainClass as plain run - mainBgRunMainTaskForConfig(This), - mainBgRunTaskForConfig(This) - ) ++ inTask(run)(runnerSettings ++ newRunnerSettings) + } + }, + compileEarly := compileEarlyTask.value, + compile := compileTask.value, + compileScalaBackend := compileScalaBackendTask.value, + compileJava := compileJavaTask.value, + compileSplit := { + // conditional task + if (incOptions.value.pipelining) compileJava.value + else compileScalaBackend.value + }, + internalDependencyConfigurations := InternalDependencies.configurations.value, + manipulateBytecode := compileSplit.value, + compileIncremental := compileIncrementalTask.tag(Tags.Compile, Tags.CPU).value, + printWarnings := printWarningsTask.value, + compileAnalysisFilename := { + // Here, if the user wants cross-scala-versioning, we also append it + // to the analysis cache, so we keep the scala versions separated. + val binVersion = scalaBinaryVersion.value + val extra = + if (crossPaths.value) s"_$binVersion" + else "" + s"inc_compile$extra.zip" + }, + earlyCompileAnalysisFile := { + earlyCompileAnalysisTargetRoot.value / compileAnalysisFilename.value + }, + compileAnalysisFile := { + compileAnalysisTargetRoot.value / compileAnalysisFilename.value + }, + externalHooks := IncOptions.defaultExternal, + incOptions := { + val old = incOptions.value + old + .withAuxiliaryClassFiles(auxiliaryClassFiles.value.toArray) + .withExternalHooks(externalHooks.value) + .withClassfileManagerType( + Option( + TransactionalManagerType + .of( // https://github.com/sbt/sbt/issues/1673 + crossTarget.value / s"${prefix(configuration.value.name)}classes.bak", + streams.value.log + ): ClassFileManagerType + ).toOptional + ) + .withPipelining(usePipelining.value) + }, + scalacOptions := { + val old = scalacOptions.value + val converter = fileConverter.value + if (exportPipelining.value) + Vector( + "-Ypickle-java", + "-Ypickle-write", + converter.toPath(earlyOutput.value).toString + ) ++ old + else old + }, + scalacOptions := { + val old = scalacOptions.value + if ( + sbtPlugin.value && VersionNumber(scalaVersion.value) + .matchesSemVer(SemanticSelector("=2.12 >=2.12.13")) + ) + old ++ Seq("-Wconf:cat=unused-nowarn:s", "-Xsource:3") + else old + }, + persistJarClasspath :== true, + classpathEntryDefinesClassVF := { + (if (persistJarClasspath.value) classpathDefinesClassCache.value + else VirtualFileValueCache.definesClassCache(fileConverter.value)).get + }, + compileIncSetup := compileIncSetupTask.value, + console := consoleTask.value, + collectAnalyses := Definition.collectAnalysesTask.map(_ => ()).value, + consoleQuick := consoleQuickTask.value, + discoveredMainClasses := (compile map discoverMainClasses storeAs discoveredMainClasses xtriggeredBy compile).value, + discoveredSbtPlugins := discoverSbtPluginNames.value, + // This fork options, scoped to the configuration is used for tests + forkOptions := forkOptionsTask.value, + selectMainClass := mainClass.value orElse askForMainClass(discoveredMainClasses.value), + run / mainClass := (run / selectMainClass).value, + mainClass := { + val logWarning = state.value.currentCommand.forall(!_.commandLine.split(" ").exists { + case "run" | "runMain" => true + case r => + r.split("/") match { + case Array(parts @ _*) => + parts.lastOption match { + case Some("run" | "runMain") => true + case _ => false + } + } + }) + pickMainClassOrWarn(discoveredMainClasses.value, streams.value.log, logWarning) + }, + runMain := foregroundRunMainTask.evaluated, + run := foregroundRunTask.evaluated, + fgRun := runTask(fullClasspath, (run / mainClass), (run / runner)).evaluated, + fgRunMain := runMainTask(fullClasspath, (run / runner)).evaluated, + copyResources := copyResourcesTask.value, + // note that we use the same runner and mainClass as plain run + mainBgRunMainTaskForConfig(This), + mainBgRunTaskForConfig(This) + ) ++ inTask(run)(runnerSettings ++ newRunnerSettings) private[this] lazy val configGlobal = globalDefaults( Seq( @@ -1092,7 +1105,9 @@ object Defaults extends BuildCommon { case None => val scalaProvider = appConfiguration.value.provider.scalaProvider val version = scalaVersion.value - if (version == scalaProvider.version) // use the same class loader as the Scala classes used by sbt + if ( + version == scalaProvider.version + ) // use the same class loader as the Scala classes used by sbt Def.task { val allJars = scalaProvider.jars val libraryJars = allJars.filter(_.getName == "scala-library.jar") @@ -1108,7 +1123,8 @@ object Defaults extends BuildCommon { ) case _ => ScalaInstance(version, scalaProvider) } - } else + } + else scalaInstanceFromUpdate } } @@ -1225,52 +1241,54 @@ object Defaults extends BuildCommon { testOnly / testFilter :== (selectedFilter _) ) ) - lazy val testTasks - : Seq[Setting[_]] = testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions( - testQuick - ) ++ testDefaults ++ Seq( - testLoader := ClassLoaders.testTask.value, - loadedTestFrameworks := { - val loader = testLoader.value - val log = streams.value.log - testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x)).toIterable).toMap - }, - definedTests := detectTests.value, - definedTestNames := (definedTests map (_.map(_.name).distinct) storeAs definedTestNames triggeredBy compile).value, - testQuick / testFilter := testQuickFilter.value, - executeTests := ( - Def.taskDyn { - allTestGroupsTask( - (test / streams).value, - loadedTestFrameworks.value, - testLoader.value, - (test / testGrouping).value, - (test / testExecution).value, - (test / fullClasspath).value, - testForkedParallel.value, - (test / javaOptions).value, - (classLoaderLayeringStrategy).value, - projectId = s"${thisProject.value.id} / ", - ) + lazy val testTasks: Seq[Setting[_]] = + testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions( + testQuick + ) ++ testDefaults ++ Seq( + testLoader := ClassLoaders.testTask.value, + loadedTestFrameworks := { + val loader = testLoader.value + val log = streams.value.log + testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x)).toIterable).toMap + }, + definedTests := detectTests.value, + definedTestNames := (definedTests map (_.map( + _.name + ).distinct) storeAs definedTestNames triggeredBy compile).value, + testQuick / testFilter := testQuickFilter.value, + executeTests := ( + Def.taskDyn { + allTestGroupsTask( + (test / streams).value, + loadedTestFrameworks.value, + testLoader.value, + (test / testGrouping).value, + (test / testExecution).value, + (test / fullClasspath).value, + testForkedParallel.value, + (test / javaOptions).value, + (classLoaderLayeringStrategy).value, + projectId = s"${thisProject.value.id} / ", + ) + } + ).value, + // ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value, + Test / test / testResultLogger :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185 + test := { + val trl = (Test / test / testResultLogger).value + val taskName = Project.showContextKey(state.value).show(resolvedScoped.value) + try trl.run(streams.value.log, executeTests.value, taskName) + finally close(testLoader.value) + }, + testOnly := { + try inputTests(testOnly).evaluated + finally close(testLoader.value) + }, + testQuick := { + try inputTests(testQuick).evaluated + finally close(testLoader.value) } - ).value, - // ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value, - Test / test / testResultLogger :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185 - test := { - val trl = (Test / test / testResultLogger).value - val taskName = Project.showContextKey(state.value).show(resolvedScoped.value) - try trl.run(streams.value.log, executeTests.value, taskName) - finally close(testLoader.value) - }, - testOnly := { - try inputTests(testOnly).evaluated - finally close(testLoader.value) - }, - testQuick := { - try inputTests(testQuick).evaluated - finally close(testLoader.value) - } - ) + ) private def close(sbtLoader: ClassLoader): Unit = sbtLoader match { case u: AutoCloseable => u.close() case c: ClasspathFilter => c.close() @@ -1375,8 +1393,8 @@ object Defaults extends BuildCommon { Def.task { val cp = (test / fullClasspath).value val s = (test / streams).value - val ans: Seq[Analysis] = cp.flatMap(_.metadata get Keys.analysis) map { - case a0: Analysis => a0 + val ans: Seq[Analysis] = cp.flatMap(_.metadata get Keys.analysis) map { case a0: Analysis => + a0 } val succeeded = TestStatus.read(succeededFile(s.cacheDirectory)) val stamps = collection.mutable.Map.empty[String, Long] @@ -1428,7 +1446,8 @@ object Defaults extends BuildCommon { val config = testExecution.value implicit val display = Project.showContextKey(state.value) - val modifiedOpts = Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions: _*) +: config.options + val modifiedOpts = + Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions: _*) +: config.options val newConfig = config.copy(options = modifiedOpts) val output = allTestGroupsTask( s, @@ -1455,14 +1474,13 @@ object Defaults extends BuildCommon { ): Map[TestFramework, Runner] = { import Tests.Argument val opts = config.options.toList - frameworks.map { - case (tf, f) => - val args = opts.flatMap { - case Argument(None | Some(`tf`), args) => args - case _ => Nil - } - val mainRunner = f.runner(args.toArray, Array.empty[String], loader) - tf -> mainRunner + frameworks.map { case (tf, f) => + val args = opts.flatMap { + case Argument(None | Some(`tf`), args) => args + case _ => Nil + } + val mainRunner = f.runner(args.toArray, Array.empty[String], loader) + tf -> mainRunner } } @@ -1525,9 +1543,7 @@ object Defaults extends BuildCommon { ): Initialize[Task[Tests.Output]] = { val processedOptions: Map[Tests.Group, Tests.ProcessedOptions] = groups - .map( - group => group -> Tests.processOptions(config, group.tests.toVector, s.log) - ) + .map(group => group -> Tests.processOptions(config, group.tests.toVector, s.log)) .toMap val testDefinitions: Iterable[TestDefinition] = processedOptions.values.flatMap(_.tests) @@ -1535,7 +1551,7 @@ object Defaults extends BuildCommon { val filteredFrameworks: Map[TestFramework, Framework] = frameworks.filter { case (_, framework) => TestFramework.getFingerprints(framework).exists { t => - testDefinitions.exists { test => + testDefinitions.exists { (test) => TestFramework.matches(t, test.fingerprint) } } @@ -1574,50 +1590,50 @@ object Defaults extends BuildCommon { } val output = Tests.foldTasks(groupTasks, config.parallel) val result = output map { out => - out.events.foreach { - case (suite, e) => - if (strategy != ClassLoaderLayeringStrategy.Flat || - strategy != ClassLoaderLayeringStrategy.ScalaLibrary) { - (e.throwables ++ e.throwables.flatMap(t => Option(t.getCause))) - .find { t => - t.isInstanceOf[NoClassDefFoundError] || - t.isInstanceOf[IllegalAccessError] || - t.isInstanceOf[ClassNotFoundException] - } - .foreach { t => - s.log.error( - s"Test suite $suite failed with $t.\nThis may be due to the " - + s"ClassLoaderLayeringStrategy ($strategy) used by your task.\n" - + "To improve performance and reduce memory, sbt attempts to cache the" - + " class loaders used to load the project dependencies.\n" - + "The project class files are loaded in a separate class loader that is" - + " created for each test run.\nThe test class loader accesses the project" - + " dependency classes using the cached project dependency classloader.\nWith" - + " this approach, class loading may fail under the following conditions:\n\n" - + " * Dependencies use reflection to access classes in your project's" - + " classpath.\n Java serialization/deserialization may cause this.\n" - + " * An open package is accessed across layers. If the project's classes" - + " access or extend\n jvm package private classes defined in a" - + " project dependency, it may cause an IllegalAccessError\n because the" - + " jvm enforces package private at the classloader level.\n\n" - + "These issues, along with others that were not enumerated above, may be" - + " resolved by changing the class loader layering strategy.\n" - + "The Flat and ScalaLibrary strategies bundle the full project classpath in" - + " the same class loader.\nTo use one of these strategies, set the " - + " ClassLoaderLayeringStrategy key\nin your configuration, for example:\n\n" - + s"set ${projectId}Test / classLoaderLayeringStrategy :=" - + " ClassLoaderLayeringStrategy.ScalaLibrary\n" - + s"set ${projectId}Test / classLoaderLayeringStrategy :=" - + " ClassLoaderLayeringStrategy.Flat\n\n" - + "See ClassLoaderLayeringStrategy.scala for the full list of options." - ) - } - } + out.events.foreach { case (suite, e) => + if ( + strategy != ClassLoaderLayeringStrategy.Flat || + strategy != ClassLoaderLayeringStrategy.ScalaLibrary + ) { + (e.throwables ++ e.throwables.flatMap(t => Option(t.getCause))) + .find { t => + t.isInstanceOf[NoClassDefFoundError] || + t.isInstanceOf[IllegalAccessError] || + t.isInstanceOf[ClassNotFoundException] + } + .foreach { t => + s.log.error( + s"Test suite $suite failed with $t.\nThis may be due to the " + + s"ClassLoaderLayeringStrategy ($strategy) used by your task.\n" + + "To improve performance and reduce memory, sbt attempts to cache the" + + " class loaders used to load the project dependencies.\n" + + "The project class files are loaded in a separate class loader that is" + + " created for each test run.\nThe test class loader accesses the project" + + " dependency classes using the cached project dependency classloader.\nWith" + + " this approach, class loading may fail under the following conditions:\n\n" + + " * Dependencies use reflection to access classes in your project's" + + " classpath.\n Java serialization/deserialization may cause this.\n" + + " * An open package is accessed across layers. If the project's classes" + + " access or extend\n jvm package private classes defined in a" + + " project dependency, it may cause an IllegalAccessError\n because the" + + " jvm enforces package private at the classloader level.\n\n" + + "These issues, along with others that were not enumerated above, may be" + + " resolved by changing the class loader layering strategy.\n" + + "The Flat and ScalaLibrary strategies bundle the full project classpath in" + + " the same class loader.\nTo use one of these strategies, set the " + + " ClassLoaderLayeringStrategy key\nin your configuration, for example:\n\n" + + s"set ${projectId}Test / classLoaderLayeringStrategy :=" + + " ClassLoaderLayeringStrategy.ScalaLibrary\n" + + s"set ${projectId}Test / classLoaderLayeringStrategy :=" + + " ClassLoaderLayeringStrategy.Flat\n\n" + + "See ClassLoaderLayeringStrategy.scala for the full list of options." + ) + } + } } val summaries = - runners map { - case (tf, r) => - Tests.Summary(frameworks(tf).name, r.done()) + runners map { case (tf, r) => + Tests.Summary(frameworks(tf).name, r.done()) } out.copy(summaries = summaries) } @@ -1763,13 +1779,13 @@ object Defaults extends BuildCommon { val f = artifactName.value crossTarget.value / (prefix(configuration.value.name) + extraPrefix) / f( - ScalaVersion( - (artifactName / scalaVersion).value, - (artifactName / scalaBinaryVersion).value - ), - projectID.value, - art.value - ) + ScalaVersion( + (artifactName / scalaVersion).value, + (artifactName / scalaBinaryVersion).value + ), + projectID.value, + art.value + ) } private[sbt] def prefixArtifactPathSetting( @@ -1875,13 +1891,15 @@ object Defaults extends BuildCommon { def print(st: String) = { scala.Console.out.print(st); scala.Console.out.flush() } print(s) ITerminal.get.withRawInput { - try ITerminal.get.inputStream.read match { - case -1 | -2 => None - case b => - val res = b.toChar.toString - println(res) - Some(res) - } catch { case e: InterruptedException => None } + try + ITerminal.get.inputStream.read match { + case -1 | -2 => None + case b => + val res = b.toChar.toString + println(res) + Some(res) + } + catch { case e: InterruptedException => None } } }), classes @@ -1928,8 +1946,8 @@ object Defaults extends BuildCommon { copyClasspath: Initialize[Boolean], scalaRun: Initialize[Task[ScalaRun]] ): Initialize[InputTask[JobHandle]] = { - val parser = Defaults.loadForParser(discoveredMainClasses)( - (s, names) => Defaults.runMainParser(s, names getOrElse Nil) + val parser = Defaults.loadForParser(discoveredMainClasses)((s, names) => + Defaults.runMainParser(s, names getOrElse Nil) ) Def.inputTask { val service = bgJobService.value @@ -2139,9 +2157,14 @@ object Defaults extends BuildCommon { (hasScala, hasJava) match { case (true, _) => val options = sOpts ++ Opts.doc.externalAPI(xapis) - val runDoc = Doc.scaladoc(label, s.cacheStoreFactory sub "scala", cs.scalac match { - case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc")) - }, fiOpts) + val runDoc = Doc.scaladoc( + label, + s.cacheStoreFactory sub "scala", + cs.scalac match { + case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc")) + }, + fiOpts + ) def isScala3Doc(module: ModuleID): Boolean = { module.configurations.exists(_.startsWith(Configurations.ScalaDocTool.name)) && module.name == ScalaArtifacts.Scala3DocID @@ -2239,7 +2262,8 @@ object Defaults extends BuildCommon { finally w.close() // workaround for #937 } - /** Handles traditional Scalac compilation. For non-pipelined compilation, + /** + * Handles traditional Scalac compilation. For non-pipelined compilation, * this also handles Java compilation. */ private[sbt] def compileScalaBackendTask: Initialize[Task[CompileResult]] = Def.task { @@ -2261,7 +2285,8 @@ object Defaults extends BuildCommon { analysisResult } - /** Block on earlyOutputPing promise, which will be completed by `compile` midway + /** + * Block on earlyOutputPing promise, which will be completed by `compile` midway * via `compileProgress` implementation. */ private[sbt] def compileEarlyTask: Initialize[Task[CompileAnalysis]] = Def.task { @@ -2296,9 +2321,8 @@ object Defaults extends BuildCommon { val map = managedFileStampCache.value val analysis = analysisResult.analysis import scala.collection.JavaConverters._ - analysis.readStamps.getAllProductStamps.asScala.foreach { - case (f: VirtualFileRef, s) => - map.put(c.toPath(f), sbt.nio.FileStamp.fromZincStamp(s)) + analysis.readStamps.getAllProductStamps.asScala.foreach { case (f: VirtualFileRef, s) => + map.put(c.toPath(f), sbt.nio.FileStamp.fromZincStamp(s)) } analysis } @@ -2467,11 +2491,11 @@ object Defaults extends BuildCommon { def withAbsoluteSource(p: Position): Position = if (reportAbsolutePath) toAbsoluteSource(fc)(p) else p - mappers.foldRight({ p: Position => + mappers.foldRight({ (p: Position) => withAbsoluteSource(p) // Fallback if sourcePositionMappers is empty }) { (mapper, previousPosition) => - { p: Position => + { (p: Position) => // To each mapper we pass the position with the absolute source (only if reportAbsolutePath = true of course) mapper(withAbsoluteSource(p)).getOrElse(previousPosition(p)) } @@ -2514,10 +2538,9 @@ object Defaults extends BuildCommon { def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID = m.extra( - PomExtraDependencyAttributes.SbtVersionKey -> sbtV, - PomExtraDependencyAttributes.ScalaVersionKey -> scalaV - ) - .withCrossVersion(Disabled()) + PomExtraDependencyAttributes.SbtVersionKey -> sbtV, + PomExtraDependencyAttributes.ScalaVersionKey -> scalaV + ).withCrossVersion(Disabled()) def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.taskDyn { if (sbtPlugin.value) Def.task(PluginDiscovery.discoverSourceAll(compile.value)) @@ -2709,7 +2732,10 @@ object Classpaths { .trackedExportedJarProducts(TrackLevel.NoTracking) .value, internalDependencyAsJars := internalDependencyJarsTask.value, - dependencyClasspathAsJars := concat(internalDependencyAsJars, externalDependencyClasspath).value, + dependencyClasspathAsJars := concat( + internalDependencyAsJars, + externalDependencyClasspath + ).value, fullClasspathAsJars := concatDistinct(exportedProductJars, dependencyClasspathAsJars).value, unmanagedJars := findUnmanagedJars( configuration.value, @@ -2730,14 +2756,14 @@ object Classpaths { dependencyClasspathFiles / outputFileStamps := { val stamper = timeWrappedStamper.value val converter = fileConverter.value - dependencyClasspathFiles.value.flatMap( - p => FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _) + dependencyClasspathFiles.value.flatMap(p => + FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _) ) }, dependencyVirtualClasspath := { val converter = fileConverter.value val cp0 = dependencyClasspath.value - cp0 map { attr: Attributed[File] => + cp0 map { (attr: Attributed[File]) => attr map { file => converter.toVirtualFile(file.toPath) } @@ -2751,7 +2777,7 @@ object Classpaths { concat( internalDependencyPicklePath, Def.task { - externalDependencyClasspath.value map { attr: Attributed[File] => + externalDependencyClasspath.value map { (attr: Attributed[File]) => attr map { file => val converter = fileConverter.value converter.toVirtualFile(file.toPath) @@ -2870,10 +2896,11 @@ object Classpaths { assumedVersionScheme :== VersionScheme.Always, assumedVersionSchemeJava :== VersionScheme.Always, excludeDependencies :== Nil, - ivyLoggingLevel := (// This will suppress "Resolving..." logs on Jenkins and Travis. - if (insideCI.value) - UpdateLogging.Quiet - else UpdateLogging.Default), + ivyLoggingLevel := ( // This will suppress "Resolving..." logs on Jenkins and Travis. + if (insideCI.value) + UpdateLogging.Quiet + else UpdateLogging.Default + ), ivyXML :== NodeSeq.Empty, ivyValidate :== false, moduleConfigurations :== Nil, @@ -2948,7 +2975,7 @@ object Classpaths { ) match { case (Some(delegated), Seq(), _, _) => delegated case (_, rs, Some(ars), _) => ars ++ rs - case (_, rs, _, uj) => Resolver.combineDefaultResolvers(rs.toVector, uj, mavenCentral = true) + case (_, rs, _, uj) => Resolver.combineDefaultResolvers(rs.toVector, uj, mavenCentral = true) }), appResolvers := { val ac = appConfiguration.value @@ -3151,7 +3178,7 @@ object Classpaths { }, makeIvyXmlLocalConfiguration := { makeIvyXmlConfig( - false, //publishMavenStyle.value, + false, // publishMavenStyle.value, sbt.Classpaths.deliverPattern(crossTarget.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, @@ -3162,7 +3189,7 @@ object Classpaths { ) }, publishLocalConfiguration := publishConfig( - false, //publishMavenStyle.value, + false, // publishMavenStyle.value, deliverPattern(crossTarget.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, @@ -3235,15 +3262,15 @@ object Classpaths { TaskGlobal / updateClassifiers := LibraryManagement.updateClassifiersTask.value, ) ) ++ Seq( - csrProject := CoursierInputsTasks.coursierProjectTask.value, - csrConfiguration := LMCoursier.coursierConfigurationTask.value, - csrResolvers := CoursierRepositoriesTasks.coursierResolversTask.value, - csrRecursiveResolvers := CoursierRepositoriesTasks.coursierRecursiveResolversTask.value, - csrSbtResolvers := CoursierRepositoriesTasks.coursierSbtResolversTask.value, - csrInterProjectDependencies := CoursierInputsTasks.coursierInterProjectDependenciesTask.value, - csrExtraProjects := CoursierInputsTasks.coursierExtraProjectsTask.value, - csrFallbackDependencies := CoursierInputsTasks.coursierFallbackDependenciesTask.value, - ) ++ + csrProject := CoursierInputsTasks.coursierProjectTask.value, + csrConfiguration := LMCoursier.coursierConfigurationTask.value, + csrResolvers := CoursierRepositoriesTasks.coursierResolversTask.value, + csrRecursiveResolvers := CoursierRepositoriesTasks.coursierRecursiveResolversTask.value, + csrSbtResolvers := CoursierRepositoriesTasks.coursierSbtResolversTask.value, + csrInterProjectDependencies := CoursierInputsTasks.coursierInterProjectDependenciesTask.value, + csrExtraProjects := CoursierInputsTasks.coursierExtraProjectsTask.value, + csrFallbackDependencies := CoursierInputsTasks.coursierFallbackDependenciesTask.value, + ) ++ IvyXml.generateIvyXmlSettings() ++ LMCoursier.publicationsSetting(Seq(Compile, Test).map(c => c -> CConfiguration(c.name))) @@ -3268,7 +3295,9 @@ object Classpaths { val extResolvers = externalResolvers.value val isScala3M123 = ScalaArtifacts.isScala3M123(version) val allToolDeps = - if (scalaHome.value.isDefined || scalaModuleInfo.value.isEmpty || !managedScalaInstance.value) + if ( + scalaHome.value.isDefined || scalaModuleInfo.value.isEmpty || !managedScalaInstance.value + ) Nil else if (!isScala3M123 || extResolvers.contains(Resolver.JCenterRepository)) { ScalaArtifacts.toolDependencies(sbtOrg, version) ++ @@ -3488,35 +3517,37 @@ object Classpaths { ) ) ) ++ Seq( - bootIvyConfiguration := (updateSbtClassifiers / ivyConfiguration).value, - bootDependencyResolution := (updateSbtClassifiers / dependencyResolution).value, - scalaCompilerBridgeResolvers := { - val boot = bootResolvers.value - val explicit = buildStructure.value - .units(thisProjectRef.value.build) - .unit - .plugins - .pluginData - .resolvers - val ext = externalResolvers.value.toVector - // https://github.com/sbt/sbt/issues/4408 - val xs = (explicit, boot) match { - case (Some(ex), Some(b)) => (ex.toVector ++ b.toVector).distinct - case (Some(ex), None) => ex.toVector - case (None, Some(b)) => b.toVector - case _ => Vector() - } - (xs ++ ext).distinct - }, - scalaCompilerBridgeDependencyResolution := (scalaCompilerBridgeScope / dependencyResolution).value - ) + bootIvyConfiguration := (updateSbtClassifiers / ivyConfiguration).value, + bootDependencyResolution := (updateSbtClassifiers / dependencyResolution).value, + scalaCompilerBridgeResolvers := { + val boot = bootResolvers.value + val explicit = buildStructure.value + .units(thisProjectRef.value.build) + .unit + .plugins + .pluginData + .resolvers + val ext = externalResolvers.value.toVector + // https://github.com/sbt/sbt/issues/4408 + val xs = (explicit, boot) match { + case (Some(ex), Some(b)) => (ex.toVector ++ b.toVector).distinct + case (Some(ex), None) => ex.toVector + case (None, Some(b)) => b.toVector + case _ => Vector() + } + (xs ++ ext).distinct + }, + scalaCompilerBridgeDependencyResolution := (scalaCompilerBridgeScope / dependencyResolution).value + ) def classifiersModuleTask: Initialize[Task[GetClassifiersModule]] = Def.task { val classifiers = transitiveClassifiers.value val ref = thisProjectRef.value val pluginClasspath = loadedBuild.value.units(ref.build).unit.plugins.fullClasspath.toVector - val pluginJars = pluginClasspath.filter(_.data.isFile) // exclude directories: an approximation to whether they've been published + val pluginJars = pluginClasspath.filter( + _.data.isFile + ) // exclude directories: an approximation to whether they've been published val pluginIDs: Vector[ModuleID] = pluginJars.flatMap(_ get moduleID.key) GetClassifiersModule( projectID.value, @@ -3645,8 +3676,8 @@ object Classpaths { val maybeUpdateLevel = (update / logLevel).?.value val conf1 = maybeUpdateLevel.orElse(state0.get(logLevel.key)) match { case Some(Level.Debug) if conf.logging == Default => conf.withLogging(logging = Full) - case Some(_) if conf.logging == Default => conf.withLogging(logging = DownloadOnly) - case _ => conf + case Some(_) if conf.logging == Default => conf.withLogging(logging = DownloadOnly) + case _ => conf } // logical clock is folded into UpdateConfiguration @@ -3700,13 +3731,12 @@ object Classpaths { val extracted = (Project extract st) val sk = (projRef / Zero / Zero / libraryDependencies).scopedKey val empty = extracted.structure.data.set(sk.scope, sk.key, Nil) - val settings = extracted.structure.settings filter { s: Setting[_] => + val settings = extracted.structure.settings filter { (s: Setting[_]) => (s.key.key == libraryDependencies.key) && (s.key.scope.project == Select(projRef)) } - Map(settings flatMap { - case s: Setting[Seq[ModuleID]] @unchecked => - s.init.evaluate(empty) map { _ -> s.pos } + Map(settings flatMap { case s: Setting[Seq[ModuleID]] @unchecked => + s.init.evaluate(empty) map { _ -> s.pos } }: _*) } catch { case NonFatal(_) => Map() @@ -3718,24 +3748,27 @@ object Classpaths { implicit val NoPositionFormat: JsonFormat[NoPosition.type] = asSingleton(NoPosition) implicit val LinePositionFormat: IsoLList.Aux[LinePosition, String :*: Int :*: LNil] = LList.iso( - { l: LinePosition => + { (l: LinePosition) => ("path", l.path) :*: ("startLine", l.startLine) :*: LNil - }, { in: String :*: Int :*: LNil => + }, + { (in: String :*: Int :*: LNil) => LinePosition(in.head, in.tail.head) } ) implicit val LineRangeFormat: IsoLList.Aux[LineRange, Int :*: Int :*: LNil] = LList.iso( - { l: LineRange => + { (l: LineRange) => ("start", l.start) :*: ("end", l.end) :*: LNil - }, { in: Int :*: Int :*: LNil => + }, + { (in: Int :*: Int :*: LNil) => LineRange(in.head, in.tail.head) } ) implicit val RangePositionFormat : IsoLList.Aux[RangePosition, String :*: LineRange :*: LNil] = LList.iso( - { r: RangePosition => + { (r: RangePosition) => ("path", r.path) :*: ("range", r.range) :*: LNil - }, { in: String :*: LineRange :*: LNil => + }, + { (in: String :*: LineRange :*: LNil) => RangePosition(in.head, in.tail.head) } ) @@ -3891,7 +3924,7 @@ object Classpaths { earlyOutput.value :: Nil } else { val c = fileConverter.value - products.value map { x: File => + products.value map { (x: File) => c.toVirtualFile(x.toPath) } } @@ -3938,8 +3971,8 @@ object Classpaths { data: Settings[Scope], deps: BuildDependencies ): Seq[(ProjectRef, ConfigRef)] = - interSort(projectRef, conf, data, deps).map { - case (projectRef, configName) => (projectRef, ConfigRef(configName)) + interSort(projectRef, conf, data, deps).map { case (projectRef, configName) => + (projectRef, ConfigRef(configName)) } def mapped( @@ -4026,7 +4059,8 @@ object Classpaths { def unmanagedScalaLibrary: Initialize[Task[Seq[File]]] = Def.taskDyn { if (autoScalaLibrary.value && scalaHome.value.isDefined) - Def.task { scalaInstance.value.libraryJars } else + Def.task { scalaInstance.value.libraryJars } + else Def.task { Nil } } @@ -4034,14 +4068,13 @@ object Classpaths { def managedJars(config: Configuration, jarTypes: Set[String], up: UpdateReport): Classpath = up.filter(configurationFilter(config.name) && artifactFilter(`type` = jarTypes)) .toSeq - .map { - case (_, module, art, file) => - Attributed(file)( - AttributeMap.empty - .put(artifact.key, art) - .put(moduleID.key, module) - .put(configuration.key, config) - ) + .map { case (_, module, art, file) => + Attributed(file)( + AttributeMap.empty + .put(artifact.key, art) + .put(moduleID.key, module) + .put(configuration.key, config) + ) } .distinct @@ -4066,7 +4099,8 @@ object Classpaths { isDotty: Boolean ): Seq[String] = { import sbt.internal.inc.classpath.ClasspathUtil.compilerPlugins - val pluginClasspath = report.matching(configurationFilter(CompilerPlugin.name)) ++ internalPluginClasspath + val pluginClasspath = + report.matching(configurationFilter(CompilerPlugin.name)) ++ internalPluginClasspath val plugins = compilerPlugins(pluginClasspath.map(_.toPath), isDotty) plugins.map("-Xplugin:" + _.toAbsolutePath.toString).toSeq } @@ -4110,8 +4144,7 @@ object Classpaths { .filter(_.getName == jarName) .map(f => (Artifact(f.getName.stripSuffix(".jar")), f)) if (replaceWith.isEmpty) arts else replaceWith - } else - arts + } else arts } // try/catch for supporting earlier launchers @@ -4224,7 +4257,7 @@ object Classpaths { } def shellPromptFromState: State => String = shellPromptFromState(ITerminal.console.isColorEnabled) - def shellPromptFromState(isColorEnabled: Boolean): State => String = { s: State => + def shellPromptFromState(isColorEnabled: Boolean): State => String = { (s: State) => val extracted = Project.extract(s) (extracted.currentRef / name).get(extracted.structure.data) match { case Some(name) => @@ -4357,19 +4390,17 @@ trait BuildExtra extends BuildCommon with DefExtra { streams.value ) } - ivyConfiguration := ((uri zipWith other) { - case (u, otherTask) => - otherTask map { - case (base, app, pr, uo, s) => - val extraResolvers = if (addMultiResolver) Vector(pr) else Vector.empty - ExternalIvyConfiguration() - .withLock(lock(app)) - .withBaseDirectory(base) - .withLog(s.log) - .withUpdateOptions(uo) - .withUri(u) - .withExtraResolvers(extraResolvers) - } + ivyConfiguration := ((uri zipWith other) { case (u, otherTask) => + otherTask map { case (base, app, pr, uo, s) => + val extraResolvers = if (addMultiResolver) Vector(pr) else Vector.empty + ExternalIvyConfiguration() + .withLock(lock(app)) + .withBaseDirectory(base) + .withLog(s.log) + .withUpdateOptions(uo) + .withUri(u) + .withExtraResolvers(extraResolvers) + } }).value } @@ -4456,9 +4487,8 @@ trait BuildExtra extends BuildCommon with DefExtra { ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config)) ).zipWith(Def.task { ((config / fullClasspath).value, streams.value, result.value) }) { (rTask, t) => - (t, rTask) map { - case ((cp, s, args), r) => - r.run(mainClass, data(cp), baseArguments ++ args, s.log).get + (t, rTask) map { case ((cp, s, args), r) => + r.run(mainClass, data(cp), baseArguments ++ args, s.log).get } } }.evaluated @@ -4478,14 +4508,11 @@ trait BuildExtra extends BuildCommon with DefExtra { scoped := initScoped( scoped.scopedKey, ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config)) - ).zipWith(Def.task { ((config / fullClasspath).value, streams.value) }) { - case (rTask, t) => - (t, rTask) map { - case ((cp, s), r) => - r.run(mainClass, data(cp), arguments, s.log).get - } + ).zipWith(Def.task { ((config / fullClasspath).value, streams.value) }) { case (rTask, t) => + (t, rTask) map { case ((cp, s), r) => + r.run(mainClass, data(cp), arguments, s.log).get } - .value + }.value ) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value) def initScoped[T](sk: ScopedKey[_], i: Initialize[T]): Initialize[T] = @@ -4530,7 +4557,7 @@ trait BuildCommon { } final class RichAttributed private[sbt] (s: Seq[Attributed[File]]) { - /** Extracts the plain `Seq[File]` from a Classpath (which is a `Seq[Attributed[File]]`).*/ + /** Extracts the plain `Seq[File]` from a Classpath (which is a `Seq[Attributed[File]]`). */ def files: Seq[File] = Attributed.data(s) } final class RichFiles private[sbt] (s: Seq[File]) { @@ -4557,8 +4584,8 @@ trait BuildCommon { def getFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State): Option[T] = SessionVar.get(SessionVar.resolveContext(task.scopedKey, context.scope, s), s) - def loadFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State)( - implicit f: JsonFormat[T] + def loadFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State)(implicit + f: JsonFormat[T] ): Option[T] = SessionVar.load(SessionVar.resolveContext(task.scopedKey, context.scope, s), s) diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala index 0807bd3c3..d15d10702 100644 --- a/main/src/main/scala/sbt/EvaluateTask.scala +++ b/main/src/main/scala/sbt/EvaluateTask.scala @@ -269,13 +269,13 @@ object EvaluateTask { val progress = tp.progress override def initial(): Unit = progress.initial() override def afterRegistered( - task: Task[_], - allDeps: Iterable[Task[_]], - pendingDeps: Iterable[Task[_]] + task: Task[Any], + allDeps: Iterable[Task[Any]], + pendingDeps: Iterable[Task[Any]] ): Unit = progress.afterRegistered(task, allDeps, pendingDeps) - override def afterReady(task: Task[_]): Unit = progress.afterReady(task) - override def beforeWork(task: Task[_]): Unit = progress.beforeWork(task) + override def afterReady(task: Task[Any]): Unit = progress.afterReady(task) + override def beforeWork(task: Task[Any]): Unit = progress.beforeWork(task) override def afterWork[A](task: Task[A], result: Either[Task[A], Result[A]]): Unit = progress.afterWork(task, result) override def afterCompleted[A](task: Task[A], result: Result[A]): Unit = @@ -379,7 +379,7 @@ object EvaluateTask { ): Option[(State, Result[T])] = { withStreams(structure, state) { str => for ((task, toNode) <- getTask(structure, taskKey, state, str, ref)) - yield runTask(task, state, str, structure.index.triggers, config)(toNode) + yield runTask(task, state, str, structure.index.triggers, config)(using toNode) } } @@ -442,7 +442,7 @@ object EvaluateTask { for (t <- structure.data.get(resolvedScope, taskKey.key)) yield (t, nodeView(state, streams, taskKey :: Nil)) } - def nodeView[HL <: HList]( + def nodeView( state: State, streams: Streams, roots: Seq[ScopedKey[_]], @@ -470,7 +470,7 @@ object EvaluateTask { streams: Streams, triggers: Triggers[Task], config: EvaluateTaskConfig - )(implicit taskToNode: NodeView[Task]): (State, Result[T]) = { + )(using taskToNode: NodeView[Task]): (State, Result[T]) = { import ConcurrentRestrictions.{ cancellableCompletionService, tagged, tagsKey } val log = state.log @@ -480,9 +480,9 @@ object EvaluateTask { def tagMap(t: Task[_]): Tags.TagMap = t.info.get(tagsKey).getOrElse(Map.empty) val tags = - tagged[Task[_]](tagMap, Tags.predicate(config.restrictions)) + tagged[Task[Any]](tagMap, Tags.predicate(config.restrictions)) val (service, shutdownThreads) = - cancellableCompletionService[Task[_], Completed]( + cancellableCompletionService[Task[Any], Completed]( tags, (s: String) => log.warn(s), (t: Task[_]) => tagMap(t).contains(Tags.Sentinel) @@ -509,14 +509,16 @@ object EvaluateTask { Execute.config(config.checkCycles, overwriteNode), triggers, config.progressReporter - )(taskToNode) + ) val (newState, result) = try { - val results = x.runKeep(root)(service) + given strategy: x.Strategy = service + val results = x.runKeep(root) storeValuesForPrevious(results, state, streams) applyResults(results, state, root) - } catch { case inc: Incomplete => (state, Inc(inc)) } - finally shutdown() + } catch { + case inc: Incomplete => (state, Result.Inc(inc)) + } finally shutdown() val replaced = transformInc(result) logIncResult(replaced, state, streams) (newState, replaced) @@ -560,9 +562,9 @@ object EvaluateTask { def stateTransform(results: RMap[Task, Result]): State => State = Function.chain( results.toTypedSeq flatMap { - case results.TPair(_, Value(KeyValue(_, st: StateTransform))) => Some(st.transform) - case results.TPair(Task(info, _), Value(v)) => info.post(v) get transformState - case _ => Nil + case results.TPair(_, Result.Value(KeyValue(_, st: StateTransform))) => Some(st.transform) + case results.TPair(Task(info, _), Result.Value(v)) => info.post(v) get transformState + case _ => Nil } ) diff --git a/main/src/main/scala/sbt/Extracted.scala b/main/src/main/scala/sbt/Extracted.scala index 88bb3b9e7..f0ae0904a 100644 --- a/main/src/main/scala/sbt/Extracted.scala +++ b/main/src/main/scala/sbt/Extracted.scala @@ -89,7 +89,7 @@ final case class Extracted( EvaluateTask.withStreams(structure, state) { str => val nv = EvaluateTask.nodeView(state, str, rkey.scopedKey :: Nil) val (newS, result) = - EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(nv) + EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(using nv) (newS, EvaluateTask.processResult2(result)) } } @@ -100,7 +100,7 @@ final case class Extracted( * The project axis is what determines where aggregation starts, so ensure this is set to what you want. * Other axes are resolved to `Zero` if unspecified. */ - def runAggregated[T](key: TaskKey[T], state: State): State = { + def runAggregated[A1](key: TaskKey[A1], state: State): State = val rkey = resolve(key) val keys = Aggregation.aggregate(rkey, ScopeMask(), structure.extra) val tasks = Act.keyValues(structure)(keys) @@ -109,8 +109,7 @@ final case class Extracted( tasks, DummyTaskMap(Nil), show = Aggregation.defaultShow(state, false), - )(showKey) - } + ) @nowarn private[this] def resolve[K <: Scoped.ScopingSetting[K] with Scoped](key: K): K = diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index c2cbb74f9..865990a2b 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -338,7 +338,7 @@ object BuiltinCommands { eval, last, lastGrep, - export, + exportCommand, boot, initialize, act, @@ -586,29 +586,28 @@ object BuiltinCommands { Project.setProject(newSession, newStructure, s) } - def set: Command = Command(SetCommand, setBrief, setDetailed)(setParser) { - case (s, (all, arg)) => - val extracted = Project extract s - import extracted._ - val dslVals = extracted.currentUnit.unit.definitions.dslDefinitions - // TODO - This is possibly inefficient (or stupid). We should try to only attach the - // classloader + imports NEEDED to compile the set command, rather than - // just ALL of them. - val ims = (imports(extracted) ++ dslVals.imports.map(i => (i, -1))) - val cl = dslVals.classloader(currentLoader) - val settings = EvaluateConfigurations.evaluateSetting( - session.currentEval(), - "", - ims, - arg, - LineRange(0, 0) - )(cl) - val setResult = - if (all) SettingCompletions.setAll(extracted, settings) - else SettingCompletions.setThis(extracted, settings, arg) - s.log.info(setResult.quietSummary) - s.log.debug(setResult.verboseSummary) - reapply(setResult.session, structure, s) + def set: Command = Command(SetCommand, setBrief, setDetailed)(setParser) { case (s, (all, arg)) => + val extracted = Project extract s + import extracted._ + val dslVals = extracted.currentUnit.unit.definitions.dslDefinitions + // TODO - This is possibly inefficient (or stupid). We should try to only attach the + // classloader + imports NEEDED to compile the set command, rather than + // just ALL of them. + val ims = (imports(extracted) ++ dslVals.imports.map(i => (i, -1))) + val cl = dslVals.classloader(currentLoader) + val settings = EvaluateConfigurations.evaluateSetting( + session.currentEval(), + "", + ims, + arg, + LineRange(0, 0) + )(cl) + val setResult = + if (all) SettingCompletions.setAll(extracted, settings) + else SettingCompletions.setThis(extracted, settings, arg) + s.log.info(setResult.quietSummary) + s.log.debug(setResult.verboseSummary) + reapply(setResult.session, structure, s) } @deprecated("Use variant that doesn't take a State", "1.1.1") @@ -693,18 +692,20 @@ object BuiltinCommands { for { lastOnly_keys <- keysParser kvs = Act.keyValues(structure)(lastOnly_keys._2) - f <- if (lastOnly_keys._1) success(() => s) - else Aggregation.evaluatingParser(s, show)(kvs) + f <- + if (lastOnly_keys._1) success(() => s) + else Aggregation.evaluatingParser(s, show)(kvs) } yield () => { def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream)) - val newS = try f() - catch { - case NonFatal(e) => - try export0(s) - finally { - throw e - } - } + val newS = + try f() + catch { + case NonFatal(e) => + try export0(s) + finally { + throw e + } + } export0(newS) } } @@ -722,7 +723,7 @@ object BuiltinCommands { keepLastLog(s) } - def export: Command = + def exportCommand: Command = Command(ExportCommand, exportBrief, exportDetailed)(exportParser)((_, f) => f()) private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = { @@ -808,8 +809,8 @@ object BuiltinCommands { } def projects: Command = - Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)( - s => projectsParser(s).? + Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s => + projectsParser(s).? ) { case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds) case (s, None) => showProjects(s); s @@ -863,10 +864,13 @@ object BuiltinCommands { @tailrec private[this] def doLoadFailed(s: State, loadArg: String): State = { s.log.warn("Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? (default: r)") - val result = try ITerminal.get.withRawInput(System.in.read) match { - case -1 => 'q'.toInt - case b => b - } catch { case _: ClosedChannelException => 'q' } + val result = + try + ITerminal.get.withRawInput(System.in.read) match { + case -1 => 'q'.toInt + case b => b + } + catch { case _: ClosedChannelException => 'q' } def retry: State = loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog def ignoreMsg: String = if (Project.isProjectLoaded(s)) "using previously loaded project" else "no project loaded" @@ -890,8 +894,8 @@ object BuiltinCommands { Nil def loadProject: Command = - Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)( - (s, arg) => loadProjectCommands(arg) ::: s + Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)((s, arg) => + loadProjectCommands(arg) ::: s ) private[this] def loadProjectParser: State => Parser[String] = @@ -1002,13 +1006,14 @@ object BuiltinCommands { def clearCaches: Command = { val help = Help.more(ClearCaches, ClearCachesDetailed) - val f: State => State = registerCompilerCache _ andThen (_.initializeClassLoaderCache) andThen addCacheStoreFactoryFactory + val f: State => State = + registerCompilerCache _ andThen (_.initializeClassLoaderCache) andThen addCacheStoreFactoryFactory Command.command(ClearCaches, help)(f) } private[sbt] def waitCmd: Command = - Command.arb( - _ => ContinuousCommands.waitWatch.examples() ~> " ".examples() ~> matched(any.*).examples() + Command.arb(_ => + ContinuousCommands.waitWatch.examples() ~> " ".examples() ~> matched(any.*).examples() ) { (s0, channel) => val exchange = StandardMain.exchange exchange.channelForName(channel) match { @@ -1118,8 +1123,7 @@ object BuiltinCommands { val line = s"sbt.version=$sbtVersion" IO.writeLines(buildProps, line :: buildPropsLines) state.log info s"Updated file $buildProps: set sbt.version to $sbtVersion" - } else - state.log warn warnMsg + } else state.log warn warnMsg } catch { case _: IOException => state.log warn warnMsg } diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index 4e8f73e84..54dc316ff 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -59,76 +59,6 @@ import language.experimental.macros import scala.concurrent.TimeoutException import scala.concurrent.duration.FiniteDuration -sealed trait ProjectDefinition[PR <: ProjectReference] { - - /** - * The project ID is used to uniquely identify a project within a build. - * It is used to refer to a project from the command line and in the scope of keys. - */ - def id: String - - /** The base directory for the project. */ - def base: File - - /** - * The configurations for this project. These are groups of related tasks and the main reason - * to list them here is when one configuration extends another. In this case, a setting lookup - * in one configuration will fall back to the configurations it extends configuration if the setting doesn't exist. - */ - def configurations: Seq[Configuration] - - /** - * The explicitly defined sequence of settings that configure this project. - * These do not include the automatically appended settings as configured by `auto`. - */ - def settings: Seq[Setting[_]] - - /** - * The references to projects that are aggregated by this project. - * When a task is run on this project, it will also be run on aggregated projects. - */ - def aggregate: Seq[PR] - - /** The references to projects that are classpath dependencies of this project. */ - def dependencies: Seq[ClasspathDep[PR]] - - /** The references to projects that are aggregate and classpath dependencies of this project. */ - def uses: Seq[PR] = aggregate ++ dependencies.map(_.project) - def referenced: Seq[PR] = uses - - /** - * The defined [[Plugins]] associated with this project. - * A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. - */ - def plugins: Plugins - - /** Indicates whether the project was created organically, or was generated synthetically. */ - def projectOrigin: ProjectOrigin - - /** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */ - private[sbt] def autoPlugins: Seq[AutoPlugin] - - override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode - - override final def equals(o: Any) = o match { - case p: ProjectDefinition[_] => p.getClass == this.getClass && p.id == id && p.base == base - case _ => false - } - - override def toString = { - val agg = ifNonEmpty("aggregate", aggregate) - val dep = ifNonEmpty("dependencies", dependencies) - val conf = ifNonEmpty("configurations", configurations) - val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.label)) - val fields = - s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"plugins: List($plugins)" :: autos) - s"Project(${fields.mkString(", ")})" - } - - private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] = - if (ts.isEmpty) Nil else s"$label: $ts" :: Nil -} - trait CompositeProject { def componentProjects: Seq[Project] } @@ -605,12 +535,15 @@ object Project extends ProjectExtra { def fillTaskAxis(scoped: ScopedKey[_]): ScopedKey[_] = ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key) - def mapScope(f: Scope => Scope) = λ[ScopedKey ~> ScopedKey](k => ScopedKey(f(k.scope), k.key)) + def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] = + [a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key) - def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = { + def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = val f = mapScope(g) - ss.map(_ mapKey f mapReferenced f) - } + ss.map { setting => + setting.mapKey(f).mapReferenced(f) + } + def transformRef(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = { val f = mapScope(g) ss.map(_ mapReferenced f) diff --git a/main/src/main/scala/sbt/ScopeFilter.scala b/main/src/main/scala/sbt/ScopeFilter.scala index c262f318f..a53908cd5 100644 --- a/main/src/main/scala/sbt/ScopeFilter.scala +++ b/main/src/main/scala/sbt/ScopeFilter.scala @@ -188,7 +188,7 @@ object ScopeFilter { * Information provided to Scope filters. These provide project relationships, * project reference resolution, and the list of all static Scopes. */ - private final class Data( + private[sbt] final class Data( val units: Map[URI, LoadedBuildUnit], val resolve: ProjectReference => ProjectRef, val allScopes: Set[Scope] diff --git a/main/src/main/scala/sbt/ScriptedRun.scala b/main/src/main/scala/sbt/ScriptedRun.scala index ea7d7054c..32313c4e7 100644 --- a/main/src/main/scala/sbt/ScriptedRun.scala +++ b/main/src/main/scala/sbt/ScriptedRun.scala @@ -62,10 +62,11 @@ object ScriptedRun { val clazz = scriptedTests.getClass if (batchExecution) - try new RunInParallelV2( - scriptedTests, - clazz.getMethod("runInParallel", fCls, bCls, asCls, fCls, sCls, asCls, lfCls, iCls) - ) + try + new RunInParallelV2( + scriptedTests, + clazz.getMethod("runInParallel", fCls, bCls, asCls, fCls, sCls, asCls, lfCls, iCls) + ) catch { case _: NoSuchMethodException => new RunInParallelV1( @@ -74,10 +75,11 @@ object ScriptedRun { ) } else - try new RunV2( - scriptedTests, - clazz.getMethod("run", fCls, bCls, asCls, fCls, sCls, asCls, lfCls) - ) + try + new RunV2( + scriptedTests, + clazz.getMethod("run", fCls, bCls, asCls, fCls, sCls, asCls, lfCls) + ) catch { case _: NoSuchMethodException => new RunV1(scriptedTests, clazz.getMethod("run", fCls, bCls, asCls, fCls, asCls, lfCls)) diff --git a/main/src/main/scala/sbt/SessionVar.scala b/main/src/main/scala/sbt/SessionVar.scala index bb27532a3..c13338bbb 100644 --- a/main/src/main/scala/sbt/SessionVar.scala +++ b/main/src/main/scala/sbt/SessionVar.scala @@ -65,7 +65,7 @@ object SessionVar { def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] = Project.structure(state).streams(state).use(key) { s => try { - Some(s.getInput(key, DefaultDataID).read[T]) + Some(s.getInput(key, DefaultDataID).read[T]()) } catch { case NonFatal(_) => None } } diff --git a/main/src/main/scala/sbt/TemplateCommandUtil.scala b/main/src/main/scala/sbt/TemplateCommandUtil.scala index 6c8831bbe..26e5b04cb 100644 --- a/main/src/main/scala/sbt/TemplateCommandUtil.scala +++ b/main/src/main/scala/sbt/TemplateCommandUtil.scala @@ -131,7 +131,7 @@ private[sbt] object TemplateCommandUtil { val templatesBaseDirectory = new File(globalBase, "templates") val templateId = s"${info.module.organization}_${info.module.name}_${info.module.revision}" val templateDirectory = new File(templatesBaseDirectory, templateId) - def jars = (templateDirectory ** -DirectoryFilter).get + def jars = (templateDirectory ** -DirectoryFilter).get() if (!(info.module.revision endsWith "-SNAPSHOT") && jars.nonEmpty) jars.toList.map(_.toPath) else { IO.createDirectory(templateDirectory) diff --git a/main/src/main/scala/sbt/coursierint/CoursierArtifactsTasks.scala b/main/src/main/scala/sbt/coursierint/CoursierArtifactsTasks.scala index 84f6776fb..ac04e4158 100644 --- a/main/src/main/scala/sbt/coursierint/CoursierArtifactsTasks.scala +++ b/main/src/main/scala/sbt/coursierint/CoursierArtifactsTasks.scala @@ -106,9 +106,8 @@ object CoursierArtifactsTasks { ) } - val sbtArtifactsPublication = sbtArtifacts.collect { - case Some((config, artifact)) => - config -> artifactPublication(artifact) + val sbtArtifactsPublication = sbtArtifacts.collect { case Some((config, artifact)) => + config -> artifactPublication(artifact) } val stdArtifactsSet = sbtArtifacts.flatMap(_.map { case (_, a) => a }.toSeq).toSet diff --git a/main/src/main/scala/sbt/coursierint/LMCoursier.scala b/main/src/main/scala/sbt/coursierint/LMCoursier.scala index dbd6513d4..6063cec70 100644 --- a/main/src/main/scala/sbt/coursierint/LMCoursier.scala +++ b/main/src/main/scala/sbt/coursierint/LMCoursier.scala @@ -19,9 +19,10 @@ import lmcoursier.definitions.{ Reconciliation, Strict => CStrict, } -import lmcoursier._ -import lmcoursier.syntax._ +import lmcoursier.* +import lmcoursier.syntax.* import lmcoursier.credentials.Credentials +import lmcoursier.syntax.* import Keys._ import sbt.internal.util.Util import sbt.librarymanagement._ diff --git a/main/src/main/scala/sbt/internal/Act.scala b/main/src/main/scala/sbt/internal/Act.scala index 50bdde82d..3ac9f256f 100644 --- a/main/src/main/scala/sbt/internal/Act.scala +++ b/main/src/main/scala/sbt/internal/Act.scala @@ -20,18 +20,18 @@ import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, IMap, Set import sbt.util.Show import scala.collection.mutable -final class ParsedKey(val key: ScopedKey[_], val mask: ScopeMask, val separaters: Seq[String]) { - def this(key: ScopedKey[_], mask: ScopeMask) = this(key, mask, Nil) +final class ParsedKey[+A](val key: ScopedKey[A], val mask: ScopeMask, val separaters: Seq[String]): + def this(key: ScopedKey[A], mask: ScopeMask) = this(key, mask, Nil) override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { - case x: ParsedKey => (this.key == x.key) && (this.mask == x.mask) - case _ => false + case x: ParsedKey[_] => (this.key == x.key) && (this.mask == x.mask) + case _ => false }) override def hashCode: Int = { 37 * (37 * (37 * (17 + "sbt.internal.ParsedKey".##) + this.key.##)) + this.mask.## } -} +end ParsedKey object Act { val ZeroString = "*" @@ -54,7 +54,7 @@ object Act { defaultConfigs: Option[ResolvedReference] => Seq[String], keyMap: Map[String, AttributeKey[_]], data: Settings[Scope] - ): Parser[ScopedKey[_]] = + ): Parser[ScopedKey[Any]] = scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key) // the index should be an aggregated index for proper tab completion @@ -63,13 +63,15 @@ object Act { defaultConfigs: Option[ResolvedReference] => Seq[String], structure: BuildStructure ): KeysParser = - for (selected <- scopedKeySelected( - structure.index.aggregateKeyIndex, - current, - defaultConfigs, - structure.index.keyMap, - structure.data - )) + for ( + selected <- scopedKeySelected( + structure.index.aggregateKeyIndex, + current, + defaultConfigs, + structure.index.keyMap, + structure.data + ) + ) yield Aggregation.aggregate(selected.key, selected.mask, structure.extra) def scopedKeyAggregatedSep( @@ -77,16 +79,16 @@ object Act { defaultConfigs: Option[ResolvedReference] => Seq[String], structure: BuildStructure ): KeysParserSep = - for (selected <- scopedKeySelected( - structure.index.aggregateKeyIndex, - current, - defaultConfigs, - structure.index.keyMap, - structure.data - )) - yield Aggregation - .aggregate(selected.key, selected.mask, structure.extra) - .map(k => k -> selected.separaters) + for selected <- scopedKeySelected( + structure.index.aggregateKeyIndex, + current, + defaultConfigs, + structure.index.keyMap, + structure.data + ) + yield Aggregation + .aggregate(selected.key, selected.mask, structure.extra) + .map(k => k -> selected.separaters) def scopedKeySelected( index: KeyIndex, @@ -94,7 +96,7 @@ object Act { defaultConfigs: Option[ResolvedReference] => Seq[String], keyMap: Map[String, AttributeKey[_]], data: Settings[Scope] - ): Parser[ParsedKey] = + ): Parser[ParsedKey[Any]] = scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices => select(choices, data)(showRelativeKey2(current)) } @@ -104,7 +106,7 @@ object Act { current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], keyMap: Map[String, AttributeKey[_]] - ): Parser[Seq[Parser[ParsedKey]]] = { + ): Parser[Seq[Parser[ParsedKey[Any]]]] = { val confParserCache : mutable.Map[Option[sbt.ResolvedReference], Parser[(ParsedAxis[String], Seq[String])]] = mutable.Map.empty @@ -149,7 +151,7 @@ object Act { confAmb: ParsedAxis[String], baseMask: ScopeMask, baseSeps: Seq[String] - ): Seq[Parser[ParsedKey]] = + ): Seq[Parser[ParsedKey[Any]]] = for { conf <- configs(confAmb, defaultConfigs, proj, index) } yield for { @@ -161,7 +163,7 @@ object Act { } yield { val mask = baseMask.copy(task = taskAmb.isExplicit, extra = true) val seps = baseSeps ++ taskSeps - new ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask, seps) + ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask, seps) } def makeScopedKey( @@ -176,9 +178,9 @@ object Act { key ) - def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])( - implicit show: Show[ScopedKey[_]] - ): Parser[ParsedKey] = + def select(allKeys: Seq[Parser[ParsedKey[_]]], data: Settings[Scope])(implicit + show: Show[ScopedKey[_]] + ): Parser[ParsedKey[Any]] = seq(allKeys) flatMap { ss => val default = ss.headOption match { case None => noValidKeys @@ -186,16 +188,16 @@ object Act { } selectFromValid(ss filter isValid(data), default) } - def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])( - implicit show: Show[ScopedKey[_]] - ): Parser[ParsedKey] = + def selectFromValid(ss: Seq[ParsedKey[_]], default: Parser[ParsedKey[_]])(implicit + show: Show[ScopedKey[_]] + ): Parser[ParsedKey[Any]] = selectByTask(selectByConfig(ss)) match { case Seq() => default case Seq(single) => success(single) case multi => failure("Ambiguous keys: " + showAmbiguous(keys(multi))) } - private[this] def keys(ss: Seq[ParsedKey]): Seq[ScopedKey[_]] = ss.map(_.key) - def selectByConfig(ss: Seq[ParsedKey]): Seq[ParsedKey] = + private[this] def keys(ss: Seq[ParsedKey[_]]): Seq[ScopedKey[_]] = ss.map(_.key) + def selectByConfig(ss: Seq[ParsedKey[_]]): Seq[ParsedKey[Any]] = ss match { case Seq() => Nil case Seq(x, tail @ _*) => // select the first configuration containing a valid key @@ -204,7 +206,7 @@ object Act { case xs => x +: xs } } - def selectByTask(ss: Seq[ParsedKey]): Seq[ParsedKey] = { + def selectByTask(ss: Seq[ParsedKey[_]]): Seq[ParsedKey[Any]] = { val (selects, zeros) = ss.partition(_.key.scope.task.isSelect) if (zeros.nonEmpty) zeros else selects } @@ -214,7 +216,7 @@ object Act { def showAmbiguous(keys: Seq[ScopedKey[_]])(implicit show: Show[ScopedKey[_]]): String = keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "") - def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = { + def isValid(data: Settings[Scope])(parsed: ParsedKey[_]): Boolean = { val key = parsed.key data.definingScope(key.scope, key.key) == Some(key.scope) } @@ -235,7 +237,9 @@ object Act { def config(confs: Set[String]): Parser[ParsedAxis[String]] = { val sep = ':' !!! "Expected ':' (if selecting a configuration)" - token((ZeroString ^^^ ParsedZero | value(examples(ID, confs, "configuration"))) <~ sep) ?? Omitted + token( + (ZeroString ^^^ ParsedZero | value(examples(ID, confs, "configuration"))) <~ sep + ) ?? Omitted } // New configuration parser that's able to parse configuration ident trailed by slash. @@ -330,7 +334,8 @@ object Act { knownValues: IMap[AttributeKey, Set] ): Parser[ScopeAxis[AttributeMap]] = { val extrasP = extrasParser(knownKeys, knownValues) - val extras = token('(', hide = _ == 1 && knownValues.isEmpty) ~> extrasP <~ token(')') + val extras = + token('(', hide = (x: Int) => x == 1 && knownValues.isEmpty) ~> extrasP <~ token(')') optionalAxis(extras, Zero) } @@ -383,12 +388,11 @@ object Act { knownValues: IMap[AttributeKey, Set] ): Parser[AttributeEntry[_]] = { val keyp = knownIDParser(knownKeys, "Not a valid extra key") <~ token(':' ~ OptSpace) - keyp flatMap { - case key: AttributeKey[t] => - val valueMap: Map[String, t] = knownValues(key).map(v => (v.toString, v)).toMap - knownIDParser(valueMap, "extra value") map { value => - AttributeEntry(key, value) - } + keyp flatMap { case key: AttributeKey[t] => + val valueMap: Map[String, t] = knownValues(key).map(v => (v.toString, v)).toMap + knownIDParser(valueMap, "extra value") map { value => + AttributeEntry(key, value) + } } } def knownIDParser[T](knownKeys: Map[String, T], label: String): Parser[T] = @@ -416,7 +420,11 @@ object Act { ): Parser[ResolvedReference] = { def projectID(uri: URI) = token( - DQuoteChar ~> examplesStrict(ID, index projects uri, "project ID") <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing + DQuoteChar ~> examplesStrict( + ID, + index projects uri, + "project ID" + ) <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing ) def projectRef(uri: URI) = projectID(uri) map { id => ProjectRef(uri, id) @@ -536,8 +544,8 @@ object Act { structure.data ) - type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T }] - type KeysParserSep = Parser[Seq[(ScopedKey[T], Seq[String])] forSome { type T }] + type KeysParser = Parser[Seq[ScopedKey[Any]]] + type KeysParserSep = Parser[Seq[(ScopedKey[Any], Seq[String])]] def aggregatedKeyParser(state: State): KeysParser = aggregatedKeyParser(Project extract state) def aggregatedKeyParser(extracted: Extracted): KeysParser = diff --git a/main/src/main/scala/sbt/internal/Aggregation.scala b/main/src/main/scala/sbt/internal/Aggregation.scala index 190bf60e4..c776550b0 100644 --- a/main/src/main/scala/sbt/internal/Aggregation.scala +++ b/main/src/main/scala/sbt/internal/Aggregation.scala @@ -75,7 +75,9 @@ object Aggregation { import complete._ val log = state.log val extracted = Project.extract(state) - val success = results match { case Value(_) => true; case Inc(_) => false } + val success = results match + case Result.Value(_) => true + case Result.Inc(_) => false results.toEither.right.foreach { r => if (show.taskValues) printSettings(r, show.print) } @@ -100,25 +102,23 @@ object Aggregation { val start = System.currentTimeMillis val (newS, result) = withStreams(structure, s) { str => val transform = nodeView(s, str, roots, extra) - runTask(toRun, s, str, structure.index.triggers, config)(transform) + runTask(toRun, s, str, structure.index.triggers, config)(using transform) } val stop = System.currentTimeMillis Complete(start, stop, result, newS) } - def runTasks[HL <: HList, T]( + def runTasks[A1]( s: State, - ts: Values[Task[T]], + ts: Values[Task[A1]], extra: DummyTaskMap, show: ShowConfig - )(implicit display: Show[ScopedKey[_]]): State = { - val complete = timedRun[T](s, ts, extra) + )(using display: Show[ScopedKey[_]]): State = + val complete = timedRun[A1](s, ts, extra) showRun(complete, show) - complete.results match { - case Inc(i) => complete.state.handleError(i) - case Value(_) => complete.state - } - } + complete.results match + case Result.Inc(i) => complete.state.handleError(i) + case Result.Value(_) => complete.state def printSuccess( start: Long, @@ -163,7 +163,9 @@ object Aggregation { val mins = f"${total % 3600 / 60}%02d" val secs = f"${total % 60}%02d" s" ($maybeHours$mins:$secs)" - }) s"Total time: $totalString, completed $nowString" + }) + + s"Total time: $totalString, completed $nowString" } def defaultFormat: DateFormat = { diff --git a/main/src/main/scala/sbt/internal/BuildLoader.scala b/main/src/main/scala/sbt/internal/BuildLoader.scala index 76c67addc..6f1832b3d 100644 --- a/main/src/main/scala/sbt/internal/BuildLoader.scala +++ b/main/src/main/scala/sbt/internal/BuildLoader.scala @@ -55,11 +55,10 @@ final class MultiHandler[S, T]( def setRoot(resolver: S => Option[T]) = new MultiHandler(builtIn, Some(resolver), nonRoots, getURI, log) def applyNonRoots(info: S): List[(URI, T)] = - nonRoots flatMap { - case (definingURI, loader) => - loader(info) map { unit => - (definingURI, unit) - } + nonRoots flatMap { case (definingURI, loader) => + loader(info) map { unit => + (definingURI, unit) + } } private[this] def warn(baseMessage: String, log: Logger, matching: Seq[(URI, T)]): Unit = { @@ -183,7 +182,8 @@ object BuildLoader { } } -/** Defines the responsible for loading builds. +/** + * Defines the responsible for loading builds. * * @param fail A reporter for failures. * @param state The state. diff --git a/main/src/main/scala/sbt/internal/BuildStructure.scala b/main/src/main/scala/sbt/internal/BuildStructure.scala index 8bd0bcccf..fea28b587 100644 --- a/main/src/main/scala/sbt/internal/BuildStructure.scala +++ b/main/src/main/scala/sbt/internal/BuildStructure.scala @@ -110,7 +110,7 @@ final class LoadedBuildUnit( ) ) - /** The base directory of the build unit (not the build definition).*/ + /** The base directory of the build unit (not the build definition). */ def localBase = unit.localBase /** @@ -211,8 +211,8 @@ final class DetectedPlugins( private[this] lazy val (autoPluginAutoImports, topLevelAutoPluginAutoImports) = autoPlugins - .flatMap { - case DetectedAutoPlugin(name, _, hasAutoImport) => if (hasAutoImport) Some(name) else None + .flatMap { case DetectedAutoPlugin(name, _, hasAutoImport) => + if (hasAutoImport) Some(name) else None } .partition(nonTopLevelPlugin) @@ -271,8 +271,8 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) { BuildUtil.checkCycles(units) def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = - units.iterator.flatMap { - case (build, unit) => unit.projects.map(p => ProjectRef(build, p.id) -> p) + units.iterator.flatMap { case (build, unit) => + unit.projects.map(p => ProjectRef(build, p.id) -> p) }.toIndexedSeq def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = @@ -377,7 +377,7 @@ object BuildStreams { // The Previous.scopedKeyAttribute is an implementation detail that allows us to get a // more specific cache directory for a task stream. case AttributeEntry(key, _) if key == Previous.scopedKeyAttribute => Nil - case AttributeEntry(key, value) => s"${key.label}=$value" :: Nil + case AttributeEntry(key, value) => s"${key.label}=$value" :: Nil } .mkString(" ") @@ -388,8 +388,8 @@ object BuildStreams { data: Settings[Scope] ): File = scoped.scope.project match { - case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath - case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath + case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath + case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath case Select(pr @ ProjectRef(uri, id)) => refTarget(pr, units(uri).defined(id).base, data) case Select(pr) => sys.error("Unresolved project reference (" + pr + ") in " + displayFull(scoped)) diff --git a/main/src/main/scala/sbt/internal/BuildUtil.scala b/main/src/main/scala/sbt/internal/BuildUtil.scala index 78611a963..6a3cf5087 100644 --- a/main/src/main/scala/sbt/internal/BuildUtil.scala +++ b/main/src/main/scala/sbt/internal/BuildUtil.scala @@ -113,7 +113,8 @@ object BuildUtil { def aggregationRelation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = { val depPairs = for { - (uri, unit) <- units.toIterable // don't lose this toIterable, doing so breaks actions/cross-multiproject & actions/update-state-fail + (uri, unit) <- + units.toIterable // don't lose this toIterable, doing so breaks actions/cross-multiproject & actions/update-state-fail project <- unit.projects ref = ProjectRef(uri, project.id) agg <- project.aggregate diff --git a/main/src/main/scala/sbt/internal/ClasspathImpl.scala b/main/src/main/scala/sbt/internal/ClasspathImpl.scala index c49e34a73..c28b623aa 100644 --- a/main/src/main/scala/sbt/internal/ClasspathImpl.scala +++ b/main/src/main/scala/sbt/internal/ClasspathImpl.scala @@ -327,27 +327,33 @@ private[sbt] object ClasspathImpl { for { ac <- applicableConfigs } // add all configurations in this project - visited add (p -> ac.name) - val masterConfs = names(getConfigurations(projectRef, data).toVector) + visited add (p -> ac.name) + val masterConfs = names(getConfigurations(projectRef, data).toVector) - for { - ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p) - } { - val configurations = getConfigurations(dep, data) - val mapping = - mapped(confMapping, masterConfs, names(configurations.toVector), "compile", "*->compile") - // map master configuration 'c' and all extended configurations to the appropriate dependency configuration for { - ac <- applicableConfigs - depConfName <- mapping(ac.name) + ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p) } { + val configurations = getConfigurations(dep, data) + val mapping = + mapped( + confMapping, + masterConfs, + names(configurations.toVector), + "compile", + "*->compile" + ) + // map master configuration 'c' and all extended configurations to the appropriate dependency configuration for { - depConf <- confOpt(configurations, depConfName) - } if (!visited((dep, depConfName))) { - visit(dep, depConf) + ac <- applicableConfigs + depConfName <- mapping(ac.name) + } { + for { + depConf <- confOpt(configurations, depConfName) + } if (!visited((dep, depConfName))) { + visit(dep, depConf) + } } } - } } visit(projectRef, conf) visited.toSeq diff --git a/main/src/main/scala/sbt/internal/Clean.scala b/main/src/main/scala/sbt/internal/Clean.scala index a9ad3bd75..73e04c5a1 100644 --- a/main/src/main/scala/sbt/internal/Clean.scala +++ b/main/src/main/scala/sbt/internal/Clean.scala @@ -58,7 +58,7 @@ private[sbt] object Clean { case f if f.isDirectory => Glob(f, AnyPath) case f => f.toGlob } ++ (scope / cleanKeepGlobs).value - p: Path => excludes.exists(_.matches(p)) + (p: Path) => excludes.exists(_.matches(p)) } private[this] def cleanDelete(scope: Scope): Def.Initialize[Task[Path => Unit]] = Def.task { // Don't use a regular logger because the logger actually writes to the target directory. diff --git a/main/src/main/scala/sbt/internal/CommandExchange.scala b/main/src/main/scala/sbt/internal/CommandExchange.scala index 992fb8548..381771e94 100644 --- a/main/src/main/scala/sbt/internal/CommandExchange.scala +++ b/main/src/main/scala/sbt/internal/CommandExchange.scala @@ -92,21 +92,22 @@ private[sbt] final class CommandExchange { case s @ Seq(_, _) => Some(s.min) case s => s.headOption } - try Option(deadline match { - case Some(d: Deadline) => - commandQueue.poll(d.timeLeft.toMillis + 1, TimeUnit.MILLISECONDS) match { - case null if idleDeadline.fold(false)(_.isOverdue) => - state.foreach { s => - s.get(BasicKeys.serverIdleTimeout) match { - case Some(Some(d)) => s.log.info(s"sbt idle timeout of $d expired") - case _ => + try + Option(deadline match { + case Some(d: Deadline) => + commandQueue.poll(d.timeLeft.toMillis + 1, TimeUnit.MILLISECONDS) match { + case null if idleDeadline.fold(false)(_.isOverdue) => + state.foreach { s => + s.get(BasicKeys.serverIdleTimeout) match { + case Some(Some(d)) => s.log.info(s"sbt idle timeout of $d expired") + case _ => + } } - } - Exec(TerminateAction, Some(CommandSource(ConsoleChannel.defaultName))) - case x => x - } - case _ => commandQueue.take - }) + Exec(TerminateAction, Some(CommandSource(ConsoleChannel.defaultName))) + case x => x + } + case _ => commandQueue.take + }) catch { case _: InterruptedException => None } } poll match { @@ -134,10 +135,13 @@ private[sbt] final class CommandExchange { } } // Do not manually run GC until the user has been idling for at least the min gc interval. - impl(interval match { - case d: FiniteDuration => Some(d.fromNow) - case _ => None - }, idleDeadline) + impl( + interval match { + case d: FiniteDuration => Some(d.fromNow) + case _ => None + }, + idleDeadline + ) } private def addConsoleChannel(): Unit = @@ -210,7 +214,9 @@ private[sbt] final class CommandExchange { if (server.isEmpty && firstInstance.get) { val h = Hash.halfHashString(IO.toURI(portfile).toString) val serverDir = - sys.env get "SBT_GLOBAL_SERVER_DIR" map file getOrElse BuildPaths.getGlobalBase(s) / "server" + sys.env get "SBT_GLOBAL_SERVER_DIR" map file getOrElse BuildPaths.getGlobalBase( + s + ) / "server" val tokenfile = serverDir / h / "token.json" val socketfile = serverDir / h / "sock" val pipeName = "sbt-server-" + h @@ -290,7 +296,7 @@ private[sbt] final class CommandExchange { // interrupt and kill the thread server.foreach(_.shutdown()) server = None - EvaluateTask.onShutdown + EvaluateTask.onShutdown() } // This is an interface to directly respond events. diff --git a/main/src/main/scala/sbt/internal/CommandStrings.scala b/main/src/main/scala/sbt/internal/CommandStrings.scala index 97d97fa1b..822a8c99a 100644 --- a/main/src/main/scala/sbt/internal/CommandStrings.scala +++ b/main/src/main/scala/sbt/internal/CommandStrings.scala @@ -12,7 +12,7 @@ import sbt.io.Path object CommandStrings { - /** The prefix used to identify a request to execute the remaining input on source changes.*/ + /** The prefix used to identify a request to execute the remaining input on source changes. */ val AboutCommand = "about" val TasksCommand = "tasks" val SettingsCommand = "settings" diff --git a/main/src/main/scala/sbt/internal/CrossJava.scala b/main/src/main/scala/sbt/internal/CrossJava.scala index 6e352d0d9..58f34d36a 100644 --- a/main/src/main/scala/sbt/internal/CrossJava.scala +++ b/main/src/main/scala/sbt/internal/CrossJava.scala @@ -432,7 +432,7 @@ private[sbt] object CrossJava { val base: File = Path.userHome / ".sdkman" / "candidates" / "java" def candidates(): Vector[String] = wrapNull(base.list()) def javaHomes: Vector[(String, File)] = - candidates.collect { + candidates().collect { case dir if dir.contains("-") => CrossJava.parseSdkmanString(dir) match { case Success(v) => Some(v.toString -> (base / dir)) diff --git a/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala b/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala index 1cb9416cb..8df38544f 100644 --- a/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala +++ b/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala @@ -182,7 +182,7 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe while (jobSet.nonEmpty && !deadline.isOverdue) { jobSet.headOption.foreach { case handle: ThreadJobHandle @unchecked => - if (handle.job.isRunning) { + if (handle.job.isRunning()) { handle.job.shutdown() handle.job.awaitTerminationTry(10.seconds) } @@ -451,11 +451,12 @@ private[sbt] class BackgroundThreadPool extends java.io.Closeable { ) extends BackgroundRunnable(taskName, body) { override def awaitTermination(duration: Duration): Unit = { try super.awaitTermination(duration) - finally loader.foreach { - case ac: AutoCloseable => ac.close() - case cp: ClasspathFilter => cp.close() - case _ => - } + finally + loader.foreach { + case ac: AutoCloseable => ac.close() + case cp: ClasspathFilter => cp.close() + case _ => + } } } diff --git a/main/src/main/scala/sbt/internal/GlobalPlugin.scala b/main/src/main/scala/sbt/internal/GlobalPlugin.scala index 285f15910..443141183 100644 --- a/main/src/main/scala/sbt/internal/GlobalPlugin.scala +++ b/main/src/main/scala/sbt/internal/GlobalPlugin.scala @@ -105,7 +105,7 @@ object GlobalPlugin { withStreams(structure, state) { str => val nv = nodeView(state, str, roots) val config = EvaluateTask.extractedTaskConfig(Project.extract(state), structure, state) - val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(nv) + val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(using nv) (newS, processResult2(result)) } } diff --git a/main/src/main/scala/sbt/internal/InstallSbtn.scala b/main/src/main/scala/sbt/internal/InstallSbtn.scala index b6a1a2682..ebed727f7 100644 --- a/main/src/main/scala/sbt/internal/InstallSbtn.scala +++ b/main/src/main/scala/sbt/internal/InstallSbtn.scala @@ -100,10 +100,12 @@ private[sbt] object InstallSbtn { try { val result = new Array[Byte](1024 * 1024) var bytesRead = -1 - do { + def impl(): Unit = { bytesRead = inputStream.read(result) if (bytesRead > 0) os.write(result, 0, bytesRead) - } while (bytesRead > 0) + } + impl() + while bytesRead > 0 do impl() } finally os.close() } finally inputStream.close() private[this] def getShell(term: Terminal): String = { @@ -139,12 +141,13 @@ private[sbt] object InstallSbtn { setCompletions: Path => String, ): Unit = { val bin = baseDirectory.resolve("bin") - val export = setPath(bin) + val exp = setPath(bin) val completions = baseDirectory.resolve("completions") val sourceCompletions = setCompletions(completions) - val contents = try IO.read(configFile) - catch { case _: IOException => "" } - if (!contents.contains(export)) { + val contents = + try IO.read(configFile) + catch { case _: IOException => "" } + if (!contents.contains(exp)) { term.printStream.print(s"Add $bin to PATH in $configFile? y/n (y default): ") term.printStream.flush() term.inputStream.read() match { @@ -153,11 +156,12 @@ private[sbt] object InstallSbtn { term.printStream.println(c.toChar) // put the export at the bottom so that the ~/.sbt/1.0/bin/sbtn is least preferred // but still on the path - IO.write(configFile, s"$contents\n$export") + IO.write(configFile, s"$contents\n$exp") } } - val newContents = try IO.read(configFile) - catch { case _: IOException => "" } + val newContents = + try IO.read(configFile) + catch { case _: IOException => "" } if (!newContents.contains(sourceCompletions)) { term.printStream.print(s"Add tab completions to $configFile? y/n (y default): ") term.printStream.flush() diff --git a/main/src/main/scala/sbt/internal/InternalDependencies.scala b/main/src/main/scala/sbt/internal/InternalDependencies.scala index 6808f5e6a..fe99fc32b 100644 --- a/main/src/main/scala/sbt/internal/InternalDependencies.scala +++ b/main/src/main/scala/sbt/internal/InternalDependencies.scala @@ -17,18 +17,17 @@ private[sbt] object InternalDependencies { val projectDependencies = buildDependencies.value.classpath.get(ref).toSeq.flatten val applicableConfigs = allConfigs + "*" ((ref -> allConfigs) +: - projectDependencies.flatMap { - case ResolvedClasspathDependency(p, rawConfigs) => - val configs = rawConfigs.getOrElse("*->compile").split(";").flatMap { config => - config.split("->") match { - case Array(n, c) if applicableConfigs.contains(n) => Some(c) - case Array(n) if applicableConfigs.contains(n) => - // "test" is equivalent to "compile->test" - Some("compile") - case _ => None - } + projectDependencies.flatMap { case ResolvedClasspathDependency(p, rawConfigs) => + val configs = rawConfigs.getOrElse("*->compile").split(";").flatMap { config => + config.split("->") match { + case Array(n, c) if applicableConfigs.contains(n) => Some(c) + case Array(n) if applicableConfigs.contains(n) => + // "test" is equivalent to "compile->test" + Some("compile") + case _ => None } - if (configs.isEmpty) None else Some(p -> configs.toSet) + } + if (configs.isEmpty) None else Some(p -> configs.toSet) }).distinct } } diff --git a/main/src/main/scala/sbt/internal/IvyConsole.scala b/main/src/main/scala/sbt/internal/IvyConsole.scala index ec61b1dda..ee889a24d 100644 --- a/main/src/main/scala/sbt/internal/IvyConsole.scala +++ b/main/src/main/scala/sbt/internal/IvyConsole.scala @@ -33,9 +33,12 @@ object IvyConsole { final val Name = "ivy-console" lazy val command = Command.command(Name) { state => - val Dependencies(managed, repos, unmanaged) = parseDependencies(state.remainingCommands map { - _.commandLine - }, state.log) + val Dependencies(managed, repos, unmanaged) = parseDependencies( + state.remainingCommands map { + _.commandLine + }, + state.log + ) val base = new File(CommandUtil.bootDirectory(state), Name) IO.createDirectory(base) diff --git a/main/src/main/scala/sbt/internal/LayeredClassLoaders.scala b/main/src/main/scala/sbt/internal/LayeredClassLoaders.scala index 579b08415..51a148ec2 100644 --- a/main/src/main/scala/sbt/internal/LayeredClassLoaders.scala +++ b/main/src/main/scala/sbt/internal/LayeredClassLoaders.scala @@ -24,7 +24,6 @@ import scala.collection.JavaConverters._ * If the top layer needs to load a class from the bottom layer via java reflection, we facilitate * that with the `ReverseLookupClassLoader`. * - * * This holder caches the ReverseLookupClassLoader, which is the top loader in this hierarchy. The * checkout method will get the RevereLookupClassLoader from the cache or make a new one if * none is available. It will only cache at most one so if multiple concurrently tasks have the diff --git a/main/src/main/scala/sbt/internal/LintUnused.scala b/main/src/main/scala/sbt/internal/LintUnused.scala index 17bb15924..4163ae3d0 100644 --- a/main/src/main/scala/sbt/internal/LintUnused.scala +++ b/main/src/main/scala/sbt/internal/LintUnused.scala @@ -97,13 +97,12 @@ object LintUnused { if (size == 1) buffer.append("there's a key that's not used by any other settings/tasks:") else buffer.append(s"there are $size keys that are not used by any other settings/tasks:") buffer.append(" ") - result foreach { - case (_, str, positions) => - buffer.append(s"* $str") - positions foreach { - case pos: FilePosition => buffer.append(s" +- ${pos.path}:${pos.startLine}") - case _ => () - } + result foreach { case (_, str, positions) => + buffer.append(s"* $str") + positions foreach { + case pos: FilePosition => buffer.append(s" +- ${pos.path}:${pos.startLine}") + case _ => () + } } buffer.append(" ") buffer.append( diff --git a/main/src/main/scala/sbt/internal/PluginDiscovery.scala b/main/src/main/scala/sbt/internal/PluginDiscovery.scala index 0f3998fc5..2cbe6724c 100644 --- a/main/src/main/scala/sbt/internal/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/internal/PluginDiscovery.scala @@ -135,7 +135,7 @@ object PluginDiscovery { } } - /** Returns `true` if `url` is an entry in `classpath`.*/ + /** Returns `true` if `url` is an entry in `classpath`. */ def onClasspath(classpath: Seq[File])(url: URL): Boolean = IO.urlAsFile(url) exists (classpath.contains _) diff --git a/main/src/main/scala/sbt/internal/PluginsDebug.scala b/main/src/main/scala/sbt/internal/PluginsDebug.scala index b3d3c6850..560c15435 100644 --- a/main/src/main/scala/sbt/internal/PluginsDebug.scala +++ b/main/src/main/scala/sbt/internal/PluginsDebug.scala @@ -447,7 +447,7 @@ private[sbt] object PluginsDebug { private[this] def excludedPluginsError(transitive: Boolean)(dependencies: List[AutoPlugin]) = s"Required ${transitiveString(transitive)}dependencies were excluded:\n\t${labels(dependencies) - .mkString("\n\t")}" + .mkString("\n\t")}" private[this] def transitiveString(transitive: Boolean) = if (transitive) "(transitive) " else "" diff --git a/main/src/main/scala/sbt/internal/Resolve.scala b/main/src/main/scala/sbt/internal/Resolve.scala index 8e92e1b78..8162c2261 100644 --- a/main/src/main/scala/sbt/internal/Resolve.scala +++ b/main/src/main/scala/sbt/internal/Resolve.scala @@ -42,8 +42,7 @@ object Resolve { def resolveConfig[P](index: BuildUtil[P], key: AttributeKey[_], mask: ScopeMask)( scope: Scope, ): Scope = - if (mask.config) - scope + if (mask.config) scope else { val (resolvedRef, proj) = scope.project match { case Zero | This => (None, index.thisRootProject) diff --git a/main/src/main/scala/sbt/internal/Script.scala b/main/src/main/scala/sbt/internal/Script.scala index 203ee2000..cff4ae01e 100644 --- a/main/src/main/scala/sbt/internal/Script.scala +++ b/main/src/main/scala/sbt/internal/Script.scala @@ -76,8 +76,7 @@ object Script { def blocks(file: File): Seq[Block] = { val lines = IO.readLines(file).toIndexedSeq def blocks(b: Block, acc: List[Block]): List[Block] = - if (b.lines.isEmpty) - acc.reverse + if (b.lines.isEmpty) acc.reverse else { val (dropped, blockToEnd) = b.lines.span { line => !line.startsWith(BlockStart) diff --git a/main/src/main/scala/sbt/internal/SysProp.scala b/main/src/main/scala/sbt/internal/SysProp.scala index b2846a1b7..629dc6a5c 100644 --- a/main/src/main/scala/sbt/internal/SysProp.scala +++ b/main/src/main/scala/sbt/internal/SysProp.scala @@ -186,7 +186,8 @@ object SysProp { private[this] def file(value: String): File = new File(value) private[this] def home: File = file(sys.props("user.home")) - /** Operating system specific cache directory, similar to Coursier cache. + /** + * Operating system specific cache directory, similar to Coursier cache. */ def globalLocalCache: File = { val appName = "sbt" diff --git a/main/src/main/scala/sbt/internal/TaskTimings.scala b/main/src/main/scala/sbt/internal/TaskTimings.scala index 5b2041e3f..a2d95b361 100644 --- a/main/src/main/scala/sbt/internal/TaskTimings.scala +++ b/main/src/main/scala/sbt/internal/TaskTimings.scala @@ -76,7 +76,7 @@ private[sbt] final class TaskTimings(reportOnShutdown: Boolean, logger: Logger) val maxTime = times.map { _._2 }.max.toString.length times.foreach { case (taskName, time) => logger.info(s" ${taskName.padTo(maxTaskNameLength, ' ')}: ${"" - .padTo(maxTime - time.toString.length, ' ')}$time $unit") + .padTo(maxTime - time.toString.length, ' ')}$time $unit") } } } diff --git a/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala b/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala index 02d107e83..198286ac8 100644 --- a/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala +++ b/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala @@ -25,7 +25,7 @@ sealed trait VirtualFileValueCache[A] { object VirtualFileValueCache { def definesClassCache(converter: FileConverter): VirtualFileValueCache[DefinesClass] = { - apply(converter) { x: VirtualFile => + apply(converter) { (x: VirtualFile) => if (x.name.toString != "rt.jar") Locate.definesClass(x) else (_: String) => false } @@ -34,9 +34,13 @@ object VirtualFileValueCache { import collection.mutable.{ HashMap, Map } val stampCache: Map[VirtualFileRef, (Long, XStamp)] = new HashMap make( - Stamper.timeWrap(stampCache, converter, { - case (vf: VirtualFile) => Stamper.forContentHash(vf) - }) + Stamper.timeWrap( + stampCache, + converter, + { case (vf: VirtualFile) => + Stamper.forContentHash(vf) + } + ) )(f) } def make[A](stamp: VirtualFile => XStamp)(f: VirtualFile => A): VirtualFileValueCache[A] = @@ -46,8 +50,8 @@ object VirtualFileValueCache { private[this] final class VirtualFileValueCache0[A]( getStamp: VirtualFile => XStamp, make: VirtualFile => A -)( - implicit equiv: Equiv[XStamp] +)(implicit + equiv: Equiv[XStamp] ) extends VirtualFileValueCache[A] { private[this] val backing = new ConcurrentHashMap[VirtualFile, VirtualFileCache] diff --git a/main/src/main/scala/sbt/internal/parser/SbtParser.scala b/main/src/main/scala/sbt/internal/parser/SbtParser.scala deleted file mode 100644 index e281368e6..000000000 --- a/main/src/main/scala/sbt/internal/parser/SbtParser.scala +++ /dev/null @@ -1,415 +0,0 @@ -/* - * sbt - * Copyright 2011 - 2018, Lightbend, Inc. - * Copyright 2008 - 2010, Mark Harrah - * Licensed under Apache License 2.0 (see LICENSE) - */ - -package sbt -package internal -package parser - -import sbt.internal.util.{ LineRange, MessageOnlyException } -import java.io.File -import java.util.concurrent.ConcurrentHashMap - -import sbt.internal.parser.SbtParser._ - -import scala.compat.Platform.EOL -import scala.reflect.internal.util.{ BatchSourceFile, Position } -import scala.reflect.io.VirtualDirectory -import scala.reflect.internal.Positions -import scala.tools.nsc.{ CompilerCommand, Global, Settings } -import scala.tools.nsc.reporters.{ ConsoleReporter, FilteringReporter, StoreReporter } -import scala.util.Random -import scala.util.{ Failure, Success } - -private[sbt] object SbtParser { - val END_OF_LINE_CHAR = '\n' - val END_OF_LINE = String.valueOf(END_OF_LINE_CHAR) - private[parser] val NOT_FOUND_INDEX = -1 - private[sbt] val FAKE_FILE = new File("fake") - private[parser] val XML_ERROR = "';' expected but 'val' found." - - private val XmlErrorMessage = - """Probably problem with parsing xml group, please add parens or semicolons: - |Replace: - |val xmlGroup = - |with: - |val xmlGroup = () - |or - |val xmlGroup = ; - """.stripMargin - - private final val defaultClasspath = - sbt.io.Path.makeString(sbt.io.IO.classLocationPath[Product].toFile :: Nil) - - /** - * Provides the previous error reporting functionality in - * [[scala.tools.reflect.ToolBox]]. - * - * This parser is a wrapper around a collection of reporters that are - * indexed by a unique key. This is used to ensure that the reports of - * one parser don't collide with other ones in concurrent settings. - * - * This parser is a sign that this whole parser should be rewritten. - * There are exceptions everywhere and the logic to work around - * the scalac parser bug heavily relies on them and it's tied - * to the test suite. Ideally, we only want to throw exceptions - * when we know for a fact that the user-provided snippet doesn't - * parse. - */ - private[sbt] class UniqueParserReporter(val settings: Settings) extends FilteringReporter { - - private val reporters = new ConcurrentHashMap[String, StoreReporter]() - - override def doReport(pos: Position, msg: String, severity: Severity): Unit = { - val reporter = getReporter(pos.source.file.name) - severity.id match { - case 0 => reporter.echo(pos, msg) - case 1 => reporter.warning(pos, msg) - case 2 => reporter.error(pos, msg) - } - } - - // weird hack to make sure errors are counted by the underlying - // reporters in both Scala 2.12 and 2.13.x - // see https://github.com/scala/bug/issues/12317 - override def filter(pos: Position, msg: String, severity: Severity): Int = { - val reporter = getReporter(pos.source.file.name) - val result = reporter.filter(pos, msg, severity) - if (result <= 1) reporter.increment(severity) - if (result == 0) reporter.doReport(pos, msg, severity) - result - } - - override def hasErrors: Boolean = { - var result = false - reporters.forEachValue(100, r => if (r.hasErrors) result = true) - result - } - - def createReporter(uniqueFileName: String): StoreReporter = { - val r = new StoreReporter(settings) - reporters.put(uniqueFileName, r) - r - } - - def getOrCreateReporter(uniqueFileName: String): StoreReporter = { - val r = reporters.get(uniqueFileName) - if (r == null) createReporter(uniqueFileName) - else r - } - - private def getReporter(fileName: String) = { - val reporter = reporters.get(fileName) - if (reporter == null) { - scalacGlobalInitReporter.getOrElse( - sys.error(s"sbt forgot to initialize `scalacGlobalInitReporter`.") - ) - } else reporter - } - - def throwParserErrorsIfAny(reporter: StoreReporter, fileName: String): Unit = { - if (reporter.hasErrors) { - val seq = reporter.infos.map { info => - s"""[$fileName]:${info.pos.line}: ${info.msg}""" - } - val errorMessage = seq.mkString(EOL) - val error: String = - if (errorMessage.contains(XML_ERROR)) - s"$errorMessage\n${SbtParser.XmlErrorMessage}" - else errorMessage - throw new MessageOnlyException(error) - } else () - } - } - - private[sbt] var scalacGlobalInitReporter: Option[ConsoleReporter] = None - - private[sbt] final val (defaultGlobalForParser, globalReporter) = { - val options = "-cp" :: s"$defaultClasspath" :: "-Yrangepos" :: Nil - val reportError = (msg: String) => System.err.println(msg) - val command = new CompilerCommand(options, reportError) - val settings = command.settings - settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) - scalacGlobalInitReporter = Some(new ConsoleReporter(settings)) - - val reporter = new UniqueParserReporter(settings) - // Mix Positions, otherwise global ignores -Yrangepos - val global = new Global(settings, reporter) with Positions - val run = new global.Run - // Add required dummy unit for initialization... - val initFile = new BatchSourceFile("", "") - val _ = new global.CompilationUnit(initFile) - global.phase = run.parserPhase - (global, reporter) - } - - import defaultGlobalForParser.Tree - - /** - * Parse code reusing the same [[Run]] instance. - * - * @param code The code to be parsed. - * @param filePath The file name where the code comes from. - * @param reporterId0 The reporter id is the key used to get the pertinent - * reporter. Given that the parsing reuses a global - * instance, this reporter id makes sure that every parsing - * session gets its own errors in a concurrent setting. - * The reporter id must be unique per parsing session. - * @return - */ - private[sbt] def parse( - code: String, - filePath: String, - reporterId0: Option[String] - ): (Seq[Tree], String) = { - import defaultGlobalForParser._ - val reporterId = reporterId0.getOrElse(s"$filePath-${Random.nextInt}") - val reporter = globalReporter.getOrCreateReporter(reporterId) - reporter.reset() - val wrapperFile = new BatchSourceFile(reporterId, code) - val unit = new CompilationUnit(wrapperFile) - val parser = SbtParser.synchronized { // see https://github.com/sbt/sbt/issues/4148 - new syntaxAnalyzer.UnitParser(unit) - } - val parsedTrees = SbtParser.synchronized { // see https://github.com/scala/bug/issues/10605 - parser.templateStats() - } - parser.accept(scala.tools.nsc.ast.parser.Tokens.EOF) - globalReporter.throwParserErrorsIfAny(reporter, filePath) - parsedTrees -> reporterId - } -} - -private class SbtParserInit { - new Thread("sbt-parser-init-thread") { - setDaemon(true) - start() - override def run(): Unit = { - val _ = SbtParser.defaultGlobalForParser - } - } -} - -/** - * This method solely exists to add scaladoc to members in SbtParser which - * are defined using pattern matching. - */ -sealed trait ParsedSbtFileExpressions { - - /** The set of parsed import expressions. */ - def imports: Seq[(String, Int)] - - /** The set of parsed definitions and/or sbt build settings. */ - def settings: Seq[(String, LineRange)] - - /** The set of scala tree's for parsed definitions/settings and the underlying string representation.. */ - def settingsTrees: Seq[(String, Global#Tree)] - -} - -/** - * An initial parser/splitter of .sbt files. - * - * This class is responsible for chunking a `.sbt` file into expression ranges - * which we can then compile using the Scala compiler. - * - * Example: - * - * {{{ - * val parser = SbtParser(myFile, IO.readLines(myFile)) - * // All import statements - * val imports = parser.imports - * // All other statements (val x =, or raw settings) - * val settings = parser.settings - * }}} - * - * @param file The file we're parsing (may be a dummy file) - * @param lines The parsed "lines" of the file, where each string is a line. - */ -private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends ParsedSbtFileExpressions { - //settingsTrees,modifiedContent needed for "session save" - // TODO - We should look into splitting out "definitions" vs. "settings" here instead of further string lookups, since we have the - // parsed trees. - val (imports, settings, settingsTrees) = splitExpressions(file, lines) - - import SbtParser.defaultGlobalForParser._ - - private def splitExpressions( - file: File, - lines: Seq[String] - ): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = { - import sbt.internal.parser.MissingBracketHandler.findMissingText - - val indexedLines = lines.toIndexedSeq - val content = indexedLines.mkString(END_OF_LINE) - val fileName = file.getAbsolutePath - val (parsedTrees, reporterId) = parse(content, fileName, None) - - // Check No val (a,b) = foo *or* val a,b = foo as these are problematic to range positions and the WHOLE architecture. - def isBadValDef(t: Tree): Boolean = - t match { - case x @ ValDef(_, _, _, rhs) if rhs != EmptyTree => - val c = content.substring(x.pos.start, x.pos.end) - !(c contains "=") - case _ => false - } - parsedTrees.filter(isBadValDef).foreach { badTree => - // Issue errors - val positionLine = badTree.pos.line - throw new MessageOnlyException( - s"""[$fileName]:$positionLine: Pattern matching in val statements is not supported""".stripMargin - ) - } - - val (imports: Seq[Tree], statements: Seq[Tree]) = parsedTrees partition { - case _: Import => true - case _ => false - } - - /* - * See BugInParser - * @param t - tree - * @param originalStatement - original - * @return originalStatement or originalStatement with missing bracket - */ - def parseStatementAgain(t: Tree, originalStatement: String): String = { - val statement = scala.util.Try(parse(originalStatement, fileName, Some(reporterId))) match { - case Failure(th) => - val missingText = - findMissingText(content, t.pos.end, t.pos.line, fileName, th, Some(reporterId)) - originalStatement + missingText - case _ => - originalStatement - } - statement - } - - def convertStatement(t: Tree): Option[(String, Tree, LineRange)] = - t.pos match { - case NoPosition => - None - case position => - val originalStatement = content.substring(position.start, position.end) - val statement = parseStatementAgain(t, originalStatement) - val numberLines = countLines(statement) - Some((statement, t, LineRange(position.line - 1, position.line + numberLines))) - } - val stmtTreeLineRange = statements flatMap convertStatement - val importsLineRange = importsToLineRanges(content, imports) - (importsLineRange, stmtTreeLineRange.map { case (stmt, _, lr) => (stmt, lr) }, stmtTreeLineRange.map { - case (stmt, tree, _) => (stmt, tree) - }) - } - - /** - * import sbt._, Keys._,java.util._ should return ("import sbt._, Keys._,java.util._",0) - * @param modifiedContent - modifiedContent - * @param imports - trees - * @return imports per line - */ - private def importsToLineRanges( - modifiedContent: String, - imports: Seq[Tree] - ): Seq[(String, Int)] = { - val toLineRange = imports map convertImport - val groupedByLineNumber = toLineRange.groupBy { case (_, lineNumber) => lineNumber } - val mergedImports = groupedByLineNumber.map { - case (l, seq) => (l, extractLine(modifiedContent, seq)) - } - mergedImports.toSeq.sortBy(_._1).map { case (k, v) => (v, k) } - } - - /** - * @param t - tree - * @return ((start, end), lineNumber) - */ - private def convertImport(t: Tree): ((Int, Int), Int) = { - val lineNumber = t.pos.line - 1 - ((t.pos.start, t.pos.end), lineNumber) - } - - /** - * Search for min begin index and max end index - * @param modifiedContent - modifiedContent - * @param importsInOneLine - imports in line - * @return - text - */ - private def extractLine( - modifiedContent: String, - importsInOneLine: Seq[((Int, Int), Int)] - ): String = { - val (begin, end) = importsInOneLine.foldLeft((Int.MaxValue, Int.MinValue)) { - case ((min, max), ((start, end), _)) => - (min.min(start), max.max(end)) - } - modifiedContent.substring(begin, end) - } - - private def countLines(statement: String) = statement.count(c => c == END_OF_LINE_CHAR) -} - -/** - * Scala parser cuts last bracket - - * @see https://github.com/scala/scala/pull/3991 - */ -private[sbt] object MissingBracketHandler { - - /** - * - * @param content - parsed file - * @param positionEnd - from index - * @param positionLine - number of start position line - * @param fileName - file name - * @param originalException - original exception - * @return missing text - */ - private[sbt] def findMissingText( - content: String, - positionEnd: Int, - positionLine: Int, - fileName: String, - originalException: Throwable, - reporterId: Option[String] = Some(Random.nextInt.toString) - ): String = { - findClosingBracketIndex(content, positionEnd) match { - case Some(index) => - val text = content.substring(positionEnd, index + 1) - val textWithoutBracket = text.substring(0, text.length - 1) - scala.util.Try(SbtParser.parse(textWithoutBracket, fileName, reporterId)) match { - case Success(_) => - text - case Failure(_) => - findMissingText( - content, - index + 1, - positionLine, - fileName, - originalException, - reporterId - ) - } - case _ => - throw new MessageOnlyException( - s"""[$fileName]:$positionLine: ${originalException.getMessage}""".stripMargin - ) - } - } - - /** - * - * @param content - parsed file - * @param from - start index - * @return first not commented index or None - */ - private[sbt] def findClosingBracketIndex(content: String, from: Int): Option[Int] = { - val index = content.indexWhere(c => c == '}' || c == ')', from) - if (index == NOT_FOUND_INDEX) { - None - } else { - Some(index) - } - } -} diff --git a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala index 0acab0974..3b18de70f 100644 --- a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala +++ b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala @@ -153,7 +153,7 @@ final class NetworkChannel( override private[sbt] val channel = NetworkChannel.this override private[sbt] lazy val reader: UITask.Reader = () => { try { - this.synchronized(this.wait) + this.synchronized((this.wait())) Left(TerminateAction) } catch { case _: InterruptedException => Right("") @@ -197,17 +197,17 @@ final class NetworkChannel( } lazy val onRequestMessage: PartialFunction[JsonRpcRequestMessage, Unit] = - intents.foldLeft(PartialFunction.empty[JsonRpcRequestMessage, Unit]) { - case (f, i) => f orElse i.onRequest + intents.foldLeft(PartialFunction.empty[JsonRpcRequestMessage, Unit]) { case (f, i) => + f orElse i.onRequest } lazy val onResponseMessage: PartialFunction[JsonRpcResponseMessage, Unit] = - intents.foldLeft(PartialFunction.empty[JsonRpcResponseMessage, Unit]) { - case (f, i) => f orElse i.onResponse + intents.foldLeft(PartialFunction.empty[JsonRpcResponseMessage, Unit]) { case (f, i) => + f orElse i.onResponse } lazy val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit] = - intents.foldLeft(PartialFunction.empty[JsonRpcNotificationMessage, Unit]) { - case (f, i) => f orElse i.onNotification + intents.foldLeft(PartialFunction.empty[JsonRpcNotificationMessage, Unit]) { case (f, i) => + f orElse i.onNotification } def handleBody(chunk: Seq[Byte]): Unit = { @@ -335,34 +335,37 @@ final class NetworkChannel( /* * Do writes on a background thread because otherwise the client socket can get blocked. */ - private[this] val writeThread = new Thread(() => { - @tailrec def impl(): Unit = { - val (event, delimit) = - try pendingWrites.take - catch { - case _: InterruptedException => - alive.set(false) - (Array.empty[Byte], false) - } - if (alive.get) { - try { - out.write(event) - if (delimit) { - out.write(delimiter.toInt) + private[this] val writeThread = new Thread( + () => { + @tailrec def impl(): Unit = { + val (event, delimit) = + try pendingWrites.take + catch { + case _: InterruptedException => + alive.set(false) + (Array.empty[Byte], false) } - out.flush() - } catch { - case _: IOException => - alive.set(false) - shutdown(true) - case _: InterruptedException => - alive.set(false) + if (alive.get) { + try { + out.write(event) + if (delimit) { + out.write(delimiter.toInt) + } + out.flush() + } catch { + case _: IOException => + alive.set(false) + shutdown(true) + case _: InterruptedException => + alive.set(false) + } + impl() } - impl() } - } - impl() - }, s"sbt-$name-write-thread") + impl() + }, + s"sbt-$name-write-thread" + ) writeThread.setDaemon(true) writeThread.start() @@ -449,7 +452,11 @@ final class NetworkChannel( val runKeys = keys.filter(_.key.label == "runMain") val (runState, cachedMainClassNames) = runKeys.foldLeft((testState, true)) { case ((st, allCached), k) => - SessionVar.loadAndSet(sbt.Keys.discoveredMainClasses in k.scope, st, true) match { + SessionVar.loadAndSet( + sbt.Keys.discoveredMainClasses in k.scope, + st, + true + ) match { case (nst, d) => (nst, allCached && d.isDefined) } } @@ -513,8 +520,10 @@ final class NetworkChannel( // direct comparison on strings and // remove hotspring unicode added character for numbers - if (checkId || (crp.id == Serialization.CancelAll && - StandardMain.exchange.currentExec.exists(_.source.exists(_.channelName == name)))) { + if ( + checkId() || (crp.id == Serialization.CancelAll && + StandardMain.exchange.currentExec.exists(_.source.exists(_.channelName == name))) + ) { runningEngine.cancelAndShutdown() respondResult( @@ -659,18 +668,20 @@ final class NetworkChannel( import scala.collection.JavaConverters._ private[this] val outputBuffer = new LinkedBlockingQueue[Byte] - private[this] val flushExecutor = Executors.newSingleThreadScheduledExecutor( - r => new Thread(r, s"$name-output-buffer-timer-thread") + private[this] val flushExecutor = Executors.newSingleThreadScheduledExecutor(r => + new Thread(r, s"$name-output-buffer-timer-thread") ) - private[this] def forceFlush() = { + + private[this] def forceFlush(): Unit = Util.ignoreResult(flushExecutor.shutdownNow()) doFlush() - } - private[this] def doFlush()() = { + + private[this] def doFlush() = { val list = new java.util.ArrayList[Byte] outputBuffer.synchronized(outputBuffer.drainTo(list)) if (!list.isEmpty) jsonRpcNotify(Serialization.systemOut, list.asScala.toSeq) } + private[this] lazy val outputStream: OutputStream with AutoCloseable = new OutputStream with AutoCloseable { /* @@ -769,17 +780,21 @@ final class NetworkChannel( try { blockedThreads.synchronized(blockedThreads.add(t)) f - } catch { case _: InterruptedException => default } finally { + } catch { case _: InterruptedException => default } + finally { Util.ignoreResult(blockedThreads.synchronized(blockedThreads.remove(t))) } } def getProperty[T](f: TerminalPropertiesResponse => T, default: T): Option[T] = { if (closed.get || !isAttached) None else - withThread({ - getProperties(true); - Some(f(Option(properties.get).getOrElse(empty))) - }, None) + withThread( + { + getProperties(true); + Some(f(Option(properties.get).getOrElse(empty))) + }, + None + ) } private[this] def waitForPending(f: TerminalPropertiesResponse => Boolean): Boolean = { if (closed.get || !isAttached) false @@ -871,8 +886,9 @@ final class NetworkChannel( override private[sbt] def getSizeImpl: (Int, Int) = if (!closed.get) { val queue = VirtualTerminal.getTerminalSize(name, jsonRpcRequest) - val res = try queue.take - catch { case _: InterruptedException => TerminalGetSizeResponse(1, 1) } + val res = + try queue.take + catch { case _: InterruptedException => TerminalGetSizeResponse(1, 1) } (res.width, res.height) } else (1, 1) override def setSize(width: Int, height: Int): Unit = diff --git a/main/src/test/scala/Delegates.scala b/main/src/test/scala/Delegates.scala index 2d317e8c2..2bdb03df5 100644 --- a/main/src/test/scala/Delegates.scala +++ b/main/src/test/scala/Delegates.scala @@ -17,22 +17,34 @@ object Delegates extends Properties { override def tests: List[Test] = List( - property("generate non-empty configs", cGen.forAll.map { c => - assert(c.nonEmpty) - }), - property("generate non-empty tasks", tGen.forAll.map { t => - assert(t.nonEmpty) - }), - property("no duplicate scopes", keysGen.forAll.map { keys => - allDelegates(keys) { (_, ds) => - ds.distinct.size ==== ds.size + property( + "generate non-empty configs", + cGen.forAll.map { c => + assert(c.nonEmpty) } - }), - property("delegates non-empty", keysGen.forAll.map { keys => - allDelegates(keys) { (_, ds) => - assert(ds.nonEmpty) + ), + property( + "generate non-empty tasks", + tGen.forAll.map { t => + assert(t.nonEmpty) } - }), + ), + property( + "no duplicate scopes", + keysGen.forAll.map { keys => + allDelegates(keys) { (_, ds) => + ds.distinct.size ==== ds.size + } + } + ), + property( + "delegates non-empty", + keysGen.forAll.map { keys => + allDelegates(keys) { (_, ds) => + assert(ds.nonEmpty) + } + } + ), property("An initially Zero axis is Zero in all delegates", allAxes(alwaysZero)), property( "Projects precede builds precede Zero", @@ -49,71 +61,73 @@ object Delegates extends Properties { ), property( "Initial scope present with all combinations of Global axes", - allAxes( - (s, ds, _) => globalCombinations(s, ds) - ) + allAxes((s, ds, _) => globalCombinations(s, ds)) ), - property("initial scope first", keysGen.forAll.map { keys => - allDelegates(keys) { (scope, ds) => - ds.head ==== scope + property( + "initial scope first", + keysGen.forAll.map { keys => + allDelegates(keys) { (scope, ds) => + ds.head ==== scope + } } - }), - property("global scope last", keysGen.forAll.map { keys => - allDelegates(keys) { (_, ds) => - ds.last ==== Scope.GlobalScope + ), + property( + "global scope last", + keysGen.forAll.map { keys => + allDelegates(keys) { (_, ds) => + ds.last ==== Scope.GlobalScope + } } - }), + ), property( "Project axis delegates to BuildRef then Zero", keysGen.forAll.map { keys => - allDelegates(keys) { - (key, ds) => - key.project match { - case Zero => success // filtering out of testing - case Select(rr: ResolvedReference) => - rr match { - case BuildRef(_) => - assert(ds.indexOf(key) < ds.indexOf(key.copy(project = Zero))) - case ProjectRef(uri, _) => - val buildScoped = key.copy(project = Select(BuildRef(uri))) - val idxKey = ds.indexOf(key) - val idxB = ds.indexOf(buildScoped) - val z = key.copy(project = Zero) - val idxZ = ds.indexOf(z) - (z ==== Scope.GlobalScope) - .or( - assert((idxKey < idxB) && (idxB < idxZ)) - .log(s"idxKey = $idxKey; idxB = $idxB; idxZ = $idxZ") - ) - } - case Select(_) | This => - failure.log(s"Scope's reference should be resolved, but was ${key.project}") - } + allDelegates(keys) { (key, ds) => + key.project match { + case Zero => success // filtering out of testing + case Select(rr: ResolvedReference) => + rr match { + case BuildRef(_) => + assert(ds.indexOf(key) < ds.indexOf(key.copy(project = Zero))) + case ProjectRef(uri, _) => + val buildScoped = key.copy(project = Select(BuildRef(uri))) + val idxKey = ds.indexOf(key) + val idxB = ds.indexOf(buildScoped) + val z = key.copy(project = Zero) + val idxZ = ds.indexOf(z) + (z ==== Scope.GlobalScope) + .or( + assert((idxKey < idxB) && (idxB < idxZ)) + .log(s"idxKey = $idxKey; idxB = $idxB; idxZ = $idxZ") + ) + } + case Select(_) | This => + failure.log(s"Scope's reference should be resolved, but was ${key.project}") + } } } ), property( "Config axis delegates to parent configuration", keysGen.forAll.map { keys => - allDelegates(keys) { - (key, ds) => - key.config match { - case Zero => success - case Select(config) => - key.project match { - case Select(p @ ProjectRef(_, _)) => - val r = keys.env.resolve(p) - keys.env.inheritConfig(r, config).headOption.fold(success) { parent => - val idxKey = ds.indexOf(key) - val a = key.copy(config = Select(parent)) - val idxA = ds.indexOf(a) - assert(idxKey < idxA) - .log(s"idxKey = $idxKey; a = $a; idxA = $idxA") - } - case _ => success - } - case _ => success - } + allDelegates(keys) { (key, ds) => + key.config match { + case Zero => success + case Select(config) => + key.project match { + case Select(p @ ProjectRef(_, _)) => + val r = keys.env.resolve(p) + keys.env.inheritConfig(r, config).headOption.fold(success) { parent => + val idxKey = ds.indexOf(key) + val a = key.copy(config = Select(parent)) + val idxA = ds.indexOf(a) + assert(idxKey < idxA) + .log(s"idxKey = $idxKey; a = $a; idxA = $idxA") + } + case _ => success + } + case _ => success + } } } ) diff --git a/main/src/test/scala/ParseKey.scala b/main/src/test/scala/ParseKey.scala index d5405a0da..e36600378 100644 --- a/main/src/test/scala/ParseKey.scala +++ b/main/src/test/scala/ParseKey.scala @@ -62,12 +62,11 @@ object ParseKey extends Properties { val mask = if (showZeroConfig) skm.mask.copy(project = true) else skm.mask val expected = resolve(structure, key, mask) - parseCheck(structure, key, mask, showZeroConfig)( - sk => - hedgehog.Result - .assert(Project.equal(sk, expected, mask)) - .log(s"$sk.key == $expected.key: ${sk.key == expected.key}") - .log(s"${sk.scope} == ${expected.scope}: ${Scope.equal(sk.scope, expected.scope, mask)}") + parseCheck(structure, key, mask, showZeroConfig)(sk => + hedgehog.Result + .assert(Project.equal(sk, expected, mask)) + .log(s"$sk.key == $expected.key: ${sk.key == expected.key}") + .log(s"${sk.scope} == ${expected.scope}: ${Scope.equal(sk.scope, expected.scope, mask)}") ).log(s"Expected: ${displayFull(expected)}") } @@ -77,11 +76,10 @@ object ParseKey extends Properties { // skip when config axis is set to Zero val hasZeroConfig = key.scope.config ==== Zero val showZeroConfig = hasAmbiguousLowercaseAxes(key, structure) - parseCheck(structure, key, mask, showZeroConfig)( - sk => - (hasZeroConfig or sk.scope.project ==== Select(structure.current)) - .log(s"parsed subproject: ${sk.scope.project}") - .log(s"current subproject: ${structure.current}") + parseCheck(structure, key, mask, showZeroConfig)(sk => + (hasZeroConfig or sk.scope.project ==== Select(structure.current)) + .log(s"parsed subproject: ${sk.scope.project}") + .log(s"current subproject: ${structure.current}") ) } @@ -96,8 +94,8 @@ object ParseKey extends Properties { val mask = ScopeMask(config = false) val resolvedConfig = Resolve.resolveConfig(structure.extra, key.key, mask)(key.scope).config val showZeroConfig = hasAmbiguousLowercaseAxes(key, structure) - parseCheck(structure, key, mask, showZeroConfig)( - sk => (sk.scope.config ==== resolvedConfig) or (sk.scope ==== Scope.GlobalScope) + parseCheck(structure, key, mask, showZeroConfig)(sk => + (sk.scope.config ==== resolvedConfig) or (sk.scope ==== Scope.GlobalScope) ).log(s"Expected configuration: ${resolvedConfig map (_.name)}") } @@ -138,8 +136,8 @@ object ParseKey extends Properties { import skm._ val resolvedKey = resolve(structure, key, mask) val proj = resolvedKey.scope.project.toOption - val maybeResolvedProj = proj.collect { - case ref: ResolvedReference => ref + val maybeResolvedProj = proj.collect { case ref: ResolvedReference => + ref } val checkName = for { configKey <- resolvedKey.scope.config.toOption @@ -175,7 +173,7 @@ object ParseKey extends Properties { .log(s"Key string: '$s'") .log(s"Parsed: ${parsed.map(displayFull)}") .log(s"Structure: $structure") - ) + ) } // pickN is a function that randomly picks load % items from the "from" sequence. diff --git a/main/src/test/scala/ParserSpec.scala b/main/src/test/scala/ParserSpec.scala index 5022fa9d3..cce247cd3 100644 --- a/main/src/test/scala/ParserSpec.scala +++ b/main/src/test/scala/ParserSpec.scala @@ -21,19 +21,30 @@ import hedgehog.runner._ object ParserSpec extends Properties { override def tests: List[Test] = List( - property("can parse any build", TestBuild.uriGen.forAll.map { uri => - parse(buildURI = uri) - }), - property("can parse any project", TestBuild.nonEmptyId.forAll.map { id => - parse(projectID = id) - }), - property("can parse any configuration", TestBuild.nonEmptyId.map(_.capitalize).forAll.map { - name => + property( + "can parse any build", + TestBuild.uriGen.forAll.map { uri => + parse(buildURI = uri) + } + ), + property( + "can parse any project", + TestBuild.nonEmptyId.forAll.map { id => + parse(projectID = id) + } + ), + property( + "can parse any configuration", + TestBuild.nonEmptyId.map(_.capitalize).forAll.map { name => parse(configName = name) - }), - property("can parse any attribute", TestBuild.kebabIdGen.forAll.map { name => - parse(attributeName = name) - }) + } + ), + property( + "can parse any attribute", + TestBuild.kebabIdGen.forAll.map { name => + parse(attributeName = name) + } + ) ) private def parse( diff --git a/main/src/test/scala/ProjectMacro.scala b/main/src/test/scala/ProjectMacro.scala index d66146688..9e598aa47 100644 --- a/main/src/test/scala/ProjectMacro.scala +++ b/main/src/test/scala/ProjectMacro.scala @@ -67,6 +67,6 @@ object ProjectMacro extends Properties("ProjectMacro") { s"Actual id: ${p.id}" |: s"Actual dir: ${p.base}" |: (p.id == id) && - (p.base.getName == dir) + (p.base.getName == dir) } } diff --git a/main/src/test/scala/sbt/internal/TestBuild.scala b/main/src/test/scala/sbt/internal/TestBuild.scala index b9290240e..6617cd8a6 100644 --- a/main/src/test/scala/sbt/internal/TestBuild.scala +++ b/main/src/test/scala/sbt/internal/TestBuild.scala @@ -111,8 +111,7 @@ abstract class TestBuild { def makeKey(task: ScopeAxis[AttributeKey[_]]) = ScopedKey(skey.scope.copy(task = task), skey.key) val hasGlobal = tasks(Zero) - if (hasGlobal) - zero += skey + if (hasGlobal) zero += skey else { val keys = tasks map makeKey keys.size match { @@ -305,31 +304,34 @@ abstract class TestBuild { ): Gen[Vector[Proj]] = genAcyclic(maxDeps, genID, count) { (id: String) => for (cs <- confs) yield { (deps: Seq[Proj]) => - new Proj(id, deps.map { dep => - ProjectRef(build, dep.id) - }, cs) + new Proj( + id, + deps.map { dep => + ProjectRef(build, dep.id) + }, + cs + ) } } - def genConfigs( - implicit genName: Gen[String], + def genConfigs(implicit + genName: Gen[String], maxDeps: Range[Int], count: Range[Int] ): Gen[Vector[Configuration]] = - genAcyclicDirect[Configuration, String](maxDeps, genName, count)( - (key, deps) => - Configuration - .of(key.capitalize, key) - .withExtendsConfigs(deps.toVector) + genAcyclicDirect[Configuration, String](maxDeps, genName, count)((key, deps) => + Configuration + .of(key.capitalize, key) + .withExtendsConfigs(deps.toVector) ) - def genTasks( - implicit genName: Gen[String], + def genTasks(implicit + genName: Gen[String], maxDeps: Range[Int], count: Range[Int] ): Gen[Vector[Taskk]] = - genAcyclicDirect[Taskk, String](maxDeps, genName, count)( - (key, deps) => new Taskk(AttributeKey[String](key), deps) + genAcyclicDirect[Taskk, String](maxDeps, genName, count)((key, deps) => + new Taskk(AttributeKey[String](key), deps) ) def genAcyclicDirect[A, T](maxDeps: Range[Int], keyGen: Gen[T], max: Range[Int])( diff --git a/main/src/test/scala/sbt/internal/server/DefinitionTest.scala b/main/src/test/scala/sbt/internal/server/DefinitionTest.scala index f2c477e85..fca002b04 100644 --- a/main/src/test/scala/sbt/internal/server/DefinitionTest.scala +++ b/main/src/test/scala/sbt/internal/server/DefinitionTest.scala @@ -34,7 +34,9 @@ object DefinitionTest extends verify.BasicTestSuite { ) } - test("it should find valid standard short scala identifier when caret is set at the start of it") { + test( + "it should find valid standard short scala identifier when caret is set at the start of it" + ) { assert(textProcessor.identifier("val a = 0", 4) == Some("a")) } diff --git a/main/src/test/scala/testpkg/CompletionSpec.scala b/main/src/test/scala/testpkg/CompletionSpec.scala index 686001859..b530d5af6 100644 --- a/main/src/test/scala/testpkg/CompletionSpec.scala +++ b/main/src/test/scala/testpkg/CompletionSpec.scala @@ -22,12 +22,18 @@ import _root_.sbt.internal.util.complete.Parser object CompletionSpec extends Properties { override def tests: List[Test] = List( - property("can complete any build", TestBuild.uriGen.forAll.map { uri => - complete(buildURI = uri, line = "{", expected = "{" + uri.toString + "}") - }), - property("can complete any project", TestBuild.nonEmptyId.forAll.map { id => - complete(projectID = id, line = id.head.toString, expected = id) - }), + property( + "can complete any build", + TestBuild.uriGen.forAll.map { uri => + complete(buildURI = uri, line = "{", expected = "{" + uri.toString + "}") + } + ), + property( + "can complete any project", + TestBuild.nonEmptyId.forAll.map { id => + complete(projectID = id, line = id.head.toString, expected = id) + } + ), property( "can complete any configuration", TestBuild.nonEmptyId.forAll.map { name => diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 15970e90e..03be65eda 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -79,7 +79,8 @@ object Dependencies { def addSbtZincCompile = addSbtModule(sbtZincPath, "zincCompile", zincCompile) def addSbtZincCompileCore = addSbtModule(sbtZincPath, "zincCompileCore", zincCompileCore) - val lmCoursierShaded = "io.get-coursier" %% "lm-coursier-shaded" % "2.0.13" + // val lmCoursierShaded = "io.get-coursier" %% "lm-coursier-shaded" % "2.0.10" + val lmCoursierShaded = "org.scala-sbt" %% "librarymanagement-coursier" % "2.0.0-alpha1" lazy val sjsonNewVersion = "0.10.0" def sjsonNew(n: String) = Def.setting( @@ -89,7 +90,8 @@ object Dependencies { val sjsonNewMurmurhash = sjsonNew("sjson-new-murmurhash") val sjsonNewCore = sjsonNew("sjson-new-core") - val eval = ("com.eed3si9n.eval" % "eval" % "0.1.0").cross(CrossVersion.full) + // val eval = ("com.eed3si9n.eval" % "eval" % "0.1.0").cross(CrossVersion.full) + val eval = "com.eed3si9n.eval" % "eval_3.1.1" % "0.1.0" // JLine 3 version must be coordinated together with JAnsi version // and the JLine 2 fork version, which uses the same JAnsi @@ -108,6 +110,8 @@ object Dependencies { val scalaVerify = "com.eed3si9n.verify" %% "verify" % "1.0.0" val templateResolverApi = "org.scala-sbt" % "template-resolver" % "0.1" + val scalaCompiler = "org.scala-lang" %% "scala3-compiler" % scala3 + val scalaXml = Def.setting( if (scalaBinaryVersion.value == "3") { "org.scala-lang.modules" %% "scala-xml" % "2.1.0" diff --git a/tasks/src/main/scala/sbt/CompletionService.scala b/tasks/src/main/scala/sbt/CompletionService.scala index 352a21d05..672edd725 100644 --- a/tasks/src/main/scala/sbt/CompletionService.scala +++ b/tasks/src/main/scala/sbt/CompletionService.scala @@ -7,7 +7,7 @@ package sbt -trait CompletionService[A, R] { +trait CompletionService[A, R]: /** * Submits a work node A with work that returns R. In Execute this is used for tasks returning @@ -20,7 +20,7 @@ trait CompletionService[A, R] { * In Execute this is used for tasks returning sbt.Completed. */ def take(): R -} +end CompletionService import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.{ diff --git a/tasks/src/main/scala/sbt/Execute.scala b/tasks/src/main/scala/sbt/Execute.scala index 56c7acb20..8bec9ba3f 100644 --- a/tasks/src/main/scala/sbt/Execute.scala +++ b/tasks/src/main/scala/sbt/Execute.scala @@ -59,7 +59,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( config: Config, triggers: Triggers[F], progress: ExecuteProgress[F] -)(implicit view: NodeView[F]) { +)(using view: NodeView[F]) { type Strategy = CompletionService[F[Any], Completed] private[this] val forward = idMap[F[Any], IDSet[F[Any]]] @@ -85,12 +85,12 @@ private[sbt] final class Execute[F[_] <: AnyRef]( def dump: String = "State: " + state.toString + "\n\nResults: " + results + "\n\nCalls: " + callers + "\n\n" - def run[A](root: F[A])(implicit strategy: Strategy): Result[A] = + def run[A](root: F[A])(using strategy: Strategy): Result[A] = try { - runKeep(root)(strategy)(root) + runKeep(root)(root) } catch { case i: Incomplete => Result.Inc(i) } - def runKeep[A](root: F[A])(implicit strategy: Strategy): RMap[F, Result] = { + def runKeep[A](root: F[A])(using strategy: Strategy): RMap[F, Result] = { assert(state.isEmpty, "Execute already running/ran.") addNew(root) @@ -102,7 +102,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( finalResults } - def processAll()(implicit strategy: Strategy): Unit = { + def processAll()(using strategy: Strategy): Unit = { @tailrec def next(): Unit = { pre { assert(reverse.nonEmpty, "Nothing to process.") @@ -135,7 +135,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( } def dumpCalling: String = state.filter(_._2 == Calling).mkString("\n\t") - def call[A](node: F[A], target: F[A])(implicit strategy: Strategy): Unit = { + def call[A](node: F[A], target: F[A])(using strategy: Strategy): Unit = { if (config.checkCycles) cycleCheck(node, target) pre { assert(running(node)) @@ -160,7 +160,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( } } - def retire[A](node: F[A], result: Result[A])(implicit strategy: Strategy): Unit = { + def retire[A](node: F[A], result: Result[A])(using strategy: Strategy): Unit = { pre { assert(running(node) | calling(node)) readyInv(node) @@ -194,7 +194,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( case Result.Inc(i) => Result.Inc(Incomplete(Some(node), tpe = i.tpe, causes = i :: Nil)) } - def notifyDone[A](node: F[A], dependent: F[Any])(implicit strategy: Strategy): Unit = { + def notifyDone[A](node: F[A], dependent: F[Any])(using strategy: Strategy): Unit = { val f = forward(dependent) f -= node.asInstanceOf if (f.isEmpty) { @@ -208,7 +208,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( * inputs and dependencies have completed. Its computation is then evaluated and made available * for nodes that have it as an input. */ - def addChecked[A](node: F[A])(implicit strategy: Strategy): Unit = { + def addChecked[A](node: F[A])(using strategy: Strategy): Unit = { if (!added(node)) addNew(node) post { addedInv(node) } @@ -219,7 +219,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( * have finished, the node's computation is scheduled to run. The node's dependencies will be * added (transitively) if they are not already registered. */ - def addNew[A](node: F[A])(implicit strategy: Strategy): Unit = { + def addNew[A](node: F[A])(using strategy: Strategy): Unit = { pre { newPre(node) } val v = register(node) @@ -253,7 +253,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( * Called when a pending 'node' becomes runnable. All of its dependencies must be done. This * schedules the node's computation with 'strategy'. */ - def ready[A](node: F[A])(implicit strategy: Strategy): Unit = { + def ready[A](node: F[A])(using strategy: Strategy): Unit = { pre { assert(pending(node)) readyInv(node) @@ -279,7 +279,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( } /** Send the work for this node to the provided Strategy. */ - def submit[A](node: F[A])(implicit strategy: Strategy): Unit = { + def submit[A](node: F[A])(using strategy: Strategy): Unit = { val v = viewCache(node) val rs = v.alist.transform[F, Result](v.in)(getResult) // v.alist.transform(v.in)(getResult) @@ -290,7 +290,7 @@ private[sbt] final class Execute[F[_] <: AnyRef]( * Evaluates the computation 'f' for 'node'. This returns a Completed instance, which contains the * post-processing to perform after the result is retrieved from the Strategy. */ - def work[A](node: F[A], f: => Either[F[A], A])(implicit strategy: Strategy): Completed = { + def work[A](node: F[A], f: => Either[F[A], A])(using strategy: Strategy): Completed = { progress.beforeWork(node.asInstanceOf) val rawResult = wideConvert(f).left.map { case i: Incomplete => if (config.overwriteNode(i)) i.copy(node = Some(node)) else i diff --git a/util-collection/src/main/scala/sbt/internal/util/Settings.scala b/util-collection/src/main/scala/sbt/internal/util/Settings.scala index 9d6e7b0d7..fdc627571 100644 --- a/util-collection/src/main/scala/sbt/internal/util/Settings.scala +++ b/util-collection/src/main/scala/sbt/internal/util/Settings.scala @@ -72,12 +72,12 @@ trait Init[ScopeType]: type SettingSeq[A] = Seq[Setting[A]] type ScopedMap = IMap[ScopedKey, SettingSeq] type CompiledMap = Map[ScopedKey[_], Compiled[_]] - type MapScoped = ScopedKey ~> ScopedKey + type MapScoped = [a] => ScopedKey[a] => ScopedKey[a] type ValidatedRef[A] = Either[Undefined, ScopedKey[A]] type ValidatedInit[A] = Either[Seq[Undefined], Initialize[A]] - type ValidateRef = ScopedKey ~> ValidatedRef + type ValidateRef = [a] => ScopedKey[a] => ValidatedRef[a] type ScopeLocal = ScopedKey[_] => Seq[Setting[_]] - type MapConstant = ScopedKey ~> Option + type MapConstant = [a] => ScopedKey[a] => Option[a] private[sbt] abstract class ValidateKeyRef { def apply[T](key: ScopedKey[T], selfRefOk: Boolean): ValidatedRef[T] @@ -174,11 +174,10 @@ trait Init[ScopeType]: def getValue[T](s: Settings[ScopeType], k: ScopedKey[T]) = s.get(k.scope, k.key) getOrElse (throw new InvalidReference(k)) - def asFunction[T](s: Settings[ScopeType]): ScopedKey[T] => T = k => getValue(s, k) + def asFunction[A](s: Settings[ScopeType]): ScopedKey[A] => A = k => getValue(s, k) - def mapScope(f: ScopeType => ScopeType): MapScoped = new MapScoped { - def apply[T](k: ScopedKey[T]): ScopedKey[T] = k.copy(scope = f(k.scope)) - } + def mapScope(f: ScopeType => ScopeType): MapScoped = + [a] => (k: ScopedKey[a]) => k.copy(scope = f(k.scope)) private[this] def applyDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] = { val result = new java.util.LinkedHashSet[Setting[_]]