Merge branch 'develop' into fix/8429-actioncache-symlink-optimization

This commit is contained in:
tellorian 2026-01-11 09:39:49 +09:00 committed by GitHub
commit d63191bb16
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 147 additions and 57 deletions

View File

@ -439,6 +439,7 @@ lazy val workerProj = (project in file("worker"))
mimaBinaryIssueFilters ++= Vector(
exclude[MissingClassProblem]("com.google.gson.typeadapters.RuntimeTypeAdapterFactory"),
exclude[IncompatibleResultTypeProblem]("sbt.internal.worker1.WorkerMain.mkGson"),
exclude[DirectMissingMethodProblem]("sbt.internal.worker1.TestInfo.this"),
),
)
.configure(addSbtIOForTest)
@ -716,6 +717,7 @@ lazy val mainProj = (project in file("main"))
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
exclude[ReversedMissingMethodProblem]("sbt.ProjectMatrix.*"),
exclude[DirectMissingMethodProblem]("sbt.internal.ConsoleProject.*"),
),
)
.dependsOn(lmCore, lmIvy, lmCoursierShadedPublishing)

View File

@ -33,6 +33,9 @@ object ScriptedRunnerImpl {
case ScriptedTest(group, name) =>
runner.scriptedTest(group, name, logger, context)
}
if (tests.nonEmpty && allTests.isEmpty) {
sys.error(s"No tests found matching: ${tests.mkString(", ")}")
}
runAll(allTests)
}
def runAll(tests: Seq[() => Option[String]]): Unit = {

View File

@ -45,6 +45,7 @@ private[sbt] object ForkTests:
converter: FileConverter,
fork: ForkOptions,
log: Logger,
parallelism: Option[Int],
tags: (Tag, Int)*
): Task[TestOutput] = {
import std.TaskExtra.*
@ -56,9 +57,10 @@ private[sbt] object ForkTests:
if opts.tests.isEmpty then
constant(TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty))
else
mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel).tagw(
config.tags*
)
mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel, parallelism)
.tagw(
config.tags*
)
main.tagw(tags*).dependsOn(all(opts.setup)*) flatMap { results =>
all(opts.cleanup).join.map(_ => results)
}
@ -72,10 +74,11 @@ private[sbt] object ForkTests:
converter: FileConverter,
fork: ForkOptions,
log: Logger,
parallelism: Option[Int],
tags: (Tag, Int)*
): Task[TestOutput] = {
val opts = processOptions(config, tests, log)
apply(runners, opts, config, classpath, converter, fork, log, tags*)
apply(runners, opts, config, classpath, converter, fork, log, parallelism, tags*)
}
def apply(
@ -86,9 +89,10 @@ private[sbt] object ForkTests:
converter: FileConverter,
fork: ForkOptions,
log: Logger,
parallelism: Option[Int],
tag: Tag
): Task[TestOutput] = {
apply(runners, tests, config, classpath, converter, fork, log, tag -> 1)
apply(runners, tests, config, classpath, converter, fork, log, parallelism, tag -> 1)
}
private def mainTestTask(
@ -98,7 +102,8 @@ private[sbt] object ForkTests:
converter: FileConverter,
fork: ForkOptions,
log: Logger,
parallel: Boolean
parallel: Boolean,
parallelism: Option[Int]
): Task[TestOutput] =
std.TaskExtra.task {
val testListeners = opts.testListeners.flatMap:
@ -148,6 +153,7 @@ private[sbt] object ForkTests:
null,
UTerminal.isAnsiSupported,
parallel,
parallelism.map(Integer.valueOf).orNull,
ArrayList(taskdefs.asJava),
ArrayList(testRunners.asJava),
)

View File

@ -201,6 +201,7 @@ object Defaults extends BuildCommon {
javaHomes :== ListMap.empty,
fullJavaHomes := CrossJava.expandJavaHomes(discoveredJavaHomes.value ++ javaHomes.value),
testForkedParallel :== true,
testForkedParallelism :== None,
javaOptions :== Nil,
sbtPlugin :== false,
isMetaBuild :== false,
@ -773,6 +774,27 @@ object Defaults extends BuildCommon {
javacOptions :== Nil,
scalacOptions :== Nil,
scalaVersion := appConfiguration.value.provider.scalaProvider.version,
consoleProject := ConsoleProject.consoleProjectTask.value,
consoleProject / scalaInstance := {
val topLoader = classOf[org.jline.terminal.Terminal].getClassLoader
val scalaProvider = appConfiguration.value.provider.scalaProvider
val allJars = scalaProvider.jars
val libraryJars = allJars.filter { jar =>
jar.getName == "scala-library.jar" || jar.getName.startsWith("scala3-library_3")
}
val compilerJar = allJars.filter { jar =>
jar.getName == "scala-compiler.jar" || jar.getName.startsWith("scala3-compiler_3")
}
ScalaInstance(scalaProvider.version, scalaProvider.launcher)
Compiler.makeScalaInstance(
scalaProvider.version,
libraryJars,
allJars.toSeq,
Seq.empty,
state.value,
topLoader,
)
},
derive(crossScalaVersions := Seq(scalaVersion.value)),
derive(compilersSetting),
derive(scalaBinaryVersion := binaryScalaVersion(scalaVersion.value)),
@ -1066,7 +1088,6 @@ object Defaults extends BuildCommon {
cleanKeepGlobs ++= historyPath.value.map(_.toGlob).toVector,
// clean := Def.taskDyn(Clean.task(resolvedScoped.value.scope, full = true)).value,
clean := Clean.scopedTask.value,
consoleProject := consoleProjectTask.value,
transitiveDynamicInputs := Def.uncached(WatchTransitiveDependencies.task.value),
)
@ -1123,8 +1144,9 @@ object Defaults extends BuildCommon {
.value,
testQuick / testFilter := Def.uncached(IncrementalTest.filterTask.value),
extraTestDigests ++= IncrementalTest.extraTestDigestsTask.value,
executeTests := Def.uncached({
executeTests := Def.uncached(Def.taskDyn {
import sbt.TupleSyntax.*
val fpm = testForkedParallelism.value
(
test / streams,
loadedTestFrameworks,
@ -1132,25 +1154,13 @@ object Defaults extends BuildCommon {
(test / testGrouping),
(test / testExecution),
(test / fullClasspath),
testForkedParallel,
testForkedParallel.toTaskable,
(test / javaOptions),
(classLoaderLayeringStrategy),
thisProject,
fileConverter,
).flatMapN { (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj, c) =>
allTestGroupsTask(
s,
lt,
tl,
gp,
ex,
cp,
fp,
jo,
clls,
projectId = s"${thisProj.id} / ",
c,
)
allTestGroupsTask(s, lt, tl, gp, ex, cp, fp, fpm, jo, clls, s"${thisProj.id} / ", c)
}
}.value),
// ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value,
@ -1318,6 +1328,7 @@ object Defaults extends BuildCommon {
newConfig,
fullClasspath.value,
testForkedParallel.value,
testForkedParallelism.value,
javaOptions.value,
classLoaderLayeringStrategy.value,
projectId = s"${thisProject.value.id} / ",
@ -1367,6 +1378,7 @@ object Defaults extends BuildCommon {
config,
cp,
forkedParallelExecution = false,
forkedParallelism = None,
javaOptions = Nil,
strategy = ClassLoaderLayeringStrategy.ScalaLibrary,
projectId = "",
@ -1392,6 +1404,7 @@ object Defaults extends BuildCommon {
config,
cp,
forkedParallelExecution,
forkedParallelism = None,
javaOptions = Nil,
strategy = ClassLoaderLayeringStrategy.ScalaLibrary,
projectId = "",
@ -1399,6 +1412,36 @@ object Defaults extends BuildCommon {
)
}
// Binary compatibility overload for sbt 2.0.0-RC7
/** Binary-compatibility overload of `allTestGroupsTask` kept for callers compiled
  * against sbt 2.0.0-RC7, which predates the `forkedParallelism` parameter.
  *
  * Delegates to the primary overload, passing `forkedParallelism = None` (no
  * explicit cap on forked-test threads; presumably the forked JVM then falls
  * back to the available-processor count — confirm against ForkTestMain).
  * All other arguments are forwarded unchanged.
  */
private[sbt] def allTestGroupsTask(
s: TaskStreams,
frameworks: Map[TestFramework, Framework],
loader: ClassLoader,
groups: Seq[Tests.Group],
config: Tests.Execution,
cp: Classpath,
forkedParallelExecution: Boolean,
javaOptions: Seq[String],
strategy: ClassLoaderLayeringStrategy,
projectId: String,
converter: FileConverter,
): Task[Tests.Output] = {
// Forward to the full overload; None means "no user-specified parallelism".
allTestGroupsTask(
s,
frameworks,
loader,
groups,
config,
cp,
forkedParallelExecution,
forkedParallelism = None,
javaOptions,
strategy,
projectId,
converter,
)
}
private[sbt] def allTestGroupsTask(
s: TaskStreams,
frameworks: Map[TestFramework, Framework],
@ -1407,6 +1450,7 @@ object Defaults extends BuildCommon {
config: Tests.Execution,
cp: Classpath,
forkedParallelExecution: Boolean,
forkedParallelism: Option[Int],
javaOptions: Seq[String],
strategy: ClassLoaderLayeringStrategy,
projectId: String,
@ -1435,7 +1479,9 @@ object Defaults extends BuildCommon {
case Tests.SubProcess(opts) =>
s.log.debug(s"javaOptions: ${opts.runJVMOptions}")
val forkedConfig = config.copy(parallel = config.parallel && forkedParallelExecution)
s.log.debug(s"Forking tests - parallelism = ${forkedConfig.parallel}")
s.log.debug(
s"Forking tests - parallelism = ${forkedConfig.parallel}, threads = ${forkedParallelism.getOrElse("auto")}"
)
ForkTests(
runners,
processedOptions(group),
@ -1444,6 +1490,7 @@ object Defaults extends BuildCommon {
converter,
opts,
s.log,
forkedParallelism,
(Tags.ForkedTestGroup, 1) +: group.tags*
)
case Tests.InProcess =>
@ -2026,12 +2073,7 @@ object Defaults extends BuildCommon {
analysis.infos.allInfos.values.map(_.getMainClasses).flatten.toSeq.sorted
}
def consoleProjectTask =
Def.task {
ConsoleProject(state.value, (consoleProject / initialCommands).value)(using streams.value.log)
println()
}
def consoleProjectTask = ConsoleProject.consoleProjectTask
def consoleTask: Initialize[Task[Unit]] = consoleTask(fullClasspath, console)
def consoleQuickTask = consoleTask(externalDependencyClasspath, consoleQuick)
def consoleTask(classpath: TaskKey[Classpath], task: TaskKey[?]): Initialize[Task[Unit]] =

View File

@ -370,6 +370,7 @@ object Keys {
@transient
val testListeners = taskKey[Seq[TestReportListener]]("Defines test listeners.").withRank(DTask)
val testForkedParallel = settingKey[Boolean]("Whether forked tests should be executed in parallel").withRank(CTask)
val testForkedParallelism = settingKey[Option[Int]]("Maximum number of parallel test threads when using testForkedParallel. Defaults to the number of available processors.").withRank(CTask)
val testExecution = taskKey[Tests.Execution]("Settings controlling test execution").withRank(DTask)
val testFilter = taskKey[Seq[String] => Seq[String => Boolean]]("Filter controlling whether the test is executed").withRank(DTask)
val testResultLogger = settingKey[TestResultLogger]("Logs results after a test task completes.").withRank(DTask)

View File

@ -12,58 +12,84 @@ package internal
import sbt.ProjectExtra.extract
import sbt.internal.classpath.AlternativeZincUtil
import sbt.internal.inc.{ ScalaInstance, ZincLmUtil }
import sbt.internal.inc.classpath.ClasspathUtil
import sbt.internal.util.Terminal
import sbt.io.IO
import sbt.librarymanagement.DependencyResolution
import sbt.util.Logger
import xsbti.HashedVirtualFileRef
import xsbti.compile.ClasspathOptionsUtil
object ConsoleProject {
def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(
using log: Logger
object ConsoleProject:
/** Task implementation backing the `consoleProject` key.
  *
  * Resolves, within the task graph, the inputs needed to start the build-definition
  * REPL — the `consoleProject`-scoped Scala instance, the root project's dependency
  * resolution, and the compiler-bridge binaries — then hands them to
  * `ConsoleProject.apply` together with the configured initial commands and the
  * task's logger. Prints a blank line after the REPL session ends.
  */
def consoleProjectTask =
Def.task {
val st = Keys.state.value
// Scala instance scoped to consoleProject (set up separately to include the
// compiler jars needed by the REPL).
val si = (Keys.consoleProject / Keys.scalaInstance).value
// Dependency resolution comes from the root project rather than the current one.
val dr = (LocalRootProject / Keys.dependencyResolution).value
val compilerBridgeBinaryBin =
(LocalRootProject / Keys.consoleProject / Keys.scalaCompilerBridgeBin).value
ConsoleProject(
st,
si,
dr,
compilerBridgeBinaryBin,
(Keys.consoleProject / Keys.initialCommands).value
)(using
Keys.streams.value.log
)
// Trailing newline so the shell prompt is not glued to REPL output.
println()
}
def apply(
state: State,
si: ScalaInstance,
dr: DependencyResolution,
compilerBridgeBinaryBin: Seq[HashedVirtualFileRef],
extra: String,
cleanupCommands: String = "",
options: Seq[String] = Nil
)(using
log: Logger
): Unit = {
val extracted = Project.extract(state)
val cpImports = new Imports(extracted, state)
// Bindings are blocked by https://github.com/scala/scala3/issues/5069
val bindings =
("currentState" -> state) :: ("extracted" -> extracted) :: ("cpHelpers" -> cpImports) :: Nil
val unit = extracted.currentUnit
val (state1, dependencyResolution) =
extracted.runTask(Keys.dependencyResolution, state)
val (_, scalaCompilerBridgeBinaryBin) =
extracted.runTask(Keys.consoleProject / Keys.scalaCompilerBridgeBin, state1)
val tempDir0 = extracted.get(Keys.consoleProject / Keys.taskTemporaryDirectory)
val tempDir = IO.createUniqueDirectory(tempDir0).toPath()
val conv = extracted.get(Keys.fileConverter)
val scalaInstance = {
val scalaProvider = state.configuration.provider.scalaProvider
ScalaInstance(scalaProvider.version, scalaProvider)
}
val g = BuildPaths.getGlobalBase(state)
val zincDir = BuildPaths.getZincDirectory(state, g)
val app = state.configuration
val launcher = app.provider.scalaProvider.launcher
val compiler = scalaCompilerBridgeBinaryBin.toList match
val compiler = compilerBridgeBinaryBin.toList match
case jar :: xs =>
AlternativeZincUtil.scalaCompiler(
scalaInstance = scalaInstance,
scalaInstance = si,
classpathOptions = ClasspathOptionsUtil.repl,
compilerBridgeJar = conv.toPath(jar).toFile(),
classLoaderCache = state1.get(BasicKeys.classLoaderCache)
classLoaderCache = state.get(BasicKeys.classLoaderCache)
)
case Nil =>
ZincLmUtil.scalaCompiler(
scalaInstance = scalaInstance,
scalaInstance = si,
classpathOptions = ClasspathOptionsUtil.repl,
globalLock = launcher.globalLock,
componentProvider = app.provider.components,
secondaryCacheDir = Option(zincDir),
dependencyResolution = dependencyResolution,
dependencyResolution = dr,
compilerBridgeSource =
extracted.get(Keys.consoleProject / Keys.scalaCompilerBridgeSource),
scalaJarsTarget = zincDir,
classLoaderCache = state1.get(BasicKeys.classLoaderCache),
classLoaderCache = state.get(BasicKeys.classLoaderCache),
log = log
)
val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1))
val importString = imports.mkString("", ";\n", ";\n\n")
val initCommands = importString + extra
val loader = ClasspathUtil.makeLoader(unit.classpath, si, tempDir)
val terminal = Terminal.get
// TODO - Hook up dsl classpath correctly...
(new Console(compiler))(
@ -72,7 +98,7 @@ object ConsoleProject {
initCommands,
cleanupCommands,
terminal
)(Some(unit.loader), bindings).get
)(Some(loader), bindings).get
()
}
@ -84,4 +110,4 @@ object ConsoleProject {
implicit def settingKeyEvaluate[T](s: SettingKey[T]): Evaluate[T] = new Evaluate(get(s))
}
final class Evaluate[T] private[sbt] (val eval: T)
}
end ConsoleProject

View File

@ -62,8 +62,10 @@ class GCMonitor(logger: Logger) extends GCMonitorBase with AutoCloseable {
override protected def emitWarning(total: Long, over: Option[Long]): Unit = {
val totalSeconds = total / 1000.0
val amountMsg = over.fold(s"$totalSeconds seconds") { d =>
"In the last " + (d / 1000.0).ceil.toInt + f" seconds, $totalSeconds (${total.toDouble / d * 100}%.1f%%)"
val amountMsg = over.fold(f"$totalSeconds%.3f CPU seconds") { d =>
val dSeconds = (d / 1000.0).ceil.toInt
val percentage = total.toDouble / d * 100
f"In the last $dSeconds seconds, $totalSeconds%.3f CPU seconds ($percentage%.1f%%) of GC pause"
}
val msg = s"$amountMsg were spent in GC. " +
s"[Heap: ${gbString(runtime.freeMemory())} free " +

View File

@ -104,7 +104,7 @@ object Scripted {
launcherJar: File,
logger: Logger
): Unit = {
logger.info(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}")
logger.info(s"Tests selected: ${args.mkString("\n * ", "\n * ", "\n")}")
logger.info("")
// Force Log4J to not use a thread context classloader otherwise it throws a CCE

View File

@ -589,6 +589,10 @@ class ScriptedRunner {
val groupCount = if (parallelExecution) instances else Int.MaxValue
val scriptedRunners =
runner.batchScriptedRunner(scriptedTests, addTestFile, groupCount, prop, logger)
// Fail if user provided test patterns but none matched any existing test directories
if (tests.nonEmpty && scriptedRunners.isEmpty) {
sys.error(s"No tests found matching: ${tests.mkString(", ")}")
}
if (parallelExecution && instances > 1) {
import scala.collection.parallel.CollectionConverters.*
val parallelRunners = scriptedRunners.toArray.par

View File

@ -311,12 +311,13 @@ public class ForkTestMain {
this.originalOut.flush();
}
private ExecutorService executorService(final boolean parallel) {
private ExecutorService executorService(final boolean parallel, final Integer parallelism) {
if (parallel) {
final int nbThreads = Runtime.getRuntime().availableProcessors();
final int nbThreads =
(parallelism != null && parallelism > 0)
? parallelism
: Runtime.getRuntime().availableProcessors();
logDebug("Create a test executor with a thread pool of " + nbThreads + " threads.");
// more options later...
// TODO we might want to configure the blocking queue with size #proc
return Executors.newFixedThreadPool(nbThreads);
} else {
logDebug("Create a single-thread test executor");
@ -326,7 +327,7 @@ public class ForkTestMain {
private void runTests(TestInfo info, ClassLoader classLoader) throws Exception {
Thread.currentThread().setContextClassLoader(classLoader);
final ExecutorService executor = executorService(info.parallel);
final ExecutorService executor = executorService(info.parallel, info.parallelism);
final TaskDef[] tests = info.taskDefs.toArray(new TaskDef[] {});
final int nFrameworks = info.testRunners.size();
final Logger[] loggers = {remoteLogger(info.ansiCodesSupported)};

View File

@ -33,6 +33,7 @@ public class TestInfo implements Serializable {
public final RunInfo.NativeRunInfo nativeRunInfo;
public final boolean ansiCodesSupported;
public final boolean parallel;
public final Integer parallelism;
public final ArrayList<TaskDef> taskDefs;
public final ArrayList<TestRunner> testRunners;
@ -42,6 +43,7 @@ public class TestInfo implements Serializable {
RunInfo.NativeRunInfo nativeRunInfo,
boolean ansiCodesSupported,
boolean parallel,
Integer parallelism,
ArrayList<TaskDef> taskDefs,
ArrayList<TestRunner> testRunners) {
this.jvm = jvm;
@ -49,6 +51,7 @@ public class TestInfo implements Serializable {
this.nativeRunInfo = nativeRunInfo;
this.ansiCodesSupported = ansiCodesSupported;
this.parallel = parallel;
this.parallelism = parallelism;
this.taskDefs = taskDefs;
this.testRunners = testRunners;
}