From 061145e67b4cc731f87df4d5ada7c94d3f672722 Mon Sep 17 00:00:00 2001 From: MkDev11 Date: Fri, 9 Jan 2026 09:43:50 -0800 Subject: [PATCH 1/5] [2.x] Add testForkedParallelism setting for forked test thread count (#8453) **Problems** When running forked tests, sbt uses `Runtime.getRuntime().availableProcessors()` to determine the thread pool size, ignoring `concurrentRestrictions`. This is inconsistent with non-forked parallel tests. **Expectations** Users should be able to control the number of parallel test threads in forked mode, similar to how `concurrentRestrictions` works for non-forked tests. **Notes** Added a new setting `testForkedParallelism` that allows explicit control: ```scala testForkedParallelism := Some(2) // Use 2 threads testForkedParallelism := None // Use availableProcessors() (default) ``` --- build.sbt | 1 + .../src/main/scala/sbt/ForkTests.scala | 18 ++++-- main/src/main/scala/sbt/Defaults.scala | 59 ++++++++++++++----- main/src/main/scala/sbt/Keys.scala | 1 + .../sbt/internal/worker1/ForkTestMain.java | 11 ++-- .../java/sbt/internal/worker1/TestInfo.java | 3 + 6 files changed, 66 insertions(+), 27 deletions(-) diff --git a/build.sbt b/build.sbt index 8cd469207..762b05261 100644 --- a/build.sbt +++ b/build.sbt @@ -439,6 +439,7 @@ lazy val workerProj = (project in file("worker")) mimaBinaryIssueFilters ++= Vector( exclude[MissingClassProblem]("com.google.gson.typeadapters.RuntimeTypeAdapterFactory"), exclude[IncompatibleResultTypeProblem]("sbt.internal.worker1.WorkerMain.mkGson"), + exclude[DirectMissingMethodProblem]("sbt.internal.worker1.TestInfo.this"), ), ) .configure(addSbtIOForTest) diff --git a/main-actions/src/main/scala/sbt/ForkTests.scala b/main-actions/src/main/scala/sbt/ForkTests.scala index 81d5ac606..25eb00c33 100755 --- a/main-actions/src/main/scala/sbt/ForkTests.scala +++ b/main-actions/src/main/scala/sbt/ForkTests.scala @@ -45,6 +45,7 @@ private[sbt] object ForkTests: converter: FileConverter, fork: ForkOptions, log: Logger, + parallelism: Option[Int], tags: (Tag, Int)* ): Task[TestOutput] = { import std.TaskExtra.* @@ -56,9 +57,10 @@ private[sbt] object ForkTests: if opts.tests.isEmpty then constant(TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty)) else - mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel).tagw( - config.tags* - ) + mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel, parallelism) + .tagw( + config.tags* + ) main.tagw(tags*).dependsOn(all(opts.setup)*) flatMap { results => all(opts.cleanup).join.map(_ => results) } @@ -72,10 +74,11 @@ private[sbt] object ForkTests: converter: FileConverter, fork: ForkOptions, log: Logger, + parallelism: Option[Int], tags: (Tag, Int)* ): Task[TestOutput] = { val opts = processOptions(config, tests, log) - apply(runners, opts, config, classpath, converter, fork, log, tags*) + apply(runners, opts, config, classpath, converter, fork, log, parallelism, tags*) } def apply( @@ -86,9 +89,10 @@ private[sbt] object ForkTests: converter: FileConverter, fork: ForkOptions, log: Logger, + parallelism: Option[Int], tag: Tag ): Task[TestOutput] = { - apply(runners, tests, config, classpath, converter, fork, log, tag -> 1) + apply(runners, tests, config, classpath, converter, fork, log, parallelism, tag -> 1) } private def mainTestTask( @@ -98,7 +102,8 @@ private[sbt] object ForkTests: converter: FileConverter, fork: ForkOptions, log: Logger, - parallel: Boolean + parallel: Boolean, + parallelism: Option[Int] ): Task[TestOutput] = 
std.TaskExtra.task { val testListeners = opts.testListeners.flatMap: @@ -148,6 +153,7 @@ private[sbt] object ForkTests: null, UTerminal.isAnsiSupported, parallel, + parallelism.map(Integer.valueOf).orNull, ArrayList(taskdefs.asJava), ArrayList(testRunners.asJava), ) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index dd9f5a177..96747f085 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -201,6 +201,7 @@ object Defaults extends BuildCommon { javaHomes :== ListMap.empty, fullJavaHomes := CrossJava.expandJavaHomes(discoveredJavaHomes.value ++ javaHomes.value), testForkedParallel :== true, + testForkedParallelism :== None, javaOptions :== Nil, sbtPlugin :== false, isMetaBuild :== false, @@ -1123,8 +1124,9 @@ object Defaults extends BuildCommon { .value, testQuick / testFilter := Def.uncached(IncrementalTest.filterTask.value), extraTestDigests ++= IncrementalTest.extraTestDigestsTask.value, - executeTests := Def.uncached({ + executeTests := Def.uncached(Def.taskDyn { import sbt.TupleSyntax.* + val fpm = testForkedParallelism.value ( test / streams, loadedTestFrameworks, @@ -1132,25 +1134,13 @@ object Defaults extends BuildCommon { (test / testGrouping), (test / testExecution), (test / fullClasspath), - testForkedParallel, + testForkedParallel.toTaskable, (test / javaOptions), (classLoaderLayeringStrategy), thisProject, fileConverter, ).flatMapN { (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj, c) => - allTestGroupsTask( - s, - lt, - tl, - gp, - ex, - cp, - fp, - jo, - clls, - projectId = s"${thisProj.id} / ", - c, - ) + allTestGroupsTask(s, lt, tl, gp, ex, cp, fp, fpm, jo, clls, s"${thisProj.id} / ", c) } }.value), // ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value, @@ -1318,6 +1308,7 @@ object Defaults extends BuildCommon { newConfig, fullClasspath.value, testForkedParallel.value, + testForkedParallelism.value, javaOptions.value, classLoaderLayeringStrategy.value, projectId = s"${thisProject.value.id} / ", @@ -1367,6 +1358,7 @@ object Defaults extends BuildCommon { config, cp, forkedParallelExecution = false, + forkedParallelism = None, javaOptions = Nil, strategy = ClassLoaderLayeringStrategy.ScalaLibrary, projectId = "", @@ -1392,6 +1384,7 @@ object Defaults extends BuildCommon { config, cp, forkedParallelExecution, + forkedParallelism = None, javaOptions = Nil, strategy = ClassLoaderLayeringStrategy.ScalaLibrary, projectId = "", @@ -1399,6 +1392,36 @@ object Defaults extends BuildCommon { ) } + // Binary compatibility overload for sbt 2.0.0-RC7 + private[sbt] def allTestGroupsTask( + s: TaskStreams, + frameworks: Map[TestFramework, Framework], + loader: ClassLoader, + groups: Seq[Tests.Group], + config: Tests.Execution, + cp: Classpath, + forkedParallelExecution: Boolean, + javaOptions: Seq[String], + strategy: ClassLoaderLayeringStrategy, + projectId: String, + converter: FileConverter, + ): Task[Tests.Output] = { + allTestGroupsTask( + s, + frameworks, + loader, + groups, + config, + cp, + forkedParallelExecution, + forkedParallelism = None, + javaOptions, + strategy, + projectId, + converter, + ) + } + private[sbt] def allTestGroupsTask( s: TaskStreams, frameworks: Map[TestFramework, Framework], @@ -1407,6 +1430,7 @@ object Defaults extends BuildCommon { config: Tests.Execution, cp: Classpath, forkedParallelExecution: Boolean, + 
forkedParallelism: Option[Int], javaOptions: Seq[String], strategy: ClassLoaderLayeringStrategy, projectId: String, @@ -1435,7 +1459,9 @@ object Defaults extends BuildCommon { case Tests.SubProcess(opts) => s.log.debug(s"javaOptions: ${opts.runJVMOptions}") val forkedConfig = config.copy(parallel = config.parallel && forkedParallelExecution) - s.log.debug(s"Forking tests - parallelism = ${forkedConfig.parallel}") + s.log.debug( + s"Forking tests - parallelism = ${forkedConfig.parallel}, threads = ${forkedParallelism.getOrElse("auto")}" + ) ForkTests( runners, processedOptions(group), @@ -1444,6 +1470,7 @@ object Defaults extends BuildCommon { converter, opts, s.log, + forkedParallelism, (Tags.ForkedTestGroup, 1) +: group.tags* ) case Tests.InProcess => diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index 81ae104b8..6582cf51e 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -370,6 +370,7 @@ object Keys { @transient val testListeners = taskKey[Seq[TestReportListener]]("Defines test listeners.").withRank(DTask) val testForkedParallel = settingKey[Boolean]("Whether forked tests should be executed in parallel").withRank(CTask) + val testForkedParallelism = settingKey[Option[Int]]("Maximum number of parallel test threads when using testForkedParallel. Defaults to the number of available processors.").withRank(CTask) val testExecution = taskKey[Tests.Execution]("Settings controlling test execution").withRank(DTask) val testFilter = taskKey[Seq[String] => Seq[String => Boolean]]("Filter controlling whether the test is executed").withRank(DTask) val testResultLogger = settingKey[TestResultLogger]("Logs results after a test task completes.").withRank(DTask) diff --git a/worker/src/main/java/sbt/internal/worker1/ForkTestMain.java b/worker/src/main/java/sbt/internal/worker1/ForkTestMain.java index 22d824171..6620f32b1 100644 --- a/worker/src/main/java/sbt/internal/worker1/ForkTestMain.java +++ b/worker/src/main/java/sbt/internal/worker1/ForkTestMain.java @@ -311,12 +311,13 @@ public class ForkTestMain { this.originalOut.flush(); } - private ExecutorService executorService(final boolean parallel) { + private ExecutorService executorService(final boolean parallel, final Integer parallelism) { if (parallel) { - final int nbThreads = Runtime.getRuntime().availableProcessors(); + final int nbThreads = + (parallelism != null && parallelism > 0) + ? parallelism + : Runtime.getRuntime().availableProcessors(); logDebug("Create a test executor with a thread pool of " + nbThreads + " threads."); - // more options later... 
- // TODO we might want to configure the blocking queue with size #proc return Executors.newFixedThreadPool(nbThreads); } else { logDebug("Create a single-thread test executor"); @@ -326,7 +327,7 @@ public class ForkTestMain { private void runTests(TestInfo info, ClassLoader classLoader) throws Exception { Thread.currentThread().setContextClassLoader(classLoader); - final ExecutorService executor = executorService(info.parallel); + final ExecutorService executor = executorService(info.parallel, info.parallelism); final TaskDef[] tests = info.taskDefs.toArray(new TaskDef[] {}); final int nFrameworks = info.testRunners.size(); final Logger[] loggers = {remoteLogger(info.ansiCodesSupported)}; diff --git a/worker/src/main/java/sbt/internal/worker1/TestInfo.java b/worker/src/main/java/sbt/internal/worker1/TestInfo.java index a1990e0d7..e3c9adba9 100644 --- a/worker/src/main/java/sbt/internal/worker1/TestInfo.java +++ b/worker/src/main/java/sbt/internal/worker1/TestInfo.java @@ -33,6 +33,7 @@ public class TestInfo implements Serializable { public final RunInfo.NativeRunInfo nativeRunInfo; public final boolean ansiCodesSupported; public final boolean parallel; + public final Integer parallelism; public final ArrayList taskDefs; public final ArrayList testRunners; @@ -42,6 +43,7 @@ public class TestInfo implements Serializable { RunInfo.NativeRunInfo nativeRunInfo, boolean ansiCodesSupported, boolean parallel, + Integer parallelism, ArrayList taskDefs, ArrayList testRunners) { this.jvm = jvm; @@ -49,6 +51,7 @@ public class TestInfo implements Serializable { this.nativeRunInfo = nativeRunInfo; this.ansiCodesSupported = ansiCodesSupported; this.parallel = parallel; + this.parallelism = parallelism; this.taskDefs = taskDefs; this.testRunners = testRunners; } From 65af6c59d64ee4f9a59bfba06d500cdfbb3ba2d2 Mon Sep 17 00:00:00 2001 From: gayanMatch Date: Fri, 9 Jan 2026 13:57:58 -0600 Subject: [PATCH 2/5] Fix #3746: Scripted should fail when no tests match the pattern --- .../src/main/scala/sbt/internal/scripted/ScriptedTests.scala | 3 +++ project/Scripted.scala | 2 +- .../src/main/scala/sbt/scriptedtest/ScriptedTests.scala | 4 ++++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/internal/util-scripted/src/main/scala/sbt/internal/scripted/ScriptedTests.scala b/internal/util-scripted/src/main/scala/sbt/internal/scripted/ScriptedTests.scala index 4ff8a1567..c9db381c5 100644 --- a/internal/util-scripted/src/main/scala/sbt/internal/scripted/ScriptedTests.scala +++ b/internal/util-scripted/src/main/scala/sbt/internal/scripted/ScriptedTests.scala @@ -33,6 +33,9 @@ object ScriptedRunnerImpl { case ScriptedTest(group, name) => runner.scriptedTest(group, name, logger, context) } + if (tests.nonEmpty && allTests.isEmpty) { + sys.error(s"No tests found matching: ${tests.mkString(", ")}") + } runAll(allTests) } def runAll(tests: Seq[() => Option[String]]): Unit = { diff --git a/project/Scripted.scala b/project/Scripted.scala index 9bd2f3649..919199c80 100644 --- a/project/Scripted.scala +++ b/project/Scripted.scala @@ -104,7 +104,7 @@ object Scripted { launcherJar: File, logger: Logger ): Unit = { - logger.info(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}") + logger.info(s"Tests selected: ${args.mkString("\n * ", "\n * ", "\n")}") logger.info("") // Force Log4J to not use a thread context classloader otherwise it throws a CCE diff --git a/scripted-sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala b/scripted-sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala index 
e9c95a4fd..30bd860e4 100644 --- a/scripted-sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala +++ b/scripted-sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala @@ -589,6 +589,10 @@ class ScriptedRunner { val groupCount = if (parallelExecution) instances else Int.MaxValue val scriptedRunners = runner.batchScriptedRunner(scriptedTests, addTestFile, groupCount, prop, logger) + // Fail if user provided test patterns but none matched any existing test directories + if (tests.nonEmpty && scriptedRunners.isEmpty) { + sys.error(s"No tests found matching: ${tests.mkString(", ")}") + } if (parallelExecution && instances > 1) { import scala.collection.parallel.CollectionConverters.* val parallelRunners = scriptedRunners.toArray.par From a921a86440a57ca3e96554dc48d9132f3dae138d Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 10 Jan 2026 01:56:27 -0500 Subject: [PATCH 3/5] [2.x] consoleProject **Problem** consoleProject doesn't work. REPL doesn't even start. **Solution** I made some progress into consoleProject. At least Scala 3.7 repl session will now start. The problem is that compiler bridge has not implemented binding, so we can't forward the sbt build information into the repl. --- build.sbt | 1 + main/src/main/scala/sbt/Defaults.scala | 29 ++++++-- .../scala/sbt/internal/ConsoleProject.scala | 66 +++++++++++++------ 3 files changed, 69 insertions(+), 27 deletions(-) diff --git a/build.sbt b/build.sbt index 762b05261..10824cd8b 100644 --- a/build.sbt +++ b/build.sbt @@ -717,6 +717,7 @@ lazy val mainProj = (project in file("main")) mimaSettings, mimaBinaryIssueFilters ++= Vector( exclude[ReversedMissingMethodProblem]("sbt.ProjectMatrix.*"), + exclude[DirectMissingMethodProblem]("sbt.internal.ConsoleProject.*"), ), ) .dependsOn(lmCore, lmIvy, lmCoursierShadedPublishing) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 96747f085..5f90ebbe2 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -774,6 +774,27 @@ object Defaults extends BuildCommon { javacOptions :== Nil, scalacOptions :== Nil, scalaVersion := appConfiguration.value.provider.scalaProvider.version, + consoleProject := ConsoleProject.consoleProjectTask.value, + consoleProject / scalaInstance := { + val topLoader = classOf[org.jline.terminal.Terminal].getClassLoader + val scalaProvider = appConfiguration.value.provider.scalaProvider + val allJars = scalaProvider.jars + val libraryJars = allJars.filter { jar => + jar.getName == "scala-library.jar" || jar.getName.startsWith("scala3-library_3") + } + val compilerJar = allJars.filter { jar => + jar.getName == "scala-compiler.jar" || jar.getName.startsWith("scala3-compiler_3") + } + ScalaInstance(scalaProvider.version, scalaProvider.launcher) + Compiler.makeScalaInstance( + scalaProvider.version, + libraryJars, + allJars.toSeq, + Seq.empty, + state.value, + topLoader, + ) + }, derive(crossScalaVersions := Seq(scalaVersion.value)), derive(compilersSetting), derive(scalaBinaryVersion := binaryScalaVersion(scalaVersion.value)), @@ -1067,7 +1088,6 @@ object Defaults extends BuildCommon { cleanKeepGlobs ++= historyPath.value.map(_.toGlob).toVector, // clean := Def.taskDyn(Clean.task(resolvedScoped.value.scope, full = true)).value, clean := Clean.scopedTask.value, - consoleProject := consoleProjectTask.value, transitiveDynamicInputs := Def.uncached(WatchTransitiveDependencies.task.value), ) @@ -2053,12 +2073,7 @@ object Defaults extends BuildCommon { 
analysis.infos.allInfos.values.map(_.getMainClasses).flatten.toSeq.sorted } - def consoleProjectTask = - Def.task { - ConsoleProject(state.value, (consoleProject / initialCommands).value)(using streams.value.log) - println() - } - + def consoleProjectTask = ConsoleProject.consoleProjectTask def consoleTask: Initialize[Task[Unit]] = consoleTask(fullClasspath, console) def consoleQuickTask = consoleTask(externalDependencyClasspath, consoleQuick) def consoleTask(classpath: TaskKey[Classpath], task: TaskKey[?]): Initialize[Task[Unit]] = diff --git a/main/src/main/scala/sbt/internal/ConsoleProject.scala b/main/src/main/scala/sbt/internal/ConsoleProject.scala index ee0c333c0..35fd97906 100644 --- a/main/src/main/scala/sbt/internal/ConsoleProject.scala +++ b/main/src/main/scala/sbt/internal/ConsoleProject.scala @@ -12,58 +12,84 @@ package internal import sbt.ProjectExtra.extract import sbt.internal.classpath.AlternativeZincUtil import sbt.internal.inc.{ ScalaInstance, ZincLmUtil } +import sbt.internal.inc.classpath.ClasspathUtil import sbt.internal.util.Terminal +import sbt.io.IO +import sbt.librarymanagement.DependencyResolution import sbt.util.Logger +import xsbti.HashedVirtualFileRef import xsbti.compile.ClasspathOptionsUtil -object ConsoleProject { - def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)( - using log: Logger +object ConsoleProject: + def consoleProjectTask = + Def.task { + val st = Keys.state.value + val si = (Keys.consoleProject / Keys.scalaInstance).value + val dr = (LocalRootProject / Keys.dependencyResolution).value + val compilerBridgeBinaryBin = + (LocalRootProject / Keys.consoleProject / Keys.scalaCompilerBridgeBin).value + ConsoleProject( + st, + si, + dr, + compilerBridgeBinaryBin, + (Keys.consoleProject / Keys.initialCommands).value + )(using + Keys.streams.value.log + ) + println() + } + + def apply( + state: State, + si: ScalaInstance, + dr: DependencyResolution, + compilerBridgeBinaryBin: Seq[HashedVirtualFileRef], + extra: String, + cleanupCommands: String = "", + options: Seq[String] = Nil + )(using + log: Logger ): Unit = { val extracted = Project.extract(state) val cpImports = new Imports(extracted, state) + // Bindings are blocked by https://github.com/scala/scala3/issues/5069 val bindings = ("currentState" -> state) :: ("extracted" -> extracted) :: ("cpHelpers" -> cpImports) :: Nil val unit = extracted.currentUnit - val (state1, dependencyResolution) = - extracted.runTask(Keys.dependencyResolution, state) - val (_, scalaCompilerBridgeBinaryBin) = - extracted.runTask(Keys.consoleProject / Keys.scalaCompilerBridgeBin, state1) + val tempDir0 = extracted.get(Keys.consoleProject / Keys.taskTemporaryDirectory) + val tempDir = IO.createUniqueDirectory(tempDir0).toPath() val conv = extracted.get(Keys.fileConverter) - val scalaInstance = { - val scalaProvider = state.configuration.provider.scalaProvider - ScalaInstance(scalaProvider.version, scalaProvider) - } val g = BuildPaths.getGlobalBase(state) val zincDir = BuildPaths.getZincDirectory(state, g) val app = state.configuration val launcher = app.provider.scalaProvider.launcher - val compiler = scalaCompilerBridgeBinaryBin.toList match + val compiler = compilerBridgeBinaryBin.toList match case jar :: xs => AlternativeZincUtil.scalaCompiler( - scalaInstance = scalaInstance, + scalaInstance = si, classpathOptions = ClasspathOptionsUtil.repl, compilerBridgeJar = conv.toPath(jar).toFile(), - classLoaderCache = state1.get(BasicKeys.classLoaderCache) + classLoaderCache = 
state.get(BasicKeys.classLoaderCache) ) case Nil => ZincLmUtil.scalaCompiler( - scalaInstance = scalaInstance, + scalaInstance = si, classpathOptions = ClasspathOptionsUtil.repl, globalLock = launcher.globalLock, componentProvider = app.provider.components, secondaryCacheDir = Option(zincDir), - dependencyResolution = dependencyResolution, + dependencyResolution = dr, compilerBridgeSource = extracted.get(Keys.consoleProject / Keys.scalaCompilerBridgeSource), scalaJarsTarget = zincDir, - classLoaderCache = state1.get(BasicKeys.classLoaderCache), + classLoaderCache = state.get(BasicKeys.classLoaderCache), log = log ) val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1)) val importString = imports.mkString("", ";\n", ";\n\n") val initCommands = importString + extra - + val loader = ClasspathUtil.makeLoader(unit.classpath, si, tempDir) val terminal = Terminal.get // TODO - Hook up dsl classpath correctly... (new Console(compiler))( @@ -72,7 +98,7 @@ object ConsoleProject { initCommands, cleanupCommands, terminal - )(Some(unit.loader), bindings).get + )(Some(loader), bindings).get () } @@ -84,4 +110,4 @@ object ConsoleProject { implicit def settingKeyEvaluate[T](s: SettingKey[T]): Evaluate[T] = new Evaluate(get(s)) } final class Evaluate[T] private[sbt] (val eval: T) -} +end ConsoleProject From 1b2bc53bcf6dd5ef8aec44d3044560564d3a5ba8 Mon Sep 17 00:00:00 2001 From: mkdev11 Date: Sat, 10 Jan 2026 15:03:57 +0200 Subject: [PATCH 4/5] fix: Improve GCMonitor warning message clarity Add 'seconds' unit to GC time and clarify that the percentage represents cumulative GC pause time across all collectors. Fixes #8002 --- main/src/main/scala/sbt/internal/GCMonitor.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/main/src/main/scala/sbt/internal/GCMonitor.scala b/main/src/main/scala/sbt/internal/GCMonitor.scala index 79b49af02..f39b8a4ff 100644 --- a/main/src/main/scala/sbt/internal/GCMonitor.scala +++ b/main/src/main/scala/sbt/internal/GCMonitor.scala @@ -62,8 +62,10 @@ class GCMonitor(logger: Logger) extends GCMonitorBase with AutoCloseable { override protected def emitWarning(total: Long, over: Option[Long]): Unit = { val totalSeconds = total / 1000.0 - val amountMsg = over.fold(s"$totalSeconds seconds") { d => - "In the last " + (d / 1000.0).ceil.toInt + f" seconds, $totalSeconds (${total.toDouble / d * 100}%.1f%%)" + val amountMsg = over.fold(f"$totalSeconds%.3f seconds") { d => + val dSeconds = (d / 1000.0).ceil.toInt + val percentage = total.toDouble / d * 100 + f"In the last $dSeconds seconds, $totalSeconds%.3f seconds ($percentage%.1f%%) of GC pause" } val msg = s"$amountMsg were spent in GC. " + s"[Heap: ${gbString(runtime.freeMemory())} free " + From 9be376973db689810ed63f1b9a534c8d8c8866a7 Mon Sep 17 00:00:00 2001 From: mkdev11 Date: Sat, 10 Jan 2026 22:23:05 +0200 Subject: [PATCH 5/5] fix: Use 'CPU seconds' to clarify GC time can exceed wall clock Address review feedback from eed3si9n to make it clearer that GC time is cumulative CPU time across parallel collectors, which is why it can exceed wall clock time. 
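For reviewers who want to reproduce the effect outside sbt, here is a minimal sketch using the standard `GarbageCollectorMXBean` API (this is not the code path GCMonitor itself uses; the 10-second window and the allocation loop are only illustrative assumptions):

```scala
import java.lang.management.ManagementFactory
import scala.jdk.CollectionConverters.*

// Sum of the accumulated collection time reported by every collector in this JVM.
def gcMillis(): Long =
  ManagementFactory.getGarbageCollectorMXBeans.asScala
    .map(bean => math.max(bean.getCollectionTime, 0L)) // -1 means the collector does not report it
    .sum

@main def gcDemo(): Unit =
  val before = gcMillis()
  val deadline = System.currentTimeMillis() + 10000L // 10 s wall-clock window
  var sink: AnyRef = null
  while System.currentTimeMillis() < deadline do
    sink = Array.fill(1 << 16)(new Array[Byte](64)) // churn the heap to trigger collections
  val delta = gcMillis() - before
  // The figure aggregates work done by all collectors (and their worker threads),
  // so it can come out larger than the 10 000 ms window, which is what the
  // "CPU seconds" wording is meant to signal.
  println(f"${delta / 1000.0}%.3f CPU seconds of GC in a 10 s window")
```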
Fixes #8002 --- main/src/main/scala/sbt/internal/GCMonitor.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main/src/main/scala/sbt/internal/GCMonitor.scala b/main/src/main/scala/sbt/internal/GCMonitor.scala index f39b8a4ff..305fda484 100644 --- a/main/src/main/scala/sbt/internal/GCMonitor.scala +++ b/main/src/main/scala/sbt/internal/GCMonitor.scala @@ -62,10 +62,10 @@ class GCMonitor(logger: Logger) extends GCMonitorBase with AutoCloseable { override protected def emitWarning(total: Long, over: Option[Long]): Unit = { val totalSeconds = total / 1000.0 - val amountMsg = over.fold(f"$totalSeconds%.3f seconds") { d => + val amountMsg = over.fold(f"$totalSeconds%.3f CPU seconds") { d => val dSeconds = (d / 1000.0).ceil.toInt val percentage = total.toDouble / d * 100 - f"In the last $dSeconds seconds, $totalSeconds%.3f seconds ($percentage%.1f%%) of GC pause" + f"In the last $dSeconds seconds, $totalSeconds%.3f CPU seconds ($percentage%.1f%%) of GC pause" } val msg = s"$amountMsg were spent in GC. " + s"[Heap: ${gbString(runtime.freeMemory())} free " +