Split large .sbt definitions across Eval modules

Fixes JVM class/method size failures when many top-level vals are compiled
into one synthetic object (sbt/sbt#3057).

Later chunks import prior modules so definitions can still see vals above
them in the file. Partitioning is bounded by definition count and source
character budget.
This commit is contained in:
bitloi 2026-04-10 06:46:20 +02:00
parent 7218b2a1ac
commit a1a546456b
No known key found for this signature in database
GPG Key ID: DB743C90C0FE29BA
2 changed files with 119 additions and 12 deletions

View File

@ -151,10 +151,11 @@ private[sbt] object EvaluateConfigurations {
val (importDefs, definitions) =
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty)
else {
val definitions =
val defValues =
evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtilLite.importAllRoot(definitions.enclosingModule :: Nil)
(imp, DefinedSbtValues(definitions))
val imp =
BuildUtilLite.importAllRoot(defValues.sbtFiles.map(_.enclosingModule))
(imp, defValues)
}
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports
val dslEntries = parsed.settings map { (dslExpression, range) =>
@ -317,21 +318,86 @@ private[sbt] object EvaluateConfigurations {
classOf[SettingKey[?]]
).map(_.getName)
/**
* Upper bound on total source characters per compiled definitions module. Together with
* `MaxDefinitionsPerChunk`, this keeps generated bytecode under JVM class and method size limits
* (see sbt/sbt#3057).
*/
private val MaxDefinitionCharsPerChunk: Int = 12000
/**
* Upper bound on definition count per compiled module. Large .sbt files with hundreds of
* `lazy val` lines can exceed JVM limits when compiled into a single synthetic object.
*/
private val MaxDefinitionsPerChunk: Int = 100
/**
* Partitions top-level .sbt definitions before `Eval` compiles them. Used by
* `evaluateDefinitions` (sbt/sbt#3057) and exposed for unit tests.
*/
private[sbt] def partitionDefinitionRanges(
definitions: Seq[(String, LineRange)]
): Seq[Seq[(String, scala.Range)]] =
chunkDefinitionRanges(definitions.map { (s, r) => (s, r.start to r.end) })
/**
* Partitions top-level .sbt definitions in source order. Later chunks are compiled with
* wildcard imports of earlier modules so each val can see definitions above it in the file.
*/
private def chunkDefinitionRanges(
definitions: Seq[(String, scala.Range)]
): Seq[Seq[(String, scala.Range)]] =
if definitions.isEmpty then Nil
else
val out = Seq.newBuilder[Seq[(String, scala.Range)]]
var cur = Seq.newBuilder[(String, scala.Range)]
var chars = 0
var count = 0
def flush(): Unit =
val c = cur.result()
if c.nonEmpty then
out += c
cur = Seq.newBuilder
chars = 0
count = 0
for pair @ (text, _) <- definitions do
val len = text.length
val overChars = count > 0 && chars + len > MaxDefinitionCharsPerChunk
val overCount = count > 0 && count >= MaxDefinitionsPerChunk
if overChars || overCount then flush()
cur += pair
chars += len
count += 1
flush()
out.result()
private def evaluateDefinitions(
eval: Eval,
name: String,
imports: Seq[(String, Int)],
definitions: Seq[(String, LineRange)],
file: Option[VirtualFileRef],
): EvalDefinitions = {
val convertedRanges = definitions.map { (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(
convertedRanges,
new EvalImports(imports.map(_._1)), // name
name,
// file,
extractedValTypes
)
): DefinedSbtValues = {
val chunks = partitionDefinitionRanges(definitions)
if chunks.isEmpty then DefinedSbtValues.empty
else
val acc = Seq.newBuilder[EvalDefinitions]
var priorModuleNames = Seq.empty[String]
for chunk <- chunks do
val importSuffix =
if priorModuleNames.isEmpty then Nil
else BuildUtilLite.importAllRoot(priorModuleNames).map(s => (s, -1))
val combinedImports = imports ++ importSuffix
val ed = eval.evalDefinitions(
chunk,
new EvalImports(combinedImports.map(_._1)),
name,
extractedValTypes,
extraHash = priorModuleNames.mkString("|"),
)
acc += ed
priorModuleNames = priorModuleNames :+ ed.enclosingModule
new DefinedSbtValues(acc.result())
}
}

View File

@ -0,0 +1,41 @@
package sbt.internal
import sbt.internal.util.LineRange
object EvaluateConfigurationsChunkingSpec extends verify.BasicTestSuite:
test("partitions by definition count (sbt/sbt#3057)") {
val defs = (0 until 105).map(i => (s"lazy val x$i = ()", LineRange(i, i))).toList
val parts = EvaluateConfigurations.partitionDefinitionRanges(defs)
assert(parts.size == 2)
assert(parts.head.size == 100)
assert(parts(1).size == 5)
assert(parts.map(_.size).sum == 105)
}
test("small definition lists stay in one partition") {
val defs = (0 until 5).map(i => (s"lazy val x$i = ()", LineRange(i, i))).toList
val parts = EvaluateConfigurations.partitionDefinitionRanges(defs)
assert(parts.size == 1)
assert(parts.head.size == 5)
}
test("partitions when character budget is exceeded before count limit") {
val d0 = ("lazy val a = 1", LineRange(0, 0))
val padding = " " * 13000
val d1 = (s"lazy val b = 1$padding", LineRange(1, 1))
val parts = EvaluateConfigurations.partitionDefinitionRanges(List(d0, d1))
assert(parts.size == 2)
assert(parts(0).size == 1)
assert(parts(1).size == 1)
}
test("a single oversized definition is not split") {
val padding = " " * 20000
val d0 = (s"lazy val huge = 1$padding", LineRange(0, 0))
val parts = EvaluateConfigurations.partitionDefinitionRanges(List(d0))
assert(parts.size == 1)
assert(parts.head.size == 1)
}
end EvaluateConfigurationsChunkingSpec