mirror of https://github.com/sbt/sbt.git
Merge pull request #7534 from eed3si9n/wip/consistent
Consistent Analysis
This commit is contained in:
commit
acf825179b
|
|
@ -100,13 +100,7 @@ import scala.xml.NodeSeq
|
|||
|
||||
// incremental compiler
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.inc.{
|
||||
Analysis,
|
||||
AnalyzingCompiler,
|
||||
ManagedLoggedReporter,
|
||||
MixedAnalyzingCompiler,
|
||||
ScalaInstance
|
||||
}
|
||||
import sbt.internal.inc.{ Analysis, AnalyzingCompiler, ManagedLoggedReporter, ScalaInstance }
|
||||
import xsbti.{ CrossValue, VirtualFile, VirtualFileRef }
|
||||
import xsbti.compile.{
|
||||
AnalysisContents,
|
||||
|
|
@ -875,7 +869,12 @@ object Defaults extends BuildCommon {
|
|||
}
|
||||
|
||||
def defaultCompileSettings: Seq[Setting[_]] =
|
||||
globalDefaults(enableBinaryCompileAnalysis := true)
|
||||
globalDefaults(
|
||||
Seq(
|
||||
enableBinaryCompileAnalysis :== true,
|
||||
enableConsistentCompileAnalysis :== SysProp.analysis2024,
|
||||
)
|
||||
)
|
||||
|
||||
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++
|
||||
inTask(compile)(compileInputsSettings) ++
|
||||
|
|
@ -2299,13 +2298,15 @@ object Defaults extends BuildCommon {
|
|||
*/
|
||||
private[sbt] def compileScalaBackendTask: Initialize[Task[CompileResult]] = Def.task {
|
||||
val setup: Setup = compileIncSetup.value
|
||||
val useBinary: Boolean = enableBinaryCompileAnalysis.value
|
||||
val analysisResult: CompileResult = compileIncremental.value
|
||||
val exportP = exportPipelining.value
|
||||
// Save analysis midway if pipelining is enabled
|
||||
if (analysisResult.hasModified && exportP) {
|
||||
val store =
|
||||
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
|
||||
val store = AnalysisUtil.staticCachedStore(
|
||||
analysisFile = setup.cacheFile.toPath,
|
||||
useTextAnalysis = !enableBinaryCompileAnalysis.value,
|
||||
useConsistent = enableConsistentCompileAnalysis.value,
|
||||
)
|
||||
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
|
||||
store.set(contents)
|
||||
// this stores the early analysis (again) in case the subproject contains a macro
|
||||
|
|
@ -2325,9 +2326,11 @@ object Defaults extends BuildCommon {
|
|||
.debug(s"${name.value}: compileEarly: blocking on earlyOutputPing")
|
||||
earlyOutputPing.await.value
|
||||
}) {
|
||||
val useBinary: Boolean = enableBinaryCompileAnalysis.value
|
||||
val store =
|
||||
MixedAnalyzingCompiler.staticCachedStore(earlyCompileAnalysisFile.value.toPath, !useBinary)
|
||||
val store = AnalysisUtil.staticCachedStore(
|
||||
analysisFile = earlyCompileAnalysisFile.value.toPath,
|
||||
useTextAnalysis = !enableBinaryCompileAnalysis.value,
|
||||
useConsistent = enableConsistentCompileAnalysis.value,
|
||||
)
|
||||
store.get.toOption match {
|
||||
case Some(contents) => contents.getAnalysis
|
||||
case _ => Analysis.empty
|
||||
|
|
@ -2338,13 +2341,15 @@ object Defaults extends BuildCommon {
|
|||
}
|
||||
def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
|
||||
val setup: Setup = compileIncSetup.value
|
||||
val useBinary: Boolean = enableBinaryCompileAnalysis.value
|
||||
val c = fileConverter.value
|
||||
// TODO - expose bytecode manipulation phase.
|
||||
val analysisResult: CompileResult = manipulateBytecode.value
|
||||
if (analysisResult.hasModified) {
|
||||
val store =
|
||||
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
|
||||
val store = AnalysisUtil.staticCachedStore(
|
||||
analysisFile = setup.cacheFile.toPath,
|
||||
useTextAnalysis = !enableBinaryCompileAnalysis.value,
|
||||
useConsistent = enableConsistentCompileAnalysis.value,
|
||||
)
|
||||
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
|
||||
store.set(contents)
|
||||
}
|
||||
|
|
@ -2444,11 +2449,16 @@ object Defaults extends BuildCommon {
|
|||
cachedPerEntryDefinesClassLookup(classpathEntry)
|
||||
}
|
||||
val extra = extraIncOptions.value.map(t2)
|
||||
val useBinary: Boolean = enableBinaryCompileAnalysis.value
|
||||
val eapath = earlyCompileAnalysisFile.value.toPath
|
||||
val eaOpt =
|
||||
if (exportPipelining.value) Some(MixedAnalyzingCompiler.staticCachedStore(eapath, !useBinary))
|
||||
else None
|
||||
if (exportPipelining.value) {
|
||||
val store = AnalysisUtil.staticCachedStore(
|
||||
analysisFile = eapath,
|
||||
useTextAnalysis = !enableBinaryCompileAnalysis.value,
|
||||
useConsistent = enableConsistentCompileAnalysis.value,
|
||||
)
|
||||
Some(store)
|
||||
} else None
|
||||
Setup.of(
|
||||
lookup,
|
||||
(compile / skip).value,
|
||||
|
|
@ -2538,8 +2548,11 @@ object Defaults extends BuildCommon {
|
|||
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
|
||||
previousCompile := {
|
||||
val setup = compileIncSetup.value
|
||||
val useBinary: Boolean = enableBinaryCompileAnalysis.value
|
||||
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
|
||||
val store = AnalysisUtil.staticCachedStore(
|
||||
analysisFile = setup.cacheFile.toPath,
|
||||
useTextAnalysis = !enableBinaryCompileAnalysis.value,
|
||||
useConsistent = enableConsistentCompileAnalysis.value,
|
||||
)
|
||||
val prev = store.get().toOption match {
|
||||
case Some(contents) =>
|
||||
val analysis = Option(contents.getAnalysis).toOptional
|
||||
|
|
|
|||
|
|
@ -206,7 +206,8 @@ object Keys {
|
|||
val scalaCompilerBridgeSource = settingKey[ModuleID]("Configures the module ID of the sources of the compiler bridge when scalaCompilerBridgeBinaryJar is None").withRank(CSetting)
|
||||
val scalaCompilerBridgeScope = taskKey[Unit]("The compiler bridge scope.").withRank(DTask)
|
||||
val scalaArtifacts = settingKey[Seq[String]]("Configures the list of artifacts which should match the Scala binary version").withRank(CSetting)
|
||||
val enableBinaryCompileAnalysis = settingKey[Boolean]("Writes the analysis file in binary format")
|
||||
val enableBinaryCompileAnalysis = settingKey[Boolean]("Writes the analysis file in binary format").withRank(DSetting)
|
||||
val enableConsistentCompileAnalysis = settingKey[Boolean]("Writes the analysis file in consistent binary format").withRank(DSetting)
|
||||
val crossJavaVersions = settingKey[Seq[String]]("The java versions used during JDK cross testing").withRank(BPlusSetting)
|
||||
val semanticdbEnabled = settingKey[Boolean]("Enables SemanticDB Scalac plugin").withRank(CSetting)
|
||||
val semanticdbCompilerPlugin = settingKey[ModuleID]("SemanticDB Scalac plugin").withRank(CSetting)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2023, Scala center
|
||||
* Copyright 2011 - 2022, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import java.nio.file.Path
|
||||
import sbt.internal.inc.MixedAnalyzingCompiler
|
||||
import scala.concurrent.ExecutionContext
|
||||
import xsbti.compile.{ AnalysisStore => XAnalysisStore }
|
||||
import xsbti.compile.analysis.ReadWriteMappers
|
||||
|
||||
private[sbt] object AnalysisUtil {
  // Some machines have many cores; cap the thread count used for analysis
  // serialization so we don't occupy all of them.
  lazy val parallelism: Int =
    scala.math.min(
      Runtime.getRuntime.availableProcessors(),
      8,
    )

  /**
   * Creates a static, cached analysis store for the given Zinc analysis file.
   *
   * @param analysisFile path of the Zinc analysis file
   * @param useTextAnalysis if true, persist the analysis in text format rather than binary
   * @param useConsistent if true, use Zinc's consistent analysis format
   * @return a cached [[xsbti.compile.AnalysisStore]] backed by `MixedAnalyzingCompiler`
   */
  def staticCachedStore(
      analysisFile: Path,
      useTextAnalysis: Boolean,
      useConsistent: Boolean,
  ): XAnalysisStore =
    MixedAnalyzingCompiler.staticCachedStore(
      analysisFile = analysisFile,
      useTextAnalysis = useTextAnalysis,
      // Fix: forward the caller's choice. The original hard-coded `false` here,
      // which silently ignored the `useConsistent` parameter and disabled the
      // consistent analysis format for every call site (all of which pass
      // `enableConsistentCompileAnalysis.value`).
      useConsistent = useConsistent,
      mappers = ReadWriteMappers.getEmptyMappers(),
      sort = true,
      ec = ExecutionContext.global,
      parallelism = parallelism,
    )
}
|
||||
|
|
@ -139,6 +139,8 @@ object SysProp {
|
|||
def useLog4J: Boolean = getOrFalse("sbt.log.uselog4j")
|
||||
def turbo: Boolean = getOrFalse("sbt.turbo")
|
||||
def pipelining: Boolean = getOrFalse("sbt.pipelining")
|
||||
// opt-in or out of Zinc's consistent Analysis format.
|
||||
def analysis2024: Boolean = getOrTrue("sbt.analysis2024")
|
||||
|
||||
def taskTimings: Boolean = getOrFalse("sbt.task.timings")
|
||||
def taskTimingsOnShutdown: Boolean = getOrFalse("sbt.task.timings.on.shutdown")
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ import sjsonnew.JsonFormat
|
|||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
|
||||
|
||||
import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler }
|
||||
import sbt.internal.inc.Analysis
|
||||
import sbt.internal.inc.JavaInterfaceUtil._
|
||||
import sbt.internal.protocol.JsonRpcResponseError
|
||||
import sbt.internal.protocol.codec.JsonRPCProtocol
|
||||
|
|
@ -183,11 +183,19 @@ private[sbt] object Definition {
|
|||
}
|
||||
|
||||
private[this] val AnalysesKey = "lsp.definition.analyses.key"
|
||||
private[server] type Analyses = Set[((String, Boolean), Option[Analysis])]
|
||||
private[server] type Analyses = Set[((String, Boolean, Boolean), Option[Analysis])]
|
||||
|
||||
private def storeAnalysis(cacheFile: Path, useBinary: Boolean): Option[Analysis] =
|
||||
MixedAnalyzingCompiler
|
||||
.staticCachedStore(cacheFile, !useBinary)
|
||||
private def storeAnalysis(
|
||||
cacheFile: Path,
|
||||
useBinary: Boolean,
|
||||
useConsistent: Boolean,
|
||||
): Option[Analysis] =
|
||||
AnalysisUtil
|
||||
.staticCachedStore(
|
||||
analysisFile = cacheFile,
|
||||
useTextAnalysis = !useBinary,
|
||||
useConsistent = useConsistent,
|
||||
)
|
||||
.get
|
||||
.toOption
|
||||
.map { _.getAnalysis }
|
||||
|
|
@ -195,13 +203,13 @@ private[sbt] object Definition {
|
|||
|
||||
private[sbt] def updateCache(
|
||||
cache: Cache[String, Analyses]
|
||||
)(cacheFile: String, useBinary: Boolean): Any = {
|
||||
cache.get(AnalysesKey, k => Set(cacheFile -> useBinary -> None)) match {
|
||||
)(cacheFile: String, useBinary: Boolean, useConsistent: Boolean): Any = {
|
||||
cache.get(AnalysesKey, k => Set((cacheFile, useBinary, useConsistent) -> None)) match {
|
||||
case null => new AnyRef
|
||||
case set =>
|
||||
val newSet = set
|
||||
.filterNot { case ((file, _), _) => file == cacheFile }
|
||||
.+(cacheFile -> useBinary -> None)
|
||||
.filterNot { case ((file, _, _), _) => file == cacheFile }
|
||||
.+((cacheFile, useBinary, useConsistent) -> None)
|
||||
cache.put(AnalysesKey, newSet)
|
||||
}
|
||||
}
|
||||
|
|
@ -221,10 +229,13 @@ private[sbt] object Definition {
|
|||
|
||||
def collectAnalysesTask = Def.task {
|
||||
val cacheFile: String = compileIncSetup.value.cacheFile.getAbsolutePath
|
||||
val useBinary = enableBinaryCompileAnalysis.value
|
||||
val s = state.value
|
||||
s.log.debug(s"analysis location ${cacheFile -> useBinary}")
|
||||
updateCache(AnalysesAccess.cache)(cacheFile, useBinary)
|
||||
s.log.debug(s"analysis location ${cacheFile}")
|
||||
updateCache(AnalysesAccess.cache)(
|
||||
cacheFile = cacheFile,
|
||||
useBinary = enableBinaryCompileAnalysis.value,
|
||||
useConsistent = enableConsistentCompileAnalysis.value,
|
||||
)
|
||||
}
|
||||
|
||||
private[sbt] def getAnalyses: Future[Seq[Analysis]] = {
|
||||
|
|
@ -243,8 +254,9 @@ private[sbt] object Definition {
|
|||
case (_, None) => false
|
||||
}
|
||||
val addToCache = uninitialized.collect {
|
||||
case (title @ (file, useBinary), _) if Files.exists(Paths.get(file)) =>
|
||||
(title, storeAnalysis(Paths.get(file), !useBinary))
|
||||
case (title @ (file, useBinary, useConsistent), _)
|
||||
if Files.exists(Paths.get(file)) =>
|
||||
(title, storeAnalysis(Paths.get(file), !useBinary, useConsistent))
|
||||
}
|
||||
val validCaches = working ++ addToCache
|
||||
if (addToCache.nonEmpty) {
|
||||
|
|
|
|||
|
|
@ -201,12 +201,13 @@ object DefinitionTest extends verify.BasicTestSuite {
|
|||
val cache = Caffeine.newBuilder().build[String, Definition.Analyses]()
|
||||
val cacheFile = "Test.scala"
|
||||
val useBinary = true
|
||||
val useConsistent = true
|
||||
|
||||
Definition.updateCache(cache)(cacheFile, useBinary)
|
||||
Definition.updateCache(cache)(cacheFile, useBinary, useConsistent)
|
||||
|
||||
val actual = Definition.AnalysesAccess.getFrom(cache)
|
||||
|
||||
assert(actual.get.contains(("Test.scala" -> true -> None)))
|
||||
assert(actual.get.contains((("Test.scala", true, true) -> None)))
|
||||
}
|
||||
|
||||
test("it should replace cache data in cache") {
|
||||
|
|
@ -214,13 +215,14 @@ object DefinitionTest extends verify.BasicTestSuite {
|
|||
val cacheFile = "Test.scala"
|
||||
val useBinary = true
|
||||
val falseUseBinary = false
|
||||
val useConsistent = true
|
||||
|
||||
Definition.updateCache(cache)(cacheFile, falseUseBinary)
|
||||
Definition.updateCache(cache)(cacheFile, useBinary)
|
||||
Definition.updateCache(cache)(cacheFile, falseUseBinary, useConsistent)
|
||||
Definition.updateCache(cache)(cacheFile, useBinary, useConsistent)
|
||||
|
||||
val actual = Definition.AnalysesAccess.getFrom(cache)
|
||||
|
||||
assert(actual.get.contains(("Test.scala" -> true -> None)))
|
||||
assert(actual.get.contains((("Test.scala", true, true) -> None)))
|
||||
}
|
||||
|
||||
test("it should cache more data in cache") {
|
||||
|
|
@ -229,15 +231,16 @@ object DefinitionTest extends verify.BasicTestSuite {
|
|||
val useBinary = true
|
||||
val otherCacheFile = "OtherTest.scala"
|
||||
val otherUseBinary = false
|
||||
val useConsistent = true
|
||||
|
||||
Definition.updateCache(cache)(otherCacheFile, otherUseBinary)
|
||||
Definition.updateCache(cache)(cacheFile, useBinary)
|
||||
Definition.updateCache(cache)(otherCacheFile, otherUseBinary, useConsistent)
|
||||
Definition.updateCache(cache)(cacheFile, useBinary, useConsistent)
|
||||
|
||||
val actual = Definition.AnalysesAccess.getFrom(cache)
|
||||
|
||||
assert(
|
||||
actual.get.contains("Test.scala" -> true -> Option.empty[Analysis]) &&
|
||||
actual.get.contains("OtherTest.scala" -> false -> Option.empty[Analysis])
|
||||
actual.get.contains(("Test.scala", true, true) -> Option.empty[Analysis]) &&
|
||||
actual.get.contains(("OtherTest.scala", false, true) -> Option.empty[Analysis])
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ object Dependencies {
|
|||
private val ioVersion = nightlyVersion.getOrElse("1.9.9")
|
||||
private val lmVersion =
|
||||
sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("1.10.0-RC1")
|
||||
val zincVersion = nightlyVersion.getOrElse("1.10.0-RC1")
|
||||
val zincVersion = nightlyVersion.getOrElse("1.10.0-RC2")
|
||||
|
||||
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
object B
|
||||
{
|
||||
def main(args: Array[String]) = assert(args(0).toInt == A.x )
|
||||
}
|
||||
object B {
|
||||
def main(args: Array[String]) = assert(args(0).toInt == A.x, s"actual A.x is ${A.x}")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,16 @@
|
|||
$ copy-file changes/B.scala B.scala
|
||||
|
||||
$ copy-file changes/A1.scala a/A.scala
|
||||
$ sleep 1000
|
||||
|
||||
> run 1
|
||||
$ copy-file changes/A2.scala a/A.scala
|
||||
$ sleep 1000
|
||||
|
||||
# done this way because last modified times often have ~1s resolution
|
||||
> run 2
|
||||
$ copy-file changes/A3.scala a/A.scala
|
||||
$ sleep 1000
|
||||
|
||||
> run 3
|
||||
|
||||
|
|
@ -14,10 +18,14 @@ $ copy-file changes/build2.sbt build2.sbt
|
|||
> reload
|
||||
|
||||
$ copy-file changes/A1.scala a/A.scala
|
||||
$ sleep 1000
|
||||
|
||||
> run 1
|
||||
$ copy-file changes/A2.scala a/A.scala
|
||||
$ sleep 1000
|
||||
|
||||
> run 2
|
||||
$ copy-file changes/A3.scala a/A.scala
|
||||
$ sleep 1000
|
||||
|
||||
> run 3
|
||||
|
|
|
|||
|
|
@ -5,5 +5,7 @@ $ copy-file changes/C.scala C.scala
|
|||
-> run
|
||||
|
||||
$ copy-file changes/A2.scala A.scala
|
||||
$ sleep 1000
|
||||
|
||||
> compile
|
||||
> run
|
||||
|
|
|
|||
|
|
@ -5,6 +5,8 @@
|
|||
|
||||
# modify D.scala so that the linearization changes
|
||||
$ copy-file changes/D.scala D.scala
|
||||
$ sleep 1000
|
||||
|
||||
# F.x becomes 11
|
||||
> compile
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue