mirror of https://github.com/sbt/sbt.git
Fix BuildServerTest
parent 924150851c
commit 03ca5365f5
@@ -152,7 +152,7 @@ object Command {
def combine(cmds: Seq[Command]): State => Parser[() => State] = {
val (simple, arbs) = separateCommands(cmds)
state =>
arbs.map(_ parser state).foldLeft(simpleParser(simple)(state))(_ | _)
arbs.map(_.parser(state)).foldLeft(simpleParser(simple)(state))(_ | _)
}

private[this] def separateCommands(
@@ -188,7 +188,7 @@ object Command {
else parse(command, state.nonMultiParser)) match {
case Right(s) => s() // apply command. command side effects happen here
case Left(errMsg) =>
state.log error errMsg
state.log.error(errMsg)
state.fail
}
}
@@ -106,9 +106,11 @@ import sbt.internal.inc.{
MixedAnalyzingCompiler,
ScalaInstance
}
import xsbti.{ CrossValue, HashedVirtualFileRef, VirtualFile, VirtualFileRef }
import sbt.internal.io.Retry
import xsbti.{ CompileFailed, CrossValue, HashedVirtualFileRef, VirtualFile, VirtualFileRef }
import xsbti.compile.{
AnalysisContents,
AnalysisStore,
ClassFileManagerType,
ClasspathOptionsUtil,
CompileAnalysis,
@@ -2393,11 +2395,10 @@ object Defaults extends BuildCommon {
*/
private[sbt] def compileScalaBackendTask: Initialize[Task[CompileResult]] = Def.task {
val setup: Setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val _ = compileIncremental.value
val exportP = exportPipelining.value
// Save analysis midway if pipelining is enabled
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cachePath, !useBinary)
val store = analysisStore
val contents = store.unsafeGet()
if (exportP) {
// this stores the eary analysis (again) in case the subproject contains a macro
@@ -2422,9 +2423,7 @@ object Defaults extends BuildCommon {
.debug(s"${name.value}: compileEarly: blocking on earlyOutputPing")
earlyOutputPing.await.value
}) {
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val store =
MixedAnalyzingCompiler.staticCachedStore(earlyCompileAnalysisFile.value.toPath, !useBinary)
val store = earlyAnalysisStore
store.get.toOption match {
case Some(contents) => contents.getAnalysis
case _ => Analysis.empty
@@ -2436,13 +2435,11 @@ object Defaults extends BuildCommon {

def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
val setup: Setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val store = analysisStore
val c = fileConverter.value
// TODO - expose bytecode manipulation phase.
val analysisResult: CompileResult = manipulateBytecode.value
if (analysisResult.hasModified) {
val store =
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
}
@@ -2455,73 +2452,72 @@ object Defaults extends BuildCommon {
analysis
}

def compileIncrementalTaskSettings =
inTask(compileIncremental)(
Seq(
(TaskZero / compileIncremental) := (Def
.cachedTask {
val s = streams.value
val ci = (compile / compileInputs).value
// This is a cacheable version
val ci2 = (compile / compileInputs2).value
val ping = (TaskZero / earlyOutputPing).value
val reporter = (compile / bspReporter).value
val setup: Setup = (TaskZero / compileIncSetup).value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val c = fileConverter.value
val analysisResult: CompileResult =
BspCompileTask
.compute(bspTargetIdentifier.value, thisProjectRef.value, configuration.value) {
bspTask =>
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
compileIncrementalTaskImpl(bspTask, s, ci, ping, reporter)
}
val analysisOut = c.toVirtualFile(setup.cachePath())
val store =
MixedAnalyzingCompiler.staticCachedStore(setup.cachePath, !useBinary)
val contents =
AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
Def.declareOutput(analysisOut)
val dir = classDirectory.value
if (dir / "META-INF" / "MANIFEST.MF").exists then
IO.delete(dir / "META-INF" / "MANIFEST.MF")
// inline mappings
val mappings = Path
.allSubpaths(dir)
.filter(_._1.isFile())
.map { case (p, path) =>
val vf = c.toVirtualFile(p.toPath())
(vf: HashedVirtualFileRef) -> path
}
.toSeq
// inlined to avoid caching mappings
val pkgConfig = Pkg.Configuration(
mappings,
artifactPath.value,
packageOptions.value,
)
val out = Pkg(
pkgConfig,
c,
s.log,
Pkg.timeFromConfiguration(pkgConfig)
)
s.log.debug(s"wrote $out")
Def.declareOutput(out)
analysisResult.hasModified() -> (out: HashedVirtualFileRef)
})
.tag(Tags.Compile, Tags.CPU)
.value,
packagedArtifact := {
val (hasModified, out) = compileIncremental.value
artifact.value -> out
},
artifact := artifactSetting.value,
artifactClassifier := Some("noresources"),
artifactPath := artifactPathSetting(artifact).value,
)
def compileIncrementalTaskSettings = inTask(compileIncremental)(
Seq(
(TaskZero / compileIncremental) := {
val bspTask = (compile / bspCompileTask).value
val result = cachedCompileIncrementalTask.result.value
val reporter = (compile / bspReporter).value
val store = analysisStore
val ci = (compile / compileInputs).value
result match
case Result.Value(res) =>
val analysis = store.unsafeGet().getAnalysis()
reporter.sendSuccessReport(analysis)
bspTask.notifySuccess(analysis)
res
case Result.Inc(cause) =>
val compileFailed = cause.directCause.collect { case c: CompileFailed => c }
reporter.sendFailureReport(ci.options.sources)
bspTask.notifyFailure(compileFailed)
throw cause
},
packagedArtifact := {
val (hasModified, out) = compileIncremental.value
artifact.value -> out
},
artifact := artifactSetting.value,
artifactClassifier := Some("noresources"),
artifactPath := artifactPathSetting(artifact).value,
)
)

private val cachedCompileIncrementalTask = Def
.cachedTask {
val s = streams.value
val ci = (compile / compileInputs).value
val bspTask = (compile / bspCompileTask).value
// This is a cacheable version
val ci2 = (compile / compileInputs2).value
val ping = (TaskZero / earlyOutputPing).value
val setup: Setup = (TaskZero / compileIncSetup).value
val store = analysisStore
val c = fileConverter.value
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
val analysisResult = Retry(compileIncrementalTaskImpl(bspTask, s, ci, ping))
val analysisOut = c.toVirtualFile(setup.cachePath())
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
Def.declareOutput(analysisOut)
val dir = classDirectory.value
if (dir / "META-INF" / "MANIFEST.MF").exists then IO.delete(dir / "META-INF" / "MANIFEST.MF")
// inline mappings
val mappings = Path
.allSubpaths(dir)
.filter(_._1.isFile())
.map { case (p, path) =>
val vf = c.toVirtualFile(p.toPath())
(vf: HashedVirtualFileRef) -> path
}
.toSeq
// inlined to avoid caching mappings
val pkgConfig = Pkg.Configuration(mappings, artifactPath.value, packageOptions.value)
val out = Pkg(pkgConfig, c, s.log, Pkg.timeFromConfiguration(pkgConfig))
s.log.debug(s"wrote $out")
Def.declareOutput(out)
analysisResult.hasModified() -> (out: HashedVirtualFileRef)
}
.tag(Tags.Compile, Tags.CPU)

private val incCompiler = ZincUtil.defaultIncrementalCompiler
private[sbt] def compileJavaTask: Initialize[Task[CompileResult]] = Def.task {
@@ -2549,8 +2545,7 @@ object Defaults extends BuildCommon {
task: BspCompileTask,
s: TaskStreams,
ci: Inputs,
promise: PromiseWrap[Boolean],
reporter: BuildServerReporter,
promise: PromiseWrap[Boolean]
): CompileResult = {
lazy val x = s.text(ExportStream)
def onArgs(cs: Compilers) =
@@ -2565,16 +2560,12 @@ object Defaults extends BuildCommon {
val compilers: Compilers = ci.compilers
val setup: Setup = ci.setup
val i = ci.withCompilers(onArgs(compilers)).withSetup(onProgress(setup))
try
val result = incCompiler.compile(i, s.log)
reporter.sendSuccessReport(result.getAnalysis)
result
try incCompiler.compile(i, s.log)
catch
case e: Throwable =>
if !promise.isCompleted then
promise.failure(e)
ConcurrentRestrictions.cancelAllSentinels()
reporter.sendFailureReport(ci.options.sources)
throw e
finally x.close() // workaround for #937
}
@@ -2591,12 +2582,8 @@ object Defaults extends BuildCommon {
override def definesClass(classpathEntry: VirtualFile): DefinesClass =
cachedPerEntryDefinesClassLookup(classpathEntry)
val extra = extraIncOptions.value.map(t2)
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val eapath = earlyCompileAnalysisFile.value.toPath
val eaOpt =
if exportPipelining.value then
Some(MixedAnalyzingCompiler.staticCachedStore(eapath, !useBinary))
else None
val store = earlyAnalysisStore
val eaOpt = if exportPipelining.value then Some(store) else None
Setup.of(
lookup,
(compile / skip).value,
@@ -2672,6 +2659,8 @@ object Defaults extends BuildCommon {
javacOptions.value.toVector,
)
},
bspCompileTask :=
BspCompileTask.start(bspTargetIdentifier.value, thisProjectRef.value, configuration.value)
)
}

@@ -2698,8 +2687,7 @@ object Defaults extends BuildCommon {
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
previousCompile := {
val setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
val store = analysisStore
val prev = store.get().toOption match {
case Some(contents) =>
val analysis = Option(contents.getAnalysis).toOptional
@@ -2711,6 +2699,18 @@ object Defaults extends BuildCommon {
}
)

private inline def analysisStore: AnalysisStore = {
val setup = compileIncSetup.value
val useBinary = enableBinaryCompileAnalysis.value
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
}

private inline def earlyAnalysisStore: AnalysisStore = {
val earlyAnalysisPath = earlyCompileAnalysisFile.value.toPath
val useBinary = enableBinaryCompileAnalysis.value
MixedAnalyzingCompiler.staticCachedStore(earlyAnalysisPath, !useBinary)
}

def printWarningsTask: Initialize[Task[Unit]] =
Def.task {
val analysis = compile.value match { case a: Analysis => a }
@@ -4828,7 +4828,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
}
}
})
.value
.evaluated
) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value)
}

@@ -26,7 +26,7 @@ import sbt.internal.io.WatchState
import sbt.internal.librarymanagement.{ CompatibilityWarningOptions, IvySbt }
import sbt.internal.remotecache.RemoteCacheArtifact
import sbt.internal.server.BuildServerProtocol.BspFullWorkspace
import sbt.internal.server.{ BuildServerReporter, ServerHandler }
import sbt.internal.server.{ BspCompileTask, BuildServerReporter, ServerHandler }
import sbt.internal.util.{ AttributeKey, ProgressState, SourcePosition }
import sbt.internal.util.StringAttributeKey
import sbt.io._
@@ -44,6 +44,7 @@ import xsbti.compile.analysis.ReadStamps
import scala.concurrent.duration.{ Duration, FiniteDuration }
import scala.xml.{ NodeSeq, Node => XNode }


// format: off

object Keys {
@@ -438,6 +439,7 @@ object Keys {
val bspBuildTargetOutputPathsItem = taskKey[OutputPathsItem]("").withRank(DTask)
val bspBuildTargetCompile = inputKey[Unit]("").withRank(DTask)
val bspBuildTargetCompileItem = taskKey[Int]("").withRank(DTask)
@cacheLevel(include = Array.empty) private[sbt] val bspCompileTask = taskKey[BspCompileTask]("").withRank(DTask)
val bspBuildTargetTest = inputKey[Unit]("Corresponds to buildTarget/test request").withRank(DTask)
val bspBuildTargetRun = inputKey[Unit]("Corresponds to buildTarget/run request").withRank(DTask)
val bspBuildTargetCleanCache = inputKey[Unit]("Corresponds to buildTarget/cleanCache request").withRank(DTask)
@@ -73,8 +73,8 @@ object RemoteCache {
Def._outputDirectory = Some(outDir)
val caches = s.get(BasicKeys.cacheStores)
caches match
case Some(xs) => Def._cacheStore = AggregateActionCacheStore(xs)
case None =>
case Some(xs) if xs.nonEmpty => Def._cacheStore = AggregateActionCacheStore(xs)
case _ =>
val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath()
Def._cacheStore = DiskActionCacheStore(tempDiskCache)

@@ -103,7 +103,7 @@ object Act {
keyMap: Map[String, AttributeKey[_]],
data: Settings[Scope]
): Parser[ParsedKey] =
scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices =>
scopedKeyFull(index, current, defaultConfigs, keyMap).flatMap { choices =>
select(choices, data)(showRelativeKey2(current))
}

@@ -355,7 +355,7 @@ object Act {
val normKeys = taskKeys(_.label)
val valid = allKnown ++ normKeys
val suggested = normKeys.map(_._1).toSet
val keyP = filterStrings(examples(ID, suggested, "key"), valid.keySet, "key") map valid
val keyP = filterStrings(examples(ID, suggested, "key"), valid.keySet, "key").map(valid)

((token(
value(keyP).map(_ -> slashSeq)
@@ -515,7 +515,7 @@ object Act {
}
}
action match {
case SingleAction => akp flatMap evaluate
case SingleAction => akp.flatMap(evaluate)
case ShowAction | PrintAction | MultiAction =>
rep1sep(akp, token(Space)) flatMap { pairs =>
val flat: mutable.ListBuffer[(ScopedKey[_], Seq[String])] = mutable.ListBuffer.empty
@@ -9,57 +9,23 @@ package sbt.internal.server

import sbt._
import sbt.internal.bsp._
import sbt.internal.io.Retry
import sbt.internal.server.BspCompileTask.{ compileReport, exchange }
import sbt.librarymanagement.Configuration
import sjsonnew.support.scalajson.unsafe.Converter
import xsbti.compile.CompileResult
import xsbti.compile.CompileAnalysis
import xsbti.{ CompileFailed, Problem, Severity }

import scala.util.control.NonFatal

object BspCompileTask {
private lazy val exchange = StandardMain.exchange

def compute(targetId: BuildTargetIdentifier, project: ProjectRef, config: Configuration)(
compile: BspCompileTask => CompileResult
): CompileResult = {
val task = BspCompileTask(targetId, project, config)
try {
task.notifyStart()
val result = Retry(compile(task))
task.notifySuccess(result)
result
} catch {
case NonFatal(cause) =>
val compileFailed = cause match {
case failed: CompileFailed => Some(failed)
case _ => None
}
task.notifyFailure(compileFailed)
throw cause
}
}

private def apply(
def start(
targetId: BuildTargetIdentifier,
project: ProjectRef,
config: Configuration
): BspCompileTask = {
val taskId = TaskId(BuildServerTasks.uniqueId, Vector())
val targetName = BuildTargetName.fromScope(project.project, config.name)
BspCompileTask(targetId, targetName, taskId, System.currentTimeMillis())
}

private def compileReport(
problems: Seq[Problem],
targetId: BuildTargetIdentifier,
elapsedTimeMillis: Long
): CompileReport = {
val countBySeverity = problems.groupBy(_.severity).view.mapValues(_.size)
val warnings = countBySeverity.getOrElse(Severity.Warn, 0)
val errors = countBySeverity.getOrElse(Severity.Error, 0)
CompileReport(targetId, None, errors, warnings, Some(elapsedTimeMillis.toInt))
val task = BspCompileTask(targetId, targetName, taskId, System.currentTimeMillis())
task.notifyStart()
task
}
}

@@ -75,16 +41,16 @@ case class BspCompileTask private (
val message = s"Compiling $targetName"
val data = Converter.toJsonUnsafe(CompileTask(targetId))
val params = TaskStartParams(id, startTimeMillis, message, "compile-task", data)
exchange.notifyEvent("build/taskStart", params)
StandardMain.exchange.notifyEvent("build/taskStart", params)
}

private[sbt] def notifySuccess(result: CompileResult): Unit = {
private[sbt] def notifySuccess(analysis: CompileAnalysis): Unit = {
import scala.jdk.CollectionConverters.*
val endTimeMillis = System.currentTimeMillis()
val elapsedTimeMillis = endTimeMillis - startTimeMillis
val sourceInfos = result.analysis().readSourceInfos().getAllSourceInfos.asScala
val sourceInfos = analysis.readSourceInfos().getAllSourceInfos.asScala
val problems = sourceInfos.values.flatMap(_.getReportedProblems).toSeq
val report = compileReport(problems, targetId, elapsedTimeMillis)
val report = compileReport(problems, elapsedTimeMillis)
val params = TaskFinishParams(
id,
endTimeMillis,
@@ -93,7 +59,7 @@ case class BspCompileTask private (
"compile-report",
Converter.toJsonUnsafe(report)
)
exchange.notifyEvent("build/taskFinish", params)
StandardMain.exchange.notifyEvent("build/taskFinish", params)
}

private[sbt] def notifyProgress(percentage: Int, total: Int): Unit = {
@@ -110,14 +76,14 @@ case class BspCompileTask private (
Some("compile-progress"),
Some(data)
)
exchange.notifyEvent("build/taskProgress", params)
StandardMain.exchange.notifyEvent("build/taskProgress", params)
}

private[sbt] def notifyFailure(cause: Option[CompileFailed]): Unit = {
val endTimeMillis = System.currentTimeMillis()
val elapsedTimeMillis = endTimeMillis - startTimeMillis
val problems = cause.map(_.problems().toSeq).getOrElse(Seq.empty[Problem])
val report = compileReport(problems, targetId, elapsedTimeMillis)
val report = compileReport(problems, elapsedTimeMillis)
val params = TaskFinishParams(
id,
endTimeMillis,
@@ -126,6 +92,13 @@ case class BspCompileTask private (
"compile-report",
Converter.toJsonUnsafe(report)
)
exchange.notifyEvent("build/taskFinish", params)
StandardMain.exchange.notifyEvent("build/taskFinish", params)
}

private def compileReport(problems: Seq[Problem], elapsedTimeMillis: Long): CompileReport = {
val countBySeverity = problems.groupBy(_.severity).view.mapValues(_.size)
val warnings = countBySeverity.getOrElse(Severity.Warn, 0)
val errors = countBySeverity.getOrElse(Severity.Error, 0)
CompileReport(targetId, None, errors, warnings, Some(elapsedTimeMillis.toInt))
}
}
@@ -38,7 +38,6 @@ import java.io.File
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable

// import scala.annotation.nowarn
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }
import scala.annotation.nowarn
@@ -62,14 +61,10 @@ object BuildServerProtocol {

private val bspReload = "bspReload"

private lazy val targetIdentifierParser: Parser[Seq[BuildTargetIdentifier]] =
private val targetIdentifierParser: Parser[Seq[BuildTargetIdentifier]] =
Def
.spaceDelimited()
.map { xs =>
xs.map { uri =>
BuildTargetIdentifier(URI.create(uri))
}
}
.map(xs => xs.map(uri => BuildTargetIdentifier(URI.create(uri))))

lazy val commands: Seq[Command] = Seq(
Command.single(bspReload) { (state, reqId) =>
@@ -103,7 +98,7 @@ object BuildServerProtocol {
bspSbtEnabled := true,
bspFullWorkspace := bspFullWorkspaceSetting.value,
bspWorkspace := bspFullWorkspace.value.scopes,
bspWorkspaceBuildTargets := (Def
bspWorkspaceBuildTargets := Def
.task {
val workspace = Keys.bspFullWorkspace.value
val state = Keys.state.value
@@ -121,186 +116,137 @@ object BuildServerProtocol {
state.respondEvent(WorkspaceBuildTargetsResult(successfulBuildTargets.toVector))
successfulBuildTargets
}
})
}
.value,
// https://github.com/build-server-protocol/build-server-protocol/blob/master/docs/specification.md#build-target-sources-request
bspBuildTargetSources := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
// val targets = spaceDelimited().parsed.map(uri => BuildTargetIdentifier(URI.create(uri)))
val workspace = bspFullWorkspace.value.filter(targets)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
// run the worker task concurrently
Def.task {
val items = bspBuildTargetSourcesItem.result.all(filter).value
val buildItems = workspace.builds.map { case (id, loadedBuildUnit) =>
val base = loadedBuildUnit.localBase
val sbtFiles = configurationSources(base)
val pluginData = loadedBuildUnit.unit.plugins.pluginData
val dirs = pluginData.unmanagedSourceDirectories
val sourceFiles = getStandaloneSourceFiles(pluginData.unmanagedSources, dirs)
val managedDirs = pluginData.managedSourceDirectories
val managedSourceFiles =
getStandaloneSourceFiles(pluginData.managedSources, managedDirs)
val items =
dirs.map(toSourceItem(SourceItemKind.Directory, generated = false)) ++
sourceFiles.map(toSourceItem(SourceItemKind.File, generated = false)) ++
managedDirs.map(toSourceItem(SourceItemKind.Directory, generated = true)) ++
managedSourceFiles.map(toSourceItem(SourceItemKind.File, generated = true)) ++
sbtFiles.map(toSourceItem(SourceItemKind.File, generated = false))
Result.Value(SourcesItem(id, items.toVector))
}
val successfulItems = anyOrThrow(items ++ buildItems)
val result = SourcesResult(successfulItems.toVector)
s.respondEvent(result)
}
})
.value,
bspBuildTargetSources := bspInputTask { (workspace, filter) =>
val items = bspBuildTargetSourcesItem.result.all(filter).value
val buildItems = workspace.builds.map { case (id, loadedBuildUnit) =>
val base = loadedBuildUnit.localBase
val sbtFiles = configurationSources(base)
val pluginData = loadedBuildUnit.unit.plugins.pluginData
val dirs = pluginData.unmanagedSourceDirectories
val sourceFiles = getStandaloneSourceFiles(pluginData.unmanagedSources, dirs)
val managedDirs = pluginData.managedSourceDirectories
val managedSourceFiles =
getStandaloneSourceFiles(pluginData.managedSources, managedDirs)
val items =
dirs.map(toSourceItem(SourceItemKind.Directory, generated = false)) ++
sourceFiles.map(toSourceItem(SourceItemKind.File, generated = false)) ++
managedDirs.map(toSourceItem(SourceItemKind.Directory, generated = true)) ++
managedSourceFiles.map(toSourceItem(SourceItemKind.File, generated = true)) ++
sbtFiles.map(toSourceItem(SourceItemKind.File, generated = false))
Result.Value(SourcesItem(id, items.toVector))
}
val successfulItems = anyOrThrow(items ++ buildItems)
val result = SourcesResult(successfulItems.toVector)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetSources / aggregate := false,
bspBuildTargetResources := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
val workspace = bspFullWorkspace.value.filter(targets)
workspace.warnIfBuildsNonEmpty(Method.Resources, s.log)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
// run the worker task concurrently
Def.task {
val items = bspBuildTargetResourcesItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = ResourcesResult(successfulItems.toVector)
s.respondEvent(result)
}
})
.value,
bspBuildTargetResources := bspInputTask { (_, filter) =>
val items = bspBuildTargetResourcesItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = ResourcesResult(successfulItems.toVector)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetResources / aggregate := false,
bspBuildTargetDependencySources := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
val workspace = bspFullWorkspace.value.filter(targets)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
// run the worker task concurrently
Def.task {
import sbt.internal.bsp.codec.JsonProtocol._
val items = bspBuildTargetDependencySourcesItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = DependencySourcesResult(successfulItems.toVector)
s.respondEvent(result)
}
})
.value,
bspBuildTargetDependencySources := bspInputTask { (_, filter) =>
val items = bspBuildTargetDependencySourcesItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = DependencySourcesResult(successfulItems.toVector)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetDependencySources / aggregate := false,
bspBuildTargetCompile := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s: State = state.value
val workspace = bspFullWorkspace.value.filter(targets)
workspace.warnIfBuildsNonEmpty(Method.Compile, s.log)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
Def.task {
val statusCodes = Keys.bspBuildTargetCompileItem.result.all(filter).value
val aggregatedStatusCode = allOrThrow(statusCodes) match {
case Seq() => StatusCode.Success
case codes => codes.max
}
s.respondEvent(BspCompileResult(None, aggregatedStatusCode))
}
})
.value,
bspBuildTargetCompile := bspInputTask { (workspace, filter) =>
val s = state.value
workspace.warnIfBuildsNonEmpty(Method.Compile, s.log)
val statusCodes = Keys.bspBuildTargetCompileItem.result.all(filter).value
val aggregatedStatusCode = allOrThrow(statusCodes) match {
case Seq() => StatusCode.Success
case codes => codes.max
}
s.respondEvent(BspCompileResult(None, aggregatedStatusCode))
}.evaluated,
bspBuildTargetOutputPaths := bspInputTask { (_, filter) =>
val items = bspBuildTargetOutputPathsItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = OutputPathsResult(successfulItems.toVector)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetOutputPaths / aggregate := false,
bspBuildTargetCompile / aggregate := false,
bspBuildTargetTest := bspTestTask.evaluated,
bspBuildTargetTest / aggregate := false,
bspBuildTargetCleanCache := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s: State = state.value
val workspace = bspFullWorkspace.value.filter(targets)
workspace.warnIfBuildsNonEmpty(Method.CleanCache, s.log)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
Def.task {
val results = Keys.clean.result.all(filter).value
val successes = anyOrThrow(results).size
bspBuildTargetCleanCache := bspInputTask { (workspace, filter) =>
val s = state.value
workspace.warnIfBuildsNonEmpty(Method.CleanCache, s.log)
val results = Keys.clean.result.all(filter).value
val successes = anyOrThrow(results).size

// When asking to Rebuild Project, IntelliJ sends the root build as an additional target, however it is
// not returned as part of the results. In this case, there's 1 build entry in the workspace, and we're
// checking that the executed results plus this entry is equal to the total number of targets.
// When rebuilding a single module, the root build isn't sent, just the requested targets.
val cleaned = successes + workspace.builds.size == targets.size
s.respondEvent(CleanCacheResult(None, cleaned))
}
})
.value,
// When asking to rebuild Project, IntelliJ sends the root build as an additional target,
// however it is not returned as part of the results. We're checking that the number of
// results equals the number of scopes (not the root build).
// When rebuilding a single module, the root build isn't sent, just the requested targets.
val cleaned = successes == workspace.scopes.size
s.respondEvent(CleanCacheResult(None, cleaned))
}.evaluated,
bspBuildTargetCleanCache / aggregate := false,
bspBuildTargetScalacOptions := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
val workspace = bspFullWorkspace.value.filter(targets)
val builds = workspace.builds

val filter = ScopeFilter.in(workspace.scopes.values.toList)
Def.task {
val items = bspBuildTargetScalacOptionsItem.result.all(filter).value
val appProvider = appConfiguration.value.provider()
val sbtJars = appProvider.mainClasspath()
val buildItems = builds.map { build =>
val plugins: LoadedPlugins = build._2.unit.plugins
val scalacOptions = plugins.pluginData.scalacOptions
val pluginClasspath = plugins.classpath
val converter = plugins.pluginData.converter
val classpath =
pluginClasspath.map(converter.toPath).map(_.toFile).map(_.toURI).toVector ++
(sbtJars).map(_.toURI).toVector
val item = ScalacOptionsItem(
build._1,
scalacOptions.toVector,
classpath,
new File(build._2.localBase, "project/target").toURI
)
Result.Value(item)
}
val successfulItems = anyOrThrow(items ++ buildItems)
val result = ScalacOptionsResult(successfulItems.toVector)
s.respondEvent(result)
}
})
.value,
bspBuildTargetScalacOptions := bspInputTask { (workspace, filter) =>
val items = bspBuildTargetScalacOptionsItem.result.all(filter).value
val appProvider = appConfiguration.value.provider()
val sbtJars = appProvider.mainClasspath()
val buildItems = workspace.builds.map { build =>
val plugins: LoadedPlugins = build._2.unit.plugins
val scalacOptions = plugins.pluginData.scalacOptions
val pluginClasspath = plugins.classpath
val converter = plugins.pluginData.converter
val classpath =
pluginClasspath.map(converter.toPath).map(_.toFile).map(_.toURI).toVector ++
(sbtJars).map(_.toURI).toVector
val item = ScalacOptionsItem(
build._1,
scalacOptions.toVector,
classpath,
new File(build._2.localBase, "project/target").toURI
)
Result.Value(item)
}
val successfulItems = anyOrThrow(items ++ buildItems)
val result = ScalacOptionsResult(successfulItems.toVector)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetScalacOptions / aggregate := false,
bspScalaTestClasses := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
val workspace = bspFullWorkspace.value.filter(targets)
workspace.warnIfBuildsNonEmpty(Method.ScalaTestClasses, s.log)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
Def.task {
val items = bspScalaTestClassesItem.result.all(filter).value
val successfulItems = anyOrThrow[Seq[ScalaTestClassesItem]](items).flatten
val result = ScalaTestClassesResult(
items = successfulItems.toVector,
originId = None: Option[String]
)
s.respondEvent(result)
}
})
.value,
bspScalaMainClasses := (Def
.input((s: State) => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
val workspace = bspFullWorkspace.value.filter(targets)
workspace.warnIfBuildsNonEmpty(Method.ScalaMainClasses, s.log)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
Def.task {
val items = bspScalaMainClassesItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = ScalaMainClassesResult(successfulItems.toVector, None)
s.respondEvent(result)
}
})
.value,
bspBuildTargetJVMRunEnvironment := bspInputTask { (_, filter) =>
val items = bspBuildTargetJvmEnvironmentItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = JvmRunEnvironmentResult(successfulItems.toVector, None)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetJVMRunEnvironment / aggregate := false,
bspBuildTargetJVMTestEnvironment := bspInputTask { (_, filter) =>
val items = bspBuildTargetJvmEnvironmentItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = JvmTestEnvironmentResult(successfulItems.toVector, None)
state.value.respondEvent(result)
}.evaluated,
bspBuildTargetJVMTestEnvironment / aggregate := false,
bspScalaTestClasses := bspInputTask { (workspace, filter) =>
val s = state.value
val items = bspScalaTestClassesItem.result.all(filter).value
workspace.warnIfBuildsNonEmpty(Method.ScalaTestClasses, s.log)
val successfulItems = anyOrThrow[Seq[ScalaTestClassesItem]](items).flatten
val result = ScalaTestClassesResult(
items = successfulItems.toVector,
originId = None: Option[String]
)
s.respondEvent(result)
}.evaluated,
bspScalaMainClasses := bspInputTask { (_, filter) =>
val items = bspScalaMainClassesItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = ScalaMainClassesResult(successfulItems.toVector, None)
state.value.respondEvent(result)
}.evaluated,
bspScalaMainClasses / aggregate := false
)

@@ -344,22 +290,6 @@ object BuildServerProtocol {
bspBuildTargetCompileItem := bspCompileTask.value,
bspBuildTargetRun := bspRunTask.evaluated,
bspBuildTargetScalacOptionsItem := scalacOptionsTask.value,
bspBuildTargetJVMRunEnvironment := bspInputTask { (state, _, _, filter) =>
Def.task {
val items = bspBuildTargetJvmEnvironmentItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = JvmRunEnvironmentResult(successfulItems.toVector, None)
state.respondEvent(result)
}
}.evaluated,
bspBuildTargetJVMTestEnvironment := bspInputTask { (state, _, _, filter) =>
Def.task {
val items = bspBuildTargetJvmEnvironmentItem.result.all(filter).value
val successfulItems = anyOrThrow(items)
val result = JvmTestEnvironmentResult(successfulItems.toVector, None)
state.respondEvent(result)
}
}.evaluated,
bspBuildTargetJvmEnvironmentItem := jvmEnvironmentItem().value,
bspInternalDependencyConfigurations := internalDependencyConfigurationsSetting.value,
bspScalaTestClassesItem := scalaTestClassesTask.value,
@@ -755,21 +685,16 @@ object BuildServerProtocol {
)
}

private def bspInputTask[T](
taskImpl: (
State,
Seq[BuildTargetIdentifier],
BspFullWorkspace,
ScopeFilter
) => Def.Initialize[Task[T]]
private inline def bspInputTask[T](
inline taskImpl: (BspFullWorkspace, ScopeFilter) => T
): Def.Initialize[InputTask[T]] =
Def
.input((s: State) => targetIdentifierParser)
.input(_ => targetIdentifierParser)
.flatMapTask { targets =>
val s = state.value
val workspace: BspFullWorkspace = bspFullWorkspace.value.filter(targets)
val filter = ScopeFilter.in(workspace.scopes.values.toList)
taskImpl(s, targets, workspace, filter)
Def.task(taskImpl(workspace, filter))
}

private def jvmEnvironmentItem(): Initialize[Task[JvmEnvironmentItem]] = Def.task {
@@ -39,9 +39,7 @@ sealed trait BuildServerReporter extends Reporter {

protected def publishDiagnostic(problem: Problem): Unit

def sendSuccessReport(
analysis: CompileAnalysis,
): Unit
def sendSuccessReport(analysis: CompileAnalysis): Unit

def sendFailureReport(sources: Array[VirtualFile]): Unit

@@ -1,6 +1,7 @@
ThisBuild / scalaVersion := "2.13.8"

Global / serverLog / logLevel := Level.Debug
Global / cacheStores := Seq.empty

lazy val runAndTest = project.in(file("run-and-test"))
.settings(
@@ -7,27 +7,27 @@

package testpkg

import sbt.internal.bsp._
import sbt.internal.bsp.*
import sbt.internal.langserver.ErrorCodes
import sbt.IO
import sbt.internal.protocol.JsonRpcRequestMessage
import sbt.internal.protocol.codec.JsonRPCProtocol._
import sbt.internal.protocol.codec.JsonRPCProtocol.*
import sjsonnew.JsonWriter
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }

import java.io.File
import java.net.URI
import java.nio.file.Paths
import scala.concurrent.duration._
import java.nio.file.Files
import scala.concurrent.duration.*

// starts svr using server-test/buildserver and perform custom server tests
object BuildServerTest extends AbstractServerTest {
class BuildServerTest extends AbstractServerTest {

import sbt.internal.bsp.codec.JsonProtocol._

override val testDirectory: String = "buildserver"

test("build/initialize") { _ =>
test("build/initialize") {
initializeRequest()
assert(svr.waitForString(10.seconds) { s =>
(s contains """"id":"8"""") &&
@@ -36,7 +36,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("workspace/buildTargets") { _ =>
test("workspace/buildTargets") {
svr.sendJsonRpc(
"""{ "jsonrpc": "2.0", "id": "16", "method": "workspace/buildTargets", "params": {} }"""
)
@@ -50,7 +50,7 @@ object BuildServerTest extends AbstractServerTest {
assert(!result.targets.exists(_.displayName.contains("badBuildTarget")))
}

test("buildTarget/sources") { _ =>
test("buildTarget/sources") {
val buildTarget = buildTargetUri("util", "Compile")
val badBuildTarget = buildTargetUri("badBuildTarget", "Compile")
svr.sendJsonRpc(buildTargetSources(24, Seq(buildTarget, badBuildTarget)))
@@ -59,7 +59,7 @@ object BuildServerTest extends AbstractServerTest {
val sources = s.items.head.sources.map(_.uri)
assert(sources.contains(new File(svr.baseDirectory, "util/src/main/scala").toURI))
}
test("buildTarget/sources: base sources") { _ =>
test("buildTarget/sources: base sources") {
val buildTarget = buildTargetUri("buildserver", "Compile")
svr.sendJsonRpc(buildTargetSources(25, Seq(buildTarget)))
assert(processing("buildTarget/sources"))
@@ -73,7 +73,7 @@ object BuildServerTest extends AbstractServerTest {
assert(sources.contains(expectedSource))
}

test("buildTarget/sources: sbt") { _ =>
test("buildTarget/sources: sbt") {
val x = new URI(s"${svr.baseDirectory.getAbsoluteFile.toURI}#buildserver-build")
svr.sendJsonRpc(buildTargetSources(26, Seq(x)))
assert(processing("buildTarget/sources"))
@@ -83,16 +83,15 @@ object BuildServerTest extends AbstractServerTest {
"build.sbt",
"project/A.scala",
"project/src/main/java",
"project/src/main/scala-2",
"project/src/main/scala-2.12",
"project/src/main/scala-sbt-1.0",
"project/src/main/scala-3",
s"project/src/main/scala-sbt-${sbtVersion}",
"project/src/main/scala/",
"project/target/scala-2.12/sbt-1.0/src_managed/main"
"target/out/jvm/scala-3.3.1/buildserver-build/src_managed/main"
).map(rel => new File(svr.baseDirectory.getAbsoluteFile, rel).toURI).sorted
assert(sources == expectedSources)
}

test("buildTarget/compile") { _ =>
test("buildTarget/compile") {
val buildTarget = buildTargetUri("util", "Compile")

compile(buildTarget, id = 32)
@@ -102,33 +101,31 @@ object BuildServerTest extends AbstractServerTest {
assert(res.statusCode == StatusCode.Success)
}

test("buildTarget/compile - reports compilation progress") { _ =>
test("buildTarget/compile - reports compilation progress") {
val buildTarget = buildTargetUri("runAndTest", "Compile")

compile(buildTarget, id = 33)

// This doesn't always come back in 10s on CI.
assert(svr.waitForString(60.seconds) { s =>
assert(svr.waitForString(20.seconds) { s =>
s.contains("build/taskStart") &&
s.contains(""""message":"Compiling runAndTest"""")
})

assert(svr.waitForString(60.seconds) { s =>
assert(svr.waitForString(20.seconds) { s =>
s.contains("build/taskProgress") &&
s.contains(""""message":"Compiling runAndTest (15%)"""")
})

assert(svr.waitForString(60.seconds) { s =>
assert(svr.waitForString(20.seconds) { s =>
s.contains("build/taskProgress") &&
s.contains(""""message":"Compiling runAndTest (100%)"""")
})

assert(svr.waitForString(60.seconds) { s =>
assert(svr.waitForString(20.seconds) { s =>
s.contains("build/publishDiagnostics")
s.contains(""""diagnostics":[]""")
})

assert(svr.waitForString(60.seconds) { s =>
assert(svr.waitForString(20.seconds) { s =>
s.contains("build/taskFinish") &&
s.contains(""""message":"Compiled runAndTest"""")
})
@@ -136,7 +133,7 @@ object BuildServerTest extends AbstractServerTest {

test(
"buildTarget/compile [diagnostics] don't publish unnecessary for successful compilation case"
) { _ =>
) {
val buildTarget = buildTargetUri("diagnostics", "Compile")
val mainFile = new File(svr.baseDirectory, "diagnostics/src/main/scala/Diagnostics.scala")

@@ -199,7 +196,7 @@ object BuildServerTest extends AbstractServerTest {
)
}

test("buildTarget/compile [diagnostics] clear stale warnings") { _ =>
test("buildTarget/compile [diagnostics] clear stale warnings") {
val buildTarget = buildTargetUri("diagnostics", "Compile")
val testFile = new File(svr.baseDirectory, s"diagnostics/src/main/scala/PatternMatch.scala")

@@ -240,7 +237,7 @@ object BuildServerTest extends AbstractServerTest {

}

test("buildTarget/scalacOptions") { _ =>
test("buildTarget/scalacOptions") {
val buildTarget = buildTargetUri("util", "Compile")
val badBuildTarget = buildTargetUri("badBuildTarget", "Compile")
svr.sendJsonRpc(
@@ -255,20 +252,14 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/cleanCache") { _ =>
def targetDir =
Paths
.get(
svr.baseDirectory.getAbsoluteFile.toString,
"run-and-test/target/scala-2.13/classes/main"
)
.toFile

test("buildTarget/cleanCache") {
def classFile = svr.baseDirectory.toPath.resolve(
"target/out/jvm/scala-2.13.8/runandtest/classes/main/Main.class"
)
val buildTarget = buildTargetUri("runAndTest", "Compile")
compile(buildTarget, id = 43)
svr.waitFor[BspCompileResult](10.seconds)
assert(targetDir.list().contains("Main.class"))

assert(Files.exists(classFile))
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": "44", "method": "buildTarget/cleanCache", "params": {
| "targets": [{ "uri": "$buildTarget" }]
@@ -277,10 +268,10 @@ object BuildServerTest extends AbstractServerTest {
assert(processing("buildTarget/cleanCache"))
val res = svr.waitFor[CleanCacheResult](10.seconds)
assert(res.cleaned)
assert(targetDir.list().isEmpty)
assert(Files.notExists(classFile))
}

test("buildTarget/cleanCache: rebuild project") { _ =>
test("buildTarget/cleanCache: rebuild project") {
svr.sendJsonRpc(
"""{ "jsonrpc": "2.0", "id": "45", "method": "workspace/buildTargets", "params": {} }"""
)
@@ -300,7 +291,7 @@ object BuildServerTest extends AbstractServerTest {
assert(res.cleaned)
}

test("workspace/reload") { _ =>
test("workspace/reload") {
svr.sendJsonRpc(
"""{ "jsonrpc": "2.0", "id": "48", "method": "workspace/reload"}"""
)
@@ -311,23 +302,22 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("workspace/reload: send diagnostic and respond with error") { _ =>
test("workspace/reload: send diagnostic and respond with error") {
// write an other-build.sbt file that does not compile
val otherBuildFile = new File(svr.baseDirectory, "other-build.sbt")
IO.write(
val otherBuildFile = svr.baseDirectory.toPath.resolve("other-build.sbt")
Files.writeString(
otherBuildFile,
"""
|val someSettings = Seq(
| scalacOptions ++= "-deprecation"
|)
|""".stripMargin
"""|val someSettings = Seq(
| scalacOptions ++= "-deprecation"
|)
|""".stripMargin
)
// reload
reloadWorkspace(id = 52)
assert(
svr.waitForString(10.seconds) { s =>
s.contains(s""""buildTarget":{"uri":"$metaBuildTarget"}""") &&
s.contains(s""""textDocument":{"uri":"${otherBuildFile.toPath.toUri}"}""") &&
s.contains(s""""textDocument":{"uri":"${otherBuildFile.toUri}"}""") &&
s.contains(""""severity":1""") &&
s.contains(""""reset":true""")
}
@@ -337,32 +327,31 @@ object BuildServerTest extends AbstractServerTest {
s.contains(""""id":"52"""") &&
s.contains(""""error"""") &&
s.contains(s""""code":${ErrorCodes.InternalError}""") &&
s.contains("Type error in expression")
s.contains("No Append.Values[Seq[String], String] found")
}
)
// fix the other-build.sbt file and reload again
IO.write(
Files.writeString(
otherBuildFile,
"""
|val someSettings = Seq(
| scalacOptions += "-deprecation"
|)
|""".stripMargin
"""|val someSettings = Seq(
| scalacOptions += "-deprecation"
|)
|""".stripMargin
)
reloadWorkspace(id = 52)
// assert received an empty diagnostic
assert(
svr.waitForString(10.seconds) { s =>
s.contains(s""""buildTarget":{"uri":"$metaBuildTarget"}""") &&
s.contains(s""""textDocument":{"uri":"${otherBuildFile.toPath.toUri}"}""") &&
s.contains(s""""textDocument":{"uri":"${otherBuildFile.toUri}"}""") &&
s.contains(""""diagnostics":[]""") &&
s.contains(""""reset":true""")
}
)
IO.delete(otherBuildFile)
Files.delete(otherBuildFile)
}

test("buildTarget/scalaMainClasses") { _ =>
test("buildTarget/scalaMainClasses") {
val buildTarget = buildTargetUri("runAndTest", "Compile")
val badBuildTarget = buildTargetUri("badBuildTarget", "Compile")
svr.sendJsonRpc(
@@ -377,7 +366,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/run") { _ =>
test("buildTarget/run") {
val buildTarget = buildTargetUri("runAndTest", "Compile")
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": "64", "method": "buildTarget/run", "params": {
@@ -397,7 +386,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/jvmRunEnvironment") { _ =>
test("buildTarget/jvmRunEnvironment") {
val buildTarget = buildTargetUri("runAndTest", "Compile")
svr.sendJsonRpc(
s"""|{ "jsonrpc": "2.0",
@@ -418,7 +407,7 @@ object BuildServerTest extends AbstractServerTest {
}
}

test("buildTarget/jvmTestEnvironment") { _ =>
test("buildTarget/jvmTestEnvironment") {
val buildTarget = buildTargetUri("runAndTest", "Test")
svr.sendJsonRpc(
s"""|{ "jsonrpc": "2.0",
@@ -441,7 +430,7 @@ object BuildServerTest extends AbstractServerTest {
}
}

test("buildTarget/scalaTestClasses") { _ =>
test("buildTarget/scalaTestClasses") {
val buildTarget = buildTargetUri("runAndTest", "Test")
val badBuildTarget = buildTargetUri("badBuildTarget", "Test")
svr.sendJsonRpc(
@@ -458,7 +447,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/test: run all tests") { _ =>
test("buildTarget/test: run all tests") {
val buildTarget = buildTargetUri("runAndTest", "Test")
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": "80", "method": "buildTarget/test", "params": {
@@ -472,7 +461,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/test: run one test class") { _ =>
test("buildTarget/test: run one test class") {
val buildTarget = buildTargetUri("runAndTest", "Test")
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": "84", "method": "buildTarget/test", "params": {
@@ -495,7 +484,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/compile: report error") { _ =>
test("buildTarget/compile: report error") {
val buildTarget = buildTargetUri("reportError", "Compile")
compile(buildTarget, id = 88)
assert(svr.waitForString(10.seconds) { s =>
@@ -505,7 +494,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/compile: report warning") { _ =>
test("buildTarget/compile: report warning") {
val buildTarget = buildTargetUri("reportWarning", "Compile")
compile(buildTarget, id = 90)
assert(svr.waitForString(10.seconds) { s =>
@@ -515,7 +504,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/compile: respond error") { _ =>
test("buildTarget/compile: respond error") {
val buildTarget = buildTargetUri("respondError", "Compile")
compile(buildTarget, id = 92)
assert(svr.waitForString(10.seconds) { s =>
@@ -526,7 +515,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/resources") { _ =>
test("buildTarget/resources") {
val buildTarget = buildTargetUri("util", "Compile")
val badBuildTarget = buildTargetUri("badBuildTarget", "Compile")
svr.sendJsonRpc(
@@ -540,7 +529,7 @@ object BuildServerTest extends AbstractServerTest {
})
}

test("buildTarget/outputPaths") { _ =>
test("buildTarget/outputPaths") {
val buildTarget = buildTargetUri("util", "Compile")
val badBuildTarget = buildTargetUri("badBuildTarget", "Compile")
svr.sendJsonRpc(
@@ -32,6 +32,10 @@ trait AbstractServerTest extends AnyFunSuite with BeforeAndAfterAll {
def testDirectory: String
def testPath: Path = temp.toPath.resolve(testDirectory)

def sbtVersion = sys.props
.get("sbt.server.version")
.getOrElse(throw new IllegalStateException("No server version was specified."))

private val targetDir: File = {
val p0 = new File("..").getAbsoluteFile.getCanonicalFile / "target"
val p1 = new File("target").getAbsoluteFile