mirror of https://github.com/sbt/sbt.git
Work around Scala parallel collections situation
Ref https://github.com/scala/scala-parallel-collections/issues/22 — the parallel collections were split off without a source-compatible library, so apparently we need to roll our own compat hack. The hack causes an "import not used" warning, so it needs to be paired with the silencer compiler plugin.
This commit is contained in:
parent cb4c1e7100
commit f8c158291d
Changed file: build.sbt (12 changed lines)
@@ -105,6 +105,10 @@ def commonBaseSettings: Seq[Setting[_]] = Def.settings(
   crossScalaVersions := Seq(baseScalaVersion),
   publishArtifact in Test := false,
   fork in run := true,
+  libraryDependencies ++= {
+    if (autoScalaLibrary.value) List(silencerLib)
+    else Nil
+  },
 )
 def commonSettings: Seq[Setting[_]] =
   commonBaseSettings :+
@@ -324,6 +328,10 @@ val logicProj = (project in file("internal") / "util-logic")
   testedBaseSettings,
   name := "Logic",
   mimaSettings,
+  libraryDependencies ++= (scalaVersion.value match {
+    case v if v.startsWith("2.12.") => List(compilerPlugin(silencerPlugin))
+    case _ => List()
+  }),
 )

 // defines Java structures used across Scala versions, such as the API structures and relationships extracted by
@@ -613,6 +621,10 @@ lazy val scriptedSbtReduxProj = (project in file("scripted-sbt-redux"))
   baseSettings,
   name := "Scripted sbt Redux",
   libraryDependencies ++= Seq(launcherInterface % "provided"),
+  libraryDependencies ++= (scalaVersion.value match {
+    case v if v.startsWith("2.12.") => List(compilerPlugin(silencerPlugin))
+    case _ => List()
+  }),
   mimaSettings,
   scriptedSbtReduxMimaSettings,
 )
@@ -0,0 +1,23 @@
+/*
+ * sbt
+ * Copyright 2011 - 2018, Lightbend, Inc.
+ * Copyright 2008 - 2010, Mark Harrah
+ * Licensed under Apache License 2.0 (see LICENSE)
+ */
+
+package sbt.internal
+
+// https://github.com/scala/scala-parallel-collections/issues/22
+private[sbt] object CompatParColls {
+  @com.github.ghik.silencer.silent
+  val Converters = {
+    import Compat._
+    {
+      import scala.collection.parallel._
+      CollectionConverters
+    }
+  }
+  object Compat {
+    object CollectionConverters
+  }
+}
@@ -17,12 +17,16 @@ import sbt.librarymanagement.Configuration

 object KeyIndex {
   def empty: ExtendableKeyIndex = new KeyIndex0(emptyBuildIndex)
+  @com.github.ghik.silencer.silent
   def apply(
       known: Iterable[ScopedKey[_]],
       projects: Map[URI, Set[String]],
       configurations: Map[String, Seq[Configuration]]
-  ): ExtendableKeyIndex =
+  ): ExtendableKeyIndex = {
+    import sbt.internal.CompatParColls.Converters._
     known.par.foldLeft(base(projects, configurations)) { _ add _ }
+  }
+  @com.github.ghik.silencer.silent
   def aggregate(
       known: Iterable[ScopedKey[_]],
       extra: BuildUtil[_],
@@ -37,6 +41,7 @@ object KeyIndex {
    * This was a significant serial bottleneck during project loading that we can work around by
    * computing the aggregations in parallel and then bulk adding them to the index.
    */
+    import sbt.internal.CompatParColls.Converters._
     val toAggregate = known.par.map {
       case key if validID(key.key.label) =>
         Aggregation.aggregate(key, ScopeMask(), extra, reverse = true)
@ -92,6 +97,7 @@ object KeyIndex {
|
||||||
private[sbt] val emptyConfigIndex = new ConfigIndex(Map.empty, Map.empty, emptyAKeyIndex)
|
private[sbt] val emptyConfigIndex = new ConfigIndex(Map.empty, Map.empty, emptyAKeyIndex)
|
||||||
private[sbt] val emptyProjectIndex = new ProjectIndex(Map.empty)
|
private[sbt] val emptyProjectIndex = new ProjectIndex(Map.empty)
|
||||||
private[sbt] val emptyBuildIndex = new BuildIndex(Map.empty)
|
private[sbt] val emptyBuildIndex = new BuildIndex(Map.empty)
|
||||||
|
|
||||||
}
|
}
|
||||||
import KeyIndex._
|
import KeyIndex._
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -259,6 +259,7 @@ private[sbt] object Definition {
|
||||||
result.future
|
result.future
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@com.github.ghik.silencer.silent
|
||||||
def lspDefinition(
|
def lspDefinition(
|
||||||
jsonDefinition: JValue,
|
jsonDefinition: JValue,
|
||||||
requestId: String,
|
requestId: String,
|
||||||
|
|
@ -287,6 +288,7 @@ private[sbt] object Definition {
|
||||||
log.debug(s"symbol $sym")
|
log.debug(s"symbol $sym")
|
||||||
analyses
|
analyses
|
||||||
.map { analyses =>
|
.map { analyses =>
|
||||||
|
import sbt.internal.CompatParColls.Converters._
|
||||||
val locations = analyses.par.flatMap { analysis =>
|
val locations = analyses.par.flatMap { analysis =>
|
||||||
val selectPotentials = textProcessor.potentialClsOrTraitOrObj(sym)
|
val selectPotentials = textProcessor.potentialClsOrTraitOrObj(sym)
|
||||||
val classes =
|
val classes =
|
||||||
|
|
|
||||||
|
|
@ -269,7 +269,9 @@ private[sbt] object Settings {
|
||||||
* @return a task definition that retrieves the input files and their file stamps scoped to the
|
* @return a task definition that retrieves the input files and their file stamps scoped to the
|
||||||
* input key.
|
* input key.
|
||||||
*/
|
*/
|
||||||
|
@com.github.ghik.silencer.silent
|
||||||
private[sbt] def fileStamps(scopedKey: Def.ScopedKey[_]): Def.Setting[_] = {
|
private[sbt] def fileStamps(scopedKey: Def.ScopedKey[_]): Def.Setting[_] = {
|
||||||
|
import sbt.internal.CompatParColls.Converters._
|
||||||
val scope = scopedKey.scope
|
val scope = scopedKey.scope
|
||||||
addTaskDefinition(Keys.inputFileStamps in scope := {
|
addTaskDefinition(Keys.inputFileStamps in scope := {
|
||||||
val cache = (unmanagedFileStampCache in scope).value
|
val cache = (unmanagedFileStampCache in scope).value
|
||||||
|
|
|
||||||
|
|
@ -482,6 +482,7 @@ class ScriptedRunner {
|
||||||
instances: Int
|
instances: Int
|
||||||
) = run(baseDir, bufferLog, tests, logger, launchOpts, prescripted, prop, instances, true)
|
) = run(baseDir, bufferLog, tests, logger, launchOpts, prescripted, prop, instances, true)
|
||||||
|
|
||||||
|
@com.github.ghik.silencer.silent
|
||||||
private[this] def run(
|
private[this] def run(
|
||||||
baseDir: File,
|
baseDir: File,
|
||||||
bufferLog: Boolean,
|
bufferLog: Boolean,
|
||||||
|
|
@ -510,7 +511,8 @@ class ScriptedRunner {
|
||||||
val scriptedRunners =
|
val scriptedRunners =
|
||||||
runner.batchScriptedRunner(scriptedTests, addTestFile, groupCount, prop, logger)
|
runner.batchScriptedRunner(scriptedTests, addTestFile, groupCount, prop, logger)
|
||||||
if (parallelExecution && instances > 1) {
|
if (parallelExecution && instances > 1) {
|
||||||
val parallelRunners = scriptedRunners.toParArray
|
import sbt.internal.CompatParColls.Converters._
|
||||||
|
val parallelRunners = scriptedRunners.toArray.par
|
||||||
parallelRunners.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(instances))
|
parallelRunners.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(instances))
|
||||||
runAll(parallelRunners)
|
runAll(parallelRunners)
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -544,9 +546,12 @@ class ScriptedRunner {
|
||||||
private def reportErrors(errors: GenSeq[String]): Unit =
|
private def reportErrors(errors: GenSeq[String]): Unit =
|
||||||
if (errors.nonEmpty) sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n")) else ()
|
if (errors.nonEmpty) sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n")) else ()
|
||||||
|
|
||||||
def runAll(toRun: GenSeq[ScriptedTests.TestRunner]): Unit =
|
def runAll(toRun: Seq[ScriptedTests.TestRunner]): Unit =
|
||||||
reportErrors(toRun.flatMap(test => test.apply().flatten))
|
reportErrors(toRun.flatMap(test => test.apply().flatten))
|
||||||
|
|
||||||
|
def runAll(toRun: scala.collection.parallel.ParSeq[ScriptedTests.TestRunner]): Unit =
|
||||||
|
reportErrors(toRun.flatMap(test => test.apply().flatten).toList)
|
||||||
|
|
||||||
@deprecated("No longer used", "1.1.0")
|
@deprecated("No longer used", "1.1.0")
|
||||||
def get(tests: Seq[String], baseDirectory: File, log: Logger): Seq[ScriptedTest] =
|
def get(tests: Seq[String], baseDirectory: File, log: Logger): Seq[ScriptedTest] =
|
||||||
get(tests, baseDirectory, _ => true, log)
|
get(tests, baseDirectory, _ => true, log)
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue