mirror of https://github.com/sbt/sbt.git
Flip testForkedParallel to true
This commit is contained in:
parent
d267bce6dc
commit
42010a317e
|
|
@@ -201,7 +201,7 @@ object Defaults extends BuildCommon {
|
|||
discoveredJavaHomes := CrossJava.discoverJavaHomes,
|
||||
javaHomes :== ListMap.empty,
|
||||
fullJavaHomes := CrossJava.expandJavaHomes(discoveredJavaHomes.value ++ javaHomes.value),
|
||||
testForkedParallel :== false,
|
||||
testForkedParallel :== true,
|
||||
javaOptions :== Nil,
|
||||
sbtPlugin :== false,
|
||||
isMetaBuild :== false,
|
||||
|
|
|
|||
|
|
@@ -1,27 +1,23 @@
|
|||
import Tests._
|
||||
import Defaults._
|
||||
|
||||
ThisBuild / scalaVersion := "2.12.20"
|
||||
scalaVersion := "2.12.20"
|
||||
val check = taskKey[Unit]("Check that tests are executed in parallel")
|
||||
|
||||
lazy val root = (project in file("."))
|
||||
.settings(
|
||||
libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % Test,
|
||||
Test / fork := true,
|
||||
check := {
|
||||
val nbProc = java.lang.Runtime.getRuntime().availableProcessors()
|
||||
val log = streams.value.log
|
||||
if( nbProc < 4 ) {
|
||||
log.warn("With fewer than 4 processors this test is meaningless")
|
||||
// mimic behavior expected by scripted
|
||||
if (!testForkedParallel.value) sys.error("Exiting with error (note: test not performed)")
|
||||
} else {
|
||||
// we've got at least 4 processors, we'll check the upper end but also 3 and 4 as the upper might not
|
||||
// be reached if the system is under heavy load.
|
||||
if( ! (file("max-concurrent-tests_3").exists() || file("max-concurrent-tests_4").exists() ||
|
||||
file("max-concurrent-tests_" + (nbProc -1)).exists() || file("max-concurrent-tests_" + nbProc).exists())) {
|
||||
sys.error("Forked tests were not executed in parallel!")
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % Test
|
||||
Test / fork := true
|
||||
|
||||
check := {
|
||||
val nbProc = java.lang.Runtime.getRuntime().availableProcessors()
|
||||
val log = streams.value.log
|
||||
if nbProc < 4 then
|
||||
log.warn("With fewer than 4 processors this test is meaningless")
|
||||
// mimic behavior expected by scripted
|
||||
if !testForkedParallel.value then sys.error("Exiting with error (note: test not performed)")
|
||||
else
|
||||
// we've got at least 4 processors, we'll check the upper end but also 3 and 4 as the upper might not
|
||||
// be reached if the system is under heavy load.
|
||||
if ! (file("max-concurrent-tests_3").exists() || file("max-concurrent-tests_4").exists() ||
|
||||
file("max-concurrent-tests_" + (nbProc -1)).exists() || file("max-concurrent-tests_" + nbProc).exists()) then
|
||||
sys.error("Forked tests were not executed in parallel!")
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -9,7 +9,7 @@ object ParallelTest {
|
|||
val maxConcurrentTests = new AtomicInteger(0)
|
||||
|
||||
private def updateMaxConcurrentTests(currentMax: Int, newMax: Int) : Boolean = {
|
||||
if( maxConcurrentTests.compareAndSet(currentMax, newMax) ) {
|
||||
if (maxConcurrentTests.compareAndSet(currentMax, newMax) ) {
|
||||
val f = new File("max-concurrent-tests_" + newMax)
|
||||
f.createNewFile
|
||||
true
|
||||
|
|
@@ -22,7 +22,7 @@ object ParallelTest {
|
|||
def execute(f : => Unit): Unit = {
|
||||
val nb = nbConcurrentTests.incrementAndGet()
|
||||
val max = maxConcurrentTests.get()
|
||||
if( nb <= max || updateMaxConcurrentTests(max, nb)) {
|
||||
if (nb <= max || updateMaxConcurrentTests(max, nb)) {
|
||||
f
|
||||
nbConcurrentTests.getAndDecrement
|
||||
} else {
|
||||
|
|
|
|||
|
|
@@ -1,8 +1,13 @@
|
|||
# Note: this test is meaningless on less than four cores
|
||||
|
||||
> testFull
|
||||
-> check
|
||||
> clean
|
||||
> set testForkedParallel := true
|
||||
> testFull
|
||||
> check
|
||||
> clean
|
||||
$ delete max-concurrent-tests_1
|
||||
$ delete max-concurrent-tests_2
|
||||
$ delete max-concurrent-tests_3
|
||||
$ delete max-concurrent-tests_4
|
||||
|
||||
> set testForkedParallel := false
|
||||
> testFull
|
||||
-> check
|
||||
|
|
|
|||
Loading…
Reference in New Issue