Merge pull request #3885 from dwijnand/warnings

Remove compile warnings
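Silences the compiler warnings that remained in the build. Most hunks apply one of three idioms: a bare () is sequenced after a side-effecting call whose non-Unit result was previously discarded (the kind flagged by -Ywarn-value-discard), pattern variables that are bound but never read become _ (unused-binding lint), and server/Definition.scala consolidates its scattered imports and hides its untyped scalacache lookups behind a small typed accessor, AnalysesAccess. In the hunks below, lines prefixed with - were removed and lines prefixed with + were added.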
eugene yokota authored 2018-01-17 13:13:51 -05:00; committed by GitHub
commit 8067edbd8a
24 changed files with 176 additions and 162 deletions

View File

@@ -85,8 +85,10 @@ object Package {
}
def setVersion(main: Attributes): Unit = {
val version = Attributes.Name.MANIFEST_VERSION
- if (main.getValue(version) eq null)
+ if (main.getValue(version) eq null) {
main.put(version, "1.0")
+ ()
+ }
}
def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = {
import Attributes.Name._
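
The Package.scala hunk above shows the idiom used throughout this commit. Scalac's -Ywarn-value-discard flag warns whenever a non-Unit result is silently adapted to Unit, which happens when a side-effecting call is the final expression of a Unit method. Sequencing an explicit () after the call turns it into a plain statement, so no value is discarded. A minimal sketch with hypothetical names (not sbt code):

    // Attributes.put returns the previous value (an Object).
    import java.util.jar.Attributes

    def setVendor(main: Attributes): Unit = {
      // As the final expression of a Unit method, the Object returned by put
      // would be adapted to Unit, and -Ywarn-value-discard would warn.
      main.put(Attributes.Name.IMPLEMENTATION_VENDOR, "example")
      () // explicit unit value: put is now a statement and nothing is discarded
    }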

View File

@@ -71,11 +71,11 @@ object Sync {
def copy(source: File, target: File): Unit =
if (source.isFile)
IO.copyFile(source, target, true)
- else if (!target.exists) // we don't want to update the last modified time of an existing directory
- {
- IO.createDirectory(target)
- IO.copyLastModified(source, target)
- }
+ else if (!target.exists) { // we don't want to update the last modified time of an existing directory
+ IO.createDirectory(target)
+ IO.copyLastModified(source, target)
+ ()
+ }
def noDuplicateTargets(relation: Relation[File, File]): Unit = {
val dups = relation.reverseMap

View File

@@ -126,6 +126,7 @@ object NetworkClient {
def run(arguments: List[String]): Unit =
try {
new NetworkClient(arguments)
+ ()
} catch {
case NonFatal(e) => println(e.getMessage)
}

View File

@@ -94,7 +94,7 @@ private[sbt] object Server {
def tryClient(f: => Socket): Unit = {
if (portfile.exists) {
Try { f } match {
- case Failure(e) => ()
+ case Failure(_) => ()
case Success(socket) =>
socket.close()
throw new AlreadyRunningException()
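
The Server.scala hunk fixes a different warning: scalac's unused-binding lint flags a pattern variable that is bound but never read, and replacing it with _ both silences the lint and documents that the value is deliberately ignored. A sketch with hypothetical names:

    import scala.util.{ Failure, Success, Try }

    // Only the outcome matters here; the exception itself is never inspected.
    def isReachable(connect: => Unit): Boolean =
      Try(connect) match {
        case Failure(_)  => false // was `case Failure(e)`: e was never read
        case Success(()) => true
      }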

View File

@@ -48,6 +48,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
case _ => exprAtUseSite
}
uncheckedWrappers.add(removedSbtWrapper)
+ ()
}
case _ =>
}

View File

@@ -500,7 +500,7 @@ object Defaults extends BuildCommon {
},
compileIncSetup := compileIncSetupTask.value,
console := consoleTask.value,
- collectAnalyses := Definition.collectAnalysesTask.value,
+ collectAnalyses := Definition.collectAnalysesTask.map(_ => ()).value,
consoleQuick := consoleQuickTask.value,
discoveredMainClasses := (compile map discoverMainClasses storeAs discoveredMainClasses xtriggeredBy compile).value,
discoveredSbtPlugins := discoverSbtPluginNames.value,
@@ -1373,7 +1373,7 @@ object Defaults extends BuildCommon {
private[this] def exported(w: PrintWriter, command: String): Seq[String] => Unit =
args => w.println((command +: args).mkString(" "))
- private[this] def exported(s: TaskStreams, command: String): Seq[String] => Unit = args => {
+ private[this] def exported(s: TaskStreams, command: String): Seq[String] => Unit = {
val w = s.text(ExportStream)
try exported(w, command)
finally w.close() // workaround for #937
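
In the first Defaults.scala hunk above, collectAnalyses is a Unit task while Definition.collectAnalysesTask produces a value, so initializing one from the other used to discard that value implicitly. Mapping the result to () makes the Task[A] to Task[Unit] conversion explicit. A build.sbt-style sketch with hypothetical keys, assuming the usual enrichment that gives Def.Initialize[Task[A]] a map method:

    // Hypothetical keys illustrating the same map(_ => ()) conversion.
    val writeReport = taskKey[java.io.File]("Writes a report and returns the file.")
    val report      = taskKey[Unit]("Runs writeReport purely for its side effect.")

    report := writeReport.map(_ => ()).value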

View File

@@ -34,6 +34,7 @@ object MainLoop {
runLoggedLoop(state, state.globalLogging.backing)
} finally {
Runtime.getRuntime.removeShutdownHook(shutdownHook)
+ ()
}
}

View File

@@ -40,7 +40,7 @@ object Resolvers {
val to = uniqueSubdirectoryFor(info.uri, in = info.staging)
Some { () =>
- creates(to) { IO.unzipURL(url, to) }
+ creates(to) { IO.unzipURL(url, to); () }
}
}

View File

@@ -75,8 +75,10 @@ private[sbt] object TemplateCommandUtil {
private def runTemplate(info: TemplateResolverInfo,
arguments: List[String],
- loader: ClassLoader): Unit =
+ loader: ClassLoader): Unit = {
call(info.implementationClass, "run", loader)(classOf[Array[String]])(arguments.toArray)
+ ()
+ }
private def infoLoader(
info: TemplateResolverInfo,

View File

@@ -156,9 +156,9 @@ private[sbt] final class CommandExchange {
Try(Await.ready(x.ready, Duration(d)))
x.ready.value match {
case Some(Success(_)) =>
- // rememeber to shutdown only when the server comes up
+ // remember to shutdown only when the server comes up
server = Some(x)
- case Some(Failure(e: AlreadyRunningException)) =>
+ case Some(Failure(_: AlreadyRunningException)) =>
s.log.warn(
"sbt server could not start because there's another instance of sbt running on this build.")
s.log.warn("Running multiple instances is unsupported")

View File

@@ -48,7 +48,7 @@ object ConsoleProject {
options,
initCommands,
cleanupCommands
- )(Some(unit.loader), bindings)
+ )(Some(unit.loader), bindings).get
}
/** Conveniences for consoleProject that shouldn't normally be used for builds. */
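
Here the appended .get suggests the console call returns a scala.util.Try: forcing it both rethrows any failure and yields the Unit the surrounding method needs, rather than silently dropping the Try. A sketch under that assumption:

    import scala.util.Try

    // Hypothetical stand-in for the console runner.
    def runSession(): Try[Unit] = Try(println("console session"))

    // .get rethrows on Failure and returns () on Success, so the Try is consumed.
    def consoleTask(): Unit = runSession().get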

View File

@@ -61,7 +61,7 @@ private[sbt] final class TaskTimings(shutdown: Boolean) extends ExecuteProgress[
}
}
def ready(state: Unit, task: Task[_]) = ()
- def workStarting(task: Task[_]) = timings.put(task, System.nanoTime)
+ def workStarting(task: Task[_]) = { timings.put(task, System.nanoTime); () }
def workFinished[T](task: Task[T], result: Either[Task[T], Result[T]]) = {
timings.put(task, System.nanoTime - timings.get(task))
result.left.foreach { t =>

View File

@@ -9,38 +9,47 @@ package sbt
package internal
package server
- import sbt.io.IO
- import sbt.internal.inc.MixedAnalyzingCompiler
- import sbt.internal.langserver.ErrorCodes
- import sbt.util.Logger
import java.io.File
import java.net.URI
import java.nio.file._
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.concurrent.{ ExecutionContext, Future }
- import scala.concurrent.duration.Duration.Inf
- import scala.util.matching.Regex.MatchIterator
- import java.nio.file.{ Files, Paths }
- import sbt.StandardMain
+ import scala.concurrent.duration.Duration
+ import scala.reflect.NameTransformer
+ import scala.tools.reflect.{ ToolBox, ToolBoxError }
+ import scala.util.matching.Regex
+ import sjsonnew.JsonFormat
+ import sjsonnew.shaded.scalajson.ast.unsafe.JValue
+ import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
+ import scalacache._
+ import sbt.io.IO
+ import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler }
+ import sbt.internal.inc.JavaInterfaceUtil._
+ import sbt.internal.protocol.JsonRpcResponseError
+ import sbt.internal.protocol.codec.JsonRPCProtocol
+ import sbt.internal.langserver
+ import sbt.internal.langserver.{ ErrorCodes, Location, Position, Range, TextDocumentPositionParams }
+ import sbt.util.Logger
+ import sbt.Keys._
private[sbt] object Definition {
- import java.net.URI
- import Keys._
- import sbt.internal.inc.Analysis
- import sbt.internal.inc.JavaInterfaceUtil._
- val AnalysesKey = "lsp.definition.analyses.key"
- import sjsonnew.JsonFormat
def send[A: JsonFormat](source: CommandSource, execId: String)(params: A): Unit = {
for {
channel <- StandardMain.exchange.channels.collectFirst {
case c if c.name == source.channelName => c
}
- } yield {
+ } {
channel.publishEvent(params, Option(execId))
}
}
object textProcessor {
private val isIdentifier = {
- import scala.tools.reflect.{ ToolBox, ToolBoxError }
lazy val tb =
scala.reflect.runtime.universe
.runtimeMirror(this.getClass.getClassLoader)
@@ -58,23 +67,14 @@ private[sbt] object Definition {
private def findInBackticks(line: String, point: Int): Option[String] = {
val (even, odd) = line.zipWithIndex
- .collect {
- case (char, backtickIndex) if char == '`' =>
- backtickIndex
- }
+ .collect { case (char, backtickIndex) if char == '`' => backtickIndex }
.zipWithIndex
- .partition { bs =>
- val (_, index) = bs
- index % 2 == 0
- }
+ .partition { case (_, index) => index % 2 == 0 }
even
- .collect {
- case (backtickIndex, _) => backtickIndex
- }
+ .collect { case (backtickIndex, _) => backtickIndex }
.zip {
- odd.collect {
- case (backtickIndex, _) => backtickIndex + 1
- }
+ odd.collect { case (backtickIndex, _) => backtickIndex + 1 }
}
.collectFirst {
case (from, to) if from <= point && point < to => line.slice(from, to)
@@ -83,43 +83,43 @@ private[sbt] object Definition {
def identifier(line: String, point: Int): Option[String] = findInBackticks(line, point).orElse {
val whiteSpaceReg = "(\\s|\\.)+".r
val (zero, end) = fold(Seq.empty)(whiteSpaceReg.findAllIn(line))
.collect {
case (white, ind) => (ind, ind + white.length)
}
- .fold((0, line.length)) { (z, e) =>
- val (from, to) = e
- val (left, right) = z
- (if (to > left && to <= point) to else left,
- if (from < right && from >= point) from else right)
+ .fold((0, line.length)) {
+ case ((left, right), (from, to)) =>
+ val zero = if (to > left && to <= point) to else left
+ val end = if (from < right && from >= point) from else right
+ (zero, end)
}
val ranges = for {
from <- zero to point
to <- point to end
} yield (from -> to)
ranges
- .sortBy {
- case (from, to) => -(to - from)
- }
- .foldLeft(Seq.empty[String]) { (z, e) =>
- val (from, to) = e
- val fragment = line.slice(from, to).trim
- z match {
- case Nil if fragment.nonEmpty && isIdentifier(fragment) => fragment +: z
- case h +: _ if h.length < fragment.length && isIdentifier(fragment) =>
- Seq(fragment)
- case h +: _ if h.length == fragment.length && isIdentifier(fragment) =>
- fragment +: z
- case z => z
- }
+ .sortBy { case (from, to) => -(to - from) }
+ .foldLeft(List.empty[String]) {
+ case (z, (from, to)) =>
+ val fragment = line.slice(from, to).trim
+ if (isIdentifier(fragment))
+ z match {
+ case Nil if fragment.nonEmpty => fragment :: z
+ case h :: _ if h.length < fragment.length => fragment :: Nil
+ case h :: _ if h.length == fragment.length => fragment :: z
+ case _ => z
+ } else z
}
.headOption
}
private def asClassObjectIdentifier(sym: String) =
Seq(s".$sym", s".$sym$$", s"$$$sym", s"$$$sym$$")
def potentialClsOrTraitOrObj(sym: String): PartialFunction[String, String] = {
- import scala.reflect.NameTransformer
val encodedSym = NameTransformer.encode(sym.toSeq match {
case '`' +: body :+ '`' => body.mkString
case noBackticked => noBackticked.mkString
@@ -135,17 +135,17 @@ private[sbt] object Definition {
}
@tailrec
- private def fold(z: Seq[(String, Int)])(it: MatchIterator): Seq[(String, Int)] = {
+ private def fold(z: Seq[(String, Int)])(it: Regex.MatchIterator): Seq[(String, Int)] = {
if (!it.hasNext) z
else fold(z :+ (it.next() -> it.start))(it)
}
def classTraitObjectInLine(sym: String)(line: String): Seq[(String, Int)] = {
- import scala.util.matching.Regex.quote
- val potentials =
- Seq(s"object\\s+${quote(sym)}".r,
- s"trait\\s+${quote(sym)} *\\[?".r,
- s"class\\s+${quote(sym)} *\\[?".r)
+ val potentials = Seq(
+ s"object\\s+${Regex quote sym}".r,
+ s"trait\\s+${Regex quote sym} *\\[?".r,
+ s"class\\s+${Regex quote sym} *\\[?".r,
+ )
potentials
.flatMap { reg =>
fold(Seq.empty)(reg.findAllIn(line))
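
The rewrite above also shows why the symbol is escaped: it comes from user source and may contain regex metacharacters, so Regex.quote (written infix here as Regex quote sym) makes it match literally. For example:

    import scala.util.matching.Regex

    // A symbol such as `++` would otherwise be parsed as regex syntax.
    val sym     = "++"
    val pattern = s"object\\s+${Regex.quote(sym)}".r
    assert(pattern.findFirstIn("object ++ extends AnyRef").isDefined)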
@@ -156,10 +156,7 @@ private[sbt] object Definition {
}
}
- import java.io.File
def markPosition(file: File, sym: String): Seq[(File, Long, Long, Long)] = {
- import java.nio.file._
- import scala.collection.JavaConverters._
val findInLine = classTraitObjectInLine(sym)(_)
Files
.lines(file.toPath)
@@ -179,43 +176,49 @@ private[sbt] object Definition {
}
}
- import sbt.internal.langserver.TextDocumentPositionParams
- import sjsonnew.shaded.scalajson.ast.unsafe.JValue
private def getDefinition(jsonDefinition: JValue): Option[TextDocumentPositionParams] = {
- import sbt.internal.langserver.codec.JsonProtocol._
- import sjsonnew.support.scalajson.unsafe.Converter
+ import langserver.codec.JsonProtocol._
Converter.fromJson[TextDocumentPositionParams](jsonDefinition).toOption
}
- import java.io.File
+ object AnalysesAccess {
+ private[this] val AnalysesKey = "lsp.definition.analyses.key"
+ private[server] type Analyses = Set[((String, Boolean), Option[Analysis])]
+ private[server] def getFrom[F[_]](
+ cache: Cache[Any]
+ )(implicit mode: Mode[F], flags: Flags): F[Option[Analyses]] =
+ mode.M.map(cache.get(AnalysesKey))(_ map (_.asInstanceOf[Analyses]))
+ private[server] def putIn[F[_]](
+ cache: Cache[Any],
+ value: Analyses,
+ ttl: Option[Duration],
+ )(implicit mode: Mode[F], flags: Flags): F[Any] =
+ cache.put(AnalysesKey)(value, ttl)
+ }
private def storeAnalysis(cacheFile: File, useBinary: Boolean): Option[Analysis] =
MixedAnalyzingCompiler
.staticCachedStore(cacheFile, !useBinary)
.get
.toOption
- .collect {
- case contents =>
- contents.getAnalysis
- }
- .collect {
- case a: Analysis => a
- }
+ .map { _.getAnalysis }
+ .collect { case a: Analysis => a }
- import scalacache._
private[sbt] def updateCache[F[_]](cache: Cache[Any])(cacheFile: String, useBinary: Boolean)(
implicit
mode: Mode[F],
flags: Flags): F[Any] = {
- mode.M.flatMap(cache.get(AnalysesKey)) {
+ mode.M.flatMap(AnalysesAccess.getFrom(cache)) {
case None =>
- cache.put(AnalysesKey)(Set(cacheFile -> useBinary -> None), Option(Inf))
+ AnalysesAccess.putIn(cache, Set(cacheFile -> useBinary -> None), Option(Duration.Inf))
case Some(set) =>
- cache.put(AnalysesKey)(
- set.asInstanceOf[Set[((String, Boolean), Option[Analysis])]].filterNot {
- case ((file, _), _) => file == cacheFile
- } + (cacheFile -> useBinary -> None),
- Option(Inf))
- case _ => mode.M.pure(())
+ val newSet = set
+ .filterNot { case ((file, _), _) => file == cacheFile }
+ .+(cacheFile -> useBinary -> None)
+ AnalysesAccess.putIn(cache, newSet, Option(Duration.Inf))
}
}
@@ -231,14 +234,13 @@ private[sbt] object Definition {
private[sbt] def getAnalyses: Future[Seq[Analysis]] = {
import scalacache.modes.scalaFuture._
import scala.concurrent.ExecutionContext.Implicits.global
- StandardMain.cache
- .get(AnalysesKey)
- .collect {
- case Some(a) => a.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
- }
+ AnalysesAccess
+ .getFrom(StandardMain.cache)
+ .collect { case Some(a) => a }
.map { caches =>
- val (working, uninitialized) = caches.partition { cacheAnalysis =>
- cacheAnalysis._2 != None
+ val (working, uninitialized) = caches.partition {
+ case (_, Some(_)) => true
+ case (_, None) => false
}
val addToCache = uninitialized.collect {
case (title @ (file, useBinary), _) if Files.exists(Paths.get(file)) =>
@@ -246,7 +248,7 @@ private[sbt] object Definition {
}
val validCaches = working ++ addToCache
if (addToCache.nonEmpty)
- StandardMain.cache.put(AnalysesKey)(validCaches, Option(Inf))
+ AnalysesAccess.putIn(StandardMain.cache, validCaches, Option(Duration.Inf))
validCaches.toSeq.collect {
case (_, Some(analysis)) =>
analysis
@@ -254,19 +256,19 @@ private[sbt] object Definition {
}
}
- def lspDefinition(jsonDefinition: JValue,
- requestId: String,
- commandSource: CommandSource,
- log: Logger)(implicit ec: ExecutionContext): Future[Unit] = Future {
+ def lspDefinition(
+ jsonDefinition: JValue,
+ requestId: String,
+ commandSource: CommandSource,
+ log: Logger,
+ )(implicit ec: ExecutionContext): Future[Unit] = Future {
val LspDefinitionLogHead = "lsp-definition"
- import sjsonnew.support.scalajson.unsafe.CompactPrinter
- log.debug(s"$LspDefinitionLogHead json request: ${CompactPrinter(jsonDefinition)}")
+ val jsonDefinitionString = CompactPrinter(jsonDefinition)
+ log.debug(s"$LspDefinitionLogHead json request: $jsonDefinitionString")
lazy val analyses = getAnalyses
- val definition = getDefinition(jsonDefinition)
- definition
+ getDefinition(jsonDefinition)
.flatMap { definition =>
val uri = URI.create(definition.textDocument.uri)
- import java.nio.file._
Files
.lines(Paths.get(uri))
.skip(definition.position.line)
@@ -274,11 +276,10 @@ private[sbt] object Definition {
.toOption
.flatMap { line =>
log.debug(s"$LspDefinitionLogHead found line: $line")
- textProcessor
- .identifier(line, definition.position.character.toInt)
+ textProcessor.identifier(line, definition.position.character.toInt)
}
- }
- .map { sym =>
+ } match {
+ case Some(sym) =>
log.debug(s"symbol $sym")
analyses
.map { analyses =>
@@ -291,40 +292,39 @@ private[sbt] object Definition {
log.debug(s"$LspDefinitionLogHead potentials: $classes")
classes
.flatMap { className =>
- analysis.relations.definesClass(className) ++ analysis.relations
- .libraryDefinesClass(className)
+ analysis.relations.definesClass(className) ++
+ analysis.relations.libraryDefinesClass(className)
}
.flatMap { classFile =>
textProcessor.markPosition(classFile, sym).collect {
case (file, line, from, to) =>
- import sbt.internal.langserver.{ Location, Position, Range }
- Location(IO.toURI(file).toString,
- Range(Position(line, from), Position(line, to)))
+ Location(
+ IO.toURI(file).toString,
+ Range(Position(line, from), Position(line, to)),
+ )
}
}
}.seq
- log.debug(s"$LspDefinitionLogHead locations ${locations}")
- import sbt.internal.langserver.codec.JsonProtocol._
+ log.debug(s"$LspDefinitionLogHead locations $locations")
+ import langserver.codec.JsonProtocol._
send(commandSource, requestId)(locations.toArray)
}
.recover {
- case anyException @ _ =>
- log.warn(
- s"Problem with processing analyses $anyException for ${CompactPrinter(jsonDefinition)}")
- import sbt.internal.protocol.JsonRpcResponseError
- import sbt.internal.protocol.codec.JsonRPCProtocol._
- send(commandSource, requestId)(
- JsonRpcResponseError(ErrorCodes.InternalError,
- "Problem with processing analyses.",
- None))
+ case t =>
+ log.warn(s"Problem with processing analyses $t for $jsonDefinitionString")
+ val rsp = JsonRpcResponseError(
+ ErrorCodes.InternalError,
+ "Problem with processing analyses.",
+ None,
+ )
+ import JsonRPCProtocol._
+ send(commandSource, requestId)(rsp)
}
}
- .orElse {
- log.info(s"Symbol not found in definition request ${CompactPrinter(jsonDefinition)}")
- import sbt.internal.langserver.Location
- import sbt.internal.langserver.codec.JsonProtocol._
+ ()
+ case None =>
+ log.info(s"Symbol not found in definition request $jsonDefinitionString")
+ import langserver.codec.JsonProtocol._
send(commandSource, requestId)(Array.empty[Location])
- None
}
}
}
}
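
The AnalysesAccess object introduced above is the main structural change in this file. The cache is a scalacache Cache[Any], so every direct get previously ended in an unchecked asInstanceOf at the call site; funnelling all reads and writes through one accessor keeps the key private and confines the cast to a single line. A sketch of the same pattern with hypothetical names, assuming the scalacache Cache/Mode/Flags API used here:

    import scala.concurrent.duration.Duration
    import scalacache.{ Cache, Flags, Mode }

    // Typed facade over an untyped cache, modeled on AnalysesAccess above.
    object IndexAccess {
      private[this] val IndexKey = "example.index.key"
      type Index = Set[(String, Int)]

      def getFrom[F[_]](cache: Cache[Any])(implicit mode: Mode[F], flags: Flags): F[Option[Index]] =
        mode.M.map(cache.get(IndexKey))(_ map (_.asInstanceOf[Index])) // the only cast

      def putIn[F[_]](cache: Cache[Any], value: Index, ttl: Option[Duration] = None)(
          implicit mode: Mode[F],
          flags: Flags): F[Any] =
        cache.put(IndexKey)(value, ttl)
    }

The DefinitionTest changes below exercise the same accessor synchronously via scalacache.modes.sync._, with no cast left in the tests.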

View File

@@ -39,6 +39,7 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel {
notification.method match {
case "textDocument/didSave" =>
append(Exec(";compile; collectAnalyses", None, Some(CommandSource(name))))
+ ()
case u => log.debug(s"Unhandled notification received: $u")
}
}
@@ -69,9 +70,11 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel {
case "textDocument/definition" =>
import scala.concurrent.ExecutionContext.Implicits.global
Definition.lspDefinition(json, request.id, CommandSource(name), log)
+ ()
case "sbt/exec" =>
val param = Converter.fromJson[SbtExecParams](json).get
append(Exec(param.commandLine, Some(request.id), Some(CommandSource(name))))
+ ()
case "sbt/setting" => {
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[Q](json).get

View File

@@ -331,6 +331,7 @@ final class NetworkChannel(val name: String,
if (initialized) {
append(
Exec(cmd.commandLine, cmd.execId orElse Some(Exec.newExecId), Some(CommandSource(name))))
+ ()
} else {
log.warn(s"ignoring command $cmd before initialization")
}

View File

@@ -9,8 +9,6 @@ package sbt
package internal
package server
- import sbt.internal.inc.Analysis
class DefinitionTest extends org.specs2.mutable.Specification {
import Definition.textProcessor
@@ -126,9 +124,12 @@ class DefinitionTest extends org.specs2.mutable.Specification {
textProcessor.classTraitObjectInLine("B")("trait A ") must be empty
}
}
"definition" should {
+ import scalacache.caffeine._
+ import scalacache.modes.sync._
"cache data in cache" in {
val cache = CaffeineCache[Any]
val cacheFile = "Test.scala"
@@ -136,12 +137,11 @@ class DefinitionTest extends org.specs2.mutable.Specification {
Definition.updateCache(cache)(cacheFile, useBinary)
- val actual = cache.get(Definition.AnalysesKey)
+ val actual = Definition.AnalysesAccess.getFrom(cache)
- actual.collect {
- case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
- }.get should contain("Test.scala" -> true -> None)
+ actual.get should contain("Test.scala" -> true -> None)
}
"replace cache data in cache" in {
val cache = CaffeineCache[Any]
val cacheFile = "Test.scala"
@@ -151,12 +151,11 @@ class DefinitionTest extends org.specs2.mutable.Specification {
Definition.updateCache(cache)(cacheFile, falseUseBinary)
Definition.updateCache(cache)(cacheFile, useBinary)
- val actual = cache.get(Definition.AnalysesKey)
+ val actual = Definition.AnalysesAccess.getFrom(cache)
- actual.collect {
- case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
- }.get should contain("Test.scala" -> true -> None)
+ actual.get should contain("Test.scala" -> true -> None)
}
"cache more data in cache" in {
val cache = CaffeineCache[Any]
val cacheFile = "Test.scala"
@@ -167,11 +166,9 @@ class DefinitionTest extends org.specs2.mutable.Specification {
Definition.updateCache(cache)(otherCacheFile, otherUseBinary)
Definition.updateCache(cache)(cacheFile, useBinary)
- val actual = cache.get(Definition.AnalysesKey)
+ val actual = Definition.AnalysesAccess.getFrom(cache)
- actual.collect {
- case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
- }.get should contain("Test.scala" -> true -> None, "OtherTest.scala" -> false -> None)
+ actual.get should contain("Test.scala" -> true -> None, "OtherTest.scala" -> false -> None)
}
}
}

View File

@@ -27,7 +27,8 @@ trait JsonRpcResponseMessageFormats {
val id = try {
unbuilder.readField[Option[String]]("id")
} catch {
- case _: DeserializationException => unbuilder.readField[Option[Long]]("id") map { _.toString }
+ case _: DeserializationException =>
+ unbuilder.readField[Option[Long]]("id") map { _.toString }
}
val result = unbuilder.lookupField("result") map {

View File

@@ -90,7 +90,7 @@ class Run(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) extends S
val currentThread = Thread.currentThread
val oldLoader = Thread.currentThread.getContextClassLoader
currentThread.setContextClassLoader(loader)
- try { main.invoke(null, options.toArray[String]) } finally {
+ try { main.invoke(null, options.toArray[String]); () } finally {
currentThread.setContextClassLoader(oldLoader)
}
}

View File

@@ -152,7 +152,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM
def runManaged(f: Supplier[Unit], xlog: xsbti.Logger): Int = {
val _ = running.incrementAndGet()
try runManaged0(f, xlog)
- finally running.decrementAndGet()
+ finally { running.decrementAndGet(); () }
}
private[this] def runManaged0(f: Supplier[Unit], xlog: xsbti.Logger): Int = {
val log: Logger = xlog
@@ -264,6 +264,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM
val old = groups.putIfAbsent(groupID, new WeakReference(g))
if (old.isEmpty) { // wasn't registered
threadToApp.put(groupID, this)
+ ()
}
}
@@ -299,6 +300,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM
threadToApp.remove(id)
threads.remove(id)
groups.remove(id)
+ ()
}
/** Final cleanup for this application after it has terminated. */

View File

@@ -65,6 +65,7 @@ final class SbtHandler(directory: File,
try {
send("exit", server)
process.exitValue()
+ ()
} catch {
case _: IOException => process.destroy()
}

View File

@@ -23,13 +23,13 @@ import java.util.concurrent.{
object CompletionService {
def apply[A, T](poolSize: Int): (CompletionService[A, T], () => Unit) = {
val pool = Executors.newFixedThreadPool(poolSize)
- (apply[A, T](pool), () => pool.shutdownNow())
+ (apply[A, T](pool), () => { pool.shutdownNow(); () })
}
def apply[A, T](x: Executor): CompletionService[A, T] =
apply(new ExecutorCompletionService[T](x))
def apply[A, T](completion: JCompletionService[T]): CompletionService[A, T] =
new CompletionService[A, T] {
- def submit(node: A, work: () => T) = CompletionService.submit(work, completion)
+ def submit(node: A, work: () => T) = { CompletionService.submit(work, completion); () }
def take() = completion.take().get()
}
def submit[T](work: () => T, completion: JCompletionService[T]): () => T = {
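
The CompletionService hunk applies the value-discard fix inside lambdas: when the expected type is () => Unit, a body that ends in a value (shutdownNow() returns a java.util.List[Runnable]) is adapted to Unit and warns, so an explicit () is sequenced after the call. A sketch:

    import java.util.concurrent.Executors

    val pool = Executors.newFixedThreadPool(4)

    // Without the trailing (), the List[Runnable] returned by shutdownNow would
    // be discarded to satisfy () => Unit, tripping the value-discard warning.
    val shutdown: () => Unit = () => { pool.shutdownNow(); () }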

View File

@@ -128,7 +128,7 @@ object ConcurrentRestrictions {
def completionService[A, R](tags: ConcurrentRestrictions[A],
warn: String => Unit): (CompletionService[A, R], () => Unit) = {
val pool = Executors.newCachedThreadPool()
- (completionService[A, R](pool, tags, warn), () => pool.shutdownNow())
+ (completionService[A, R](pool, tags, warn), () => { pool.shutdownNow(); () })
}
/**
@@ -167,6 +167,7 @@ object ConcurrentRestrictions {
if (running == 0) errorAddingToIdle()
pending.add(new Enqueue(node, work))
}
+ ()
}
private[this] def submitValid(node: A, work: () => R) = {
running += 1
@@ -192,6 +193,7 @@ object ConcurrentRestrictions {
if (!tried.isEmpty) {
if (running == 0) errorAddingToIdle()
pending.addAll(tried)
+ ()
}
} else {
val next = pending.remove()

View File

@@ -127,7 +127,7 @@ class JUnitXmlTestsListener(val outputDir: String) extends TestsListener {
val testSuite = new DynamicVariable(null: TestSuite)
/**Creates the output Dir*/
- override def doInit() = { targetDir.mkdirs() }
+ override def doInit() = { targetDir.mkdirs(); () }
/**
* Starts a new, initially empty Suite with the given name.

View File

@@ -18,7 +18,7 @@ private[sbt] class TestStatusReporter(f: File) extends TestsListener {
private lazy val succeeded = TestStatus.read(f)
def doInit = ()
- def startGroup(name: String): Unit = { succeeded remove name }
+ def startGroup(name: String): Unit = { succeeded remove name; () }
def testEvent(event: TestEvent): Unit = ()
def endGroup(name: String, t: Throwable): Unit = ()
def endGroup(name: String, result: TestResult): Unit = {