Use the Java Caffeine library rather than scalacache

sbt depends on scalacache (which hasn't been updated in about a year),
and we don't really need the functionality that scalacache provides. In
fact, the Java API is somewhat easier to work with for our use case. The
immediate motivation is that scalacache uses slf4j for logging, which
meant that it was implicitly loading log4j. This caused noisy logs
during shutdown, when the previously unused cache was initialized just
to be cleaned up.

This commit also upgrades Caffeine, and moving forward we can upgrade
Caffeine (and potentially shade it) without any conflict with the
version required by scalacache.
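
For a sense of what "easier to work with" means in practice, here is a
minimal sketch of the Caffeine API used directly from Scala. The object
name, key, and value types below are illustrative only and are not taken
from the sbt sources:

    // Minimal sketch: assumes only the caffeine artifact on the classpath.
    import com.github.benmanes.caffeine.cache.{ Cache, Caffeine }

    object CaffeineSketch {
      // A plain in-memory cache; no implicit Mode/Flags or effect wrapper needed.
      private val cache: Cache[String, Set[String]] =
        Caffeine.newBuilder().build[String, Set[String]]()

      def main(args: Array[String]): Unit = {
        cache.put("analyses", Set("Test.scala"))        // store a value
        println(Option(cache.getIfPresent("analyses"))) // Some(Set(Test.scala))
        cache.invalidateAll()                           // cleanup is two plain calls,
        cache.cleanUp()                                 // not close()(mode)
      }
    }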
Ethan Atkins 2020-08-07 12:49:39 -07:00
parent 9beecf98e0
commit 525cff7fd7
5 changed files with 64 additions and 66 deletions

@@ -873,7 +873,7 @@ lazy val mainProj = (project in file("main"))
       sys.error(s"PluginCross.scala does not match up with the scalaVersion $sv")
     },
     libraryDependencies ++=
-      (Seq(scalaXml, launcherInterface, scalaCacheCaffeine, lmCoursierShaded) ++ log4jModules),
+      (Seq(scalaXml, launcherInterface, caffeine, lmCoursierShaded) ++ log4jModules),
     libraryDependencies ++= (scalaVersion.value match {
       case v if v.startsWith("2.12.") => List(compilerPlugin(silencerPlugin))
       case _ => List()
@@ -985,6 +985,7 @@ lazy val mainProj = (project in file("main"))
       exclude[DirectMissingMethodProblem]("sbt.Classpaths.interDependencies"),
       exclude[DirectMissingMethodProblem]("sbt.Classpaths.productsTask"),
       exclude[DirectMissingMethodProblem]("sbt.Classpaths.jarProductsTask"),
+      exclude[DirectMissingMethodProblem]("sbt.StandardMain.cache"),
     )
   )
   .configure(

@@ -161,8 +161,6 @@ private[sbt] object ConsoleMain {
 object StandardMain {
   private[sbt] lazy val exchange = new CommandExchange()
-  import scalacache.caffeine._
-  private[sbt] lazy val cache: scalacache.Cache[Any] = CaffeineCache[Any]
   // The access to the pool should be thread safe because lazy val instantiation is thread safe
   // and pool is only referenced directly in closeRunnable after the executionContext is sure
   // to have been instantiated
@@ -174,8 +172,6 @@ object StandardMain {
   })
   private[this] val closeRunnable = () => {
-    cache.close()(scalacache.modes.sync.mode)
-    cache.close()(scalacache.modes.scalaFuture.mode(executionContext))
     exchange.shutdown()
     pool.foreach(_.shutdownNow())
   }

@@ -15,7 +15,6 @@ import java.nio.file._
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
 import scala.concurrent.{ ExecutionContext, Future }
-import scala.concurrent.duration.Duration
 import scala.reflect.NameTransformer
 import scala.tools.reflect.{ ToolBox, ToolBoxError }
 import scala.util.matching.Regex
@@ -24,8 +23,6 @@ import sjsonnew.JsonFormat
 import sjsonnew.shaded.scalajson.ast.unsafe.JValue
 import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
-import scalacache._
 import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler }
 import sbt.internal.inc.JavaInterfaceUtil._
 import sbt.internal.protocol.JsonRpcResponseError
@@ -35,6 +32,9 @@ import sbt.internal.langserver.{ ErrorCodes, Location, Position, Range, TextDocu
 import sbt.util.Logger
 import sbt.Keys._
 import xsbti.{ FileConverter, VirtualFileRef }
+import com.github.benmanes.caffeine.cache.Cache
+import scala.concurrent.Promise
+import com.github.benmanes.caffeine.cache.Caffeine
 
 private[sbt] object Definition {
   def send[A: JsonFormat](source: CommandSource, execId: String)(params: A): Unit = {
@@ -181,24 +181,9 @@ private[sbt] object Definition {
     Converter.fromJson[TextDocumentPositionParams](jsonDefinition).toOption
   }
 
-  object AnalysesAccess {
   private[this] val AnalysesKey = "lsp.definition.analyses.key"
   private[server] type Analyses = Set[((String, Boolean), Option[Analysis])]
-    private[server] def getFrom[F[_]](
-        cache: Cache[Any]
-    )(implicit mode: Mode[F], flags: Flags): F[Option[Analyses]] =
-      mode.M.map(cache.get(AnalysesKey))(_ map (_.asInstanceOf[Analyses]))
-    private[server] def putIn[F[_]](
-        cache: Cache[Any],
-        value: Analyses,
-        ttl: Option[Duration],
-    )(implicit mode: Mode[F], flags: Flags): F[Any] =
-      cache.put(AnalysesKey)(value, ttl)
-  }
 
   private def storeAnalysis(cacheFile: Path, useBinary: Boolean): Option[Analysis] =
     MixedAnalyzingCompiler
       .staticCachedStore(cacheFile, !useBinary)
@@ -207,19 +192,29 @@ private[sbt] object Definition {
       .map { _.getAnalysis }
       .collect { case a: Analysis => a }
 
-  private[sbt] def updateCache[F[_]](cache: Cache[Any])(cacheFile: String, useBinary: Boolean)(
-      implicit
-      mode: Mode[F],
-      flags: Flags
-  ): F[Any] = {
-    mode.M.flatMap(AnalysesAccess.getFrom(cache)) {
-      case None =>
-        AnalysesAccess.putIn(cache, Set(cacheFile -> useBinary -> None), Option(Duration.Inf))
-      case Some(set) =>
+  private[sbt] def updateCache(
+      cache: Cache[String, Analyses]
+  )(cacheFile: String, useBinary: Boolean): Any = {
+    cache.get(AnalysesKey, k => Set(cacheFile -> useBinary -> None)) match {
+      case null => new AnyRef
+      case set =>
         val newSet = set
           .filterNot { case ((file, _), _) => file == cacheFile }
           .+(cacheFile -> useBinary -> None)
-        AnalysesAccess.putIn(cache, newSet, Option(Duration.Inf))
+        cache.put(AnalysesKey, newSet)
     }
   }
+
+  private[sbt] object AnalysesAccess {
+    private[sbt] lazy val cache: Cache[String, Analyses] = Caffeine.newBuilder.build()
+    ShutdownHooks.add(() => {
+      cache.invalidateAll()
+      cache.cleanUp()
+    })
+    private[sbt] def getFrom(cache: Cache[String, Analyses]): Option[Analyses] = {
+      cache.getIfPresent(AnalysesKey) match {
+        case null => None
+        case a => Some(a)
+      }
+    }
+  }
@@ -228,17 +223,20 @@ private[sbt] object Definition {
     val useBinary = enableBinaryCompileAnalysis.value
     val s = state.value
     s.log.debug(s"analysis location ${cacheFile -> useBinary}")
-    import scalacache.modes.sync._
-    updateCache(StandardMain.cache)(cacheFile, useBinary)
+    updateCache(AnalysesAccess.cache)(cacheFile, useBinary)
   }
 
   private[sbt] def getAnalyses: Future[Seq[Analysis]] = {
-    import scalacache.modes.scalaFuture._
-    implicit val executionContext: ExecutionContext = StandardMain.executionContext
-    AnalysesAccess
-      .getFrom(StandardMain.cache)
-      .collect { case Some(a) => a }
-      .map { caches =>
+    val result = Promise[Seq[Analysis]]
+    new Thread("sbt-get-analysis-thread") {
+      setDaemon(true)
+      start()
+
+      override def run(): Unit =
+        try {
+          AnalysesAccess.cache.getIfPresent(AnalysesKey) match {
+            case null => result.success(Nil)
+            case caches =>
               val (working, uninitialized) = caches.partition {
                 case (_, Some(_)) => true
                 case (_, None) => false
@@ -248,13 +246,17 @@ private[sbt] object Definition {
                 (title, storeAnalysis(Paths.get(file), !useBinary))
               }
               val validCaches = working ++ addToCache
-              if (addToCache.nonEmpty)
-                AnalysesAccess.putIn(StandardMain.cache, validCaches, Option(Duration.Inf))
-              validCaches.toSeq.collect {
+              if (addToCache.nonEmpty) {
+                AnalysesAccess.cache.put(AnalysesKey, validCaches)
+              }
+              result.success(validCaches.toSeq.collect {
                 case (_, Some(analysis)) =>
                   analysis
-              }
+              })
           }
+        } catch { case scala.util.control.NonFatal(e) => result.failure(e) }
     }
+    result.future
   }
 
   def lspDefinition(

@@ -9,6 +9,8 @@ package sbt
 package internal
 package server
 
+import com.github.benmanes.caffeine.cache.Caffeine
+
 class DefinitionTest extends org.specs2.mutable.Specification {
   import Definition.textProcessor
@@ -132,11 +134,8 @@ class DefinitionTest extends org.specs2.mutable.Specification {
   "definition" should {
-    import scalacache.caffeine._
-    import scalacache.modes.sync._
-
     "cache data in cache" in {
-      val cache = CaffeineCache[Any]
+      val cache = Caffeine.newBuilder().build[String, Definition.Analyses]()
       val cacheFile = "Test.scala"
       val useBinary = true
@@ -148,7 +147,7 @@ class DefinitionTest extends org.specs2.mutable.Specification {
     }
 
     "replace cache data in cache" in {
-      val cache = CaffeineCache[Any]
+      val cache = Caffeine.newBuilder().build[String, Definition.Analyses]()
       val cacheFile = "Test.scala"
       val useBinary = true
       val falseUseBinary = false
@@ -162,7 +161,7 @@ class DefinitionTest extends org.specs2.mutable.Specification {
     }
 
     "cache more data in cache" in {
-      val cache = CaffeineCache[Any]
+      val cache = Caffeine.newBuilder().build[String, Definition.Analyses]()
       val cacheFile = "Test.scala"
       val useBinary = true
       val otherCacheFile = "OtherTest.scala"

@@ -105,7 +105,7 @@ object Dependencies {
   val log4jSlf4jImpl = log4jModule("log4j-slf4j-impl")
   val log4jModules = Vector(log4jApi, log4jCore, log4jSlf4jImpl)
-  val scalaCacheCaffeine = "com.github.cb372" %% "scalacache-caffeine" % "0.20.0"
+  val caffeine = "com.github.ben-manes.caffeine" % "caffeine" % "2.8.5"
   val hedgehog = "hedgehog" %% "hedgehog-sbt" % "0.1.0"
   val disruptor = "com.lmax" % "disruptor" % "3.4.2"