integrate with VirtualFile changes

Ref https://github.com/sbt/zinc/pull/712
This commit is contained in:
Eugene Yokota 2020-02-03 13:18:40 -05:00
parent 04a0b10ac4
commit 3ce4d22b84
27 changed files with 413 additions and 276 deletions

View File

@@ -8,7 +8,7 @@
package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import sbt.internal.inc.{ AnalyzingCompiler, PlainVirtualFile }
import sbt.internal.util.JLine
import sbt.util.Logger
import xsbti.compile.{ Inputs, Compilers }
@@ -45,7 +45,12 @@ final class Console(compiler: AnalyzingCompiler) {
cleanupCommands: String
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
def console0() =
compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings)
compiler.console(classpath map { x =>
PlainVirtualFile(x.toPath)
}, options, initialCommands, cleanupCommands, log)(
loader,
bindings
)
JLine.usingTerminal { t =>
t.init
Run.executeTrapExit(console0, log)

View File

@@ -8,15 +8,13 @@
package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import sbt.internal.inc.{ AnalyzingCompiler, PlainVirtualFile }
import sbt.internal.util.ManagedLogger
import sbt.util.CacheStoreFactory
import sbt.util.Logger
import xsbti.Reporter
import xsbti.compile.JavaTools
import sbt.util.Logger
import sbt.internal.util.ManagedLogger
object Doc {
import RawCompileLike._
@@ -36,7 +34,23 @@ object Doc {
cached(
cacheStoreFactory,
fileInputOptions,
prepare(label + " Scala API documentation", compiler.doc)
prepare(
label + " Scala API documentation",
(sources, classpath, outputDirectory, options, maxErrors, log) => {
compiler.doc(
sources map { x =>
PlainVirtualFile(x.toPath)
},
classpath map { x =>
PlainVirtualFile(x.toPath)
},
outputDirectory.toPath,
options,
maxErrors,
log
)
}
)
)
@deprecated("Going away", "1.1.1")

View File

@@ -9,50 +9,22 @@ package sbt
import java.io.File
import sbt.internal.inc.Relations
import sbt.internal.util.Relation
import sbt.io.IO
object DotGraph {
private def fToString(roots: Iterable[File]): (File => String) =
(x: File) => sourceToString(roots, x)
def sources(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = {
val toString = fToString(sourceRoots)
apply(relations, outputDirectory, toString, toString)
}
def packages(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = {
val packageOnly = (path: String) => {
val last = path.lastIndexOf(File.separatorChar.toInt)
val packagePath = (if (last > 0) path.substring(0, last) else path).trim
if (packagePath.isEmpty) "" else packagePath.replace(File.separatorChar, '.')
}
val toString = packageOnly compose fToString(sourceRoots)
apply(relations, outputDirectory, toString, toString)
}
@deprecated("not used", "1.4.0")
def sources(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = ???
@deprecated("not used", "1.4.0")
def packages(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = ???
@deprecated("not used", "1.4.0")
def apply(
relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String
): Unit = {
def file(name: String) = new File(outputDir, name)
IO.createDirectory(outputDir)
generateGraph(
file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String]
)
generateGraph(
file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString
)
}
): Unit = ???
def generateGraph[K, V](
file: File,

View File

@@ -9,22 +9,19 @@ package sbt
import scala.annotation.tailrec
import java.io.File
import sbt.internal.inc.{ RawCompiler, ScalaInstance }
import sbt.io.syntax._
import sbt.io.IO
import sbt.internal.inc.{ PlainVirtualFile, RawCompiler, ScalaInstance }
import sbt.internal.util.Types.:+:
import sbt.internal.util.HListFormats._
import sbt.internal.util.HNil
import sbt.internal.util.HListFormats._
import sbt.util.CacheImplicits._
import sbt.util.Tracked.inputChanged
import sbt.util.{ CacheStoreFactory, FilesInfo, HashFileInfo, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.HNil
import sbt.internal.util.HListFormats._
import sbt.util.FileInfo.{ exists, hash, lastModified }
import xsbti.compile.ClasspathOptions
import sbt.internal.util.ManagedLogger
import xsbti.compile.ClasspathOptions
object RawCompileLike {
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
@@ -91,7 +88,11 @@ object RawCompileLike {
def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
(sources, classpath, outputDirectory, options, _, log) => {
val compiler = new RawCompiler(instance, cpOptions, log)
compiler(sources, classpath, outputDirectory, options)
compiler(sources map { x =>
PlainVirtualFile(x.toPath)
}, classpath map { x =>
PlainVirtualFile(x.toPath)
}, outputDirectory.toPath, options)
}
def compile(

View File

@@ -7,6 +7,7 @@
package sbt
import java.nio.file.Paths
import sbt.util.Level
import sbt.internal.util.{ AttributeKey, FullReader }
import sbt.internal.util.complete.{
@@ -20,7 +21,7 @@ import sbt.internal.util.complete.{
}
import sbt.internal.util.Types.{ const, idFun }
import sbt.internal.util.Util.{ AnyOps, nil, nilSeq, none }
import sbt.internal.inc.classpath.ClasspathUtilities.toLoader
import sbt.internal.inc.classpath.ClasspathUtil.toLoader
import sbt.internal.inc.ModuleUtilities
import sbt.internal.client.NetworkClient
import DefaultParsers._
@@ -318,7 +319,7 @@ object BasicCommands {
def fromCpStr = if (cp.isEmpty) "" else s" from $cpStr"
state.log info s"Applying State transformations $argsStr$fromCpStr"
val loader =
if (cp.isEmpty) parentLoader else toLoader(cp.map(f => new File(f)), parentLoader)
if (cp.isEmpty) parentLoader else toLoader(cp.map(f => Paths.get(f)), parentLoader)
val loaded =
args.map(arg => ModuleUtilities.getObject(arg, loader).asInstanceOf[State => State])
loaded.foldLeft(state)((s, obj) => obj(s))

View File

@@ -9,6 +9,7 @@ package sbt
import java.io.{ File, PrintWriter }
import java.net.{ URI, URL, URLClassLoader }
import java.nio.file.{ Path => NioPath, Paths }
import java.util.Optional
import java.util.concurrent.TimeUnit
@@ -32,8 +33,8 @@ import sbt.internal.CommandStrings.ExportStream
import sbt.internal._
import sbt.internal.classpath.AlternativeZincUtil
import sbt.internal.inc.JavaInterfaceUtil._
import sbt.internal.inc.classpath.{ ClassLoaderCache, ClasspathFilter }
import sbt.internal.inc.{ ZincLmUtil, ZincUtil }
import sbt.internal.inc.classpath.{ ClassLoaderCache, ClasspathFilter, ClasspathUtil }
import sbt.internal.inc.{ MappedFileConverter, PlainVirtualFile, Stamps, ZincLmUtil, ZincUtil }
import sbt.internal.io.{ Source, WatchState }
import sbt.internal.librarymanagement.mavenint.{
PomExtraDependencyAttributes,
@@ -81,8 +82,6 @@ import sbt.util.InterfaceUtil.{ toJavaFunction => f1 }
import sbt.util._
import sjsonnew._
import sjsonnew.support.scalajson.unsafe.Converter
import xsbti.CrossValue
import xsbti.compile.{ AnalysisContents, IncOptions, IncToolOptionsUtil }
import scala.collection.immutable.ListMap
import scala.concurrent.duration.FiniteDuration
@@ -100,7 +99,9 @@ import sbt.internal.inc.{
MixedAnalyzingCompiler,
ScalaInstance
}
import xsbti.{ CrossValue, VirtualFile, VirtualFileRef }
import xsbti.compile.{
AnalysisContents,
ClassFileManagerType,
ClasspathOptionsUtil,
CompileAnalysis,
@@ -110,6 +111,8 @@ import xsbti.compile.{
CompilerCache,
Compilers,
DefinesClass,
IncOptions,
IncToolOptionsUtil,
Inputs,
MiniSetup,
PerClasspathEntryLookup,
@@ -173,7 +176,33 @@ object Defaults extends BuildCommon {
apiMappings := Map.empty,
autoScalaLibrary :== true,
managedScalaInstance :== true,
classpathEntryDefinesClass :== FileValueCache(Locate.definesClass _).get,
classpathEntryDefinesClass := {
val converter = fileConverter.value
val f = FileValueCache({ x: NioPath =>
Locate.definesClass(converter.toVirtualFile(x))
}).get;
{ (x: File) =>
f(x.toPath)
}
},
allowMachinePath :== true,
rootPaths := {
val app = appConfiguration.value
val base = app.baseDirectory
val boot = app.provider.scalaProvider.launcher.bootDirectory
val ih = app.provider.scalaProvider.launcher.ivyHome
val coursierCache = csrCacheDirectory.value
val javaHome = Paths.get(sys.props("java.home"))
Vector(base.toPath, boot.toPath, coursierCache.toPath, ih.toPath, javaHome)
},
fileConverter := MappedFileConverter(rootPaths.value, allowMachinePath.value),
fullServerHandlers := {
(Vector(LanguageServerProtocol.handler(fileConverter.value))
++ serverHandlers.value
++ Vector(ServerHandler.fallback))
},
uncachedStamper := Stamps.uncachedStamps(fileConverter.value),
reusableStamper := Stamps.timeWrapLibraryStamps(uncachedStamper.value, fileConverter.value),
traceLevel in run :== 0,
traceLevel in runMain :== 0,
traceLevel in bgRun :== 0,
@@ -348,11 +377,7 @@ object Defaults extends BuildCommon {
else Set()
},
serverHandlers :== Nil,
fullServerHandlers := {
(Vector(LanguageServerProtocol.handler)
++ serverHandlers.value
++ Vector(ServerHandler.fallback))
},
fullServerHandlers := Nil,
insideCI :== sys.env.contains("BUILD_NUMBER") ||
sys.env.contains("CI") || SysProp.ci,
// watch related settings
@@ -576,7 +601,7 @@ object Defaults extends BuildCommon {
val compilers = ZincUtil.compilers(
instance = scalaInstance.value,
classpathOptions = classpathOptions.value,
javaHome = javaHome.value,
javaHome = javaHome.value.map(_.toPath),
scalac
)
val classLoaderCache = state.value.classLoaderCache
@@ -600,9 +625,12 @@
) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
compileOutputs := {
import scala.collection.JavaConverters._
val c = fileConverter.value
val classFiles =
manipulateBytecode.value.analysis.readStamps.getAllProductStamps.keySet.asScala
classFiles.toSeq.map(_.toPath) :+ compileAnalysisFileTask.value.toPath
(classFiles.toSeq map { x =>
c.toPath(x)
}) :+ compileAnalysisFileTask.value.toPath
},
compileOutputs := compileOutputs.triggeredBy(compile).value,
clean := (compileOutputs / clean).value,
@@ -1656,6 +1684,7 @@ object Defaults extends BuildCommon {
val label = nameForSrc(configuration.value.name)
val fiOpts = fileInputOptions.value
val reporter = (compilerReporter in compile).value
val converter = fileConverter.value
(hasScala, hasJava) match {
case (true, _) =>
val options = sOpts ++ Opts.doc.externalAPI(xapis)
@@ -1667,9 +1696,14 @@
val javadoc =
sbt.inc.Doc.cachedJavadoc(label, s.cacheStoreFactory sub "java", cs.javaTools)
javadoc.run(
srcs.toList,
cp,
out,
srcs.toList map { x =>
converter.toVirtualFile(x.toPath)
},
cp.toList map { x =>
converter.toVirtualFile(x.toPath)
},
converter,
out.toPath,
javacOptions.value.toList,
IncToolOptionsUtil.defaultIncToolOptions(),
s.log,
@@ -1721,8 +1755,8 @@
val s = streams.value
val cpFiles = data((classpath in task).value)
val fullcp = (cpFiles ++ si.allJars).distinct
val loader = sbt.internal.inc.classpath.ClasspathUtilities
.makeLoader(fullcp, si, IO.createUniqueDirectory((taskTemporaryDirectory in task).value))
val tempDir = IO.createUniqueDirectory((taskTemporaryDirectory in task).value).toPath
val loader = ClasspathUtil.makeLoader(fullcp.map(_.toPath), si, tempDir)
val compiler =
(compilers in task).value.scalac match {
case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scala"))
@@ -1746,11 +1780,12 @@
def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
val setup: Setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val c = fileConverter.value
// TODO - expose bytecode manipulation phase.
val analysisResult: CompileResult = manipulateBytecode.value
if (analysisResult.hasModified) {
val store =
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile, !useBinary)
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
}
@@ -1758,8 +1793,8 @@
val analysis = analysisResult.analysis
import scala.collection.JavaConverters._
analysis.readStamps.getAllProductStamps.asScala.foreach {
case (f, s) =>
map.put(f.toPath, sbt.nio.FileStamp.LastModified(s.getLastModified.orElse(-1L)))
case (f: VirtualFileRef, s) =>
map.put(c.toPath(f), sbt.nio.FileStamp.fromZincStamp(s))
}
analysis
}
@@ -1798,45 +1833,63 @@
private def compileAnalysisFileTask: Def.Initialize[Task[File]] =
Def.task(streams.value.cacheDirectory / compileAnalysisFilename.value)
def compileIncSetupTask = Def.task {
val converter = fileConverter.value
val lookup = new PerClasspathEntryLookup {
private val cachedAnalysisMap = analysisMap(dependencyClasspath.value)
private val cachedPerEntryDefinesClassLookup = Keys.classpathEntryDefinesClass.value
private val cachedAnalysisMap: File => Option[CompileAnalysis] =
analysisMap(dependencyClasspath.value)
private val cachedPerEntryDefinesClassLookup: File => DefinesClass =
Keys.classpathEntryDefinesClass.value
override def analysis(classpathEntry: File): Optional[CompileAnalysis] =
cachedAnalysisMap(classpathEntry).toOptional
override def definesClass(classpathEntry: File): DefinesClass =
cachedPerEntryDefinesClassLookup(classpathEntry)
override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] =
cachedAnalysisMap(converter.toPath(classpathEntry).toFile).toOptional
override def definesClass(classpathEntry: VirtualFile): DefinesClass =
cachedPerEntryDefinesClassLookup(converter.toPath(classpathEntry).toFile)
}
Setup.of(
lookup,
(skip in compile).value,
// TODO - this is kind of a bad way to grab the cache directory for streams...
compileAnalysisFileTask.value,
compileAnalysisFileTask.value.toPath,
compilerCache.value,
incOptions.value,
(compilerReporter in compile).value,
None.toOptional,
// TODO - task / setting for compile progress
None.toOptional: Optional[xsbti.compile.CompileProgress],
// TODO - task / setting for extra,
Array.empty
Array.empty: Array[xsbti.T2[String, String]],
)
}
def compileInputsSettings: Seq[Setting[_]] = {
Seq(
compileOptions := CompileOptions.of(
(classDirectory.value +: data(dependencyClasspath.value)).toArray,
sources.value.toArray,
classDirectory.value,
scalacOptions.value.toArray,
javacOptions.value.toArray,
maxErrors.value,
f1(foldMappers(sourcePositionMappers.value)),
compileOrder.value
),
compileOptions := {
val c = fileConverter.value
val cp0 = classDirectory.value +: data(dependencyClasspath.value)
val cp = cp0 map { x =>
PlainVirtualFile(x.toPath)
}
val vs = sources.value.toVector map { x =>
c.toVirtualFile(x.toPath)
}
CompileOptions.of(
cp.toArray: Array[VirtualFile],
vs.toArray,
classDirectory.value.toPath,
scalacOptions.value.toArray,
javacOptions.value.toArray,
maxErrors.value,
f1(foldMappers(sourcePositionMappers.value)),
compileOrder.value,
None.toOptional: Optional[NioPath],
Some(fileConverter.value).toOptional,
Some(reusableStamper.value).toOptional
)
},
compilerReporter := {
new LanguageServerReporter(
maxErrors.value,
streams.value.log,
foldMappers(sourcePositionMappers.value)
foldMappers(sourcePositionMappers.value),
fileConverter.value
)
},
compileInputs := {
@@ -1867,7 +1920,7 @@ object Defaults extends BuildCommon {
previousCompile := {
val setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile, !useBinary)
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
store.get().toOption match {
case Some(contents) =>
val analysis = Option(contents.getAnalysis).toOptional
@@ -2112,7 +2165,7 @@ object Classpaths {
dependencyClasspathFiles := data(dependencyClasspath.value).map(_.toPath),
dependencyClasspathFiles / outputFileStamps := {
val cache = managedFileStampCache.value
val stamper = outputFileStamper.value
val stamper = (managedSourcePaths / outputFileStamper).value
dependencyClasspathFiles.value.flatMap(p => cache.getOrElseUpdate(p, stamper).map(p -> _))
}
)
@@ -3573,10 +3626,10 @@ object Classpaths {
internalPluginClasspath: Seq[File],
isDotty: Boolean
): Seq[String] = {
import sbt.internal.inc.classpath.ClasspathUtil.compilerPlugins
val pluginClasspath = report.matching(configurationFilter(CompilerPlugin.name)) ++ internalPluginClasspath
val plugins =
sbt.internal.inc.classpath.ClasspathUtilities.compilerPlugins(pluginClasspath, isDotty)
plugins.map("-Xplugin:" + _.getAbsolutePath).toSeq
val plugins = compilerPlugins(pluginClasspath.map(_.toPath), isDotty)
plugins.map("-Xplugin:" + _.toAbsolutePath.toString).toSeq
}
private[this] lazy val internalCompilerPluginClasspath: Initialize[Task[Classpath]] =

View File

@@ -7,6 +7,7 @@
package sbt
import java.nio.file.{ Path => NioPath }
import java.io.File
import java.net.URL
@@ -33,7 +34,9 @@ import sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, Upda
import sbt.nio.file.Glob
import sbt.testing.Framework
import sbt.util.{ Level, Logger }
import xsbti.FileConverter
import xsbti.compile._
import xsbti.compile.analysis.ReadStamps
import scala.concurrent.duration.{ Duration, FiniteDuration }
import scala.xml.{ NodeSeq, Node => XNode }
@@ -214,6 +217,11 @@ object Keys {
val aggregate = settingKey[Boolean]("Configures task aggregation.").withRank(BMinusSetting)
val sourcePositionMappers = taskKey[Seq[xsbti.Position => Option[xsbti.Position]]]("Maps positions in generated source files to the original source it was generated from").withRank(DTask)
private[sbt] val externalHooks = taskKey[ExternalHooks]("The external hooks used by zinc.")
val fileConverter = settingKey[FileConverter]("The file converter used to convert between Path and VirtualFile")
val allowMachinePath = settingKey[Boolean]("Allow machine-specific paths during conversion.")
val rootPaths = settingKey[Seq[NioPath]]("The root paths used to abstract machine-specific paths.")
private[sbt] val uncachedStamper = settingKey[ReadStamps]("The stamper to create timestamp or hash.")
private[sbt] val reusableStamper = settingKey[ReadStamps]("The stamper can be reused across subprojects and sessions.")
// package keys
val packageBin = taskKey[File]("Produces a main artifact, such as a binary jar.").withRank(ATask)

View File

@@ -15,7 +15,7 @@ import sbt.Keys._
import sbt.nio.Keys._
import sbt.Project._
import sbt.internal.inc.ModuleUtilities
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.classpath.ClasspathUtil
import sbt.internal.util.complete.{ DefaultParsers, Parser }
import sbt.io._
import sbt.io.syntax._
@@ -92,7 +92,8 @@ object ScriptedPlugin extends AutoPlugin {
private[sbt] def scriptedTestsTask: Initialize[Task[AnyRef]] =
Def.task {
val loader = ClasspathUtilities.toLoader(scriptedClasspath.value, scalaInstance.value.loader)
val cp = scriptedClasspath.value.get.map(_.toPath)
val loader = ClasspathUtil.toLoader(cp, scalaInstance.value.loader)
try {
ModuleUtilities.getObject("sbt.scriptedtest.ScriptedTests", loader)
} catch {

View File

@@ -8,6 +8,7 @@
package sbt
import java.lang.reflect.InvocationTargetException
import java.nio.file.Path
import java.io.File
import sbt.io._, syntax._
@@ -16,7 +17,7 @@ import sbt.internal.util.complete.{ DefaultParsers, Parser }, DefaultParsers._
import xsbti.AppConfiguration
import sbt.librarymanagement._
import sbt.librarymanagement.ivy.{ IvyConfiguration, IvyDependencyResolution }
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.classpath.ClasspathUtil
import BasicCommandStrings._, BasicKeys._
private[sbt] object TemplateCommandUtil {
@@ -106,7 +107,7 @@ private[sbt] object TemplateCommandUtil {
log: Logger
): ClassLoader = {
val cp = classpathForInfo(info, ivyConf, globalBase, scalaModuleInfo, log)
ClasspathUtilities.toLoader(cp, config.provider.loader)
ClasspathUtil.toLoader(cp, config.provider.loader)
}
private def call(
@@ -134,13 +135,13 @@ private[sbt] object TemplateCommandUtil {
globalBase: File,
scalaModuleInfo: Option[ScalaModuleInfo],
log: Logger
): List[File] = {
): List[Path] = {
val lm = IvyDependencyResolution(ivyConf)
val templatesBaseDirectory = new File(globalBase, "templates")
val templateId = s"${info.module.organization}_${info.module.name}_${info.module.revision}"
val templateDirectory = new File(templatesBaseDirectory, templateId)
def jars = (templateDirectory ** -DirectoryFilter).get
if (!(info.module.revision endsWith "-SNAPSHOT") && jars.nonEmpty) jars.toList
if (!(info.module.revision endsWith "-SNAPSHOT") && jars.nonEmpty) jars.toList.map(_.toPath)
else {
IO.createDirectory(templateDirectory)
val m = lm.wrapDependencyInModule(info.module, scalaModuleInfo)
@@ -148,7 +149,7 @@ private[sbt] object TemplateCommandUtil {
case Left(_) => sys.error(s"Retrieval of ${info.module} failed.")
case Right(files) => files.toList
}
xs
xs.map(_.toPath)
}
}
}

View File

@@ -16,7 +16,7 @@ import sbt.ClassLoaderLayeringStrategy._
import sbt.Keys._
import sbt.internal.classpath.ClassLoaderCache
import sbt.internal.inc.ScalaInstance
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.classpath.ClasspathUtil
import sbt.internal.util.Attributed
import sbt.internal.util.Attributed.data
import sbt.io.IO
@@ -52,7 +52,7 @@ private[sbt] object ClassLoaders {
fullCP = fullCP,
allDependenciesSet = dependencyJars(dependencyClasspath).value.filterNot(exclude).toSet,
cache = extendedClassLoaderCache.value,
resources = ClasspathUtilities.createClasspathResources(fullCP.map(_._1), si),
resources = ClasspathUtil.createClasspathResources(fullCP.map(_._1.toPath), si),
tmp = IO.createUniqueDirectory(taskTemporaryDirectory.value),
scope = resolvedScoped.value.scope,
logger = logger,
@@ -94,6 +94,7 @@ private[sbt] object ClassLoaders {
val newLoader =
(classpath: Seq[File]) => {
val mappings = classpath.map(f => f.getName -> f).toMap
val cp = classpath.map(_.toPath)
val transformedDependencies = allDeps.map(f => mappings.getOrElse(f.getName, f))
buildLayers(
strategy = classLoaderLayeringStrategy.value: @sbtUnchecked,
@@ -101,7 +102,7 @@
fullCP = classpath.map(f => f -> IO.getModifiedTimeOrZero(f)),
allDependenciesSet = transformedDependencies.toSet,
cache = extendedClassLoaderCache.value: @sbtUnchecked,
resources = ClasspathUtilities.createClasspathResources(classpath, instance),
resources = ClasspathUtil.createClasspathResources(cp, instance),
tmp = taskTemporaryDirectory.value: @sbtUnchecked,
scope = resolvedScope,
logger = logger,

View File

@@ -14,18 +14,16 @@ import sbt.Def
import sbt.Keys._
import sbt.internal.inc.ExternalLookup
import sbt.internal.inc.Stamp.equivStamp.equiv
import sbt.io.syntax._
import sbt.nio.Keys._
import sbt.nio.file.syntax._
import sbt.nio.file.{ FileAttributes, FileTreeView, RecursiveGlob }
import sbt.nio.{ FileChanges, FileStamp, FileStamper }
import sbt.util.InterfaceUtil.jo2o
import xsbti.{ VirtualFile, VirtualFileRef }
import xsbti.compile._
import xsbti.compile.analysis.Stamp
import scala.collection.JavaConverters._
private[sbt] object ExternalHooks {
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
private type Func =
(FileChanges, FileChanges, FileTreeView[(Path, FileAttributes)]) => ExternalHooks
def default: Def.Initialize[sbt.Task[Func]] = Def.task {
@@ -34,13 +32,15 @@ private[sbt] object ExternalHooks {
val cp = dependencyClasspath.value.map(_.data)
cp.foreach { file =>
val path = file.toPath
managedCache.getOrElseUpdate(path, FileStamper.LastModified)
managedCache.getOrElseUpdate(path, FileStamper.Hash)
}
val classGlob = classDirectory.value.toGlob / RecursiveGlob / "*.class"
val options = (compileOptions in compile).value
((inputFileChanges, outputFileChanges, fileTreeView) => {
fileTreeView.list(classGlob).foreach {
case (path, _) => managedCache.update(path, FileStamper.LastModified)
case (path, _) =>
s"updating $path"
managedCache.update(path, FileStamper.Hash)
}
apply(inputFileChanges, outputFileChanges, options, unmanagedCache, managedCache)
}): Func
@@ -52,34 +52,50 @@ private[sbt] object ExternalHooks {
unmanagedCache: FileStamp.Cache,
managedCache: FileStamp.Cache
): DefaultExternalHooks = {
val converter = jo2o(options.converter) getOrElse {
sys.error("file converter was expected")
}
val lookup = new ExternalLookup {
override def changedSources(previousAnalysis: CompileAnalysis): Option[Changes[File]] = Some {
new Changes[File] {
val getAdded: java.util.Set[File] = new java.util.HashSet[File]
val getRemoved: java.util.Set[File] = new java.util.HashSet[File]
val getChanged: java.util.Set[File] = new java.util.HashSet[File]
val getUnmodified: java.util.Set[File] = new java.util.HashSet[File]
private def add(p: Path, sets: java.util.Set[File]*): Unit = {
sets.foreach(add(p.toFile, _))
override def changedSources(
previousAnalysis: CompileAnalysis
): Option[Changes[VirtualFileRef]] = Some {
new Changes[VirtualFileRef] {
override val getAdded: java.util.Set[VirtualFileRef] =
new java.util.HashSet[VirtualFileRef]
override val getRemoved: java.util.Set[VirtualFileRef] =
new java.util.HashSet[VirtualFileRef]
override val getChanged: java.util.Set[VirtualFileRef] =
new java.util.HashSet[VirtualFileRef]
override val getUnmodified: java.util.Set[VirtualFileRef] =
new java.util.HashSet[VirtualFileRef]
override def toString: String =
s"""Changes(added = $getAdded, removed = $getRemoved, changed = $getChanged, unmodified = ...)"""
private def add(p: VirtualFileRef, sets: java.util.Set[VirtualFileRef]*): Unit = {
sets.foreach(add(p, _))
}
private def add(f: File, set: java.util.Set[File]): Unit = { set.add(f); () }
val allChanges = new java.util.HashSet[File]
private def add(f: VirtualFileRef, set: java.util.Set[VirtualFileRef]): Unit = {
set.add(f); ()
}
val allChanges = new java.util.HashSet[VirtualFileRef]
inputFileChanges match {
case FileChanges(c, d, m, _) =>
c.foreach(add(_, getAdded, allChanges))
d.foreach(add(_, getRemoved, allChanges))
m.foreach(add(_, getChanged, allChanges))
c.map(converter.toVirtualFile).foreach(add(_, getAdded, allChanges))
d.map(converter.toVirtualFile).foreach(add(_, getRemoved, allChanges))
m.map(converter.toVirtualFile).foreach(add(_, getChanged, allChanges))
case _ =>
}
override def isEmpty: java.lang.Boolean =
getAdded.isEmpty && getRemoved.isEmpty && getChanged.isEmpty
private val prevSources = previousAnalysis.readStamps().getAllSourceStamps
prevSources.forEach { (file: File, s: Stamp) =>
prevSources.forEach { (file: VirtualFileRef, s: Stamp) =>
if (!allChanges.contains(file)) {
val path = file.toPath
val path: Path = converter.toPath(file)
unmanagedCache
.get(path)
.orElse(managedCache.getOrElseUpdate(file.toPath, FileStamper.Hash)) match {
.orElse(managedCache.getOrElseUpdate(path, FileStamper.Hash)) match {
case None => add(file, getRemoved)
case Some(stamp) =>
if (equiv(stamp.stamp, s)) add(file, getUnmodified)
@@ -97,34 +113,50 @@
): Boolean = true
// This could use the cache as well, but it would complicate the cache implementation.
override def hashClasspath(files: Array[File]): Optional[Array[FileHash]] =
override def hashClasspath(files: Array[VirtualFile]): Optional[Array[FileHash]] =
Optional.empty[Array[FileHash]]
override def changedBinaries(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
val base =
import scala.collection.JavaConverters._
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
override def changedBinaries(
previousAnalysis: CompileAnalysis
): Option[Set[VirtualFileRef]] = {
val base: Set[VirtualFileRef] =
(outputFileChanges.modified ++ outputFileChanges.created ++ outputFileChanges.deleted)
.map(_.toFile)
.map(converter.toVirtualFile(_))
.toSet
Some(base ++ previousAnalysis.readStamps.getAllBinaryStamps.asScala.flatMap {
Some(base ++ previousAnalysis.readStamps.getAllLibraryStamps.asScala.flatMap {
case (file, stamp) =>
managedCache.getOrElseUpdate(file.toPath, FileStamper.LastModified) match {
case Some(cachedStamp) if equiv(cachedStamp.stamp, stamp) => None
val path = converter.toPath(file)
val stampOpt = managedCache.getOrElseUpdate(path, FileStamper.Hash)
stampOpt match {
case Some(s) if equiv(s.stamp, stamp) => None
case _ =>
javaHome match {
case Some(h) if file.toPath.startsWith(h) => None
case _ if file.getName == "rt.jar" => None
case _ => Some(file)
case Some(h) if path.startsWith(h) => None
case _ if file.name == "rt.jar" => None
case _ =>
// stampOpt map { s => println(s"stamp changed for $file from ${s.stamp} to $stamp") }
Some(file)
}
}
})
}
override def removedProducts(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
override def removedProducts(
previousAnalysis: CompileAnalysis
): Option[Set[VirtualFileRef]] = {
None
Some(previousAnalysis.readStamps.getAllProductStamps.asScala.flatMap {
case (file, stamp) =>
managedCache.get(file.toPath) match {
val path = converter.toPath(file)
managedCache.get(path) match {
case Some(s) if equiv(s.stamp, stamp) => None
case _ => Some(file)
case Some(s) => Some(file)
case _ =>
// This shouldn't be necessary
if (java.nio.file.Files.exists(path)) None
else Some(file)
}
}.toSet)
}

View File

@@ -18,7 +18,7 @@ import sbt.Project.inScope
import sbt.Scope.GlobalScope
import sbt.compiler.Eval
import sbt.internal.BuildStreams._
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.classpath.ClasspathUtil
import sbt.internal.inc.{ ScalaInstance, ZincLmUtil, ZincUtil }
import sbt.internal.util.Attributed.data
import sbt.internal.util.Types.const
@@ -1285,7 +1285,7 @@ private[sbt] object Load {
// Load the definition classpath separately to avoid conflicts, see #511.
if (definitionClasspath.isEmpty) parentLoader
else ClasspathUtilities.toLoader(data(definitionClasspath), parentLoader)
else ClasspathUtil.toLoader(data(definitionClasspath).map(_.toPath), parentLoader)
}
def buildPluginDefinition(dir: File, s: State, config: LoadBuildConfiguration): PluginData = {

View File

@@ -9,7 +9,6 @@ package sbt
package internal
package server
import java.io.File
import java.net.URI
import java.nio.file._
@@ -27,7 +26,6 @@ import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
import scalacache._
import sbt.io.IO
import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler }
import sbt.internal.inc.JavaInterfaceUtil._
import sbt.internal.protocol.JsonRpcResponseError
@@ -36,6 +34,7 @@ import sbt.internal.langserver
import sbt.internal.langserver.{ ErrorCodes, Location, Position, Range, TextDocumentPositionParams }
import sbt.util.Logger
import sbt.Keys._
import xsbti.{ FileConverter, VirtualFileRef }
private[sbt] object Definition {
def send[A: JsonFormat](source: CommandSource, execId: String)(params: A): Unit = {
@@ -157,10 +156,10 @@ private[sbt] object Definition {
}
}
def markPosition(file: File, sym: String): Seq[(File, Long, Long, Long)] = {
def markPosition(file: Path, sym: String): Seq[(URI, Long, Long, Long)] = {
val findInLine = classTraitObjectInLine(sym)(_)
Files
.lines(file.toPath)
.lines(file)
.iterator
.asScala
.zipWithIndex
@@ -169,7 +168,7 @@ private[sbt] object Definition {
findInLine(line)
.collect {
case (sym, from) =>
(file, lineNumber.toLong, from.toLong, from.toLong + sym.length)
(file.toUri, lineNumber.toLong, from.toLong, from.toLong + sym.length)
}
}
.toSeq
@@ -200,7 +199,7 @@ private[sbt] object Definition {
cache.put(AnalysesKey)(value, ttl)
}
private def storeAnalysis(cacheFile: File, useBinary: Boolean): Option[Analysis] =
private def storeAnalysis(cacheFile: Path, useBinary: Boolean): Option[Analysis] =
MixedAnalyzingCompiler
.staticCachedStore(cacheFile, !useBinary)
.get
@@ -225,7 +224,7 @@ private[sbt] object Definition {
}
def collectAnalysesTask = Def.task {
val cacheFile = compileIncSetup.value.cacheFile.getAbsolutePath
val cacheFile: String = compileIncSetup.value.cacheFile.getAbsolutePath
val useBinary = enableBinaryCompileAnalysis.value
val s = state.value
s.log.debug(s"analysis location ${cacheFile -> useBinary}")
@ -246,7 +245,7 @@ private[sbt] object Definition {
}
val addToCache = uninitialized.collect {
case (title @ (file, useBinary), _) if Files.exists(Paths.get(file)) =>
(title, storeAnalysis(Paths.get(file).toFile, !useBinary))
(title, storeAnalysis(Paths.get(file), !useBinary))
}
val validCaches = working ++ addToCache
if (addToCache.nonEmpty)
@ -262,6 +261,7 @@ private[sbt] object Definition {
jsonDefinition: JValue,
requestId: String,
commandSource: CommandSource,
converter: FileConverter,
log: Logger,
)(implicit ec: ExecutionContext): Future[Unit] = Future {
val LspDefinitionLogHead = "lsp-definition"
@ -297,11 +297,12 @@ private[sbt] object Definition {
analysis.relations.definesClass(className) ++
analysis.relations.libraryDefinesClass(className)
}
.flatMap { classFile =>
textProcessor.markPosition(classFile, sym).collect {
case (file, line, from, to) =>
.flatMap { classFile: VirtualFileRef =>
val x = converter.toPath(classFile)
textProcessor.markPosition(x, sym).collect {
case (uri, line, from, to) =>
Location(
IO.toURI(file).toString,
uri.toString,
Range(Position(line, from), Position(line, to)),
)
}

View File

@ -20,8 +20,8 @@ import sbt.internal.protocol.codec._
import sbt.internal.langserver._
import sbt.internal.util.ObjectEvent
import sbt.util.Logger
import scala.concurrent.ExecutionContext
import xsbti.FileConverter
private[sbt] final case class LangServerError(code: Long, message: String)
extends Throwable(message)
@ -37,68 +37,69 @@ private[sbt] object LanguageServerProtocol {
)
}
lazy val handler: ServerHandler = ServerHandler({
case callback: ServerCallback =>
import callback._
ServerIntent(
{
import sbt.internal.langserver.codec.JsonProtocol._
import internalJsonProtocol._
def json(r: JsonRpcRequestMessage) =
r.params.getOrElse(
throw LangServerError(
ErrorCodes.InvalidParams,
s"param is expected on '${r.method}' method."
)
)
def handler(converter: FileConverter): ServerHandler =
ServerHandler({
case callback: ServerCallback =>
import callback._
ServerIntent(
{
case r: JsonRpcRequestMessage if r.method == "initialize" =>
if (authOptions(ServerAuthentication.Token)) {
val param = Converter.fromJson[InitializeParams](json(r)).get
val optionJson = param.initializationOptions.getOrElse(
throw LangServerError(
ErrorCodes.InvalidParams,
"initializationOptions is expected on 'initialize' param."
)
import sbt.internal.langserver.codec.JsonProtocol._
import internalJsonProtocol._
def json(r: JsonRpcRequestMessage) =
r.params.getOrElse(
throw LangServerError(
ErrorCodes.InvalidParams,
s"param is expected on '${r.method}' method."
)
val opt = Converter.fromJson[InitializeOption](optionJson).get
val token = opt.token.getOrElse(sys.error("'token' is missing."))
if (authenticate(token)) ()
else throw LangServerError(ErrorCodes.InvalidRequest, "invalid token")
} else ()
setInitialized(true)
appendExec(Exec(s"collectAnalyses", None, Some(CommandSource(name))))
jsonRpcRespond(InitializeResult(serverCapabilities), Option(r.id))
)
case r: JsonRpcRequestMessage if r.method == "textDocument/definition" =>
implicit val executionContext: ExecutionContext = StandardMain.executionContext
Definition.lspDefinition(json(r), r.id, CommandSource(name), log)
{
case r: JsonRpcRequestMessage if r.method == "initialize" =>
if (authOptions(ServerAuthentication.Token)) {
val param = Converter.fromJson[InitializeParams](json(r)).get
val optionJson = param.initializationOptions.getOrElse(
throw LangServerError(
ErrorCodes.InvalidParams,
"initializationOptions is expected on 'initialize' param."
)
)
val opt = Converter.fromJson[InitializeOption](optionJson).get
val token = opt.token.getOrElse(sys.error("'token' is missing."))
if (authenticate(token)) ()
else throw LangServerError(ErrorCodes.InvalidRequest, "invalid token")
} else ()
setInitialized(true)
appendExec(Exec(s"collectAnalyses", None, Some(CommandSource(name))))
jsonRpcRespond(InitializeResult(serverCapabilities), Option(r.id))
case r: JsonRpcRequestMessage if r.method == "textDocument/definition" =>
implicit val executionContext: ExecutionContext = StandardMain.executionContext
Definition.lspDefinition(json(r), r.id, CommandSource(name), converter, log)
()
case r: JsonRpcRequestMessage if r.method == "sbt/exec" =>
val param = Converter.fromJson[SbtExecParams](json(r)).get
appendExec(Exec(param.commandLine, Some(r.id), Some(CommandSource(name))))
()
case r: JsonRpcRequestMessage if r.method == "sbt/setting" =>
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[Q](json(r)).get
onSettingQuery(Option(r.id), param)
case r: JsonRpcRequestMessage if r.method == "sbt/cancelRequest" =>
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[CRP](json(r)).get
onCancellationRequest(Option(r.id), param)
case r: JsonRpcRequestMessage if r.method == "sbt/completion" =>
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[CP](json(r)).get
onCompletionRequest(Option(r.id), param)
}
}, {
case n: JsonRpcNotificationMessage if n.method == "textDocument/didSave" =>
appendExec(Exec(";Test/compile; collectAnalyses", None, Some(CommandSource(name))))
()
case r: JsonRpcRequestMessage if r.method == "sbt/exec" =>
val param = Converter.fromJson[SbtExecParams](json(r)).get
appendExec(Exec(param.commandLine, Some(r.id), Some(CommandSource(name))))
()
case r: JsonRpcRequestMessage if r.method == "sbt/setting" =>
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[Q](json(r)).get
onSettingQuery(Option(r.id), param)
case r: JsonRpcRequestMessage if r.method == "sbt/cancelRequest" =>
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[CRP](json(r)).get
onCancellationRequest(Option(r.id), param)
case r: JsonRpcRequestMessage if r.method == "sbt/completion" =>
import sbt.protocol.codec.JsonProtocol._
val param = Converter.fromJson[CP](json(r)).get
onCompletionRequest(Option(r.id), param)
}
}, {
case n: JsonRpcNotificationMessage if n.method == "textDocument/didSave" =>
appendExec(Exec(";Test/compile; collectAnalyses", None, Some(CommandSource(name))))
()
}
)
})
)
})
}
/** Implements Language Server Protocol <https://github.com/Microsoft/language-server-protocol>. */

View File

@ -12,7 +12,7 @@ package server
import java.io.File
import sbt.internal.inc.ManagedLoggedReporter
import sbt.internal.util.ManagedLogger
import xsbti.{ Problem, Position => XPosition, Severity }
import xsbti.{ FileConverter, Problem, Position => XPosition, Severity }
import xsbti.compile.CompileAnalysis
import sbt.internal.langserver.{
PublishDiagnosticsParams,
@ -24,7 +24,6 @@ import sbt.internal.langserver.{
import sbt.internal.inc.JavaInterfaceUtil._
import scala.collection.mutable
import scala.collection.JavaConverters._
import sbt.io.IO
/**
* Defines a compiler reporter that uses event logging provided by a `ManagedLogger`.
@ -36,7 +35,8 @@ import sbt.io.IO
class LanguageServerReporter(
maximumErrors: Int,
logger: ManagedLogger,
sourcePositionMapper: XPosition => XPosition = identity[XPosition]
sourcePositionMapper: XPosition => XPosition = identity[XPosition],
converter: FileConverter
) extends ManagedLoggedReporter(maximumErrors, logger, sourcePositionMapper) {
lazy val exchange = StandardMain.exchange
@ -83,7 +83,8 @@ class LanguageServerReporter(
import sbt.internal.langserver.codec.JsonProtocol._
val files = analysis.readSourceInfos.getAllSourceInfos.keySet.asScala
files foreach { f =>
val params = PublishDiagnosticsParams(IO.toURI(f).toString, Vector())
val p = converter.toPath(f)
val params = PublishDiagnosticsParams(p.toUri.toString, Vector())
exchange.notifyEvent("textDocument/publishDiagnostics", params)
}
}
@ -95,7 +96,7 @@ class LanguageServerReporter(
problemsByFile.get(sourceFile) match {
case Some(xs: mutable.ListBuffer[Problem]) =>
val ds = toDiagnostics(xs)
val params = PublishDiagnosticsParams(IO.toURI(sourceFile).toString, ds)
val params = PublishDiagnosticsParams(sbt.io.IO.toURI(sourceFile).toString, ds)
exchange.notifyEvent("textDocument/publishDiagnostics", params)
case _ =>
}

View File

@ -78,17 +78,22 @@ object FileStamp {
case e: IOException => Some(Error(e))
}
private[sbt] def hash(string: String): Hash =
new FileHashImpl(sbt.internal.inc.Hash.unsafeFromString(string))
private[sbt] def hash(path: Path): Option[Hash] = Stamper.forHash(path.toFile) match {
new FileHashImpl(try {
sbt.internal.inc.Stamp.fromString(string)
} catch {
case _: Throwable => EmptyStamp
})
private[sbt] def hash(path: Path): Option[Hash] = Stamper.forFarmHashP(path) match {
case EmptyStamp => None
case s => Some(new FileHashImpl(s))
}
private[sbt] def fromZincStamp(stamp: XStamp): Hash = new FileHashImpl(stamp)
private[sbt] def lastModified(path: Path): Option[LastModified] =
IO.getModifiedTimeOrZero(path.toFile) match {
case 0 => None
case l => Some(LastModified(l))
}
private[this] class FileHashImpl(val xstamp: XStamp) extends Hash(xstamp.getHash.orElse(""))
private[this] class FileHashImpl(val xstamp: XStamp) extends Hash(xstamp.toString)
private[sbt] sealed abstract case class Hash private[sbt] (hex: String) extends FileStamp
private[sbt] final case class LastModified private[sbt] (time: Long) extends FileStamp
private[sbt] final case class Error(exception: IOException) extends FileStamp

View File

@ -14,7 +14,7 @@ object Dependencies {
private val ioVersion = nightlyVersion.getOrElse("1.4.0-M2")
private val lmVersion =
sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("1.3.0")
val zincVersion = nightlyVersion.getOrElse("1.3.1")
val zincVersion = nightlyVersion.getOrElse("1.4.0-M2")
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

View File

@ -12,7 +12,7 @@ import java.lang.reflect.Method
import java.lang.reflect.Modifier.{ isPublic, isStatic }
import sbt.internal.inc.ScalaInstance
import sbt.internal.inc.classpath.{ ClasspathFilter, ClasspathUtilities }
import sbt.internal.inc.classpath.{ ClasspathFilter, ClasspathUtil }
import sbt.internal.util.MessageOnlyException
import sbt.io.Path
import sbt.util.Logger
@ -61,7 +61,10 @@ class ForkRun(config: ForkOptions) extends ScalaRun {
class Run(private[sbt] val newLoader: Seq[File] => ClassLoader, trapExit: Boolean)
extends ScalaRun {
def this(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) =
this((cp: Seq[File]) => ClasspathUtilities.makeLoader(cp, instance, nativeTmp), trapExit)
this(
(cp: Seq[File]) => ClasspathUtil.makeLoader(cp.map(_.toPath), instance, nativeTmp.toPath),
trapExit
)
private[sbt] def runWithLoader(
loader: ClassLoader,

View File

@ -1,4 +1,6 @@
-> doc
> debug
# -> doc
> set sources in (Compile, doc) := { val src = (sources in Compile).value; src.filterNot(_.getName contains "B") }

View File

@ -6,5 +6,7 @@ TaskKey[Unit]("verify-binary-deps") := {
val base = baseDirectory.value
val nestedPkgClass = classDir / "test/nested.class"
val fooSrc = base / "src/main/scala/test/nested/Foo.scala"
assert(!a.relations.libraryDeps(fooSrc).contains(nestedPkgClass), a.relations.toString)
val converter = fileConverter.value
assert(!a.relations.libraryDeps(converter.toVirtualFile(fooSrc.toPath))
.contains(converter.toVirtualFile(nestedPkgClass.toPath)), a.relations.toString)
}

View File

@ -6,9 +6,9 @@ TaskKey[Unit]("checkJavaFailures") := {
// First error should be on a specific line/file
val first = ps(0)
assert(first.position.line.get == 3, s"First failure position is not line 3, failure = $first")
val javaFile = baseDirectory.value / "src/main/java/bad.java"
val file = new File(first.position.sourcePath.get)
assert(file == javaFile, s"First failure file location is not $javaFile, $first")
val expected = "${0}/src/main/java/bad.java"
val sourcePath = first.position.sourcePath.get
assert(sourcePath == expected, s"$sourcePath == $expected was false")
}
TaskKey[Unit]("checkScalaFailures") := {
@ -19,9 +19,7 @@ TaskKey[Unit]("checkScalaFailures") := {
// First error should be on a specific line/file
val first = ps(0)
assert(first.position.line.get == 2, s"First failure position is not line 2, failure = $first")
val scalaFile = baseDirectory.value / "src/main/scala/bad.scala"
val file = new File(first.position.sourcePath.get)
assert(file == scalaFile, s"First failure file location is not $scalaFile, $first")
val expected = "${0}/src/main/scala/bad.scala"
val sourcePath = first.position.sourcePath.get
assert(sourcePath == expected, s"$sourcePath == $expected was false")
}
compileOrder := CompileOrder.Mixed

View File

@ -1,5 +1,7 @@
import xsbti.VirtualFileRef
import sbt.internal.inc.Analysis
import xsbti.compile.{PreviousResult, CompileAnalysis, MiniSetup}
import xsbti.compile.analysis.{ Compilation => XCompilation }
previousCompile in Compile := {
val previous = (previousCompile in Compile).value
@ -17,20 +19,22 @@ previousCompile in Compile := {
TaskKey[Unit]("checkCompilations") := {
val analysis = (compile in Compile).value match { case a: Analysis => a }
val srcDir = (scalaSource in Compile).value
def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f
def findFile(className: String): File = {
relative(analysis.relations.definesClass(className).head)
def findFile(className: String): VirtualFileRef = {
analysis.relations.definesClass(className).head
}
val allCompilations = analysis.compilations.allCompilations
val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c =>
val recompiledFiles: Seq[Set[VirtualFileRef]] = allCompilations map { c: XCompilation =>
val recompiledFiles = analysis.apis.internal.collect {
case (cn, api) if api.compilationTimestamp == c.getStartTime => findFile(cn)
}
recompiledFiles.toSet
}
def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = {
val files = fileNames.map(new java.io.File(_))
assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files))
assert(recompiledFiles(iteration).map(_.name) == fileNames,
s"""${recompiledFiles(iteration).map(_.name)} != $fileNames
|
|allCompilations = $allCompilations
|""".stripMargin)
}
assert(allCompilations.size == 2, s"All compilations is ${allCompilations.size}")
// B.scala is just compiled at the beginning

View File

@ -1,5 +1,8 @@
import sbt.internal.inc.Analysis
import xsbti.VirtualFileRef
import xsbti.api.AnalyzedClass
import xsbti.compile.{PreviousResult, CompileAnalysis, MiniSetup}
import xsbti.compile.analysis.{ Compilation => XCompilation }
logLevel := Level.Debug
@ -19,27 +22,39 @@ previousCompile in Compile := {
// which that heuristic would distort
incOptions := incOptions.value.withRecompileAllFraction(1.0)
Global / allowMachinePath := false
/* Performs checks related to compilations:
* a) checks in which compilation given set of files was recompiled
* b) checks overall number of compilations performed
*/
TaskKey[Unit]("checkCompilations") := {
val log = streams.value.log
val c = fileConverter.value
val vs = (Compile / sources).value.toVector map { x =>
c.toVirtualFile(x.toPath)
}
// log.info(vs.mkString(","))
val analysis = (compile in Compile).value match { case a: Analysis => a }
val srcDir = (scalaSource in Compile).value
def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f
def findFile(className: String): File = {
relative(analysis.relations.definesClass(className).head)
def findFile(className: String): VirtualFileRef = {
analysis.relations.definesClass(className).head
}
val allCompilations = analysis.compilations.allCompilations
val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c =>
val allCompilations: Seq[XCompilation] = analysis.compilations.allCompilations
log.info(s"allCompilations: $allCompilations")
val recompiledFiles: Seq[Set[VirtualFileRef]] = allCompilations map { c: XCompilation =>
val recompiledFiles = analysis.apis.internal.collect {
case (cn, api) if api.compilationTimestamp == c.getStartTime => findFile(cn)
}
recompiledFiles.toSet
}
def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = {
val files = fileNames.map(new java.io.File(_))
assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files))
assert(recompiledFiles(iteration).map(_.name) == fileNames,
s"""${recompiledFiles(iteration).map(_.name)} != $fileNames
|
|allCompilations = $allCompilations
|""".stripMargin)
}
// Y.scala is compiled only at the beginning as changes to A.scala do not affect it
recompiledFilesInIteration(0, Set("X.scala", "Y.scala"))

View File

@ -10,9 +10,10 @@ package internal
package inc
import java.io.File
import java.nio.file.{ Files, Path }
import java.util.concurrent.Callable
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.classpath.ClasspathUtil
import sbt.io.IO
import sbt.internal.librarymanagement._
import sbt.internal.util.{ BufferedLogger, FullLogger }
@ -81,10 +82,16 @@ private[sbt] object ZincComponentCompiler {
compiledBridge(bridgeSources, scalaInstance, logger)
}
private case class ScalaArtifacts(compiler: File, library: File, others: Vector[File])
// internal representation of Scala artifacts
private case class ScalaArtifacts(
compilerJar: Path,
libraryJars: Vector[Path],
others: Vector[Path]
)
private def getScalaArtifacts(scalaVersion: String, logger: Logger): ScalaArtifacts = {
def isPrefixedWith(artifact: File, prefix: String) = artifact.getName.startsWith(prefix)
def isPrefixedWith(artifact: Path, prefix: String) =
artifact.getFileName.toString.startsWith(prefix)
val fullLogger = new FullLogger(logger)
val CompileConf = Some(Configurations.Compile.name)
@ -107,25 +114,29 @@ private[sbt] object ZincComponentCompiler {
fullLogger,
"Scala compiler and library",
)
val isScalaCompiler = (f: File) => isPrefixedWith(f, "scala-compiler-")
val isScalaLibrary = (f: File) => isPrefixedWith(f, "scala-library-")
val isScalaCompiler = (f: Path) => isPrefixedWith(f, "scala-compiler-")
val isScalaLibrary = (f: Path) => isPrefixedWith(f, "scala-library-")
val maybeScalaCompiler = allArtifacts.find(isScalaCompiler)
val maybeScalaLibrary = allArtifacts.find(isScalaLibrary)
val others = allArtifacts.filterNot(a => isScalaCompiler(a) || isScalaLibrary(a))
val scalaCompilerJar = maybeScalaCompiler.getOrElse(throw MissingScalaJar.compiler)
val scalaLibraryJar = maybeScalaLibrary.getOrElse(throw MissingScalaJar.library)
ScalaArtifacts(scalaCompilerJar, scalaLibraryJar, others)
ScalaArtifacts(scalaCompilerJar, Vector(scalaLibraryJar), others)
}
override def fetchScalaInstance(scalaVersion: String, logger: Logger): ScalaInstance = {
val scalaArtifacts = getScalaArtifacts(scalaVersion, logger)
val scalaCompiler = scalaArtifacts.compiler
val scalaLibrary = scalaArtifacts.library
val jarsToLoad = (scalaCompiler +: scalaLibrary +: scalaArtifacts.others).toArray
assert(jarsToLoad.forall(_.exists), "One or more jar(s) in the Scala instance do not exist.")
val loaderLibraryOnly = ClasspathUtilities.toLoader(Vector(scalaLibrary))
val jarsToLoad2 = jarsToLoad.toVector.filterNot(_ == scalaLibrary)
val loader = ClasspathUtilities.toLoader(jarsToLoad2, loaderLibraryOnly)
val scalaCompilerJar = scalaArtifacts.compilerJar
val scalaLibraryJars = scalaArtifacts.libraryJars
val jarsToLoad: Vector[Path] =
(Vector(scalaCompilerJar) ++ scalaLibraryJars ++ scalaArtifacts.others)
assert(
jarsToLoad.forall(Files.exists(_)),
"One or more jar(s) in the Scala instance do not exist."
)
val loaderLibraryOnly = ClasspathUtil.toLoader(scalaLibraryJars)
val jarsToLoad2 = jarsToLoad diff scalaLibraryJars
val loader = ClasspathUtil.toLoader(jarsToLoad2, loaderLibraryOnly)
val properties = ResourceLoader.getSafePropertiesFor("compiler.properties", loader)
val loaderVersion = Option(properties.getProperty("version.number"))
val scalaV = loaderVersion.getOrElse("unknown")
@ -133,9 +144,9 @@ private[sbt] object ZincComponentCompiler {
scalaV,
loader,
loaderLibraryOnly,
scalaLibrary,
scalaCompiler,
jarsToLoad,
scalaLibraryJars.map(_.toFile).toArray,
scalaCompilerJar.toFile,
jarsToLoad.map(_.toFile).toArray,
loaderVersion,
)
}
@ -262,9 +273,17 @@ private[inc] class ZincComponentCompiler(
buffered,
s"compiler bridge sources $moduleForBridge",
)
val (srcs, xsbtiJars) = allArtifacts.partition(_.getName.endsWith("-sources.jar"))
val (srcs, xsbtiJars) =
allArtifacts.partition(_.getFileName.toString.endsWith("-sources.jar"))
val toCompileID = bridgeSources.name
AnalyzingCompiler.compileSources(srcs, target, xsbtiJars, toCompileID, compiler, log)
AnalyzingCompiler.compileSources(
srcs,
target.toPath,
xsbtiJars,
toCompileID,
compiler,
log
)
manager.define(compilerBridgeId, Seq(target))
}
}
@ -287,7 +306,7 @@ private object ZincLMHelper {
noSource: Boolean,
logger: sbt.util.Logger,
desc: String,
): Vector[File] = {
): Vector[Path] = {
val updateConfiguration = newUpdateConfiguration(retrieveDirectory, noSource)
val dependencies = prettyPrintDependency(module)
logger.info(s"Attempting to fetch $dependencies.")
@ -298,7 +317,7 @@ private object ZincLMHelper {
val unresolvedLines = UnresolvedWarning.unresolvedWarningLines.showLines(uw).mkString("\n")
throw new InvalidComponent(s"$unretrievedMessage\n$unresolvedLines")
case Right(updateReport) =>
val allFiles = updateReport.allFiles
val allFiles = updateReport.allFiles.map(_.toPath)
logger.debug(s"Files retrieved for ${prettyPrintDependency(module)}:")
logger.debug(allFiles.mkString(", "))
allFiles

View File

@ -18,8 +18,8 @@ class ZincComponentCompilerSpec extends IvyBridgeProviderSpecification {
val scala2121 = "2.12.1"
val scala2122 = "2.12.2"
val scala2123 = "2.12.3"
val scala2130M2 = "2.13.0-M2"
val scala2130RC1 = "2.13.0-RC1"
val scala2130 = "2.13.0"
val scala2131 = "2.13.1"
def isJava8: Boolean = sys.props("java.specification.version") == "1.8"
@ -45,11 +45,8 @@ class ZincComponentCompilerSpec extends IvyBridgeProviderSpecification {
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2123) should exist)
}
it should "compile the bridge for Scala 2.13.0-M2" in { implicit td =>
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2130M2) should exist)
}
it should "compile the bridge for Scala 2.13.0-RC1" in { implicit td =>
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2130RC1) should exist)
it should "compile the bridge for Scala 2.13" in { implicit td =>
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2130) should exist)
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2131) should exist)
}
}