mirror of https://github.com/sbt/sbt.git
Format main-actions
This commit is contained in:
parent c3d7be0e04
commit 1ee2a5ad68
@@ -18,7 +18,7 @@ import scala.util.Try
 final class Console(compiler: AnalyzingCompiler) {

-  /** Starts an interactive scala interpreter session with the given classpath.*/
+  /** Starts an interactive scala interpreter session with the given classpath. */
   def apply(classpath: Seq[File], log: Logger): Try[Unit] =
     apply(classpath, Nil, "", "", log)
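Note: most single-line changes in this commit have this shape; the formatter normalizes one-line scaladoc so a space precedes the closing */. A throwaway illustration of the resulting style (greet is invented for the example, not sbt code):

/** Returns a greeting for `name`. */
def greet(name: String): String = "Hello, " + name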
@@ -57,9 +57,16 @@ final class Console(compiler: AnalyzingCompiler) {
   )(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
     def console0(): Unit =
       try {
-        compiler.console(classpath map { x =>
-          PlainVirtualFile(x.toPath)
-        }, MappedFileConverter.empty, options, initialCommands, cleanupCommands, log)(
+        compiler.console(
+          classpath map { x =>
+            PlainVirtualFile(x.toPath)
+          },
+          MappedFileConverter.empty,
+          options,
+          initialCommands,
+          cleanupCommands,
+          log
+        )(
           loader,
           bindings
         )
@@ -31,7 +31,8 @@ private[sbt] object ForkTests {
       tags: (Tag, Int)*
   ): Task[TestOutput] = {
     import std.TaskExtra._
-    val dummyLoader = this.getClass.getClassLoader // can't provide the loader for test classes, which is in another jvm
+    val dummyLoader =
+      this.getClass.getClassLoader // can't provide the loader for test classes, which is in another jvm
     def all(work: Seq[ClassLoader => Unit]) = work.fork(f => f(dummyLoader))

     val main =
@@ -20,7 +20,7 @@ import sbt.util.Logger
 import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo }
 import sbt.internal.util.HNil
-import sbt.internal.util.HListFormats._
+// import sbt.internal.util.HListFormats._
 import sbt.util.FileInfo.{ exists, lastModified }
 import sbt.util.CacheImplicits._
 import sbt.util.Tracked.{ inputChanged, outputChanged }
@ -107,7 +107,6 @@ object Package {
|
|||
)
|
||||
|
||||
/**
|
||||
*
|
||||
* @param conf the package configuration that should be build
|
||||
* @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible
|
||||
* @param log feedback for the user
|
||||
|
|
@@ -116,7 +115,6 @@ object Package {
     apply(conf, cacheStoreFactory, log, timeFromConfiguration(conf))

   /**
-   *
    * @param conf the package configuration that should be build
    * @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible
    * @param log feedback for the user
@@ -132,11 +130,11 @@ object Package {
     val main = manifest.getMainAttributes
     for (option <- conf.options) {
       option match {
-        case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); ()
-        case MainClass(mainClassName) => main.put(Attributes.Name.MAIN_CLASS, mainClassName); ()
+        case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); ()
+        case MainClass(mainClassName) => main.put(Attributes.Name.MAIN_CLASS, mainClassName); ()
         case ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; ()
         case FixedTimestamp(value) => ()
-        case _ => log.warn("Ignored unknown package option " + option)
+        case _ => log.warn("Ignored unknown package option " + option)
       }
     }
     setVersion(main)
@@ -151,8 +149,7 @@ object Package {
         makeJar(sources, jar.file, manifest, log, time)
         jar.file
         ()
-      } else
-        log.debug("Jar uptodate: " + jar.file)
+      } else log.debug("Jar uptodate: " + jar.file)
     }
   }
@@ -13,7 +13,7 @@ import sbt.io.syntax._
 import sbt.io.IO
 import sbt.internal.inc.{ RawCompiler, ScalaInstance }
 import sbt.internal.util.Types.:+:
-import sbt.internal.util.HListFormats._
+// import sbt.internal.util.HListFormats._
 import sbt.internal.util.HNil
 import sbt.internal.util.HListFormats._
 import sbt.util.CacheImplicits._
@@ -70,8 +70,7 @@ object RawCompileLike {

   def prepare(description: String, doCompile: Gen): Gen =
     (sources, classpath, outputDirectory, options, maxErrors, log) => {
-      if (sources.isEmpty)
-        log.info("No sources available, skipping " + description + "...")
+      if (sources.isEmpty) log.info("No sources available, skipping " + description + "...")
       else {
         log.info(description.capitalize + " to " + outputDirectory.absolutePath + "...")
         IO.delete(outputDirectory)
@@ -86,8 +86,7 @@ object Sync {
   }

   def copy(source: File, target: File): Unit =
-    if (source.isFile)
-      IO.copyFile(source, target, true)
+    if (source.isFile) IO.copyFile(source, target, true)
     else if (!target.exists) { // we don't want to update the last modified time of an existing directory
       IO.createDirectory(target)
       IO.copyLastModified(source, target)
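Note: this is the same rule applied in the Package, RawCompileLike, TestResultLogger and Eval hunks of this commit: an if branch that is a single short expression stays on the same line as its condition instead of wrapping. An invented illustration of the resulting shape:

def describeSize(n: Int): String =
  if (n == 0) "empty"
  else if (n < 10) "small"
  else "large"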
@@ -102,8 +101,8 @@ object Sync {
       sys.error("Duplicate mappings:" + dups.mkString)
   }

-  implicit def relationFormat[A, B](
-      implicit af: JsonFormat[Map[A, Set[B]]],
+  implicit def relationFormat[A, B](implicit
+      af: JsonFormat[Map[A, Set[B]]],
       bf: JsonFormat[Map[B, Set[A]]]
   ): JsonFormat[Relation[A, B]] =
     new JsonFormat[Relation[A, B]] {
@@ -142,9 +141,8 @@ object Sync {
   )(implicit infoFormat: JsonFormat[F]): Unit = {
     val virtualRelation: Relation[VirtualFileRef, VirtualFileRef] =
       Relation.switch(relation, (f: File) => fileConverter.toVirtualFile(f.toPath))
-    val virtualInfo: Map[VirtualFileRef, F] = info.map {
-      case (file, fileInfo) =>
-        fileConverter.toVirtualFile(file.toPath) -> fileInfo
+    val virtualInfo: Map[VirtualFileRef, F] = info.map { case (file, fileInfo) =>
+      fileConverter.toVirtualFile(file.toPath) -> fileInfo
     }

     import LibraryManagementCodec._
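Note: the rewrite of info.map above recurs throughout the commit (TestResultLogger, Tests, Eval): when a function literal is a single case clause, the case is hoisted onto the line with the opening brace and the body loses one level of indentation. A small self-contained sketch of that layout (the values are invented for the example):

val wordsByLength: Map[Int, List[String]] =
  List("sbt", "scala", "zinc").groupBy(_.length)
val report: List[String] = wordsByLength.toList.map { case (len, words) =>
  len.toString + ": " + words.mkString(", ")
}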
@@ -162,8 +160,8 @@ object Sync {
   type RelationInfo[F] = (Relation[File, File], Map[File, F])
   type RelationInfoVirtual[F] = (Relation[VirtualFileRef, VirtualFileRef], Map[VirtualFileRef, F])

-  def readInfoWrapped[F <: FileInfo](store: CacheStore, fileConverter: FileConverter)(
-      implicit infoFormat: JsonFormat[F]
+  def readInfoWrapped[F <: FileInfo](store: CacheStore, fileConverter: FileConverter)(implicit
+      infoFormat: JsonFormat[F]
   ): RelationInfo[F] = {
     convertFromVirtual(readInfoVirtual(store)(infoFormat), fileConverter)
   }
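Note: in relationFormat and readInfoWrapped the implicit keyword moves from the first parameter up to the line with the opening parenthesis. This matches the newer scalafmt treatment of implicit parameter lists (likely a newlines.implicitParamListModifier* setting, though the exact configuration sbt uses is an assumption here). A minimal sketch of the resulting shape using only the standard library:

// Illustrative only: `implicit` sits next to the opening parenthesis of the list.
def describe[A](value: A)(implicit
    num: Numeric[A]
): String =
  value.toString + " (zero = " + num.zero.toString + ")"

val line = describe(42) // "42 (zero = 0)"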
@@ -173,9 +171,8 @@ object Sync {
       fileConverter: FileConverter
   ): RelationInfo[F] = {
     val firstPart = Relation.switch(info._1, (r: VirtualFileRef) => fileConverter.toPath(r).toFile)
-    val secondPart = info._2.map {
-      case (file, fileInfo) =>
-        fileConverter.toPath(file).toFile -> fileInfo
+    val secondPart = info._2.map { case (file, fileInfo) =>
+      fileConverter.toPath(file).toFile -> fileInfo
     }
     firstPart -> secondPart
   }
@@ -70,9 +70,8 @@ object TestResultLogger {
    * @param f The `TestResultLogger` to choose if the predicate fails.
    */
   def choose(cond: (Output, String) => Boolean, t: TestResultLogger, f: TestResultLogger) =
-    TestResultLogger(
-      (log, results, taskName) =>
-        (if (cond(results, taskName)) t else f).run(log, results, taskName)
+    TestResultLogger((log, results, taskName) =>
+      (if (cond(results, taskName)) t else f).run(log, results, taskName)
     )

   /** Transforms the input to be completely silent when the subject module doesn't contain any tests. */
@@ -116,8 +115,7 @@ object TestResultLogger {
     val printSummary = TestResultLogger((log, results, _) => {
       val multipleFrameworks = results.summaries.size > 1
       for (Summary(name, message) <- results.summaries)
-        if (message.isEmpty)
-          log.debug("Summary for " + name + " not available.")
+        if (message.isEmpty) log.debug("Summary for " + name + " not available.")
         else {
           if (multipleFrameworks) log.info(name)
           log.info(message)
@@ -139,19 +137,18 @@ object TestResultLogger {
         canceledCount,
         pendingCount,
       ) =
-        results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) {
-          case (acc, (_, testEvent)) =>
-            val (skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc) =
-              acc
-            (
-              skippedAcc + testEvent.skippedCount,
-              errorAcc + testEvent.errorCount,
-              passedAcc + testEvent.passedCount,
-              failureAcc + testEvent.failureCount,
-              ignoredAcc + testEvent.ignoredCount,
-              canceledAcc + testEvent.canceledCount,
-              pendingAcc + testEvent.pendingCount,
-            )
+        results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) { case (acc, (_, testEvent)) =>
+          val (skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc) =
+            acc
+          (
+            skippedAcc + testEvent.skippedCount,
+            errorAcc + testEvent.errorCount,
+            passedAcc + testEvent.passedCount,
+            failureAcc + testEvent.failureCount,
+            ignoredAcc + testEvent.ignoredCount,
+            canceledAcc + testEvent.canceledCount,
+            pendingAcc + testEvent.pendingCount,
+          )
         }
       val totalCount = failuresCount + errorsCount + skippedCount + passedCount
       val base =
@@ -190,8 +187,7 @@ object TestResultLogger {
       show("Error during tests:", Level.Error, select(TestResult.Error))
     })

-    val printNoTests = TestResultLogger(
-      (log, results, taskName) => log.info("No tests to run for " + taskName)
-    )
+    val printNoTests =
+      TestResultLogger((log, results, taskName) => log.info("No tests to run for " + taskName))
   }
 }
@@ -162,7 +162,7 @@ object Tests {
       new Group(name, tests, runPolicy, tags)
     }

-    //- EXPANDED CASE CLASS METHOD BEGIN -//
+    // - EXPANDED CASE CLASS METHOD BEGIN -//
     @deprecated("Methods generated for case class will be removed in the future.", "1.4.0")
     def copy(
         name: String = this.name,
@@ -199,13 +199,13 @@ object Tests {
         runPolicy == Group$1.runPolicy && tags == Group$1.tags
       }))
     }
-    //- EXPANDED CASE CLASS METHOD END -//
+    // - EXPANDED CASE CLASS METHOD END -//
   }

   object Group
       extends AbstractFunction3[String, Seq[TestDefinition], TestRunPolicy, Group]
       with Serializable {
-    //- EXPANDED CASE CLASS METHOD BEGIN -//
+    // - EXPANDED CASE CLASS METHOD BEGIN -//
     final override def toString(): String = "Group"
     def apply(
         name: String,
@@ -239,7 +239,7 @@ object Tests {
       )
     }
     private def readResolve(): Object = Group
-    //- EXPANDED CASE CLASS METHOD END -//
+    // - EXPANDED CASE CLASS METHOD END -//
   }

   private[sbt] final class ProcessedOptions(
@@ -379,23 +379,22 @@ object Tests {
       testFun: TestFunction,
       nestedTasks: Seq[TestTask]
   ): Seq[(String, TestFunction)] =
-    (nestedTasks.view.zipWithIndex map {
-      case (nt, idx) =>
-        val testFunDef = testFun.taskDef
-        (
-          testFunDef.fullyQualifiedName,
-          TestFramework.createTestFunction(
-            loader,
-            new TaskDef(
-              testFunDef.fullyQualifiedName + "-" + idx,
-              testFunDef.fingerprint,
-              testFunDef.explicitlySpecified,
-              testFunDef.selectors
-            ),
-            testFun.runner,
-            nt
-          )
+    (nestedTasks.view.zipWithIndex map { case (nt, idx) =>
+      val testFunDef = testFun.taskDef
+      (
+        testFunDef.fullyQualifiedName,
+        TestFramework.createTestFunction(
+          loader,
+          new TaskDef(
+            testFunDef.fullyQualifiedName + "-" + idx,
+            testFunDef.fingerprint,
+            testFunDef.explicitlySpecified,
+            testFunDef.selectors
+          ),
+          testFun.runner,
+          nt
+        )
       )
     }).toSeq

   def makeParallel(
@@ -412,15 +411,14 @@ object Tests {
       tags: Seq[(Tag, Int)]
   ): Task[Map[String, SuiteResult]] = {
     val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) }
-    tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) {
-      case (sum, e) =>
-        val merged = sum.toSeq ++ e.toSeq
-        val grouped = merged.groupBy(_._1)
-        grouped
-          .mapValues(_.map(_._2).foldLeft(SuiteResult.Empty) {
-            case (resultSum, result) => resultSum + result
-          })
-          .toMap
+    tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) { case (sum, e) =>
+      val merged = sum.toSeq ++ e.toSeq
+      val grouped = merged.groupBy(_._1)
+      grouped
+        .mapValues(_.map(_._2).foldLeft(SuiteResult.Empty) { case (resultSum, result) =>
+          resultSum + result
+        })
+        .toMap
     })
   }

@@ -435,17 +433,16 @@ object Tests {
       Pure(() => (name, fun.apply()), `inline` = false)
     )
     val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
-    taggedBase flatMap {
-      case (name, (result, nested)) =>
-        val nestedRunnables = createNestedRunnables(loader, fun, nested)
-        toTasks(loader, nestedRunnables, tags).map { currentResultMap =>
-          val newResult =
-            currentResultMap.get(name) match {
-              case Some(currentResult) => currentResult + result
-              case None => result
-            }
-          currentResultMap.updated(name, newResult)
-        }
+    taggedBase flatMap { case (name, (result, nested)) =>
+      val nestedRunnables = createNestedRunnables(loader, fun, nested)
+      toTasks(loader, nestedRunnables, tags).map { currentResultMap =>
+        val newResult =
+          currentResultMap.get(name) match {
+            case Some(currentResult) => currentResult + result
+            case None => result
+          }
+        currentResultMap.updated(name, newResult)
+      }
     }
   }

@@ -495,13 +492,13 @@ object Tests {
       task { Output(TestResult.Passed, Map.empty, Nil) }
     } else if (parallel) {
       reduced[Output](
-        results.toIndexedSeq, {
-          case (Output(v1, m1, _), Output(v2, m2, _)) =>
-            Output(
-              (if (severity(v1) < severity(v2)) v2 else v1): TestResult,
-              Map((m1.toSeq ++ m2.toSeq): _*),
-              Iterable.empty[Summary]
-            )
+        results.toIndexedSeq,
+        { case (Output(v1, m1, _), Output(v2, m2, _)) =>
+          Output(
+            (if (severity(v1) < severity(v2)) v2 else v1): TestResult,
+            Map((m1.toSeq ++ m2.toSeq): _*),
+            Iterable.empty[Summary]
+          )
         }
       )
     } else {
@@ -554,11 +551,11 @@ object Tests {
       definitions: Seq[Definition],
       log: Logger
   ): (Seq[TestDefinition], Set[String]) = {
-    val subclasses = fingerprints collect {
-      case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub)
+    val subclasses = fingerprints collect { case sub: SubclassFingerprint =>
+      (sub.superclassName, sub.isModule, sub)
     };
-    val annotations = fingerprints collect {
-      case ann: AnnotatedFingerprint => (ann.annotationName, ann.isModule, ann)
+    val annotations = fingerprints collect { case ann: AnnotatedFingerprint =>
+      (ann.annotationName, ann.isModule, ann)
     };
     log.debug("Subclass fingerprints: " + subclasses)
     log.debug("Annotation fingerprints: " + annotations)
@@ -204,20 +204,21 @@ final class Eval(
     val hash = Hash.toHex(d)
     val moduleName = makeModuleName(hash)

-    val (extra, loader) = try {
-      backing match {
-        case Some(back) if classExists(back, moduleName) =>
-          val loader = (parent: ClassLoader) =>
-            (new URLClassLoader(Array(back.toURI.toURL), parent): ClassLoader)
-          val extra = ev.read(cacheFile(back, moduleName))
-          (extra, loader)
-        case _ =>
-          compileAndLoad(imports, backing, moduleName, ev)
-      }
-    } finally {
-      // send a final report even if the class file was backed to reset preceding diagnostics
-      evalReporter.finalReport(ev.sourceName)
-    }
+    val (extra, loader) =
+      try {
+        backing match {
+          case Some(back) if classExists(back, moduleName) =>
+            val loader = (parent: ClassLoader) =>
+              (new URLClassLoader(Array(back.toURI.toURL), parent): ClassLoader)
+            val extra = ev.read(cacheFile(back, moduleName))
+            (extra, loader)
+          case _ =>
+            compileAndLoad(imports, backing, moduleName, ev)
+        }
+      } finally {
+        // send a final report even if the class file was backed to reset preceding diagnostics
+        evalReporter.finalReport(ev.sourceName)
+      }

     val generatedFiles = getGeneratedFiles(backing, moduleName)
     new EvalIntermediate(extra, loader, generatedFiles, moduleName)
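Note: the Eval hunk above changes layout only; the try/finally expression that produces (extra, loader) is moved off the = onto its own indented block, so the whole try ... finally remains the right-hand side of the binding. An unrelated, self-contained sketch of the same shape (the file handling is invented for the example):

import scala.io.Source

def firstLine(path: String): String = {
  val src = Source.fromFile(path)
  // The try/finally as a whole is the value bound to `line`.
  val line =
    try {
      src.getLines().nextOption().getOrElse("")
    } finally {
      src.close()
    }
  line
}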
@@ -262,8 +263,7 @@ final class Eval(

     def compile(phase: Phase): Unit = {
       globalPhase = phase
-      if (phase == null || phase == phase.next || evalReporter.hasErrors)
-        ()
+      if (phase == null || phase == phase.next || evalReporter.hasErrors) ()
       else {
         enteringPhase(phase) { phase.run() }
         compile(phase.next)
@@ -295,7 +295,7 @@ final class Eval(
   def loadPlain(dir: File, moduleName: String): ClassLoader => Any =
     parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))

-  //wrap tree in object objectName { def WrapValName = <tree> }
+  // wrap tree in object objectName { def WrapValName = <tree> }
   def augment(
       parser: global.syntaxAnalyzer.UnitParser,
       imports: Seq[Tree],
@@ -342,7 +342,7 @@ final class Eval(
     }
   }

-  /** Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of one of `types`.*/
+  /** Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of one of `types`. */
   private[this] final class ValExtractor(tpes: Set[String]) extends Traverser {
     private[this] var vals = List[String]()
     def getVals(t: Tree): List[String] = { vals = Nil; traverse(t); vals }
@@ -451,22 +451,24 @@ final class Eval(
   /** Parses one or more definitions (defs, vals, lazy vals, classes, traits, modules). */
   private[this] def parseDefinitions(parser: syntaxAnalyzer.UnitParser): Seq[Tree] = {
     val defs = ListBuffer[Tree]()
-    do {
+    def run(): Unit =
       defs ++= parser.nonLocalDefOrDcl
       parser.acceptStatSepOpt()
-    } while (!parser.isStatSeqEnd)
+
+    run()
+    while !parser.isStatSeqEnd do run()
     defs.toList
   }

   private[this] trait EvalType[T] {

-    /** Extracts additional information after the compilation unit is evaluated.*/
+    /** Extracts additional information after the compilation unit is evaluated. */
     def extra(run: Run, unit: CompilationUnit): T

-    /** Deserializes the extra information for unchanged inputs from a cache file.*/
+    /** Deserializes the extra information for unchanged inputs from a cache file. */
     def read(file: File): T

-    /** Serializes the extra information to a cache file, where it can be `read` back if inputs haven't changed.*/
+    /** Serializes the extra information to a cache file, where it can be `read` back if inputs haven't changed. */
     def write(value: T, file: File): Unit

     def sourceName: String
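Note: parseDefinitions is one of the few hunks here that changes more than whitespace. The old do { ... } while (cond) loop becomes a local run() invoked once and then repeated with while ... do, since Scala 3 dropped do-while from the language. The same migration pattern on an invented example (Scala 3 syntax):

import scala.collection.mutable.ListBuffer

// Sketch: the former do-while body becomes a local def, called once and
// then repeated with Scala 3's `while ... do`, mirroring the hunk above.
def drain(it: Iterator[Int]): List[Int] = {
  val out = ListBuffer[Int]()
  def step(): Unit = if (it.hasNext) out += it.next()
  step()
  while it.hasNext do step()
  out.toList
}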
@@ -478,7 +480,7 @@ final class Eval(
      */
     def makeUnit: CompilationUnit

-    /** If true, all top-level symbols from this evaluation will be unlinked.*/
+    /** If true, all top-level symbols from this evaluation will be unlinked. */
     def unlink: Boolean

     /**
@@ -559,7 +561,8 @@ private[sbt] object Eval {
       if (f.isDirectory)
         (f listFiles classDirFilter) foreach { x =>
           fileModifiedHash(x, digester)
-        } else digester.update(bytes(getModifiedTimeOrZero(f)))
+        }
+        else digester.update(bytes(getModifiedTimeOrZero(f)))

       digester.update(bytes(f.getAbsolutePath))
     }
@@ -602,7 +605,7 @@ private[sbt] object Eval {
     value.asInstanceOf[T]
   }

-  /** Gets the top-level module `moduleName` from the provided class `loader`. The module name should not include the trailing `$`.*/
+  /** Gets the top-level module `moduleName` from the provided class `loader`. The module name should not include the trailing `$`. */
   def getModule(moduleName: String, loader: ClassLoader): Any = {
     val clazz = Class.forName(moduleName + "$", true, loader)
     clazz.getField("MODULE$").get(null)