mirror of https://github.com/sbt/sbt.git
support vals/defs in build.sbt

* must start with val, lazy val, or def (no modifiers currently)
* visible only within the same .sbt file
* multiple definitions allowed without being separated by blank lines
* no blank lines allowed within a definition
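For illustration only (this example is not part of the commit; all names in it are invented), a build.sbt using the new support could look like the following, with blank lines separating the settings from each other and from the definitions:

```scala
// Hypothetical build.sbt exercising the new val / lazy val / def support.
def timestamp = System.currentTimeMillis

lazy val generatedVersion = "0.1-" + (timestamp / 1000)
val projectName = "demo"

name := projectName

version := generatedVersion
```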
This commit is contained in:
parent f99ba44703
commit 87e406fcbd
@@ -25,7 +25,7 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
def this() = this(s => new ConsoleReporter(s), None)

backing.foreach(IO.createDirectory)
val classpathString = Path.makeString(classpath)
val classpathString = Path.makeString(classpath ++ backing.toList)
val options = "-cp" +: classpathString +: optionsNoncp

lazy val settings =
@@ -39,46 +39,83 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
import global._
import definitions._

private[sbt] def unlinkDeferred() {
  toUnlinkLater foreach unlink
  toUnlinkLater = Nil
}

private[this] var toUnlinkLater = List[Symbol]()
private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)

def eval(expression: String, imports: EvalImports = noImports, tpeName: Option[String] = None, srcName: String = "<setting>", line: Int = DefaultStartLine): EvalResult =
{
  val ev = new EvalType {
    def makeUnit = mkUnit(srcName, line, expression)
    def unlink = true
    def load(moduleName: String, loader: ClassLoader): Any = getValue[Any](moduleName, loader)
    def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = {
      val (parser, tree) = parse(unit, settingErrorStrings, _.expr())
      val tpt: Tree = expectedType(tpeName)
      augment(parser, importTrees, tree, tpt, moduleName)
    }
  }
  evalCommon(expression :: Nil, imports, tpeName, ev)
}
private[sbt] def evalDefinitions(definitions: Seq[(String,Range)], imports: EvalImports, srcName: String): EvalResult =
{
  require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.")
  val ev = new EvalType {
    lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions)
    def makeUnit = fullUnit
    def unlink = false
    def load(moduleName: String, loader: ClassLoader): Any = getModule(moduleName, loader)
    def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = {
      val fullParser = new syntaxAnalyzer.UnitParser(unit)
      val trees = defUnits flatMap parseDefinitions
      syntheticModule(fullParser, importTrees, trees.toList, moduleName)
    }
  }
  evalCommon(definitions.map(_._1), imports, Some(""), ev)
}

private[this] def evalCommon(content: Seq[String], imports: EvalImports, tpeName: Option[String], ev: EvalType): EvalResult =
{
  import Eval._
  val hash = Hash.toHex(Hash(bytes( bytes(expression) :: optBytes(backing)(fileExistsBytes) :: seqBytes(options)(bytes) ::
    seqBytes(classpath)(fileModifiedBytes) :: seqBytes(imports.strings.map(_._1))(bytes) :: optBytes(tpeName)(bytes) :: Nil)))
  val hash = Hash.toHex(Hash(bytes( stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) ::
    seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) :: optBytes(tpeName)(bytes) :: Nil)))
  val moduleName = makeModuleName(hash)

  lazy val unit = {
    reporter.reset
    mkUnit(srcName, line, expression)
    ev.makeUnit
  }
  lazy val run = new Run {
    override def units = (unit :: Nil).iterator
  }
  def unlinkAll(): Unit = for( (sym, _) <- run.symSource ) unlink(sym)
  def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)
  def unlinkAll(): Unit = for( (sym, _) <- run.symSource ) if(ev.unlink) unlink(sym) else toUnlinkLater ::= sym

  val (tpe, value) =
    (tpeName, backing) match {
      case (Some(tpe), Some(back)) if classExists(back, moduleName) => (tpe, loadPlain(back, moduleName))
      case _ => try { eval0(expression, imports, tpeName, run, unit, backing, moduleName) } finally { unlinkAll() }
      case (Some(tpe), Some(back)) if classExists(back, moduleName) =>
        val loader = (parent: ClassLoader) => ev.load(moduleName, new URLClassLoader(Array(back.toURI.toURL), parent))
        (tpe, loader)
      case _ =>
        try { compileAndLoad(run, unit, imports, backing, moduleName, ev) }
        finally { unlinkAll() }
    }

  val classFiles = getClassFiles(backing, moduleName)
  new EvalResult(tpe, value, classFiles, moduleName)
}
def eval0(expression: String, imports: EvalImports, tpeName: Option[String], run: Run, unit: CompilationUnit, backing: Option[File], moduleName: String): (String, ClassLoader => Any) =
private[this] def compileAndLoad(run: Run, unit: CompilationUnit, imports: EvalImports, backing: Option[File], moduleName: String, ev: EvalType): (String, ClassLoader => Any) =
{
  val dir = backing match { case None => new VirtualDirectory("<virtual>", None); case Some(dir) => new PlainFile(dir) }
  val dir = outputDirectory(backing)
  settings.outputDirs setSingleOutput dir

  val importTrees = parseImports(imports)
  val (parser, tree) = parseExpr(unit)
  unit.body = ev.unitBody(unit, importTrees, moduleName)

  val tpt: Tree = tpeName match {
    case Some(tpe) => parseType(tpe)
    case None => TypeTree(NoType)
  }

  unit.body = augment(parser, importTrees, tree, tpt, moduleName)

  def compile(phase: Phase): Unit =
  {
    globalPhase = phase
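As a rough usage sketch (not code from the commit; both entry points are internal to sbt and the argument values below are invented): `eval` compiles a single expression and wraps it in a synthetic module, while the new `evalDefinitions` compiles a block of user definitions verbatim inside such a module.

```scala
// Illustrative only. evalDefinitions is private[sbt], so this would only compile
// from within the sbt package; the literal inputs are made up for the example.
val evaluator = new Eval()

// A single expression, with an optional expected type.
val exprResult: EvalResult = evaluator.eval("1 + 1", tpeName = Some("Int"))

// One or more definitions, each paired with its original line range in the .sbt file.
val defResult: EvalResult = evaluator.evalDefinitions(
  Seq(("lazy val generatedVersion = \"0.1\"", 3 to 3)),
  imports = new EvalImports(Nil, "build.sbt"),
  srcName = "build.sbt")
```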
@@ -94,15 +131,31 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
  compile(run.namerPhase)
  checkError("Type error in expression")
  val tpe = atPhase(run.typerPhase.next) { (new TypeExtractor).getType(unit.body) }
  val loader = (parent: ClassLoader) => ev.load(moduleName, new AbstractFileClassLoader(dir, parent))

  (tpe, load(dir, moduleName))
  (tpe, loader)
}

private[this] def expectedType(tpeName: Option[String]): Tree =
  tpeName match {
    case Some(tpe) => parseType(tpe)
    case None => TypeTree(NoType)
  }

private[this] def outputDirectory(backing: Option[File]): AbstractFile =
  backing match { case None => new VirtualDirectory("<virtual>", None); case Some(dir) => new PlainFile(dir) }

def load(dir: AbstractFile, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new AbstractFileClassLoader(dir, parent))
def loadPlain(dir: File, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))

val WrapValName = "$sbtdef"
//wrap tree in object objectName { def WrapValName = <tree> }
def augment(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], tree: Tree, tpt: Tree, objectName: String): Tree =
{
  val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree)
  syntheticModule(parser, imports, method :: Nil, objectName)
}
private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], definitions: List[Tree], objectName: String): Tree =
{
  val emptyTypeName = nme.EMPTY.toTypeName
  def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
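To make the wrapping concrete (an illustrative rendering; the real module name is derived from a hash of the inputs), evaluating the expression `1 + 1` with expected type `Int` produces a tree equivalent to the source below. `evalDefinitions` goes through the same `syntheticModule` but places the user's definitions in the object body instead of the single `$sbtdef` method.

```scala
// Roughly what augment/syntheticModule generate for an expression (name invented):
object $3f9a1c2b {
  def $sbtdef: Int = 1 + 1
}
```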
@@ -115,20 +168,23 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
    Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
  )

  def method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree)
  def moduleBody = Template(List(gen.scalaAnyRefConstr), emptyValDef, List(emptyInit, method))
  def moduleBody = Template(List(gen.scalaAnyRefConstr), emptyValDef, emptyInit :: definitions)
  def moduleDef = ModuleDef(NoMods, newTermName(objectName), moduleBody)
  parser.makePackaging(0, emptyPkg, (imports :+ moduleDef).toList)
}

def getValue[T](objectName: String, loader: ClassLoader): T =
{
  val clazz = Class.forName(objectName + "$", true, loader)
  val module = clazz.getField("MODULE$").get(null)
  val module = getModule(objectName, loader)
  val accessor = module.getClass.getMethod(WrapValName)
  val value = accessor.invoke(module)
  value.asInstanceOf[T]
}
private[this] def getModule(moduleName: String, loader: ClassLoader): Any =
{
  val clazz = Class.forName(moduleName + "$", true, loader)
  clazz.getField("MODULE$").get(null)
}

final class TypeExtractor extends Traverser {
  private[this] var result = ""
@@ -148,24 +204,41 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
private[this] def moduleClassFilter(moduleName: String) = new java.io.FilenameFilter { def accept(dir: File, s: String) =
  (s contains moduleName) && (s endsWith ".class")
}
private[this] def parseExpr(unit: CompilationUnit) =

private[this] class ParseErrorStrings(val base: String, val extraBlank: String, val missingBlank: String, val extraSemi: String)
private[this] def definitionErrorStrings = new ParseErrorStrings(
  base = "Error parsing definition.",
  extraBlank = " Ensure that there are no blank lines within a definition.",
  missingBlank = " Ensure that definitions are separated by blank lines.",
  extraSemi = " A trailing semicolon is not permitted for standalone definitions."
)
private[this] def settingErrorStrings = new ParseErrorStrings(
  base = "Error parsing expression.",
  extraBlank = " Ensure that there are no blank lines within a setting.",
  missingBlank = " Ensure that settings are separated by blank lines.",
  extraSemi = " Note that settings are expressions and do not end with semicolons. (Semicolons are fine within {} blocks, however.)"
)

/** Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state
* to catch errors that are common when the content is embedded in a blank-line-delimited format. */
private[this] def parse[T](unit: CompilationUnit, errors: ParseErrorStrings, f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) =
{
  val parser = new syntaxAnalyzer.UnitParser(unit)

  val tree: Tree = parser.expr()
  val tree = f(parser)
  val extra = parser.in.token match {
    case EOF => " Ensure that there are no blank lines within a setting."
    case EOF => errors.extraBlank
    case _ => ""
  }
  checkError("Error parsing expression." + extra)
  checkError(errors.base + extra)

  parser.accept(EOF)
  val extra2 = parser.in.token match {
    case SEMI => " Note that settings are expressions and do not end with semicolons. (Semicolons are fine within {} blocks, however.)"
    case NEWLINE | NEWLINES => " Ensure that settings are separated by blank lines."
    case SEMI => errors.extraSemi
    case NEWLINE | NEWLINES => errors.missingBlank
    case _ => ""
  }
  checkError("Error parsing expression." + extra2)
  checkError(errors.base + extra2)

  (parser, tree)
}
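As an illustration of when the messages above fire (example content invented; the behavior is inferred from the checks in `parse`): a blank line inside what was meant to be one definition leaves the parser at EOF with an incomplete tree, while two settings on adjacent lines leave a newline token after the first expression.

```scala
// Would report: "Error parsing definition. Ensure that there are no blank lines within a definition."
lazy val longVersion = "1.0-" +

  suffix

// Would report: "Error parsing expression. Ensure that settings are separated by blank lines."
name := "demo"
organization := "org.example"
```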
@@ -187,7 +260,40 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
  checkError("Error parsing imports for expression.")
  trees
}
private[this] def parseDefinitions(du: CompilationUnit): Seq[Tree] =
  parse(du, definitionErrorStrings, parseDefinitions)._2

/** Parses one or more definitions (defs, vals, lazy vals, classes, traits, modules). */
private[this] def parseDefinitions(parser: syntaxAnalyzer.UnitParser): Seq[Tree] =
{
  var defs = parser.nonLocalDefOrDcl
  parser.acceptStatSepOpt()
  while(!parser.isStatSeqEnd) {
    val next = parser.nonLocalDefOrDcl
    defs ++= next
    parser.acceptStatSepOpt()
  }
  defs
}

private[this] trait EvalType {
  /** Constructs the full compilation unit for this evaluation.
  * This is used for error reporting during compilation.
  * The `unitBody` method actually does the parsing and may parse the Tree from another source. */
  def makeUnit: CompilationUnit

  /** If true, all top-level symbols from this evaluation will be unlinked.*/
  def unlink: Boolean

  /** Gets the value for this evaluation.
  * The enclosing `moduleName` and the `parent` class loader containing classes on the classpath are provided. */
  def load(moduleName: String, parent: ClassLoader): Any

  /** Constructs the Tree to be compiled. The full compilation `unit` from `makeUnit` is provided along with the
  * parsed imports `importTrees` to be used. `moduleName` should be the name of the enclosing module.
  * The Tree doesn't need to be parsed from the contents of `unit`. */
  def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree
}

val DefaultStartLine = 0
private[this] def makeModuleName(hash: String): String = "$" + Hash.halve(hash)
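The `parseDefinitions` loop above is what allows a single blank-line-delimited block to carry several definitions, matching the commit-message note that multiple definitions need not be separated by blank lines. For example (illustrative input):

```scala
// One block with no blank lines between the definitions; the loop collects
// one tree per definition from the same fragment unit.
val n = "Demo"
val descr = "Description"
def now = System.currentTimeMillis
```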
@@ -200,10 +306,42 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
  override def lineToOffset(line: Int): Int = super.lineToOffset((line - startLine) max 0)
  override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) + startLine
}
/** Constructs a CompilationUnit for each definition, which can be used to independently parse the definition into a Tree.
* Additionally, a CompilationUnit for the combined definitions is constructed for use by combined compilation after parsing. */
private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String,Range)]): (CompilationUnit, Seq[CompilationUnit]) =
{
  def fragmentUnit(content: String, lineMap: Array[Int]) = new CompilationUnit(fragmentSourceFile(srcName, content, lineMap))

  import collection.mutable.ListBuffer
  val lines = new ListBuffer[Int]()
  val defs = new ListBuffer[CompilationUnit]()
  val fullContent = new java.lang.StringBuilder()
  for( (defString, range) <- definitions )
  {
    defs += fragmentUnit(defString, range.toArray)
    fullContent.append(defString)
    lines ++= range
    fullContent.append("\n\n")
    lines ++= (range.end :: range.end :: Nil)
  }
  val fullUnit = fragmentUnit(fullContent.toString, lines.toArray)
  (fullUnit, defs.toSeq)
}

/** Source file that can map the offset in the file to and from line numbers that may be discontinuous.
* The values in `lineMap` must be ordered, but need not be consecutive. */
private[this] def fragmentSourceFile(srcName: String, content: String, lineMap: Array[Int]) = new BatchSourceFile(srcName, content) {
  override def lineToOffset(line: Int): Int = super.lineToOffset(lineMap.indexWhere(_ == line) max 0)
  override def offsetToLine(offset: Int): Int = index(lineMap, super.offsetToLine(offset))
  // the SourceFile attribute is populated from this method, so we are required to only return the name
  override def toString = new File(srcName).getName
  private[this] def index(a: Array[Int], i: Int): Int = if(i < 0 || i >= a.length) 0 else a(i)
}
}
private object Eval
{
  def optBytes[T](o: Option[T])(f: T => Array[Byte]): Array[Byte] = seqBytes(o.toSeq)(f)
  def stringSeqBytes(s: Seq[String]): Array[Byte] = seqBytes(s)(bytes)
  def seqBytes[T](s: Seq[T])(f: T => Array[Byte]): Array[Byte] = bytes(s map f)
  def bytes(b: Seq[Array[Byte]]): Array[Byte] = bytes(b.length) ++ b.flatten.toArray[Byte]
  def bytes(b: Boolean): Array[Byte] = Array[Byte](if(b) 1 else 0)
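A small self-contained sketch (not from the commit) of the line-number bookkeeping that `mkDefsUnit` and `fragmentSourceFile` perform, assuming two definitions that originally sat on lines 3-4 and 7-8 of the .sbt file:

```scala
// Mirrors the lineMap construction above: each definition contributes its original
// line numbers, and the two separator lines appended after it map to its last line.
object LineMapDemo extends App {
  val ranges = Seq(3 to 4, 7 to 8)
  val lineMap = ranges.flatMap(r => r ++ Seq(r.end, r.end)).toArray

  // Same lookup as fragmentSourceFile.index: out-of-bounds offsets fall back to 0.
  def index(a: Array[Int], i: Int): Int = if (i < 0 || i >= a.length) 0 else a(i)

  println(lineMap.toList)     // List(3, 4, 4, 4, 7, 8, 8, 8)
  println(index(lineMap, 1))  // 4: the second line of the synthetic unit maps back to line 4
}
```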
@@ -77,18 +77,39 @@ object RetrieveUnit
  def unapply(uri: URI) = Option(uri.withoutMarkerScheme.getPath)
  }
}

object EvaluateConfigurations
{
  private[this] final class ParsedFile(val imports: Seq[(String,Int)], val definitions: Seq[(String,LineRange)], val settings: Seq[(String,LineRange)])
  private[this] final class Definitions(val loader: ClassLoader => ClassLoader, val moduleNames: Seq[String])

  private[this] val DefinitionKeywords = Seq("lazy val ", "def ", "val ")

  def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): ClassLoader => Seq[Setting[_]] =
    flatten(srcs.sortBy(_.getName) map { src => evaluateConfiguration(eval, src, imports) })
  def evaluateConfiguration(eval: Eval, src: File, imports: Seq[String]): ClassLoader => Seq[Setting[_]] =
    evaluateConfiguration(eval, src.getPath, IO.readLines(src), imports, 0)

  private[this] def parseConfiguration(lines: Seq[String], builtinImports: Seq[String], offset: Int): ParsedFile =
  {
    val (importStatements, settingsAndDefinitions) = splitExpressions(lines)
    val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements)
    val (definitions, settings) = splitSettingsDefinitions(addOffsetToRange(offset, settingsAndDefinitions))
    new ParsedFile(allImports, definitions, settings)
  }

  def evaluateConfiguration(eval: Eval, name: String, lines: Seq[String], imports: Seq[String], offset: Int): ClassLoader => Seq[Setting[_]] =
  {
    val (importExpressions, settingExpressions) = splitExpressions(lines)
    val settings = addOffsetToRange(offset, settingExpressions) map { case (settingExpression,range) =>
      evaluateSetting(eval, name, (imports.map(s => (s, -1)) ++ addOffset(offset, importExpressions)), settingExpression, range)
    val parsed = parseConfiguration(lines, imports, offset)
    val importDefs = if(parsed.definitions.isEmpty) Nil else {
      val definitions = evaluateDefinitions(eval, name, parsed.imports, parsed.definitions)
      Load.importAllRoot(definitions.moduleNames).map(s => (s, -1))
    }
    val allImports = importDefs ++ parsed.imports
    val settings = parsed.settings map { case (settingExpression,range) =>
      evaluateSetting(eval, name, allImports, settingExpression, range)
    }
    eval.unlinkDeferred()
    flatten(settings)
  }
  def flatten(mksettings: Seq[ClassLoader => Seq[Setting[_]]]): ClassLoader => Seq[Setting[_]] =
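Conceptually (an illustrative rendering; the actual module name is hash-derived and the exact import text comes from `Load.importAllRoot`), the definitions of a .sbt file are evaluated once into a synthetic module, and every setting expression in that file is then compiled with that module's members imported:

```scala
// For a build.sbt containing
//   val n = "Demo"
//   name := n
// the definitions are compiled roughly as:
object $7c20a1 {
  val n = "Demo"
}
// ...and each setting such as `name := n` is evaluated with an extra import
// along the lines of `import $7c20a1._`, which is what importDefs adds above.
```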
@@ -139,6 +160,19 @@ object EvaluateConfigurations
  }
  group0(lines, Nil)
}
private[this] def splitSettingsDefinitions(lines: Seq[(String,LineRange)]): (Seq[(String,LineRange)], Seq[(String,LineRange)]) =
  lines partition { case (line, range) => isDefinition(line) }
private[this] def isDefinition(line: String): Boolean =
{
  val trimmed = line.trim
  DefinitionKeywords.exists(trimmed startsWith _)
}
private[this] def evaluateDefinitions(eval: Eval, name: String, imports: Seq[(String,Int)], definitions: Seq[(String,LineRange)]): Definitions =
{
  val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
  val res = eval.evalDefinitions(convertedRanges, new EvalImports(imports, name), name)
  new Definitions(loader => res.getValue(loader).getClass.getClassLoader, res.enclosingModule :: Nil)
}
}
object Index
{
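A one-line sketch of the classification that `isDefinition` performs (illustrative; it inlines the `DefinitionKeywords` shown above):

```scala
// Lines whose trimmed text starts with "lazy val ", "def ", or "val " are definitions;
// everything else is treated as a setting expression.
val (defs, settings) =
  Seq("val n = \"Demo\"", "name := n", "def now = System.currentTimeMillis")
    .partition(l => Seq("lazy val ", "def ", "val ").exists(k => l.trim.startsWith(k)))
// defs     == List("val n = \"Demo\"", "def now = System.currentTimeMillis")
// settings == List("name := n")
```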
@@ -0,0 +1,12 @@
def now = System.currentTimeMillis

lazy val v = "1.0-" +
  (x / 1000)
lazy val x = now + 1

val n = "Demo"
val descr = "Description"

name := n

version := v
@@ -0,0 +1 @@
description := descr
@@ -0,0 +1,7 @@
# This test verifies that defs and vals are allowed in .sbt files
# Initial load should succeed

# Then, check that other .sbt files cannot see definitions
# defined in another file
$ copy-file changes/visibility.sbt visibility.sbt
-> reload