* move Environment classes to util/env module

* move TrapExit, SelectMainClass to run module
* rearrange some compilation-related code
* move Jetty-related code to web module
Mark Harrah 2010-07-14 19:24:50 -04:00
parent 6f3c699435
commit d0fa1eb461
54 changed files with 534 additions and 2072 deletions

.gitignore
View File

@ -0,0 +1,4 @@
LazyJettyRun6.scala
LazyJettyRun7.scala
project/plugins/project/
interface/src/main/resources/xsbt.version.properties

View File

@ -1,4 +1,4 @@
Copyright (c) 2008, 2009, 2010 Mark Harrah, Tony Sloane, Jason Zaugg
Copyright (c) 2008, 2009, 2010 Steven Blundy, Josh Cough, Mark Harrah, Stuart Roebuck, Tony Sloane, Vesa Vilhonen, Jason Zaugg
All rights reserved.
Redistribution and use in source and binary forms, with or without

NOTICE
View File

@ -1,4 +1,4 @@
Simple Build Tool (xsbt components other than sbt/)
Simple Build Tool
Copyright 2008, 2009, 2010 Mark Harrah, Jason Zaugg
Licensed under BSD-style license (see LICENSE)

View File

@ -12,6 +12,8 @@ trait JavaCompiler
}
object JavaCompiler
{
type Fork = (Seq[String], Logger) => Int
def construct(f: (Seq[String], Logger) => Int, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler =
new JavaCompiler {
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) {
@ -24,20 +26,26 @@ object JavaCompiler
if( code != 0 ) throw new CompileFailed(arguments.toArray, "javac returned nonzero exit code")
}
}
def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler = construct(directOrForkJavac, cp, scalaInstance)
def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler = construct(directJavac, cp, scalaInstance)
def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaCompiler = construct(forkJavac, cp, scalaInstance)
def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaCompiler =
construct(directOrForkJavac, cp, scalaInstance)
def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaCompiler =
construct(directJavac, cp, scalaInstance)
val directOrForkJavac = (arguments: Seq[String], log: Logger) =>
def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaCompiler =
construct(forkJavac, cp, scalaInstance)
def directOrForkJavac(implicit doFork: Fork) = (arguments: Seq[String], log: Logger) =>
try { directJavac(arguments, log) }
catch { case e: ClassNotFoundException =>
log.debug("com.sun.tools.javac.Main not found; forking javac instead")
forkJavac(arguments, log)
forkJavac(doFork)(arguments, log)
}
val forkJavac = (arguments: Seq[String], log: Logger) =>
/** `fork` should be a function that forks javac with the provided arguments and sends output to the given Logger.*/
def forkJavac(implicit doFork: Fork) = (arguments: Seq[String], log: Logger) =>
{
def externalJavac(argFile: File) = Process("javac", ("@" + normalizeSlash(argFile.getAbsolutePath)) :: Nil) ! log
def externalJavac(argFile: File) = doFork(("@" + normalizeSlash(argFile.getAbsolutePath)) :: Nil, log)
withArgumentFile(arguments)(externalJavac)
}
val directJavac = (arguments: Seq[String], log: Logger) =>
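
The hunk above replaces the hard-wired Process("javac", ...) call with an implicit Fork function, so each caller decides how javac is forked. A minimal sketch of supplying that function when constructing a JavaCompiler, mirroring the AggressiveCompiler change later in this commit; cp, scalaInstance, and sbt's Process API are assumed to be in scope:

// Sketch only: the caller supplies the forking behavior as the implicit Fork.
implicit val doFork: JavaCompiler.Fork =
  (args: Seq[String], log: Logger) => Process("javac", args) ! log
val javac: JavaCompiler = JavaCompiler.directOrFork(cp, scalaInstance)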

View File

@ -15,7 +15,7 @@ object CompileOrder extends Enumeration
// We cannot require an implicit parameter Equiv[Seq[String]] to construct Equiv[CompileSetup]
// because complexity(Equiv[Seq[String]]) > complexity(Equiv[CompileSetup])
// (6 > 4)
final class CompileOptions(val options: Seq[String])
final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String])
final class CompileSetup(val outputDirectory: File, val options: CompileOptions, val compilerVersion: String, val order: CompileOrder.Value)
object CompileSetup
@ -32,7 +32,9 @@ object CompileSetup
def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile
}
implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] {
def equiv(a: CompileOptions, b: CompileOptions) = a.options sameElements b.options
def equiv(a: CompileOptions, b: CompileOptions) =
(a.options sameElements b.options) &&
(a.javacOptions sameElements b.javacOptions)
}
implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] {
def equiv(a: String, b: String) = a == b
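
CompileOptions now carries the javac options alongside the scalac options, and the Equiv instance above treats two setups as equal only when both sequences match. A minimal sketch of that check, assuming the implicits defined in CompileSetup are imported:

// Sketch only: equality requires scalac and javac options to match element-wise.
val a = new CompileOptions(Seq("-deprecation"), Seq("-g"))
val b = new CompileOptions(Seq("-deprecation"), Seq("-g"))
val unchanged = implicitly[Equiv[CompileOptions]].equiv(a, b) // true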

View File

@ -300,7 +300,7 @@ final class API(val global: Global, val callback: xsbti.AnalysisCallback) extend
}
def isTopLevel(sym: Symbol): Boolean =
(sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic &&
!sym.hasFlag(Flags.SYNTHETIC)// && !sym.hasFlag(Flags.JAVA)
!sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA)
}
// In 2.8, attributes is renamed to annotations

View File

@ -42,7 +42,7 @@ object AnalysisFormats
implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s))
// can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions]
implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] =
wrap[CompileOptions, Seq[String]](_.options, os => new CompileOptions(os))(seqFormat[String])
wrap[CompileOptions, (Seq[String],Seq[String])](co => (co.options, co.javacOptions), os => new CompileOptions(os._1, os._2))
implicit val orderFormat: Format[CompileOrder.Value] = enumerationFormat(CompileOrder)
implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x)

View File

@ -365,41 +365,4 @@ object ModuleConfiguration
{
def apply(org: String, resolver: Resolver): ModuleConfiguration = apply(org, "*", "*", resolver)
def apply(org: String, name: String, resolver: Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver)
}
/*
object Credentials
{
/** Add the provided credentials to Ivy's credentials cache.*/
def add(realm: String, host: String, userName: String, passwd: String): Unit =
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(path: Path, log: Logger)
{
val msg =
if(path.exists)
{
val properties = new scala.collection.mutable.HashMap[String, String]
def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path)
impl.MapUtilities.read(properties, path, log) orElse
{
List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
{
case (Nil, List(realm, host, user, pass)) => add(realm, host, user, pass); None
case (errors, _) => Some(errors.mkString("\n"))
}
}
}
else
Some("Credentials file " + path + " does not exist")
msg.foreach(x => log.warn(x))
}
private[this] val RealmKeys = List("realm")
private[this] val HostKeys = List("host", "hostname")
private[this] val UserKeys = List("user", "user.name", "username")
private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd")
}*/
}

View File

@ -6,36 +6,38 @@ package sbt
import inc._
import java.io.File
import sbt.compile.{AnalyzingCompiler, CompilerArguments}
import compile.{AnalyzingCompiler, CompilerArguments, JavaCompiler}
import classpath.ClasspathUtilities
import classfile.Analyze
import xsbti.api.Source
import xsbti.AnalysisCallback
import CompileSetup._
import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat }
final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File], val previousAnalysis: Analysis,
val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val getAnalysis: File => Option[Analysis],
val maxErrors: Int, val compiler: AnalyzingCompiler)
final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File], val javaSrcBases: Seq[File],
val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val getAnalysis: File => Option[Analysis],
val maxErrors: Int, val compiler: AnalyzingCompiler, val javac: JavaCompiler)
class AggressiveCompile(cacheDirectory: File)
{
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], compiler: AnalyzingCompiler, log: Logger): Analysis =
def apply(compiler: AnalyzingCompiler, javac: JavaCompiler, sources: Seq[File], classpath: Seq[File], outputDirectory: File, javaSrcBases: Seq[File] = Nil, options: Seq[String] = Nil, javacOptions: Seq[String] = Nil, maxErrors: Int = 100)(implicit log: Logger): Analysis =
{
val setup = new CompileSetup(outputDirectory, new CompileOptions(options), compiler.scalaInstance.actualVersion, CompileOrder.Mixed)
compile1(sources, classpath, setup, store, Map.empty, compiler, log)
val setup = new CompileSetup(outputDirectory, new CompileOptions(options, javacOptions), compiler.scalaInstance.actualVersion, CompileOrder.Mixed)
compile1(sources, classpath, javaSrcBases, setup, store, Map.empty, compiler, javac, maxErrors)
}
def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
args.bootClasspath ++ classpath
def compile1(sources: Seq[File], classpath: Seq[File], setup: CompileSetup, store: AnalysisStore, analysis: Map[File, Analysis], compiler: AnalyzingCompiler, log: Logger): Analysis =
def compile1(sources: Seq[File], classpath: Seq[File], javaSrcBases: Seq[File], setup: CompileSetup, store: AnalysisStore, analysis: Map[File, Analysis], compiler: AnalyzingCompiler, javac: JavaCompiler, maxErrors: Int)(implicit log: Logger): Analysis =
{
val (previousAnalysis, previousSetup) = extract(store.get())
val config = new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup, analysis.get _, 100, compiler)
val result = compile2(config, log)
val config = new CompileConfiguration(sources, classpath, javaSrcBases, previousAnalysis, previousSetup, setup, analysis.get _, maxErrors, compiler, javac)
val result = compile2(config)
store.set(result, setup)
result
}
def compile2(config: CompileConfiguration, log: Logger)(implicit equiv: Equiv[CompileSetup]): Analysis =
def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): Analysis =
{
import config._
import currentSetup._
@ -46,11 +48,24 @@ class AggressiveCompile(cacheDirectory: File)
val apiOrEmpty = (api: Either[Boolean, Source]) => api.right.toOption.getOrElse( APIs.emptyAPI )
val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
val externalAPI = apiOrEmpty compose Locate.value(withBootclasspath(cArgs, classpath), getAPI)
val compile0 = (include: Set[File], callback: AnalysisCallback) => {
IO.createDirectory(outputDirectory)
val arguments = cArgs(sources.filter(include), classpath, outputDirectory, options.options)
val incSrc = sources.filter(include)
val arguments = cArgs(incSrc, classpath, outputDirectory, options.options)
compiler.compile(arguments, callback, maxErrors, log)
val javaSrcs = incSrc.filter(javaOnly)
if(!javaSrcs.isEmpty)
{
import Path._
val loader = ClasspathUtilities.toLoader(classpath, compiler.scalaInstance.loader)
// TODO: Analyze needs to generate API from Java class files
Analyze(outputDirectory, javaSrcs, javaSrcBases, log)(callback, loader) {
javac(javaSrcs, classpath, outputDirectory, options.javacOptions)
}
}
}
val sourcesSet = sources.toSet
val analysis = previousSetup match {
case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
@ -64,6 +79,7 @@ class AggressiveCompile(cacheDirectory: File)
case Some((an, setup)) => (an, Some(setup))
case None => (Analysis.Empty, None)
}
def javaOnly(f: File) = f.getName.endsWith(".java")
import AnalysisFormats._
// The following intermediate definitions are needed because of Scala's implicit parameter rules.
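
With the new signature, the caller hands AggressiveCompile both compilers and it partitions the sources itself via javaOnly, routing *.java files through javac inside the Analyze wrapper. A hedged sketch of a call with the optional arguments spelled out; the real wiring appears in the AggressiveCompiler change below, and all names here are assumed to be in scope:

// Sketch only: javaSrcBases, options, javacOptions, and maxErrors all have defaults.
val agg = new AggressiveCompile(cacheDirectory)
val analysis = agg(compiler, javac, sources, classpath, outputDirectory,
  javaSrcBases = javaBaseDirs, javacOptions = Seq("-g"))(log)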

View File

@ -3,7 +3,7 @@
*/
package sbt
import sbt.compile.AnalyzingCompiler
import sbt.compile.{AnalyzingCompiler, JavaCompiler}
import java.io.File
import System.{currentTimeMillis => now}
import Path._
@ -25,6 +25,7 @@ class AggressiveCompiler extends xsbti.AppMain
val launcher = app.scalaProvider.launcher
val sources = cwd ** ("*.scala" | "*.java")
val target = cwd / "target"
val javaBaseDirs = cwd :: Nil
val outputDirectory = target / "classes"
val classpath = outputDirectory +++ (cwd * "*.jar") +++(cwd * (-"project")).descendentsExcept( "*.jar", "project" || HiddenFileFilter)
val cacheDirectory = target / "cache"
@ -32,11 +33,12 @@ class AggressiveCompiler extends xsbti.AppMain
val log = new ConsoleLogger with Logger with sbt.IvyLogger
val componentManager = new ComponentManager(launcher.globalLock, app.components, log)
val compiler = new AnalyzingCompiler(ScalaInstance(args.head, launcher), componentManager, log)
val javac = JavaCompiler.directOrFork(compiler.cp, compiler.scalaInstance)( (args: Seq[String], log: Logger) => Process("javac", args) ! log )
val agg = new AggressiveCompile(cacheDirectory)
try
{
val analysis = agg(sources.get.toSeq, classpath.get.toSeq, outputDirectory, options, compiler, log)
val analysis = agg(compiler, javac, sources.get.toSeq, classpath.get.toSeq, outputDirectory, javaBaseDirs, options)(log)
processResult(analysis, command)
true
}

View File

@ -12,7 +12,6 @@ abstract class SbtProject(info: ProjectInfo) extends DefaultProject(info) with t
def extraResources = descendents(info.projectPath / "licenses", "*") +++ "LICENSE" +++ "NOTICE"
override def mainResources = super.mainResources +++ extraResources
override def testOptions = ExcludeTests("sbt.ReflectiveSpecification" :: Nil) :: super.testOptions.toList
override def normalizedName = "sbt"
override def managedStyle = ManagedStyle.Ivy
@ -41,10 +40,6 @@ abstract class SbtProject(info: ProjectInfo) extends DefaultProject(info) with t
val ivy = "org.apache.ivy" % "ivy" % "2.1.0" intransitive()
val jsch = "com.jcraft" % "jsch" % "0.1.31" intransitive()
val jetty = "org.mortbay.jetty" % "jetty" % "6.1.14" % "optional"
val jetty7server = "org.eclipse.jetty" % "jetty-server" % "7.0.1.v20091125" % "optional"
val jetty7webapp = "org.eclipse.jetty" % "jetty-webapp" % "7.0.1.v20091125" % "optional"
val testInterface = "org.scala-tools.testing" % "test-interface" % "0.5"
@ -64,20 +59,4 @@ abstract class SbtProject(info: ProjectInfo) extends DefaultProject(info) with t
override def packageSrcJar= defaultJarPath("-sources.jar")
/*val sourceArtifact = Artifact(artifactID, "src", "jar", "sources")
val docsArtifact = Artifact(artifactID, "doc", "jar", "javadoc")*/
/* For generating JettyRun for Jetty 6 and 7. The only difference is the imports, but the file has to be compiled against each set of imports. */
override def compileAction = super.compileAction dependsOn (generateJettyRun6, generateJettyRun7)
def jettySrcDir = mainScalaSourcePath / "sbt" / "jetty"
def jettyTemplate = jettySrcDir / "LazyJettyRun.scala.templ"
lazy val generateJettyRun6 = generateJettyRun(jettyTemplate, jettySrcDir / "LazyJettyRun6.scala", "6", jettySrcDir / "jetty6.imports")
lazy val generateJettyRun7 = generateJettyRun(jettyTemplate, jettySrcDir / "LazyJettyRun7.scala", "7", jettySrcDir / "jetty7.imports")
def generateJettyRun(in: Path, out: Path, version: String, importsPath: Path) =
task
{
(for(template <- FileUtilities.readString(in asFile, log).right; imports <- FileUtilities.readString(importsPath asFile, log).right) yield
FileUtilities.write(out asFile, processJettyTemplate(template, version, imports), log).toLeft(()) ).left.toOption
}
def processJettyTemplate(template: String, version: String, imports: String): String =
template.replaceAll("""\Q${jetty.version}\E""", version).replaceAll("""\Q${jetty.imports}\E""", imports)
}

View File

@ -17,17 +17,20 @@ class XSbt(info: ProjectInfo) extends ParentProject(info) with NoCrossPaths
val collectionSub = testedBase(utilPath / "collection", "Collections")
val ioSub = testedBase(utilPath / "io", "IO", controlSub)
val classpathSub = baseProject(utilPath / "classpath", "Classpath", launchInterfaceSub, ioSub)
val classfileSub = testedBase(utilPath / "classfile", "Classfile", ioSub, interfaceSub)
val completeSub = testedBase(utilPath / "complete", "Completion", ioSub)
val logSub = project(utilPath / "log", "Logging", new LogProject(_), interfaceSub)
val classfileSub = testedBase(utilPath / "classfile", "Classfile", ioSub, interfaceSub, logSub)
val datatypeSub = baseProject(utilPath /"datatype", "Datatype Generator", ioSub)
val processSub = project(utilPath /"process", "Process", new Base(_) with TestWithIO, ioSub, logSub)
val processSub = project(utilPath / "process", "Process", new Base(_) with TestWithIO, ioSub, logSub)
val envSub = baseProject(utilPath / "env", "Properties", ioSub, logSub, classpathSub)
// intermediate-level modules
val ivySub = project("ivy", "Ivy", new IvyProject(_), interfaceSub, launchInterfaceSub, logSub)
val testingSub = project("testing", "Testing", new TestingProject(_), ioSub, classpathSub, logSub)
val taskSub = testedBase(tasksPath, "Tasks", controlSub, collectionSub)
val cacheSub = project(cachePath, "Cache", new CacheProject(_), ioSub, collectionSub)
val webappSub = project("web", "Web App", new WebAppProject(_), ioSub, logSub, classpathSub, controlSub)
val runSub = baseProject("run", "Run", ioSub, logSub, classpathSub, processSub)
// compilation/discovery related modules
val compileInterfaceSub = project(compilePath / "interface", "Compiler Interface", new CompilerInterfaceProject(_), interfaceSub)
@ -35,16 +38,19 @@ class XSbt(info: ProjectInfo) extends ParentProject(info) with NoCrossPaths
val discoverySub = testedBase(compilePath / "discover", "Discovery", compileIncrementalSub, apiSub)
val compilePersistSub = project(compilePath / "persist", "Persist", new PersistProject(_), compileIncrementalSub, apiSub)
val compilerSub = project(compilePath, "Compile", new CompileProject(_),
launchInterfaceSub, interfaceSub, ivySub, ioSub, classpathSub, compileInterfaceSub, logSub, processSub)
launchInterfaceSub, interfaceSub, ivySub, ioSub, classpathSub, compileInterfaceSub, logSub)
val altCompilerSub = baseProject("main", "Alternate Compiler Test", compileIncrementalSub, compilerSub, ioSub, logSub, discoverySub, compilePersistSub)
val altCompilerSub = baseProject("main", "Alternate Compiler Test",
classfileSub, compileIncrementalSub, compilerSub, ioSub, logSub, discoverySub, compilePersistSub, processSub)
/** following are not updated for 2.8 or 0.9 */
/** following modules are not updated for 2.8 or 0.9 */
val testSub = project("scripted", "Test", new TestProject(_), ioSub)
val trackingSub = baseProject(cachePath / "tracking", "Tracking", cacheSub)
val sbtSub = project(sbtPath, "Simple Build Tool", new SbtProject(_) {}, compilerSub, launchInterfaceSub, testingSub, cacheSub, taskSub)
val sbtSub = project(sbtPath, "Simple Build Tool", new SbtProject(_) {},
compilerSub, launchInterfaceSub, testingSub, cacheSub, taskSub)
val installerSub = project(sbtPath / "install", "Installer", new InstallerProject(_) {}, sbtSub)
lazy val dist = task { None } dependsOn(launchSub.proguard, sbtSub.publishLocal, installerSub.publishLocal)
@ -93,6 +99,35 @@ class XSbt(info: ProjectInfo) extends ParentProject(info) with NoCrossPaths
override def deliverProjectDependencies = Nil
}
}
class WebAppProject(info: ProjectInfo) extends Base(info)
{
val jetty = "org.mortbay.jetty" % "jetty" % "6.1.14" % "optional"
val jetty7server = "org.eclipse.jetty" % "jetty-server" % "7.0.1.v20091125" % "optional"
val jetty7webapp = "org.eclipse.jetty" % "jetty-webapp" % "7.0.1.v20091125" % "optional"
val optional = Configurations.Optional
/* For generating JettyRun for Jetty 6 and 7. The only difference is the imports, but the file has to be compiled against each set of imports. */
override def compileAction = super.compileAction dependsOn (generateJettyRun6, generateJettyRun7)
def jettySrcDir = info.projectPath
def jettyTemplate = jettySrcDir / "LazyJettyRun.scala.templ"
lazy val generateJettyRun6 = generateJettyRunN("6")
lazy val generateJettyRun7 = generateJettyRunN("7")
def generateJettyRunN(n: String) =
generateJettyRun(jettyTemplate, jettySrcDir / ("LazyJettyRun" + n + ".scala"), n, jettySrcDir / ("jetty" + n + ".imports"))
def generateJettyRun(in: Path, out: Path, version: String, importsPath: Path) =
task
{
(for(template <- FileUtilities.readString(in asFile, log).right; imports <- FileUtilities.readString(importsPath asFile, log).right) yield
FileUtilities.write(out asFile, processJettyTemplate(template, version, imports), log).toLeft(()) ).left.toOption
}
def processJettyTemplate(template: String, version: String, imports: String): String =
template.replaceAll("""\Q${jetty.version}\E""", version).replaceAll("""\Q${jetty.imports}\E""", imports)
}
trait TestDependencies extends Project
{
val sc = "org.scala-tools.testing" %% "scalacheck" % "1.7" % "test"
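
As the comment in WebAppProject above notes, the Jetty 6 and Jetty 7 sources differ only in their imports, so a single template is rendered once per version. A hedged illustration of processJettyTemplate on a toy template (the real LazyJettyRun.scala.templ is not part of this diff):

// Illustration only; the template text is made up.
val toyTemplate = "// Jetty ${jetty.version}\n${jetty.imports}\n"
processJettyTemplate(toyTemplate, "6", "import org.mortbay.jetty.Server")
// result: "// Jetty 6\nimport org.mortbay.jetty.Server\n"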

View File

@ -3,15 +3,11 @@
*/
package sbt
import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings}
import scala.tools.nsc.interpreter.InteractiveReader
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.util.ClassPath
import java.io.File
import java.net.{URL, URLClassLoader}
import java.lang.reflect.{Method, Modifier}
import Modifier.{isPublic, isStatic}
import classpath.ClasspathUtilities
trait ScalaRun
{
@ -37,7 +33,7 @@ class ForkRun(config: ForkScalaRun) extends ScalaRun
Some("Nonzero exit code returned from " + label + ": " + exitCode)
}
}
class Run(instance: xsbt.ScalaInstance) extends ScalaRun
class Run(instance: ScalaInstance) extends ScalaRun
{
/** Runs the class 'mainClass' using the given classpath and options using the scala runner.*/
def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger) =
@ -59,7 +55,7 @@ class Run(instance: xsbt.ScalaInstance) extends ScalaRun
val main = getMainMethod(mainClassName, loader)
invokeMain(loader, main, options)
}
finally { xsbt.FileUtilities.delete(tempDir asFile) }
finally { IO.delete(tempDir asFile) }
}
private def invokeMain(loader: ClassLoader, main: Method, options: Seq[String])
{
@ -85,8 +81,9 @@ object Run
{
def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger)(implicit runner: ScalaRun) =
runner.run(mainClass, classpath, options, log)
/** Executes the given function, trapping calls to System.exit. */
private[sbt] def executeTrapExit(f: => Unit, log: Logger): Option[String] =
def executeTrapExit(f: => Unit, log: Logger): Option[String] =
{
val exitCode = TrapExit(f, log)
if(exitCode == 0)
@ -97,69 +94,4 @@ object Run
else
Some("Nonzero exit code: " + exitCode)
}
/** Create a settings object and execute the provided function if the settings are created ok.*/
private def createSettings(log: Logger)(f: Settings => Option[String]) =
{
val command = new GenericRunnerCommand(Nil, message => log.error(message))
if(command.ok)
f(command.settings)
else
Some(command.usageMsg)
}
/** Starts a Scala interpreter session with 'project' bound to the value 'current' in the console
* and the following two lines executed:
* import sbt._
* import current._
*/
def projectConsole(project: Project): Option[String] =
{
import project.log
createSettings(log) { interpreterSettings =>
createSettings(log) { compilerSettings =>
log.info("Starting scala interpreter with project definition " + project.name + " ...")
log.info("")
Control.trapUnit("Error during session: ", log)
{
JLine.withJLine {
val loop = new ProjectInterpreterLoop(compilerSettings, project)
executeTrapExit(loop.main(interpreterSettings), log)
}
}
}}
}
/** A custom InterpreterLoop with the purpose of creating an interpreter with Project 'project' bound to the value 'current',
* and the following three lines interpreted:
* import sbt._
* import Process._
* import current._.
* To do this,
* 1) The compiler uses a different settings instance: 'compilerSettings', which will have its classpath set to include
* the Scala compiler and library jars and the classpath used to compile the project.
* 2) The parent class loader for the interpreter is the loader that loaded the project, so that the project can be bound to a variable
* in the interpreter.
*/
private class ProjectInterpreterLoop(compilerSettings: Settings, project: Project) extends InterpreterLoop
{
override def createInterpreter()
{
val projectLoader = project.getClass.getClassLoader
val classpath = Project.getProjectClasspath(project)
val fullClasspath = classpath.get ++ Path.fromFiles(project.info.app.scalaProvider.jars)
compilerSettings.classpath.value = Path.makeString(fullClasspath)
project.log.debug(" console-project classpath:\n\t" + fullClasspath.mkString("\n\t"))
in = InteractiveReader.createDefault()
interpreter = new Interpreter(settings)
{
override protected def parentClassLoader = projectLoader
override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
}
interpreter.setContextClassLoader()
interpreter.bind("current", project.getClass.getName, project)
interpreter.interpret("import sbt._")
interpreter.interpret("import Process._")
interpreter.interpret("import current._")
}
}
}
}
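
executeTrapExit is now public on object Run, so other modules (for example the Console wrapper moved in this commit) can trap System.exit around user code. A minimal sketch, assuming a Logger named log is in scope:

// Sketch only: run a block, converting a System.exit into an Option[String] error.
val problem: Option[String] = Run.executeTrapExit( { println("user code") }, log )
problem foreach (msg => log.error(msg))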

View File

@ -1,29 +1,27 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt.impl
import sbt._
package sbt
private[sbt] object SelectMainClass
object SelectMainClass
{
def apply(promptIfMultipleChoices: Boolean, mainClasses: List[String]) =
// Some(SimpleReader.readLine _)
def apply(promptIfMultipleChoices: Option[String => Option[String]], mainClasses: List[String]) =
{
mainClasses match
{
case Nil => None
case head :: Nil => Some(head)
case multiple =>
if(promptIfMultipleChoices)
for(prompt <- promptIfMultipleChoices) yield
{
println("\nMultiple main classes detected, select one to run:\n")
for( (className, index) <- multiple.zipWithIndex )
println(" [" + (index+1) + "] " + className)
val line = trim(SimpleReader.readLine("\nEnter number: "))
val line = trim(prompt("\nEnter number: "))
println("")
toInt(line, multiple.length) map multiple.apply
}
else
None
}
}
private def trim(s: Option[String]) = s.getOrElse("")
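
SelectMainClass now takes the prompt itself as an Option[String => Option[String]] rather than a Boolean, which lets it live in the run module without depending on sbt's console input. A hedged sketch of both modes, following the SimpleReader.readLine hint in the comment above; mainClasses is assumed to be in scope:

// Interactive: prompt the user when several main classes were discovered.
SelectMainClass(Some(SimpleReader.readLine _), mainClasses)
// Non-interactive: with None, a class is selected only when it is unambiguous.
SelectMainClass(None, mainClasses)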

View File

@ -106,11 +106,8 @@ object TrapExit
* thread (AWT-XAWT, AWT-Windows, ...)*/
private def allThreads: Set[Thread] =
{
val allThreads = wrap.Wrappers.toList(Thread.getAllStackTraces.keySet)
val threads = new scala.collection.mutable.HashSet[Thread]
for(thread <- allThreads if !isSystemThread(thread))
threads += thread
threads
import collection.JavaConversions._
Thread.getAllStackTraces.keySet.filter(thread => !isSystemThread(thread))
}
/** Returns true if the given thread is in the 'system' thread group and is an AWT thread other than
* AWT-EventQueue or AWT-Shutdown.*/

sbt/.gitignore
View File

@ -1,5 +1,2 @@
LazyJettyRun6.scala
LazyJettyRun7.scala
install/project/boot/
scripted/project/boot/
project/plugins/project/

View File

@ -1,228 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt
trait TaskAnalysis[Source, Product, External] extends NotNull
{
import scala.collection.Set
def save(): Option[String]
def revert(): Option[String]
def clear(): Unit
def allSources: Set[Source]
def allProducts: Set[Product]
def allExternals: Set[External]
def sourceDependencies(source: Source): Option[Set[Source]]
def products(source: Source): Option[Set[Product]]
def externalDependencies(external: External): Option[Set[Source]]
def addSource(source: Source): Unit
def addExternalDependency(dependsOn: External, source: Source): Unit
def addSourceDependency(dependsOn: Source, source: Source): Unit
def addProduct(source: Source, product: Product): Unit
def removeSource(source: Source): Unit
def removeDependent(source: Source): Unit
def removeDependencies(source: Source): Option[Set[Source]]
def removeExternalDependency(external: External): Unit
}
import java.io.File
import BasicAnalysis._
import impl.MapUtilities.{add, all, read, mark, readOnlyIterable, write}
import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set}
class BasicAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends TaskAnalysis[Path, Path, File]
{
private val sourceDependencyMap: Map[Path, Set[Path]] = new HashMap
private val productMap: Map[Path, Set[Path]] = new HashMap
private val externalDependencyMap: Map[File, Set[Path]] = new HashMap
final type AnyMapToSource = Map[K, Set[Path]] forSome {type K}
final type AnySourceMap = Map[Path, T] forSome {type T}
final type AnySourceSetMap = Map[Path, Set[T]] forSome {type T}
final type AnyMap = Map[K, V] forSome { type K; type V }
protected def mapsToClear = List[AnyMap](sourceDependencyMap, productMap, externalDependencyMap)
protected def mapsToRemoveSource = List[AnySourceMap](sourceDependencyMap, productMap)
protected def mapsToRemoveDependent = List[AnyMapToSource](sourceDependencyMap, externalDependencyMap)
protected def mapsToMark = List[AnySourceSetMap](sourceDependencyMap, productMap)
def clear()
{
for(map <- mapsToClear)
map.clear()
}
def removeSource(source: Path)
{
for(sourceProducts <- productMap.get(source))
FileUtilities.clean(sourceProducts, true, log)
for(map <- mapsToRemoveSource)
map -= source
}
def removeSelfDependency(source: Path)
{
for(deps <- sourceDependencyMap.get(source))
deps -= source
}
def removeDependent(source: Path)
{
for(map <- mapsToRemoveDependent; deps <- map.values)
deps -= source
}
def removeDependencies(source: Path) = sourceDependencyMap.removeKey(source)
def removeExternalDependency(dep: File) = externalDependencyMap.removeKey(dep.getAbsoluteFile)
def externalDependencies(external: File) = externalDependencyMap.get(external.getAbsoluteFile)
def sourceDependencies(source: Path) = sourceDependencyMap.get(source)
def products(sources: Iterable[Path]): Iterable[Path] =
{
val buffer = new ListBuffer[Path]
for(source <- sources; sourceProducts <- productMap.get(source))
buffer ++= sourceProducts
buffer.readOnly
}
def products(source: Path) = productMap.get(source)
def allSources = sourceDependencyMap.keySet
def allProducts: Set[Path] = HashSet(flatten(productMap.values.toList) : _*)
def allExternals = externalDependencyMap.keySet
def allExternalDependencies = readOnlyIterable(externalDependencyMap)
def allDependencies = readOnlyIterable(sourceDependencyMap)
def addSourceDependency(on: Path, from: Path) = add(on, from, sourceDependencyMap)
def addExternalDependency(on: File, from: Path) = add(on.getAbsoluteFile, from, externalDependencyMap)
def addProductDependency(on: Path, from: Path) =
{
for( (source, _) <- productMap.find(_._2.contains(on)) )
addSourceDependency(source, from)
}
def addProduct(source: Path, file: Path) = add(source, file, productMap)
def addSource(source: Path) =
{
for(map <- mapsToMark)
mark(source, map)
}
import Format._ // get implicits for data types
implicit val path: Format[Path] = Format.path(projectPath)
implicit val pathSet: Format[Set[Path]] = Format.set
protected def backedMaps: Iterable[Backed[_,_]] =
Backed(sourceDependencyMap, DependenciesLabel, DependenciesFileName) ::
Backed(productMap, GeneratedLabel, GeneratedFileName) ::
Backed(externalDependencyMap, ExternalDependenciesLabel, ExternalDependenciesFileName) ::
Nil
def revert() = load()
private def loadBacked[Key,Value](b: Backed[Key,Value]) = read(b.map, analysisPath / b.name, log)(b.keyFormat, b.valueFormat)
private def storeBacked[Key,Value](b: Backed[Key,Value]) = write(b.map, b.label, analysisPath / b.name, log)(b.keyFormat, b.valueFormat)
final def load(): Option[String] = Control.lazyFold(backedMaps.toList)(backed =>loadBacked(backed))
final def save(): Option[String] = Control.lazyFold(backedMaps.toList)(backed => storeBacked(backed))
}
object BasicAnalysis
{
private def flatten(s: Iterable[Set[Path]]): Seq[Path] = s.flatMap(x => x.toSeq).toSeq
val GeneratedFileName = "generated_files"
val DependenciesFileName = "dependencies"
val ExternalDependenciesFileName = "external"
val GeneratedLabel = "Generated Classes"
val DependenciesLabel = "Source Dependencies"
val ExternalDependenciesLabel = "External Dependencies"
def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, BasicAnalysis] =
{
val analysis = new BasicAnalysis(analysisPath, projectPath, log)
analysis.load().toLeft(analysis)
}
}
object CompileAnalysis
{
val HashesFileName = "hashes"
val TestsFileName = "tests"
val ApplicationsFileName = "applications"
val ProjectDefinitionsName = "projects"
val HashesLabel = "Source Hashes"
val TestsLabel = "Tests"
val ApplicationsLabel = "Classes with main methods"
val ProjectDefinitionsLabel = "Project Definitions"
def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, CompileAnalysis] =
{
val analysis = new CompileAnalysis(analysisPath, projectPath, log)
analysis.load().toLeft(analysis)
}
}
import CompileAnalysis._
import Format._ // get implicits for data types
class BasicCompileAnalysis protected (analysisPath: Path, projectPath: Path, log: Logger) extends BasicAnalysis(analysisPath, projectPath, log)
{
/*private */val hashesMap = new HashMap[Path, Array[Byte]]
val apiMap = new HashMap[Path, xsbti.api.Source]
override protected def mapsToClear = apiMap :: hashesMap :: super.mapsToClear
override protected def mapsToRemoveSource = apiMap :: hashesMap :: super.mapsToRemoveSource
def setHash(source: Path, hash: Array[Byte]) { hashesMap(source) = hash }
def clearHash(source: Path) { hashesMap.removeKey(source) }
def hash(source: Path) = hashesMap.get(source)
def clearHashes() { hashesMap.clear() }
def setAPI(source: Path, a: xsbti.api.Source) { apiMap(source) = a }
def getClasses(sources: PathFinder, outputDirectory: Path): PathFinder =
Path.lazyPathFinder
{
val basePath = (outputDirectory ###)
for(c <- products(sources.get)) yield
Path.relativize(basePath, c).getOrElse(c)
}
implicit val stringSet: Format[Set[String]] = Format.set
override protected def backedMaps = Backed(hashesMap, HashesLabel, HashesFileName) :: super.backedMaps.toList
}
private[sbt] final class BuilderCompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log)
{
private val projectDefinitionMap = new HashMap[Path, Set[String]]
override protected def mapsToClear = projectDefinitionMap :: super.mapsToClear
override protected def mapsToRemoveSource = projectDefinitionMap :: super.mapsToRemoveSource
def allProjects = all(projectDefinitionMap)
def addProjectDefinition(source: Path, className: String) = add(source, className, projectDefinitionMap)
override protected def backedMaps =
Backed(projectDefinitionMap, ProjectDefinitionsLabel, ProjectDefinitionsName) ::
super.backedMaps
}
class CompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log)
{
private val testMap = new HashMap[Path, Set[Discovered]]
private val applicationsMap = new HashMap[Path, Set[String]]
def allTests = all(testMap)
def allApplications = all(applicationsMap)
def addTest(source: Path, test: Discovered) = add(source, test, testMap)
def addApplication(source: Path, className: String) = add(source, className, applicationsMap)
def testSourceMap: Map[String, Path] =
{
val map = new HashMap[String, Path]
for( (source, tests) <- testMap; test <- tests) map(test.className) = source
map
}
override protected def mapsToClear = applicationsMap :: testMap :: super.mapsToClear
override protected def mapsToRemoveSource = applicationsMap :: testMap :: super.mapsToRemoveSource
implicit val testSet: Format[Set[Discovered]] = Format.set
override protected def backedMaps =
Backed(testMap, TestsLabel, TestsFileName) ::
Backed(applicationsMap, ApplicationsLabel, ApplicationsFileName) ::
super.backedMaps
}
/** A map that is persisted in a properties file named 'name' and with 'label'. 'keyFormat' and 'valueFormat' are used to (de)serialize. */
final case class Backed[Key, Value](map: Map[Key, Value], label: String, name: String)(implicit val keyFormat: Format[Key], val valueFormat: Format[Value]) extends NotNull

View File

@ -1,101 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt
import java.io.File
object AnalysisCallback
{
private val map = new scala.collection.mutable.HashMap[Int, AnalysisCallback]
private var nextID: Int = 0
def register(callback: AnalysisCallback): Int =
{
val id = nextID
nextID += 1
map(id) = callback
id
}
def apply(id: Int): Option[AnalysisCallback] = map.get(id)
def unregister(id: Int)
{
map -= id
}
}
trait AnalysisCallback extends NotNull
{
/** The names of classes that the analyzer should find subclasses of.*/
def superclassNames: Iterable[String]
/** The names of annotations that the analyzer should look for on classes and methods. */
def annotationNames: Iterable[String]
/** The base path for the project.*/
def basePath: Path
/** Called when the given superclass could not be found on the classpath by the compiler.*/
def superclassNotFound(superclassName: String): Unit
/** Called before the source at the given location is processed. */
def beginSource(sourcePath: Path): Unit
/** Called when a subclass of one of the classes given in <code>superclassNames</code> is
* discovered.*/
def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit
/** Called when a class or one of its methods has an annotation listed in <code>annotationNames</code>*/
def foundAnnotated(source: Path, className: String, annotationName: String, isModule: Boolean): Unit
/** Called to indicate that the source file <code>sourcePath</code> depends on the source file
* <code>dependsOnPath</code>.*/
def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit
/** Called to indicate that the source file <code>sourcePath</code> depends on the jar
* <code>jarPath</code>.*/
def jarDependency(jarPath: File, sourcePath: Path): Unit
/** Called to indicate that the source file <code>sourcePath</code> depends on the class file
* <code>classFile</code>.*/
def classDependency(classFile: File, sourcePath: Path): Unit
/** Called to indicate that the source file <code>sourcePath</code> depends on the class file
* <code>classFile</code> that is a product of some source. This differs from classDependency
* because it is really a sourceDependency. The source corresponding to <code>classFile</code>
was not included in the compilation, so the plugin doesn't know what the source is. It
* only knows that the class file came from the output directory.*/
def productDependency(classFile: Path, sourcePath: Path): Unit
/** Called to indicate that the source file <code>sourcePath</code> produces a class file at
* <code>modulePath</code>.*/
def generatedClass(sourcePath: Path, modulePath: Path): Unit
/** Called after the source at the given location has been processed. */
def endSource(sourcePath: Path): Unit
/** Called when a module with a public 'main' method with the right signature is found.*/
def foundApplication(sourcePath: Path, className: String): Unit
def api(sourcePath: Path, source: xsbti.api.Source): Unit
}
abstract class BasicAnalysisCallback[A <: BasicCompileAnalysis](val basePath: Path, protected val analysis: A) extends AnalysisCallback
{
def superclassNames: Iterable[String]
def superclassNotFound(superclassName: String) {}
def beginSource(sourcePath: Path): Unit =
analysis.addSource(sourcePath)
def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit =
analysis.addSourceDependency(dependsOnPath, sourcePath)
def jarDependency(jarFile: File, sourcePath: Path): Unit =
analysis.addExternalDependency(jarFile, sourcePath)
def classDependency(classFile: File, sourcePath: Path): Unit =
analysis.addExternalDependency(classFile, sourcePath)
def productDependency(classFile: Path, sourcePath: Path): Unit =
analysis.addProductDependency(classFile, sourcePath)
def generatedClass(sourcePath: Path, modulePath: Path): Unit =
analysis.addProduct(sourcePath, modulePath)
def endSource(sourcePath: Path): Unit =
analysis.removeSelfDependency(sourcePath)
def api(sourcePath: Path, source: xsbti.api.Source): Unit =
analysis.setAPI(sourcePath, source)
}
abstract class BasicCompileAnalysisCallback(basePath: Path, analysis: CompileAnalysis)
extends BasicAnalysisCallback(basePath, analysis)
{
def foundApplication(sourcePath: Path, className: String): Unit =
analysis.addApplication(sourcePath, className)
}

View File

@ -1,96 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah, Seth Tisue
*/
package sbt
import java.io.File
import xsbt.{AnalyzingCompiler, CompileFailed, CompilerArguments, ScalaInstance}
sealed abstract class CompilerCore
{
final def apply(label: String, sources: Iterable[Path], classpath: Iterable[Path], outputDirectory: Path, scalaOptions: Seq[String], log: Logger): Option[String] =
apply(label, sources, classpath, outputDirectory, scalaOptions, Nil, CompileOrder.Mixed, log)
final def apply(label: String, sources: Iterable[Path], classpath: Iterable[Path], outputDirectory: Path, scalaOptions: Seq[String], javaOptions: Seq[String], order: CompileOrder.Value, log: Logger): Option[String] =
{
def filteredSources(extension: String) = sources.filter(_.name.endsWith(extension))
def process(label: String, sources: Iterable[_], act: => Unit) =
() => if(sources.isEmpty) log.debug("No " + label + " sources.") else act
val javaSources = Path.getFiles(filteredSources(".java"))
val scalaSources = Path.getFiles( if(order == CompileOrder.Mixed) sources else filteredSources(".scala") )
val classpathSet = Path.getFiles(classpath)
val scalaCompile = process("Scala", scalaSources, processScala(scalaSources, classpathSet, outputDirectory.asFile, scalaOptions, log) )
val javaCompile = process("Java", javaSources, processJava(javaSources, classpathSet, outputDirectory.asFile, javaOptions, log))
doCompile(label, sources, outputDirectory, order, log)(javaCompile, scalaCompile)
}
protected def doCompile(label: String, sources: Iterable[Path], outputDirectory: Path, order: CompileOrder.Value, log: Logger)(javaCompile: () => Unit, scalaCompile: () => Unit) =
{
log.info(actionStartMessage(label))
if(sources.isEmpty)
{
log.info(actionNothingToDoMessage)
None
}
else
{
FileUtilities.createDirectory(outputDirectory.asFile, log) orElse
(try
{
val (first, second) = if(order == CompileOrder.JavaThenScala) (javaCompile, scalaCompile) else (scalaCompile, javaCompile)
first()
second()
log.info(actionSuccessfulMessage)
None
}
catch { case e: xsbti.CompileFailed => Some(e.toString) })
}
}
def actionStartMessage(label: String): String
def actionNothingToDoMessage: String
def actionSuccessfulMessage: String
protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
}
sealed abstract class CompilerBase extends CompilerCore
{
def actionStartMessage(label: String) = "Compiling " + label + " sources..."
val actionNothingToDoMessage = "Nothing to compile."
val actionSuccessfulMessage = "Compilation successful."
}
// The following code is based on scala.tools.nsc.Main and scala.tools.nsc.ScalaDoc
// Copyright 2005-2008 LAMP/EPFL
// Original author: Martin Odersky
final class Compile(maximumErrors: Int, compiler: AnalyzingCompiler, analysisCallback: AnalysisCallback, baseDirectory: Path) extends CompilerBase with WithArgumentFile
{
protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger)
{
val callbackInterface = new AnalysisInterface(analysisCallback, baseDirectory, outputDirectory)
compiler(Set() ++ sources, Set() ++ classpath, outputDirectory, options, callbackInterface, maximumErrors, log)
}
}
final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends CompilerCore
{
protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit =
compiler.doc(sources, classpath, outputDirectory, options, maximumErrors, log)
protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger) = ()
def actionStartMessage(label: String) = "Generating API documentation for " + label + " sources..."
val actionNothingToDoMessage = "No sources specified."
val actionSuccessfulMessage = "API documentation generation successful."
def actionUnsuccessfulMessage = "API documentation generation unsuccessful."
}
final class Console(compiler: AnalyzingCompiler) extends NotNull
{
/** Starts an interactive scala interpreter session with the given classpath.*/
def apply(classpath: Iterable[Path], log: Logger): Option[String] =
apply(classpath, Nil, "", log)
def apply(classpath: Iterable[Path], options: Seq[String], initialCommands: String, log: Logger): Option[String] =
{
def console0 = compiler.console(Path.getFiles(classpath), options, initialCommands, log)
JLine.withJLine( Run.executeTrapExit(console0, log) )
}
}

View File

@ -1,394 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt
import xsbt.AnalyzingCompiler
trait Conditional[Source, Product, External] extends NotNull
{
type AnalysisType <: TaskAnalysis[Source, Product, External]
val analysis: AnalysisType = loadAnalysis
protected def loadAnalysis: AnalysisType
protected def log: Logger
protected def productType: String
protected def productTypePlural: String
protected def sourcesToProcess: Iterable[Source]
protected def sourceExists(source: Source): Boolean
protected def sourceLastModified(source: Source): Long
protected def productExists(product: Product): Boolean
protected def productLastModified(product: Product): Long
protected def externalInfo(externals: Iterable[External]): Iterable[(External, ExternalInfo)]
protected def execute(cAnalysis: ConditionalAnalysis): Option[String]
final case class ExternalInfo(available: Boolean, lastModified: Long) extends NotNull
trait ConditionalAnalysis extends NotNull
{
def dirtySources: Iterable[Source]
def cleanSources: Iterable[Source]
def directlyModifiedSourcesCount: Int
def invalidatedSourcesCount: Int
def removedSourcesCount: Int
}
final def run =
{
val result = execute(analyze)
processingComplete(result.isEmpty)
result
}
private def analyze =
{
import scala.collection.mutable.HashSet
val sourcesSnapshot = sourcesToProcess
val removedSources = new HashSet[Source]
removedSources ++= analysis.allSources
removedSources --= sourcesSnapshot
val removedCount = removedSources.size
for(removed <- removedSources)
{
log.debug("Source " + removed + " removed.")
analysis.removeDependent(removed)
}
val unmodified = new HashSet[Source]
val modified = new HashSet[Source]
for(source <- sourcesSnapshot)
{
if(isSourceModified(source))
{
log.debug("Source " + source + " directly modified.")
modified += source
}
else
{
log.debug("Source " + source + " unmodified.")
unmodified += source
}
}
val directlyModifiedCount = modified.size
for((external, info) <- externalInfo(analysis.allExternals))
{
val dependentSources = analysis.externalDependencies(external).getOrElse(Set.empty)
if(info.available)
{
val dependencyLastModified = info.lastModified
for(dependentSource <- dependentSources; dependentProducts <- analysis.products(dependentSource))
{
dependentProducts.find(p => productLastModified(p) < dependencyLastModified) match
{
case Some(modifiedProduct) =>
{
log.debug(productType + " " + modifiedProduct + " older than external dependency " + external)
unmodified -= dependentSource
modified += dependentSource
}
case None => ()
}
}
}
else
{
log.debug("External dependency " + external + " not found.")
unmodified --= dependentSources
modified ++= dependentSources
analysis.removeExternalDependency(external)
}
}
val handled = new scala.collection.mutable.HashSet[Source]
val transitive = !java.lang.Boolean.getBoolean("sbt.intransitive")
def markModified(changed: Iterable[Source]) { for(c <- changed if !handled.contains(c)) markSourceModified(c) }
def markSourceModified(src: Source)
{
unmodified -= src
modified += src
handled += src
if(transitive)
markDependenciesModified(src)
}
def markDependenciesModified(src: Source) { analysis.removeDependencies(src).map(markModified) }
markModified(modified.toList)
if(transitive)
removedSources.foreach(markDependenciesModified)
for(changed <- removedSources ++ modified)
analysis.removeSource(changed)
new ConditionalAnalysis
{
def dirtySources = wrap.Wrappers.readOnly(modified)
def cleanSources = wrap.Wrappers.readOnly(unmodified)
def directlyModifiedSourcesCount = directlyModifiedCount
def invalidatedSourcesCount = dirtySources.size - directlyModifiedCount
def removedSourcesCount = removedCount
override def toString =
{
" Source analysis: " + directlyModifiedSourcesCount + " new/modified, " +
invalidatedSourcesCount + " indirectly invalidated, " +
removedSourcesCount + " removed."
}
}
}
protected def checkLastModified = true
protected def noProductsImpliesModified = true
protected def isSourceModified(source: Source) =
{
analysis.products(source) match
{
case None =>
{
log.debug("New file " + source)
true
}
case Some(sourceProducts) =>
{
val sourceModificationTime = sourceLastModified(source)
def isOutofdate(p: Product) =
!productExists(p) || (checkLastModified && productLastModified(p) < sourceModificationTime)
sourceProducts.find(isOutofdate) match
{
case Some(modifiedProduct) =>
log.debug("Outdated " + productType + ": " + modifiedProduct + " for source " + source)
true
case None =>
if(noProductsImpliesModified && sourceProducts.isEmpty)
{
// necessary for change detection that depends on last modified
log.debug("Source " + source + " has no products, marking it modified.")
true
}
else
false
}
}
}
}
protected def processingComplete(success: Boolean)
{
if(success)
{
analysis.save()
log.info(" Post-analysis: " + analysis.allProducts.toSeq.length + " " + productTypePlural + ".")
}
else
analysis.revert()
}
}
abstract class AbstractCompileConfiguration extends NotNull
{
def label: String
def sourceRoots: PathFinder
def sources: PathFinder
def outputDirectory: Path
def classpath: PathFinder
def analysisPath: Path
def projectPath: Path
def log: Logger
def options: Seq[String]
def javaOptions: Seq[String]
def maxErrors: Int
def compileOrder: CompileOrder.Value
}
abstract class CompileConfiguration extends AbstractCompileConfiguration
{
def fingerprints: Fingerprints
}
final case class Fingerprints(superclassNames: Iterable[String], annotationNames: Iterable[String]) extends NotNull
import java.io.File
class CompileConditional(override val config: CompileConfiguration, compiler: AnalyzingCompiler) extends AbstractCompileConditional(config, compiler)
{
import config._
type AnalysisType = CompileAnalysis
protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger) =
new CompileAnalysis(analysisPath, projectPath, log)
protected def analysisCallback = new CompileAnalysisCallback
protected class CompileAnalysisCallback extends BasicCompileAnalysisCallback(projectPath, analysis)
{
private[this] val fingerprints0 = fingerprints
def superclassNames = fingerprints0.superclassNames
def annotationNames = fingerprints0.annotationNames
def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit =
analysis.addTest(sourcePath, DiscoveredSubclass(isModule, subclassName, superclassName))
def foundAnnotated(sourcePath: Path, className: String, annotationName: String, isModule: Boolean): Unit =
analysis.addTest(sourcePath, DiscoveredAnnotated(isModule, className, annotationName))
}
}
abstract class AbstractCompileConditional(val config: AbstractCompileConfiguration, val compiler: AnalyzingCompiler) extends Conditional[Path, Path, File]
{
import config._
type AnalysisType <: BasicCompileAnalysis
protected def loadAnalysis =
{
val a = constructAnalysis(analysisPath, projectPath, log)
for(errorMessage <- a.load())
error(errorMessage)
a
}
protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger): AnalysisType
protected def log = config.log
protected def productType = "class"
protected def productTypePlural = "classes"
protected def sourcesToProcess = sources.get
protected def sourceExists(source: Path) = source.asFile.exists
protected def sourceLastModified(source: Path) = source.asFile.lastModified
protected def productExists(product: Path) = product.asFile.exists
protected def productLastModified(product: Path) = product.asFile.lastModified
private def libraryJar = compiler.scalaInstance.libraryJar
private def compilerJar = compiler.scalaInstance.compilerJar
protected def externalInfo(externals: Iterable[File]) =
{
val (classpathJars, classpathDirs) = ClasspathUtilities.buildSearchPaths(classpath.get ++ Seq(Path.fromFile(libraryJar), Path.fromFile(compilerJar)))
for(external <- externals) yield
{
val available = external.exists && (external == libraryJar || ClasspathUtilities.onClasspath(classpathJars, classpathDirs, external) )
if(!available)
log.debug("External " + external + (if(external.exists) " not on classpath." else " does not exist."))
(external, ExternalInfo(available, external.lastModified))
}
}
import ChangeDetection.{LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist}
protected def changeDetectionMethod: ChangeDetection.Value = HashAndProductsExist
override protected def checkLastModified = changeDetectionMethod != HashAndProductsExist
override protected def noProductsImpliesModified = changeDetectionMethod == LastModifiedOnly
override protected def isSourceModified(source: Path) =
changeDetectionMethod match
{
case HashAndLastModified | HashAndProductsExist =>
// behavior will differ because of checkLastModified
// hash modified must come first so that the latest hash is calculated for every source
hashModified(source) || super.isSourceModified(source)
case HashOnly => hashModified(source)
case LastModifiedOnly => super.isSourceModified(source)
}
import scala.collection.mutable.{Buffer, ListBuffer}
private val newHashes: Buffer[(Path, Option[Array[Byte]])] = new ListBuffer
private def warnHashError(source: Path, message: String)
{
log.warn("Error computing hash for source " + source + ": " + message)
newHashes += ((source, None))
}
protected def hashModified(source: Path) =
{
source.isDirectory ||
(analysis.hash(source) match
{
case None =>
log.debug("Source " + source + " had no hash, marking modified.")
Hash(source, log).fold(err => warnHashError(source, err), newHash => newHashes += ((source, Some(newHash))))
true
case Some(oldHash) =>
{
Hash(source, log) match
{
case Left(err) =>
warnHashError(source, err)
log.debug("Assuming source is modified because of error.")
true
case Right(newHash) =>
newHashes += ((source, Some(newHash)))
val different = !(oldHash deepEquals newHash)
if(different)
log.debug("Hash for source " + source + " changed (was " + Hash.toHex(oldHash) +
", is now " + Hash.toHex(newHash) + "), marking modified.")
different
}
}
})
}
private def scalaJars: Iterable[Path] =
{
val instance = compiler.scalaInstance
Seq(instance.libraryJar, instance.compilerJar).map(Path.fromFile)
}
protected def execute(executeAnalysis: ConditionalAnalysis) =
{
log.info(executeAnalysis.toString)
finishHashes()
import executeAnalysis.dirtySources
// the output directory won't show up in the classpath unless it exists, so do this before classpath.get
val outputDir = outputDirectory.asFile
FileUtilities.createDirectory(outputDir, log)
val cp = classpath.get
if(!dirtySources.isEmpty)
checkClasspath(cp)
def run =
{
val compile = new Compile(config.maxErrors, compiler, analysisCallback, projectPath)
compile(label, dirtySources, cp, outputDirectory, options, javaOptions, compileOrder, log)
}
val loader = ClasspathUtilities.toLoader(cp ++ scalaJars)
val r = classfile.Analyze(projectPath, outputDirectory, dirtySources, sourceRoots.get, log)(analysis.allProducts, analysisCallback, loader)(run)
if(log.atLevel(Level.Debug))
{
/** This checks that the plugin accounted for all classes in the output directory.*/
val classes = scala.collection.mutable.HashSet(analysis.allProducts.toSeq: _*)
val actualClasses = (outputDirectory ** GlobFilter("*.class")).get
val missedClasses = actualClasses.toList.remove(classes.contains)
missedClasses.foreach(c => log.debug("Missed class: " + c))
log.debug("Total missed classes: " + missedClasses.length)
}
r
}
private def finishHashes()
{
if(changeDetectionMethod == LastModifiedOnly)
analysis.clearHashes()
else
{
for((path, hash) <- newHashes)
{
hash match
{
case None => analysis.clearHash(path)
case Some(hash) => analysis.setHash(path, hash)
}
}
}
newHashes.clear()
}
private def checkClasspath(cp: Iterable[Path])
{
import scala.collection.mutable.{HashMap, HashSet, Set}
val collisions = new HashMap[String, Set[Path]]
for(jar <- cp if ClasspathUtilities.isArchive(jar))
collisions.getOrElseUpdate(jar.asFile.getName, new HashSet[Path]) += jar
for((name, jars) <- collisions)
{
if(jars.size > 1)
{
log.debug("Possible duplicate classpath locations for jar " + name + ": ")
for(jar <- jars) log.debug("\t" + jar.absolutePath)
}
}
}
protected def analysisCallback: AnalysisCallback
}
object ChangeDetection extends Enumeration
{
val LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist = Value
}
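The hash-based modes above reduce to re-hashing each source and comparing against the stored hash. A distilled sketch of that comparison, assuming (as the code above implies) that Hash(path, log) yields Either[String, Array[Byte]]:
def contentChanged(source: Path, previousHash: Array[Byte], log: Logger): Boolean =
	Hash(source, log) match
	{
		case Left(err) =>
			log.warn("Error computing hash for source " + source + ": " + err)
			true // assume modified when the hash cannot be computed
		case Right(currentHash) => !(previousHash deepEquals currentHash)
	}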

View File

@ -12,11 +12,7 @@ object Credentials
def add(realm: String, host: String, userName: String, passwd: String): Unit =
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(path: Path, log: Logger)
def apply(path: File, log: Logger)
{
val msg =
if(path.exists)
@ -24,7 +20,7 @@ object Credentials
val properties = new scala.collection.mutable.HashMap[String, String]
def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path)
impl.MapUtilities.read(properties, path, log) orElse
MapIO.read(properties, path, log) orElse
{
List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
{
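With the Path overloads collapsed into the single File-based apply above, loading or registering credentials looks roughly like this; the file location, realm, and host are illustrative values, and log is an sbt Logger assumed to be in scope:
// load stored credentials into Ivy's credential cache
Credentials(new java.io.File(System.getProperty("user.home"), ".ivy2/.credentials"), log)
// or add a single entry directly
Credentials.add("Example Realm", "repo.example.org", "deploy-user", "deploy-password")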

View File

@ -0,0 +1,38 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt
import java.io.File
import xsbt.{AnalyzingCompiler, CompileFailed}
final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler)
{
final def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger)
{
log.info(actionStartMessage(label))
if(sources.isEmpty)
log.info(actionNothingToDoMessage)
else
{
IO.createDirectory(outputDirectory)
compiler.doc(sources, classpath, outputDirectory, options, maximumErrors, log)
log.info(actionSuccessfulMessage)
}
}
def actionStartMessage(label: String) = "Generating API documentation for " + label + " sources..."
val actionNothingToDoMessage = "No sources specified."
val actionSuccessfulMessage = "API documentation generation successful."
def actionUnsuccessfulMessage = "API documentation generation unsuccessful."
}
final class Console(compiler: AnalyzingCompiler)
{
/** Starts an interactive scala interpreter session with the given classpath.*/
def apply(classpath: Seq[File])(implicit log: Logger): Option[String] =
apply(classpath, Nil, "", log)
def apply(classpath: Iterable[File], options: Seq[String], initialCommands: String)(implicit log: Logger): Option[String] =
{
def console0 = compiler.console(Path.getFiles(classpath), options, initialCommands, log)
JLine.withJLine( Run.executeTrapExit(console0, log) )
}
}
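A driving sketch for the two helpers above; the AnalyzingCompiler instance, the source and classpath sequences, and the logger are assumed to exist in the caller:
implicit val logger: Logger = buildLogger                // buildLogger: any sbt Logger, assumed
val scaladoc = new Scaladoc(100, analyzingCompiler)      // analyzingCompiler: AnalyzingCompiler, assumed
scaladoc("main", sources, classpath, new java.io.File("target/api"), Nil)
val console = new Console(analyzingCompiler)
console(classpath) foreach { err => logger.error(err) }  // apply returns Option[error message]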

View File

@ -1,43 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt
/** Defines a function to call as sbt exits.*/
trait ExitHook extends NotNull
{
/** Provides a name for this hook to be used to provide feedback to the user. */
def name: String
/** Subclasses should implement this method, which is called when this hook is executed. */
def runBeforeExiting(): Unit
}
object ExitHooks
{
/** This is a list of hooks to call when sbt is finished executing.*/
private val exitHooks = new scala.collection.mutable.HashSet[ExitHook]
/** Adds a hook to call before sbt exits. */
private[sbt] def register(hook: ExitHook) { exitHooks += hook }
/** Removes a hook. */
private[sbt] def unregister(hook: ExitHook) { exitHooks -= hook }
/** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */
private[sbt] def runExitHooks(log: Logger)
{
for(hook <- exitHooks.toList)
{
try
{
log.debug("Running exit hook '" + hook.name + "'...")
hook.runBeforeExiting()
}
catch
{
case e =>
{
log.trace(e);
log.error("Error running exit hook '" + hook.name + "': " + e.toString)
}
}
}
}
}

View File

@ -1,126 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
import scala.collection.mutable.{Buffer, HashMap, ListBuffer}
/** A logger that can buffer the logging done on it by the currently executing Thread and
 * then can flush the buffer to the delegate logger provided in the constructor. Use
 * 'startRecording' to start buffering and then 'play' to flush the buffer for the
 * current Thread to the backing logger. The logging level set at the
* time a message is originally logged is used, not the level at the time 'play' is
* called.
*
* This class assumes that it is the only client of the delegate logger.
*
* This logger is thread-safe.
* */
final class BufferedLogger(delegate: Logger) extends AbstractLogger
{
override lazy val ansiCodesSupported = delegate.ansiCodesSupported
private[this] val buffers = wrap.Wrappers.weakMap[Thread, Buffer[LogEvent]]
private[this] var recordingAll = false
private[this] def getOrCreateBuffer = buffers.getOrElseUpdate(key, createBuffer)
private[this] def buffer = if(recordingAll) Some(getOrCreateBuffer) else buffers.get(key)
private[this] def createBuffer = new ListBuffer[LogEvent]
private[this] def key = Thread.currentThread
@deprecated def startRecording() = recordAll()
/** Enables buffering for logging coming from the current Thread. */
def record(): Unit = synchronized { buffers(key) = createBuffer }
/** Enables buffering for logging coming from all Threads. */
def recordAll(): Unit = synchronized{ recordingAll = true }
def buffer[T](f: => T): T =
{
record()
try { f }
finally { Control.trap(stop()) }
}
def bufferAll[T](f: => T): T =
{
recordAll()
try { f }
finally { Control.trap(stopAll()) }
}
/** Flushes the buffer to the delegate logger for the current thread. This method calls logAll on the delegate
* so that the messages are written consecutively. The buffer is cleared in the process. */
def play(): Unit =
synchronized
{
for(buffer <- buffers.get(key))
delegate.logAll(wrap.Wrappers.readOnly(buffer))
}
def playAll(): Unit =
synchronized
{
for(buffer <- buffers.values)
delegate.logAll(wrap.Wrappers.readOnly(buffer))
}
/** Clears buffered events for the current thread and disables buffering. */
def clear(): Unit = synchronized { buffers -= key }
/** Clears buffered events for all threads and disables all buffering. */
def clearAll(): Unit = synchronized { buffers.clear(); recordingAll = false }
/** Plays buffered events for the current thread and disables buffering. */
def stop(): Unit =
synchronized
{
play()
clear()
}
def stopAll(): Unit =
synchronized
{
playAll()
clearAll()
}
def setLevel(newLevel: Level.Value): Unit =
synchronized
{
buffer.foreach{_ += new SetLevel(newLevel) }
delegate.setLevel(newLevel)
}
def getLevel = synchronized { delegate.getLevel }
def getTrace = synchronized { delegate.getTrace }
def setTrace(level: Int): Unit =
synchronized
{
buffer.foreach{_ += new SetTrace(level) }
delegate.setTrace(level)
}
def trace(t: => Throwable): Unit =
doBufferableIf(traceEnabled, new Trace(t), _.trace(t))
def success(message: => String): Unit =
doBufferable(Level.Info, new Success(message), _.success(message))
def log(level: Level.Value, message: => String): Unit =
doBufferable(level, new Log(level, message), _.log(level, message))
def logAll(events: Seq[LogEvent]): Unit =
synchronized
{
buffer match
{
case Some(b) => b ++= events
case None => delegate.logAll(events)
}
}
def control(event: ControlEvent.Value, message: => String): Unit =
doBufferable(Level.Info, new ControlEvent(event, message), _.control(event, message))
private def doBufferable(level: Level.Value, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit =
doBufferableIf(atLevel(level), appendIfBuffered, doUnbuffered)
private def doBufferableIf(condition: => Boolean, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit =
synchronized
{
if(condition)
{
buffer match
{
case Some(b) => b += appendIfBuffered
case None => doUnbuffered(delegate)
}
}
}
}
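A usage sketch of the buffering described in the class comment; the delegate is assumed to be any AbstractLogger, for example a ConsoleLogger:
val buffered = new BufferedLogger(delegate)
buffered.bufferAll {
	buffered.info("queued until the block completes")
	buffered.warn("so output from concurrent tasks is not interleaved")
}
// bufferAll ends by calling stopAll(), which plays the buffered events and then clears them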

View File

@ -1,494 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt
/** This file provides the parallel execution engine of sbt. It is a fairly general module, with pluggable Schedulers and Strategies.
*
 * There are three main components to the engine: Distributors, Schedulers, and Strategies.
*
* A Scheduler provides work that is ready to execute. The main type of Scheduler in sbt is a scheduler
 * of nodes in a directed, acyclic graph. This type of scheduler provides a node's work once the node's
 * dependencies have finished executing successfully. Another type of scheduler is a MultiScheduler, which draws work
* from sub-schedulers.
*
 * A Strategy is used by a Scheduler to select the work to process from the work that is ready. It is notified as work
 * becomes ready and is then asked which of that ready work to process next. The main Strategy in sbt is the
* OrderedStrategy, which prioritizes work according to some ordering defined by its constructor. The primary ordering
* used in sbt is based on the longest length of the processing path that includes the node being ordered.
*
 * A Distributor uses a Scheduler to obtain work, up to the maximum amount allowed to run at once. It runs each
* unit of work in its own Thread.
**/
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.{immutable, mutable}
import immutable.TreeSet
/** Interface to the Distributor/Scheduler system for running tasks with dependencies described by a directed acyclic graph.*/
object ParallelRunner
{
/** Executes work for nodes in an acyclic directed graph with root node `node`. The name of a node is provided
* by the `name` function, the work to perform for a node by `action`, and the logger to use for a node by `log`.
* The maximum number of tasks to execute simultaneously is `maximumTasks`. */
def run[D <: Dag[D]](node: D, name: D => String, action: D => Option[String], maximumTasks: Int, log: D => Logger): List[WorkFailure[D]] =
{
val info = DagInfo(node)
// Create a strategy that gives each node a uniform self cost and uses the maximum cost to execute it and the nodes that depend on it
// to determine which node to run. The self cost could be modified to include more information about a node, such as the size of input files
val strategy = defaultStrategy(info)
val jobScheduler = CompoundScheduler(new DagScheduler(info, strategy), strategy)
val distributor = new Distributor(jobScheduler, action, maximumTasks, log)
val result = distributor.run().toList
for( WorkFailure(work, message) <- result ) yield WorkFailure(work, "Error running " + name(work) + ": " + message)
}
def dagScheduler[D <: Dag[D]](node: D) =
{
val info = DagInfo(node)
new DagScheduler(info, defaultStrategy(info))
}
private def defaultStrategy[D <: Dag[D]](info: DagInfo[D]) = MaxPathStrategy((d: D) => 1, info)
def emptyScheduler[D]: Scheduler[D] =
new Scheduler[D]
{
/** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
* is encapsulated in this object.*/
def run: Run = new Run
{
def complete(d: D, result: Option[String]) {}
def hasPending = false
/**Returns true if this scheduler has no more work to be done, ever.*/
def isComplete = true
def next(max: Int) = Nil
def failures = Nil
}
}
}
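A minimal end-to-end sketch of the engine being removed in this file, using a throwaway node type; Step, its fields, the printed action, and the logger are assumptions made for illustration:
final class Step(val label: String, val dependencies: List[Step]) extends Dag[Step]
val compile = new Step("compile", Nil)
val test    = new Step("test", List(compile))
val doc     = new Step("doc", List(compile))
val root    = new Step("all", List(test, doc))
val failures = ParallelRunner.run[Step](
	root,
	_.label,                                // name of a node
	step => { println(step.label); None },  // action: None signals success
	2,                                      // run at most two units of work at once
	_ => log)                               // log: Logger, assumed in scope
failures foreach { f => log.error(f.toString) }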
/** Requests work from `scheduler` and processes it using `doWork`. This class limits the number of units of work processed at any given time
* to `workers`.*/
final class Distributor[D](scheduler: Scheduler[D], doWork: D => Option[String], workers: Int, log: D => Logger) extends NotNull
{
require(workers > 0)
final def run(): Iterable[WorkFailure[D]] = (new Run).run()
private final class Run extends NotNull
{
private[this] val schedule = scheduler.run
/** The number of threads currently running. */
private[this] var running = 0
/** Pending notifications of completed work. */
private[this] val complete = new java.util.concurrent.LinkedBlockingQueue[Done]
private[Distributor] def run(): Iterable[WorkFailure[D]] =
{
next()
if(isIdle && !schedule.hasPending) // test if all work is complete
schedule.failures
else
{
waitForCompletedWork() // wait for some work to complete
run() // continue
}
}
// true if the maximum number of worker threads are currently running
private def atMaximum = running == workers
private def availableWorkers = workers - running
// true if no worker threads are currently running
private def isIdle = running == 0
// process more work
private def next()
{
// if the maximum threads are being used, do nothing
// if all work is complete or the scheduler is waiting for current work to complete, do nothing
if(!atMaximum && schedule.hasPending)
{
val nextWork = schedule.next(availableWorkers)
val nextSize = nextWork.size
assume(nextSize <= availableWorkers, "Scheduler provided more work (" + nextSize + ") than allowed (" + availableWorkers + ")")
assume(nextSize > 0 || !isIdle, "Distributor idle and the scheduler indicated work pending, but provided no work.")
nextWork.foreach(process)
}
}
// wait on the blocking queue `complete` until some work finishes and notify the scheduler
private def waitForCompletedWork()
{
require(running > 0)
val done = complete.take()
running -= 1
schedule.complete(done.data, done.result)
}
private def process(data: D)
{
require(running + 1 <= workers)
running += 1
new Worker(data).start()
}
private class Worker(data: D) extends Thread with NotNull
{
override def interrupt() {}
override def run()
{
val result = Control.trapUnit("", log(data))(doWork(data))
complete.put( new Done(result, data) )
}
}
}
private final class Done(val result: Option[String], val data: D) extends NotNull
}
final case class WorkFailure[D](work: D, message: String) extends NotNull
{
override def toString = message
}
/** Schedules work of type D. A Scheduler determines what work is ready to be processed.
* A Scheduler is itself immutable. It creates a mutable object for each scheduler run.*/
trait Scheduler[D] extends NotNull
{
/** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
* is encapsulated in this object.*/
def run: Run
trait Run extends NotNull
{
/** Notifies this scheduler that work has completed with the given result (Some with the error message or None if the work succeeded).*/
def complete(d: D, result: Option[String]): Unit
/** Returns true if there is any more work to be done, although remaining work can be blocked
* waiting for currently running work to complete.*/
def hasPending: Boolean
/**Returns true if this scheduler has no more work to be done, ever.*/
def isComplete: Boolean
/** Returns up to 'max' units of work. `max` is always positive. The returned sequence cannot be empty if there is
* no work currently being processed.*/
def next(max: Int): Seq[D]
/** A list of failures that occurred to this point, as reported to the `complete` method. */
def failures: Iterable[WorkFailure[D]]
}
}
/** A Strategy selects the work to process from work that is ready to be processed.*/
private trait ScheduleStrategy[D] extends NotNull
{
/** Starts a new run. The returned object is a new Run, representing a single strategy run. All state for the run
 * is encapsulated in this object.*/
def run: Run
trait Run extends NotNull
{
/** Adds the given work to the list of work that is ready to run.*/
def workReady(dep: D): Unit
/** Returns true if there is work ready to be run. */
def hasReady: Boolean
/** Provides up to `max` units of work. `max` is always positive and this method is not called
 * if hasReady is false. The returned list cannot be empty if there is work ready to be run.*/
def next(max: Int): List[D]
/** If this strategy returns different work from `next` than is provided to `workReady`,
* this method must map back to the original work.*/
def reverseMap(dep: D): Iterable[D]
}
}
/** A scheduler for nodes of a directed-acyclic graph. It requires the root of the graph
* and a strategy to select which available nodes to run on limited resources.*/
private[sbt] final class DagScheduler[D <: Dag[D]](info: DagInfo[D], strategy: ScheduleStrategy[D]) extends Scheduler[D]
{
def run: Run = new Run
{
val infoRun = info.run
val strategyRun = strategy.run
// find nodes that are ready to be run (no dependencies)
{
val startReady = for( (key, value) <- infoRun.remainingDepsRun if(value.isEmpty)) yield key
infoRun.remainingDepsRun --= startReady
startReady.foreach(strategyRun.workReady)
}
val failures = new mutable.ListBuffer[WorkFailure[D]]
def next(max: Int) = strategyRun.next(max)
def complete(work: D, result: Option[String])
{
for(originalWork <- strategyRun.reverseMap(work))
{
result match
{
case None => infoRun.complete(originalWork, strategyRun.workReady)
case Some(errorMessage) =>
infoRun.clear(originalWork)
failures += WorkFailure(originalWork, errorMessage)
}
}
}
def isComplete = !strategyRun.hasReady && infoRun.reverseDepsRun.isEmpty
// the strategy might not have any work ready if the remaining work needs currently executing work to finish first
def hasPending = strategyRun.hasReady || !infoRun.remainingDepsRun.isEmpty
}
}
private object MaxPathStrategy
{
def apply[D <: Dag[D]](selfCost: D => Int, info: DagInfo[D]): ScheduleStrategy[D] =
{
val cost = // compute the cost of the longest execution path ending at each node
{
val cost = new mutable.HashMap[D, Int]
def computeCost(work: D): Int = info.reverseDeps.getOrElse(work, immutable.Set.empty[D]).foldLeft(0)(_ max getCost(_)) + selfCost(work)
def getCost(work: D): Int = cost.getOrElseUpdate(work, computeCost(work))
info.remainingDeps.keys.foreach(getCost)
wrap.Wrappers.readOnly(cost)
}
// create a function to compare units of work. This is not as simple as cost(a) compare cost(b) because it cannot return 0 for
// unequal nodes (at least for the Ordered comparison)
// 2.8.0 uses Ordering
implicit val compareOrdering: Ordering[D] =
new Ordering[D]
{
def compare(a: D, b: D) =
{
val base = cost(a) compare cost(b)
if(base == 0)
a.hashCode compare b.hashCode // this is required because TreeSet interprets 0 as equal
else
base
}
}
// 2.7.x uses an implicit view to Ordered
implicit val compare =
(a: D) => new Ordered[D] {
def compare(b: D) = compareOrdering.compare(a, b)
}
new OrderedStrategy(new TreeSet())
}
}
/** A strategy that adds work to a tree and selects the last key as the next work to be done. */
private class OrderedStrategy[D](ready: TreeSet[D]) extends ScheduleStrategy[D]
{
def run = new Run
{
private[this] var readyRun = ready
def next(max: Int): List[D] = nextImpl(max, Nil)
private[this] def nextImpl(remaining: Int, accumulated: List[D]): List[D] =
{
if(remaining <= 0 || readyRun.isEmpty)
accumulated
else
{
val next = readyRun.lastKey
readyRun -= next
nextImpl(remaining - 1, next :: accumulated)
}
}
def workReady(dep: D) { readyRun += dep }
def hasReady = !readyRun.isEmpty
def reverseMap(dep: D) = dep :: Nil
}
}
/** A class that represents state for a DagScheduler and that MaxPathStrategy uses to initialize an OrderedStrategy. */
private final class DagInfo[D <: Dag[D]](val remainingDeps: immutable.Map[D, immutable.Set[D]],
val reverseDeps: immutable.Map[D, immutable.Set[D]]) extends NotNull
{
def run = new Run
final class Run extends NotNull
{
val remainingDepsRun = DagInfo.mutableMap(remainingDeps)
val reverseDepsRun = DagInfo.mutableMap(reverseDeps)
/** Called when work does not complete successfully and so all work that (transitively) depends on the work
* must be removed from the maps. */
def clear(work: D)
{
remainingDepsRun -= work
foreachReverseDep(work)(clear)
}
 /** Called when work completes properly. `ready` is invoked for each unit of work that is now
 * ready to go (because it was only waiting for `work` to complete).*/
def complete(work: D, ready: D => Unit)
{
def completed(dependsOnCompleted: D)
{
for(remainingDependencies <- remainingDepsRun.get(dependsOnCompleted))
{
remainingDependencies -= work
if(remainingDependencies.isEmpty)
{
remainingDepsRun -= dependsOnCompleted
ready(dependsOnCompleted)
}
}
}
foreachReverseDep(work)(completed)
}
private def foreachReverseDep(work: D)(f: D => Unit) { reverseDepsRun.removeKey(work).foreach(_.foreach(f)) }
}
}
/** Constructs forward and reverse dependency map for the given Dag root node. */
private object DagInfo
{
/** Constructs the reverse dependency map from the given Dag and
* puts the forward dependencies into a map */
def apply[D <: Dag[D]](root: D): DagInfo[D] =
{
val remainingDeps = new mutable.HashMap[D, immutable.Set[D]]
val reverseDeps = new mutable.HashMap[D, mutable.Set[D]]
def visitIfUnvisited(node: D): Unit = remainingDeps.getOrElseUpdate(node, processDependencies(node))
def processDependencies(node: D): Set[D] =
{
val workDependencies = node.dependencies
workDependencies.foreach(visitIfUnvisited)
for(dep <- workDependencies)
reverseDeps.getOrElseUpdate(dep, new mutable.HashSet[D]) += node
immutable.HashSet(workDependencies.toSeq: _*)
}
visitIfUnvisited(root)
new DagInfo(immutable.HashMap(remainingDeps.toSeq : _*), immute(reverseDeps) )
}
/** Convert a mutable Map with mutable Sets for values to an immutable Map with immutable Sets for values. */
private def immute[D](map: mutable.Map[D, mutable.Set[D]]): immutable.Map[D, immutable.Set[D]] =
{
val immutedSets = map.map { case (key, value) =>(key, immutable.HashSet(value.toSeq : _*)) }
immutable.HashMap(immutedSets.toSeq :_*)
}
/** Convert an immutable Map with immutable Sets for values to a mutable Map with mutable Sets for values. */
private def mutableMap[D](map: immutable.Map[D, immutable.Set[D]]): mutable.Map[D, mutable.Set[D]] =
{
val mutableSets = map.map { case (key, value) =>(key, mutable.HashSet(value.toSeq : _*)) }
mutable.HashMap(mutableSets.toSeq :_*)
}
}
/** A scheduler that can get work from sub-schedulers. The `schedulers` argument to the constructor
* is a sequence of the initial schedulers and the key to provide to a client that uses the 'detailedComplete'
* method when the scheduler completes its work.*/
private final class MultiScheduler[D, T](schedulers: (Scheduler[D], T)*) extends Scheduler[D]
{
/** Returns a Run instance that represents a scheduler run.*/
def run = new MultiRun
final class MultiRun extends Run
{
val owners = new mutable.HashMap[D, Scheduler[D]#Run]
val failures = new mutable.ListBuffer[WorkFailure[D]]
val schedules = mutable.HashMap[Scheduler[D]#Run, T](schedulers.map { case (scheduler, completeKey) => (scheduler.run, completeKey)} : _*)
def +=(schedule: Scheduler[D]#Run, completeKey: T) { schedules(schedule) = completeKey }
def isComplete = schedules.keys.forall(_.isComplete)
def hasPending = schedules.keys.exists(_.hasPending)
def next(max: Int) = nextImpl(max, schedules.keys.toList, Nil)
private def nextImpl(max: Int, remaining: List[Scheduler[D]#Run], accumulatedWork: List[D]): Seq[D] =
{
if(max == 0 || remaining.isEmpty)
accumulatedWork
else
{
val currentSchedule = remaining.head
if(currentSchedule.hasPending)
{
val newWork = currentSchedule.next(max).toList
newWork.foreach(work => owners.put(work, currentSchedule))
nextImpl(max - newWork.size, remaining.tail, newWork ::: accumulatedWork)
}
else
nextImpl(max, remaining.tail, accumulatedWork)
}
}
def complete(work: D, result: Option[String]) { detailedComplete(work, result) }
def detailedComplete(work: D, result: Option[String]) =
{
def complete(forOwner: Scheduler[D]#Run) =
{
forOwner.complete(work, result)
if(forOwner.isComplete)
{
failures ++= forOwner.failures
Some(forOwner, schedules.removeKey(forOwner).get)
}
else
None
}
owners.removeKey(work).flatMap(complete)
}
}
}
/** This scheduler allows a unit of work to provide nested work.
*
* When a unit of work that implements CompoundWork is returned for processing by `multi`, this scheduler will request the work's
* nested scheduler that represents the nested work to be done. The new scheduler will be added to `multi`. When the new scheduler
* is finished providing work, a final scheduler is run.*/
private final class CompoundScheduler[D](multi: MultiScheduler[D, Option[FinalWork[D]]], finalWorkStrategy: ScheduleStrategy[D]) extends Scheduler[D]
{
def run: Run = new Run
{
val multiRun = multi.run
val strategyRun = finalWorkStrategy.run
def isComplete = multiRun.isComplete && !strategyRun.hasReady
def hasPending = strategyRun.hasReady || multiRun.hasPending || multiRun.schedules.values.exists(_.isDefined)
def complete(work: D, result: Option[String]) =
{
for( (scheduler, Some(finalWorkTodo)) <- multiRun.detailedComplete(work, result) )
{
multiRun += (finalWorkTodo.doFinally.run, None)
if(scheduler.failures.isEmpty)
strategyRun workReady finalWorkTodo.compound
else
multiRun.complete(finalWorkTodo.compound, Some("One or more subtasks failed"))
}
}
def failures = multiRun.failures
def next(max: Int) = nextImpl(max, Nil)
private def nextImpl(max: Int, processedNextWork: List[D]): Seq[D] =
{
if(max > 0)
{
if(strategyRun.hasReady)
{
val newWork = strategyRun.next(max)
nextImpl(max - newWork.size, newWork ::: processedNextWork)
}
else if(multiRun.hasPending)
{
val multiWork = multiRun.next(max)
if(multiWork.isEmpty)
processedNextWork
else
{
val expandedWork = (processedNextWork /: multiWork)(expand)
val remaining = max - (expandedWork.size - processedNextWork.size)
nextImpl(remaining, expandedWork)
}
}
else
processedNextWork
}
else
processedNextWork
}
private def expand(accumulate: List[D], work: D): List[D] =
{
work match
{
case c: CompoundWork[D] =>
val subWork = c.work
addFinal(subWork.scheduler, new FinalWork(work, subWork.doFinally))
accumulate
case _ => work :: accumulate
}
}
private def addFinal(schedule: Scheduler[D], work: FinalWork[D]) { multiRun += (schedule.run, Some(work)) }
}
}
private object CompoundScheduler
{
def apply[D](scheduler: Scheduler[D], strategy: ScheduleStrategy[D]) : Scheduler[D] =
new CompoundScheduler(new MultiScheduler[D, Option[FinalWork[D]]]( (scheduler, None) ), strategy)
}
private final class FinalWork[D](val compound: D, val doFinally: Scheduler[D]) extends NotNull
/** This represents nested work. The work provided by `scheduler` is processed first. The work provided by `doFinally` is processed
* after `scheduler` completes regardless of the success of `scheduler`.*/
final class SubWork[D] private (val scheduler: Scheduler[D], val doFinally: Scheduler[D]) extends NotNull
object SubWork
{
def apply[D](scheduler: Scheduler[D], doFinally: Scheduler[D]): SubWork[D] = new SubWork(scheduler, doFinally)
def apply[D](scheduler: Scheduler[D]): SubWork[D] = SubWork(scheduler, ParallelRunner.emptyScheduler)
def apply[D <: Dag[D]](node: D): SubWork[D] = SubWork(ParallelRunner.dagScheduler(node))
def apply[D <: Dag[D]](node: D, doFinally: D): SubWork[D] = SubWork(ParallelRunner.dagScheduler(node), ParallelRunner.dagScheduler(doFinally))
}
/** Work that implements this interface provides nested work to be done before this work is processed.*/
trait CompoundWork[D] extends NotNull
{
def work: SubWork[D]
}

View File

@ -0,0 +1,79 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt
import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings}
import scala.tools.nsc.interpreter.InteractiveReader
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.util.ClassPath
/** This module is an interface to starting the scala interpreter or runner.*/
object ProjectConsole
{
/** Create a settings object and execute the provided function if the settings are created ok.*/
private def createSettings(log: Logger)(f: Settings => Option[String]) =
{
val command = new GenericRunnerCommand(Nil, message => log.error(message))
if(command.ok)
f(command.settings)
else
Some(command.usageMsg)
}
/** Starts a Scala interpreter session with 'project' bound to the value 'current' in the console
* and the following two lines executed:
* import sbt._
* import current._
*/
def apply(project: Project): Option[String] =
{
import project.log
createSettings(log) { interpreterSettings =>
createSettings(log) { compilerSettings =>
log.info("Starting scala interpreter with project definition " + project.name + " ...")
log.info("")
Control.trapUnit("Error during session: ", log)
{
JLine.withJLine {
val loop = new ProjectInterpreterLoop(compilerSettings, project)
executeTrapExit(loop.main(interpreterSettings), log)
}
}
}}
}
/** A custom InterpreterLoop with the purpose of creating an interpreter with Project 'project' bound to the value 'current',
* and the following three lines interpreted:
* import sbt._
* import Process._
* import current._.
* To do this,
* 1) The compiler uses a different settings instance: 'compilerSettings', which will have its classpath set to include
* the Scala compiler and library jars and the classpath used to compile the project.
* 2) The parent class loader for the interpreter is the loader that loaded the project, so that the project can be bound to a variable
* in the interpreter.
*/
private class ProjectInterpreterLoop(compilerSettings: Settings, project: Project) extends InterpreterLoop
{
override def createInterpreter()
{
val projectLoader = project.getClass.getClassLoader
val classpath = Project.getProjectClasspath(project)
val fullClasspath = classpath.get ++ Path.fromFiles(project.info.app.scalaProvider.jars)
compilerSettings.classpath.value = Path.makeString(fullClasspath)
project.log.debug(" console-project classpath:\n\t" + fullClasspath.mkString("\n\t"))
in = InteractiveReader.createDefault()
interpreter = new Interpreter(settings)
{
override protected def parentClassLoader = projectLoader
override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
}
interpreter.setContextClassLoader()
interpreter.bind("current", project.getClass.getName, project)
interpreter.interpret("import sbt._")
interpreter.interpret("import Process._")
interpreter.interpret("import current._")
}
}
}
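The entry point above reports failure through its Option result; a calling sketch, where myProject is an assumed Project instance:
ProjectConsole(myProject) match
{
	case Some(err) => myProject.log.error(err)  // session failed to start or ended with an error
	case None => ()                             // session ended normally
}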

View File

@ -82,7 +82,6 @@ trait TaskManager{
def work = createWork
}
def dynamic(createTask: => Project#Task) = new CompoundTask(SubWork[Project#Task](checkDynamic(createTask)))
@deprecated def compoundTask(createTask: => Project#Task) = dynamic(createTask)
/** Verifies that the given dynamically created task does not depend on any statically defined tasks.
* Returns the task if it is valid.*/
private def checkDynamic(task: Project#Task) =

View File

@ -1,67 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt.impl
import sbt._
import java.util.Properties
import java.io.{File, FileInputStream, FileOutputStream, InputStream, OutputStream}
import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set}
private[sbt] object PropertiesUtilities
{
def write(properties: Properties, label: String, to: Path, log: Logger) =
FileUtilities.writeStream(to.asFile, log)(output => { properties.store(output, label); None })
def load(properties: Properties, from: Path, log: Logger): Option[String] =
{
val file = from.asFile
if(file.exists)
FileUtilities.readStream(file, log)( input => { properties.load(input); None })
else
None
}
def propertyNames(properties: Properties): Iterable[String] =
wrap.Wrappers.toList(properties.propertyNames).map(_.toString)
}
private[sbt] object MapUtilities
{
def write[Key, Value](map: Map[Key, Value], label: String, to: Path, log: Logger)(implicit keyFormat: Format[Key], valueFormat: Format[Value]): Option[String] =
{
val properties = new Properties
map foreach { pair => properties.setProperty(keyFormat.toString(pair._1), valueFormat.toString(pair._2)) }
PropertiesUtilities.write(properties, label, to, log)
}
def read[Key, Value](map: Map[Key, Value], from: Path, log: Logger)(implicit keyFormat: Format[Key], valueFormat: Format[Value]): Option[String] =
{
map.clear
val properties = new Properties
PropertiesUtilities.load(properties, from, log) orElse
{
for(name <- PropertiesUtilities.propertyNames(properties))
map.put( keyFormat.fromString(name), valueFormat.fromString(properties.getProperty(name)))
None
}
}
def readStrings(label: String, envBackingPath: Path, log: Logger): scala.collection.Map[String, String] =
{
val map = new HashMap[String, String]
for(errorMsg <- read(map, envBackingPath, log))
log.error("Error loading properties from " + label + " : " + errorMsg)
map.readOnly
}
def all[Key, Value](map: Map[Key, Set[Value]]): Iterable[Value] =
map.values.toList.flatMap(set => set.toList)
def readOnlyIterable[Key, Value](i: Map[Key, Set[Value]]): Iterable[(Key, scala.collection.Set[Value])] =
for( (key, set) <- i.elements.toList) yield (key, wrap.Wrappers.readOnly(set))//.readOnly)
def mark[Key, Value](source: Key, map: Map[Key, Set[Value]])
{
if(!map.contains(source))
map.put(source, new HashSet[Value])
}
def add[Key, Value](key: Key, value: Value, map: Map[Key, Set[Value]]): Unit =
map.getOrElseUpdate(key, new HashSet[Value]) + value
}

View File

@ -1,123 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt.wrap
// This file exists for compatibility between Scala 2.7.x and 2.8.0
import java.util.{Map => JMap, Set => JSet}
private[sbt] object Wrappers
{
def javaMap[K,V](pairs: (K,V)*) =
{
val basic = basicMap[K,V]
for( (k,v) <- pairs)
basic(k) = v
basic.underlying
}
def basicMap[K,V] = new MutableMapWrapper(new java.util.HashMap[K,V])
def identityMap[K,V] = new MutableMapWrapper(new java.util.IdentityHashMap[K,V])
def weakMap[K,V] = new MutableMapWrapper(new java.util.WeakHashMap[K,V])
def toList[K,V](s: java.util.Map[K,V]): List[(K,V)] = toList(s.entrySet).map(e => (e.getKey, e.getValue))
def toList[T](s: java.util.Collection[T]): List[T] = toList(s.iterator)
def toList[T](s: java.util.Iterator[T]): List[T] =
{
def add(l: List[T]): List[T] =
if(s.hasNext)
add(s.next() :: l)
else
l
add(Nil).reverse
}
def toList[T](s: java.util.Enumeration[T]): List[T] =
{
def add(l: List[T]): List[T] =
if(s.hasMoreElements)
add(s.nextElement() :: l)
else
l
add(Nil).reverse
}
def readOnly[K,V](map: scala.collection.mutable.Map[K,V]): scala.collection.Map[K,V] = map//.readOnly
def readOnly[T](set: scala.collection.mutable.Set[T]): scala.collection.Set[T] = set//.readOnly
def readOnly[T](buffer: scala.collection.mutable.Buffer[T]): Seq[T] = buffer//.readOnly
}
private[sbt] sealed abstract class Iterable[T] extends NotNull
{
def foreach(f: T => Unit) = toList.foreach(f)
def toList: List[T]
}
private[sbt] sealed trait Removable[T] extends NotNull
{
def -=(t: T) : Unit
def --=(all: Iterable[T]) { all.foreach(-=) }
def --=(all: scala.Iterable[T]) { all.foreach(-=) }
}
private[sbt] sealed trait Addable[T] extends NotNull
{
def +=(t: T) : Unit
def ++=(all: Iterable[T]) { all.foreach(+=) }
def ++=(all: scala.Iterable[T]) { all.foreach(+=) }
}
private[sbt] sealed abstract class Set[T] extends Iterable[T]
{
def contains(t: T): Boolean
}
private[sbt] sealed class SetWrapper[T](val underlying: JSet[T]) extends Set[T]
{
def contains(t: T) = underlying.contains(t)
def toList =Wrappers.toList(underlying.iterator)
}
private[sbt] final class MutableSetWrapper[T](wrapped: JSet[T]) extends SetWrapper[T](wrapped) with Addable[T] with Removable[T]
{
def +=(t: T) { underlying.add(t) }
def -=(t: T) { underlying.remove(t) }
def readOnly: Set[T] = this
}
private[sbt] sealed abstract class Map[K,V] extends Iterable[(K,V)]
{
def apply(key: K): V
def get(key: K): Option[V]
def containsKey(key: K): Boolean
final def getOrElse[V2 >: V](key: K, default: => V2): V2 =
get(key) match
{
case Some(value) => value
case None => default
}
}
private[sbt] sealed abstract class MapWrapper[K,V](val underlying: JMap[K,V]) extends Map[K,V]
{
final def apply(key: K) = underlying.get(key)
final def get(key: K) =
{
val value = underlying.get(key)
if(value == null)
None
else
Some(value)
}
final def containsKey(key: K) = underlying.containsKey(key)
final def toList = Wrappers.toList(underlying)
final def values = toList.map(_._2)
}
private[sbt] sealed class MutableMapWrapper[K,V](wrapped: JMap[K,V]) extends MapWrapper[K,V](wrapped) with Removable[K] with Addable[(K,V)]
{
final def getOrElseUpdate(key: K, default: => V): V =
get(key) match
{
case Some(value) => value
case None =>
val newValue = default
underlying.put(key, newValue)
newValue
}
final def clear() = underlying.clear()
final def update(key: K, value: V) { underlying.put(key, value) }
final def +=(pair: (K, V) ) { update(pair._1, pair._2) }
final def -=(key: K) { underlying.remove(key) }
final def remove(key: K) = underlying.remove(key)
final def readOnly: Map[K,V] = this
}

View File

@ -4,7 +4,6 @@
package sbt
package classfile
import ClassfileLogger._
import scala.collection.mutable
import mutable.{ArrayBuffer, Buffer}
import java.io.File
@ -14,28 +13,26 @@ import java.lang.reflect.Modifier.{STATIC, PUBLIC, ABSTRACT}
private[sbt] object Analyze
{
def apply[T](basePath: Path, outputDirectory: Path, sources: Iterable[Path], roots: Iterable[Path], log: ClassfileLogger)
(allProducts: => scala.collection.Set[Path], analysis: xsbti.AnalysisCallback, loader: ClassLoader)
(compile: => Option[String]): Option[String] =
def apply[T](outputDirectory: Path, sources: Seq[Path], roots: Seq[Path], log: Logger)(analysis: xsbti.AnalysisCallback, loader: ClassLoader)(compile: => Unit)
{
val sourceSet = Set(sources.toSeq : _*)
val classesFinder = outputDirectory ** GlobFilter("*.class")
val existingClasses = classesFinder.get
def load(tpe: String, errMsg: => String): Option[Class[_]] =
def load(tpe: String, errMsg: => Option[String]): Option[Class[_]] =
try { Some(Class.forName(tpe, false, loader)) }
catch { case e => log.warn(errMsg + " : " +e.toString); None }
catch { case e => errMsg.foreach(msg => log.warn(msg + " : " +e.toString)); None }
// runs after compilation
def analyze()
{
val allClasses = Set(classesFinder.get.toSeq : _*)
val newClasses = allClasses -- existingClasses -- allProducts
val newClasses = allClasses -- existingClasses
val productToSource = new mutable.HashMap[Path, Path]
val sourceToClassFiles = new mutable.HashMap[Path, Buffer[ClassFile]]
val superclasses = analysis.superclassNames flatMap { tpe => load(tpe, "Could not load superclass '" + tpe + "'") }
val superclasses = analysis.superclassNames flatMap { tpe => load(tpe, None) }
val annotations = analysis.annotationNames.toSeq
def annotated(fromClass: Seq[Annotation]) = if(fromClass.isEmpty) Nil else annotations.filter(fromClass.map(_.annotationType.getName).toSet)
@ -58,7 +55,7 @@ private[sbt] object Analyze
for( (source, classFiles) <- sourceToClassFiles )
{
for(classFile <- classFiles if isTopLevel(classFile);
cls <- load(classFile.className, "Could not load '" + classFile.className + "' to check for superclasses.") )
cls <- load(classFile.className, Some("Could not load '" + classFile.className + "' to check for superclasses.")) )
{
for(superclass <- superclasses)
if(superclass.isAssignableFrom(cls))
@ -78,7 +75,7 @@ private[sbt] object Analyze
{
trapAndLog(log)
{
val loaded = load(tpe, "Problem processing dependencies of source " + source)
val loaded = load(tpe, Some("Problem processing dependencies of source " + source))
for(clazz <- loaded; file <- ErrorHandling.convert(IO.classLocationFile(clazz)).right)
{
if(file.isDirectory)
@ -108,7 +105,13 @@ private[sbt] object Analyze
}
}
compile orElse ClassfileLogger.convertErrorMessage(log)(analyze()).left.toOption
compile
analyze()
}
private def trapAndLog(log: Logger)(execute: => Unit)
{
try { execute }
catch { case e => log.trace(e); log.error(e.toString) }
}
private def guessSourceName(name: String) = Some( takeToDollar(trimClassExt(name)) )
private def takeToDollar(name: String) =
@ -119,7 +122,7 @@ private[sbt] object Analyze
private final val ClassExt = ".class"
private def trimClassExt(name: String) = if(name.endsWith(ClassExt)) name.substring(0, name.length - ClassExt.length) else name
private def resolveClassFile(file: File, className: String): File = (file /: (className.replace('.','/') + ClassExt).split("/"))(new File(_, _))
private def guessSourcePath(sources: scala.collection.Set[Path], roots: Iterable[Path], classFile: ClassFile, log: ClassfileLogger) =
private def guessSourcePath(sources: scala.collection.Set[Path], roots: Iterable[Path], classFile: ClassFile, log: Logger) =
{
val classNameParts = classFile.className.split("""\.""")
val lastIndex = classNameParts.length - 1

View File

@ -1,25 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Mark Harrah
*/
package sbt
package classfile
trait ClassfileLogger
{
def warn(msg: => String): Unit
def error(msg: => String): Unit
def trace(exception: => Throwable): Unit
}
object ClassfileLogger
{
def convertErrorMessage[T](log: ClassfileLogger)(t: => T): Either[String, T] =
{
try { Right(t) }
catch { case e: Exception => log.trace(e); Left(e.toString) }
}
def trapAndLog(log: ClassfileLogger)(execute: => Unit)
{
try { execute }
catch { case e => log.trace(e); log.error(e.toString) }
}
}

View File

@ -200,7 +200,7 @@ class IncludePackagesFilter(include: Iterable[String]) extends PackageFilter(inc
def include(className: String): Boolean = matches(className)
}
private class LazyFrameworkLoader(runnerClassName: String, urls: Array[URL], parent: ClassLoader, grandparent: ClassLoader)
private[sbt] class LazyFrameworkLoader(runnerClassName: String, urls: Array[URL], parent: ClassLoader, grandparent: ClassLoader)
extends LoaderBase(urls, parent)
{
def doLoadClass(className: String): Class[_] =

View File

@ -1,5 +1,5 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 David MacIver, Mark Harrah
* Copyright 2008, 2009, 2010 David MacIver, Mark Harrah
*/
package sbt;
@ -11,11 +11,12 @@ trait Dag[Node <: Dag[Node]]{
}
object Dag
{
import scala.collection.mutable;
import scala.collection.{mutable, JavaConversions};
import JavaConversions.{asIterable, asSet}
def topologicalSort[T](root: T)(dependencies: T => Iterable[T]) = {
val discovered = new mutable.HashSet[T];
val finished = new wrap.MutableSetWrapper(new java.util.LinkedHashSet[T])
val discovered = new mutable.HashSet[T]
val finished = asSet(new java.util.LinkedHashSet[T])
def visit(dag : T){
if (!discovered(dag)) {
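The generic topologicalSort only needs a root value and a dependency function, so even a plain Map can serve as the second argument; a quick sketch with illustrative values:
val deps = Map("c" -> List("b"), "b" -> List("a"), "a" -> List[String]())
val sorted = Dag.topologicalSort("c")(deps)
// dependencies come first, so the expected order is a, b, c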

View File

@ -10,9 +10,9 @@ import scala.collection.mutable.HashSet
object DagSpecification extends Properties("Dag")
{
specify("No repeated nodes", (dag: TestDag) => isSet(dag.topologicalSort))
specify("Sort contains node", (dag: TestDag) => dag.topologicalSort.contains(dag))
specify("Dependencies precede node", (dag: TestDag) => dependenciesPrecedeNodes(dag.topologicalSort))
property("No repeated nodes") = forAll{ (dag: TestDag) => isSet(dag.topologicalSort) }
property("Sort contains node") = forAll{ (dag: TestDag) => dag.topologicalSort.contains(dag) }
property("Dependencies precede node") = forAll{ (dag: TestDag) => dependenciesPrecedeNodes(dag.topologicalSort) }
implicit lazy val arbTestDag: Arbitrary[TestDag] = Arbitrary(Gen.sized(dagGen))
private def dagGen(nodeCount: Int): Gen[TestDag] =

View File

@ -0,0 +1,32 @@
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Mark Harrah
*/
package sbt
/** Defines a function to call as sbt exits.*/
trait ExitHook extends NotNull
{
/** Provides a name for this hook to be used to provide feedback to the user. */
def name: String
/** Subclasses should implement this method, which is called when this hook is executed. */
def runBeforeExiting(): Unit
}
trait ExitHookRegistry
{
def register(hook: ExitHook): Unit
def unregister(hook: ExitHook): Unit
}
class ExitHooks extends ExitHookRegistry
{
private val exitHooks = new scala.collection.mutable.HashSet[ExitHook]
def register(hook: ExitHook) { exitHooks += hook }
def unregister(hook: ExitHook) { exitHooks -= hook }
/** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */
def runExitHooks(debug: String => Unit): List[Throwable] =
exitHooks.toList.flatMap( hook =>
ErrorHandling.wideConvert( hook.runBeforeExiting() ).left.toOption
)
}
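A sketch of wiring the new registry; the hook body and the debug sink are examples only:
val hooks = new ExitHooks
hooks.register(new ExitHook {
	def name = "close-connections"
	def runBeforeExiting() { println("shutting down cleanly") }
})
// failures are trapped and returned rather than logged by the registry itself
val errors: List[Throwable] = hooks.runExitHooks(msg => println("[debug] " + msg))
errors foreach { e => e.printStackTrace() }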

View File

@ -3,60 +3,8 @@
*/
package sbt
import impl.PropertiesUtilities
import scala.reflect.Manifest
trait Environment
{
abstract class Property[T] extends NotNull
{
/** Explicitly sets the value of this property to 'v'.*/
def update(v: T): Unit
/** Returns the current value of this property or throws an exception if the value could not be obtained.*/
def value: T = resolve.value
/** Returns the current value of this property in an 'Option'. 'None' is used to indicate that the
 * value could not be obtained.*/
def get: Option[T] = resolve.toOption
/** Returns full information about this property's current value. */
def resolve: PropertyResolution[T]
def foreach(f: T => Unit): Unit = resolve.foreach(f)
}
/** Creates a system property with the given name and no default value.*/
def system[T](propName: String)(implicit format: Format[T]): Property[T]
/** Creates a system property with the given name and the given default value to use if no value is explicitly specified.*/
def systemOptional[T](propName: String, defaultValue: => T)(implicit format: Format[T]): Property[T]
/** Creates a user-defined property that has no default value. The property will try to inherit its value
* from a parent environment (if one exists) if its value is not explicitly specified. An explicitly specified
* value will persist between builds if the object returned by this method is assigned to a 'val' in this
* 'Environment'.*/
def property[T](implicit manifest: Manifest[T], format: Format[T]): Property[T]
/** Creates a user-defined property that has no default value. The property will try to inherit its value
* from a parent environment (if one exists) if its value is not explicitly specified. An explicitly specified
* value will persist between builds if the object returned by this method is assigned to a 'val' in this
* 'Environment'. The given 'format' is used to convert an instance of 'T' to and from the 'String' representation
* used for persistence.*/
def propertyF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = property(manifest, format)
/** Creates a user-defined property with no default value and no value inheritance from a parent environment.
* Its value will persist between builds if the returned object is assigned to a 'val' in this 'Environment'.*/
def propertyLocal[T](implicit manifest: Manifest[T], format: Format[T]): Property[T]
/** Creates a user-defined property with no default value and no value inheritance from a parent environment.
* The property's value will persist between builds if the object returned by this method is assigned to a
* 'val' in this 'Environment'. The given 'format' is used to convert an instance of 'T' to and from the
* 'String' representation used for persistence.*/
def propertyLocalF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = propertyLocal(manifest, format)
/** Creates a user-defined property that uses the given default value if no value is explicitly specified for this property. The property's value will persist between builds
* if the object returned by this method is assigned to a 'val' in this 'Environment'.*/
def propertyOptional[T](defaultValue: => T)(implicit manifest: Manifest[T], format: Format[T]): Property[T]
/** Creates a user-defined property with no value inheritance from a parent environment but with the given default
* value if no value is explicitly specified for this property. The property's value will persist between builds
* if the object returned by this method is assigned to a 'val' in this 'Environment'. The given 'format' is used
* to convert an instance of 'T' to and from the 'String' representation used for persistence.*/
def propertyOptionalF[T](defaultValue: => T, format: Format[T])(implicit manifest: Manifest[T]): Property[T] =
propertyOptional(defaultValue)(manifest, format)
}
import scala.collection.Map
trait BasicEnvironment extends Environment
{
@ -188,13 +136,11 @@ trait BasicEnvironment extends Environment
if(rawValue == null)
notFound
else
{
Control.convertException(format.fromString(rawValue)) match
{
case Left(e) => ResolutionException("Error parsing system property '" + name + "': " + e.toString, Some(e))
case Right(x) => DefinedValue(x, false, false)
try
DefinedValue(format.fromString(rawValue), false, false)
catch {
case e: Exception => ResolutionException("Error parsing system property '" + name + "': " + e.toString, Some(e))
}
}
}
/** Handles resolution when the property has no explicit value. If there is a default value, that is returned,
* otherwise, UndefinedValue is returned.*/
@ -213,10 +159,11 @@ trait BasicEnvironment extends Environment
protected lazy val defaultValue = lazyDefaultValue
def update(t: T)
{
for(e <- Control.convertException(System.setProperty(name, format.toString(t))).left)
{
log.trace(e)
log.warn("Error setting system property '" + name + "': " + e.toString)
try System.setProperty(name, format.toString(t))
catch {
case e: Exception =>
log.trace(e)
log.warn("Error setting system property '" + name + "': " + e.toString)
}
}
override def toString = name + "=" + resolve
@ -251,93 +198,21 @@ trait BasicEnvironment extends Environment
propertyMap(name) = property
propertyMap //.readOnly (not currently in 2.8)
}
private val initialValues: Map[String, String] = impl.MapUtilities.readStrings(environmentLabel, envBackingPath, log)
private val initialValues: Map[String, String] = MapIO.readStrings(environmentLabel, envBackingPath)
def propertyNames: Iterable[String] = propertyMap.keys.toList
def getPropertyNamed(name: String): Option[UserProperty[_]] = propertyMap.get(name)
def propertyNamed(name: String): UserProperty[_] = propertyMap(name)
def saveEnvironment(): Option[String] =
def saveEnvironment()
{
if(isEnvironmentModified)
{
val properties = new java.util.Properties
for( (name, variable) <- propertyMap; stringValue <- variable.getStringValue)
properties.setProperty(name, stringValue)
val result = PropertiesUtilities.write(properties, "Project properties", envBackingPath, log)
IO.write(properties, "Project properties", envBackingPath)
setEnvironmentModified(false)
result
}
else
None
}
private[sbt] def uninitializedProperties: Iterable[(String, Property[_])] = propertyMap.filter(_._2.get.isEmpty)
}
private object Environment
{
def reflectiveMappings[T](obj: AnyRef, clazz: Class[T]): Map[String, T] =
{
val mappings = new scala.collection.mutable.OpenHashMap[String, T]
for ((name, value) <- ReflectUtilities.allValsC(obj, clazz))
mappings(ReflectUtilities.transformCamelCase(name, '.')) = value
mappings
}
}
sealed trait PropertyResolution[+T] extends NotNull
{
def value: T
def orElse[R >: T](r: => PropertyResolution[R]): PropertyResolution[R]
def toOption: Option[T]
def foreach(f: T => Unit): Unit
def map[R](f: T => R): PropertyResolution[R]
def flatMap[R](f: T => PropertyResolution[R]): PropertyResolution[R]
}
sealed trait NoPropertyValue extends PropertyResolution[Nothing]
{ self: RuntimeException with PropertyResolution[Nothing] =>
def value = throw this
def toOption = None
def map[R](f: Nothing => R): PropertyResolution[R] = this
def flatMap[R](f: Nothing => PropertyResolution[R]): PropertyResolution[R] = this
def foreach(f: Nothing => Unit) {}
}
final case class ResolutionException(message: String, exception: Option[Throwable])
extends RuntimeException(message, exception.getOrElse(null)) with NoPropertyValue
{
def orElse[R](r: => PropertyResolution[R]) = this
}
final case class UndefinedValue(name: String, environmentLabel: String)
extends RuntimeException("Value for property '" + name + "' from " + environmentLabel + " is undefined.") with NoPropertyValue
{
def orElse[R](r: => PropertyResolution[R]) =
r match
{
case u: UndefinedValue => this
case _ => r
}
}
final case class DefinedValue[T](value: T, isInherited: Boolean, isDefault: Boolean) extends PropertyResolution[T]
{
def toOption = Some(value)
def orElse[R >: T](r: => PropertyResolution[R]) = this
def map[R](f: T => R) = DefinedValue[R](f(value), isInherited, isDefault)
def flatMap[R](f: T => PropertyResolution[R]) = f(value)
def foreach(f: T => Unit) { f(value) }
}
private final class LazyVar[T](initialValue: => T) extends NotNull
{
private[this] var value: Option[T] = None
def apply() =
synchronized
{
value match
{
case Some(v) => v
case None =>
val newValue = initialValue
value = Some(newValue)
newValue
}
}
def update(newValue: T) = synchronized { value = Some(newValue) }
}

68
util/env/Environment.scala vendored Normal file
View File

@ -0,0 +1,68 @@
/* sbt -- Simple Build Tool
* Copyright 2008 Mark Harrah
*/
package sbt
import scala.reflect.Manifest
trait Environment
{
abstract class Property[T]
{
/** Explicitly sets the value of this property to 'v'.*/
def update(v: T): Unit
/** Returns the current value of this property or throws an exception if the value could not be obtained.*/
def value: T = resolve.value
/** Returns the current value of this property in an 'Option'. 'None' is used to indicate that the
 * value could not be obtained.*/
def get: Option[T] = resolve.toOption
/** Returns full information about this property's current value. */
def resolve: PropertyResolution[T]
def foreach(f: T => Unit): Unit = resolve.foreach(f)
}
/** Creates a system property with the given name and no default value.*/
def system[T](propName: String)(implicit format: Format[T]): Property[T]
/** Creates a system property with the given name and the given default value to use if no value is explicitly specified.*/
def systemOptional[T](propName: String, defaultValue: => T)(implicit format: Format[T]): Property[T]
/** Creates a user-defined property that has no default value. The property will try to inherit its value
* from a parent environment (if one exists) if its value is not explicitly specified. An explicitly specified
* value will persist between builds if the object returned by this method is assigned to a 'val' in this
* 'Environment'.*/
def property[T](implicit manifest: Manifest[T], format: Format[T]): Property[T]
/** Creates a user-defined property that has no default value. The property will try to inherit its value
* from a parent environment (if one exists) if its value is not explicitly specified. An explicitly specified
* value will persist between builds if the object returned by this method is assigned to a 'val' in this
* 'Environment'. The given 'format' is used to convert an instance of 'T' to and from the 'String' representation
* used for persistence.*/
def propertyF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = property(manifest, format)
/** Creates a user-defined property with no default value and no value inheritance from a parent environment.
* Its value will persist between builds if the returned object is assigned to a 'val' in this 'Environment'.*/
def propertyLocal[T](implicit manifest: Manifest[T], format: Format[T]): Property[T]
/** Creates a user-defined property with no default value and no value inheritance from a parent environment.
* The property's value will persist between builds if the object returned by this method is assigned to a
* 'val' in this 'Environment'. The given 'format' is used to convert an instance of 'T' to and from the
* 'String' representation used for persistence.*/
def propertyLocalF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = propertyLocal(manifest, format)
/** Creates a user-defined property that uses the given default value if no value is explicitly specified for this property. The property's value will persist between builds
* if the object returned by this method is assigned to a 'val' in this 'Environment'.*/
def propertyOptional[T](defaultValue: => T)(implicit manifest: Manifest[T], format: Format[T]): Property[T]
/** Creates a user-defined property with no value inheritance from a parent environment but with the given default
* value if no value is explicitly specified for this property. The property's value will persist between builds
* if the object returned by this method is assigned to a 'val' in this 'Environment'. The given 'format' is used
* to convert an instance of 'T' to and from the 'String' representation used for persistence.*/
def propertyOptionalF[T](defaultValue: => T, format: Format[T])(implicit manifest: Manifest[T]): Property[T] =
propertyOptional(defaultValue)(manifest, format)
}
private object Environment
{
def reflectiveMappings[T](obj: AnyRef, clazz: Class[T]): Map[String, T] =
{
var mappings = Map[String, T]()
for ((name, value) <- ReflectUtilities.allValsC(obj, clazz))
mappings = mappings.updated(ReflectUtilities.transformCamelCase(name, '.'), value)
mappings
}
}
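A consumer-side sketch of the trait above; it assumes it is mixed into a concrete Environment (for example a project definition backed by BasicEnvironment), and relies on the implicit Format[String] from the Format companion:
trait DeploySettings { self: Environment =>
	// assigning the Property to a (lazy) val is what lets its value persist between builds
	lazy val deployTarget = propertyOptional[String]("staging")
	def describeTarget: String = deployTarget.resolve match
	{
		case DefinedValue(v, _, isDefault) => "deploy.target = " + v + (if(isDefault) " (default)" else "")
		case u: UndefinedValue => u.getMessage
		case e: ResolutionException => e.getMessage
	}
}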

View File

@ -6,7 +6,7 @@ package sbt
import java.io.File
import scala.collection.mutable.{HashSet, Set}
trait Format[T] extends NotNull
trait Format[T]
{
def toString(t: T): String
def fromString(s: String): T
@ -35,11 +35,11 @@ object Format
def set[T](implicit format: Format[T]): Format[Set[T]] = new Format[Set[T]]
{
def toString(set: Set[T]) = set.toList.map(format.toString).mkString(File.pathSeparator)
def fromString(s: String) = (new HashSet[T]) ++ FileUtilities.pathSplit(s).map(_.trim).filter(!_.isEmpty).map(format.fromString)
def fromString(s: String) = (new HashSet[T]) ++ IO.pathSplit(s).map(_.trim).filter(!_.isEmpty).map(format.fromString)
}
implicit val string: Format[String] = new SimpleFormat[String] { def fromString(s: String) = s }
implicit val test: Format[Discovered] = new SimpleFormat[Discovered]
/*implicit val test: Format[Discovered] = new SimpleFormat[Discovered]
{
def fromString(s: String) = DiscoveredParser.parse(s).fold(error, x => x)
}
}*/
}
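
A hedged sketch of adding another Format instance alongside Format.string; intFormat is not part of this commit and uses only the Format trait shown above.

object ExtraFormats
{
	implicit val intFormat: Format[Int] = new Format[Int]
	{
		def toString(i: Int) = i.toString
		def fromString(s: String) = s.trim.toInt
	}
	// with intFormat in scope, Format.set[Int] round-trips a Set[Int]
	// through a path-separator-delimited String
}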

22
util/env/LazyVar.scala vendored Normal file
View File

@ -0,0 +1,22 @@
/* sbt -- Simple Build Tool
* Copyright 2008 Mark Harrah
*/
package sbt
private final class LazyVar[T](initialValue: => T) extends NotNull
{
private[this] var value: Option[T] = None
def apply() =
synchronized
{
value match
{
case Some(v) => v
case None =>
val newValue = initialValue
value = Some(newValue)
newValue
}
}
def update(newValue: T) = synchronized { value = Some(newValue) }
}
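
A usage sketch for LazyVar; the class is package-private, so this only compiles inside package sbt, and the values are invented.

object LazyVarDemo
{
	// the initializer runs at most once, on first apply()
	val port = new LazyVar[Int]({ println("computing default port"); 8080 })

	def demo()
	{
		println(port())   // evaluates the initializer: prints, then 8080
		println(port())   // cached: just 8080
		port() = 9090     // explicit override via update
		println(port())   // 9090
	}
}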

43
util/env/MapIO.scala vendored Normal file
View File

@ -0,0 +1,43 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt
import java.util.Properties
import java.io.File
import scala.collection.mutable.{HashMap, HashSet, Map, Set}
object MapIO
{
def write[Key, Value](map: Map[Key, Value], label: String, to: File)(implicit keyFormat: Format[Key], valueFormat: Format[Value])
{
val properties = new Properties
map foreach { pair => properties.setProperty(keyFormat.toString(pair._1), valueFormat.toString(pair._2)) }
IO.write(properties, label, to)
}
def read[Key, Value](map: Map[Key, Value], from: File)(implicit keyFormat: Format[Key], valueFormat: Format[Value])
{
map.clear
val properties = new Properties
IO.load(properties, from)
import collection.JavaConversions._
for(n <- properties.propertyNames)
{
val name = n.toString // propertyNames yields untyped elements; convert each name to a String
map.put( keyFormat.fromString(name), valueFormat.fromString(properties.getProperty(name)))
}
}
def readStrings(label: String, envBackingPath: File): scala.collection.immutable.Map[String, String] =
{
val map = new HashMap[String, String]
read(map, envBackingPath)
map.toMap
}
def all[Key, Value](map: Map[Key, Set[Value]]): Iterable[Value] =
map.values.toList.flatMap(set => set.toList)
def add[Key, Value](key: Key, value: Value, map: Map[Key, Set[Value]]): Unit =
map.getOrElseUpdate(key, new HashSet[Value]) += value
}
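
A sketch of a MapIO round trip; the keys, values, and label are invented, and the implicit Format[String] is resolved from the Format companion shown earlier.

object MapIODemo
{
	import java.io.File
	import scala.collection.mutable.HashMap

	def demo()
	{
		val file = File.createTempFile("demo", ".properties")
		val settings = HashMap("project.name" -> "demo", "project.version" -> "0.1")
		MapIO.write(settings, "example settings", file)

		val reloaded = MapIO.readStrings("example settings", file)
		assert(reloaded == Map("project.name" -> "demo", "project.version" -> "0.1"))
	}
}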

46
util/env/PropertyResolution.scala vendored Normal file
View File

@ -0,0 +1,46 @@
/* sbt -- Simple Build Tool
* Copyright 2008 Mark Harrah
*/
package sbt
sealed trait PropertyResolution[+T] extends NotNull
{
def value: T
def orElse[R >: T](r: => PropertyResolution[R]): PropertyResolution[R]
def toOption: Option[T]
def foreach(f: T => Unit): Unit
def map[R](f: T => R): PropertyResolution[R]
def flatMap[R](f: T => PropertyResolution[R]): PropertyResolution[R]
}
sealed trait NoPropertyValue extends PropertyResolution[Nothing]
{ self: RuntimeException with PropertyResolution[Nothing] =>
def value = throw this
def toOption = None
def map[R](f: Nothing => R): PropertyResolution[R] = this
def flatMap[R](f: Nothing => PropertyResolution[R]): PropertyResolution[R] = this
def foreach(f: Nothing => Unit) {}
}
final case class ResolutionException(message: String, exception: Option[Throwable])
extends RuntimeException(message, exception.getOrElse(null)) with NoPropertyValue
{
def orElse[R](r: => PropertyResolution[R]) = this
}
final case class UndefinedValue(name: String, environmentLabel: String)
extends RuntimeException("Value for property '" + name + "' from " + environmentLabel + " is undefined.") with NoPropertyValue
{
def orElse[R](r: => PropertyResolution[R]) =
r match
{
case u: UndefinedValue => this
case _ => r
}
}
final case class DefinedValue[T](value: T, isInherited: Boolean, isDefault: Boolean) extends PropertyResolution[T]
{
def toOption = Some(value)
def orElse[R >: T](r: => PropertyResolution[R]) = this
def map[R](f: T => R) = DefinedValue[R](f(value), isInherited, isDefault)
def flatMap[R](f: T => PropertyResolution[R]) = f(value)
def foreach(f: T => Unit) { f(value) }
}
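
A sketch of how resolution results combine; the property name, environment label, and values are invented.

object PropertyResolutionDemo
{
	val fromUser: PropertyResolution[Int] = UndefinedValue("port", "the user environment")
	val fromDefault: PropertyResolution[Int] = DefinedValue(8080, false, true)

	// orElse keeps the first defined value; an UndefinedValue only wins over another UndefinedValue
	val resolved = fromUser orElse fromDefault   // DefinedValue(8080, false, true)
	val bumped = resolved map { _ + 1 }          // DefinedValue(8081, false, true)
	val maybe = bumped.toOption                  // Some(8081)
}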

View File

@ -3,7 +3,7 @@
*/
package sbt
sealed trait Version extends NotNull
sealed trait Version
case class BasicVersion(major: Int, minor: Option[Int], micro: Option[Int], extra: Option[String]) extends Version
{
import Version._

View File

@ -9,6 +9,7 @@ import ErrorHandling.translate
import java.io.{BufferedReader, ByteArrayOutputStream, BufferedWriter, File, FileInputStream, InputStream, OutputStream}
import java.net.{URI, URISyntaxException, URL}
import java.nio.charset.Charset
import java.util.Properties
import java.util.jar.{Attributes, JarEntry, JarFile, JarInputStream, JarOutputStream, Manifest}
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}
import scala.collection.mutable.HashSet
@ -459,6 +460,12 @@ object IO
writer(file, lines.headOption.getOrElse(""), charset, append) { w =>
lines.foreach { line => w.write(line); w.newLine() }
}
def write(properties: Properties, label: String, to: File) =
fileOutputStream()(to) { output => properties.store(output, label) }
def load(properties: Properties, from: File): Unit =
if(from.exists)
fileInputStream(from){ input => properties.load(input) }
/** A pattern used to split a String by path separator characters.*/
private val PathSeparatorPattern = java.util.regex.Pattern.compile(File.pathSeparator)
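
A sketch of the two Properties helpers added above; the file and key are arbitrary.

object PropertiesIODemo
{
	import java.io.File
	import java.util.Properties

	def demo()
	{
		val file = File.createTempFile("demo", ".properties")
		val props = new Properties
		props.setProperty("project.name", "demo")
		IO.write(props, "written by IO.write", file)

		val reloaded = new Properties
		IO.load(reloaded, file)   // does nothing if the file does not exist
		assert(reloaded.getProperty("project.name") == "demo")
	}
}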

View File

@ -131,6 +131,8 @@ object Path extends Alternatives with Mapper
implicit def pathToFile(path: Path): File = path.asFile
implicit def pathsToFiles[CC[X] <: TraversableLike[X,CC[X]]](cc: CC[Path])(implicit cb: generic.CanBuildFrom[CC[Path], File, CC[File]]): CC[File] =
cc.map(_.asFile)
implicit def filesToPaths[CC[X] <: TraversableLike[X,CC[X]]](cc: CC[File])(implicit cb: generic.CanBuildFrom[CC[File], Path, CC[Path]]): CC[Path] =
cc.map(fileToPath)
implicit def filesToFinder(cc: Traversable[File]): PathFinder = finder(cc)
implicit def pathsToFinder(cc: Traversable[Path]): PathFinder = lazyPathFinder(cc)
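
A hedged sketch of the new filesToPaths conversion; the file names are invented, and Path._ is imported to bring the implicit views into scope.

object PathConversionDemo
{
	import java.io.File
	import Path._

	val files: List[File] = List(new File("src"), new File("lib"))
	val paths: List[Path] = files    // element-wise, via the new filesToPaths
	val finder: PathFinder = files   // via the existing filesToFinder
}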

View File

@ -4,8 +4,8 @@
package sbt
import java.io.File
import xsbti.AppProvider
import FileUtilities._
import IO._
import Resources.error
object Resources
{
@ -24,41 +24,44 @@ object Resources
error("Resource base directory '" + basePath + "' does not exist.")
}
}
def error(msg: String) = throw new ResourcesException(msg)
private val LoadErrorPrefix = "Error loading initial project: "
}
class ResourcesException(msg: String) extends Exception(msg)
class Resources(val baseDirectory: File)
{
import Resources._
// The returned directory is not actually read-only, but it should be treated that way
def readOnlyResourceDirectory(group: String, name: String): Either[String, File] =
def readOnlyResourceDirectory(group: String, name: String): File =
{
val groupDirectory = new File(baseDirectory, group)
if(groupDirectory.isDirectory)
{
val resourceDirectory = new File(groupDirectory, name)
if(resourceDirectory.isDirectory)
Right(resourceDirectory)
resourceDirectory
else
Left("Resource directory '" + name + "' in group '" + group + "' not found.")
error("Resource directory '" + name + "' in group '" + group + "' not found.")
}
else
Left("Group '" + group + "' not found.")
error("Group '" + group + "' not found.")
}
def readWriteResourceDirectory[T](group: String, name: String, log: Logger)
(withDirectory: File => Either[String, T]): Either[String, T] =
readOnlyResourceDirectory(group, name).right flatMap(file => readWriteResourceDirectory(file, log)(withDirectory))
def readWriteResourceDirectory[T](readOnly: File, log: Logger)
(withDirectory: File => Either[String, T]): Either[String, T] =
def readWriteResourceDirectory[T](group: String, name: String)(withDirectory: File => T): T =
{
val file = readOnlyResourceDirectory(group, name)
readWriteResourceDirectory(file)(withDirectory)
}
def readWriteResourceDirectory[T](readOnly: File)(withDirectory: File => T): T =
{
require(readOnly.isDirectory)
def readWrite(readOnly: File)(temporary: File): Either[String, T] =
def readWrite(readOnly: File)(temporary: File): T =
{
val readWriteDirectory = new File(temporary, readOnly.getName)
FileUtilities.copyDirectory(readOnly, readWriteDirectory, log).toLeft(()).right flatMap { x =>
withDirectory(readWriteDirectory)
}
copyDirectory(readOnly, readWriteDirectory)
withDirectory(readWriteDirectory)
}
doInTemporaryDirectory(log)(readWrite(readOnly))
withTemporaryDirectory(readWrite(readOnly))
}
}
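
A sketch of the simplified Resources API after this change, with failures thrown as ResourcesException instead of returned in Either; the base directory, group, and name are invented.

object ResourcesDemo
{
	import java.io.File

	def demo()
	{
		val resources = new Resources(new File("src/sbt-test"))
		// copies src/sbt-test/compile/simple into a temporary directory and passes the writable copy
		resources.readWriteResourceDirectory("compile", "simple") { dir =>
			println("working in " + dir)
		}
	}
}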

3
util/process/NOTICE Normal file
View File

@ -0,0 +1,3 @@
Simple Build Tool: Process Component
Copyright 2008, 2009, 2010 Mark Harrah, Vesa Vilhonen
Licensed under BSD-style license (see LICENSE)

View File

@ -1,4 +1,8 @@
package sbt.jetty
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
package jetty
import java.io.File
import java.net.URL
@ -132,7 +136,7 @@ private object LazyJettyRun${jetty.version} extends JettyRun
case (notA, null) => notA.toList
case (notA, notB) => notB :: notA.toList
}
private class JettyLogger(delegate: Logger) extends JettyLoggerBase(delegate) with JLogger
private class JettyLogger(delegate: AbstractLogger) extends JettyLoggerBase(delegate) with JLogger
{
def getLogger(name: String) = this
}

3
web/NOTICE Normal file
View File

@ -0,0 +1,3 @@
Simple Build Tool: Web Component
Copyright 2008, 2009, 2010 Mark Harrah, Vesa Vilhonen
Licensed under BSD-style license (see LICENSE)

View File

@ -6,15 +6,17 @@ package sbt
import java.io.File
import java.net.{URL, URLClassLoader}
import scala.xml.NodeSeq
import classpath.ClasspathUtilities
object JettyRunner
{
val DefaultPort = 8080
val DefaultScanInterval = 3
}
class JettyRunner(configuration: JettyConfiguration) extends ExitHook
//TODO: don't pass registry, just handle it in client
class JettyRunner(configuration: JettyConfiguration, registry: ExitHookRegistry) extends ExitHook
{
ExitHooks.register(this)
registry.register(this)
def name = "jetty-shutdown"
def runBeforeExiting() { stop() }
@ -38,11 +40,11 @@ class JettyRunner(configuration: JettyConfiguration) extends ExitHook
val jettyFilter = (name: String) => name.startsWith("org.mortbay.") || name.startsWith("org.eclipse.jetty.")
val notJettyFilter = (name: String) => !jettyFilter(name)
val dual = new xsbt.DualLoader(baseLoader, notJettyFilter, x => true, jettyLoader, jettyFilter, x => false)
val dual = new classpath.DualLoader(baseLoader, notJettyFilter, x => true, jettyLoader, jettyFilter, x => false)
def createRunner(implClassName: String) =
{
val lazyLoader = new LazyFrameworkLoader(implClassName, Array(FileUtilities.classLocation[Stoppable].toURI.toURL), dual, baseLoader)
val lazyLoader = new classpath.LazyFrameworkLoader(implClassName, Array(IO.classLocation[Stoppable].toURI.toURL), dual, baseLoader)
ModuleUtilities.getObject(implClassName, lazyLoader).asInstanceOf[JettyRun]
}
val runner = try { createRunner(implClassName6) } catch { case e: NoClassDefFoundError => createRunner(implClassName7) }
@ -89,7 +91,7 @@ sealed trait JettyConfiguration extends NotNull
/** The classpath to get Jetty from. */
def jettyClasspath: PathFinder
def classpathName: String
def log: Logger
def log: AbstractLogger
}
trait DefaultJettyConfiguration extends JettyConfiguration
{
@ -109,7 +111,7 @@ abstract class CustomJettyConfiguration extends JettyConfiguration
def jettyConfigurationXML: NodeSeq = NodeSeq.Empty
}
private class JettyLoggerBase(delegate: Logger)
private class JettyLoggerBase(delegate: AbstractLogger)
{
def getName = "JettyLogger"
def isDebugEnabled = delegate.atLevel(Level.Debug)