mirror of https://github.com/sbt/sbt.git
merging sbt as a subproject
This commit is contained in:
commit
0e6ccf2eb9
|
|
@ -0,0 +1,5 @@
|
|||
LazyJettyRun6.scala
|
||||
LazyJettyRun7.scala
|
||||
install/project/boot/
|
||||
scripted/project/boot/
|
||||
project/plugins/project/
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
Copyright (c) 2008, 2009, 2010 Steven Blundy, Josh Cough, Nathan Hamblen, Mark Harrah, David MacIver, Mikko Peltonen, Tony Sloane, Vesa Vilhonen
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. The name of the author may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Simple Build Tool (sbt)
|
||||
Copyright 2008, 2009, 2010 Steven Blundy, Josh Cough, Nathan Hamblen, Mark Harrah, David MacIver, Mikko Peltonen, Tony Sloane, Vesa Vilhonen
|
||||
Licensed under BSD-style license (see LICENSE)
|
||||
|
||||
|
||||
Portions based on code by Mike Clark in JDepend
|
||||
Copyright 1999-2004 Clarkware Consulting, Inc.
|
||||
Licensed under BSD-style license (see licenses/LICENSE_jdepend)
|
||||
|
||||
Portions based on code by Pete Kirkham in Nailgun
|
||||
Copyright 2004, Martian Software, Inc
|
||||
Licensed under the Apache License, Version 2.0 (see licenses/LICENSE_Apache)
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.extract
|
||||
|
||||
import java.io.{File, InputStream}
|
||||
import java.util.zip.{ZipEntry, ZipFile}
|
||||
|
||||
object Main
|
||||
{
|
||||
lazy val log: Logger = new ConsoleLogger
|
||||
|
||||
def main(args: Array[String])
|
||||
{
|
||||
if( args contains "debug" )
|
||||
log.setLevel(Level.Debug)
|
||||
val result = OpenResource.zipFile.ioOption(FileUtilities.classLocationFile[Install], "processing", log)(process)
|
||||
for(msg <- result)
|
||||
{
|
||||
log.error(msg)
|
||||
System.exit(1)
|
||||
}
|
||||
}
|
||||
private[this] val packedGzip = ".pack.gz"
|
||||
private def isArchive(name: String) = name.endsWith(".gz") || name.endsWith(".zip")
|
||||
private def process(zip: ZipFile) =
|
||||
{
|
||||
val installEntry = zip.getEntry("install")
|
||||
if(installEntry == null)
|
||||
Some("Install commands not found.")
|
||||
else
|
||||
{
|
||||
val jarAndZip = wrap.Wrappers.toList(zip.entries).filter(entry => isArchive(entry.getName)).partition(_.getName.endsWith(packedGzip))
|
||||
jarAndZip match
|
||||
{
|
||||
case (Nil, _)=> Some("sbt loader not found.")
|
||||
case (_, Nil) => Some("Project to extract and build not found.")
|
||||
case (loaderEntry :: _, projectEntry :: _) => extractAndRun(zip, loaderEntry, projectEntry, installEntry)
|
||||
}
|
||||
}
|
||||
}
|
||||
private def extractAndRun(zip: ZipFile, loaderEntry: ZipEntry, projectEntry: ZipEntry, installEntry: ZipEntry) =
|
||||
{
|
||||
val zipResource = OpenResource.zipEntry(zip)
|
||||
|
||||
import FileUtilities.{gunzip, readString, transfer, unzip, writeStream}
|
||||
val directory = new File(".", trimExtension(projectEntry.getName, ".zip"))
|
||||
assume(!directory.exists, "Could not extract project: directory " + projectEntry.getName + " exists.")
|
||||
|
||||
val loaderBaseName = trimExtension(loaderEntry.getName, packedGzip)
|
||||
val loaderFile = new File(directory, loaderBaseName + ".jar")
|
||||
val tempLoaderFile = new File(directory, loaderBaseName + ".pack")
|
||||
|
||||
def extractLoader() =
|
||||
{
|
||||
implicit def fileToPath(f: File) = Path.fromFile(f)
|
||||
val result =
|
||||
writeStream(tempLoaderFile, log) { out => zipResource.ioOption(loaderEntry, "reading", log)(gunzip(_, out, log)) } orElse
|
||||
Pack.unpack(tempLoaderFile, loaderFile, log)
|
||||
FileUtilities.clean(tempLoaderFile :: Nil, true, log)
|
||||
result.toLeft(loaderFile)
|
||||
}
|
||||
|
||||
Control.thread(zipResource.io(installEntry, "reading", log)(readString(_, log))) { installString =>
|
||||
Control.thread(parseInstall(installString)) { install =>
|
||||
zipResource.io(projectEntry, "reading", log)(unzip(_, Path.fromFile(directory), log)).left.toOption orElse
|
||||
Control.thread(extractLoader()) { loaderFile =>
|
||||
run(loaderFile, directory, install)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
private def parseInstall(installString: String): Either[String, Install] =
|
||||
{
|
||||
installString.split(separator) match
|
||||
{
|
||||
case Array(allOptions, allActions) =>
|
||||
val options = allOptions.split("""\n""").toList
|
||||
val actions = allActions.split("""\n""").toList
|
||||
Right( Install(options, actions) )
|
||||
case _ => Left("Invalid install script (no separator found)")
|
||||
}
|
||||
}
|
||||
private def filterEmpty(list: List[String]) = list.filter(!_.isEmpty)
|
||||
private def run(loader: File, project: File, install: Install) =
|
||||
{
|
||||
val command = "java" :: "-jar" :: loader.getAbsolutePath :: filterEmpty(install.options) ::: filterEmpty(install.actions)
|
||||
val builder = new java.lang.ProcessBuilder(command.toArray : _*)
|
||||
builder.directory(project)
|
||||
val exitCode = (Process(builder) !<)
|
||||
if(exitCode == 0)
|
||||
None
|
||||
else
|
||||
Some("sbt exited with nonzero exit code: " + exitCode)
|
||||
}
|
||||
private def trimExtension(name: String, ext: String) =
|
||||
{
|
||||
if(name.endsWith(ext))
|
||||
name.substring(0, name.length - ext.length)
|
||||
else
|
||||
name
|
||||
}
|
||||
// keep this in sync with sbt.extract.SelfExtractingProject
|
||||
private def separator = "===================="
|
||||
}
|
||||
private final case class Install(options: List[String], actions: List[String]) extends NotNull
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.extract
|
||||
|
||||
import java.io.{ByteArrayOutputStream, File}
|
||||
import FileUtilities.{classLocationFile, clean, createTemporaryDirectory, download, transferAndClose, unzip, write, zip}
|
||||
import SelfExtractingProject.{flat, separator}
|
||||
|
||||
trait SelfExtractingProject extends Project
|
||||
{
|
||||
protected def createSelfExtractingJar(actions: List[String], jvmOptions: List[String], projectZip: Path, outputJar: Path): Option[String] =
|
||||
{
|
||||
def jarForClass(name: String) = Path.fromFile(classLocationFile(Class.forName(name)))
|
||||
val loaderJar = jarForClass("xsbti.Launcher")
|
||||
val bytes = new ByteArrayOutputStream
|
||||
transferAndClose(this.getClass.getResourceAsStream("extract.location"), bytes, log) orElse
|
||||
{
|
||||
val extractorJarLocation = bytes.toString("UTF-8")
|
||||
createSelfExtractingJar(actions, jvmOptions, projectZip, loaderJar, extractorJarLocation, outputJar)
|
||||
}
|
||||
}
|
||||
private def createSelfExtractingJar(actions: List[String], jvmOptions: List[String], projectZip: Path, loaderJar: Path, extractorJarLocation: String, outputJar: Path): Option[String] =
|
||||
{
|
||||
val installContents = jvmOptions.mkString("\n") + separator + actions.mkString("\n")
|
||||
withTemporaryDirectory(log) { tmp =>
|
||||
val tmpPath = Path.fromFile(tmp)
|
||||
write(new File(tmp, "install"), installContents, log) orElse
|
||||
unzip(this.getClass.getResource(extractorJarLocation), tmpPath, log).left.toOption orElse
|
||||
Control.thread(compressLoader(loaderJar)) { compressedLoader =>
|
||||
zip( (tmpPath ##) :: flat(projectZip) :: compressedLoader :: Nil, outputJar, true, log)
|
||||
}
|
||||
}
|
||||
}
|
||||
private def withTemporaryDirectory(log: Logger)(f: File => Option[String]) =
|
||||
{
|
||||
Control.thread(createTemporaryDirectory(log)) { dir =>
|
||||
Control.trapUnitAndFinally("", log)
|
||||
{ f(dir) }
|
||||
{ clean(Path.fromFile(dir) :: Nil, true, log) }
|
||||
}
|
||||
}
|
||||
private def compressLoader(loaderJar: Path): Either[String, Path] =
|
||||
{
|
||||
val jarName = loaderJar.asFile.getName
|
||||
val dotIndex = jarName.lastIndexOf('.')
|
||||
val baseName =
|
||||
if(dotIndex > 0) jarName.substring(0, dotIndex)
|
||||
else jarName
|
||||
val packedName = baseName + ".pack"
|
||||
val packed = outputPath / packedName
|
||||
val packedAndGzip = (outputPath ##) / (packedName + ".gz")
|
||||
val result =
|
||||
Pack.pack(loaderJar, packed, log) orElse
|
||||
FileUtilities.gzip(packed, packedAndGzip, log)
|
||||
result.toLeft(packedAndGzip)
|
||||
}
|
||||
}
|
||||
trait BasicSelfExtractingProject extends BasicScalaProject with SelfExtractingProject
|
||||
{
|
||||
def installActions: List[String] = update.name :: `package`.name :: Nil
|
||||
def jvmOptions: List[String] = Nil
|
||||
def selfExtractingJar: Path = outputPath / (artifactBaseName + "-setup.jar")
|
||||
|
||||
lazy val installer = installerAction
|
||||
def installerAction = task { createSelfExtractingJar(installActions, jvmOptions, packageProjectZip, selfExtractingJar) } dependsOn packageProject
|
||||
}
|
||||
|
||||
object SelfExtractingProject
|
||||
{
|
||||
// keep this in sync with sbt.extract.Main.separator
|
||||
def separator = "===================="
|
||||
private def flat(p: Path) =
|
||||
p match
|
||||
{
|
||||
case rp: RelativePath => (rp.parentPath ##) / rp.component
|
||||
case _ => p
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
project.name=extract
|
||||
project.organization=org.scala-tools.sbt
|
||||
project.version=0.3.1-SNAPSHOT
|
||||
sbt.version=0.7.1
|
||||
def.scala.version=2.7.7
|
||||
build.scala.versions=2.7.7
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
|
||||
import sbt._
|
||||
import java.io.File
|
||||
|
||||
protected class InstallExtractProject(info: ProjectInfo, pluginProject: => InstallPluginProject) extends DefaultProject(info) with NoPublish
|
||||
{
|
||||
override def unmanagedClasspath = super.unmanagedClasspath +++ info.sbtClasspath
|
||||
private lazy val plugin = pluginProject
|
||||
val mainClassName = "sbt.extract.Main"
|
||||
val proguardConfigurationPath: Path = outputPath / "proguard.pro"
|
||||
val toolsConfig = config("tools")
|
||||
val defaultConfig = Configurations.Default
|
||||
val proguardJar = "net.sf.proguard" % "proguard" % "4.3" % "tools"
|
||||
def rootProjectDirectory = rootProject.info.projectPath
|
||||
def outputJar = (plugin.outputPath ##) / defaultJarName
|
||||
|
||||
/******** Proguard *******/
|
||||
lazy val proguard = proguardTask dependsOn(`package`, writeProguardConfiguration, cleanProguard)
|
||||
lazy val writeProguardConfiguration = writeProguardConfigurationTask dependsOn `package`
|
||||
lazy val cleanProguard = cleanTask(outputJar)
|
||||
private def proguardTask =
|
||||
task
|
||||
{
|
||||
val proguardClasspathString = Path.makeString(managedClasspath(toolsConfig).get)
|
||||
val configFile = proguardConfigurationPath.asFile.getAbsolutePath
|
||||
val exitValue = Process("java", List("-Xmx256M", "-cp", proguardClasspathString, "proguard.ProGuard", "@" + configFile)) ! log
|
||||
if(exitValue == 0) None else Some("Proguard failed with nonzero exit code (" + exitValue + ")")
|
||||
}
|
||||
private def writeProguardConfigurationTask =
|
||||
task
|
||||
{
|
||||
// the template for the proguard configuration file
|
||||
val outTemplate = """
|
||||
|-dontoptimize
|
||||
|-dontobfuscate
|
||||
|-dontnote
|
||||
|-dontwarn
|
||||
|-libraryjars %s
|
||||
|%s
|
||||
|-outjars %s
|
||||
|-ignorewarnings
|
||||
|-keep public class %s {
|
||||
| public static void main(java.lang.String[]);
|
||||
|}"""
|
||||
|
||||
val defaultJar = jarPath.absolutePath
|
||||
log.debug("proguard configuration using main jar " + defaultJar)
|
||||
val externalDependencies = (mainCompileConditional.analysis.allExternals).map(_.getAbsoluteFile).filter(_.getName.endsWith(".jar"))
|
||||
debugJars("external dependencies", externalDependencies)
|
||||
// partition jars from the external jar dependencies of this project by whether they are located in the project directory
|
||||
// if they are, they are specified with -injars, otherwise they are specified with -libraryjars
|
||||
val (externalJars, libraryJars) = externalDependencies.toList.partition{jar => Path.relativize(rootProjectDirectory, jar).isDefined}
|
||||
debugJars("library jars", libraryJars)
|
||||
val externalJarStrings = externalJars.map( _ + "(!META-INF/**,!*.properties)")
|
||||
// exclude properties files and manifests from scala-library jar
|
||||
val inJars = (defaultJar :: externalJarStrings).map("-injars " + _).mkString("\n")
|
||||
|
||||
val proguardConfiguration = outTemplate.stripMargin.format(libraryJars.mkString(File.pathSeparator), inJars, outputJar.absolutePath, mainClassName)
|
||||
log.debug("Proguard configuration written to " + proguardConfigurationPath)
|
||||
FileUtilities.write(proguardConfigurationPath.asFile, proguardConfiguration, log)
|
||||
}
|
||||
private def debugJars[T](label: String, jars: Iterable[T]): Unit =
|
||||
log.debug("proguard configuration " + label + ": \n\t" + jars.mkString("\n\t"))
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
|
||||
import sbt._
|
||||
import java.nio.charset.Charset
|
||||
|
||||
protected class InstallPluginProject(info: ProjectInfo, extract: => InstallExtractProject) extends PluginProject(info)
|
||||
{
|
||||
private lazy val extractProject = extract
|
||||
override def mainResources = super.mainResources +++ extractProject.outputJar +++ extractLocation
|
||||
|
||||
def extractLocation = (outputPath ##) / "extract.location"
|
||||
lazy val writeProperties = task { FileUtilities.write(extractLocation.asFile, extractProject.outputJar.relativePath, Charset.forName("UTF-8"), log) }
|
||||
override def packageAction = super.packageAction dependsOn(extractProject.proguard, writeProperties)
|
||||
|
||||
override def deliverProjectDependencies = Nil
|
||||
val publishTo = "Scala Tools Nexus" at "http://nexus.scala-tools.org/content/repositories/releases/"
|
||||
Credentials(Path.fromFile(System.getProperty("user.home")) / ".ivy2" / ".credentials", log)
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
import sbt._
|
||||
|
||||
class InstallerProject(info: ProjectInfo) extends ParentProject(info) with NoPublish
|
||||
{
|
||||
/** Project for the sbt plugin that a project uses to generate the installer jar. */
|
||||
lazy val installPlugin: InstallPluginProject = project("plugin", "Installer Plugin", new InstallPluginProject(_, installExtractor), installExtractor)
|
||||
/** Project for the code that runs when the generated installer jar is run. */
|
||||
lazy val installExtractor: InstallExtractProject = project("extract", "Installer Extractor", new InstallExtractProject(_, installPlugin))
|
||||
}
|
||||
|
||||
trait NoPublish extends BasicManagedProject
|
||||
{
|
||||
override def publishLocalAction = publishAction
|
||||
override def deliverAction = publishAction
|
||||
override def deliverLocalAction = publishAction
|
||||
override def publishAction = task {None}
|
||||
}
|
||||
|
|
@ -0,0 +1,176 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
Copyright (C) 1999-2004 Clarkware Consulting, Inc.
|
||||
All Rights Reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of Clarkware Consulting, Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without prior written permission. For written
|
||||
permission, please contact clarkware@clarkware.com.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
|
||||
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
CLARKWARE CONSULTING OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
|
||||
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
Copyright (c) 2008, 2009, 2010 Steven Blundy, Josh Cough, Nathan Hamblen, Mark Harrah, David MacIver, Mikko Peltonen, Tony Sloane, Vesa Vilhonen
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. The name of the author may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
### Major Features
|
||||
* Batch and interactive commands are now unified. All commands that can be executed at the interactive prompt can be run from the command line. To run commands and then enter the interactive prompt, make the last command 'shell'.
|
||||
* Scala version handling has been overhauled.
|
||||
* The version of Scala used to run sbt (currently 2.7.7) is decoupled from the version used to build the project
|
||||
* Easily change between Scala versions with `++<version>`
|
||||
* Improved cross-building: project definition is only compiled against 2.7.7 instead of every Scala version cross-built against
|
||||
* Works with Scala 2.8.0.Beta1; however, classpath handling in Scala 2.8 trunk is changing and sbt currently cannot use 2.8 trunk to build projects until this settles
|
||||
* Using a [local version of Scala](http://code.google.com/p/simple-build-tool/wiki/LocalScala) is easier now.
|
||||
* Dependency management improvements:
|
||||
+ `make-pom` task now uses custom pom generation code instead of Ivy's pom writer
|
||||
- Writes Maven-style repositories to the pom by default
|
||||
- Override the 'pomExtra' method to provide XML (scala.xml.NodeSeq) to insert directly into the generated pom
|
||||
+ sbt's [Ivy interface](http://code.google.com/p/simple-build-tool/wiki/IvyInterface) can be used directly (useful for implementing certain tasks)
|
||||
* Test framework support is now done through a [uniform test interface](http://github.com/harrah/test-interface). Implications:
|
||||
+ New versions of specs, ScalaCheck, and ScalaTest are supported as soon as they are released
|
||||
+ Stefan Zeiger has written an [implementation for running JUnit](http://github.com/szeiger/junit-interface)
|
||||
+ Support is better, since the test framework authors are the ones providing the implementation
|
||||
+ Arguments can be passed to the test framework. In the simplest case: `test-only your.test -- -a -b -c`
|
||||
* Generalized the [launcher](http://code.google.com/p/simple-build-tool/wiki/GeneralizedLauncher) to be able to launch Scala applications, not just sbt
|
||||
* Provide a configuration file to the launcher and it can download the application and its dependencies from a repository and run it
|
||||
* sbt's configuration can be customized. For example,
|
||||
* The default values used to create a new project can be changed
|
||||
* The repositories used to fetch sbt and its dependencies, including Scala, can be configured
|
||||
* The location that sbt is retrieved to is configurable. For example, instead of `project/boot`, `/home/user/.ivy2/sbt/`
|
||||
* Configurable method of project searching (previously configurable by system property)
|
||||
|
||||
### Other Improvements
|
||||
* Jetty 7 support (doesn't work with JRebel at this time)
|
||||
* Control traces with 'on' 'off', 'nosbt', `<level>` (Thanks to Tony Sloane.)
|
||||
* New action 'test-run' method that is analogous to 'run', but for test classes.
|
||||
* New action 'clean-plugins' task that clears built plugins (useful for plugin development).
|
||||
* Can provide commands from a file with <file command (file is the name of the file to read from)
|
||||
* Can provide commands over loopback interface with <port command (port is the number to read commands from)
|
||||
* Launcher is allowed in root directory or `lib/` directory (jar name must match '*sbt-launch*.jar' and will be kept off classpath by default)
|
||||
* Compilation properly tracks certain types of synthetic classes (for comprehension with >30 clauses, for example)
|
||||
* Can provide custom task start and end delimiters by defining the system properties `sbt.start.delimiter` and `sbt.end.delimiter`
|
||||
|
||||
### Migrating from 0.5.6
|
||||
* Get the 0.7.0 launcher and [set it up as usual](http://code.google.com/p/simple-build-tool/wiki/Setup)
|
||||
* In your `project/build.properties`
|
||||
* Rename `scala.version` to `build.scala.versions`. If you previously overrode `crossScalaVersions`, just put the versions in `build.scala.versions` separated by whitespace.
|
||||
* Set `sbt.version` to 0.7.0
|
||||
* Check the compatibility section below
|
||||
|
||||
### Compatibility with 0.5.6
|
||||
* Artifact IDs (like your project's jar and ID for publishing to repositories) and output paths (like `target/` and `lib_managed/`) now have _<scala.version> appended. To keep the old way, `override def disableCrossPaths = true`.
|
||||
* 'reboot' is gone, 'reload' now covers 'reboot'
|
||||
* [Properties](http://code.google.com/p/simple-build-tool/wiki/Properties) need to be declared `lazy val` instead of just `val`.
|
||||
* The way to fork `run` has changed due to reworking the Scala version handling. See [Forking](http://code.google.com/p/simple-build-tool/wiki/Forking) for details. Forking the compiler is no longer supported.
|
||||
* The project definition is always compiled against the version of Scala running sbt (currently, 2.7.7). It was previously the same as the version used to build the project.
|
||||
* The Ivy interface has been broken out into its own subproject and reworked. All functional tests pass, but please report any issues you encounter.
|
||||
* `updateOptions` has been replaced by overriding the appropriate methods directly. Some other advanced configuration options have changed- documentation pending.
|
||||
* Web application support was reworked. Please report any issues you encounter.
|
||||
* Test framework support is now done through the uniform test interface. If you manually manage your test dependencies, you will need the [test-compat implementation](http://github.com/harrah/test-compat) of this interface. The earliest releases of test frameworks supported are:
|
||||
+ specs: 1.6.1
|
||||
+ ScalaCheck: 1.5
|
||||
+ ScalaTest: 1.0
|
||||
|
||||
### Other Notes
|
||||
* A message like `'compiler-interface' not compiled for '2.7.7'` is sbt adapting to your build Scala version (and Java version). It takes 5-20s depending on your machine and is a one time compilation for a given Scala/Java version combination.
|
||||
* Version control is now done with git and the source code is hosted on [Github](http://github.com/harrah). See the [instructions](http://code.google.com/p/simple-build-tool/wiki/Build) for building it locally. [Issue tracking](http://code.google.com/p/simple-build-tool/issues/list) and [documentation](http://code.google.com/p/simple-build-tool/wiki/DocumentationHome) are still on [Google Code](http://code.google.com/p/simple-build-tool).
|
||||
* Reports of performance degradation or improvement in 0.7.0 are useful.
|
||||
* Regular documentation has been updated. API Documentation is pending.
|
||||
* Now using the [posterous-sbt plugin](http://github.com/n8han/posterous-sbt) to generate and publish release notes to [http://implicit.ly/](http://implicit.ly/).
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
### Fixes
|
||||
* Fixed Jetty 7 support to work with JRebel
|
||||
* Fixed make-pom to generate valid repositories section
|
||||
|
|
@ -0,0 +1 @@
|
|||
[Simple Build Tool](http://code.google.com/p/simple-build-tool/) is a build tool for Scala projects that aims to do the basics well.
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
project.name=Simple Build Tool
|
||||
project.organization=org.scala-tools.sbt
|
||||
project.version=0.7.2-SNAPSHOT
|
||||
sbt.version=0.7.1
|
||||
def.scala.version=2.7.7
|
||||
build.scala.versions=2.7.7
|
||||
|
|
@ -0,0 +1,96 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
/*import sbt._
|
||||
|
||||
import java.io.File
|
||||
|
||||
trait ReleaseProject extends ExecProject
|
||||
{ self: SbtProject =>
|
||||
def info: ProjectInfo
|
||||
lazy val releaseChecks = javaVersionCheck && projectVersionCheck && fullyCheckedOut
|
||||
lazy val fullyCheckedOut =
|
||||
task
|
||||
{
|
||||
if(svnArtifactsPath.exists) None
|
||||
else Some("You need a fully checked out sbt repository with commit rights to do a release.")
|
||||
}
|
||||
lazy val javaVersionCheck =
|
||||
task
|
||||
{
|
||||
val javaVersion = System.getProperty("java.version")
|
||||
if(!javaVersion.startsWith("1.5."))
|
||||
Some("Java version must be 1.5.x (was " + javaVersion + ")")
|
||||
else
|
||||
None
|
||||
}
|
||||
lazy val projectVersionCheck =
|
||||
task
|
||||
{
|
||||
def value(a: Option[Int]) = a.getOrElse(0)
|
||||
def lessThan(a: Option[Int], b: Option[Int]) = value(a) < value(b)
|
||||
version match
|
||||
{
|
||||
case BasicVersion(major, minor, micro, None) =>
|
||||
Version.fromString(sbtVersion.value) match
|
||||
{
|
||||
case Right(BasicVersion(builderMajor, builderMinor, builderMicro, None))
|
||||
if (builderMajor < major || ( builderMajor == major &&
|
||||
lessThan(builderMinor, minor) || (builderMinor == minor &&
|
||||
lessThan(builderMicro, micro ) ))) =>
|
||||
None
|
||||
case _ => Some("Invalid builder sbt version. Must be a release version older than the project version. (was: " + sbtVersion.value + ")")
|
||||
}
|
||||
case _ => Some("Invalid project version. Should be of the form #.#.# (was: " + version + ")")
|
||||
}
|
||||
}
|
||||
|
||||
def svnURL = "https://simple-build-tool.googlecode.com/svn/"
|
||||
def latestURL = svnURL + "artifacts/latest"
|
||||
|
||||
def svnArtifactsPath = Path.fromFile(info.projectPath.asFile.getParentFile) / "artifacts"
|
||||
def svnArtifactPath = svnArtifactsPath / version.toString
|
||||
def ivyLocalPath = Path.userHome / ".ivy2" / "local" / "sbt" / "simple-build-tool" / version.toString
|
||||
def manualTasks =
|
||||
("Upload launcher jar: " + boot.outputJar.absolutePath) ::
|
||||
"Update, build, check and commit Hello Lift example" ::
|
||||
Nil
|
||||
|
||||
lazy val copyDocs = main.copyTask ( (main.mainDocPath ##) ** "*", svnArtifactPath / "api") dependsOn(main.doc, releaseChecks)
|
||||
lazy val copyIvysJars = main.copyTask( (ivyLocalPath ##) ** "*", svnArtifactPath) dependsOn(main.crossPublishLocal, releaseChecks)
|
||||
|
||||
lazy val release = manualTaskMessage dependsOn(commitDocs, releaseArtifacts, releaseChecks)
|
||||
lazy val releaseArtifacts = nextVersion dependsOn(tag, latestLink, boot.proguard, releaseChecks)
|
||||
lazy val manualTaskMessage = task { println("The following tasks must be done manually:\n\t" + manualTasks.mkString("\n\t")); None }
|
||||
|
||||
import sbt.ProcessXML._
|
||||
lazy val addArtifacts = execTask {<o> svn add {svnArtifactPath} </o>} dependsOn ( copyIvysJars, copyDocs, releaseChecks )
|
||||
lazy val commitArtifacts = execTask {<o> svn commit -m "Jars, Ivys, and API Docs for {version.toString}" {svnArtifactPath} </o>} dependsOn(addArtifacts, releaseChecks)
|
||||
lazy val tag = execTask {<o> svn copy -m "Tagging {version.toString}" {svnURL}/trunk/ {svnURL}/tags/{version.toString} </o>} dependsOn(releaseChecks)
|
||||
lazy val latestLink = (deleteLatestLink && makeLatestLink) dependsOn(commitArtifacts, releaseChecks)
|
||||
lazy val makeLatestLink = execTask {<o> svn copy -m "Creating new latest link" {svnURL}/artifacts/{version.toString}/ {latestURL} </o>} dependsOn(releaseChecks)
|
||||
lazy val deleteLatestLink = execTask {<o> svn del -m "Deleting old latest link" {latestURL} </o>} dependsOn(releaseChecks)
|
||||
lazy val commitProperties = execTask {<o> svn commit -m "Bumping versions" project/build.properties </o>} dependsOn(releaseChecks)
|
||||
lazy val commitDocs = execTask {<o> svn commit -m "Updated documentation for {version.toString}" ../wiki/ </o>} dependsOn(releaseChecks)
|
||||
|
||||
lazy val nextVersion = (incrementVersions && commitProperties) dependsOn(releaseChecks)
|
||||
lazy val incrementVersions = task { incrementVersionNumbers(); None }
|
||||
def incrementVersionNumbers(): Unit =
|
||||
for( v <- projectVersion)
|
||||
{
|
||||
sbtVersion() = v.toString
|
||||
val incremented = v.asInstanceOf[BasicVersion].incrementMicro // BasicVersion checked by releaseChecks
|
||||
import incremented._
|
||||
val newVersion = BasicVersion(major, minor, micro, Some("SNAPSHOT"))
|
||||
log.info("Changing version to " + newVersion)
|
||||
projectVersion() = newVersion
|
||||
saveEnvironment
|
||||
}
|
||||
}
|
||||
|
||||
package sbt {
|
||||
object ProcessXML {
|
||||
implicit def elemToPB(command: scala.xml.Elem): ProcessBuilder =
|
||||
impl.CommandParser.parse(command.text.trim).fold(error, Function.tupled(Process.apply))
|
||||
}
|
||||
}*/
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
import sbt._
|
||||
|
||||
import java.io.File
|
||||
import java.net.URL
|
||||
|
||||
/** Build definition for sbt itself (the 0.7.x line).
 *  Declares dependencies and test configuration, and generates the Jetty 6/7
 *  runner sources from a shared template before compiling. */
class SbtProject(info: ProjectInfo) extends DefaultProject(info) with test.SbtScripted with posterous.Publish
{
  /** Additional resources to include in the produced jar (license files). */
  def extraResources = descendents(info.projectPath / "licenses", "*") +++ "LICENSE" +++ "NOTICE"
  override def mainResources = super.mainResources +++ extraResources

  // sbt.ReflectiveSpecification is excluded from the normal test run.
  override def testOptions = ExcludeTests("sbt.ReflectiveSpecification" :: Nil) :: super.testOptions.toList
  override def normalizedName = "sbt"

  override def managedStyle = ManagedStyle.Ivy
  //val publishTo = Resolver.file("technically", new File("/var/dbwww/repo/"))
  val technically = Resolver.url("technically.us", new URL("http://databinder.net/repo/"))(Resolver.ivyStylePatterns)

  override def compileOptions = CompileOption("-Xno-varargs-conversion") :: Nil

  /* ---- configuration of scripted testing ---- */
  // false: show logging unbuffered as it happens; true: buffer until the task
  // completes and only show the output if the task fails.
  // The output of scripted tasks executed in parallel will be interleaved if false.
  override def scriptedBufferLog = true
  // Which Scala versions to test against for tests that do cross building.
  override def scriptedCompatibility = sbt.test.CompatibilityLevel.Minimal

  override def useDefaultConfigurations = false
  val default = Configurations.Default
  val optional = Configurations.Optional
  val provided = Configurations.Provided
  val testConf = Configurations.Test

  // testing
  val scalacheck = "org.scala-tools.testing" %% "scalacheck" % "1.6" % "test"

  val ivy = "org.apache.ivy" % "ivy" % "2.1.0" intransitive()
  val jsch = "com.jcraft" % "jsch" % "0.1.31" intransitive()
  val jetty = "org.mortbay.jetty" % "jetty" % "6.1.14" % "optional"

  val jetty7server = "org.eclipse.jetty" % "jetty-server" % "7.0.1.v20091125" % "optional"
  val jetty7webapp = "org.eclipse.jetty" % "jetty-webapp" % "7.0.1.v20091125" % "optional"

  val testInterface = "org.scala-tools.testing" % "test-interface" % "0.4"

  // xsbt components
  val xsbti = "org.scala-tools.sbt" % "launcher-interface" % projectVersion.value.toString % "provided"
  val compiler = "org.scala-tools.sbt" %% "compile" % projectVersion.value.toString

  /* LazyJettyRun is generated for both Jetty 6 and 7: the two versions differ
   * only in imports, but the file must be compiled against each import set. */
  override def compileAction = super.compileAction dependsOn (generateJettyRun6, generateJettyRun7)
  def jettySrcDir = mainScalaSourcePath / "sbt" / "jetty"
  def jettyTemplate = jettySrcDir / "LazyJettyRun.scala.templ"

  lazy val generateJettyRun6 = generateJettyRun(jettyTemplate, jettySrcDir / "LazyJettyRun6.scala", "6", jettySrcDir / "jetty6.imports")
  lazy val generateJettyRun7 = generateJettyRun(jettyTemplate, jettySrcDir / "LazyJettyRun7.scala", "7", jettySrcDir / "jetty7.imports")

  /** Task that instantiates the template at `in` for the given Jetty `version`,
   *  splicing in the imports read from `importsPath`, and writes the result to `out`.
   *  Returns Some(error) on failure, None on success. */
  def generateJettyRun(in: Path, out: Path, version: String, importsPath: Path) =
    task
    {
      val result =
        for(template <- FileUtilities.readString(in.asFile, log).right;
            imports <- FileUtilities.readString(importsPath.asFile, log).right)
        yield FileUtilities.write(out.asFile, processJettyTemplate(template, version, imports), log).toLeft(())
      result.left.toOption
    }
  /** Substitutes the `${jetty.version}` and `${jetty.imports}` placeholders in the template. */
  def processJettyTemplate(template: String, version: String, imports: String): String =
  {
    val withVersion = template.replaceAll("""\Q${jetty.version}\E""", version)
    withVersion.replaceAll("""\Q${jetty.imports}\E""", imports)
  }
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
import sbt._
|
||||
|
||||
import java.net.URL
|
||||
|
||||
/** Plugin definition: declares the plugins used by sbt's own build. */
class Plugins(info: ProjectInfo) extends PluginDefinition(info)
{
  // Scripted-test plugin used for sbt's integration tests.
  val scripted = "org.scala-tools.sbt" % "scripted" % "0.7.0"
  // Repository hosting posterous-sbt, used to publish release notes.
  val t_repo = "t_repo" at "http://tristanhunt.com:8081/content/groups/public/"
  val posterous = "net.databinder" % "posterous-sbt" % "0.1.3"
  val technically = Resolver.url("technically.us", new URL("http://databinder.net/repo/"))(Resolver.ivyStylePatterns)
}
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
Copyright (c) 2008, 2009, 2010 Mark Harrah
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. The name of the author may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
Simple Build Tool: Scripted Testing
|
||||
Copyright 2008, 2009, 2010 Mark Harrah
|
||||
Licensed under BSD-style license (see LICENSE)
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
project.name=scripted
|
||||
project.organization=org.scala-tools.sbt
|
||||
project.version=0.7.2-SNAPSHOT
|
||||
sbt.version=0.7.1
|
||||
def.scala.version=2.7.7
|
||||
build.scala.versions=2.7.7
|
||||
project.initialize=false
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
|
||||
import sbt._
|
||||
|
||||
/** Build definition for the scripted-test plugin project. */
class SbtTest(info: ProjectInfo) extends PluginProject(info)
{
  val xsbtTest = "org.scala-tools.sbt" %% "test" % version.toString
  val interface = "org.scala-tools.sbt" % "launcher-interface" % version.toString % "provided"

  // Bundle the license files into the published jar.
  override def mainResources = super.mainResources +++ "LICENSE" +++ "NOTICE"

  override def managedStyle = ManagedStyle.Ivy
  val publishTo = Resolver.file("technically", new java.io.File("/var/dbwww/repo/"))
}
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.test
|
||||
|
||||
import java.io.{File, IOException}
|
||||
import xsbt.IPC
|
||||
import xsbt.test.{StatementHandler, TestFailed}
|
||||
|
||||
/** Drives a remote sbt process for scripted tests.  Commands are sent over the
 *  IPC `server`; the remote replies with a boolean indicating success.
 *  `directory` is the working directory for the forked sbt instance. */
final class SbtHandler(directory: File, log: Logger, server: IPC.Server) extends StatementHandler
{
  type State = Process
  def initialState = newRemote
  /** Sends `command` with its `arguments` to the remote sbt and throws if the
   *  remote reports failure.  Returns the unchanged process handle. */
  def apply(command: String, arguments: List[String], p: Process): Process =
  {
    send((command :: arguments.map(escape)).mkString(" "))
    receive(command + " failed")
    p
  }
  /** Asks the remote sbt to exit and waits for it; if the connection is
   *  already broken, destroys the process instead. */
  def finish(state: Process) =
    try {
      server.connection { _.send("exit") }
      state.exitValue()
    } catch {
      case e: IOException => state.destroy()
    }
  def send(message: String) = server.connection { _.send(message) }
  /** Reads the remote's boolean reply; throws TestFailed with `errorMessage` on failure. */
  def receive(errorMessage: String) =
    server.connection { ipc =>
      val resultMessage = ipc.receive
      if(!resultMessage.toBoolean) throw new TestFailed(errorMessage)
    }
  /** Forks a new sbt process via the launcher jar, wired to read commands from
   *  this server's port, and waits for it to report successful initialization. */
  def newRemote =
  {
    val launcherJar = FileUtilities.classLocationFile(Class.forName("xsbti.AppProvider")).getAbsolutePath
    val args = "java" :: "-jar" :: launcherJar :: ( "<" + server.port) :: Nil
    val builder = new java.lang.ProcessBuilder(args.toArray : _*).directory(directory)
    val io = BasicIO(log, false).withInput(_.close())
    val p = Process(builder) run( io )
    // Close the server once the remote process exits, for whatever reason.
    Spawn { p.exitValue(); server.close() }
    try { receive("Remote sbt initialization failed") }
    catch { case e: java.net.SocketException => error("Remote sbt initialization failed") }
    p
  }
  // If the argument contains spaces, enclose it in quotes, escaping backslashes and quotes.
  // Bug fix: the previous regex-based replacements were no-ops -- after
  // Matcher replacement-string processing ("\\" -> "\", "\"" -> "\"") they
  // substituted each character for itself, so nothing was actually escaped.
  // Literal String.replace performs the intended quoting.
  def escape(argument: String) =
    if(argument.contains(" "))
      "\"" + argument.replace("\\", "\\\\").replace("\"", "\\\"") + "\""
    else argument
}
|
||||
|
|
@ -0,0 +1,94 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.test
|
||||
|
||||
import Scripted._
|
||||
import FileUtilities.wrapNull
|
||||
import java.io.File
|
||||
import java.net.URLClassLoader
|
||||
|
||||
/** Mixes scripted integration tests into a Scala project: the `test` action
 *  runs the scripted tests first, and `scripted-only` runs a chosen subset. */
trait ScalaScripted extends BasicScalaProject with Scripted with MavenStyleScalaPaths
{
  def sbtTests = sourcePath / SbtTestDirectoryName
  def scriptedDependencies = compile :: Nil
  lazy val scripted = scriptedTask(scriptedDependencies : _*)
  // The original test action, kept reachable without the scripted prerequisite.
  lazy val testNoScripted = super.testAction
  override def testAction = testNoScripted dependsOn(scripted)

  lazy val scriptedOnly = scriptedMethodTask(scriptedDependencies : _*)
}
|
||||
/** Scripted tests for sbt itself run against the locally published build,
 *  so `publish-local` replaces `compile` as the prerequisite. */
trait SbtScripted extends ScalaScripted
{
  override def scriptedDependencies = publishLocal :: Nil
}
|
||||
/** Identifies a single scripted test by its `group/name` directory layout. */
final case class ScriptedTest(group: String, name: String) extends NotNull
{
  override def toString = List(group, name).mkString("/")
}
|
||||
/** Core scripted-test support: discovers tests under `sbtTests` and builds
 *  the task graph that runs them through ScriptedTests. */
trait Scripted extends Project with MultiTaskProject
{
  def scriptedCompatibility = CompatibilityLevel.Minimal
  def scriptedDefScala = buildScalaVersion
  def scriptedSbt = projectVersion.value.toString
  def scriptedBufferLog = true

  /** Base directory containing the scripted test groups. */
  def sbtTests: Path
  def scriptedTask(dependencies: ManagedTask*) = dynamic(scriptedTests(listTests)) dependsOn(dependencies : _*)
  /** Method-task form: runs only the tests whose `group/name` matches the arguments. */
  def scriptedMethodTask(dependencies: ManagedTask*) = multiTask(listTests.map(_.toString).toList) { (args, includeFunction) =>
    scriptedTests(listTests.filter(test => includeFunction(test.toString)), dependencies : _*)
  }
  def listTests = (new ListTests(sbtTests.asFile, include _, log)).listTests
  /** Task graph: start marker -> one task per test -> completion marker. */
  def scriptedTests(tests: Seq[ScriptedTest], dependencies: ManagedTask*) =
  {
    val runner = new ScriptedTests(sbtTests.asFile, scriptedBufferLog, scriptedSbt, scriptedDefScala, scriptedCompatibility)

    val startTask = task { None } named("scripted-test-start") dependsOn(dependencies : _*)
    def scriptedTest(test: ScriptedTest) =
      task { runner.scriptedTest(test.group, test.name, log) } named test.toString dependsOn(startTask)
    val testTasks = tests.map(scriptedTest)
    task { None } named("scripted-test-complete") dependsOn(testTasks : _*)
  }
  // Removed dead code: a private `unwrapOption` helper was declared here but
  // never referenced, and being private it could not be used by subclasses.

  /** Filter hook: subclasses may override to skip specific tests. */
  def include(test: ScriptedTest) = true
}
|
||||
import scala.collection.mutable
|
||||
/** Constants and helpers shared by the scripted-test infrastructure. */
private[test] object Scripted
{
  val SbtTestDirectoryName = "sbt-test"
  /** Null-safe directory listing (listFiles returns null on I/O error). */
  def list(directory: File, filter: java.io.FileFilter) = wrapNull(directory.listFiles(filter))
}
|
||||
/** Discovers scripted tests: each non-hidden subdirectory of `baseDirectory`
 *  is a test group, and each non-hidden subdirectory of a group is one test.
 *  `accept` filters individual tests; rejected and empty entries are logged. */
private[test] final class ListTests(baseDirectory: File, accept: ScriptedTest => Boolean, log: Logger) extends NotNull
{
  def filter = DirectoryFilter -- HiddenFileFilter
  def listTests: Seq[ScriptedTest] =
  {
    list(baseDirectory, filter) flatMap { group =>
      val groupName = group.getName
      listTests(group).map(ScriptedTest(groupName, _))
    }
  }
  /** Accepted test names within one group, warning about empty or skipped entries. */
  private[this] def listTests(group: File): Set[String] =
  {
    val groupName = group.getName
    val entries = list(group, filter)
    if(entries.isEmpty)
    {
      log.warn("No tests in test group " + groupName)
      Set.empty
    }
    else
    {
      val (included, skipped) = entries.toList.partition(test => accept(ScriptedTest(groupName, test.getName)))
      if(included.isEmpty)
        log.warn("Test group " + groupName + " skipped.")
      else if(!skipped.isEmpty)
      {
        log.warn("Tests skipped in group " + group.getName + ":")
        skipped.foreach(testName => log.warn(" " + testName.getName))
      }
      Set( included.map(_.getName) : _*)
    }
  }
}
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
|
||||
package sbt.test
|
||||
|
||||
import java.io.File
|
||||
import java.nio.charset.Charset
|
||||
|
||||
import xsbt.IPC
|
||||
import xsbt.test.{CommentHandler, FileCommands, ScriptRunner, TestScriptParser}
|
||||
|
||||
/** Runs one scripted test at a time: copies the test's resource directory to a
 *  scratch location, fills in version properties, parses its `test` script,
 *  and executes it against a freshly forked sbt process. */
final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, sbtVersion: String, defScalaVersion: String, level: CompatibilityLevel.Value) extends NotNull
{
  private val testResources = new Resources(resourceBaseDirectory)

  /** Name of the script file inside each test directory. */
  val ScriptFilename = "test"

  /** Runs test `group`/`name` in a writable copy of its directory.
   *  Returns Some(error message) on failure, None on success. */
  def scriptedTest(group: String, name: String, log: Logger): Option[String] =
    testResources.readWriteResourceDirectory(group, name, log) { testDirectory =>
      scriptedTest(group + " / " + name, testDirectory, log).toLeft(())
    }.left.toOption
  private def scriptedTest(label: String, testDirectory: File, log: Logger): Option[String] =
    IPC.pullServer( scriptedTest0(label, testDirectory, log) )
  private def scriptedTest0(label: String, testDirectory: File, log: Logger)(server: IPC.Server): Option[String] =
  {
    FillProperties(testDirectory, sbtVersion, defScalaVersion, level)
    val buffered = new BufferedLogger(log)
    // When buffering, output is recorded and only replayed if the test fails.
    if(bufferLog)
      buffered.recordAll

    def runTest() =
    {
      // '$' lines are file commands, '>' lines go to the remote sbt, '#' are comments.
      val fileHandler = new FileCommands(testDirectory)
      val sbtHandler = new SbtHandler(testDirectory, buffered, server)
      val parser = new TestScriptParser(Map('$' -> fileHandler, '>' -> sbtHandler, '#' -> CommentHandler))
      val runner = new ScriptRunner
      runner(parser.parse(new File(testDirectory, ScriptFilename)))
    }

    try
    {
      runTest()
      buffered.info("+ " + label)
      None
    }
    catch
    {
      case e: xsbt.test.TestException =>
        // Expected test failure: replay buffered output and report the cause.
        buffered.playAll()
        buffered.error("x " + label)
        if(e.getCause eq null)
          buffered.error(" " + e.getMessage)
        else
          e.printStackTrace
        Some(e.toString)
      case e: Exception =>
        // Unexpected failure: replay output, then propagate.
        buffered.playAll()
        buffered.error("x " + label)
        throw e
    }
    finally { buffered.clearAll() }
  }
}
|
||||
|
||||
/** How broadly scripted tests cross-build against Scala versions.
 *  Declaration order is significant: Enumeration assigns positional ids. */
object CompatibilityLevel extends Enumeration
{
  val Full, Basic, Minimal, Minimal27, Minimal28 = Value
}
|
||||
/** Appends sbt/Scala version properties to a test project's build.properties
 *  so the scripted test runs against the versions under test. */
object FillProperties
{
  def apply(projectDirectory: File, sbtVersion: String, defScalaVersion: String, level: CompatibilityLevel.Value): Unit =
  {
    import xsbt.Paths._
    fill(projectDirectory / "project" / "build.properties", sbtVersion, defScalaVersion, getVersions(level))
  }
  /** Appends (second writer argument `true`) the extra properties, using
   *  ISO-8859-1 — the standard encoding for Java properties files. */
  def fill(properties: File, sbtVersion: String, defScalaVersion: String, buildScalaVersions: String)
  {
    val toAppend = extraProperties(sbtVersion, defScalaVersion, buildScalaVersions)
    xsbt.OpenResource.fileWriter(Charset.forName("ISO-8859-1"), true)(properties) { _.write(toAppend) }
  }
  /** Whitespace-separated Scala versions to cross-build for each level. */
  def getVersions(level: CompatibilityLevel.Value) =
  {
    import CompatibilityLevel._
    level match
    {
      case Full => "2.7.2 2.7.3 2.7.5 2.7.7 2.8.0.Beta1 2.8.0-SNAPSHOT"
      case Basic => "2.7.7 2.7.2 2.8.0.Beta1"
      case Minimal => "2.7.7 2.8.0.Beta1"
      case Minimal27 => "2.7.7"
      case Minimal28 => "2.8.0.Beta1"
    }
  }
  // NOTE: the XML literal is preserved verbatim -- its `.text` (including the
  // surrounding newlines) is exactly what gets appended to the properties file.
  def extraProperties(sbtVersion: String, defScalaVersion: String, buildScalaVersions: String) =
<x>
sbt.version={sbtVersion}
def.scala.version={defScalaVersion}
build.scala.versions={buildScalaVersions}
</x>.text
}
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
<plugin>
|
||||
<name>sbt-analyze</name>
|
||||
<classname>sbt.Analyzer</classname>
|
||||
</plugin>
|
||||
|
|
@ -0,0 +1,228 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** Record of a compilation's dependency information: which sources depend on
 *  which, the products each source generates, and the external (binary) files
 *  sources depend on.  Type parameters abstract over the representation of
 *  sources, products, and external dependencies. */
trait TaskAnalysis[Source, Product, External] extends NotNull
{
  import scala.collection.Set
  /** Persists this analysis; returns Some(error message) on failure. */
  def save(): Option[String]
  /** Restores the last persisted analysis; returns Some(error message) on failure. */
  def revert(): Option[String]
  /** Discards all recorded information. */
  def clear(): Unit

  def allSources: Set[Source]
  def allProducts: Set[Product]
  def allExternals: Set[External]

  /** Sources that `source` depends on, if `source` is known. */
  def sourceDependencies(source: Source): Option[Set[Source]]
  /** Products generated from `source`, if `source` is known. */
  def products(source: Source): Option[Set[Product]]
  /** Sources depending on the external file `external`, if it is known. */
  def externalDependencies(external: External): Option[Set[Source]]

  def addSource(source: Source): Unit
  def addExternalDependency(dependsOn: External, source: Source): Unit
  def addSourceDependency(dependsOn: Source, source: Source): Unit
  def addProduct(source: Source, product: Product): Unit

  def removeSource(source: Source): Unit
  /** Removes `source` from every dependency set it appears in. */
  def removeDependent(source: Source): Unit
  /** Removes and returns the dependencies recorded for `source`. */
  def removeDependencies(source: Source): Option[Set[Source]]
  def removeExternalDependency(external: External): Unit
}
|
||||
|
||||
import java.io.File
|
||||
import BasicAnalysis._
|
||||
import impl.MapUtilities.{add, all, read, mark, readOnlyIterable, write}
|
||||
import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set}
|
||||
|
||||
/** Tracks source dependencies, generated products, and external (jar/class file) dependencies
  * for the sources under `projectPath`, persisting each backing map to a file under `analysisPath`. */
class BasicAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends TaskAnalysis[Path, Path, File]
{
	private val sourceDependencyMap: Map[Path, Set[Path]] = new HashMap // source -> sources it depends on
	private val productMap: Map[Path, Set[Path]] = new HashMap // source -> class files it generates
	private val externalDependencyMap: Map[File, Set[Path]] = new HashMap // external file -> dependent sources

	// Existential aliases so the differently-typed maps above can be processed uniformly.
	final type AnyMapToSource = Map[K, Set[Path]] forSome {type K}
	final type AnySourceMap = Map[Path, T] forSome {type T}
	final type AnySourceSetMap = Map[Path, Set[T]] forSome {type T}
	final type AnyMap = Map[K, V] forSome { type K; type V }

	protected def mapsToClear = List[AnyMap](sourceDependencyMap, productMap, externalDependencyMap)
	protected def mapsToRemoveSource = List[AnySourceMap](sourceDependencyMap, productMap)
	protected def mapsToRemoveDependent = List[AnyMapToSource](sourceDependencyMap, externalDependencyMap)
	protected def mapsToMark = List[AnySourceSetMap](sourceDependencyMap, productMap)

	/** Empties every tracking map. */
	def clear()
	{
		mapsToClear.foreach(_.clear())
	}
	/** Deletes the products generated from `source` and drops `source` as a key from the source maps. */
	def removeSource(source: Path)
	{
		for(generated <- productMap.get(source))
			FileUtilities.clean(generated, true, log)
		mapsToRemoveSource.foreach(_ -= source)
	}
	/** Removes any recorded dependency of `source` on itself. */
	def removeSelfDependency(source: Path)
	{
		for(deps <- sourceDependencyMap.get(source))
			deps -= source
	}
	/** Removes `source` from every dependency set in which it appears as a dependent. */
	def removeDependent(source: Path)
	{
		for(m <- mapsToRemoveDependent; dependents <- m.values)
			dependents -= source
	}
	def removeDependencies(source: Path) = sourceDependencyMap.removeKey(source)
	def removeExternalDependency(dep: File) = externalDependencyMap.removeKey(dep.getAbsoluteFile)

	def externalDependencies(external: File) = externalDependencyMap.get(external.getAbsoluteFile)
	def sourceDependencies(source: Path) = sourceDependencyMap.get(source)
	/** All products of all given `sources`, in iteration order of `sources`. */
	def products(sources: Iterable[Path]): Iterable[Path] =
	{
		val collected = new ListBuffer[Path]
		for(s <- sources; generated <- productMap.get(s))
			collected ++= generated
		collected.readOnly
	}
	def products(source: Path) = productMap.get(source)

	def allSources = sourceDependencyMap.keySet
	def allProducts: Set[Path] = HashSet(flatten(productMap.values.toList) : _*)
	def allExternals = externalDependencyMap.keySet

	def allExternalDependencies = readOnlyIterable(externalDependencyMap)
	def allDependencies = readOnlyIterable(sourceDependencyMap)

	def addSourceDependency(on: Path, from: Path) = add(on, from, sourceDependencyMap)
	def addExternalDependency(on: File, from: Path) = add(on.getAbsoluteFile, from, externalDependencyMap)
	/** Records a dependency of `from` on the source that generated class file `on`, when that source is known. */
	def addProductDependency(on: Path, from: Path) =
	{
		for( (producer, _) <- productMap.find(_._2.contains(on)) )
			addSourceDependency(producer, from)
	}
	def addProduct(source: Path, file: Path) = add(source, file, productMap)
	/** Ensures `source` has (possibly empty) entries in the marked maps. */
	def addSource(source: Path) =
	{
		for(m <- mapsToMark)
			mark(source, m)
	}

	import Format._ // get implicits for data types
	implicit val path: Format[Path] = Format.path(projectPath)
	implicit val pathSet: Format[Set[Path]] = Format.set

	/** The maps persisted for this analysis, paired with their display labels and file names. */
	protected def backedMaps: Iterable[Backed[_,_]] =
		Backed(sourceDependencyMap, DependenciesLabel, DependenciesFileName) ::
		Backed(productMap, GeneratedLabel, GeneratedFileName) ::
		Backed(externalDependencyMap, ExternalDependenciesLabel, ExternalDependenciesFileName) ::
		Nil

	def revert() = load()
	private def loadBacked[Key,Value](b: Backed[Key,Value]) = read(b.map, analysisPath / b.name, log)(b.keyFormat, b.valueFormat)
	private def storeBacked[Key,Value](b: Backed[Key,Value]) = write(b.map, b.label, analysisPath / b.name, log)(b.keyFormat, b.valueFormat)
	/** Loads all backed maps, stopping at (and returning) the first error message. */
	final def load(): Option[String] = Control.lazyFold(backedMaps.toList)(b => loadBacked(b))
	/** Saves all backed maps, stopping at (and returning) the first error message. */
	final def save(): Option[String] = Control.lazyFold(backedMaps.toList)(b => storeBacked(b))
}
|
||||
/** File names and display labels for the persisted analysis maps, plus a convenience loader. */
object BasicAnalysis
{
	private def flatten(s: Iterable[Set[Path]]): Seq[Path] = s.flatMap(_.toSeq).toSeq

	val GeneratedFileName = "generated_files"
	val DependenciesFileName = "dependencies"
	val ExternalDependenciesFileName = "external"

	val GeneratedLabel = "Generated Classes"
	val DependenciesLabel = "Source Dependencies"
	val ExternalDependenciesLabel = "External Dependencies"

	/** Creates a new analysis rooted at the given paths and loads it,
	  * returning the error message on the left if loading fails. */
	def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, BasicAnalysis] =
	{
		val loaded = new BasicAnalysis(analysisPath, projectPath, log)
		loaded.load().toLeft(loaded)
	}
}
|
||||
/** File names and display labels for the compile-analysis maps, plus a convenience loader. */
object CompileAnalysis
{
	val HashesFileName = "hashes"
	val TestsFileName = "tests"
	val ApplicationsFileName = "applications"
	val ProjectDefinitionsName = "projects"

	val HashesLabel = "Source Hashes"
	val TestsLabel = "Tests"
	val ApplicationsLabel = "Classes with main methods"
	val ProjectDefinitionsLabel = "Project Definitions"

	/** Creates a new compile analysis rooted at the given paths and loads it,
	  * returning the error message on the left if loading fails. */
	def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, CompileAnalysis] =
	{
		val loaded = new CompileAnalysis(analysisPath, projectPath, log)
		loaded.load().toLeft(loaded)
	}
}
|
||||
import CompileAnalysis._
|
||||
import Format._ // get implicits for data types
|
||||
/** Extends the basic analysis with per-source content hashes and extracted public APIs. */
class BasicCompileAnalysis protected (analysisPath: Path, projectPath: Path, log: Logger) extends BasicAnalysis(analysisPath, projectPath, log)
{
	/*private */val hashesMap = new HashMap[Path, Array[Byte]] // source -> content hash
	val apiMap = new HashMap[Path, xsbti.api.Source] // source -> extracted public API (in-memory only)

	override protected def mapsToClear = apiMap :: hashesMap :: super.mapsToClear
	override protected def mapsToRemoveSource = apiMap :: hashesMap :: super.mapsToRemoveSource

	def setHash(source: Path, hash: Array[Byte]) { hashesMap(source) = hash }
	def clearHash(source: Path) { hashesMap.removeKey(source) }
	def hash(source: Path) = hashesMap.get(source)
	def clearHashes() { hashesMap.clear() }

	def setAPI(source: Path, a: xsbti.api.Source) { apiMap(source) = a }

	/** The class files generated from `sources`, relativized against `outputDirectory` where possible. */
	def getClasses(sources: PathFinder, outputDirectory: Path): PathFinder =
		Path.lazyPathFinder
		{
			val basePath = (outputDirectory ##)
			for(c <- products(sources.get)) yield
				Path.relativize(basePath, c).getOrElse(c)
		}

	implicit val stringSet: Format[Set[String]] = Format.set
	// Note: only the hashes map is persisted here; apiMap is not among the backed maps.
	override protected def backedMaps = Backed(hashesMap, HashesLabel, HashesFileName) :: super.backedMaps.toList
}
|
||||
/** Analysis for build-definition compilation: additionally tracks which sources define which project classes. */
private[sbt] final class BuilderCompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log)
{
	private val projectDefinitionMap = new HashMap[Path, Set[String]] // source -> project class names it defines
	override protected def mapsToClear = projectDefinitionMap :: super.mapsToClear
	override protected def mapsToRemoveSource = projectDefinitionMap :: super.mapsToRemoveSource
	def allProjects = all(projectDefinitionMap)
	def addProjectDefinition(source: Path, className: String) = add(source, className, projectDefinitionMap)

	override protected def backedMaps =
		Backed(projectDefinitionMap, ProjectDefinitionsLabel, ProjectDefinitionsName) ::
		super.backedMaps
}
|
||||
/** Full compile analysis: additionally tracks discovered tests and classes with main methods. */
class CompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log)
{
	private val testMap = new HashMap[Path, Set[TestDefinition]] // source -> tests defined in it
	private val applicationsMap = new HashMap[Path, Set[String]] // source -> main classes defined in it
	def allTests = all(testMap)
	def allApplications = all(applicationsMap)
	def addTest(source: Path, test: TestDefinition) = add(source, test, testMap)
	def addApplication(source: Path, className: String) = add(source, className, applicationsMap)

	/** Inverts `testMap`: maps each test class name to the source that defines it. */
	def testSourceMap: Map[String, Path] =
	{
		val inverted = new HashMap[String, Path]
		for( (definingSource, tests) <- testMap; t <- tests)
			inverted(t.testClassName) = definingSource
		inverted
	}

	override protected def mapsToClear = applicationsMap :: testMap :: super.mapsToClear
	override protected def mapsToRemoveSource = applicationsMap :: testMap :: super.mapsToRemoveSource

	implicit val testSet: Format[Set[TestDefinition]] = Format.set
	override protected def backedMaps =
		Backed(testMap, TestsLabel, TestsFileName) ::
		Backed(applicationsMap, ApplicationsLabel, ApplicationsFileName) ::
		super.backedMaps
}
|
||||
/** A map persisted to a properties file called `name`, written under the heading `label`.
  * `keyFormat` and `valueFormat` perform the (de)serialization of keys and values. */
final case class Backed[Key, Value](map: Map[Key, Value], label: String, name: String)(implicit val keyFormat: Format[Key], val valueFormat: Format[Value]) extends NotNull
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
|
||||
/** Registry that hands out integer IDs for callbacks so the compiler plugin can look them up later.
  * NOTE(review): registration is not synchronized — appears to assume single-threaded use; confirm with callers. */
object AnalysisCallback
{
	private val map = new scala.collection.mutable.HashMap[Int, AnalysisCallback]
	private var nextID: Int = 0
	/** Stores `callback` under a fresh ID and returns that ID. */
	def register(callback: AnalysisCallback): Int =
	{
		val assigned = nextID
		nextID += 1
		map(assigned) = callback
		assigned
	}
	/** Looks up a previously registered callback by its ID. */
	def apply(id: Int): Option[AnalysisCallback] = map.get(id)
	/** Forgets the callback registered under `id`. */
	def unregister(id: Int)
	{
		map -= id
	}
}
|
||||
|
||||
/** Receives notifications from the compiler as sources are analyzed. */
trait AnalysisCallback extends NotNull
{
	/** The names of classes that the analyzer should find subclasses of. */
	def superclassNames: Iterable[String]
	/** The base path for the project. */
	def basePath: Path
	/** Called when the given superclass could not be found on the classpath by the compiler. */
	def superclassNotFound(superclassName: String): Unit
	/** Called before the source at the given location is processed. */
	def beginSource(sourcePath: Path): Unit
	/** Called when a subclass of one of the classes given in <code>superclassNames</code> is discovered. */
	def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit
	/** Called to indicate that the source file <code>sourcePath</code> depends on the source file
	  * <code>dependsOnPath</code>. */
	def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit
	/** Called to indicate that the source file <code>sourcePath</code> depends on the jar
	  * <code>jarPath</code>. */
	def jarDependency(jarPath: File, sourcePath: Path): Unit
	/** Called to indicate that the source file <code>sourcePath</code> depends on the class file
	  * <code>classFile</code>. */
	def classDependency(classFile: File, sourcePath: Path): Unit
	/** Called to indicate that the source file <code>sourcePath</code> depends on the class file
	  * <code>classFile</code> that is a product of some source. This is really a source dependency,
	  * but the source corresponding to <code>classFile</code> was not included in the compilation,
	  * so the plugin only knows that the class file came from the output directory. */
	def productDependency(classFile: Path, sourcePath: Path): Unit
	/** Called to indicate that the source file <code>sourcePath</code> produces a class file at
	  * <code>modulePath</code>. */
	def generatedClass(sourcePath: Path, modulePath: Path): Unit
	/** Called after the source at the given location has been processed. */
	def endSource(sourcePath: Path): Unit
	/** Called when a module with a public 'main' method with the right signature is found. */
	def foundApplication(sourcePath: Path, className: String): Unit
	/** Provides the extracted public API of the given source. */
	def api(sourcePath: Path, source: xsbti.api.Source): Unit
}
|
||||
/** A callback that records every notification into the underlying `analysis` instance. */
abstract class BasicAnalysisCallback[A <: BasicCompileAnalysis](val basePath: Path, protected val analysis: A) extends AnalysisCallback
{
	def superclassNames: Iterable[String]
	def superclassNotFound(superclassName: String) {}

	def beginSource(sourcePath: Path): Unit = analysis.addSource(sourcePath)
	def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit = analysis.addSourceDependency(dependsOnPath, sourcePath)
	// Jar and raw class-file dependencies are both recorded as external dependencies.
	def jarDependency(jarFile: File, sourcePath: Path): Unit = analysis.addExternalDependency(jarFile, sourcePath)
	def classDependency(classFile: File, sourcePath: Path): Unit = analysis.addExternalDependency(classFile, sourcePath)
	def productDependency(classFile: Path, sourcePath: Path): Unit = analysis.addProductDependency(classFile, sourcePath)
	def generatedClass(sourcePath: Path, modulePath: Path): Unit = analysis.addProduct(sourcePath, modulePath)
	// Once a source is fully processed, drop any recorded dependency on itself.
	def endSource(sourcePath: Path): Unit = analysis.removeSelfDependency(sourcePath)
	def api(sourcePath: Path, source: xsbti.api.Source): Unit = analysis.setAPI(sourcePath, source)
}
|
||||
/** A callback that additionally records discovered main classes in the compile analysis. */
abstract class BasicCompileAnalysisCallback(basePath: Path, analysis: CompileAnalysis)
	extends BasicAnalysisCallback(basePath, analysis)
{
	def foundApplication(sourcePath: Path, className: String): Unit = analysis.addApplication(sourcePath, className)
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** Mixin that resolves compiler plugins through the compiler-plugin configuration
  * and passes each resolved jar to scalac via '-Xplugin'. */
trait AutoCompilerPlugins extends BasicScalaProject
{
	import Configurations.CompilerPlugin
	abstract override def extraDefaultConfigurations = CompilerPlugin :: super.extraDefaultConfigurations
	abstract override def compileOptions = compilerPlugins ++ super.compileOptions

	/** A PathFinder that provides the classpath to search for compiler plugins. */
	def pluginClasspath = fullClasspath(CompilerPlugin)
	/** One '-Xplugin:&lt;path&gt;' option per plugin jar found on `pluginClasspath`. */
	protected def compilerPlugins: List[CompileOption] =
		ClasspathUtilities.compilerPlugins(pluginClasspath.get).map(jar => new CompileOption("-Xplugin:" + jar.getAbsolutePath)).toList

	/** Maps `dependency` into the compiler-plugin configuration. */
	def compilerPlugin(dependency: ModuleID) = dependency % "plugin->default(compile)"
}
|
||||
|
|
@ -0,0 +1,648 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import scala.xml.NodeSeq
|
||||
import StringUtilities.{appendable,nonEmpty}
|
||||
import BasicManagedProject._
|
||||
|
||||
/** A project that provides a classpath. */
trait ClasspathProject extends Project
{
	/** The local classpath for this project. */
	def projectClasspath(config: Configuration): PathFinder

	/** Returns the classpath of this project and the classpaths of all dependencies for the
	  * given configuration: concatenates projectClasspath(config) for every project of type
	  * ClasspathProject in topologicalSort, de-duplicated while preserving order. */
	def fullClasspath(config: Configuration): PathFinder =
		Path.lazyPathFinder
		{
			// LinkedHashSet keeps insertion order while eliminating duplicates.
			val entries = new wrap.MutableSetWrapper(new java.util.LinkedHashSet[Path])
			for(p <- topologicalSort)
			{
				p match
				{
					case cp: ClasspathProject => entries ++= cp.projectClasspath(config).get
					case _ => ()
				}
			}
			entries.toList
		}
	/* Filter used to select dependencies for the classpath from managed and unmanaged directories.
	 * By default, it explicitly filters (x)sbt-launch(er)-<version>.jar, since it contains minified versions of various classes.*/
	def classpathFilter: FileFilter = "*.jar" - "*sbt-launch*.jar"
}
|
||||
/** Combines managed and manually-managed dependency handling into a single project classpath. */
trait BasicDependencyProject extends BasicManagedProject with UnmanagedClasspathProject
{
	/** This returns the classpath for only this project for the given configuration. */
	def projectClasspath(config: Configuration) = fullUnmanagedClasspath(config) +++ managedClasspath(config)
}
|
||||
/** A project that provides a directory in which jars can be manually managed.*/
trait UnmanagedClasspathProject extends ClasspathProject
{
	/** The location of the manually managed (unmanaged) dependency directory.*/
	def dependencyPath: Path
	/** The classpath containing all jars in the unmanaged directory
	  * (plus, for scratch projects, jars directly in the project root). */
	def unmanagedClasspath: PathFinder =
	{
		val jars = descendents(dependencyPath, classpathFilter)
		if(scratch)
			jars +++ (info.projectPath * classpathFilter)
		else
			jars
	}
	/** The classpath containing all unmanaged classpath elements for the given configuration.
	  * This typically includes at least 'unmanagedClasspath'. */
	def fullUnmanagedClasspath(config: Configuration): PathFinder
}
|
||||
|
||||
/** Task wrappers around the Ivy actions, giving them uniform error handling. */
trait IvyTasks extends Project
{
	/** Runs `action` as a task, converting resolution failures and other exceptions
	  * into task error messages (the exception string). */
	def ivyTask(action: => Unit) =
		task
		{
			try { action; None }
			catch {
				case e: ResolveException =>
					// resolution failures are expected; no stack trace, just the message
					log.error(e.toString)
					Some(e.toString)
				case e: Exception =>
					log.trace(e)
					log.error(e.toString)
					Some(e.toString)
			}
		}
	def updateTask(module: => IvySbt#Module, configuration: => UpdateConfiguration) =
		ivyTask { IvyActions.update(module, configuration) }

	def publishTask(module: => IvySbt#Module, publishConfiguration: => PublishConfiguration) =
		ivyTask
		{
			val publishConfig = publishConfiguration
			import publishConfig._
			// only publish the delivered Ivy file when the configuration asks for it
			val deliveredIvy = if(publishIvy) Some(deliveredPattern) else None
			IvyActions.publish(module, resolverName, srcArtifactPatterns, deliveredIvy, configurations)
		}
	def deliverTask(module: => IvySbt#Module, deliverConfiguration: => PublishConfiguration, quiet: Boolean) =
		ivyTask
		{
			val deliverConfig = deliverConfiguration
			import deliverConfig._
			IvyActions.deliver(module, status, deliveredPattern, extraDependencies, configurations, quiet)
		}
	def makePomTask(module: => IvySbt#Module, output: => Path, extraDependencies: => Iterable[ModuleID], pomExtra: => NodeSeq, configurations: => Option[Iterable[Configuration]]) =
		ivyTask { IvyActions.makePom(module, extraDependencies, configurations, pomExtra, output asFile) }

	def installTask(module: IvySbt#Module, from: Resolver, to: Resolver) =
		ivyTask { IvyActions.install(module, from.name, to.name) }

	def cleanCacheTask(ivySbt: => IvySbt) =
		ivyTask { IvyActions.cleanCache(ivySbt) }

	def cleanLibTask(managedDependencyPath: Path) =
		task { FileUtilities.clean(managedDependencyPath.get, log) }
}
|
||||
|
||||
/** A project that provides automatic dependency management.*/
trait ManagedProject extends ClasspathProject with IvyTasks
{
	/** This is the public ID of the project (used for publishing, for example) */
	def moduleID: String = normalizedName + appendable(crossScalaVersionString)
	/** This is the full public ID of the project (used for publishing, for example) */
	def projectID: ModuleID = ModuleID(organization, moduleID, version.toString).artifacts(artifacts.toSeq : _*)

	/** This is the default name for artifacts (such as jars) without any version string.*/
	def artifactID = moduleID
	/** This is the default name for artifacts (such as jars) including the version string.*/
	def artifactBaseName = artifactID + "-" + version.toString
	def artifacts: Iterable[Artifact]

	def managedDependencyPath: Path
	/** The managed classpath for the given configuration. This can be overridden to add jars from other configurations
	  * so that the Ivy 'extends' mechanism is not required. That way, the jars are only copied to one configuration.*/
	def managedClasspath(config: Configuration): PathFinder = configurationClasspath(config)
	/** All dependencies in the given configuration. */
	final def configurationClasspath(config: Configuration): PathFinder = descendents(configurationPath(config), classpathFilter)

	/** The base path to which dependencies in configuration 'config' are downloaded.*/
	def configurationPath(config: Configuration): Path = managedDependencyPath / config.toString

	import StringUtilities.nonEmpty
	/** Enables "org" % "name" % "rev" dependency syntax; the group ID must be non-empty. */
	implicit def toGroupID(groupID: String): GroupID =
	{
		nonEmpty(groupID, "Group ID")
		new GroupID(groupID, buildScalaVersion)
	}
	/** Enables "name" at "location" repository syntax; the repository name must be non-empty. */
	implicit def toRepositoryName(name: String): RepositoryName =
	{
		nonEmpty(name, "Repository name")
		new RepositoryName(name)
	}
	/** Enables configuration-mapping syntax (m % "config") on a module without configurations yet. */
	implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable =
	{
		require(m.configurations.isEmpty, "Configurations already specified for module " + m)
		new ModuleIDConfigurable(m)
	}

	/** Creates a new configuration with the given name.*/
	def config(name: String) = new Configuration(name)
}
|
||||
/** This trait groups the configuration required by the deliver and publish tasks. */
trait PublishConfiguration extends NotNull
{
	/** The name of the resolver to which publishing should be done.*/
	def resolverName: String
	/** The Ivy pattern used to determine the delivered Ivy file location. An example is
	  * (outputPath / "[artifact]-[revision].[ext]").relativePath. */
	def deliveredPattern: String
	/** Ivy patterns used to find artifacts for publishing. An example pattern is
	  * (outputPath / "[artifact]-[revision].[ext]").relativePath */
	def srcArtifactPatterns: Iterable[String]
	/** Additional dependencies to include for delivering/publishing only. These are typically dependencies on
	  * subprojects. */
	def extraDependencies: Iterable[ModuleID]
	/** The status to use when delivering or publishing. This might be "release" or "integration" or another valid Ivy status. */
	def status: String
	/** The configurations to include in the publish/deliver action: specify none for all configurations. */
	def configurations: Option[Iterable[Configuration]]
	/** True if the Ivy file should be published. */
	def publishIvy: Boolean
}
|
||||
/** The repository layout styles supported for dependency management and publishing. */
object ManagedStyle extends Enumeration
{
	val Maven, Ivy, Auto = Value
}
|
||||
import ManagedStyle.{Auto, Ivy, Maven, Value => ManagedType}
|
||||
trait BasicManagedProject extends ManagedProject with ReflectiveManagedProject with BasicDependencyPaths
|
||||
{
|
||||
def ivyUpdateConfiguration = new UpdateConfiguration(managedDependencyPath.asFile, outputPattern, true/*sync*/, true/*quiet*/)
|
||||
|
||||
def ivyRepositories: Seq[Resolver] =
|
||||
{
|
||||
val repos = repositories.toSeq
|
||||
if(repos.isEmpty) Nil else Resolver.withDefaultResolvers(repos)
|
||||
}
|
||||
def otherRepositories: Seq[Resolver] = defaultPublishRepository.toList
|
||||
def ivyValidate = true
|
||||
def ivyScala: Option[IvyScala] = Some(new IvyScala(buildScalaVersion, checkScalaInConfigurations, checkExplicitScalaDependencies, filterScalaJars))
|
||||
def ivyCacheDirectory: Option[Path] = None
|
||||
|
||||
def ivyPaths: IvyPaths = new IvyPaths(info.projectPath.asFile, ivyCacheDirectory.map(_.asFile))
|
||||
def inlineIvyConfiguration = new InlineIvyConfiguration(ivyPaths, ivyRepositories.toSeq, otherRepositories, moduleConfigurations.toSeq, Some(info.launcher.globalLock), log)
|
||||
def ivyConfiguration: IvyConfiguration =
|
||||
{
|
||||
val in = inlineIvyConfiguration
|
||||
def adapt(c: IvyConfiguration): IvyConfiguration = c.withBase(in.baseDirectory)
|
||||
def parentIvyConfiguration(default: IvyConfiguration)(p: Project) = p match { case b: BasicManagedProject => adapt(b.ivyConfiguration); case _ => default }
|
||||
if(in.resolvers.isEmpty)
|
||||
{
|
||||
if(in.moduleConfigurations.isEmpty && in.otherResolvers.isEmpty)
|
||||
{
|
||||
IvyConfiguration(in.paths, in.lock, in.log) match
|
||||
{
|
||||
case e: ExternalIvyConfiguration => e
|
||||
case i => info.parent map(parentIvyConfiguration(i)) getOrElse(i)
|
||||
}
|
||||
}
|
||||
else
|
||||
new InlineIvyConfiguration(in.paths, Resolver.withDefaultResolvers(Nil), in.otherResolvers, in.moduleConfigurations, in.lock, in.log)
|
||||
}
|
||||
else
|
||||
in
|
||||
}
|
||||
|
||||
def moduleSettings: ModuleSettings = defaultModuleSettings
|
||||
def byIvyFile(path: Path): IvyFileConfiguration = new IvyFileConfiguration(path.asFile, ivyScala, ivyValidate)
|
||||
def byPom(path: Path): PomConfiguration = new PomConfiguration(path.asFile, ivyScala, ivyValidate)
|
||||
/** The settings that represent inline declarations. The default settings combines the information
|
||||
* from 'ivyXML', 'projectID', 'repositories', ivyConfigurations, defaultConfiguration,
|
||||
* ivyScala, and 'libraryDependencies' and does not typically need to be be overridden. */
|
||||
def inlineSettings = new InlineConfiguration(projectID, withCompat, ivyXML, ivyConfigurations, defaultConfiguration, ivyScala, ivyValidate)
|
||||
/** Library dependencies with extra dependencies for compatibility*/
|
||||
private def withCompat =
|
||||
{
|
||||
val deps = libraryDependencies
|
||||
deps ++ compatExtra(deps)
|
||||
}
|
||||
/** Determines extra libraries needed for compatibility. Currently, this is the compatibility test framework. */
|
||||
private def compatExtra(deps: Set[ModuleID]) =
|
||||
if(isScala27 && deps.exists(requiresCompat)) { log.debug("Using compatibility implementation of test interface."); compatTestFramework } else Nil
|
||||
/** True if the given dependency requires the compatibility test framework. */
|
||||
private def requiresCompat(m: ModuleID) =
|
||||
{
|
||||
def nameMatches(name: String, id: String) = name == id || name.startsWith(id + "_2.7.")
|
||||
|
||||
(nameMatches(m.name, "scalacheck") && Set("1.5", "1.6").contains(m.revision)) ||
|
||||
(nameMatches(m.name, "specs") && Set("1.6.0", "1.6.1").contains(m.revision)) ||
|
||||
(nameMatches(m.name, "scalatest") && m.revision == "1.0")
|
||||
}
|
||||
/** Extra dependencies to add if a dependency on an older test framework (one released before the uniform test interface) is declared.
|
||||
* This is the compatibility test framework by default.*/
|
||||
def compatTestFramework = Set("org.scala-tools.sbt" %% "test-compat" % "0.4.1" % "test")
|
||||
|
||||
def defaultModuleSettings: ModuleSettings =
|
||||
{
|
||||
val in = inlineSettings
|
||||
if(in.configurations.isEmpty)
|
||||
{
|
||||
if(in.dependencies.isEmpty && in.ivyXML.isEmpty && (in.module.explicitArtifacts.size <= 1) && in.configurations.isEmpty)
|
||||
externalSettings
|
||||
else if(useDefaultConfigurations)
|
||||
in withConfigurations ( Configurations.defaultMavenConfigurations )
|
||||
else
|
||||
in
|
||||
}
|
||||
else
|
||||
in
|
||||
}
|
||||
def externalSettings = ModuleSettings(ivyScala, ivyValidate, projectID)(info.projectPath.asFile, log)
|
||||
|
||||
def ivySbt: IvySbt = new IvySbt(ivyConfiguration)
|
||||
def ivyModule: IvySbt#Module = newIvyModule(moduleSettings)
|
||||
def newIvyModule(moduleSettings: ModuleSettings): IvySbt#Module =
|
||||
{
|
||||
val i = ivySbt
|
||||
new i.Module(moduleSettings)
|
||||
}
|
||||
|
||||
|
||||
/** The pattern for Ivy to use when retrieving dependencies into the local project. Classpath management
|
||||
* depends on the first directory being [conf] and the extension being [ext].*/
|
||||
def outputPattern = "[conf]/[artifact](-[revision])(-[classifier]).[ext]"
|
||||
/** Override this to specify the publications, configurations, and/or dependencies sections of an Ivy file.
|
||||
* See http://code.google.com/p/simple-build-tool/wiki/LibraryManagement for details.*/
|
||||
def ivyXML: NodeSeq = NodeSeq.Empty
|
||||
def pomExtra: NodeSeq = NodeSeq.Empty
|
||||
def expandedIvyConfigurations =
|
||||
{
|
||||
val confs = ivyConfigurations
|
||||
if(confs.isEmpty) Configurations.defaultMavenConfigurations else confs
|
||||
}
|
||||
override def ivyConfigurations: Iterable[Configuration] =
|
||||
{
|
||||
val reflective = super.ivyConfigurations
|
||||
val extra = extraDefaultConfigurations
|
||||
if(useDefaultConfigurations)
|
||||
{
|
||||
if(reflective.isEmpty && extra.isEmpty)
|
||||
Nil
|
||||
else
|
||||
Configurations.removeDuplicates(Configurations.defaultMavenConfigurations ++ reflective ++ extra)
|
||||
}
|
||||
else
|
||||
reflective ++ extra
|
||||
}
|
||||
def extraDefaultConfigurations: List[Configuration] = Nil
|
||||
def useIntegrationTestConfiguration = false
|
||||
def defaultConfiguration: Option[Configuration] = Some(Configurations.DefaultConfiguration(useDefaultConfigurations))
|
||||
def useMavenConfigurations = true // TODO: deprecate after going through a minor version series to verify that this works ok
|
||||
def useDefaultConfigurations = useMavenConfigurations
|
||||
def managedStyle: ManagedType =
|
||||
info.parent match
|
||||
{
|
||||
case Some(m: BasicManagedProject) => m.managedStyle
|
||||
case _ => Auto
|
||||
}
|
||||
protected implicit final val defaultPatterns: Patterns =
|
||||
{
|
||||
managedStyle match
|
||||
{
|
||||
case Maven => Resolver.mavenStylePatterns
|
||||
case Ivy => Resolver.ivyStylePatterns
|
||||
case Auto => Resolver.defaultPatterns
|
||||
}
|
||||
}
|
||||
|
||||
def updateModuleSettings = moduleSettings
|
||||
def updateIvyModule = newIvyModule(updateModuleSettings)
|
||||
def deliverModuleSettings = moduleSettings.noScala
|
||||
def deliverIvyModule = newIvyModule(deliverModuleSettings)
|
||||
def publishModuleSettings = deliverModuleSettings
|
||||
def publishIvyModule = newIvyModule(publishModuleSettings)
|
||||
/** True if the 'provided' configuration should be included on the 'compile' classpath. The default value is true.*/
|
||||
def includeProvidedWithCompile = true
|
||||
/** True if the default implicit extensions should be used when determining classpaths. The default value is true. */
|
||||
def defaultConfigurationExtensions = true
|
||||
/** If true, verify that explicit dependencies on Scala libraries use the same version as scala.version. */
|
||||
def checkExplicitScalaDependencies = true
|
||||
/** If true, filter dependencies on scala-library and scala-compiler. This is true by default to avoid conflicts with
|
||||
* the jars provided by sbt. You can set this to false to download these jars. Overriding checkScalaInConfigurations might
|
||||
* be more appropriate, however.*/
|
||||
def filterScalaJars = true
|
||||
/** The configurations to check/filter.*/
|
||||
def checkScalaInConfigurations: Iterable[Configuration] =
|
||||
{
|
||||
val all = ivyConfigurations
|
||||
if(all.isEmpty)
|
||||
Configurations.defaultMavenConfigurations
|
||||
else
|
||||
all
|
||||
}
|
||||
def defaultPublishRepository: Option[Resolver] =
|
||||
{
|
||||
reflectiveRepositories.get(PublishToName) orElse
|
||||
info.parent.flatMap
|
||||
{
|
||||
case managed: BasicManagedProject => managed.defaultPublishRepository
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
/** Includes the Provided configuration on the Compile classpath, the Compile configuration on the Runtime classpath,
|
||||
* and Compile and Runtime on the Test classpath. Including Provided can be disabled by setting
|
||||
* includeProvidedWithCompile to false. Including Compile and Runtime can be disabled by setting
|
||||
* defaultConfigurationExtensions to false.*/
|
||||
override def managedClasspath(config: Configuration) =
|
||||
{
|
||||
import Configurations.{Compile, CompilerPlugin, Default, Provided, Runtime, Test}
|
||||
val baseClasspath = configurationClasspath(config)
|
||||
config match
|
||||
{
|
||||
case Compile =>
|
||||
val baseCompileClasspath = baseClasspath +++ managedClasspath(Default)
|
||||
if(includeProvidedWithCompile)
|
||||
baseCompileClasspath +++ managedClasspath(Provided)
|
||||
else
|
||||
baseCompileClasspath
|
||||
case Runtime if defaultConfigurationExtensions => baseClasspath +++ managedClasspath(Compile)
|
||||
case Test if defaultConfigurationExtensions => baseClasspath +++ managedClasspath(Runtime)
|
||||
case _ => baseClasspath
|
||||
}
|
||||
}
|
||||
|
||||
protected def updateAction = updateTask(updateIvyModule, ivyUpdateConfiguration) describedAs UpdateDescription
|
||||
protected def cleanLibAction = cleanLibTask(managedDependencyPath) describedAs CleanLibDescription
|
||||
protected def cleanCacheAction = cleanCacheTask(ivySbt) describedAs CleanCacheDescription
|
||||
|
||||
/** The module IDs to record as dependencies in delivered metadata: the projectID of every
* inter-project dependency that is a ManagedProject, plus the Scala jar modules when
* filterScalaJars is enabled (they were filtered from the resolved dependencies). */
protected def deliverProjectDependencies: Iterable[ModuleID] =
{
  val interDependencies = new scala.collection.mutable.ListBuffer[ModuleID]
  dependencies.foreach(dep => dep match { case mp: ManagedProject => interDependencies += mp.projectID; case _ => () })
  if(filterScalaJars)
    interDependencies ++= deliverScalaDependencies
  interDependencies.readOnly
}
|
||||
protected def deliverScalaDependencies: Iterable[ModuleID] = Nil
|
||||
protected def makePomAction = makePomTask(deliverIvyModule, pomPath, deliverProjectDependencies, pomExtra, None)
|
||||
protected def deliverLocalAction = deliverTask(deliverIvyModule, publishLocalConfiguration, true /*quiet*/)
|
||||
protected def publishLocalAction =
|
||||
{
|
||||
val dependencies = deliverLocal :: publishPomDepends
|
||||
publishTask(publishIvyModule, publishLocalConfiguration) dependsOn(dependencies : _*)
|
||||
}
|
||||
protected def publishLocalConfiguration = new DefaultPublishConfiguration("local", "release", true)
|
||||
protected def deliverAction = deliverTask(deliverIvyModule, publishConfiguration, true)
|
||||
protected def publishAction =
|
||||
{
|
||||
val dependencies = deliver :: publishPomDepends
|
||||
publishTask(publishIvyModule, publishConfiguration) dependsOn(dependencies : _*)
|
||||
}
|
||||
private def publishPomDepends = if(managedStyle == Maven) makePom :: Nil else Nil
|
||||
/** Configuration for the `publish` action: publishes to defaultPublishRepository with status
* "release". The Ivy file is only published when not using Maven style.
* Fails (via `error`) when no publish repository has been configured. */
protected def publishConfiguration =
{
  val repository = defaultPublishRepository.getOrElse(error("Repository to publish to not specified."))
  val publishIvy = managedStyle != Maven
  new DefaultPublishConfiguration(repository, "release", publishIvy)
}
|
||||
/** Default PublishConfiguration naming the resolver to publish to, the Ivy status, and whether
* the Ivy file itself is published. Auxiliary constructors accept a Resolver (using its name)
* and default publishIvy to true. */
protected class DefaultPublishConfiguration(val resolverName: String, val status: String, val publishIvy: Boolean) extends PublishConfiguration
{
  def this(resolver: Resolver, status: String, publishIvy: Boolean) = this(resolver.name, status, publishIvy)
  def this(resolverName: String, status: String) = this(resolverName, status, true)
  def this(resolver: Resolver, status: String) = this(resolver.name, status)

  // Ivy pattern locating the delivered ivy.xml under outputPath
  protected def deliveredPathPattern = outputPath / "[artifact]-[revision](-[classifier]).[ext]"
  def deliveredPattern = deliveredPathPattern.relativePath
  /** Ivy patterns used to locate the artifacts to publish, relative to the project. */
  def srcArtifactPatterns: Iterable[String] =
  {
    val pathPatterns =
      (outputPath / "[artifact]-[revision]-[type](-[classifier]).[ext]") ::
      (outputPath / "[artifact]-[revision](-[classifier]).[ext]") ::
      Nil
    pathPatterns.map(_.relativePath)
  }
  /** Dependencies to add beyond those resolved by Ivy (the inter-project dependencies). */
  def extraDependencies: Iterable[ModuleID] = deliverProjectDependencies
  /** The configurations to include in the publish/deliver action: specify none for all public configurations. */
  def configurations: Option[Iterable[Configuration]] = None
}
|
||||
|
||||
def packageToPublishActions: Seq[ManagedTask] = Nil
|
||||
|
||||
/** Applies `f` to every BasicManagedProject in this project's dependency graph, excluding this
* project itself (topologicalSort lists this project last, hence dropRight(1)). */
private[this] def depMap[T](f: BasicManagedProject => T) =
  topologicalSort.dropRight(1).flatMap { case m: BasicManagedProject => f(m) :: Nil; case _ => Nil }
|
||||
|
||||
lazy val update = updateAction
|
||||
lazy val makePom = makePomAction dependsOn(packageToPublishActions : _*)
|
||||
lazy val cleanLib = cleanLibAction
|
||||
lazy val cleanCache = cleanCacheAction
|
||||
// deliver must run after its dependencies' `publish` so that the artifacts produced by the dependencies can be resolved
|
||||
// (deliver requires a resolve first)
|
||||
lazy val deliverLocal: Task = deliverLocalAction dependsOn((depMap(_.publishLocal) ++ packageToPublishActions) : _*)
|
||||
lazy val publishLocal: Task = publishLocalAction
|
||||
lazy val deliver: Task = deliverAction dependsOn((depMap(_.publish) ++ packageToPublishActions) : _*)
|
||||
lazy val publish: Task = publishAction
|
||||
}
|
||||
|
||||
/** Task descriptions and reserved repository names used by BasicManagedProject. */
object BasicManagedProject
{
  val UpdateDescription =
    "Resolves and retrieves automatically managed dependencies."
  val CleanLibDescription =
    "Deletes the managed library directory."
  val CleanCacheDescription =
    "Deletes the cache of artifacts downloaded for automatically managed dependencies."

  // Reserved reflective repository names: a val named `publishTo`/`retrieveFrom` on a project
  // maps to these after camel-case transformation.
  val PublishToName = "publish-to"
  val RetrieveFromName = "retrieve-from"
}
|
||||
|
||||
/** Default concrete InstallProject using Maven-style source layout. */
class DefaultInstallProject(val info: ProjectInfo) extends InstallProject with MavenStyleScalaPaths with BasicDependencyProject
{
  def fullUnmanagedClasspath(config: Configuration) = unmanagedClasspath
  def dependencies = info.dependencies
}
|
||||
/** A project providing an `install` task that retrieves a module from one repository
* and publishes it to another. Repositories are configured reflectively via the
* `retrieve-from` and `publish-to` names. */
trait InstallProject extends BasicManagedProject
{
  // Scala dependencies are stripped: the installed module is used as-is.
  def installModuleSettings: ModuleSettings = moduleSettings.noScala
  def installIvyModule: IvySbt#Module = newIvyModule(installModuleSettings)

  lazy val install = installTask(installIvyModule, fromResolver, toResolver)
  def toResolver = reflectiveRepositories.get(PublishToName).getOrElse(error("No repository to publish to was specified"))
  def fromResolver = reflectiveRepositories.get(RetrieveFromName).getOrElse(error("No repository to retrieve from was specified"))
}
|
||||
|
||||
/** Standard locations for unmanaged libraries (`lib`), managed libraries (`lib_managed`),
* and the generated POM. Override the `*Name` methods to change directory names. */
trait BasicDependencyPaths extends ManagedProject
{
  import BasicDependencyPaths._
  def dependencyDirectoryName = DefaultDependencyDirectoryName
  def managedDirectoryName = DefaultManagedDirectoryName
  def pomName = artifactBaseName + PomExtension
  def dependencyPath = path(dependencyDirectoryName)
  // managed jars live under a cross-versioned subdirectory of the managed root
  def managedDependencyPath = crossPath(managedDependencyRootPath)
  def managedDependencyRootPath: Path = managedDirectoryName
  def pomPath = outputPath / pomName
}
|
||||
/** Default directory and file names used by BasicDependencyPaths. */
object BasicDependencyPaths
{
  val DefaultManagedDirectoryName = "lib_managed"
  val DefaultManagedSourceDirectoryName = "src_managed"
  val DefaultDependencyDirectoryName = "lib"
  val PomExtension = ".pom"
}
|
||||
|
||||
/** Small helpers for normalizing and validating identifier-like strings. */
object StringUtilities
{
  /** Lowercases `s` and collapses each run of whitespace into a single '-'. */
  def normalize(s: String) =
  {
    val lowered = s.toLowerCase
    lowered.replaceAll("""\s+""", "-")
  }
  /** Requires that `s` is not empty or whitespace-only; `label` names the value in the error message. */
  def nonEmpty(s: String, label: String): Unit =
  {
    val trimmed = s.trim
    require(trimmed.length > 0, label + " cannot be empty.")
  }
  /** Returns `s` prefixed with '_' when nonempty, suitable for appending to an identifier. */
  def appendable(s: String) = if(s.length == 0) "" else "_" + s
}
|
||||
/** Starting point of the `"org" % "name" % "rev"` dependency DSL: holds the group ID and the
* running Scala version (empty when sbt was not started by its launcher). */
final class GroupID private[sbt] (groupID: String, scalaVersion: String) extends NotNull
{
  def % (artifactID: String) = groupArtifact(artifactID)
  /** Like %, but appends the cross-version suffix ("_" + scalaVersion) to the artifact ID. */
  def %% (artifactID: String) =
  {
    require(!scalaVersion.isEmpty, "Cannot use %% when the sbt launcher is not used.")
    groupArtifact(artifactID + appendable(scalaVersion))
  }
  private def groupArtifact(artifactID: String) =
  {
    nonEmpty(artifactID, "Artifact ID")
    new GroupArtifactID(groupID, artifactID)
  }
}
|
||||
/** Second stage of the dependency DSL: pairs group and artifact, awaiting a revision. */
final class GroupArtifactID private[sbt] (groupID: String, artifactID: String) extends NotNull
{
  /** Completes the module ID with the given (non-empty) revision. */
  def % (revision: String): ModuleID =
  {
    nonEmpty(revision, "Revision")
    ModuleID(groupID, artifactID, revision, None)
  }
}
|
||||
/** Final stage of the dependency DSL: attaches an Ivy configurations mapping to a ModuleID
* (e.g. `... % "test"`). */
final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID) extends NotNull
{
  def % (configurations: String): ModuleID =
  {
    nonEmpty(configurations, "Configurations")
    import moduleID._
    // rebuild the ModuleID with the configurations string set
    ModuleID(organization, name, revision, Some(configurations))
  }
}
|
||||
/** Repository DSL: `"name" at "url"` builds a MavenRepository. */
final class RepositoryName private[sbt] (name: String) extends NotNull
{
  def at (location: String) =
  {
    nonEmpty(location, "Repository location")
    new MavenRepository(name, location)
  }
}
|
||||
|
||||
import scala.collection.{Map, mutable}
|
||||
/** A Project that determines its tasks by reflectively finding all vals with a type
* that conforms to Task.*/
trait ReflectiveTasks extends Project
{
  def tasks: Map[String, Task] = reflectiveTaskMappings
  // keys are camelCase val names transformed to hyphenated command names
  def reflectiveTaskMappings : Map[String, Task] = Reflective.reflectiveMappings[Task](this)
}
|
||||
/** A Project that determines its method tasks by reflectively finding all vals with a type
* that conforms to MethodTask.*/
trait ReflectiveMethods extends Project
{
  def methods: Map[String, MethodTask] = reflectiveMethodMappings
  def reflectiveMethodMappings : Map[String, MethodTask] = Reflective.reflectiveMappings[MethodTask](this)
}
|
||||
/** A Project that determines its dependencies on other projects by reflectively
* finding all vals with a type that conforms to Project.*/
trait ReflectiveModules extends Project
{
  override def subProjects: Map[String, Project] = reflectiveModuleMappings
  def reflectiveModuleMappings : Map[String, Project] = Reflective.reflectiveMappings[Project](this)
}
|
||||
/** A Project that determines its dependencies on other projects by reflectively
* finding all vals with a type that conforms to Project and determines its tasks
* by reflectively finding all vals with a type that conforms to Task.*/
trait ReflectiveProject extends ReflectiveModules with ReflectiveTasks with ReflectiveMethods
|
||||
|
||||
/** This Project subclass is used to contain other projects as dependencies.*/
class ParentProject(val info: ProjectInfo) extends BasicDependencyProject with Cleanable
{
  // declared dependencies plus reflectively-discovered subprojects
  def dependencies: Iterable[Project] = info.dependencies ++ subProjects.values.toList
  /** The directories to which a project writes are listed here and is used
  * to check a project and its dependencies for collisions.*/
  override def outputDirectories = managedDependencyPath :: outputPath :: Nil
  def fullUnmanagedClasspath(config: Configuration) = unmanagedClasspath
}
|
||||
|
||||
/** Shared implementation of the reflective discovery used by the Reflective* traits. */
object Reflective
{
  /** Finds all vals on `obj` whose type conforms to T and maps their names
  * (camelCase transformed to hyphenated form) to their values. */
  def reflectiveMappings[T](obj: AnyRef)(implicit m: scala.reflect.Manifest[T]): Map[String, T] =
  {
    val mappings = new mutable.HashMap[String, T]
    for ((name, value) <- ReflectUtilities.allVals[T](obj))
      mappings(ReflectUtilities.transformCamelCase(name, '-')) = value
    mappings
  }
}
|
||||
|
||||
/** A Project that determines its library dependencies by reflectively finding all vals with a type
* that conforms to ModuleID.*/
trait ReflectiveLibraryDependencies extends ManagedProject
{
  // this project's own ID is excluded so a project never depends on itself
  def excludeIDs: Iterable[ModuleID] = projectID :: Nil
  /** Defines the library dependencies of this project. By default, this finds vals of type ModuleID defined on the project.
  * This can be overridden to directly provide dependencies */
  def libraryDependencies: Set[ModuleID] = reflectiveLibraryDependencies
  def reflectiveLibraryDependencies : Set[ModuleID] = Set[ModuleID](Reflective.reflectiveMappings[ModuleID](this).values.toList: _*) -- excludeIDs
}
|
||||
|
||||
/** A Project that determines its Ivy configurations by reflectively finding all vals
* with a type that conforms to Configuration, removing duplicates. */
trait ReflectiveConfigurations extends Project
{
  def ivyConfigurations: Iterable[Configuration] = reflectiveIvyConfigurations
  def reflectiveIvyConfigurations: Set[Configuration] = Configurations.removeDuplicates(Reflective.reflectiveMappings[Configuration](this).values.toList)
}
|
||||
/** A Project that determines its published artifacts by reflectively finding all vals
* with a type that conforms to Artifact. A POM artifact is added when using Maven style. */
trait ReflectiveArtifacts extends ManagedProject
{
  def managedStyle: ManagedType
  def artifacts: Set[Artifact] =
  {
    val reflective = reflectiveArtifacts
    managedStyle match
    {
      // Maven requires the POM to be an explicit artifact
      case Maven => reflective ++ List(Artifact(artifactID, "pom", "pom"))
      case Ivy => reflective
      case Auto => reflective
    }
  }
  def reflectiveArtifacts: Set[Artifact] = Set(Reflective.reflectiveMappings[Artifact](this).values.toList: _*)
}
|
||||
/** A Project that determines its repositories and module configurations by reflectively finding
* all vals with a type that conforms to Resolver or ModuleConfiguration, combining them with
* the parent project's (if any). */
trait ReflectiveRepositories extends Project
{
  def repositories: Set[Resolver] =
  {
    // the reserved `publish-to` entry is a publish target, not a resolution repository
    val reflective = Set[Resolver]() ++ reflectiveRepositories.toList.flatMap { case (PublishToName, _) => Nil; case (_, value) => List(value) }
    info.parent match
    {
      case Some(p: ReflectiveRepositories) => p.repositories ++ reflective
      case None => reflective
    }
  }
  def reflectiveRepositories: Map[String, Resolver] = Reflective.reflectiveMappings[Resolver](this)

  def moduleConfigurations: Set[ModuleConfiguration] =
  {
    val reflective = Set[ModuleConfiguration](reflectiveModuleConfigurations.values.toList: _*)
    info.parent match
    {
      case Some(p: ReflectiveRepositories) => p.moduleConfigurations ++ reflective
      case None => reflective
    }
  }
  def reflectiveModuleConfigurations: Map[String, ModuleConfiguration] = Reflective.reflectiveMappings[ModuleConfiguration](this)
}
|
||||
|
||||
/** Convenience trait combining all reflective discovery: subprojects, tasks, methods, artifacts, repositories, library dependencies, and configurations. */
trait ReflectiveManagedProject extends ReflectiveProject with ReflectiveArtifacts with ReflectiveRepositories with ReflectiveLibraryDependencies with ReflectiveConfigurations
|
||||
|
|
@ -0,0 +1,234 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah, David MacIver
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import BasicProjectPaths._
|
||||
|
||||
/** Base class for projects used internally by sbt itself (builder and plugin projects):
* quieter logging, no history, no tasks, no cross paths, no output-directory checks. */
sealed abstract class InternalProject extends Project
{
  override def defaultLoggingLevel = Level.Warn
  override final def historyPath = None
  override def tasks: Map[String, Task] = Map.empty
  override final protected def disableCrossPaths = false
  override final def shouldCheckOutputDirectories = false
}
|
||||
/** Common machinery for compiling a project (or plugin) definition: conditional compilation
* with analysis that records which compiled classes extend Project. */
private sealed abstract class BasicBuilderProject extends InternalProject
{
  def sourceFilter = "*.scala" | "*.java"
  def jarFilter: NameFilter = "*.jar"
  def compilePath = outputPath / DefaultMainCompileDirectoryName
  def mainResourcesPath = path(DefaultResourcesDirectoryName)
  def dependencyPath = path(DefaultDependencyDirectoryName)
  def libraries = descendents(dependencyPath, jarFilter)
  override final def dependencies = Nil

  /** Logs the messages at Info level even though this project's default level is Warn. */
  protected final def logInfo(messages: String*): Unit = atInfo { messages.foreach(message => log.info(message)) }
  /** Temporarily raises the log level to Info while running `action`, restoring it afterwards. */
  protected final def atInfo(action: => Unit)
  {
    val oldLevel = log.getLevel
    log.setLevel(Level.Info)
    action
    log.setLevel(oldLevel)
  }

  def projectClasspath = compilePath +++ libraries +++ sbtJars
  def sbtJars = info.sbtClasspath

  /** Compile settings shared by the definition and plugin compilations. */
  abstract class BuilderCompileConfiguration extends AbstractCompileConfiguration
  {
    def projectPath = info.projectPath
    def log = BasicBuilderProject.this.log
    def options = CompileOptions.Deprecation :: CompileOptions.Unchecked :: Nil
    def javaOptions = Nil
    def maxErrors = ScalaProject.DefaultMaximumCompileErrors
    def compileOrder = CompileOrder.Mixed
  }
  /** Configuration for compiling the project definition sources (project root and its src/). */
  def definitionCompileConfiguration =
    new BuilderCompileConfiguration
    {
      def label = "builder"
      def sourceRoots = info.projectPath +++ path(DefaultSourceDirectoryName)
      def sources = (info.projectPath * sourceFilter) +++ path(DefaultSourceDirectoryName).descendentsExcept(sourceFilter, defaultExcludes)
      def outputDirectory = compilePath
      def classpath = projectClasspath
      def analysisPath = outputPath / DefaultMainAnalysisDirectoryName
    }

  /** Human-readable kind of definition being built (used in log messages). */
  def tpe: String

  import xsbt.ScalaInstance

  lazy val definitionCompileConditional = new BuilderCompileConditional(definitionCompileConfiguration, buildCompiler, tpe)
  /** Conditional compilation that records Project subclasses found during compilation. */
  final class BuilderCompileConditional(config: BuilderCompileConfiguration, compiler: xsbt.AnalyzingCompiler, tpe: String) extends AbstractCompileConditional(config, compiler)
  {
    type AnalysisType = BuilderCompileAnalysis
    override protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger) =
      new BuilderCompileAnalysis(analysisPath, projectPath, log)
    override protected def execute(cAnalysis: ConditionalAnalysis): Option[String] =
    {
      if(cAnalysis.dirtySources.isEmpty)
        None
      else
      {
        // something will be recompiled: notify subclasses before compiling
        definitionChanged()
        logInfo(
          "Recompiling " + tpe + "...",
          "\t" + cAnalysis.toString)
        super.execute(cAnalysis)
      }
    }
    protected def analysisCallback: AnalysisCallback =
      new BasicAnalysisCallback(info.projectPath, analysis)
      {
        def superclassNames = List(Project.ProjectClassName)
        def foundApplication(sourcePath: Path, className: String) {}
        // record each concrete (non-object) Project subclass as a candidate definition
        def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean)
        {
          if(superclassName == Project.ProjectClassName && !isModule)
          {
            log.debug("Found " + tpe + " " + subclassName)
            analysis.addProjectDefinition(sourcePath, subclassName)
          }
        }
      }
  }
  /** Hook invoked when the definition sources need recompiling; default does nothing. */
  protected def definitionChanged() {}
  lazy val compile = compileTask
  def compileTask = task { definitionCompileConditional.run }

  /** The discovered definition class name: Right(None) when none found, Right(Some(name))
  * for exactly one, Left(message) when multiple candidates exist. */
  def projectDefinition: Either[String, Option[String]] =
  {
    definitionCompileConditional.analysis.allProjects.toList match
    {
      case Nil =>
        log.debug("No " + tpe + "s detected using default project.")
        Right(None)
      case singleDefinition :: Nil => Right(Some(singleDefinition))
      case multipleDefinitions => Left(multipleDefinitions.mkString("Multiple " + tpe + "s detected: \n\t","\n\t","\n"))
    }
  }
  override final def methods = Map.empty
}
|
||||
/** The project definition used to build project definitions. */
private final class BuilderProject(val info: ProjectInfo, val pluginPath: Path, rawLogger: Logger) extends BasicBuilderProject
{
  // plugin sub-build only exists when the plugin definition directory is present
  lazy val pluginProject =
  {
    if(pluginPath.exists)
      Some(new PluginBuilderProject(ProjectInfo(pluginPath.asFile, Nil, None)(rawLogger, info.app, info.buildScalaVersion)))
    else
      None
  }
  override def projectClasspath = super.projectClasspath +++
    pluginProject.map(_.pluginClasspath).getOrElse(Path.emptyPathFinder)
  def tpe = "project definition"

  // plugins must be resolved, extracted, and compiled before the definition compiles against them
  override def compileTask = super.compileTask dependsOn(pluginProject.map(_.syncPlugins).toList : _*)

  /** Builds the plugin definition: resolves plugin modules, extracts their sources, and compiles them. */
  final class PluginBuilderProject(val info: ProjectInfo) extends BasicBuilderProject
  {
    // persisted flag: when true, the plugin pipeline is skipped entirely
    lazy val pluginUptodate = propertyOptional[Boolean](false)
    def tpe = "plugin definition"
    def managedSourcePath = path(BasicDependencyPaths.DefaultManagedSourceDirectoryName)
    def managedDependencyPath = crossPath(BasicDependencyPaths.DefaultManagedDirectoryName)
    // any change to the plugin definition invalidates the up-to-date flag
    override protected def definitionChanged() { setUptodate(false) }
    private def setUptodate(flag: Boolean)
    {
      pluginUptodate() = flag
      saveEnvironment()
    }

    /** Wraps `f` in a task that only runs when the plugin pipeline is not up to date. */
    private def pluginTask(f: => Option[String]) = task { if(!pluginUptodate.value) f else None }

    // pipeline: compile plugin definition -> update (resolve) -> extract sources -> sync (compile plugins)
    lazy val syncPlugins = pluginTask(sync()) dependsOn(extractSources)
    lazy val extractSources = pluginTask(extract()) dependsOn(update)
    lazy val update = pluginTask(loadAndUpdate()) dependsOn(compile)

    // a successful plugin compile marks the pipeline up to date
    private def sync() = pluginCompileConditional.run orElse { setUptodate(true); None }
    /** Cleans the managed source directory and unzips each resolved plugin jar's sources into it. */
    private def extract() =
    {
      FileUtilities.clean(managedSourcePath, log) orElse
        Control.lazyFold(plugins.get.toList) { jar =>
          Control.thread(FileUtilities.unzip(jar, extractTo(jar), sourceFilter, log)) { extracted =>
            if(!extracted.isEmpty)
              logInfo("\tExtracted source plugin " + jar + " ...")
            None
          }
        }
    }
    /** Loads the compiled plugin definition (a PluginDefinition) and runs its update task to resolve plugin modules. */
    private def loadAndUpdate() =
    {
      Control.thread(projectDefinition) {
        case Some(definition) =>
          logInfo("\nUpdating plugins...")
          val pluginInfo = ProjectInfo(info.projectPath.asFile, Nil, None)(rawLogger, info.app, info.buildScalaVersion)
          val pluginBuilder = Project.constructProject(pluginInfo, Project.getProjectClass[PluginDefinition](definition, projectClasspath, getClass.getClassLoader))
          pluginBuilder.projectName() = "Plugin builder"
          pluginBuilder.projectVersion() = OpaqueVersion("1.0")
          val result = pluginBuilder.update.run
          if(result.isEmpty)
          {
            atInfo {
              log.success("Plugins updated successfully.")
              log.info("")
            }
          }
          result
        case None => None
      }
    }
    /** Destination directory for a plugin jar's extracted sources: src_managed/<jar name without .jar>. */
    def extractTo(jar: Path) =
    {
      val name = jar.asFile.getName
      managedSourcePath / name.substring(0, name.length - ".jar".length)
    }
    def plugins = descendents(managedDependencyPath, jarFilter)
    def pluginClasspath: PathFinder = plugins +++ pluginCompileConfiguration.outputDirectory

    lazy val pluginCompileConditional = new BuilderCompileConditional(pluginCompileConfiguration, buildCompiler, "plugin")
    lazy val pluginCompileConfiguration =
      new BuilderCompileConfiguration
      {
        def label = "plugin builder"
        def sourceRoots = managedSourcePath
        def sources = descendents(sourceRoots, sourceFilter)
        def outputDirectory = outputPath / "plugin-classes"
        def classpath: PathFinder = pluginClasspath +++ sbtJars
        def analysisPath = outputPath / "plugin-analysis"
      }
  }
}
|
||||
/** Base class for the plugin definition in project/plugins: an internal managed project
* whose only task is `update` (to resolve declared plugin modules). */
class PluginDefinition(val info: ProjectInfo) extends InternalProject with BasicManagedProject
{
  override def defaultLoggingLevel = Level.Info
  override final def outputPattern = "[artifact](-[revision]).[ext]"
  override final val tasks = Map("update" -> update)
  override def projectClasspath(config: Configuration) = Path.emptyPathFinder
  override def dependencies = info.dependencies
}
|
||||
/** Project type for developing an sbt plugin: packaged as source, built with sbt's own
* Scala version, with sbt on the classpath. */
class PluginProject(info: ProjectInfo) extends DefaultProject(info)
{
  /* Since plugins are distributed as source, there is no need to append _<scala.version> */
  override def moduleID = normalizedName
  /* Fix the version used to build to the version currently running sbt. */
  override def buildScalaVersion = defScalaVersion.value
  /* Add sbt to the classpath */
  override def unmanagedClasspath = super.unmanagedClasspath +++ info.sbtClasspath
  /* Package the plugin as source. */
  override def packageAction = packageSrc dependsOn(test)
  override def packageSrcJar = jarPath
  /* Some setup to make publishing quicker to configure. */
  override def useMavenConfigurations = true
  override def managedStyle = ManagedStyle.Maven
}
|
||||
/** Project type for developing an sbt processor: built with sbt's own Scala version,
* with sbt on the classpath (like PluginProject, but packaged normally). */
class ProcessorProject(info: ProjectInfo) extends DefaultProject(info)
{
  /* Fix the version used to build to the version currently running sbt. */
  override def buildScalaVersion = defScalaVersion.value
  /* Add sbt to the classpath */
  override def unmanagedClasspath = super.unmanagedClasspath +++ info.sbtClasspath
  /* Some setup to make publishing quicker to configure. */
  override def useMavenConfigurations = true
  override def managedStyle = ManagedStyle.Maven
}
|
||||
|
|
@ -0,0 +1,213 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.{URI, URL, URLClassLoader}
|
||||
import java.util.Collections
|
||||
import scala.collection.Set
|
||||
import scala.collection.mutable.{HashSet, ListBuffer}
|
||||
|
||||
/** Utilities for converting paths to URL classpaths/class loaders and for querying
* classpath membership, including the compiler's boot and extension paths. */
object ClasspathUtilities
{
  def toClasspath(finder: PathFinder): Array[URL] = toClasspath(finder.get)
  def toClasspath(paths: Iterable[Path]): Array[URL] = paths.map(_.asURL).toSeq.toArray
  def toLoader(finder: PathFinder): ClassLoader = toLoader(finder.get)
  def toLoader(finder: PathFinder, parent: ClassLoader): ClassLoader = toLoader(finder.get, parent)
  def toLoader(paths: Iterable[Path]): ClassLoader = new URLClassLoader(toClasspath(paths), rootLoader)
  def toLoader(paths: Iterable[Path], parent: ClassLoader): ClassLoader = new URLClassLoader(toClasspath(paths), parent)

  /** The topmost class loader reachable from this class's loader (walks getParent to null). */
  lazy val rootLoader =
  {
    def parent(loader: ClassLoader): ClassLoader =
    {
      val p = loader.getParent
      if(p eq null) loader else parent(p)
    }
    parent(getClass.getClassLoader)
  }

  // debugging aid: prints which loader and location a class was loaded from
  private[sbt] def printSource(c: Class[_]) =
    println(c.getName + " loader=" + c.getClassLoader + " location=" + FileUtilities.classLocationFile(c))

  def isArchive(path: Path): Boolean = isArchive(path.asFile)
  def isArchive(file: File): Boolean = isArchiveName(file.getName)
  def isArchiveName(fileName: String) = fileName.endsWith(".jar") || fileName.endsWith(".zip")
  // Partitions the given classpath into (jars, directories)
  private[sbt] def separate(paths: Iterable[File]): (Iterable[File], Iterable[File]) = paths.partition(isArchive)
  // Partitions the given classpath into (jars, directories)
  private[sbt] def separatePaths(paths: Iterable[Path]) = separate(paths.map(_.asFile.getCanonicalFile))
  /** Builds (jar set, directory set) for membership tests, including boot/extension entries. */
  private[sbt] def buildSearchPaths(classpath: Iterable[Path]): (wrap.Set[File], wrap.Set[File]) =
  {
    val (jars, dirs) = separatePaths(classpath)
    (linkedSet(jars ++ extraJars.toList), linkedSet(dirs ++ extraDirs.toList))
  }
  /** True when `file` is one of the classpath jars or lies under one of the classpath directories. */
  private[sbt] def onClasspath(classpathJars: wrap.Set[File], classpathDirectories: wrap.Set[File], file: File): Boolean =
  {
    val f = file.getCanonicalFile
    if(ClasspathUtilities.isArchive(f))
      classpathJars.contains(f)
    else
      classpathDirectories.toList.find(Path.relativize(_, f).isDefined).isDefined
  }

  /** Returns all entries in 'classpath' that correspond to a compiler plugin.*/
  private[sbt] def compilerPlugins(classpath: Iterable[Path]): Iterable[File] =
  {
    // a compiler plugin jar is identified by a scalac-plugin.xml resource
    val loader = new URLClassLoader(classpath.map(_.asURL).toList.toArray)
    wrap.Wrappers.toList(loader.getResources("scalac-plugin.xml")).flatMap(asFile(true))
  }
  /** Converts the given URL to a File. If the URL is for an entry in a jar, the File for the jar is returned. */
  private[sbt] def asFile(url: URL): List[File] = asFile(false)(url)
  private[sbt] def asFile(jarOnly: Boolean)(url: URL): List[File] =
  {
    try
    {
      url.getProtocol match
      {
        case "file" if !jarOnly => FileUtilities.toFile(url) :: Nil
        case "jar" =>
          // strip the "!/entry" suffix to get the URI of the enclosing jar
          val path = url.getPath
          val end = path.indexOf('!')
          new File(new URI(if(end == -1) path else path.substring(0, end))) :: Nil
        case _ => Nil
      }
    }
    // deliberately best-effort: an unconvertible URL yields no files
    catch { case e: Exception => Nil }
  }

  // (boot + extension jars, boot directories) as reported by the Scala compiler's settings
  private lazy val (extraJars, extraDirs) =
  {
    import scala.tools.nsc.GenericRunnerCommand
    val settings = (new GenericRunnerCommand(Nil, message => error(message))).settings
    val bootPaths = FileUtilities.pathSplit(settings.bootclasspath.value).map(p => new File(p)).toList
    val (bootJars, bootDirs) = separate(bootPaths)
    val extJars =
    {
      val buffer = new ListBuffer[File]
      // recursively collects jars under an extension directory
      def findJars(dir: File)
      {
        buffer ++= dir.listFiles(new SimpleFileFilter(isArchive))
        for(dir <- dir.listFiles(DirectoryFilter))
          findJars(dir)
      }
      for(path <- FileUtilities.pathSplit(settings.extdirs.value); val dir = new File(path) if dir.isDirectory)
        findJars(dir)
      buffer.readOnly.map(_.getCanonicalFile)
    }
    (linkedSet(extJars ++ bootJars), linkedSet(bootDirs))
  }
  /** Creates a read-only, insertion-ordered set from `s`. */
  private def linkedSet[T](s: Iterable[T]): wrap.Set[T] =
  {
    val set = new wrap.MutableSetWrapper(new java.util.LinkedHashSet[T])
    set ++= s
    set.readOnly
  }
}
|
||||
|
||||
/** Base class loader that routes all loading through the abstract doLoadClass, after first
* checking for an already-loaded class, and resolves the class when requested. */
private abstract class LoaderBase(urls: Array[URL], parent: ClassLoader) extends URLClassLoader(urls, parent) with NotNull
{
  require(parent != null) // included because a null parent is legitimate in Java
  @throws(classOf[ClassNotFoundException])
  override final def loadClass(className: String, resolve: Boolean): Class[_] =
  {
    val loaded = findLoadedClass(className)
    val found =
      if(loaded == null)
        doLoadClass(className)
      else
        loaded

    if(resolve)
      resolveClass(found)
    found
  }
  /** Subclass hook deciding where the class comes from (self, parent, or elsewhere). */
  protected def doLoadClass(className: String): Class[_]
  /** Standard parent-first loading via URLClassLoader, for subclasses that want the default behavior. */
  protected final def selfLoadClass(className: String): Class[_] = super.loadClass(className, false)
}
|
||||
/** Loader that forces sbt classes to come from its own URLs instead of the parent. */
private class IntermediateLoader(urls: Array[URL], parent: ClassLoader) extends LoaderBase(urls, parent) with NotNull
{
  def doLoadClass(className: String): Class[_] =
  {
    // if this loader is asked to load an sbt class, it must be because the project we are building is sbt itself,
    // so we want to load the version of classes on the project classpath, not the parent
    if(className.startsWith(Loaders.SbtPackage))
      findClass(className)
    else
      selfLoadClass(className)
  }
}
|
||||
/** Delegates class loading to `parent` for all classes included by `filter`. An attempt to load classes excluded by `filter`
* results in a `ClassNotFoundException`.*/
private class FilteredLoader(parent: ClassLoader, filter: ClassFilter) extends ClassLoader(parent) with NotNull
{
  require(parent != null) // included because a null parent is legitimate in Java
  def this(parent: ClassLoader, excludePackages: Iterable[String]) = this(parent, new ExcludePackagesFilter(excludePackages))

  @throws(classOf[ClassNotFoundException])
  override final def loadClass(className: String, resolve: Boolean): Class[_] =
  {
    if(filter.include(className))
      super.loadClass(className, resolve)
    else
      throw new ClassNotFoundException(className)
  }
}
|
||||
/** Loads classes included by `filter` normally (parent-first from `urls`); excluded classes
* are loaded directly from `parent`, bypassing this loader's URLs. */
private class SelectiveLoader(urls: Array[URL], parent: ClassLoader, filter: ClassFilter) extends URLClassLoader(urls, parent) with NotNull
{
  require(parent != null) // included because a null parent is legitimate in Java
  def this(urls: Array[URL], parent: ClassLoader, includePackages: Iterable[String]) = this(urls, parent, new IncludePackagesFilter(includePackages))

  @throws(classOf[ClassNotFoundException])
  override final def loadClass(className: String, resolve: Boolean): Class[_] =
  {
    if(filter.include(className))
      super.loadClass(className, resolve)
    else
    {
      val loaded = parent.loadClass(className)
      if(resolve)
        resolveClass(loaded)
      loaded
    }
  }
}
|
||||
/** Predicate deciding whether a class name should be handled by a selective loader. */
private trait ClassFilter
{
  def include(className: String): Boolean
}
|
||||
/** ClassFilter base keyed on package prefixes; each prefix must end with '.'. */
private abstract class PackageFilter(packages: Iterable[String]) extends ClassFilter
{
  require(packages.forall(_.endsWith(".")))
  /** True when `className` lies in any of the configured packages (or subpackages). */
  protected final def matches(className: String): Boolean = packages.exists(className.startsWith)
}
|
||||
/** Includes every class except those in the given packages. */
private class ExcludePackagesFilter(exclude: Iterable[String]) extends PackageFilter(exclude)
{
  def include(className: String): Boolean = !matches(className)
}
|
||||
/** Accepts only classes in the given packages. */
private class IncludePackagesFilter(include: Iterable[String]) extends PackageFilter(include)
{
	def include(className: String): Boolean = matches(className)
}
|
||||
|
||||
/** Loads the runner class (and anything nested in it) from `urls`, loads sbt classes
* from `grandparent`, and delegates everything else to `parent`. */
private class LazyFrameworkLoader(runnerClassName: String, urls: Array[URL], parent: ClassLoader, grandparent: ClassLoader)
	extends LoaderBase(urls, parent) with NotNull
{
	def doLoadClass(className: String): Class[_] =
		if(Loaders.isNestedOrSelf(className, runnerClassName))
			findClass(className)
		else if(Loaders.isSbtClass(className))
			// we circumvent the parent loader because we know that we want the
			// version of sbt that is currently the builder (not the project being built)
			grandparent.loadClass(className)
		else
			parent.loadClass(className)
}
|
||||
/** Class-name helpers shared by the custom class loaders in this file. */
private object Loaders
{
	/** Prefix identifying classes that belong to sbt itself. */
	val SbtPackage = "sbt."
	/** True when `className` names `checkAgainst` itself or a class nested within it. */
	def isNestedOrSelf(className: String, checkAgainst: String) =
		className == checkAgainst || className.startsWith(checkAgainst + "$")
	/** True when `className` is in the sbt package. */
	def isSbtClass(className: String) = className.startsWith(SbtPackage)
}
|
||||
|
|
@ -0,0 +1,155 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import xsbt.{AnalyzingCompiler, CompileFailed, CompilerArguments, ScalaInstance}
|
||||
|
||||
/** The order in which Java and Scala sources are compiled. `Mixed` hands both
* kinds of sources to the Scala compiler together. */
object CompileOrder extends Enumeration
{
	val Mixed, JavaThenScala, ScalaThenJava = Value
}
|
||||
|
||||
/** Shared driver for tools that process Scala and Java sources together (the compiler
* and scaladoc). Subclasses supply the log messages and the actual per-language
* processing; this class partitions the sources and sequences the two steps. */
sealed abstract class CompilerCore
{
	/** Processes `sources` with no Java options and `Mixed` compile order. */
	final def apply(label: String, sources: Iterable[Path], classpath: Iterable[Path], outputDirectory: Path, scalaOptions: Seq[String], log: Logger): Option[String] =
		apply(label, sources, classpath, outputDirectory, scalaOptions, Nil, CompileOrder.Mixed, log)
	/** Processes `sources`, returning `None` on success or `Some(error)` on failure. */
	final def apply(label: String, sources: Iterable[Path], classpath: Iterable[Path], outputDirectory: Path, scalaOptions: Seq[String], javaOptions: Seq[String], order: CompileOrder.Value, log: Logger): Option[String] =
	{
		def withExtension(extension: String) = sources.filter(_.asFile.getName.endsWith(extension))
		def fileSet(paths: Iterable[Path]) = Set() ++ paths.map(_.asFile)
		// wrap an action so it is skipped (with a debug message) when it has no input
		def process(kind: String, input: Iterable[_], act: => Unit) =
			() => if(input.isEmpty) log.debug("No " + kind + " sources.") else act

		val javaSources = fileSet(withExtension(".java"))
		// in Mixed order, the Scala compiler also sees the Java sources
		val scalaSources = fileSet( if(order == CompileOrder.Mixed) sources else withExtension(".scala") )
		val classpathSet = fileSet(classpath)
		val scalaCompile = process("Scala", scalaSources, processScala(scalaSources, classpathSet, outputDirectory.asFile, scalaOptions, log) )
		val javaCompile = process("Java", javaSources, processJava(javaSources, classpathSet, outputDirectory.asFile, javaOptions, log))
		doCompile(label, sources, outputDirectory, order, log)(javaCompile, scalaCompile)
	}
	/** Creates the output directory and runs the two steps in the requested order,
	* returning `None` on success or `Some(error message)` on failure. */
	protected def doCompile(label: String, sources: Iterable[Path], outputDirectory: Path, order: CompileOrder.Value, log: Logger)(javaCompile: () => Unit, scalaCompile: () => Unit) =
	{
		log.info(actionStartMessage(label))
		if(sources.isEmpty)
		{
			log.info(actionNothingToDoMessage)
			None
		}
		else
		{
			FileUtilities.createDirectory(outputDirectory.asFile, log) orElse
			(try
			{
				val (first, second) = if(order == CompileOrder.JavaThenScala) (javaCompile, scalaCompile) else (scalaCompile, javaCompile)
				first()
				second()
				log.info(actionSuccessfulMessage)
				None
			}
			catch { case e: xsbti.CompileFailed => Some(e.toString) })
		}
	}
	def actionStartMessage(label: String): String
	def actionNothingToDoMessage: String
	def actionSuccessfulMessage: String
	protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
	protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
}
|
||||
|
||||
/** Supplies the standard compilation log messages for compiler-style processors. */
sealed abstract class CompilerBase extends CompilerCore
{
	def actionStartMessage(label: String) = "Compiling " + label + " sources..."
	val actionNothingToDoMessage = "Nothing to compile."
	val actionSuccessfulMessage = "Compilation successful."
}
|
||||
|
||||
// The following code is based on scala.tools.nsc.Main and scala.tools.nsc.ScalaDoc
|
||||
// Copyright 2005-2008 LAMP/EPFL
|
||||
// Original author: Martin Odersky
|
||||
|
||||
/** Compiles Scala sources through the analyzing compiler (reporting dependency
* information to `analysisCallback`) and Java sources through a forked `javac`. */
final class Compile(maximumErrors: Int, compiler: AnalyzingCompiler, analysisCallback: AnalysisCallback, baseDirectory: Path) extends CompilerBase with WithArgumentFile
{
	protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger)
	{
		// adapt the sbt callback (Path-based) to the xsbti interface (File-based)
		val callbackInterface = new AnalysisInterface(analysisCallback, baseDirectory, outputDirectory)
		compiler(Set() ++ sources, Set() ++ classpath, outputDirectory, options, callbackInterface, maximumErrors, log)
	}
	protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger)
	{
		val arguments = (new CompilerArguments(compiler.scalaInstance, false, compiler.compilerOnClasspath))(sources, classpath, outputDirectory, options)
		log.debug("Calling 'javac' with arguments:\n\t" + arguments.mkString("\n\t"))
		// javac reads its arguments from an @-file, avoiding command-line length limits
		def javac(argFile: File) = Process("javac", ("@" + normalizeSlash(argFile.getAbsolutePath)) :: Nil) ! log
		val exitCode = withArgumentFile(arguments)(javac)
		if( exitCode != 0 ) throw new CompileFailed(arguments.toArray, "javac returned nonzero exit code")
	}
}
|
||||
/** Support for passing a long argument list to an external tool via a temporary file. */
trait WithArgumentFile extends NotNull
{
	/** Writes `args`, one per line and space-escaped, to a temporary file and applies
	* `f` to that file; the file lives only for the duration of `f`. */
	def withArgumentFile[T](args: Seq[String])(f: File => T): T =
	{
		import xsbt.FileUtilities._
		withTemporaryDirectory { tmp =>
			val argFile = new File(tmp, "argfile")
			write(argFile, args.map(escapeSpaces).mkString(FileUtilities.Newline))
			f(argFile)
		}
	}
	// javac's argument file seems to allow naive space escaping with quotes.
	// escaping a quote with a backslash does not work
	def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"'
	def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')
}
|
||||
/** Generates API documentation via scaladoc; Java sources are ignored. */
final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends CompilerCore
{
	protected def processScala(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger): Unit =
		compiler.doc(sources, classpath, outputDirectory, options, maximumErrors, log)
	// scaladoc has nothing to do for Java sources
	protected def processJava(sources: Set[File], classpath: Set[File], outputDirectory: File, options: Seq[String], log: Logger) = ()

	def actionStartMessage(label: String) = "Generating API documentation for " + label + " sources..."
	val actionNothingToDoMessage = "No sources specified."
	val actionSuccessfulMessage = "API documentation generation successful."
	def actionUnsuccessfulMessage = "API documentation generation unsuccessful."
}
|
||||
/** Runs an interactive Scala interpreter session. */
final class Console(compiler: AnalyzingCompiler) extends NotNull
{
	/** Starts an interactive scala interpreter session with the given classpath.*/
	def apply(classpath: Iterable[Path], log: Logger): Option[String] =
		apply(classpath, "", log)
	/** Starts a session that first evaluates `initialCommands`. */
	def apply(classpath: Iterable[Path], initialCommands: String, log: Logger): Option[String] =
	{
		def console0 = compiler.console(Set() ++ classpath.map(_.asFile), initialCommands, log)
		// trap System.exit from interpreted code and restore the JLine terminal afterwards
		JLine.withJLine( Run.executeTrapExit(console0, log) )
	}
}
|
||||
|
||||
/** Adapts the sbt `AnalysisCallback` (which works with project-relative `Path`s) to the
* `xsbti.AnalysisCallback` interface (which reports plain `File`s). Source files are
* relativized against `basePath` and class files against `outputDirectory`. */
private final class AnalysisInterface(delegate: AnalysisCallback, basePath: Path, outputDirectory: File) extends xsbti.AnalysisCallback with NotNull
{
	val outputPath = Path.fromFile(outputDirectory)
	def superclassNames = delegate.superclassNames.toSeq.toArray[String]
	def superclassNotFound(superclassName: String) = delegate.superclassNotFound(superclassName)
	def beginSource(source: File) = delegate.beginSource(srcPath(source))
	def foundSubclass(source: File, subclassName: String, superclassName: String, isModule: Boolean) =
		delegate.foundSubclass(srcPath(source), subclassName, superclassName, isModule)
	def sourceDependency(dependsOn: File, source: File) =
		delegate.sourceDependency(srcPath(dependsOn), srcPath(source))
	def jarDependency(jar: File, source: File) = delegate.jarDependency(jar, srcPath(source))
	def generatedClass(source: File, clazz: File) = delegate.generatedClass(srcPath(source), classPath(clazz))
	def endSource(source: File) = delegate.endSource(srcPath(source))
	def foundApplication(source: File, className: String) = delegate.foundApplication(srcPath(source), className)
	def classDependency(clazz: File, source: File) =
	{
		val sourcePath = srcPath(source)
		Path.relativize(outputPath, clazz) match
		{
			case None =>
				// dependency is a class file outside of the output directory
				delegate.classDependency(clazz, sourcePath)
			case Some(relativeToOutput) =>
				// dependency is a product of a source not included in this compilation
				delegate.productDependency(relativeToOutput, sourcePath)
		}
	}
	/** Relativizes `file` against `base`, falling back to the absolute path. */
	def relativizeOrAbs(base: Path, file: File) = Path.relativize(base, file).getOrElse(Path.fromFile(file))
	def classPath(file: File) = relativizeOrAbs(outputPath, file)
	def srcPath(file: File) = relativizeOrAbs(basePath, file)
	def api(file: File, source: xsbti.api.Source) = delegate.api(srcPath(file), source)
}
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
package sbt
|
||||
|
||||
/** Commonly used scalac command-line options. */
object CompileOptions
{
	val Deprecation = "-deprecation"
	val Unchecked = "-unchecked"
	val Verbose = "-verbose"
}
|
||||
|
|
@ -0,0 +1,388 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import xsbt.AnalyzingCompiler
|
||||
|
||||
/** Implements conditional (incremental) processing: determines which sources must be
* reprocessed because they, a source they depend on, or an external dependency
* changed or disappeared; hands that analysis to `execute`; then saves or reverts
* the persisted analysis depending on success.
*
* @tparam Source the type identifying an input (e.g. a source Path)
* @tparam Product the type identifying an output (e.g. a class file Path)
* @tparam External the type identifying a dependency outside the project (e.g. a jar File) */
trait Conditional[Source, Product, External] extends NotNull
{
	type AnalysisType <: TaskAnalysis[Source, Product, External]
	val analysis: AnalysisType = loadAnalysis

	protected def loadAnalysis: AnalysisType
	protected def log: Logger

	// singular/plural product names used in log messages (e.g. "class"/"classes")
	protected def productType: String
	protected def productTypePlural: String

	protected def sourcesToProcess: Iterable[Source]

	protected def sourceExists(source: Source): Boolean
	protected def sourceLastModified(source: Source): Long

	protected def productExists(product: Product): Boolean
	protected def productLastModified(product: Product): Long

	/** For each external dependency, reports whether it is still available and its last-modified time. */
	protected def externalInfo(externals: Iterable[External]): Iterable[(External, ExternalInfo)]

	/** Performs the actual work on the analyzed sources, returning an error message on failure. */
	protected def execute(cAnalysis: ConditionalAnalysis): Option[String]

	final case class ExternalInfo(available: Boolean, lastModified: Long) extends NotNull
	/** The result of the change analysis handed to `execute`. */
	trait ConditionalAnalysis extends NotNull
	{
		def dirtySources: Iterable[Source]
		def cleanSources: Iterable[Source]
		def directlyModifiedSourcesCount: Int
		def invalidatedSourcesCount: Int
		def removedSourcesCount: Int
	}

	/** Runs the conditional task: analyze changes, execute, then save or revert the analysis. */
	final def run =
	{
		val result = execute(analyze)
		processingComplete(result.isEmpty)
		result
	}
	private def analyze =
	{
		import scala.collection.mutable.HashSet

		// drop analysis for sources that no longer exist
		val sourcesSnapshot = sourcesToProcess
		val removedSources = new HashSet[Source]
		removedSources ++= analysis.allSources
		removedSources --= sourcesSnapshot
		val removedCount = removedSources.size
		for(removed <- removedSources)
		{
			log.debug("Source " + removed + " removed.")
			analysis.removeDependent(removed)
		}

		// partition remaining sources into directly modified and unmodified
		val unmodified = new HashSet[Source]
		val modified = new HashSet[Source]

		for(source <- sourcesSnapshot)
		{
			if(isSourceModified(source))
			{
				log.debug("Source " + source + " directly modified.")
				modified += source
			}
			else
			{
				log.debug("Source " + source + " unmodified.")
				unmodified += source
			}
		}
		val directlyModifiedCount = modified.size
		// invalidate sources whose external dependencies changed or vanished
		for((external, info) <- externalInfo(analysis.allExternals))
		{
			val dependentSources = analysis.externalDependencies(external).getOrElse(Set.empty)
			if(info.available)
			{
				val dependencyLastModified = info.lastModified
				for(dependentSource <- dependentSources; dependentProducts <- analysis.products(dependentSource))
				{
					dependentProducts.find(p => productLastModified(p) < dependencyLastModified) match
					{
						case Some(modifiedProduct) =>
						{
							log.debug(productType + " " + modifiedProduct + " older than external dependency " + external)
							unmodified -= dependentSource
							modified += dependentSource
						}
						case None => ()
					}
				}
			}
			else
			{
				log.debug("External dependency " + external + " not found.")
				unmodified --= dependentSources
				modified ++= dependentSources
				analysis.removeExternalDependency(external)
			}
		}

		// transitively invalidate sources depending on a modified or removed source
		// (disabled when the sbt.intransitive system property is set)
		val handled = new scala.collection.mutable.HashSet[Source]
		val transitive = !java.lang.Boolean.getBoolean("sbt.intransitive")
		def markModified(changed: Iterable[Source]) { for(c <- changed if !handled.contains(c)) markSourceModified(c) }
		def markSourceModified(src: Source)
		{
			unmodified -= src
			modified += src
			handled += src
			if(transitive)
				markDependenciesModified(src)
		}
		def markDependenciesModified(src: Source) { analysis.removeDependencies(src).map(markModified) }

		markModified(modified.toList)
		if(transitive)
			removedSources.foreach(markDependenciesModified)

		for(changed <- removedSources ++ modified)
			analysis.removeSource(changed)

		new ConditionalAnalysis
		{
			def dirtySources = wrap.Wrappers.readOnly(modified)
			def cleanSources = wrap.Wrappers.readOnly(unmodified)
			def directlyModifiedSourcesCount = directlyModifiedCount
			def invalidatedSourcesCount = dirtySources.size - directlyModifiedCount
			def removedSourcesCount = removedCount
			override def toString =
			{
				"  Source analysis: " + directlyModifiedSourcesCount + " new/modified, " +
					invalidatedSourcesCount + " indirectly invalidated, " +
					removedSourcesCount + " removed."
			}
		}
	}

	/** When false, products are only checked for existence, not compared by timestamp. */
	protected def checkLastModified = true
	/** When true, a source with no recorded products is treated as modified. */
	protected def noProductsImpliesModified = true
	protected def isSourceModified(source: Source) =
	{
		analysis.products(source) match
		{
			case None =>
			{
				log.debug("New file " + source)
				true
			}
			case Some(sourceProducts) =>
			{
				val sourceModificationTime = sourceLastModified(source)
				def isOutofdate(p: Product) =
					!productExists(p) || (checkLastModified && productLastModified(p) < sourceModificationTime)

				sourceProducts.find(isOutofdate) match
				{
					case Some(modifiedProduct) =>
						log.debug("Outdated " + productType + ": " + modifiedProduct + " for source " + source)
						true
					case None =>
						if(noProductsImpliesModified && sourceProducts.isEmpty)
						{
							// necessary for change detection that depends on last modified
							log.debug("Source " + source + " has no products, marking it modified.")
							true
						}
						else
							false
				}
			}
		}
	}
	/** Saves the analysis on success, reverts it on failure. */
	protected def processingComplete(success: Boolean)
	{
		if(success)
		{
			analysis.save()
			log.info("  Post-analysis: " + analysis.allProducts.toSeq.length + " " + productTypePlural + ".")
		}
		else
			analysis.revert()
	}
}
|
||||
|
||||
/** Settings required for a conditional compilation: what to compile, where to put
* the results, and how to report progress. */
abstract class AbstractCompileConfiguration extends NotNull
{
	def label: String
	def sourceRoots: PathFinder
	def sources: PathFinder
	def outputDirectory: Path
	def classpath: PathFinder
	def analysisPath: Path
	def projectPath: Path
	def log: Logger
	def options: Seq[String]
	def javaOptions: Seq[String]
	def maxErrors: Int
	def compileOrder: CompileOrder.Value
}
|
||||
/** Compile configuration that additionally declares the test framework base classes
* to look for during analysis. */
abstract class CompileConfiguration extends AbstractCompileConfiguration
{
	def testDefinitionClassNames: Iterable[String]
}
|
||||
import java.io.File
|
||||
/** Conditional compilation for the standard case: additionally records detected test
* classes (subclasses of the configured test definitions) in the analysis. */
class CompileConditional(override val config: CompileConfiguration, compiler: AnalyzingCompiler) extends AbstractCompileConditional(config, compiler)
{
	import config._
	type AnalysisType = CompileAnalysis
	protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger) =
		new CompileAnalysis(analysisPath, projectPath, log)
	protected def analysisCallback = new CompileAnalysisCallback
	protected class CompileAnalysisCallback extends BasicCompileAnalysisCallback(projectPath, analysis)
	{
		def superclassNames = testDefinitionClassNames
		def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit =
			analysis.addTest(sourcePath, TestDefinition(isModule, subclassName, superclassName))
	}
}
|
||||
/** Base class for conditional (incremental) compilation of Scala/Java sources. Change
* detection combines content hashes and timestamps according to `changeDetectionMethod`. */
abstract class AbstractCompileConditional(val config: AbstractCompileConfiguration, val compiler: AnalyzingCompiler) extends Conditional[Path, Path, File]
{
	import config._
	type AnalysisType <: BasicCompileAnalysis
	protected def loadAnalysis =
	{
		val a = constructAnalysis(analysisPath, projectPath, log)
		for(errorMessage <- a.load())
			error(errorMessage)
		a
	}
	protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger): AnalysisType

	protected def log = config.log

	protected def productType = "class"
	protected def productTypePlural = "classes"
	protected def sourcesToProcess = sources.get

	protected def sourceExists(source: Path) = source.asFile.exists
	protected def sourceLastModified(source: Path) = source.asFile.lastModified

	protected def productExists(product: Path) = product.asFile.exists
	protected def productLastModified(product: Path) = product.asFile.lastModified

	private def libraryJar = compiler.scalaInstance.libraryJar
	private def compilerJar = compiler.scalaInstance.compilerJar
	/** For each external dependency, checks that it still exists and is on the
	* classpath (the Scala library always counts) and records its timestamp. */
	protected def externalInfo(externals: Iterable[File]) =
	{
		val (classpathJars, classpathDirs) = ClasspathUtilities.buildSearchPaths(classpath.get ++ Seq(Path.fromFile(libraryJar), Path.fromFile(compilerJar)))
		for(external <- externals) yield
		{
			val available = external.exists && (external == libraryJar || ClasspathUtilities.onClasspath(classpathJars, classpathDirs, external) )
			if(!available)
				log.debug("External " + external + (if(external.exists) " not on classpath." else " does not exist."))
			(external, ExternalInfo(available, external.lastModified))
		}
	}

	import ChangeDetection.{LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist}
	protected def changeDetectionMethod: ChangeDetection.Value = HashAndProductsExist
	override protected def checkLastModified = changeDetectionMethod != HashAndProductsExist
	override protected def noProductsImpliesModified = changeDetectionMethod == LastModifiedOnly
	override protected def isSourceModified(source: Path) =
		changeDetectionMethod match
		{
			case HashAndLastModified | HashAndProductsExist =>
				// behavior will differ because of checkLastModified
				// hash modified must come first so that the latest hash is calculated for every source
				hashModified(source) || super.isSourceModified(source)
			case HashOnly => hashModified(source)
			case LastModifiedOnly => super.isSourceModified(source)
		}

	import scala.collection.mutable.{Buffer, ListBuffer}
	// hashes computed during this run; committed to the analysis in finishHashes()
	private val newHashes: Buffer[(Path, Option[Array[Byte]])] = new ListBuffer
	private def warnHashError(source: Path, message: String)
	{
		log.warn("Error computing hash for source " + source + ": " + message)
		newHashes += ((source, None))
	}
	/** True when `source`'s current hash differs from the recorded one. Directories,
	* sources without a recorded hash, and sources that fail to hash all count as modified. */
	protected def hashModified(source: Path) =
	{
		source.isDirectory ||
		(analysis.hash(source) match
		{
			case None =>
				log.debug("Source " + source + " had no hash, marking modified.")
				Hash(source, log).fold(err => warnHashError(source, err), newHash => newHashes += ((source, Some(newHash))))
				true
			case Some(oldHash) =>
			{
				Hash(source, log) match
				{
					case Left(err) =>
						warnHashError(source, err)
						log.debug("Assuming source is modified because of error.")
						true
					case Right(newHash) =>
						newHashes += ((source, Some(newHash)))
						val different = !(oldHash deepEquals newHash)
						if(different)
							log.debug("Hash for source " + source + " changed (was " + Hash.toHex(oldHash) +
								", is now " + Hash.toHex(newHash) + "), marking modified.")
						different
				}
			}
		})
	}
	private def scalaJars: Iterable[Path] =
	{
		val instance = compiler.scalaInstance
		Seq(instance.libraryJar, instance.compilerJar).map(Path.fromFile)
	}
	/** Compiles the dirty sources and post-processes the class files for dependency analysis. */
	protected def execute(executeAnalysis: ConditionalAnalysis) =
	{
		log.info(executeAnalysis.toString)
		finishHashes()
		import executeAnalysis.dirtySources

		// the output directory won't show up in the classpath unless it exists, so do this before classpath.get
		val outputDir = outputDirectory.asFile
		FileUtilities.createDirectory(outputDir, log)

		val cp = classpath.get
		if(!dirtySources.isEmpty)
			checkClasspath(cp)
		def run =
		{
			val compile = new Compile(config.maxErrors, compiler, analysisCallback, projectPath)
			compile(label, dirtySources, cp, outputDirectory, options, javaOptions, compileOrder, log)
		}
		val loader = ClasspathUtilities.toLoader(cp ++ scalaJars)
		val r = classfile.Analyze(projectPath, outputDirectory, dirtySources, sourceRoots.get, log)(analysis.allProducts, analysisCallback, loader)(run)
		if(log.atLevel(Level.Debug))
		{
			/** This checks that the plugin accounted for all classes in the output directory.*/
			val classes = scala.collection.mutable.HashSet(analysis.allProducts.toSeq: _*)
			val actualClasses = (outputDirectory ** GlobFilter("*.class")).get
			val missedClasses = actualClasses.toList.remove(classes.contains)
			missedClasses.foreach(c => log.debug("Missed class: " + c))
			log.debug("Total missed classes: " + missedClasses.length)
		}
		r
	}
	/** Commits the hashes gathered during change detection (or clears them when
	* hashing is not the active detection method). */
	private def finishHashes()
	{
		if(changeDetectionMethod == LastModifiedOnly)
			analysis.clearHashes()
		else
		{
			for((path, hash) <- newHashes)
			{
				hash match
				{
					case None => analysis.clearHash(path)
					case Some(hash) => analysis.setHash(path, hash)
				}
			}
		}
		newHashes.clear()
	}
	/** Logs (at debug level) jar file names that occur more than once on the classpath. */
	private def checkClasspath(cp: Iterable[Path])
	{
		import scala.collection.mutable.{HashMap, HashSet, Set}
		val collisions = new HashMap[String, Set[Path]]
		for(jar <- cp if ClasspathUtilities.isArchive(jar))
			collisions.getOrElseUpdate(jar.asFile.getName, new HashSet[Path]) += jar
		for((name, jars) <- collisions)
		{
			if(jars.size > 1)
			{
				log.debug("Possible duplicate classpath locations for jar " + name + ": ")
				for(jar <- jars) log.debug("\t" + jar.absolutePath)
			}
		}
	}

	protected def analysisCallback: AnalysisCallback
}
|
||||
/** Strategies for deciding whether a source file has changed. */
object ChangeDetection extends Enumeration
{
	val LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist = Value
}
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** The trap methods execute the provided code in a try block and handle a thrown exception.*/
|
||||
/** The trap methods execute the provided code in a try block and handle a thrown exception.*/
object Control
{
	/** Runs `execute`, converting anything thrown into a `Left` prefixed with `errorMessagePrefix`. */
	def trap[T](errorMessagePrefix: => String, log: Logger)(execute: => Either[String, T]): Either[String, T] =
		try { execute }
		catch { case e => log.trace(e); Left(errorMessagePrefix + e.toString) }

	/** Like `trap`, but always runs `doFinally` afterwards; its errors are logged, not thrown. */
	def trapAndFinally[T](errorMessagePrefix: => String, log: Logger)(execute: => Either[String, T])(doFinally: => Unit): Either[String, T] =
		try { execute }
		catch { case e => log.trace(e); Left(errorMessagePrefix + e.toString) }
		finally { trapAndLog(log)(doFinally) }

	/** Runs `execute`, converting anything thrown into a `Some` error message. */
	def trapUnit(errorMessagePrefix: => String, log: Logger)(execute: => Option[String]): Option[String] =
		try { execute }
		catch { case e => log.trace(e); Some(errorMessagePrefix + e.toString) }

	/** Like `trapUnit`, but always runs `doFinally` afterwards; its errors are logged, not thrown. */
	def trapUnitAndFinally(errorMessagePrefix: => String, log: Logger)(execute: => Option[String])(doFinally: => Unit): Option[String] =
		try { execute }
		catch { case e => log.trace(e); Some(errorMessagePrefix + e.toString) }
		finally { trapAndLog(log)(doFinally) }

	/** Runs `execute`, silently discarding any Exception. */
	def trap(execute: => Unit)
	{
		try { execute }
		catch { case e: Exception => () }
	}
	/** Runs `execute`, logging (but not rethrowing) anything thrown. */
	def trapAndLog(log: Logger)(execute: => Unit)
	{
		try { execute }
		catch { case e => log.trace(e); log.error(e.toString) }
	}
	/** Evaluates `t`, capturing a thrown Exception as a `Left`. */
	def convertException[T](t: => T): Either[Exception, T] =
	{
		try { Right(t) }
		catch { case e: Exception => Left(e) }
	}
	/** Evaluates `t`, converting a thrown Exception to its message after logging the trace. */
	def convertErrorMessage[T](log: Logger)(t: => T): Either[String, T] =
	{
		try { Right(t) }
		catch { case e: Exception => log.trace(e); Left(e.toString) }
	}

	/** Extracts the value or aborts by calling `error` with the message. */
	def getOrError[T](result: Either[String, T]): T = result.fold(error, x=>x)
	/** Applies `f` to each element, stopping at the first error (`Some`). */
	final def lazyFold[T](list: List[T])(f: T => Option[String]): Option[String] =
		list match
		{
			case Nil => None
			case head :: tail =>
				f(head) match
				{
					case None => lazyFold(tail)(f)
					case x => x
				}
		}
	/** Left fold over `list` that short-circuits on the first `Left`. */
	final def lazyFold[T, S](list: List[T], value: S)(f: (S,T) => Either[String, S]): Either[String, S] =
		list match
		{
			case Nil => Right(value)
			case head :: tail =>
				f(value, head) match
				{
					case Right(newValue) => lazyFold(tail, newValue)(f)
					case x => x
				}
		}
	/** Runs `f` on the Right value; errors from either step come back as `Some`. */
	def thread[T](e: Either[String, T])(f: T => Option[String]): Option[String] =
		e.right.flatMap( t => f(t).toLeft(()) ).left.toOption
}
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import org.apache.ivy.util.url.CredentialsStore
|
||||
|
||||
/** Loads repository credentials into Ivy's credentials cache. */
object Credentials
{
	/** Add the provided credentials to Ivy's credentials cache.*/
	def add(realm: String, host: String, userName: String, passwd: String): Unit =
		CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
	/** Load credentials from the given file into Ivy's credentials cache.*/
	def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log)
	/** Load credentials from the given file into Ivy's credentials cache.*/
	def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log)
	/** Load credentials from the given file into Ivy's credentials cache.*/
	def apply(path: Path, log: Logger)
	{
		val msg =
			if(path.exists)
			{
				val properties = new scala.collection.mutable.HashMap[String, String]
				// take the first alias present for each required key; a missing key becomes an error
				def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path)

				impl.MapUtilities.read(properties, path, log) orElse
				{
					List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
					{
						case (Nil, List(realm, host, user, pass)) => add(realm, host, user, pass); None
						case (errors, _) => Some(errors.mkString("\n"))
					}
				}
			}
			else
				Some("Credentials file " + path + " does not exist")
		msg.foreach(x => log.warn(x))
	}
	// accepted aliases for each key in a credentials properties file
	private[this] val RealmKeys = List("realm")
	private[this] val HostKeys = List("host", "hostname")
	private[this] val UserKeys = List("user", "user.name", "username")
	private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd")
}
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 David MacIver, Mark Harrah
|
||||
*/
|
||||
package sbt;
|
||||
|
||||
/** Mixin for nodes of a directed acyclic graph that know their own dependencies. */
trait Dag[Node <: Dag[Node]]
{
	self: Node =>

	def dependencies: Iterable[Node]
	/** All reachable nodes, each after its dependencies and with this node last. */
	def topologicalSort = Dag.topologicalSort(self)(_.dependencies)
}
|
||||
object Dag
{
	import scala.collection.mutable

	/** Depth-first post-order traversal from `root`: returns every reachable node,
	* with each node listed after all of its dependencies. */
	def topologicalSort[T](root: T)(dependencies: T => Iterable[T]) =
	{
		val discovered = new mutable.HashSet[T]
		// insertion-ordered so that the post-order finish sequence is preserved
		val finished = new wrap.MutableSetWrapper(new java.util.LinkedHashSet[T])

		def visit(node: T)
		{
			if(!discovered(node))
			{
				discovered(node) = true
				dependencies(node).foreach(visit)
				finished += node
			}
		}

		visit(root)

		finished.toList
	}
}
|
||||
|
||||
|
|
@ -0,0 +1,546 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah, David MacIver, Josh Cough
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** The default project when no project is explicitly configured and the common base class for
|
||||
* configuring a project.*/
|
||||
/** The project type used when a build does not explicitly configure one. */
class DefaultProject(val info: ProjectInfo) extends BasicScalaProject with MavenStyleScalaPaths
/** The default project type for web applications. */
class DefaultWebProject(val info: ProjectInfo) extends BasicWebScalaProject with MavenStyleWebScalaPaths
|
||||
|
||||
|
||||
import BasicScalaProject._
|
||||
import ScalaProject.{optionsAsString, javaOptionsAsString}
|
||||
import java.io.File
|
||||
import java.net.URLClassLoader
|
||||
import java.util.jar.Attributes
|
||||
|
||||
/** This class defines concrete instances of actions from ScalaProject using overridable paths,
|
||||
* options, and configuration. */
|
||||
abstract class BasicScalaProject extends ScalaProject with BasicDependencyProject with ScalaPaths
|
||||
{
|
||||
/** The explicitly specified class to be run by the 'run' action.
|
||||
* See http://code.google.com/p/simple-build-tool/wiki/RunningProjectCode for details.*/
|
||||
def mainClass: Option[String] = None
|
||||
/** Gets the main class to use. This is used by package and run to determine which main
|
||||
* class to run or include as the Main-Class attribute.
|
||||
* If `mainClass` is explicitly specified, it is used. Otherwise, the main class is selected from
|
||||
* the classes with a main method as automatically detected by the analyzer plugin.
|
||||
* `promptIfMultipleChoices` controls the behavior when multiple main classes are detected.
|
||||
* If true, it prompts the user to select which main class to use. If false, it prints a warning
|
||||
* and returns no main class.*/
|
||||
def getMainClass(promptIfMultipleChoices: Boolean): Option[String] =
|
||||
getMainClass(promptIfMultipleChoices, mainCompileConditional, mainClass)
|
||||
def getMainClass(promptIfMultipleChoices: Boolean, compileConditional: CompileConditional, explicit: Option[String]): Option[String] =
|
||||
explicit orElse
|
||||
{
|
||||
val applications = compileConditional.analysis.allApplications.toList
|
||||
impl.SelectMainClass(promptIfMultipleChoices, applications) orElse
|
||||
{
|
||||
if(!promptIfMultipleChoices && !applications.isEmpty)
|
||||
warnMultipleMainClasses(log)
|
||||
None
|
||||
}
|
||||
}
|
||||
def testMainClass: Option[String] = None
|
||||
def getTestMainClass(promptIfMultipleChoices: Boolean): Option[String] =
|
||||
getMainClass(promptIfMultipleChoices, testCompileConditional, testMainClass)
|
||||
|
||||
/** Specifies the value of the `Class-Path` attribute in the manifest of the main jar. */
|
||||
def manifestClassPath: Option[String] = None
|
||||
def dependencies = info.dependencies ++ subProjects.values.toList
|
||||
|
||||
lazy val mainCompileConditional = new CompileConditional(mainCompileConfiguration, buildCompiler)
|
||||
lazy val testCompileConditional = new CompileConditional(testCompileConfiguration, buildCompiler)
|
||||
|
||||
def compileOrder = CompileOrder.Mixed
|
||||
|
||||
/** The main artifact produced by this project. To redefine the main artifact, override `defaultMainArtifact`
|
||||
* Additional artifacts are defined by `val`s of type `Artifact`.*/
|
||||
lazy val mainArtifact = defaultMainArtifact
|
||||
/** Defines the default main Artifact assigned to `mainArtifact`. By default, this is a jar file with name given
|
||||
* by `artifactID`.*/
|
||||
protected def defaultMainArtifact = Artifact(artifactID, "jar", "jar")
|
||||
|
||||
import Project._
|
||||
|
||||
/** The options provided to the 'compile' action to pass to the Scala compiler.*/
|
||||
def compileOptions: Seq[CompileOption] = Deprecation :: Nil
|
||||
/** The options provided to the 'compile' action to pass to the Java compiler. */
|
||||
def javaCompileOptions: Seq[JavaCompileOption] = Nil
|
||||
/** The options provided to the 'test-compile' action, defaulting to those for the 'compile' action.*/
|
||||
def testCompileOptions: Seq[CompileOption] = compileOptions
|
||||
/** The options provided to the 'test-compile' action to pass to the Java compiler. */
|
||||
def testJavaCompileOptions: Seq[JavaCompileOption] = javaCompileOptions
|
||||
|
||||
/** The options provided to the 'doc' and 'docTest' actions.*/
|
||||
def documentOptions: Seq[ScaladocOption] =
|
||||
documentTitle(name + " " + version + " API") ::
|
||||
(if(isScala27) only27Options else Nil)
|
||||
private def only27Options =
|
||||
windowTitle(name + " " + version + " API") :: Nil
|
||||
/** The options provided to the 'test' action..*/
|
||||
def testOptions: Seq[TestOption] =
|
||||
TestListeners(testListeners) ::
|
||||
TestFilter(includeTest) ::
|
||||
Nil
|
||||
/** The options provided to the clean action. You can add files to be removed and files to be preserved here.*/
|
||||
def cleanOptions: Seq[CleanOption] =
|
||||
ClearAnalysis(mainCompileConditional.analysis) ::
|
||||
ClearAnalysis(testCompileConditional.analysis) ::
|
||||
historyPath.map(history => Preserve(history)).toList
|
||||
|
||||
def packageOptions: Seq[PackageOption] =
|
||||
manifestClassPath.map(cp => ManifestAttributes( (Attributes.Name.CLASS_PATH, cp) )).toList :::
|
||||
getMainClass(false).map(MainClass(_)).toList
|
||||
|
||||
private def succeededTestPath = testAnalysisPath / "succeeded-tests"
|
||||
private def quickOptions(failedOnly: Boolean) =
|
||||
{
|
||||
val path = succeededTestPath
|
||||
val analysis = testCompileConditional.analysis
|
||||
TestFilter(new impl.TestQuickFilter(analysis, failedOnly, path, log)) :: TestListeners(new impl.TestStatusReporter(path, log) :: Nil) :: Nil
|
||||
}
|
||||
|
||||
def consoleInit = ""
|
||||
|
||||
protected def includeTest(test: String): Boolean = true
|
||||
|
||||
/** This is called to create the initial directories when a user makes a new project from
|
||||
* sbt.*/
|
||||
override final def initializeDirectories()
|
||||
{
|
||||
FileUtilities.createDirectories(directoriesToCreate.map(_.asFile), log) match
|
||||
{
|
||||
case Some(errorMessage) => log.error("Could not initialize directory structure: " + errorMessage)
|
||||
case None => log.success("Successfully initialized directory structure.")
|
||||
}
|
||||
}
|
||||
import Configurations._
|
||||
/** The managed configuration to use when determining the classpath for a Scala interpreter session.*/
|
||||
def consoleConfiguration = Test
|
||||
|
||||
/** A PathFinder that provides the classpath to pass to scaladoc. It is the same as the compile classpath
|
||||
* by default. */
|
||||
def docClasspath = compileClasspath
|
||||
/** A PathFinder that provides the classpath to pass to the compiler.*/
|
||||
def compileClasspath = fullClasspath(Compile) +++ optionalClasspath
|
||||
/** A PathFinder that provides the classpath to use when unit testing.*/
|
||||
def testClasspath = fullClasspath(Test) +++ optionalClasspath
|
||||
/** A PathFinder that provides the classpath to use when running the class specified by 'getMainClass'.*/
|
||||
def runClasspath = fullClasspath(Runtime) +++ optionalClasspath
|
||||
/** A PathFinder that provides the classpath to use for a Scala interpreter session.*/
|
||||
def consoleClasspath = fullClasspath(consoleConfiguration) +++ optionalClasspath
|
||||
/** A PathFinder that corresponds to Maven's optional scope. It includes any managed libraries in the
|
||||
* 'optional' configuration for this project only.*/
|
||||
def optionalClasspath = managedClasspath(Optional)
|
||||
/** A PathFinder that contains the jars that should be included in a comprehensive package. This is
|
||||
* by default the 'runtime' classpath excluding the 'provided' classpath.*/
|
||||
def publicClasspath = runClasspath --- fullClasspath(Provided)
|
||||
|
||||
/** This returns the unmanaged classpath for only this project for the given configuration. It by
|
||||
* default includes the main compiled classes for this project and the libraries in this project's
|
||||
* unmanaged library directory (lib) and the managed directory for the specified configuration. It
|
||||
* also adds the resource directories appropriate to the configuration.
|
||||
* The Provided and Optional configurations are treated specially; they are empty
|
||||
* by default.*/
|
||||
def fullUnmanagedClasspath(config: Configuration) =
|
||||
{
|
||||
config match
|
||||
{
|
||||
case CompilerPlugin => unmanagedClasspath
|
||||
case Runtime => runUnmanagedClasspath
|
||||
case Test => testUnmanagedClasspath
|
||||
case Provided | Optional => Path.emptyPathFinder
|
||||
case _ => mainUnmanagedClasspath
|
||||
}
|
||||
}
|
||||
/** The unmanaged base classpath. By default, the unmanaged classpaths for test and run include this classpath. */
|
||||
protected def mainUnmanagedClasspath = mainCompilePath +++ mainResourcesOutputPath +++ unmanagedClasspath
|
||||
/** The unmanaged classpath for the run configuration. By default, it includes the base classpath returned by
|
||||
* `mainUnmanagedClasspath`.*/
|
||||
protected def runUnmanagedClasspath = mainUnmanagedClasspath +++ mainDependencies.scalaCompiler
|
||||
/** The unmanaged classpath for the test configuration. By default, it includes the run classpath, which includes the base
|
||||
* classpath returned by `mainUnmanagedClasspath`.*/
|
||||
protected def testUnmanagedClasspath = testCompilePath +++ testResourcesOutputPath +++ testDependencies.scalaCompiler +++ runUnmanagedClasspath
|
||||
|
||||
/** @deprecated Use `mainDependencies.scalaJars`*/
|
||||
@deprecated protected final def scalaJars: Iterable[File] = mainDependencies.scalaJars.get.map(_.asFile)
|
||||
/** An analysis of the jar dependencies of the main Scala sources. It is only valid after main source compilation.
|
||||
* See the LibraryDependencies class for details. */
|
||||
final def mainDependencies = new LibraryDependencies(this, mainCompileConditional)
|
||||
/** An analysis of the jar dependencies of the test Scala sources. It is only valid after test source compilation.
|
||||
* See the LibraryDependencies class for details. */
|
||||
final def testDependencies = new LibraryDependencies(this, testCompileConditional)
|
||||
|
||||
/** The list of test frameworks to use for testing. Note that adding frameworks to this list
|
||||
* for an active project currently requires an explicit 'clean' to properly update the set of tests to
|
||||
* run*/
|
||||
def testFrameworks: Seq[TestFramework] =
|
||||
{
|
||||
import TestFrameworks.{JUnit, ScalaCheck, ScalaTest, Specs, ScalaCheckCompat, ScalaTestCompat, SpecsCompat}
|
||||
ScalaCheck :: Specs :: ScalaTest :: JUnit :: ScalaCheckCompat :: ScalaTestCompat :: SpecsCompat :: Nil
|
||||
}
|
||||
/** The list of listeners for testing. */
|
||||
def testListeners: Seq[TestReportListener] = TestLogger(log) :: Nil
|
||||
|
||||
def mainLabel = "main"
|
||||
def testLabel = "test"
|
||||
|
||||
def mainCompileConfiguration: CompileConfiguration = new MainCompileConfig
|
||||
def testCompileConfiguration: CompileConfiguration = new TestCompileConfig
|
||||
abstract class BaseCompileConfig extends CompileConfiguration
|
||||
{
|
||||
def log = BasicScalaProject.this.log
|
||||
def projectPath = info.projectPath
|
||||
def baseCompileOptions: Seq[CompileOption]
|
||||
def options = optionsAsString(baseCompileOptions.filter(!_.isInstanceOf[MaxCompileErrors]))
|
||||
def maxErrors = maximumErrors(baseCompileOptions)
|
||||
def compileOrder = BasicScalaProject.this.compileOrder
|
||||
protected def testClassNames(frameworks: Seq[TestFramework]) =
|
||||
{
|
||||
val loader = TestFramework.createTestLoader(classpath.get, buildScalaInstance.loader)
|
||||
def getTestNames(framework: TestFramework): Seq[String] =
|
||||
framework.create(loader, log).toList.flatMap(_.tests.map(_.superClassName))
|
||||
frameworks.flatMap(getTestNames)
|
||||
}
|
||||
}
|
||||
class MainCompileConfig extends BaseCompileConfig
|
||||
{
|
||||
def baseCompileOptions = compileOptions
|
||||
def label = mainLabel
|
||||
def sourceRoots = mainSourceRoots
|
||||
def sources = mainSources
|
||||
def outputDirectory = mainCompilePath
|
||||
def classpath = compileClasspath
|
||||
def analysisPath = mainAnalysisPath
|
||||
def testDefinitionClassNames: Seq[String] = Nil
|
||||
def javaOptions = javaOptionsAsString(javaCompileOptions)
|
||||
}
|
||||
class TestCompileConfig extends BaseCompileConfig
|
||||
{
|
||||
def baseCompileOptions = testCompileOptions
|
||||
def label = testLabel
|
||||
def sourceRoots = testSourceRoots
|
||||
def sources = testSources
|
||||
def outputDirectory = testCompilePath
|
||||
def classpath = testClasspath
|
||||
def analysisPath = testAnalysisPath
|
||||
def testDefinitionClassNames: Seq[String] = testClassNames(testFrameworks)
|
||||
def javaOptions = javaOptionsAsString(testJavaCompileOptions)
|
||||
}
|
||||
|
||||
/** Configures forking the compiler and runner. Use ForkScalaCompiler, ForkScalaRun or mix together.*/
|
||||
def fork: Option[ForkScala] = None
|
||||
def forkRun: Option[ForkScala] = forkRun(None, Nil)
|
||||
def forkRun(workingDirectory: File): Option[ForkScala] = forkRun(Some(workingDirectory), Nil)
|
||||
def forkRun(jvmOptions: Seq[String]): Option[ForkScala] = forkRun(None, jvmOptions)
|
||||
def forkRun(workingDirectory0: Option[File], jvmOptions: Seq[String]): Option[ForkScala] =
|
||||
{
|
||||
val si = buildScalaInstance
|
||||
Some(new ForkScalaRun {
|
||||
override def scalaJars = si.libraryJar :: si.compilerJar :: Nil
|
||||
override def workingDirectory: Option[File] = workingDirectory0
|
||||
override def runJVMOptions: Seq[String] = jvmOptions
|
||||
})
|
||||
}
|
||||
private def doCompile(conditional: CompileConditional) = conditional.run
|
||||
implicit def defaultRunner: ScalaRun =
|
||||
{
|
||||
fork match
|
||||
{
|
||||
case Some(fr: ForkScalaRun) => new ForkRun(fr)
|
||||
case _ => new Run(buildScalaInstance)
|
||||
}
|
||||
}
|
||||
|
||||
def basicConsoleTask = consoleTask(consoleClasspath, consoleInit)
|
||||
|
||||
protected def runTask(mainClass: String): MethodTask = task { args => runTask(Some(mainClass), runClasspath, args) dependsOn(compile, copyResources) }
|
||||
|
||||
protected def compileAction = task { doCompile(mainCompileConditional) } describedAs MainCompileDescription
|
||||
protected def testCompileAction = task { doCompile(testCompileConditional) } dependsOn compile describedAs TestCompileDescription
|
||||
protected def cleanAction = cleanTask(outputPath, cleanOptions) describedAs CleanDescription
|
||||
protected def testRunAction = task { args => runTask(getTestMainClass(true), testClasspath, args) dependsOn(testCompile, copyResources) } describedAs TestRunDescription
|
||||
protected def runAction = task { args => runTask(getMainClass(true), runClasspath, args) dependsOn(compile, copyResources) } describedAs RunDescription
|
||||
protected def consoleQuickAction = basicConsoleTask describedAs ConsoleQuickDescription
|
||||
protected def consoleAction = basicConsoleTask.dependsOn(testCompile, copyResources, copyTestResources) describedAs ConsoleDescription
|
||||
protected def docAction = scaladocTask(mainLabel, mainSources, mainDocPath, docClasspath, documentOptions).dependsOn(compile) describedAs DocDescription
|
||||
protected def docTestAction = scaladocTask(testLabel, testSources, testDocPath, docClasspath, documentOptions).dependsOn(testCompile) describedAs TestDocDescription
|
||||
|
||||
protected def testAction = defaultTestTask(testOptions)
|
||||
protected def testOnlyAction = testQuickMethod(testCompileConditional.analysis, testOptions)((options) => {
|
||||
defaultTestTask(options)
|
||||
}) describedAs (TestOnlyDescription)
|
||||
protected def testQuickAction = defaultTestQuickMethod(false) describedAs (TestQuickDescription)
|
||||
protected def testFailedAction = defaultTestQuickMethod(true) describedAs (TestFailedDescription)
|
||||
protected def defaultTestQuickMethod(failedOnly: Boolean) =
|
||||
testQuickMethod(testCompileConditional.analysis, testOptions)(options => defaultTestTask(quickOptions(failedOnly) ::: options.toList))
|
||||
protected def defaultTestTask(testOptions: => Seq[TestOption]) =
|
||||
testTask(testFrameworks, testClasspath, testCompileConditional.analysis, testOptions).dependsOn(testCompile, copyResources, copyTestResources) describedAs TestDescription
|
||||
|
||||
override def packageToPublishActions: Seq[ManagedTask] = `package` :: Nil
|
||||
|
||||
protected def packageAction = packageTask(packagePaths, jarPath, packageOptions).dependsOn(compile) describedAs PackageDescription
|
||||
protected def packageTestAction = packageTask(packageTestPaths, packageTestJar).dependsOn(testCompile) describedAs TestPackageDescription
|
||||
protected def packageDocsAction = packageTask(mainDocPath ##, packageDocsJar, Recursive).dependsOn(doc) describedAs DocPackageDescription
|
||||
protected def packageSrcAction = packageTask(packageSourcePaths, packageSrcJar) describedAs SourcePackageDescription
|
||||
protected def packageTestSrcAction = packageTask(packageTestSourcePaths, packageTestSrcJar) describedAs TestSourcePackageDescription
|
||||
protected def packageProjectAction = zipTask(packageProjectPaths, packageProjectZip) describedAs ProjectPackageDescription
|
||||
|
||||
protected def docAllAction = (doc && docTest) describedAs DocAllDescription
|
||||
protected def packageAllAction = task { None } dependsOn(`package`, packageTest, packageSrc, packageTestSrc, packageDocs) describedAs PackageAllDescription
|
||||
protected def graphSourcesAction = graphSourcesTask(graphSourcesPath, mainSourceRoots, mainCompileConditional.analysis).dependsOn(compile)
|
||||
protected def graphPackagesAction = graphPackagesTask(graphPackagesPath, mainSourceRoots, mainCompileConditional.analysis).dependsOn(compile)
|
||||
protected def incrementVersionAction = task { incrementVersionNumber(); None } describedAs IncrementVersionDescription
|
||||
protected def releaseAction = (test && packageAll && incrementVersion) describedAs ReleaseDescription
|
||||
|
||||
protected def copyResourcesAction = syncPathsTask(mainResources, mainResourcesOutputPath) describedAs CopyResourcesDescription
|
||||
protected def copyTestResourcesAction = syncPathsTask(testResources, testResourcesOutputPath) describedAs CopyTestResourcesDescription
|
||||
|
||||
lazy val compile = compileAction
|
||||
lazy val testCompile = testCompileAction
|
||||
lazy val clean = cleanAction
|
||||
lazy val run = runAction
|
||||
lazy val consoleQuick = consoleQuickAction
|
||||
lazy val console = consoleAction
|
||||
lazy val doc = docAction
|
||||
lazy val docTest = docTestAction
|
||||
lazy val test = testAction
|
||||
lazy val testRun = testRunAction
|
||||
lazy val `package` = packageAction
|
||||
lazy val packageTest = packageTestAction
|
||||
lazy val packageDocs = packageDocsAction
|
||||
lazy val packageSrc = packageSrcAction
|
||||
lazy val packageTestSrc = packageTestSrcAction
|
||||
lazy val packageProject = packageProjectAction
|
||||
lazy val docAll = docAllAction
|
||||
lazy val packageAll = packageAllAction
|
||||
lazy val graphSrc = graphSourcesAction
|
||||
lazy val graphPkg = graphPackagesAction
|
||||
lazy val incrementVersion = incrementVersionAction
|
||||
lazy val release = releaseAction
|
||||
lazy val copyResources = copyResourcesAction
|
||||
lazy val copyTestResources = copyTestResourcesAction
|
||||
|
||||
lazy val testQuick = testQuickAction
|
||||
lazy val testFailed = testFailedAction
|
||||
lazy val testOnly = testOnlyAction
|
||||
|
||||
lazy val javap = javapTask(runClasspath, mainCompileConditional, mainCompilePath)
|
||||
lazy val testJavap = javapTask(testClasspath, testCompileConditional, testCompilePath)
|
||||
|
||||
def jarsOfProjectDependencies = Path.lazyPathFinder {
|
||||
topologicalSort.dropRight(1) flatMap { p =>
|
||||
p match
|
||||
{
|
||||
case bpp: BasicScalaPaths => List(bpp.jarPath)
|
||||
case _ => Nil
|
||||
}
|
||||
}
|
||||
}
|
||||
override def deliverScalaDependencies: Iterable[ModuleID] =
|
||||
{
|
||||
val snapshot = mainDependencies.snapshot
|
||||
mapScalaModule(snapshot.scalaLibrary, ScalaArtifacts.LibraryID) ++
|
||||
mapScalaModule(snapshot.scalaCompiler, ScalaArtifacts.CompilerID)
|
||||
}
|
||||
override def watchPaths = mainSources +++ testSources +++ mainResources +++ testResources
|
||||
private def mapScalaModule(in: Iterable[File], id: String) = in.map(jar => ModuleID(ScalaArtifacts.Organization, id, buildScalaVersion) from(jar.toURI.toURL.toString))
|
||||
}
|
||||
abstract class BasicWebScalaProject extends BasicScalaProject with WebScalaProject with WebScalaPaths
|
||||
{ p =>
|
||||
import BasicWebScalaProject._
|
||||
override def watchPaths = super.watchPaths +++ webappResources
|
||||
|
||||
/** Override this to define paths that `prepare-webapp` and `package` should ignore.
|
||||
* They will not be pruned by prepare-webapp and will not be included in the war.*/
|
||||
def webappUnmanaged: PathFinder = Path.emptyPathFinder
|
||||
|
||||
lazy val prepareWebapp = prepareWebappAction
|
||||
protected def prepareWebappAction =
|
||||
prepareWebappTask(webappResources, temporaryWarPath, webappClasspath, mainDependencies.scalaJars, webappUnmanaged) dependsOn(compile, copyResources)
|
||||
|
||||
lazy val jettyInstance = new JettyRunner(jettyConfiguration)
|
||||
|
||||
def jettyConfiguration: JettyConfiguration =
|
||||
new DefaultJettyConfiguration
|
||||
{
|
||||
def classpath = jettyRunClasspath
|
||||
def jettyClasspath = p.jettyClasspath
|
||||
def war = jettyWebappPath
|
||||
def contextPath = jettyContextPath
|
||||
def classpathName = "test"
|
||||
def parentLoader = buildScalaInstance.loader
|
||||
def scanDirectories = p.scanDirectories.map(_.asFile)
|
||||
def scanInterval = p.scanInterval
|
||||
def port = jettyPort
|
||||
def log = p.log
|
||||
}
|
||||
/** This is the classpath used to determine what classes, resources, and jars to put in the war file.*/
|
||||
def webappClasspath = publicClasspath
|
||||
/** This is the classpath containing Jetty.*/
|
||||
def jettyClasspath = testClasspath --- jettyRunClasspath
|
||||
/** This is the classpath containing the web application.*/
|
||||
def jettyRunClasspath = publicClasspath
|
||||
def jettyWebappPath = temporaryWarPath
|
||||
lazy val jettyRun = jettyRunAction
|
||||
lazy val jetty = task { idle() } dependsOn(jettyRun) describedAs(JettyDescription)
|
||||
protected def jettyRunAction = jettyRunTask(jettyInstance) dependsOn(prepareWebapp) describedAs(JettyRunDescription)
|
||||
private def idle() =
|
||||
{
|
||||
log.info("Waiting... (press any key to interrupt)")
|
||||
def doWait()
|
||||
{
|
||||
try { Thread.sleep(1000) } catch { case _: InterruptedException => () }
|
||||
if(System.in.available() <= 0)
|
||||
doWait()
|
||||
}
|
||||
doWait()
|
||||
while (System.in.available() > 0) System.in.read()
|
||||
None
|
||||
}
|
||||
|
||||
/** The directories that should be watched to determine if the web application needs to be reloaded..*/
|
||||
def scanDirectories: Seq[Path] = jettyWebappPath :: Nil
|
||||
/** The time in seconds between scans that check whether the web application should be reloaded.*/
|
||||
def scanInterval: Int = JettyRunner.DefaultScanInterval
|
||||
/** The port that Jetty runs on. */
|
||||
def jettyPort: Int = JettyRunner.DefaultPort
|
||||
|
||||
lazy val jettyReload = task { jettyInstance.reload(); None } describedAs(JettyReloadDescription)
|
||||
lazy val jettyRestart = jettyStop && jettyRun
|
||||
lazy val jettyStop = jettyStopAction
|
||||
protected def jettyStopAction = jettyStopTask(jettyInstance) describedAs(JettyStopDescription)
|
||||
|
||||
/** The clean action for a web project is modified so that it first stops jetty if it is running,
|
||||
* since the webapp directory will be removed by the clean.*/
|
||||
override def cleanAction = super.cleanAction dependsOn jettyStop
|
||||
|
||||
/** Redefine the `package` action to make a war file.*/
|
||||
override protected def packageAction = packageWarAction(temporaryWarPath, webappUnmanaged, warPath, Nil) dependsOn(prepareWebapp) describedAs PackageWarDescription
|
||||
|
||||
/** Redefine the default main artifact to be a war file.*/
|
||||
override protected def defaultMainArtifact = Artifact(artifactID, "war", "war")
|
||||
}
|
||||
|
||||
object BasicScalaProject
|
||||
{
|
||||
val CleanDescription =
|
||||
"Deletes all generated files (the target directory)."
|
||||
val MainCompileDescription =
|
||||
"Compiles main sources."
|
||||
val TestCompileDescription =
|
||||
"Compiles test sources."
|
||||
val TestDescription =
|
||||
"Runs all tests detected during compilation."
|
||||
val TestOnlyDescription =
|
||||
"Runs the tests provided as arguments."
|
||||
val TestFailedDescription =
|
||||
"Runs the tests provided as arguments if they have not succeeded."
|
||||
val TestQuickDescription =
|
||||
"Runs the tests provided as arguments if they have not succeeded or their dependencies changed."
|
||||
val DocDescription =
|
||||
"Generates API documentation for main Scala source files using scaladoc."
|
||||
val TestDocDescription =
|
||||
"Generates API documentation for test Scala source files using scaladoc."
|
||||
val RunDescription =
|
||||
"Runs the main class for the project with the provided arguments."
|
||||
val TestRunDescription =
|
||||
"Runs a test class with a main method with the provided arguments."
|
||||
val ConsoleDescription =
|
||||
"Starts the Scala interpreter with the project classes on the classpath."
|
||||
val ConsoleQuickDescription =
|
||||
"Starts the Scala interpreter with the project classes on the classpath without running compile first."
|
||||
val PackageDescription =
|
||||
"Creates a jar file containing main classes and resources."
|
||||
val TestPackageDescription =
|
||||
"Creates a jar file containing test classes and resources."
|
||||
val DocPackageDescription =
|
||||
"Creates a jar file containing generated API documentation."
|
||||
val SourcePackageDescription =
|
||||
"Creates a jar file containing all main source files and resources."
|
||||
val TestSourcePackageDescription =
|
||||
"Creates a jar file containing all test source files and resources."
|
||||
val ProjectPackageDescription =
|
||||
"Creates a zip file containing the entire project, excluding generated files."
|
||||
val PackageAllDescription =
|
||||
"Executes all package tasks except package-project."
|
||||
val DocAllDescription =
|
||||
"Generates both main and test documentation."
|
||||
val IncrementVersionDescription =
|
||||
"Increments the micro part of the version (the third number) by one. (This is only valid for versions of the form #.#.#-*)"
|
||||
val ReleaseDescription =
|
||||
"Compiles, tests, generates documentation, packages, and increments the version."
|
||||
val CopyResourcesDescription =
|
||||
"Copies resources to the target directory where they can be included on classpaths."
|
||||
val CopyTestResourcesDescription =
|
||||
"Copies test resources to the target directory where they can be included on the test classpath."
|
||||
|
||||
private def warnMultipleMainClasses(log: Logger) =
|
||||
{
|
||||
log.warn("No Main-Class attribute will be added automatically added:")
|
||||
log.warn("Multiple classes with a main method were detected. Specify main class explicitly with:")
|
||||
log.warn(" override def mainClass = Some(\"className\")")
|
||||
}
|
||||
}
|
||||
object BasicWebScalaProject
|
||||
{
|
||||
val PackageWarDescription =
|
||||
"Creates a war file."
|
||||
val JettyStopDescription =
|
||||
"Stops the Jetty server that was started with the jetty-run action."
|
||||
val JettyRunDescription =
|
||||
"Starts the Jetty server and serves this project as a web application."
|
||||
val JettyDescription =
|
||||
"Starts the Jetty server and serves this project as a web application. Waits until interrupted, so it is suitable to call this batch-style."
|
||||
val JettyReloadDescription =
|
||||
"Forces a reload of a web application running in a Jetty server started by 'jetty-run'. Does nothing if Jetty is not running."
|
||||
}
|
||||
/** Analyzes the dependencies of a project after compilation. All methods except `snapshot` return a
|
||||
* `PathFinder`. The underlying calculations are repeated for each call to PathFinder.get. */
|
||||
final class LibraryDependencies(project: Project, conditional: CompileConditional) extends NotNull
|
||||
{
|
||||
/** Library jars located in unmanaged or managed dependency paths.*/
|
||||
def libraries: PathFinder = pathFinder(snapshot.libraries)
|
||||
/** Library jars located outside of the project.*/
|
||||
def external: PathFinder = pathFinder(snapshot.external)
|
||||
/** The Scala library jar.*/
|
||||
def scalaLibrary: PathFinder = pathFinder(snapshot.scalaLibrary)
|
||||
/** The Scala compiler jar.*/
|
||||
def scalaCompiler: PathFinder = pathFinder(snapshot.scalaCompiler)
|
||||
/** All jar dependencies.*/
|
||||
def all: PathFinder = pathFinder(snapshot.all)
|
||||
/** The Scala library and compiler jars.*/
|
||||
def scalaJars: PathFinder = pathFinder(snapshot.scalaJars)
|
||||
|
||||
/** Returns an object that has all analyzed dependency information frozen at the time of this method call. */
|
||||
def snapshot = new Dependencies
|
||||
|
||||
private def rootProjectDirectory = project.rootProject.info.projectPath
|
||||
|
||||
final class Dependencies
|
||||
{
|
||||
import LibraryDependencies._
|
||||
val all = conditional.analysis.allExternals.filter(ClasspathUtilities.isArchive).map(_.getAbsoluteFile)
|
||||
private[this] val (internal, externalAll) = all.toList.partition(jar => Path.relativize(rootProjectDirectory, jar).isDefined)
|
||||
private[this] val (bootScalaJars, librariesNoScala) = internal.partition(isScalaJar)
|
||||
private[this] val (externalScalaJars, externalNoScala) = externalAll.partition(isScalaJar)
|
||||
val scalaJars = externalScalaJars ::: bootScalaJars
|
||||
val (scalaLibrary, scalaCompiler) = scalaJars.partition(isScalaLibraryJar)
|
||||
def external = externalNoScala
|
||||
def libraries = librariesNoScala
|
||||
}
|
||||
|
||||
private def pathFinder(it: => Iterable[File]) = Path.lazyPathFinder(it.map(Path.fromFile))
|
||||
}
|
||||
private object LibraryDependencies
|
||||
{
|
||||
private def ScalaLibraryPrefix = ScalaArtifacts.LibraryID
|
||||
private def ScalaCompilerPrefix = ScalaArtifacts.CompilerID
|
||||
private def ScalaJarPrefixes = List(ScalaCompilerPrefix, ScalaLibraryPrefix)
|
||||
private def isScalaJar(file: File) = ClasspathUtilities.isArchive(file) && ScalaJarPrefixes.exists(isNamed(file))
|
||||
private def isScalaLibraryJar(file: File) = isNamed(file)(ScalaLibraryPrefix)
|
||||
private def isNamed(file: File)(name: String) = file.getName.startsWith(name)
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{File, Writer}
|
||||
|
||||
object DotGraph
|
||||
{
|
||||
def sources(analysis: BasicCompileAnalysis, outputDirectory: Path, sourceRoots: Iterable[Path], log: Logger) =
|
||||
{
|
||||
val roots = sourceRoots.toList.map(_.asFile)
|
||||
val toString = (x: File) => sourceToString(roots, x)
|
||||
apply(analysis, outputDirectory, toString, toString, log)
|
||||
}
|
||||
def packages(analysis: BasicCompileAnalysis, outputDirectory: Path, sourceRoots: Iterable[Path], log: Logger) =
|
||||
{
|
||||
val roots = sourceRoots.toList.map(_.asFile)
|
||||
val packageOnly = (path: String) =>
|
||||
{
|
||||
val last = path.lastIndexOf(File.separatorChar)
|
||||
val packagePath = (if(last > 0) path.substring(0, last) else path).trim
|
||||
if(packagePath.isEmpty) "" else packagePath.replace(File.separatorChar, '.')
|
||||
}
|
||||
val toString = packageOnly compose ((x: File) => sourceToString(roots, x))
|
||||
apply(analysis, outputDirectory, toString, toString, log)
|
||||
}
|
||||
def apply(analysis: BasicCompileAnalysis, outputDirectory: Path, sourceToString: File => String, externalToString: File => String, log: Logger) =
|
||||
{
|
||||
val outputDir = outputDirectory.asFile
|
||||
|
||||
def generateGraph[Key, Value](fileName: String, graphName: String, graph: Iterable[(Key, scala.collection.Set[Value])],
|
||||
keyToString: Key => String, valueToString: Value => String) =
|
||||
{
|
||||
import scala.collection.mutable.{HashMap, HashSet}
|
||||
val mappedGraph = new HashMap[String, HashSet[String]]
|
||||
for( (key, values) <- graph; keyString = keyToString(key); value <- values)
|
||||
mappedGraph.getOrElseUpdate(keyString, new HashSet[String]) += valueToString(value)
|
||||
|
||||
FileUtilities.write(new File(outputDir, fileName), log) { (writer: Writer) =>
|
||||
|
||||
def writeLine(line: String) = FileUtilities.writeLine(writer, line)
|
||||
writeLine("digraph " + graphName + " {")
|
||||
for( (dependsOn, dependants) <- mappedGraph; dependant <- dependants)
|
||||
{
|
||||
if(dependant != dependsOn && !dependsOn.isEmpty && !dependant.isEmpty)
|
||||
writeLine("\"" + dependant + "\" -> \"" + dependsOn + "\"")
|
||||
}
|
||||
writeLine("}")
|
||||
None
|
||||
}
|
||||
}
|
||||
val srcToString = (p: Path) => sourceToString(p.asFile)
|
||||
FileUtilities.createDirectory(outputDir, log) orElse
|
||||
generateGraph(BasicAnalysis.DependenciesFileName, "dependencies", analysis.allDependencies,
|
||||
srcToString, srcToString) orElse
|
||||
generateGraph(BasicAnalysis.ExternalDependenciesFileName, "externalDependencies", analysis.allExternalDependencies,
|
||||
externalToString, srcToString)
|
||||
}
|
||||
/** Converts 'source' to a display string relative to one of 'roots', dropping a trailing
 * ".scala" extension when present. */
def sourceToString(roots: List[File], source: File) =
{
  val suffix = ".scala"
  val name = relativized(roots, source).trim
  if(name endsWith suffix)
    name.substring(0, name.length - suffix.length)
  else
    name
}
|
||||
/** Returns 'path' relativized against whichever of 'roots' yields the shortest relative string,
 * or the simple file name when no root contains 'path'.  (Renamed the local that shadowed
 * this method's own name.) */
private def relativized(roots: List[File], path: File): String =
{
  val candidates = roots.flatMap(root => Path.relativize(root, path))
  if(candidates.isEmpty)
    path.getName
  else
    candidates.reduceLeft( (a, b) => if(b.length < a.length) b else a )
}
|
||||
}
|
||||
|
|
@ -0,0 +1,349 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 Mark Harrah, David MacIver
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import impl.PropertiesUtilities
|
||||
import scala.reflect.Manifest
|
||||
|
||||
/** Declares the kinds of typed properties an sbt build environment can expose and the
 * factory methods for creating them. */
trait Environment
{
  /** A typed, named value attached to this environment.  Instances are created through the
   * 'system*' and 'property*' factory methods below. */
  abstract class Property[T] extends NotNull
  {
    /** Explicitly sets the value of this property to 'v'.*/
    def update(v: T): Unit
    /** Returns full information about this property's current value. */
    def resolve: PropertyResolution[T]
    /** Returns the current value of this property or throws an exception if the value could not be obtained.*/
    def value: T = resolve.value
    /** Returns the current value of this property in an 'Option'; 'None' indicates the value could not be obtained.*/
    def get: Option[T] = resolve.toOption
    /** Applies 'f' to the value, when one resolves. */
    def foreach(f: T => Unit): Unit = resolve.foreach(f)
  }

  /** Creates a system property with the given name and no default value.*/
  def system[T](propName: String)(implicit format: Format[T]): Property[T]
  /** Creates a system property with the given name and the given default value to use if no value is explicitly specified.*/
  def systemOptional[T](propName: String, defaultValue: => T)(implicit format: Format[T]): Property[T]
  /** Creates a user-defined property with no default value.  When not explicitly set, it tries to
   * inherit its value from a parent environment (if one exists).  An explicitly specified value
   * persists between builds when the returned object is assigned to a 'val' in this 'Environment'.*/
  def property[T](implicit manifest: Manifest[T], format: Format[T]): Property[T]
  /** Like 'property', but uses the given 'format' to convert values to and from the 'String'
   * representation used for persistence.*/
  def propertyF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = property(manifest, format)
  /** Creates a user-defined property with no default value and no value inheritance from a parent
   * environment.  Its value persists between builds when the returned object is assigned to a
   * 'val' in this 'Environment'.*/
  def propertyLocal[T](implicit manifest: Manifest[T], format: Format[T]): Property[T]
  /** Like 'propertyLocal', but uses the given 'format' for persistence.*/
  def propertyLocalF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = propertyLocal(manifest, format)
  /** Creates a user-defined property that falls back to 'defaultValue' when no value is explicitly
   * specified.  Its value persists between builds when the returned object is assigned to a 'val'
   * in this 'Environment'.*/
  def propertyOptional[T](defaultValue: => T)(implicit manifest: Manifest[T], format: Format[T]): Property[T]
  /** Like 'propertyOptional', but uses the given 'format' for persistence.*/
  def propertyOptionalF[T](defaultValue: => T, format: Format[T])(implicit manifest: Manifest[T]): Property[T] =
    propertyOptional(defaultValue)(manifest, format)
}
|
||||
|
||||
import scala.collection.Map
|
||||
/** Standard implementation of 'Environment'.  User-defined properties are discovered by
 * reflection over this object's vals and are persisted in the file at 'envBackingPath'. */
trait BasicEnvironment extends Environment
{
  protected def log: Logger
  /** The location of the properties file that backs the user-defined properties. */
  def envBackingPath: Path
  /** The environment from which user-defined properties inherit (if enabled). */
  protected def parentEnvironment: Option[BasicEnvironment] = None
  /** The identifier used in messages to refer to this environment. */
  def environmentLabel = envBackingPath.absolutePath

  // dirty flag: set whenever a property is explicitly updated, cleared by saveEnvironment
  private[this] var isModified = false
  private[sbt] def setEnvironmentModified(modified: Boolean) { synchronized { isModified = modified } }
  private[this] def isEnvironmentModified = synchronized { isModified }

  // default Formats for common property types; implicit so that property[T] picks them up
  implicit val IntFormat: Format[Int] = new SimpleFormat[Int] { def fromString(s: String) = java.lang.Integer.parseInt(s) }
  implicit val LongFormat: Format[Long] = new SimpleFormat[Long] { def fromString(s: String) = java.lang.Long.parseLong(s) }
  implicit val DoubleFormat: Format[Double] = new SimpleFormat[Double] { def fromString(s: String) = java.lang.Double.parseDouble(s) }
  implicit val BooleanFormat: Format[Boolean] = new SimpleFormat[Boolean] { def fromString(s: String) = java.lang.Boolean.valueOf(s).booleanValue }
  implicit val StringFormat: Format[String] = Format.string
  // deliberately not implicit: opt in when the empty string must be rejected
  val NonEmptyStringFormat: Format[String] = new SimpleFormat[String]
  {
    def fromString(s: String) =
    {
      val trimmed = s.trim
      if(trimmed.isEmpty)
        error("The empty string is not allowed.")
      trimmed
    }
  }
  implicit val VersionFormat: Format[Version] =
    new SimpleFormat[Version]
    {
      def fromString(s: String) = Version.fromString(s).fold(msg => error(msg), x => x)
    }
  implicit val FileFormat = Format.file

  /** Implementation of 'Property' for user-defined properties. */
  private[sbt] class UserProperty[T](lazyDefaultValue: => Option[T], format: Format[T], inheritEnabled: Boolean,
    inheritFirst: Boolean, private[BasicEnvironment] val manifest: Manifest[T]) extends Property[T]
  {
    /** The name of this property is used for persistence in the properties file and as an identifier in messages.
     * It is recovered by locating this instance (by identity) in the reflectively built propertyMap. */
    lazy val name = propertyMap.find( p => p._2 eq this ).map(_._1)
    /** Gets the name of this property or an alternative if the name is not available.*/
    private def nameString = name.getOrElse("<unnamed>")
    /** The lazily evaluated default value for this property.*/
    private lazy val defaultValue = lazyDefaultValue
    /** The explicitly set value for this property.*/
    private[BasicEnvironment] var explicitValue =
    {
      // seeded from the persisted properties file, keyed by this property's discovered name
      def initialValue = for(n <- name; stringValue <- initialValues.get(n)) yield format.fromString(stringValue)
      new LazyVar[Option[T]](initialValue) // ensure propertyMap is initialized before a read occurs
    }
    def update(v: T): Unit = synchronized { explicitValue() = Some(v); setEnvironmentModified(true) }
    def resolve: PropertyResolution[T] =
      synchronized
      {
        if(inheritFirst) resolveInheritFirst
        else resolveDefaultFirst
      }
    // explicit value wins; then the parent environment; then the default
    private def resolveInheritFirst =
      explicitValue() match
      {
        case Some(v) => DefinedValue(v, false, false)
        case None =>
          val inherited = inheritedValue
          // note that the following means the default value will not be used if an exception occurs inheriting
          inherited orElse getDefault(inherited)
      }
    // explicit value wins; then the default; then the parent environment
    private def resolveDefaultFirst =
      explicitValue() match
      {
        case Some(v) => DefinedValue(v, false, false)
        case None => getDefault(inheritedValue)
      }
    private def getDefault(orElse: => PropertyResolution[T]): PropertyResolution[T] =
      try
      {
        defaultValue match
        {
          case Some(v) => DefinedValue(v, false, true)
          case None => orElse
        }
      } catch { case e: Exception =>
        ResolutionException("Error while evaluating default value for property", Some(e))
      }

    private def inheritedValue: PropertyResolution[T] =
    {
      val propOption = if(inheritEnabled) parentProperty else None
      propOption match
      {
        case Some(prop) => tryToInherit(prop)
        case None => UndefinedValue(nameString, environmentLabel)
      }
    }
    // the same-named property on the parent environment, when both exist
    private def parentProperty = for(parent <- parentEnvironment; n <- name; prop <- parent.propertyMap.get(n)) yield prop

    // inheriting is only safe when the parent property's value type conforms to ours;
    // the Manifest subtype check guards the cast below
    private def tryToInherit[R](prop: BasicEnvironment#UserProperty[R]): PropertyResolution[T] =
    {
      if(prop.manifest <:< manifest)
        markInherited(prop.resolve.asInstanceOf[PropertyResolution[T]])
      else
        ResolutionException("Could not inherit property '" + nameString + "' from '" + environmentLabel + "':\n" +
          "\t Property had type " + prop.manifest + ", expected type " + manifest, None)
    }
    // flag a successfully inherited value as inherited, preserving its default flag
    private def markInherited(result: PropertyResolution[T]) =
      result match
      {
        case DefinedValue(v, isInherited, isDefault) => DefinedValue(v, true, isDefault)
        case x => x
      }

    override def toString = nameString + "=" + resolve

    /** Gets the explicitly set value converted to a 'String'.*/
    private[sbt] def getStringValue: Option[String] = explicitValue().map(format.toString)
    /** Explicitly sets the value for this property by converting the given string value.*/
    private[sbt] def setStringValue(s: String) { update(format.fromString(s)) }
  }
  /** Implementation of 'Property' for system properties (i.e. System.getProperty/setProperty) */
  private class SystemProperty[T](val name: String, lazyDefaultValue: => Option[T], val format: Format[T]) extends Property[T]
  {
    def resolve =
    {
      val rawValue = System.getProperty(name)
      if(rawValue == null)
        notFound
      else
      {
        // parse errors are reported as a ResolutionException rather than thrown
        Control.convertException(format.fromString(rawValue)) match
        {
          case Left(e) => ResolutionException("Error parsing system property '" + name + "': " + e.toString, Some(e))
          case Right(x) => DefinedValue(x, false, false)
        }
      }
    }
    /** Handles resolution when the property has no explicit value. If there is a default value, that is returned,
     * otherwise, UndefinedValue is returned.*/
    private def notFound =
    {
      defaultValue match
      {
        case Some(dv) =>
        {
          log.debug("System property '" + name + "' does not exist, using provided default.")
          DefinedValue(dv, false, true)
        }
        case None => UndefinedValue(name, environmentLabel)
      }
    }
    protected lazy val defaultValue = lazyDefaultValue
    // setting a system property can fail (e.g. security manager); failure is logged, not thrown
    def update(t: T)
    {
      for(e <- Control.convertException(System.setProperty(name, format.toString(t))).left)
      {
        log.trace(e)
        log.warn("Error setting system property '" + name + "': " + e.toString)
      }
    }
    override def toString = name + "=" + resolve
  }

  def system[T](propertyName: String)(implicit format: Format[T]): Property[T] =
    new SystemProperty[T](propertyName, None, format)
  def systemOptional[T](propertyName: String, defaultValue: => T)(implicit format: Format[T]): Property[T] =
    new SystemProperty[T](propertyName, Some(defaultValue), format)

  def property[T](implicit manifest: Manifest[T], format: Format[T]): Property[T] =
    new UserProperty[T](None, format, true, false, manifest)
  def propertyLocal[T](implicit manifest: Manifest[T], format: Format[T]): Property[T] =
    new UserProperty[T](None, format, false, false, manifest)
  def propertyOptional[T](defaultValue: => T)(implicit manifest: Manifest[T], format: Format[T]): Property[T] =
    propertyOptional(defaultValue, false)(manifest, format)
  def propertyOptional[T](defaultValue: => T, inheritFirst: Boolean)(implicit manifest: Manifest[T], format: Format[T]): Property[T] =
    new UserProperty[T](Some(defaultValue), format, true, inheritFirst, manifest)

  private type AnyUserProperty = UserProperty[_]
  /** Maps property name to property. The map is constructed by reflecting vals defined on this object,
   * so it should not be referenced during initialization or else subclass properties will be missed.**/
  private lazy val propertyMap: Map[String, AnyUserProperty] =
  {
    log.debug("Discovering properties")
    val propertyMap = new scala.collection.mutable.HashMap[String, AnyUserProperty]
    // AnyProperty is required because the return type of the property*[T] methods is Property[T]
    // and so the vals we are looking for have type Property[T] and not UserProperty[T]
    // We then only keep instances of UserProperty
    val vals = Environment.reflectiveMappings(this, classOf[Property[_]])
    for( (name, property: AnyUserProperty) <- vals)
      propertyMap(name) = property
    propertyMap //.readOnly (not currently in 2.8)
  }
  // name -> string value pairs loaded once from the backing properties file
  private val initialValues: Map[String, String] =
  {
    val map = new scala.collection.mutable.HashMap[String, String]
    for(errorMsg <- impl.MapUtilities.read(map, envBackingPath, log))
      log.error("Error loading properties from " + environmentLabel + " : " + errorMsg)
    map //.readOnly (not currently in 2.8)
  }

  def propertyNames: Iterable[String] = propertyMap.keys.toList
  def getPropertyNamed(name: String): Option[UserProperty[_]] = propertyMap.get(name)
  def propertyNamed(name: String): UserProperty[_] = propertyMap(name)
  /** Writes all explicitly set property values to the backing file, but only when something
   * was modified since the last save.  Returns None on success or Some(errorMessage). */
  def saveEnvironment(): Option[String] =
  {
    if(isEnvironmentModified)
    {
      val properties = new java.util.Properties
      for( (name, variable) <- propertyMap; stringValue <- variable.getStringValue)
        properties.setProperty(name, stringValue)
      val result = PropertiesUtilities.write(properties, "Project properties", envBackingPath, log)
      setEnvironmentModified(false)
      result
    }
    else
      None
  }
  /** The user-defined properties that currently resolve to no value. */
  private[sbt] def uninitializedProperties: Iterable[(String, Property[_])] = propertyMap.filter(_._2.get.isEmpty)
}
|
||||
private object Environment
{
  /** Reflectively collects the vals of type 'clazz' declared on 'obj', keying each entry by the
   * val's camelCase name rewritten with '.' separators. */
  def reflectiveMappings[T](obj: AnyRef, clazz: Class[T]): Map[String, T] =
  {
    val found = new scala.collection.mutable.OpenHashMap[String, T]
    ReflectUtilities.allValsC(obj, clazz).foreach { case (name, value) =>
      found(ReflectUtilities.transformCamelCase(name, '.')) = value
    }
    found
  }
}
|
||||
|
||||
/** The outcome of resolving a property: either a value ('DefinedValue') or the reason no value
 * is available ('UndefinedValue' or 'ResolutionException'). */
sealed trait PropertyResolution[+T] extends NotNull
{
  /** The resolved value; throws when resolution did not produce one. */
  def value: T
  /** This resolution when it succeeded, otherwise the result of evaluating 'r'. */
  def orElse[R >: T](r: => PropertyResolution[R]): PropertyResolution[R]
  /** 'Some' with the resolved value, 'None' when unresolved. */
  def toOption: Option[T]
  /** Applies 'f' to the value, when one resolved. */
  def foreach(f: T => Unit): Unit
  def map[R](f: T => R): PropertyResolution[R]
  def flatMap[R](f: T => PropertyResolution[R]): PropertyResolution[R]
}
|
||||
/** Shared implementation for the value-less resolutions.  The self type requires mixing into a
 * RuntimeException, which lets 'value' throw the resolution itself. */
sealed trait NoPropertyValue extends PropertyResolution[Nothing]
{ self: RuntimeException with PropertyResolution[Nothing] =>

  def toOption = None
  def value = throw this
  def foreach(f: Nothing => Unit) {}
  def map[R](f: Nothing => R): PropertyResolution[R] = this
  def flatMap[R](f: Nothing => PropertyResolution[R]): PropertyResolution[R] = this
}
|
||||
/** Resolution failed with an error.  'orElse' keeps this failure because an exception carries
 * more information than any alternative resolution would. */
final case class ResolutionException(message: String, exception: Option[Throwable])
  extends RuntimeException(message, exception.getOrElse(null)) with NoPropertyValue
{
  def orElse[R](r: => PropertyResolution[R]) = this
}
|
||||
/** Resolution found no value anywhere for property 'name' in the environment labeled
 * 'environmentLabel'.  'orElse' prefers the alternative unless it is also an UndefinedValue,
 * in which case this (earlier) one is kept. */
final case class UndefinedValue(name: String, environmentLabel: String)
  extends RuntimeException("Value for property '" + name + "' from " + environmentLabel + " is undefined.") with NoPropertyValue
{
  def orElse[R](r: => PropertyResolution[R]) =
  {
    // fix: evaluate the by-name argument exactly once; the previous code evaluated 'r' again
    // in the non-UndefinedValue branch, re-running any side-effecting resolution
    val alternative = r
    alternative match
    {
      case _: UndefinedValue => this
      case _ => alternative
    }
  }
}
|
||||
/** Successful resolution to 'value'.  'isInherited' records that the value came from a parent
 * environment; 'isDefault' that it came from the property's default. */
final case class DefinedValue[T](value: T, isInherited: Boolean, isDefault: Boolean) extends PropertyResolution[T]
{
  def toOption = Some(value)
  def orElse[R >: T](r: => PropertyResolution[R]) = this
  def foreach(f: T => Unit) { f(value) }
  def map[R](f: T => R) = DefinedValue[R](f(value), isInherited, isDefault)
  def flatMap[R](f: T => PropertyResolution[R]) = f(value)
}
|
||||
/** A thread-safe mutable cell whose initial value is computed lazily, on the first read.
 * 'update' may overwrite the stored value at any time (and suppresses the initial computation
 * if it happens first). */
private final class LazyVar[T](initialValue: => T) extends NotNull
{
  private[this] var stored: Option[T] = None
  def apply() =
    synchronized
    {
      stored getOrElse
      {
        val computed = initialValue
        stored = Some(computed)
        computed
      }
    }
  def update(newValue: T) = synchronized { stored = Some(newValue) }
}
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** Defines a function to call as sbt exits. */
trait ExitHook extends NotNull
{
  /** A human-readable identifier, used in log messages about this hook. */
  def name: String
  /** Implementations perform their shutdown work here; called once when this hook runs. */
  def runBeforeExiting(): Unit
}
|
||||
|
||||
/** Tracks the functions to run as sbt shuts down. */
object ExitHooks
{
  /** All currently registered hooks. */
  private val exitHooks = new scala.collection.mutable.HashSet[ExitHook]
  /** Registers 'hook' to be run before sbt exits. */
  private[sbt] def register(hook: ExitHook) { exitHooks += hook }
  /** Removes a previously registered hook. */
  private[sbt] def unregister(hook: ExitHook) { exitHooks -= hook }
  /** Runs every registered hook, logging and discarding anything a hook throws so that each
   * remaining hook still gets a chance to run. */
  private[sbt] def runExitHooks(log: Logger)
  {
    // snapshot the set so concurrent (un)registration cannot disturb iteration
    exitHooks.toList.foreach { hook =>
      try
      {
        log.debug("Running exit hook '" + hook.name + "'...")
        hook.runBeforeExiting()
      }
      catch
      {
        // deliberately broad: one failing hook must not prevent the others from running
        case e =>
          log.trace(e)
          log.error("Error running exit hook '" + hook.name + "': " + e.toString)
      }
    }
  }
}
|
||||
|
|
@ -0,0 +1,108 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import scala.collection.{mutable, Map, Set}
|
||||
|
||||
/** The outputs ('products') of a task together with the inputs ('sources') they are built
 * from; used to decide whether the task must run. */
sealed trait ProductsSources extends NotNull
{
  def products: Iterable[Path]
  def sources: Iterable[Path]
}
|
||||
/** Builder that pairs a set of products with the sources they depend on, producing a
 * 'ProductsSources'. */
sealed trait ProductsWrapper extends NotNull
{
  def from(sources: PathFinder): ProductsSources
  def from(sources: => Iterable[Path]): ProductsSources = from(Path.lazyPathFinder(sources))
}
|
||||
/** Provides methods to define tasks with basic conditional execution based on the sources
 * and products of the task. */
trait FileTasks extends Project
{
  implicit def wrapProduct(product: => Path): ProductsWrapper = FileTasks.wrapProduct(product)
  implicit def wrapProducts(productsList: => Iterable[Path]): ProductsWrapper = FileTasks.wrapProducts(productsList)

  /** Runs 'action' when the given products are out of date with respect to the given sources. */
  def fileTask(label: String, files: ProductsSources)(action: => Option[String]): Task =
    task { FileTasks.runOption(label, files, log)(action) }
  /** Runs 'action' when any of the given products does not exist. */
  def fileTask(label: String, products: => Iterable[Path])(action: => Option[String]): Task =
    task { FileTasks.existenceCheck[Option[String]](label, products, log)(action)(None) }

  /** Unlabeled variant of the products-vs-sources task. */
  def fileTask(files: ProductsSources)(action: => Option[String]): Task = fileTask("", files)(action)
  /** Unlabeled variant of the missing-products task. */
  def fileTask(products: => Iterable[Path])(action: => Option[String]): Task = fileTask("", products)(action)
}
|
||||
object FileTasks
{
  implicit def wrapProduct(product: => Path): ProductsWrapper = wrapProducts(product :: Nil)
  implicit def wrapProducts(productsList: => Iterable[Path]): ProductsWrapper =
    new ProductsWrapper
    {
      def from(sourceFinder: PathFinder) =
        new ProductsSources
        {
          def products = productsList
          def sources = sourceFinder.get
        }
    }
  /** Runs 'ifOutofdate' if the given products are out of date with respect to the given sources.
   * When the action reports an error, the (possibly partial) products are deleted so the next
   * run is not mistaken for up to date. */
  def runOption(label: String, files: ProductsSources, log: Logger)(ifOutofdate: => Option[String]): Option[String] =
  {
    val result = apply[Option[String]](label, files, log)(ifOutofdate)(None)
    if(result.isDefined)
      FileUtilities.clean(files.products, true, log)
    result
  }
  /** Returns 'ifOutofdate' if the given products are out of date with respect to the given sources. Otherwise, returns 'ifUptodate'. */
  def apply[T](label: String, files: ProductsSources, log: Logger)(ifOutofdate: => T)(ifUptodate: => T): T =
  {
    val products = files.products
    existenceCheck[T](label, products, log)(ifOutofdate)
    {
      val sources = files.sources
      if(sources.isEmpty)
      {
        log.debug("Running " + label + " task because no sources exist.")
        ifOutofdate
      }
      else if(products.isEmpty)
      {
        // fix: an empty products collection used to reach reduceLeft below and throw;
        // a task with sources but no declared products is simply treated as out of date
        log.debug("Running " + label + " task because it declares no products.")
        ifOutofdate
      }
      else
      {
        val oldestProductModifiedTime = mapLastModified(products).reduceLeft(_ min _)
        val newestSourceModifiedTime = mapLastModified(sources).reduceLeft(_ max _)
        if(oldestProductModifiedTime < newestSourceModifiedTime)
        {
          if(log.atLevel(Level.Debug))
          {
            log.debug("Running " + label + " task because the following sources are newer than at least one product: ")
            logDebugIndented(sources.filter(_.lastModified > oldestProductModifiedTime), log)
            log.debug(" The following products are older than at least one source: ")
            logDebugIndented(products.filter(_.lastModified < newestSourceModifiedTime), log)
          }
          ifOutofdate
        }
        else
          ifUptodate
      }
    }
  }
  /** Returns 'ifAllExist' when every product exists; otherwise logs the missing products and returns 'action'. */
  private def existenceCheck[T](label: String, products: Iterable[Path], log: Logger)(action: => T)(ifAllExist: => T) =
  {
    val nonexisting = products.filter(!_.exists)
    if(nonexisting.isEmpty)
      ifAllExist
    else
    {
      if(log.atLevel(Level.Debug))
      {
        log.debug("Running " + label + " task because at least one product does not exist:")
        logDebugIndented(nonexisting, log)
      }
      action
    }
  }
  /** Logs each element of 'it' at debug level, indented by a tab. */
  private def logDebugIndented[T](it: Iterable[T], log: Logger) { it.foreach(x => log.debug("\t" + x)) }
  /** The last-modified timestamp of each path. */
  private def mapLastModified(paths: Iterable[Path]): Iterable[Long] = paths.map(_.lastModified)
}
|
||||
|
|
@ -0,0 +1,902 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah, Nathan Hamblen
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{Closeable, File, FileInputStream, FileOutputStream, InputStream, OutputStream}
|
||||
import java.io.{ByteArrayOutputStream, InputStreamReader, OutputStreamWriter}
|
||||
import java.io.{BufferedReader, BufferedWriter, FileReader, FileWriter, Reader, Writer}
|
||||
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
|
||||
import java.net.{URL, URISyntaxException}
|
||||
import java.nio.charset.{Charset, CharsetDecoder, CharsetEncoder}
|
||||
import java.nio.channels.FileChannel
|
||||
import java.util.jar.{Attributes, JarEntry, JarFile, JarInputStream, JarOutputStream, Manifest}
|
||||
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}
|
||||
|
||||
import OpenResource._
|
||||
|
||||
/** A snapshot of files copied into temporary directory 'temp'; 'toRestore' maps each backup
 * file to the original location it should be copied back to. */
final class Preserved private[sbt](toRestore: scala.collection.Map[File, Path], temp: File) extends NotNull
{
  /** Copies every preserved file back to its original location, then always removes the
   * temporary directory backing this snapshot, even when a copy fails. */
  def restore(log: Logger) =
  {
    def copyBack =
      Control.lazyFold(toRestore.toList) { case (src, dest) =>
        FileUtilities.copyFile(src, dest.asFile, log)
      }
    try { copyBack }
    finally { FileUtilities.clean(Path.fromFile(temp) :: Nil, true, log) }
  }
}
|
||||
|
||||
/** A collection of file related methods. */
|
||||
object FileUtilities
|
||||
{
|
||||
import wrap.Wrappers.readOnly
/** The size of the byte or char buffer used in various methods.*/
private val BufferSize = 8192
/** The platform line separator, captured once at class initialization. */
val Newline = System.getProperty("line.separator")
/** A pattern used to split a String by path separator characters.*/
private val PathSeparatorPattern = java.util.regex.Pattern.compile(File.pathSeparator)

/** Splits a String around path separator characters. */
private[sbt] def pathSplit(s: String) = PathSeparatorPattern.split(s)
|
||||
|
||||
/** Copies each of 'paths' into a freshly created temporary directory, returning a Preserved
 * handle that can later restore them.  Left carries the error message when the temporary
 * directory could not be created. */
def preserve(paths: Iterable[Path], log: Logger): Either[String, Preserved] =
{
  for(tmp <- createTemporaryDirectory(log).right) yield
  {
    val pathMap = new scala.collection.mutable.HashMap[File, Path]
    val destinationDirectory = Path.fromFile(tmp)
    for(source <- paths)
    {
      // mirror the source's relative path underneath the temporary directory
      val toPath = Path.fromString(destinationDirectory, source.relativePath)
      // NOTE(review): copyFile's result is discarded here, so individual copy failures
      // are silently ignored — confirm this best-effort behavior is intended
      copyFile(source, toPath, log)
      pathMap(toPath.asFile) = source
    }
    new Preserved(readOnly(pathMap), tmp)
  }
}
|
||||
|
||||
/** Gzips the file 'in' and writes it to 'out'. 'in' cannot be the same file as 'out'.
 * Returns None on success or Some(errorMessage). */
def gzip(in: Path, out: Path, log: Logger): Option[String] =
{
  require(in != out, "Input file cannot be the same as the output file.")
  readStream(in.asFile, log) { inputStream =>
    writeStream(out.asFile, log) { outputStream =>
      gzip(inputStream, outputStream, log)
    }
  }
}
|
||||
/** Gzips the InputStream 'input' and writes it to 'output'. Neither stream is closed.
 * Returns None on success or Some(errorMessage). */
def gzip(input: InputStream, output: OutputStream, log: Logger): Option[String] =
  gzipOutputStream.ioOption(output, "gzipping", log) { gzStream => transfer(input, gzStream, log) }
|
||||
|
||||
/** Gunzips the InputStream 'input' and writes the result to 'output'. Neither stream is closed.
 * Returns None on success or Some(errorMessage). */
def gunzip(input: InputStream, output: OutputStream, log: Logger): Option[String] =
  gzipInputStream.ioOption(input, "gunzipping", log) { gzStream => transfer(gzStream, output, log) }
|
||||
/** Gunzips the file 'in' and writes it to 'out'. 'in' cannot be the same file as 'out'.
 * Returns None on success or Some(errorMessage). */
def gunzip(in: Path, out: Path, log: Logger): Option[String] =
{
  require(in != out, "Input file cannot be the same as the output file.")
  readStream(in.asFile, log) { inputStream =>
    writeStream(out.asFile, log) { outputStream =>
      gunzip(inputStream, outputStream, log)
    }
  }
}
|
||||
|
||||
/** Creates a jar file.
 * @param sources The files to include in the jar file. The path used for the jar is
 * relative to the base directory for the source. That is, the path in the jar for source
 * <code>(basePath ##) / x / y</code> is <code>x / y</code>.
 * @param outputJar The file to write the jar to.
 * @param manifest The manifest for the jar.
 * @param recursive If true, any directories in <code>sources</code> are recursively processed. Otherwise,
 * they are not
 * @param log The Logger to use. */
def jar(sources: Iterable[Path], outputJar: Path, manifest: Manifest, recursive: Boolean, log: Logger) =
  archive(sources, outputJar, Some(manifest), recursive, log)
/** Old name for 'jar'; kept only for source compatibility. */
@deprecated def pack(sources: Iterable[Path], outputJar: Path, manifest: Manifest, recursive: Boolean, log: Logger) =
  jar(sources, outputJar, manifest, recursive, log)
|
||||
/** Creates a zip file.
 * @param sources The files to include in the zip file. The path used for the zip is
 * relative to the base directory for the source. That is, the path in the zip for source
 * <code>(basePath ##) / x / y</code> is <code>x / y</code>.
 * @param outputZip The file to write the zip to.
 * @param recursive If true, any directories in <code>sources</code> are recursively processed. Otherwise,
 * they are not
 * @param log The Logger to use. */
def zip(sources: Iterable[Path], outputZip: Path, recursive: Boolean, log: Logger) =
  archive(sources, outputZip, None, recursive, log)
|
||||
|
||||
/** Shared implementation behind 'jar' and 'zip': packages 'sources' into 'outputPath',
 * producing a jar when 'manifest' is defined and a plain zip otherwise.  Returns None on
 * success or Some(errorMessage). */
private def archive(sources: Iterable[Path], outputPath: Path, manifest: Option[Manifest], recursive: Boolean, log: Logger) =
{
  log.info("Packaging " + outputPath + " ...")
  val outputFile = outputPath.asFile
  if(outputFile.isDirectory)
    Some("Specified output file " + outputFile + " is a directory.")
  else
  {
    // ensure the parent directory exists before opening the archive stream
    val outputDir = outputFile.getParentFile
    val result = createDirectory(outputDir, log) orElse
      withZipOutput(outputFile, manifest, log)
      { output =>
        // a manifest selects jar entries; otherwise plain zip entries are created
        val createEntry: (String => ZipEntry) = if(manifest.isDefined) new JarEntry(_) else new ZipEntry(_)
        writeZip(sources, output, recursive, log)(createEntry)
      }
    if(result.isEmpty)
      log.info("Packaging complete.")
    result
  }
}
|
||||
|
||||
/** Writes 'sources' into the already-open zip stream 'output'.  Each entry is named by the
 * source's relative path (with '/' separators); 'createEntry' chooses the entry type.
 * Always returns None: nonexistent sources only produce a warning. */
private def writeZip(sources: Iterable[Path], output: ZipOutputStream, recursive: Boolean, log: Logger)(createEntry: String => ZipEntry) =
{
  def add(source: Path)
  {
    val sourceFile = source.asFile
    if(sourceFile.isDirectory)
    {
      // directories get no entry of their own; their children are added when 'recursive'
      if(recursive)
        wrapNull(sourceFile.listFiles).foreach(file => add(source / file.getName))
    }
    else if(sourceFile.exists)
    {
      val relativePath = source.relativePathString("/")
      log.debug("\tAdding " + source + " as " + relativePath + " ...")
      val nextEntry = createEntry(relativePath)
      // preserve the source's timestamp on the archive entry
      nextEntry.setTime(sourceFile.lastModified)
      output.putNextEntry(nextEntry)
      transferAndClose(new FileInputStream(sourceFile), output, log)
      output.closeEntry()
    }
    else
      log.warn("\tSource " + source + " does not exist.")
  }
  sources.foreach(add)
  None
}
|
||||
|
||||
/** Opens 'file' as a JarOutputStream (when 'manifest' is defined, defaulting its
 * MANIFEST_VERSION to "1.0" if absent) or a plain ZipOutputStream, passes the stream to 'f',
 * and always closes it afterwards.  Returns None on success or Some(errorMessage). */
private def withZipOutput(file: File, manifest: Option[Manifest], log: Logger)(f: ZipOutputStream => Option[String]): Option[String] =
{
  writeStream(file, log)
  {
    fileOut =>
    {
      val (zipOut, ext) =
        manifest match
        {
          case Some(mf) =>
          {
            import Attributes.Name.MANIFEST_VERSION
            // a jar without Manifest-Version is ignored by tools, so default it
            val main = mf.getMainAttributes
            if(!main.containsKey(MANIFEST_VERSION))
              main.put(MANIFEST_VERSION, "1.0")
            (new JarOutputStream(fileOut, mf), "jar")
          }
          case None => (new ZipOutputStream(fileOut), "zip")
        }
      // 'ext' labels the error message; the stream is closed in the finally part
      Control.trapUnitAndFinally("Error writing " + ext + ": ", log)
        { f(zipOut) } { zipOut.close }
    }
  }
}
|
||||
import scala.collection.Set
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.*/
def unzip(from: Path, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
	unzip(from, toDirectory, AllPassFilter, log)
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.*/
def unzip(from: File, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
	unzip(from, toDirectory, AllPassFilter, log)
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.*/
def unzip(from: InputStream, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
	unzip(from, toDirectory, AllPassFilter, log)
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.*/
def unzip(from: URL, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
	unzip(from, toDirectory, AllPassFilter, log)

/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.
 * Only the entries that match the given filter are extracted. */
def unzip(from: Path, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
	unzip(from.asFile, toDirectory, filter, log)
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.
 * Only the entries that match the given filter are extracted. */
def unzip(from: File, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
	readStreamValue(from, log) { in => unzip(in, toDirectory, filter, log) }
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.
 * Only the entries that match the given filter are extracted. */
def unzip(from: URL, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
	readStreamValue(from, log) { stream => unzip(stream, toDirectory, filter, log) }
/** Unzips the contents of the zip file <code>from</code> to the <code>toDirectory</code> directory.
 * Only the entries that match the given filter are extracted. Returns the set of extracted paths. */
def unzip(from: InputStream, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
	createDirectory(toDirectory, log) match
	{
		case Some(err) => Left(err)
		case None => zipInputStream.io(from, "unzipping", log) { zipIn => extract(zipIn, toDirectory, filter, log) }
	}
|
||||
/** Extracts every entry accepted by `filter` from the open stream `from` into `toDirectory`.
 * Directory entries become directories; file entries are written out and collected in the
 * returned set. Returns Left(error) on the first failure, Right(extracted paths) otherwise. */
private def extract(from: ZipInputStream, toDirectory: Path, filter: NameFilter, log: Logger) =
{
	val extracted = new scala.collection.mutable.HashSet[Path]
	def processNext(): Option[String] =
	{
		val entry = from.getNextEntry
		if(entry == null)
			None  // end of archive
		else
		{
			val name = entry.getName
			val entryResult =
				if(filter.accept(name))
				{
					val target = Path.fromString(toDirectory, name)
					log.debug("Extracting zip entry '" + name + "' to '" + target + "'")
					val r =
						if(entry.isDirectory)
							createDirectory(target, log)
						else
						{
							extracted += target
							writeStream(target.asFile, log) { out => FileUtilities.transfer(from, out, log) }
						}
					//target.asFile.setLastModified(entry.getTime)
					r
				}
				else
				{
					log.debug("Ignoring zip entry '" + name + "'")
					None
				}
			from.closeEntry()
			// stop at the first error, otherwise continue with the next entry
			entryResult match { case None => processNext(); case error => error }
		}
	}
	processNext().toLeft(readOnly(extracted))
}
|
||||
|
||||
/** Copies all bytes from the given input stream to the given output stream.
 * Neither stream is closed.*/
def transfer(in: InputStream, out: OutputStream, log: Logger): Option[String] =
	transferImpl(in, out, false, log)
/** Copies all bytes from the given input stream to the given output stream. The
 * input stream is closed after the method completes.*/
def transferAndClose(in: InputStream, out: OutputStream, log: Logger): Option[String] =
	transferImpl(in, out, true, log)
/** Copies `in` to `out` in BufferSize chunks; closes `in` afterwards when `close` is true. */
private def transferImpl(in: InputStream, out: OutputStream, close: Boolean, log: Logger): Option[String] =
	Control.trapUnitAndFinally("Error during transfer: ", log)
	{
		val buffer = new Array[Byte](BufferSize)
		def copyLoop(): None.type =
		{
			val count = in.read(buffer)
			if(count < 0)
				None  // end of stream
			else
			{
				out.write(buffer, 0, count)
				copyLoop()
			}
		}
		copyLoop()
	}
	{ if(close) in.close }
|
||||
|
||||
/** Creates a file at the given location, updating the modification time if it already exists.*/
def touch(path: Path, log: Logger): Option[String] = touch(path.asFile, log)
/** Creates a file at the given location, updating the modification time if it already exists.
 * Returns None on success or Some(error message). */
def touch(file: File, log: Logger): Option[String] =
	Control.trapUnit("Could not create file " + file + ": ", log)
	{
		if(file.exists)
		{
			def updateFailBase = "Could not update last modified for file " + file
			Control.trapUnit(updateFailBase + ": ", log)
			{ if(file.setLastModified(System.currentTimeMillis)) None else Some(updateFailBase) }
		}
		else
			createDirectory(file.getParentFile, log) orElse
			{
				// Fix: the boolean result of createNewFile() was previously discarded, so a
				// failed creation was silently reported as success. createNewFile returns
				// false when the file already exists (e.g. created concurrently), which we
				// also treat as success.
				if(file.createNewFile() || file.exists) None
				else Some("Could not create file " + file)
			}
	}
|
||||
/** Creates a directory at the given location.*/
def createDirectory(dir: Path, log: Logger): Option[String] = createDirectory(dir.asFile, log)
/** Creates a directory (and any missing parents) at the given location.
 * Returns None on success or if the directory already exists; Some(error) otherwise. */
def createDirectory(dir: File, log: Logger): Option[String] =
	Control.trapUnit("Could not create directory " + dir + ": ", log)
	{
		if(dir.isDirectory)
			None
		else if(dir.exists)
			Some(dir + " exists and is not a directory.")
		// Fix: the boolean result of mkdirs() was previously discarded, so a failed
		// creation (permissions, read-only filesystem, ...) was logged as created and
		// reported as success. The isDirectory re-check tolerates losing a creation race.
		else if(dir.mkdirs() || dir.isDirectory)
		{
			log.debug("Created directory " + dir)
			None
		}
		else
			Some("Could not create directory " + dir)
	}
|
||||
/** Creates directories at the given locations.*/
def createDirectories(d: Seq[Path], log: Logger): Option[String] = createDirectories(d.toList.map(_.asFile), log)
/** Creates directories at the given locations, stopping at the first failure.
 * (orElse is by-name, so no further directories are attempted after an error.) */
def createDirectories(d: List[File], log: Logger): Option[String] =
	((None: Option[String]) /: d)( (result, dir) => result orElse createDirectory(dir, log) )
|
||||
/** The maximum number of times a unique temporary filename is attempted to be created.*/
private val MaximumTries = 10
/** Creates a uniquely named temporary directory under `temporaryDirectory` and returns it.
 * Gives up after MaximumTries failed attempts. */
def createTemporaryDirectory(log: Logger): Either[String, File] =
{
	def attempt(remaining: Int): Either[String, File] =
		if(remaining <= 0)
			Left("Could not create temporary directory.")
		else
		{
			val candidate = new File(temporaryDirectory, "sbt_" + java.lang.Integer.toHexString(random.nextInt))
			if(createDirectory(candidate, log).isEmpty)
				Right(candidate)
			else
				attempt(remaining - 1)
		}
	// original counted tries 0..MaximumTries inclusive, i.e. MaximumTries + 1 attempts
	attempt(MaximumTries + 1)
}
|
||||
|
||||
/** Creates a temporary directory, runs `action` in it, and deletes it afterwards. */
def withTemporaryDirectory(log: Logger)(action: File => Option[String]): Option[String] =
	doInTemporaryDirectory(log)(dir => action(dir).toLeft(())).left.toOption
/** Creates a temporary directory and provides its location to the given function. The directory
 * is deleted after the function returns.*/
def doInTemporaryDirectory[T](log: Logger)(action: File => Either[String, T]): Either[String, T] =
	createTemporaryDirectory(log).right.flatMap { dir =>
		Control.trapAndFinally("", log)
			{ action(dir) }
			{ delete(dir, true, log) }
	}
/** Creates a temporary file, provides it to `action`, and deletes it after `action` returns. */
def withTemporaryFile[T](log: Logger, prefix: String, postfix: String)(action: File => Either[String, T]): Either[String, T] =
	Control.trap("Error creating temporary file: ", log)
	{
		val tmp = File.createTempFile(prefix, postfix)
		Control.trapAndFinally("", log)
			{ action(tmp) }
			{ tmp.delete() }
	}
|
||||
|
||||
/** Copies the files declared in <code>sources</code> to the <code>destinationDirectory</code>
 * directory. The source directory hierarchy is flattened so that all copies are immediate
 * children of <code>destinationDirectory</code>. Directories are not recursively entered.*/
def copyFlat(sources: Iterable[Path], destinationDirectory: Path, log: Logger) =
{
	val copied = new scala.collection.mutable.HashSet[Path]
	val result = copyImpl(sources, destinationDirectory, log) { source =>
		val from = source.asFile
		val toPath = destinationDirectory / from.getName
		copied += toPath
		val to = toPath.asFile
		// NOTE(review): '&&' binds tighter than '||', so this reads as
		// !to.exists || (newer && !from.isDirectory) -- a missing target is copied even
		// when the source is a directory. Confirm this precedence is intended.
		if(!to.exists || from.lastModified > to.lastModified && !from.isDirectory)
		{
			log.debug("Copying " + source + " to " + toPath)
			copyFile(from, to, log)
		}
		else
			None
	}
	result.toLeft(readOnly(copied))
}
/** Shared driver for the copy operations: ensures the destination directory exists, then
 * applies `doCopy` to each source in order, stopping at the first error. */
private def copyImpl(sources: Iterable[Path], destinationDirectory: Path, log: Logger)
	(doCopy: Path => Option[String]): Option[String] =
{
	val target = destinationDirectory.asFile
	val creationError = if(target.isDirectory) None else createDirectory(target, log)
	def copyAll(remaining: List[Path]): Option[String] =
		remaining match
		{
			case Nil => None
			case src :: rest =>
				doCopy(src) match
				{
					case None => copyAll(rest)
					case error => error
				}
		}
	creationError orElse ( Control.trapUnit("", log) { copyAll(sources.toList) } )
}
|
||||
/** Retrieves the content of the given URL and writes it to the given File. */
def download(url: URL, to: File, log: Logger) =
	readStream(url, log) { in =>
		writeStream(to, log) { out => transfer(in, out, log) }
	}
/** Copies the files declared in <code>sources</code> to the <code>destinationDirectory</code>
 * directory. Directories are not recursively entered. The destination hierarchy matches the
 * source paths relative to any base directories. For example:
 *
 * A source <code>(basePath ##) / x / y</code> is copied to <code>destinationDirectory / x / y</code>.
 * */
def copy(sources: Iterable[Path], destinationDirectory: Path, log: Logger) =
{
	val copied = new scala.collection.mutable.HashSet[Path]
	copyImpl(sources, destinationDirectory, log) { source =>
		val from = source.asFile
		val toPath = Path.fromString(destinationDirectory, source.relativePath)
		copied += toPath
		val to = toPath.asFile
		// up-to-date target: nothing to do (same condition as the original, De Morgan'd)
		if(to.exists && from.lastModified <= to.lastModified)
			None
		else if(from.isDirectory)
			createDirectory(to, log)
		else
		{
			log.debug("Copying " + source + " to " + toPath)
			copyFile(from, to, log)
		}
	}.toLeft(readOnly(copied))
}
|
||||
|
||||
/** Copies the files declared in <code>sources</code> to the <code>targetDirectory</code>
 * directory. The source directory hierarchy is flattened so that all copies are immediate
 * children of <code>targetDirectory</code>. Sources with duplicate names are collapsed to a
 * single (arbitrary) representative before copying. */
def copyFilesFlat(sources: Iterable[File], targetDirectory: Path, log: Logger) =
{
	require(targetDirectory.asFile.isDirectory, "Target '" + targetDirectory + "' is not a directory.")
	val byName = new scala.collection.mutable.HashMap[String, File]
	for(source <- sources) byName.put(source.getName, source)
	val uniquelyNamedSources = byName.values
	val copied = new scala.collection.mutable.HashSet[Path]
	def copyOne(source: File): Option[String] =
		if(source.isDirectory)
			copyAll(source.listFiles.toList)
		else if(!source.exists)
			None  // silently skip missing sources, as before
		else
		{
			val targetPath = targetDirectory / source.getName
			copied += targetPath
			if(!targetPath.exists || source.lastModified > targetPath.lastModified)
			{
				log.debug("Copying " + source + " to " + targetPath)
				copyFile(source, targetPath.asFile, log)
			}
			else
				None
		}
	def copyAll(remaining: List[File]): Option[String] =
		remaining match
		{
			case Nil => None
			case head :: tail =>
				copyOne(head) match
				{
					case None => copyAll(tail)
					case error => error
				}
		}
	Control.trap("Error copying files: ", log) { copyAll(uniquelyNamedSources.toList).toLeft(readOnly(copied)) }
}
|
||||
/** Copies <code>sourceFile</code> to <code>targetFile</code>. If <code>targetFile</code>
 * exists, it is overwritten. Note that unlike higher level copies in FileUtilities, this
 * method always performs the copy, even if sourceFile is older than targetFile.*/
def copyFile(sourceFile: Path, targetFile: Path, log: Logger): Option[String] =
	copyFile(sourceFile.asFile, targetFile.asFile, log)
/** Copies <code>sourceFile</code> to <code>targetFile</code>. If <code>targetFile</code>
 * exists, it is overwritten. Note that unlike higher level copies in FileUtilities, this
 * method always performs the copy, even if sourceFile is older than targetFile.*/
def copyFile(sourceFile: File, targetFile: File, log: Logger): Option[String] =
{
	require(sourceFile.exists, "Source file '" + sourceFile.getAbsolutePath + "' does not exist.")
	require(!sourceFile.isDirectory, "Source file '" + sourceFile.getAbsolutePath + "' is a directory.")
	readChannel(sourceFile, log)( in =>
		writeChannel(targetFile, log) { out =>
			val total = in.size
			// Fix: FileChannel.transferFrom may legally transfer fewer bytes than requested
			// in a single call; the original treated any short transfer as a failure. Loop
			// until everything is copied, failing only if no progress can be made.
			def copyRemaining(position: Long): Option[String] =
				if(position >= total)
					None
				else
				{
					val transferred = out.transferFrom(in, position, total - position)
					if(transferred <= 0)
						Some("Could not copy '" + sourceFile + "' to '" + targetFile + "' (" + position + "/" + total + " bytes copied)")
					else
						copyRemaining(position + transferred)
				}
			copyRemaining(0)
		}
	)
}
|
||||
|
||||
/** Synchronizes the contents of the <code>sourceDirectory</code> directory to the
 * <code>targetDirectory</code> directory.*/
def sync(sourceDirectory: Path, targetDirectory: Path, log: Logger): Option[String] =
	syncPaths((sourceDirectory ##) ** AllPassFilter, targetDirectory, log)
/** Copies `sources` into `targetDirectory` and then prunes everything else from it. */
def syncPaths(sources: PathFinder, targetDirectory: Path, log: Logger): Option[String] =
	copy(sources.get, targetDirectory, log).right.flatMap { copiedTo =>
		prune(targetDirectory, copiedTo, log).toLeft(())
	}.left.toOption
/** Deletes everything under `directory` that is not listed in `keepOnly`. */
def prune(directory: Path, keepOnly: Iterable[Path], log: Logger): Option[String] =
{
	val existing = ((directory ##) ** AllPassFilter).get
	val obsolete = scala.collection.mutable.HashSet(existing.toSeq: _*)
	obsolete --= keepOnly
	if(log.atLevel(Level.Debug))
		for(r <- obsolete) log.debug("Pruning " + r)
	clean(obsolete, true, log)
}

/** Copies the contents of the <code>source</code> directory to the <code>target</code> directory .*/
def copyDirectory(source: Path, target: Path, log: Logger): Option[String] =
	copyDirectory(source.asFile, target.asFile, log)
/** Copies the contents of the <code>source</code> directory to the <code>target</code> directory .*/
def copyDirectory(source: File, target: File, log: Logger): Option[String] =
{
	require(source.isDirectory, "Source '" + source.getAbsolutePath + "' is not a directory.")
	require(!target.exists, "Target '" + target.getAbsolutePath + "' already exists.")
	def copyDir(sourceDir: File, targetDir: File): Option[String] =
		createDirectory(targetDir, log) orElse copyContents(sourceDir, targetDir)
	def copyContents(sourceDir: File, targetDir: File): Option[String] =
		sourceDir.listFiles.foldLeft(None: Option[String]) { (result, file) =>
			result orElse
			{
				val targetFile = new File(targetDir, file.getName)
				if(file.isDirectory)
					copyDir(file, targetFile)
				else
					copyFile(file, targetFile, log)
			}
		}
	copyDir(source, target)
}
|
||||
|
||||
|
||||
/** Deletes the given file recursively.*/
def clean(file: Path, log: Logger): Option[String] = clean(file :: Nil, log)
/** Deletes the given files recursively.*/
def clean(files: Iterable[Path], log: Logger): Option[String] = clean(files, false, log)
/** Deletes the given files recursively. <code>quiet</code> determines the logging level.
 * If it is true, each file in <code>files</code> is logged at the <code>debug</code> level.
 * If it is false, the <code>info</code> level is used.*/
def clean(files: Iterable[Path], quiet: Boolean, log: Logger): Option[String] =
	deleteFiles(files.map(_.asFile), quiet, log)

/** Deletes each file in turn, stopping at the first error. */
private def deleteFiles(files: Iterable[File], quiet: Boolean, log: Logger): Option[String] =
	((None: Option[String]) /: files)( (result, file) => result orElse delete(file, quiet, log))
/** Deletes a single file or directory (recursively); directory children are always
 * deleted quietly. */
private def delete(file: File, quiet: Boolean, log: Logger): Option[String] =
{
	val level = if(quiet) Level.Debug else Level.Info
	Control.trapUnit("Error deleting file " + file + ": ", log)
	{
		if(file.isDirectory)
		{
			log.log(level, "Deleting directory " + file)
			deleteFiles(wrapNull(file.listFiles), true, log)
			file.delete
		}
		else if(file.exists)
		{
			log.log(level, "Deleting file " + file)
			file.delete
		}
		// NOTE(review): the boolean result of File.delete is discarded, so a non-exception
		// deletion failure is reported as success -- confirm before tightening.
		None
	}
}
|
||||
|
||||
/** Appends the given <code>String content</code> to the provided <code>file</code> using the default encoding.
 * A new file is created if it does not exist.*/
def append(file: File, content: String, log: Logger): Option[String] = append(file, content, Charset.defaultCharset, log)
/** Appends the given <code>String content</code> to the provided <code>file</code> using the given encoding.
 * A new file is created if it does not exist.*/
def append(file: File, content: String, charset: Charset, log: Logger): Option[String] =
	write(file, content, charset, true, log)

/** Writes the given <code>String content</code> to the provided <code>file</code> using the default encoding.
 * If the file exists, it is overwritten.*/
def write(file: File, content: String, log: Logger): Option[String] = write(file, content, Charset.defaultCharset, log)
/** Writes the given <code>String content</code> to the provided <code>file</code> using the given encoding.
 * If the file already exists, it is overwritten.*/
def write(file: File, content: String, charset: Charset, log: Logger): Option[String] =
	write(file, content, charset, false, log)
/** Common implementation: validates that `content` is encodable before opening the writer. */
private def write(file: File, content: String, charset: Charset, append: Boolean, log: Logger): Option[String] =
	if(charset.newEncoder.canEncode(content))
		write(file, charset, append, log) { writer => writer.write(content); None }
	else
		Some("String cannot be encoded by charset " + charset.name)

/** Opens a <code>Writer</code> on the given file using the default encoding,
 * passes it to the provided function, and closes the <code>Writer</code>.*/
def write(file: File, log: Logger)(f: Writer => Option[String]): Option[String] =
	write(file, Charset.defaultCharset, log)(f)
/** Opens a <code>Writer</code> on the given file using the given encoding,
 * passes it to the provided function, and closes the <code>Writer</code>.*/
def write(file: File, charset: Charset, log: Logger)(f: Writer => Option[String]): Option[String] =
	write(file, charset, false, log)(f)
private def write(file: File, charset: Charset, append: Boolean, log: Logger)(f: Writer => Option[String]): Option[String] =
	fileWriter(charset, append).ioOption(file, Writing, log)(f)
|
||||
|
||||
/** Opens a <code>Reader</code> on the given file using the default encoding,
 * passes it to the provided function, and closes the <code>Reader</code>.*/
def read(file: File, log: Logger)(f: Reader => Option[String]): Option[String] =
	read(file, Charset.defaultCharset, log)(f)
/** Opens a <code>Reader</code> on the given file using the given encoding,
 * passes it to the provided function, and closes the <code>Reader</code>.*/
def read(file: File, charset: Charset, log: Logger)(f: Reader => Option[String]): Option[String] =
	fileReader(charset).ioOption(file, Reading, log)(f)
/** Opens a <code>Reader</code> on the given file using the default encoding,
 * passes it to the provided function, and closes the <code>Reader</code>.*/
def readValue[R](file: File, log: Logger)(f: Reader => Either[String, R]): Either[String, R] =
	readValue(file, Charset.defaultCharset, log)(f)
/** Opens a <code>Reader</code> on the given file using the given encoding,
 * passes it to the provided function, and closes the <code>Reader</code>.*/
def readValue[R](file: File, charset: Charset, log: Logger)(f: Reader => Either[String, R]): Either[String, R] =
	fileReader(charset).io(file, Reading, log)(f)

/** Reads the contents of the given file into a <code>String</code> using the default encoding.
 * The resulting <code>String</code> is wrapped in <code>Right</code>.*/
def readString(file: File, log: Logger): Either[String, String] = readString(file, Charset.defaultCharset, log)
/** Reads the contents of the given file into a <code>String</code> using the given encoding.
 * The resulting <code>String</code> is wrapped in <code>Right</code>.*/
def readString(file: File, charset: Charset, log: Logger): Either[String, String] = readValue(file, charset, log)(readString)

def readString(in: InputStream, log: Logger): Either[String, String] = readString(in, Charset.defaultCharset, log)
def readString(in: InputStream, charset: Charset, log: Logger): Either[String, String] =
	streamReader.io((in, charset), Reading, log)(readString)
/** Reads `in` fully into a String and closes it. */
def readString(in: Reader, log: Logger): Either[String, String] =
	Control.trapAndFinally("Error reading bytes from reader: ", log)
		{ readString(in) }
		{ in.close() }
/** Drains `in` into a String in BufferSize chunks. The reader is not closed here. */
private def readString(in: Reader): Either[String, String] =
{
	val content = new StringBuilder
	val buffer = new Array[Char](BufferSize)
	def fill()
	{
		val count = in.read(buffer, 0, buffer.length)
		if(count >= 0)
		{
			content.append(buffer, 0, count)
			fill()
		}
	}
	fill()
	Right(content.toString)
}
|
||||
/** Appends the given bytes to the given file. */
def append(file: File, bytes: Array[Byte], log: Logger): Option[String] =
	writeBytes(file, bytes, true, log)
/** Writes the given bytes to the given file. If the file already exists, it is overwritten.*/
def write(file: File, bytes: Array[Byte], log: Logger): Option[String] =
	writeBytes(file, bytes, false, log)
private def writeBytes(file: File, bytes: Array[Byte], append: Boolean, log: Logger): Option[String] =
	writeStream(file, append, log) { out => out.write(bytes); None }

/** Reads the entire file into a byte array. */
def readBytes(file: File, log: Logger): Either[String, Array[Byte]] = readStreamValue(file, log)(readBytes)
/** Reads `in` fully into a byte array and closes it. */
def readBytes(in: InputStream, log: Logger): Either[String, Array[Byte]] =
	Control.trapAndFinally("Error reading bytes from input stream: ", log)
		{ readBytes(in) }
		{ in.close() }
/** Drains `in` into a byte array in BufferSize chunks. The stream is not closed here. */
private def readBytes(in: InputStream): Either[String, Array[Byte]] =
{
	val content = new ByteArrayOutputStream
	val buffer = new Array[Byte](BufferSize)
	def fill()
	{
		val count = in.read(buffer)
		if(count >= 0)
		{
			content.write(buffer, 0, count)
			fill()
		}
	}
	fill()
	Right(content.toByteArray)
}
|
||||
|
||||
/** Opens an <code>OutputStream</code> on the given file with append=true and passes the stream
 * to the provided function. The stream is closed before this function returns.*/
def appendStream(file: File, log: Logger)(f: OutputStream => Option[String]): Option[String] =
	fileOutputStream(true).ioOption(file, Appending, log)(f)
/** Opens an <code>OutputStream</code> on the given file and passes the stream
 * to the provided function. The stream is closed before this function returns.*/
def writeStream(file: File, log: Logger)(f: OutputStream => Option[String]): Option[String] =
	fileOutputStream(false).ioOption(file, Writing, log)(f)
/** Dispatches to appendStream or writeStream based on `append`. */
private def writeStream(file: File, append: Boolean, log: Logger)(f: OutputStream => Option[String]): Option[String] =
	if(append) appendStream(file, log)(f) else writeStream(file, log)(f)

/** Opens an <code>InputStream</code> on the given file and passes the stream
 * to the provided function. The stream is closed before this function returns.*/
def readStream(file: File, log: Logger)(f: InputStream => Option[String]): Option[String] =
	fileInputStream.ioOption(file, Reading, log)(f)
/** Opens an <code>InputStream</code> on the given file and passes the stream
 * to the provided function. The stream is closed before this function returns.*/
def readStreamValue[R](file: File, log: Logger)(f: InputStream => Either[String, R]): Either[String, R] =
	fileInputStream.io(file, Reading, log)(f)
/** Opens an <code>InputStream</code> on the given <code>URL</code> and passes the stream
 * to the provided function. The stream is closed before this function returns.*/
def readStream(url: URL, log: Logger)(f: InputStream => Option[String]): Option[String] =
	urlInputStream.ioOption(url, Reading, log)(f)
/** Opens an <code>InputStream</code> on the given <code>URL</code> and passes the stream
 * to the provided function. The stream is closed before this function returns.*/
def readStreamValue[R](url: URL, log: Logger)(f: InputStream => Either[String, R]): Either[String, R] =
	urlInputStream.io(url, Reading, log)(f)

/** Opens a <code>FileChannel</code> on the given file for writing and passes the channel
 * to the given function. The channel is closed before this function returns.*/
def writeChannel(file: File, log: Logger)(f: FileChannel => Option[String]): Option[String] =
	fileOutputChannel.ioOption(file, Writing, log)(f)
/** Opens a <code>FileChannel</code> on the given file for reading and passes the channel
 * to the given function. The channel is closed before this function returns.*/
def readChannel(file: File, log: Logger)(f: FileChannel => Option[String]): Option[String] =
	fileInputChannel.ioOption(file, Reading, log)(f)
/** Opens a <code>FileChannel</code> on the given file for reading and passes the channel
 * to the given function. The channel is closed before this function returns.*/
def readChannelValue[R](file: File, log: Logger)(f: FileChannel => Either[String, R]): Either[String, R] =
	fileInputChannel.io(file, Reading, log)(f)
|
||||
|
||||
/** Replaces a null listFiles result (I/O error or non-directory) with an empty array. */
private[sbt] def wrapNull(a: Array[File]): Array[File] =
	if(a == null) new Array[File](0) else a

/** Writes the given string to the writer followed by a newline.*/
private[sbt] def writeLine(writer: Writer, line: String)
{
	writer.write(line)
	writer.write(Newline)
}

/** Converts a URL to a File, falling back to the raw path when the URL is not a valid URI. */
def toFile(url: URL) =
	try { new File(url.toURI) }
	catch { case _: URISyntaxException => new File(url.getPath) }
|
||||
|
||||
/** The directory in which temporary files are placed.*/
val temporaryDirectory = new File(System.getProperty("java.io.tmpdir"))
/** The URL from which the given class was loaded; fails when no code source is available. */
def classLocation(cl: Class[_]): URL =
{
	val codeSource = cl.getProtectionDomain.getCodeSource
	if(codeSource == null) error("No class location for " + cl)
	else codeSource.getLocation
}
/** The file (jar or directory) from which the given class was loaded. */
def classLocationFile(cl: Class[_]): File = toFile(classLocation(cl))
def classLocation[T](implicit mf: scala.reflect.Manifest[T]): URL = classLocation(mf.erasure)
def classLocationFile[T](implicit mf: scala.reflect.Manifest[T]): File = classLocationFile(mf.erasure)

// located via a class known to live in each jar
lazy val scalaLibraryJar: File = classLocationFile[scala.ScalaObject]
lazy val scalaCompilerJar: File = classLocationFile[scala.tools.nsc.Settings]
def scalaJars: Iterable[File] = List(scalaLibraryJar, scalaCompilerJar)

/** The producer of randomness for unique name generation.*/
private val random = new java.util.Random

// operation labels used in error messages by the OpenResource helpers
private val Reading = "reading"
private val Writing = "writing"
private val Appending = "appending"
}
|
||||
|
||||
private abstract class OpenResource[Source, T] extends NotNull
|
||||
{
|
||||
import OpenResource.{unwrapEither, wrapEither}
|
||||
protected def open(src: Source, log: Logger): Either[String, T]
|
||||
def ioOption(src: Source, op: String, log: Logger)(f: T => Option[String]) =
|
||||
unwrapEither( io(src, op, log)(wrapEither(f)) )
|
||||
def io[R](src: Source, op: String, log: Logger)(f: T => Either[String,R]): Either[String, R] =
|
||||
open(src, log).right flatMap
|
||||
{
|
||||
resource => Control.trapAndFinally("Error " + op + " "+ src + ": ", log)
|
||||
{ f(resource) }
|
||||
{ close(resource) }
|
||||
}
|
||||
protected def close(out: T): Unit
|
||||
}
|
||||
private trait CloseableOpenResource[Source, T <: Closeable] extends OpenResource[Source, T]
|
||||
{
|
||||
protected def close(out: T): Unit = out.close()
|
||||
}
|
||||
import scala.reflect.{Manifest => SManifest}
|
||||
private abstract class WrapOpenResource[Source, T <: Closeable](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends CloseableOpenResource[Source, T]
|
||||
{
|
||||
private def label[S](m: SManifest[S]) = m.erasure.getSimpleName
|
||||
protected def open(source: Source): T
|
||||
protected final def open(source: Source, log: Logger): Either[String, T] =
|
||||
Control.trap("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ", log) { Right(open(source)) }
|
||||
}
|
||||
/** An OpenResource whose source is a File.  Creates the file's parent directories before opening. */
private abstract class OpenFile[T] extends OpenResource[File, T]
{
	/** Performs the actual open; exceptions are trapped by the final `open` below. */
	protected def open(file: File): T
	protected final def open(file: File, log: Logger): Either[String, T] =
	{
		// Ensure the parent directory exists so that opening for writing can succeed.
		val parent = file.getParentFile
		if(parent != null)
			FileUtilities.createDirectory(parent, log)
		Control.trap("Error opening " + file + ": ", log) { Right(open(file)) }
	}
}
|
||||
/** Combines file-based opening (with parent-directory creation) and Closeable-based closing. */
private abstract class CloseableOpenFile[T <: Closeable] extends OpenFile[T] with CloseableOpenResource[File, T]
|
||||
/** Factories for the concrete OpenResource implementations used by the I/O utilities. */
private object OpenResource
{
	// Adapters between the Option[String] (None = success) and Either[String, Unit] error styles.
	private def wrapEither[R](f: R => Option[String]): (R => Either[String, Unit]) = (r: R) => f(r).toLeft(())
	private def unwrapEither(e: Either[String, Unit]): Option[String] = e.left.toOption

	def fileOutputStream(append: Boolean) =
		new CloseableOpenFile[FileOutputStream] { protected def open(file: File) = new FileOutputStream(file, append) }
	def fileInputStream = new CloseableOpenFile[FileInputStream]
		{ protected def open(file: File) = new FileInputStream(file) }
	def urlInputStream = new CloseableOpenResource[URL, InputStream]
		{ protected def open(url: URL, log: Logger) = Control.trap("Error opening " + url + ": ", log) { Right(url.openStream) } }
	def fileOutputChannel = new CloseableOpenFile[FileChannel]
		{ protected def open(f: File) = (new FileOutputStream(f)).getChannel }
	def fileInputChannel = new CloseableOpenFile[FileChannel]
		{ protected def open(f: File) = (new FileInputStream(f)).getChannel }
	def fileWriter(charset: Charset, append: Boolean) = new CloseableOpenFile[Writer]
		{ protected def open(f: File) = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)) }
	def fileReader(charset: Charset) = new CloseableOpenFile[Reader]
		{ protected def open(f: File) = new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)) }
	// JarFile/ZipFile do not mix in CloseableOpenFile here, so close is overridden explicitly.
	def jarFile(verify: Boolean) = new OpenFile[JarFile]
		{ protected def open(f: File) = new JarFile(f, verify)
			override protected def close(j: JarFile) = j.close() }
	def zipFile = new OpenFile[ZipFile]
		{ protected def open(f: File) = new ZipFile(f)
			override protected def close(z: ZipFile) = z.close() }
	def streamReader = new WrapOpenResource[(InputStream, Charset), Reader]
		{ protected def open(streamCharset: (InputStream, Charset)) = new InputStreamReader(streamCharset._1, streamCharset._2) }
	def gzipInputStream = new WrapOpenResource[InputStream, GZIPInputStream]
		{ protected def open(in: InputStream) = new GZIPInputStream(in) }
	def zipInputStream = new WrapOpenResource[InputStream, ZipInputStream]
		{ protected def open(in: InputStream) = new ZipInputStream(in) }
	// finish() writes the remaining compressed data without closing the wrapped stream,
	// so the caller's underlying OutputStream stays usable.
	def gzipOutputStream = new WrapOpenResource[OutputStream, GZIPOutputStream]
		{ protected def open(out: OutputStream) = new GZIPOutputStream(out)
			override protected def close(out: GZIPOutputStream) = out.finish() }
	def jarOutputStream = new WrapOpenResource[OutputStream, JarOutputStream]
		{ protected def open(out: OutputStream) = new JarOutputStream(out) }
	def jarInputStream = new WrapOpenResource[InputStream, JarInputStream]
		{ protected def open(in: InputStream) = new JarInputStream(in) }
	/** Opens the stream for a single entry of the given (already open) zip file.
	* Closing the entry stream does not close `zip` itself. */
	def zipEntry(zip: ZipFile) = new CloseableOpenResource[ZipEntry, InputStream] {
		protected def open(entry: ZipEntry, log: Logger) =
			Control.trap("Error opening " + entry.getName + " in " + zip + ": ", log) { Right(zip.getInputStream(entry)) }
	}
}
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah, Vesa Vilhonen
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{File,OutputStream}
|
||||
|
||||
/** Configuration for forking a Java-based process. */
abstract class ForkJava extends NotNull
{
	/** The Java installation to use; when None, the JVM running this process (java.home) is used. */
	def javaHome: Option[File] = None
	/** How to handle the forked process's output, if explicitly configured. */
	def outputStrategy: Option[OutputStrategy] = None
}
|
||||
/** ForkJava configuration extended with the Scala jars to make available to the forked process. */
abstract class ForkScala extends ForkJava
{
	/** The Scala library/compiler jars; must be non-empty when actually forking (see Fork.ForkScala). */
	def scalaJars: Iterable[File] = Nil
}
|
||||
/** ForkScala configuration for running a main class: adds a working directory and JVM options. */
trait ForkScalaRun extends ForkScala
{
	/** Directory in which to start the forked process; when None the current directory is inherited. */
	def workingDirectory: Option[File] = None
	/** Additional options to pass to the forked JVM. */
	def runJVMOptions: Seq[String] = Nil
}
|
||||
|
||||
/** Strategies for handling the output of a forked process (see Fork for how each is applied). */
sealed abstract class OutputStrategy extends NotNull
/** Sends the forked process's output to this process's standard output. */
case object StdoutOutput extends OutputStrategy
/** Sends output to `logger` via Process's `!` operator. */
case class BufferedOutput(logger: Logger) extends OutputStrategy
/** Sends output to `logger` via Process's `run(logger)`. */
case class LoggedOutput(logger: Logger) extends OutputStrategy
/** Pipes output directly into the given stream. */
case class CustomOutput(output: OutputStream) extends OutputStrategy
|
||||
|
||||
import java.lang.{ProcessBuilder => JProcessBuilder}
|
||||
/** Forks java/javac/scala/scalac processes.  The public entry points are the `java`,
* `javac`, `scala`, and `scalac` values; each returns the forked process's exit code. */
object Fork
{
	private val ScalacMainClass = "scala.tools.nsc.Main"
	private val ScalaMainClass = "scala.tools.nsc.MainGenericRunner"

	val java = new ForkJava("java")
	val javac = new ForkJava("javac")
	val scala = new ForkScala(ScalaMainClass)
	val scalac = new ForkScala(ScalacMainClass)

	/** Resolves the command `name` under the bin/ directory of `javaHome`,
	* defaulting to the JVM running this process (java.home system property). */
	private def javaCommand(javaHome: Option[File], name: String): File =
	{
		val home = javaHome.getOrElse(new File(System.getProperty("java.home")))
		new File(new File(home, "bin"), name)
	}

	/** Forks the given Java-installation command (e.g. "java" or "javac").
	* The overloads progressively default the working directory, environment, and output strategy. */
	final class ForkJava(commandName: String) extends NotNull
	{
		def apply(javaHome: Option[File], options: Seq[String], log: Logger): Int =
			apply(javaHome, options, BufferedOutput(log))
		def apply(javaHome: Option[File], options: Seq[String], outputStrategy: OutputStrategy): Int =
			apply(javaHome, options, None, outputStrategy)
		def apply(javaHome: Option[File], options: Seq[String], workingDirectory: Option[File], log: Logger): Int =
			apply(javaHome, options, workingDirectory, BufferedOutput(log))
		def apply(javaHome: Option[File], options: Seq[String], workingDirectory: Option[File], outputStrategy: OutputStrategy): Int =
			apply(javaHome, options, workingDirectory, Map.empty, outputStrategy)
		/** Runs the command with the given options, working directory, and extra environment
		* variables, dispatching output per `outputStrategy`.  Returns the exit code. */
		def apply(javaHome: Option[File], options: Seq[String], workingDirectory: Option[File], env: Map[String, String], outputStrategy: OutputStrategy): Int =
		{
			val executable = javaCommand(javaHome, commandName).getAbsolutePath
			val command = (executable :: options.toList).toArray
			val builder = new JProcessBuilder(command : _*)
			workingDirectory.foreach(wd => builder.directory(wd))
			// env entries are added to (not replacing) the inherited environment.
			val environment = builder.environment
			for( (key, value) <- env )
				environment.put(key, value)
			// Each branch blocks until the process exits and yields its exit code.
			outputStrategy match {
				case StdoutOutput => Process(builder) !
				case BufferedOutput(logger) => Process(builder) ! logger
				case LoggedOutput(logger) => Process(builder).run(logger).exitValue()
				case CustomOutput(output) => (Process(builder) #> output).run.exitValue()
			}
		}
	}

	/** Forks a JVM running the given Scala main class (runner or compiler),
	* placing the provided Scala jars on the boot classpath. */
	final class ForkScala(mainClassName: String) extends NotNull
	{
		def apply(javaHome: Option[File], jvmOptions: Seq[String], scalaJars: Iterable[File], arguments: Seq[String], log: Logger): Int =
			apply(javaHome, jvmOptions, scalaJars, arguments, None, BufferedOutput(log))
		def apply(javaHome: Option[File], jvmOptions: Seq[String], scalaJars: Iterable[File], arguments: Seq[String], workingDirectory: Option[File], log: Logger): Int =
			apply(javaHome, jvmOptions, scalaJars, arguments, workingDirectory, BufferedOutput(log))
		def apply(javaHome: Option[File], jvmOptions: Seq[String], scalaJars: Iterable[File], arguments: Seq[String], workingDirectory: Option[File], outputStrategy: OutputStrategy): Int =
		{
			if(scalaJars.isEmpty) error("Scala jars not specified")
			// Append the Scala jars to the boot classpath so they are available to the main class.
			val scalaClasspathString = "-Xbootclasspath/a:" + scalaJars.map(_.getAbsolutePath).mkString(File.pathSeparator)
			// An empty main class name means the caller supplies it in `arguments`.
			val mainClass = if(mainClassName.isEmpty) Nil else mainClassName :: Nil
			val options = jvmOptions ++ (scalaClasspathString :: mainClass ::: arguments.toList)
			Fork.java(javaHome, options, workingDirectory, Map.empty, outputStrategy)
		}
	}
}
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 Mark Harrah, David MacIver
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import scala.collection.mutable.{HashSet, Set}
|
||||
|
||||
/** A bidirectional conversion between T and its String representation, used for persisting values. */
trait Format[T] extends NotNull
{
	def toString(t: T): String
	def fromString(s: String): T
}
|
||||
/** A Format whose String representation is just the value's own toString; only
* `fromString` needs to be implemented. */
abstract class SimpleFormat[T] extends Format[T]
{
	def toString(t: T) = t.toString
}
|
||||
/** Standard Format instances for the types persisted by sbt. */
object Format
{
	/** Persists a Path relative to `basePath`; fails if the path is not under `basePath`. */
	def path(basePath: Path): Format[Path] = new Format[Path]
	{
		def toString(path: Path) = Path.relativize(basePath.asFile, path.asFile).getOrElse(error("Path " + path + " not in " + basePath))
		def fromString(s: String) = Path.fromString(basePath, s)
	}
	/** Persists a File by its absolute path. */
	implicit val file: Format[File] = new Format[File]
	{
		def toString(file: File) = file.getAbsolutePath
		def fromString(s: String) = (new File(s)).getAbsoluteFile
	}
	/** Persists a hash (byte array) as lowercase hexadecimal. */
	implicit val hash: Format[Array[Byte]] = new Format[Array[Byte]]
	{
		def toString(hash: Array[Byte]) = Hash.toHex(hash)
		def fromString(hash: String) = Hash.fromHex(hash)
	}
	/** Persists a set of T as path-separator-joined entries; blank entries are dropped on read. */
	def set[T](implicit format: Format[T]): Format[Set[T]] = new Format[Set[T]]
	{
		def toString(set: Set[T]) = set.toList.map(format.toString).mkString(File.pathSeparator)
		def fromString(s: String) = (new HashSet[T]) ++ FileUtilities.pathSplit(s).map(_.trim).filter(!_.isEmpty).map(format.fromString)
	}
	implicit val string: Format[String] = new SimpleFormat[String] { def fromString(s: String) = s }
	/** Persists a TestDefinition using TestParser; a parse error aborts via `error`. */
	implicit val test: Format[TestDefinition] = new SimpleFormat[TestDefinition]
	{
		def fromString(s: String) = TestParser.parse(s).fold(error, x => x)
	}
}
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{ByteArrayInputStream, File, InputStream}
|
||||
|
||||
/** Computes SHA-1 hashes of strings, files, and streams, and converts hashes to/from
* lowercase hexadecimal strings. */
object Hash
{
	// Read buffer size in bytes for digesting streams.
	private val BufferSize = 8192
	/** Converts the given byte array to a lowercase hexadecimal string (two digits per byte). */
	def toHex(bytes: Array[Byte]): String =
	{
		val buffer = new StringBuilder(bytes.length * 2)
		for(i <- 0 until bytes.length)
		{
			val b = bytes(i)
			// Map the signed byte to its unsigned value 0..255 before splitting into nibbles.
			val bi: Int = if(b < 0) b + 256 else b
			buffer append toHex((bi >>> 4).asInstanceOf[Byte])
			buffer append toHex((bi & 0x0F).asInstanceOf[Byte])
		}
		buffer.toString
	}
	/** Converts the given hexadecimal string (even length, case-insensitive digits) to a byte array. */
	def fromHex(hex: String): Array[Byte] =
	{
		require((hex.length & 1) == 0, "Hex string must have length 2n.")
		val array = new Array[Byte](hex.length >> 1)
		for(i <- 0 until hex.length by 2)
		{
			val c1 = hex.charAt(i)
			val c2 = hex.charAt(i+1)
			array(i >> 1) = ((fromHex(c1) << 4) | fromHex(c2)).asInstanceOf[Byte]
		}
		array
	}
	/** Calculates the SHA-1 hash of the given String.*/
	def apply(s: String, log: Logger): Either[String, Array[Byte]] = apply(new ByteArrayInputStream(s.getBytes("UTF-8")), log)
	/** Calculates the SHA-1 hash of the given file.*/
	def apply(path: Path, log: Logger): Either[String, Array[Byte]] = apply(path.asFile, log)
	/** Calculates the SHA-1 hash of the given file.*/
	def apply(file: File, log: Logger): Either[String, Array[Byte]] =
		FileUtilities.readStreamValue(file, log) { stream => apply(stream, log) }
	/** Calculates the SHA-1 hash of the given stream, closing it when finished.*/
	def apply(stream: InputStream, log: Logger): Either[String, Array[Byte]] =
	{
		import java.security.{MessageDigest, DigestInputStream}
		// "SHA" is the JCA standard name for SHA-1.
		val digest = MessageDigest.getInstance("SHA")
		Control.trapAndFinally("Error computing digest: ", log)
		{
			val dis = new DigestInputStream(stream, digest)
			val buffer = new Array[Byte](BufferSize)
			// Reading through the DigestInputStream feeds every byte into the digest.
			while(dis.read(buffer) >= 0) {}
			dis.close()
			Right(digest.digest)
		}
		{ stream.close() }
	}

	/** Converts a single nibble value (0-15) to its lowercase hex digit. */
	private def toHex(b: Byte): Char =
	{
		require(b >= 0 && b <= 15, "Byte " + b + " was not between 0 and 15")
		if(b < 10)
			('0'.asInstanceOf[Int] + b).asInstanceOf[Char]
		else
			('a'.asInstanceOf[Int] + (b-10)).asInstanceOf[Char]
	}
	/** Converts a hex digit (either case) to its value 0-15; throws on a non-hex character. */
	private def fromHex(c: Char): Int =
	{
		val b =
			if(c >= '0' && c <= '9')
				(c - '0')
			else if(c >= 'a' && c <= 'f')
				(c - 'a') + 10
			else if(c >= 'A' && c <= 'F')
				(c - 'A') + 10
			else
				throw new RuntimeException("Invalid hex character: '" + c + "'.")
		b
	}
}
|
||||
|
|
@ -0,0 +1,94 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Steven Blundy, Mark Harrah, Josh Cough
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import ScalaProject.{optionsAsString, javaOptionsAsString}
|
||||
|
||||
/** Provides overridable hooks for work to run before and after an integration test suite. */
trait IntegrationTesting extends NotNull
{
	/** Override to provide pre-test setup. */
	protected def pretests: Option[String] = None
	/** Override to provide post-test cleanup. */
	protected def posttests: Option[String] = None
}
|
||||
/** Integration testing support for ScalaProjects; delegates to the standard test task. */
trait ScalaIntegrationTesting extends IntegrationTesting
{ self: ScalaProject =>

	/** Creates the task that runs the integration tests using the standard testTask machinery. */
	protected def integrationTestTask(frameworks: Seq[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: => Seq[TestOption]) =
		testTask(frameworks, classpath, analysis, options)
}
|
||||
|
||||
/** Convenience mixin combining BasicIntegrationTesting with the Maven-style
* source layout supplied by MavenStyleIntegrationTestPaths. */
trait BasicScalaIntegrationTesting extends BasicIntegrationTesting with MavenStyleIntegrationTestPaths
{ self: BasicScalaProject => }
|
||||
/** A fully featured integration testing trait that may be mixed in with any subclass of <code>BasicScalaProject</code>.
* Pre-suite setup and post-suite cleanup are provided by overriding <code>pretests</code> and <code>posttests</code> respectively.*/
trait BasicIntegrationTesting extends ScalaIntegrationTesting with IntegrationTestPaths with BasicDependencyProject
{
	self: BasicScalaProject =>

	import BasicScalaIntegrationTesting._

	/** Task that compiles the integration test sources. */
	lazy val integrationTestCompile = integrationTestCompileAction
	/** Task that runs the integration tests (depends on integrationTestCompile). */
	lazy val integrationTest = integrationTestAction

	val integrationTestCompileConditional = new CompileConditional(integrationTestCompileConfiguration, buildCompiler)

	// Fix: the two `describedAs` descriptions were swapped — the run task was described as
	// "Compiles integration test sources." and the compile task as "Runs all integration tests...".
	protected def integrationTestAction = integrationTestTask(integrationTestFrameworks, integrationTestClasspath, integrationTestCompileConditional.analysis, integrationTestOptions) dependsOn integrationTestCompile describedAs IntegrationTestDescription
	protected def integrationTestCompileAction = integrationTestCompileTask() dependsOn compile describedAs IntegrationTestCompileDescription

	protected def integrationTestCompileTask() = task{ integrationTestCompileConditional.run }

	/** The test options: the pretests/posttests hooks plus the project's normal test options. */
	def integrationTestOptions: Seq[TestOption] =
		TestSetup(() => pretests) ::
		TestCleanup(() => posttests) ::
		testOptions.toList
	def integrationTestCompileOptions = testCompileOptions
	def javaIntegrationTestCompileOptions: Seq[JavaCompileOption] = testJavaCompileOptions

	/** Uses a dedicated IntegrationTest configuration only when explicitly enabled; otherwise Test. */
	def integrationTestConfiguration = if(useIntegrationTestConfiguration) Configurations.IntegrationTest else Configurations.Test
	def integrationTestClasspath = fullClasspath(integrationTestConfiguration) +++ optionalClasspath

	def integrationTestLabel = "integration-test"
	def integrationTestCompileConfiguration = new IntegrationTestCompileConfig

	protected def integrationTestDependencies = new LibraryDependencies(this, integrationTestCompileConditional)

	def integrationTestFrameworks = testFrameworks
	override def useIntegrationTestConfiguration = false
	/** Adds the integration test configuration to the defaults only when it is enabled. */
	abstract override def extraDefaultConfigurations =
	{
		val superConfigurations = super.extraDefaultConfigurations
		if(useIntegrationTestConfiguration)
			integrationTestConfiguration :: superConfigurations
		else
			superConfigurations
	}
	/** Prepends the integration test output and resources to the classpath for its configuration. */
	abstract override def fullUnmanagedClasspath(config: Configuration) =
	{
		val superClasspath = super.fullUnmanagedClasspath(config)
		if(config == integrationTestConfiguration)
			integrationTestCompilePath +++ integrationTestResourcesPath +++ superClasspath
		else
			superClasspath
	}

	/** Compile configuration describing the integration test sources, classpath, and outputs. */
	class IntegrationTestCompileConfig extends BaseCompileConfig
	{
		def label = integrationTestLabel
		def sourceRoots = integrationTestScalaSourceRoots
		def sources = integrationTestSources
		def outputDirectory = integrationTestCompilePath
		def classpath = integrationTestClasspath
		def analysisPath = integrationTestAnalysisPath
		def baseCompileOptions = integrationTestCompileOptions
		def javaOptions = javaOptionsAsString(javaCompileOptions)
		def testDefinitionClassNames = testClassNames(integrationTestFrameworks)
	}
}
|
||||
|
||||
/** Description strings for the integration test tasks. */
object BasicScalaIntegrationTesting
{
	val IntegrationTestCompileDescription = "Compiles integration test sources."
	val IntegrationTestDescription = "Runs all integration tests detected during compilation."
}
|
||||
|
|
@ -0,0 +1,144 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** Reads a single line of user input; returns None when no line is available (e.g. end of input). */
trait LineReader extends NotNull
{
	def readLine(prompt: String): Option[String]
}
|
||||
/** Bundles all the word lists used to build the interactive-mode tab completor
* (see MainCompletor.apply).  The by-name parameters are evaluated lazily because
* computing them (project names, Scala versions, properties) may be expensive. */
class Completors(val projectAction: String, projectNames0: => Iterable[String],
	val generalCommands: Iterable[String], val propertyActions: Iterable[String],
	val specificPrefix: String, scalaVersions0: => Iterable[String],
	val prefixes: Iterable[String], val taskNames: Iterable[String],
	propertyNames0: => Iterable[String], val extra: ExtraCompletions) extends NotNull
{
	lazy val scalaVersions = scalaVersions0
	lazy val propertyNames = propertyNames0
	lazy val projectNames = projectNames0
}
|
||||
|
||||
/** Additional command names and, for each name, the completions of its argument. */
trait ExtraCompletions extends NotNull
{
	def names: Iterable[String]
	def completions(name: String): Iterable[String]
}
|
||||
|
||||
|
||||
import jline.{Completor, ConsoleReader}
|
||||
/** LineReader implemented on top of a jline ConsoleReader.  Reads are serialized on
* the terminal and echo is managed by JLine.withJLine. */
abstract class JLine extends LineReader
{
	protected[this] val reader: ConsoleReader
	def readLine(prompt: String) = JLine.withJLine { unsynchronizedReadLine(prompt) }
	private[this] def unsynchronizedReadLine(prompt: String) =
		reader.readLine(prompt) match
		{
			// jline returns null at end of input.
			case null => None
			case x => Some(x.trim)
		}
}
|
||||
/** Helpers for constructing jline readers and coordinating access to the shared terminal. */
private object JLine
{
	def terminal = jline.Terminal.getTerminal
	/** Creates a ConsoleReader with the bell disabled, synchronizing on the terminal. */
	def createReader() =
		terminal.synchronized
		{
			val cr = new ConsoleReader
			terminal.enableEcho()
			cr.setBellEnabled(false)
			cr
		}
	/** Runs `action` while holding the terminal lock with echo disabled, restoring echo afterwards. */
	def withJLine[T](action: => T): T =
	{
		val t = terminal
		t.synchronized
		{
			t.disableEcho()
			try { action }
			finally { t.enableEcho() }
		}
	}
}
|
||||
/** A plain line reader with no history or completion, using the default jline reader. */
object SimpleReader extends JLine
{
	protected[this] val reader = JLine.createReader()
}
|
||||
/** A JLine reader with persistent history (when historyPath is given) and a lazily
* constructed completor.  History setup failures are logged, not fatal. */
private[sbt] final class LazyJLineReader(historyPath: Option[Path], completor: => Completor, log: Logger) extends JLine
{
	protected[this] val reader =
	{
		val cr = new ConsoleReader
		cr.setBellEnabled(false)
		for(historyLocation <- historyPath)
		{
			val historyFile = historyLocation.asFile
			// Best effort: trap and log any error creating/attaching the history file.
			Control.trapAndLog(log)
			{
				historyFile.getParentFile.mkdirs()
				cr.getHistory.setHistoryFile(historyFile)
			}
		}
		// LazyCompletor defers building the (possibly expensive) completor until first tab-press.
		cr.addCompletor(new LazyCompletor(completor))
		cr
	}
}
|
||||
/** Builds the jline Completor for sbt's interactive prompt from the word lists in a Completors. */
object MainCompletor
{
	import jline.{ArgumentCompletor, MultiCompletor, NullCompletor, SimpleCompletor}

	def apply(completors: Completors): Completor =
	{
		import completors._
		import scala.collection.immutable.TreeSet

		val generalCompletor = simpleCompletor(generalCommands)
		val projectCompletor = simpleArgumentCompletor(projectAction :: Nil, projectNames)

		// Completes a property action followed by a property name.
		def propertyCompletor(propertyNames: Iterable[String]) =
			simpleArgumentCompletor(propertyActions, propertyNames)
		// Completes a prefix (e.g. for multi-project commands) followed by a base completion.
		def prefixedCompletor(baseCompletor: Completor) =
			singleArgumentCompletor(simpleCompletor(prefixes), baseCompletor)
		// Completes the specific prefix, a Scala version, then a base completion.
		def specificCompletor(baseCompletor: Completor) =
		{
			val specific = simpleCompletor(specificPrefix :: Nil) // TODO
			argumentCompletor( Array( specific, lazySimpleCompletor(scalaVersions), baseCompletor ) )
		}
		// Completes an extra command name followed by (repeated) argument completions.
		def extraCompletor(name: String) =
			repeatedArgumentCompletor(simpleCompletor(name :: Nil), lazySimpleCompletor(extra.completions(name)))
		val taskCompletor = simpleCompletor(TreeSet(taskNames.toSeq : _*))
		val extraCompletors = extra.names.map(extraCompletor)
		val baseCompletors = generalCompletor :: projectCompletor :: taskCompletor :: propertyCompletor(propertyNames) :: extraCompletors.toList
		val baseCompletor = new MultiCompletor(baseCompletors.toArray)

		val completor = new MultiCompletor()
		//TODO: baseCompletor for prefixedCompletor should only be taskCompletor ++ extraCompletors
		completor.setCompletors( Array(baseCompletor, prefixedCompletor(baseCompletor), specificCompletor(baseCompletor)) )
		completor
	}
	/** Used for a single argument so that the argument can have spaces in it.*/
	object SingleArgumentDelimiter extends ArgumentCompletor.AbstractArgumentDelimiter
	{
		// Only the first space delimits; later spaces are part of the argument.
		def isDelimiterChar(buffer: String, pos: Int) =
			(buffer.charAt(pos) == ' ') && buffer.substring(0, pos).trim.indexOf(' ') == -1
	}

	private def lazySimpleCompletor(completions: => Iterable[String]) = new LazyCompletor(simpleCompletor(completions))
	private def simpleCompletor(completions: Iterable[String]) = new SimpleCompletor(completions.toList.toArray)
	private def simpleArgumentCompletor(first: Iterable[String], second: => Iterable[String]) =
		singleArgumentCompletor(simpleCompletor(first), lazySimpleCompletor(second))
	// NullCompletor terminates the argument list so nothing completes after the single argument.
	private def singleArgumentCompletor(first: Completor, second: Completor) =
	{
		val completors = Array(first, second, new NullCompletor)
		strict( new ArgumentCompletor(completors, SingleArgumentDelimiter) )
	}
	private def repeatedArgumentCompletor(first: Completor, repeat: Completor) = argumentCompletor(Array(first, repeat))
	private def argumentCompletor(args: Array[Completor]) = strict(new ArgumentCompletor(args))

	private def strict(c: ArgumentCompletor) = { c.setStrict(true); c }
}
|
||||
/** A Completor that defers construction of its underlying completor until the first
* completion request, so expensive completion data is not computed at reader setup time. */
private class LazyCompletor(delegate0: => Completor) extends Completor
{
	// Forced only on first use; memoized thereafter.
	private lazy val underlying = delegate0
	def complete(buffer: String, cursor: Int, candidates: java.util.List[_]): Int =
		underlying.complete(buffer, cursor, candidates)
}
|
||||
|
|
@ -0,0 +1,357 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import scala.collection.mutable.{Buffer, HashMap, ListBuffer}
|
||||
|
||||
/** An event that can be buffered and replayed against a Logger (see Logger.log(LogEvent)). */
sealed trait LogEvent extends NotNull
/** A success message (logged via Logger.success). */
final class Success(val msg: String) extends LogEvent
/** An ordinary message at the given level. */
final class Log(val level: Level.Value, val msg: String) extends LogEvent
/** A stack trace to record. */
final class Trace(val exception: Throwable) extends LogEvent
/** A change of the logging level. */
final class SetLevel(val newLevel: Level.Value) extends LogEvent
/** A change of the stack-trace verbosity. */
final class SetTrace(val level: Int) extends LogEvent
/** A control event (start/header/finish) with an associated message. */
final class ControlEvent(val event: ControlEvent.Value, val msg: String) extends LogEvent

/** The kinds of control events. */
object ControlEvent extends Enumeration
{
	val Start, Header, Finish = Value
}
|
||||
|
||||
/** The base logging interface.  Subclasses implement level/trace management plus the
* `trace`, `success`, `log`, `control`, and `logAll` primitives; the convenience
* methods (debug/info/warn/error) are defined in terms of `log`. */
abstract class Logger extends xsbt.CompileLogger with IvyLogger
{
	def getLevel: Level.Value
	def setLevel(newLevel: Level.Value)
	def setTrace(flag: Int)
	def getTrace: Int
	/** Stack traces are enabled for any non-negative trace value. */
	final def traceEnabled = getTrace >= 0
	/** Whether ANSI color codes may be emitted; conservative default is false. */
	def ansiCodesSupported = false

	/** True when messages at `level` should be logged given the current level. */
	def atLevel(level: Level.Value) = level.id >= getLevel.id
	def trace(t: => Throwable): Unit
	final def verbose(message: => String): Unit = debug(message)
	final def debug(message: => String): Unit = log(Level.Debug, message)
	final def info(message: => String): Unit = log(Level.Info, message)
	final def warn(message: => String): Unit = log(Level.Warn, message)
	final def error(message: => String): Unit = log(Level.Error, message)
	def success(message: => String): Unit
	def log(level: Level.Value, message: => String): Unit
	def control(event: ControlEvent.Value, message: => String): Unit

	def logAll(events: Seq[LogEvent]): Unit
	/** Defined in terms of other methods in Logger and should not be called from them. */
	final def log(event: LogEvent)
	{
		event match
		{
			case s: Success => success(s.msg)
			case l: Log => log(l.level, l.msg)
			case t: Trace => trace(t.exception)
			case setL: SetLevel => setLevel(setL.newLevel)
			case setT: SetTrace => setTrace(setT.level)
			case c: ControlEvent => control(c.event, c.msg)
		}
	}

	// Bridge methods for the xsbti interfaces, which pass messages as F0 thunks.
	import xsbti.F0
	def debug(msg: F0[String]): Unit = log(Level.Debug, msg)
	def warn(msg: F0[String]): Unit = log(Level.Warn, msg)
	def info(msg: F0[String]): Unit = log(Level.Info, msg)
	def error(msg: F0[String]): Unit = log(Level.Error, msg)
	def trace(msg: F0[Throwable]) = trace(msg.apply)
	def log(level: Level.Value, msg: F0[String]): Unit = log(level, msg.apply)
}
|
||||
|
||||
/** Implements the level-setting methods of Logger.*/
|
||||
/** Implements the level- and trace-setting methods of Logger with simple mutable state.
* The trace value defaults to unlimited (Integer.MAX_VALUE) and the level to Info. */
abstract class BasicLogger extends Logger
{
	private var currentTrace = java.lang.Integer.MAX_VALUE
	private var currentLevel: Level.Value = Level.Info
	def getLevel = currentLevel
	def setLevel(newLevel: Level.Value) { currentLevel = newLevel }
	def setTrace(level: Int) { currentTrace = level }
	def getTrace = currentTrace
}
|
||||
|
||||
/** Serializes all calls to the underlying logger by synchronizing on this instance. */
final class SynchronizedLogger(delegate: Logger) extends Logger
{
	override lazy val ansiCodesSupported = delegate.ansiCodesSupported
	def getLevel = { synchronized { delegate.getLevel } }
	def setLevel(newLevel: Level.Value) { synchronized { delegate.setLevel(newLevel) } }
	def setTrace(level: Int) { synchronized { delegate.setTrace(level) } }
	def getTrace: Int = { synchronized { delegate.getTrace } }

	def trace(t: => Throwable) { synchronized { delegate.trace(t) } }
	def log(level: Level.Value, message: => String) { synchronized { delegate.log(level, message) } }
	def success(message: => String) { synchronized { delegate.success(message) } }
	def control(event: ControlEvent.Value, message: => String) { synchronized { delegate.control(event, message) } }
	def logAll(events: Seq[LogEvent]) { synchronized { delegate.logAll(events) } }
}
|
||||
|
||||
/** Fans logging out to several underlying loggers by dispatching LogEvents to each one.
* ANSI codes are reported as supported only when every delegate supports them. */
final class MultiLogger(delegates: List[Logger]) extends BasicLogger
{
	override lazy val ansiCodesSupported = delegates.forall(_.ansiCodesSupported)
	// Level/trace changes are recorded locally (via super) and also forwarded to each delegate.
	override def setLevel(newLevel: Level.Value)
	{
		super.setLevel(newLevel)
		dispatch(new SetLevel(newLevel))
	}
	override def setTrace(level: Int)
	{
		super.setTrace(level)
		dispatch(new SetTrace(level))
	}
	def trace(t: => Throwable) { dispatch(new Trace(t)) }
	def log(level: Level.Value, message: => String) { dispatch(new Log(level, message)) }
	def success(message: => String) { dispatch(new Success(message)) }
	def logAll(events: Seq[LogEvent]) { delegates.foreach(_.logAll(events)) }
	def control(event: ControlEvent.Value, message: => String) { delegates.foreach(_.control(event, message)) }
	private def dispatch(event: LogEvent) { delegates.foreach(_.log(event)) }
}
|
||||
|
||||
/** A filter logger is used to delegate messages but not the logging level to another logger.
* This means that messages are logged at the higher of the two levels set by this logger
* and its delegate.
* */
final class FilterLogger(delegate: Logger) extends BasicLogger
{
	override lazy val ansiCodesSupported = delegate.ansiCodesSupported
	// Trace state is held by the delegate, not by this logger's BasicLogger state.
	def trace(t: => Throwable)
	{
		if(traceEnabled)
			delegate.trace(t)
	}
	override def setTrace(level: Int) { delegate.setTrace(level) }
	override def getTrace = delegate.getTrace
	// Forward only messages that pass this logger's own level; the delegate then
	// applies its own level as well.
	def log(level: Level.Value, message: => String)
	{
		if(atLevel(level))
			delegate.log(level, message)
	}
	def success(message: => String)
	{
		if(atLevel(Level.Info))
			delegate.success(message)
	}
	def control(event: ControlEvent.Value, message: => String)
	{
		if(atLevel(Level.Info))
			delegate.control(event, message)
	}
	def logAll(events: Seq[LogEvent]): Unit = events.foreach(delegate.log)
}
|
||||
|
||||
/** A logger that can buffer the logging done on it by currently executing Thread and
|
||||
* then can flush the buffer to the delegate logger provided in the constructor. Use
|
||||
* 'startRecording' to start buffering and then 'play' from to flush the buffer for the
|
||||
* current Thread to the backing logger. The logging level set at the
|
||||
* time a message is originally logged is used, not the level at the time 'play' is
|
||||
* called.
|
||||
*
|
||||
* This class assumes that it is the only client of the delegate logger.
|
||||
*
|
||||
* This logger is thread-safe.
|
||||
* */
|
||||
final class BufferedLogger(delegate: Logger) extends Logger
|
||||
{
|
||||
override lazy val ansiCodesSupported = delegate.ansiCodesSupported
|
||||
private[this] val buffers = wrap.Wrappers.weakMap[Thread, Buffer[LogEvent]]
|
||||
private[this] var recordingAll = false
|
||||
|
||||
private[this] def getOrCreateBuffer = buffers.getOrElseUpdate(key, createBuffer)
|
||||
private[this] def buffer = if(recordingAll) Some(getOrCreateBuffer) else buffers.get(key)
|
||||
private[this] def createBuffer = new ListBuffer[LogEvent]
|
||||
private[this] def key = Thread.currentThread
|
||||
|
||||
@deprecated def startRecording() = recordAll()
|
||||
/** Enables buffering for logging coming from the current Thread. */
|
||||
def record(): Unit = synchronized { buffers(key) = createBuffer }
|
||||
/** Enables buffering for logging coming from all Threads. */
|
||||
def recordAll(): Unit = synchronized{ recordingAll = true }
|
||||
def buffer[T](f: => T): T =
|
||||
{
|
||||
record()
|
||||
try { f }
|
||||
finally { Control.trap(stop()) }
|
||||
}
|
||||
def bufferAll[T](f: => T): T =
|
||||
{
|
||||
recordAll()
|
||||
try { f }
|
||||
finally { Control.trap(stopAll()) }
|
||||
}
|
||||
|
||||
/** Flushes the buffer to the delegate logger for the current thread. This method calls logAll on the delegate
|
||||
* so that the messages are written consecutively. The buffer is cleared in the process. */
|
||||
def play(): Unit =
|
||||
synchronized
|
||||
{
|
||||
for(buffer <- buffers.get(key))
|
||||
delegate.logAll(wrap.Wrappers.readOnly(buffer))
|
||||
}
|
||||
def playAll(): Unit =
|
||||
synchronized
|
||||
{
|
||||
for(buffer <- buffers.values)
|
||||
delegate.logAll(wrap.Wrappers.readOnly(buffer))
|
||||
}
|
||||
/** Clears buffered events for the current thread and disables buffering. */
|
||||
def clear(): Unit = synchronized { buffers -= key }
|
||||
/** Clears buffered events for all threads and disables all buffering. */
|
||||
def clearAll(): Unit = synchronized { buffers.clear(); recordingAll = false }
|
||||
/** Plays buffered events for the current thread and disables buffering. */
|
||||
def stop(): Unit =
|
||||
synchronized
|
||||
{
|
||||
play()
|
||||
clear()
|
||||
}
|
||||
def stopAll(): Unit =
|
||||
synchronized
|
||||
{
|
||||
playAll()
|
||||
clearAll()
|
||||
}
|
||||
|
||||
def setLevel(newLevel: Level.Value): Unit =
|
||||
synchronized
|
||||
{
|
||||
buffer.foreach{_ += new SetLevel(newLevel) }
|
||||
delegate.setLevel(newLevel)
|
||||
}
|
||||
def getLevel = synchronized { delegate.getLevel }
|
||||
def getTrace = synchronized { delegate.getTrace }
|
||||
def setTrace(level: Int): Unit =
|
||||
synchronized
|
||||
{
|
||||
buffer.foreach{_ += new SetTrace(level) }
|
||||
delegate.setTrace(level)
|
||||
}
|
||||
|
||||
def trace(t: => Throwable): Unit =
|
||||
doBufferableIf(traceEnabled, new Trace(t), _.trace(t))
|
||||
def success(message: => String): Unit =
|
||||
doBufferable(Level.Info, new Success(message), _.success(message))
|
||||
def log(level: Level.Value, message: => String): Unit =
|
||||
doBufferable(level, new Log(level, message), _.log(level, message))
|
||||
def logAll(events: Seq[LogEvent]): Unit =
|
||||
synchronized
|
||||
{
|
||||
buffer match
|
||||
{
|
||||
case Some(b) => b ++= events
|
||||
case None => delegate.logAll(events)
|
||||
}
|
||||
}
|
||||
def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
doBufferable(Level.Info, new ControlEvent(event, message), _.control(event, message))
|
||||
private def doBufferable(level: Level.Value, appendIfBuffered: => LogEvent, doUnbuffered: Logger => Unit): Unit =
|
||||
doBufferableIf(atLevel(level), appendIfBuffered, doUnbuffered)
|
||||
private def doBufferableIf(condition: => Boolean, appendIfBuffered: => LogEvent, doUnbuffered: Logger => Unit): Unit =
|
||||
synchronized
|
||||
{
|
||||
if(condition)
|
||||
{
|
||||
buffer match
|
||||
{
|
||||
case Some(b) => b += appendIfBuffered
|
||||
case None => doUnbuffered(delegate)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
object ConsoleLogger
|
||||
{
|
||||
private val formatEnabled = ansiSupported && !formatExplicitlyDisabled
|
||||
|
||||
private[this] def formatExplicitlyDisabled = java.lang.Boolean.getBoolean("sbt.log.noformat")
|
||||
private[this] def ansiSupported =
|
||||
try { jline.Terminal.getTerminal.isANSISupported }
|
||||
catch { case e: Exception => !isWindows }
|
||||
|
||||
private[this] def os = System.getProperty("os.name")
|
||||
private[this] def isWindows = os.toLowerCase.indexOf("windows") >= 0
|
||||
}
|
||||
|
||||
/** A logger that logs to the console. On supported systems, the level labels are
|
||||
* colored.
|
||||
*
|
||||
* This logger is not thread-safe.*/
|
||||
class ConsoleLogger extends BasicLogger
|
||||
{
|
||||
override def ansiCodesSupported = ConsoleLogger.formatEnabled
|
||||
def messageColor(level: Level.Value) = Console.RESET
|
||||
def labelColor(level: Level.Value) =
|
||||
level match
|
||||
{
|
||||
case Level.Error => Console.RED
|
||||
case Level.Warn => Console.YELLOW
|
||||
case _ => Console.RESET
|
||||
}
|
||||
def successLabelColor = Console.GREEN
|
||||
def successMessageColor = Console.RESET
|
||||
override def success(message: => String)
|
||||
{
|
||||
if(atLevel(Level.Info))
|
||||
log(successLabelColor, Level.SuccessLabel, successMessageColor, message)
|
||||
}
|
||||
def trace(t: => Throwable): Unit =
|
||||
System.out.synchronized
|
||||
{
|
||||
val traceLevel = getTrace
|
||||
if(traceLevel >= 0)
|
||||
System.out.synchronized { System.out.print(StackTrace.trimmed(t, traceLevel)) }
|
||||
}
|
||||
def log(level: Level.Value, message: => String)
|
||||
{
|
||||
if(atLevel(level))
|
||||
log(labelColor(level), level.toString, messageColor(level), message)
|
||||
}
|
||||
private def setColor(color: String)
|
||||
{
|
||||
if(ansiCodesSupported)
|
||||
System.out.synchronized { System.out.print(color) }
|
||||
}
|
||||
private def log(labelColor: String, label: String, messageColor: String, message: String): Unit =
|
||||
System.out.synchronized
|
||||
{
|
||||
for(line <- message.split("""\n"""))
|
||||
{
|
||||
setColor(Console.RESET)
|
||||
System.out.print('[')
|
||||
setColor(labelColor)
|
||||
System.out.print(label)
|
||||
setColor(Console.RESET)
|
||||
System.out.print("] ")
|
||||
setColor(messageColor)
|
||||
System.out.print(line)
|
||||
setColor(Console.RESET)
|
||||
System.out.println()
|
||||
}
|
||||
}
|
||||
|
||||
def logAll(events: Seq[LogEvent]) = System.out.synchronized { events.foreach(log) }
|
||||
def control(event: ControlEvent.Value, message: => String)
|
||||
{ log(labelColor(Level.Info), Level.Info.toString, Console.BLUE, message) }
|
||||
}
|
||||
|
||||
/** An enumeration defining the levels available for logging. A level includes all of the levels
|
||||
* with id larger than its own id. For example, Warn (id=3) includes Error (id=4).*/
|
||||
object Level extends Enumeration with NotNull
|
||||
{
|
||||
val Debug = Value(1, "debug")
|
||||
val Info = Value(2, "info")
|
||||
val Warn = Value(3, "warn")
|
||||
val Error = Value(4, "error")
|
||||
/** Defines the label to use for success messages. A success message is logged at the info level but
|
||||
* uses this label. Because the label for levels is defined in this module, the success
|
||||
* label is also defined here. */
|
||||
val SuccessLabel = "success"
|
||||
|
||||
// added because elements was renamed to iterator in 2.8.0 nightly
|
||||
def levels = Debug :: Info :: Warn :: Error :: Nil
|
||||
/** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */
|
||||
def apply(s: String) = levels.find(s == _.toString)
|
||||
/** Same as apply, defined for use in pattern matching. */
|
||||
private[sbt] def unapply(s: String) = apply(s)
|
||||
}
|
||||
|
|
@ -0,0 +1,745 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Steven Blundy, Mark Harrah, David MacIver, Mikko Peltonen
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import scala.collection.immutable.TreeSet
|
||||
|
||||
/** This class is the entry point for sbt. If it is given any arguments, it interprets them
|
||||
* as actions, executes the corresponding actions, and exits. If there were no arguments provided,
|
||||
* sbt enters interactive mode.*/
|
||||
object Main
|
||||
{
|
||||
val NormalExitCode = 0
|
||||
val SetupErrorExitCode = 1
|
||||
val SetupDeclinedExitCode = 2
|
||||
val LoadErrorExitCode = 3
|
||||
val UsageErrorExitCode = 4
|
||||
val BuildErrorExitCode = 5
|
||||
val ProgramErrorExitCode = 6
|
||||
val MaxInt = java.lang.Integer.MAX_VALUE
|
||||
}
|
||||
|
||||
import Main._
|
||||
|
||||
class xMain extends xsbti.AppMain
|
||||
{
|
||||
final def run(configuration: xsbti.AppConfiguration): xsbti.MainResult =
|
||||
{
|
||||
def run0(remainingArguments: List[String], buildScalaVersion: Option[String]): xsbti.MainResult =
|
||||
{
|
||||
// done this way because in Scala 2.7.7, tail recursion in catch blocks is not optimized
|
||||
val result = try { Right(run(configuration, remainingArguments, buildScalaVersion)) } catch { case re: ReloadException => Left(re) }
|
||||
result match
|
||||
{
|
||||
case Left(re) => run0(re.remainingArguments, re.buildScalaVersion)
|
||||
case Right(r) => r
|
||||
}
|
||||
}
|
||||
run0(configuration.arguments.map(_.trim).toList, None)
|
||||
}
|
||||
final def run(configuration: xsbti.AppConfiguration, remainingArguments: List[String], buildScalaVersion: Option[String]): xsbti.MainResult =
|
||||
{
|
||||
val startTime = System.currentTimeMillis
|
||||
Project.loadProject(configuration.provider, buildScalaVersion) match
|
||||
{
|
||||
case err: LoadSetupError =>
|
||||
println("\n" + err.message)
|
||||
ExitHooks.runExitHooks(Project.bootLogger)
|
||||
Exit(SetupErrorExitCode)
|
||||
case LoadSetupDeclined =>
|
||||
ExitHooks.runExitHooks(Project.bootLogger)
|
||||
Exit(SetupDeclinedExitCode)
|
||||
case err: LoadError =>
|
||||
{
|
||||
val log = Project.bootLogger
|
||||
println(err.message)
|
||||
ExitHooks.runExitHooks(log)
|
||||
// Because this is an error that can probably be corrected, prompt user to try again.
|
||||
val line =
|
||||
try { SimpleReader.readLine("\n Hit enter to retry or 'exit' to quit: ") }
|
||||
catch
|
||||
{
|
||||
case e =>
|
||||
log.trace(e)
|
||||
log.error(e.toString)
|
||||
None
|
||||
}
|
||||
line match
|
||||
{
|
||||
case Some(l) => if(!isTerminateAction(l)) run(configuration, remainingArguments, buildScalaVersion) else Exit(NormalExitCode)
|
||||
case None => Exit(LoadErrorExitCode)
|
||||
}
|
||||
}
|
||||
case success: LoadSuccess =>
|
||||
{
|
||||
import success.project
|
||||
try
|
||||
{
|
||||
// in interactive mode, fill all undefined properties
|
||||
if(configuration.arguments.length > 0 || fillUndefinedProjectProperties(project.projectClosure.toList.reverse))
|
||||
startProject(project, configuration, remainingArguments, startTime)
|
||||
else
|
||||
Exit(NormalExitCode)
|
||||
}
|
||||
finally { ExitHooks.runExitHooks(project.log) }
|
||||
}
|
||||
}
|
||||
}
|
||||
/** If no arguments are provided, drop to interactive prompt.
|
||||
* If the user wants to run commands before dropping to the interactive prompt,
|
||||
* make dropping to the interactive prompt the action to perform on failure */
|
||||
private def initialize(args: List[String]): List[String] =
|
||||
args.lastOption match
|
||||
{
|
||||
case None => InteractiveCommand :: Nil
|
||||
case Some(InteractiveCommand) => (FailureHandlerPrefix + InteractiveCommand) :: args
|
||||
case Some(ExitCommand | QuitCommand) => args
|
||||
case _ => args ::: ExitCommand :: Nil
|
||||
}
|
||||
private def startProject(project: Project, configuration: xsbti.AppConfiguration, remainingArguments: List[String], startTime: Long): xsbti.MainResult =
|
||||
{
|
||||
project.log.info("Building project " + project.name + " " + project.version.toString + " against Scala " + project.buildScalaVersion)
|
||||
project.log.info(" using " + project.getClass.getName + " with sbt " + ComponentManager.version + " and Scala " + project.defScalaVersion.value)
|
||||
processArguments(project, initialize(remainingArguments), configuration, startTime) match
|
||||
{
|
||||
case e: xsbti.Exit =>
|
||||
printTime(project, startTime, "session")
|
||||
if(e.code == NormalExitCode)
|
||||
project.log.success("Build completed successfully.")
|
||||
else
|
||||
project.log.error("Error during build.")
|
||||
e
|
||||
case r => r
|
||||
}
|
||||
}
|
||||
/** This is the top-level command processing method. */
|
||||
private def processArguments(baseProject: Project, arguments: List[String], configuration: xsbti.AppConfiguration, startTime: Long): xsbti.MainResult =
|
||||
{
|
||||
type OnFailure = Option[String]
|
||||
def ExitOnFailure = None
|
||||
lazy val interactiveContinue = Some( InteractiveCommand )
|
||||
def remoteContinue(port: Int) = Some( FileCommandsPrefix + "-" + port )
|
||||
lazy val PHandler = new processor.Handler(baseProject)
|
||||
|
||||
// replace in 2.8
|
||||
trait Trampoline
|
||||
class Done(val r: xsbti.MainResult) extends Trampoline
|
||||
class Continue(project: Project, arguments: List[String], failAction: OnFailure) extends Trampoline {
|
||||
def apply() = process(project, arguments, failAction)
|
||||
}
|
||||
def continue(project: Project, arguments: List[String], failAction: OnFailure) = new Continue(project, arguments, failAction)
|
||||
def result(r: xsbti.MainResult) = new Done(r)
|
||||
def run(t: Trampoline): xsbti.MainResult = t match { case d: Done => d.r; case c: Continue => run(c()) }
|
||||
|
||||
def process(project: Project, arguments: List[String], failAction: OnFailure): Trampoline =
|
||||
{
|
||||
project.log.debug("commands " + failAction.map("(on failure: " + _ + "): ").mkString + arguments.mkString(", "))
|
||||
def rememberCurrent(newArgs: List[String]) = rememberProject(rememberFail(newArgs))
|
||||
def rememberProject(newArgs: List[String]) = if(baseProject.name != project.name) (ProjectAction + " " + project.name) :: newArgs else newArgs
|
||||
def rememberFail(newArgs: List[String]) = failAction.map(f => (FailureHandlerPrefix + f)).toList ::: newArgs
|
||||
|
||||
def tryOrFail(action: => Trampoline) = try { action } catch { case e: Exception => logCommandError(project.log, e); failed(BuildErrorExitCode) }
|
||||
def reload(args: List[String]) =
|
||||
{
|
||||
val newID = new ApplicationID(configuration.provider.id, baseProject.sbtVersion.value)
|
||||
result( new Reboot(project.defScalaVersion.value, rememberCurrent(args), newID, configuration.baseDirectory) )
|
||||
}
|
||||
def failed(code: Int) =
|
||||
failAction match
|
||||
{
|
||||
case Some(c) => continue(project, c :: Nil, ExitOnFailure)
|
||||
case None => result( Exit(code) )
|
||||
}
|
||||
|
||||
arguments match
|
||||
{
|
||||
case "" :: tail => continue(project, tail, failAction)
|
||||
case (ExitCommand | QuitCommand) :: _ => result( Exit(NormalExitCode) )
|
||||
case RebootCommand :: tail => reload( tail )
|
||||
case InteractiveCommand :: _ => continue(project, prompt(baseProject, project) :: arguments, interactiveContinue)
|
||||
case SpecificBuild(version, action) :: tail =>
|
||||
if(Some(version) != baseProject.info.buildScalaVersion)
|
||||
{
|
||||
if(checkVersion(baseProject, version))
|
||||
throw new ReloadException(rememberCurrent(action :: tail), Some(version))
|
||||
else
|
||||
failed(UsageErrorExitCode)
|
||||
}
|
||||
else
|
||||
continue(project, action :: tail, failAction)
|
||||
|
||||
case CrossBuild(action) :: tail =>
|
||||
if(checkAction(project, action))
|
||||
{
|
||||
CrossBuild(project, action) match
|
||||
{
|
||||
case Some(actions) => continue(project, actions ::: tail, failAction)
|
||||
case None => failed(UsageErrorExitCode)
|
||||
}
|
||||
}
|
||||
else
|
||||
failed(UsageErrorExitCode)
|
||||
|
||||
case SetProject(name) :: tail =>
|
||||
SetProject(baseProject, name, project) match
|
||||
{
|
||||
case Some(newProject) => continue(newProject, tail, failAction)
|
||||
case None => failed(BuildErrorExitCode)
|
||||
}
|
||||
|
||||
case action :: tail if action.startsWith(FileCommandsPrefix) =>
|
||||
getSource(action.substring(FileCommandsPrefix.length).trim, baseProject.info.projectDirectory) match
|
||||
{
|
||||
case Left(portAndSuccess) =>
|
||||
val port = Math.abs(portAndSuccess)
|
||||
val previousSuccess = portAndSuccess >= 0
|
||||
readMessage(port, previousSuccess) match
|
||||
{
|
||||
case Some(message) => continue(project, message :: (FileCommandsPrefix + port) :: Nil, remoteContinue(port))
|
||||
case None =>
|
||||
project.log.error("Connection closed.")
|
||||
failed(BuildErrorExitCode)
|
||||
}
|
||||
|
||||
case Right(file) =>
|
||||
readLines(project, file) match
|
||||
{
|
||||
case Some(lines) => continue(project, lines ::: tail , failAction)
|
||||
case None => failed(UsageErrorExitCode)
|
||||
}
|
||||
}
|
||||
|
||||
case action :: tail if action.startsWith(FailureHandlerPrefix) =>
|
||||
val errorAction = action.substring(FailureHandlerPrefix.length).trim
|
||||
continue(project, tail, if(errorAction.isEmpty) None else Some(errorAction) )
|
||||
|
||||
case action :: tail if action.startsWith(ProcessorPrefix) =>
|
||||
val processorCommand = action.substring(ProcessorPrefix.length).trim
|
||||
val runner = processor.CommandRunner(PHandler.manager, PHandler.defParser, ProcessorPrefix, project.log)
|
||||
tryOrFail {
|
||||
runner(processorCommand)
|
||||
continue(project, tail, failAction)
|
||||
}
|
||||
|
||||
case PHandler(parsed) :: tail =>
|
||||
tryOrFail {
|
||||
parsed.processor(parsed.label, project, failAction, parsed.arguments) match
|
||||
{
|
||||
case s: processor.Success => continue(s.project, s.insertArguments ::: tail, s.onFailure)
|
||||
case e: processor.Exit => result( Exit(e.code) )
|
||||
case r: processor.Reload => reload( r.insertArguments ::: tail )
|
||||
}
|
||||
}
|
||||
|
||||
case action :: tail =>
|
||||
val success = processAction(baseProject, project, action, failAction == interactiveContinue)
|
||||
if(success) continue(project, tail, failAction)
|
||||
else failed(BuildErrorExitCode)
|
||||
|
||||
case Nil =>
|
||||
project.log.error("Invalid internal sbt state: no arguments")
|
||||
result( Exit(ProgramErrorExitCode) )
|
||||
}
|
||||
}
|
||||
run(process(baseProject, arguments, ExitOnFailure))
|
||||
}
|
||||
private def isInteractive(failureActions: Option[List[String]]) = failureActions == Some(InteractiveCommand :: Nil)
|
||||
private def getSource(action: String, baseDirectory: File) =
|
||||
{
|
||||
try { Left(action.toInt) }
|
||||
catch { case _: NumberFormatException => Right(new File(baseDirectory, action)) }
|
||||
}
|
||||
private def readMessage(port: Int, previousSuccess: Boolean): Option[String] =
|
||||
{
|
||||
// split into two connections because this first connection ends the previous communication
|
||||
xsbt.IPC.client(port) { _.send(previousSuccess.toString) }
|
||||
// and this second connection starts the next communication
|
||||
xsbt.IPC.client(port) { ipc =>
|
||||
val message = ipc.receive
|
||||
if(message eq null) None else Some(message)
|
||||
}
|
||||
}
|
||||
object SetProject
|
||||
{
|
||||
def unapply(s: String) =
|
||||
if(s.startsWith(ProjectAction + " "))
|
||||
Some(s.substring(ProjectAction.length + 1))
|
||||
else
|
||||
None
|
||||
def apply(baseProject: Project, projectName: String, currentProject: Project) =
|
||||
{
|
||||
val found = baseProject.projectClosure.find(_.name == projectName)
|
||||
found match
|
||||
{
|
||||
case Some(newProject) => printProject("Set current project to ", newProject)
|
||||
case None => currentProject.log.error("Invalid project name '" + projectName + "' (type 'projects' to list available projects).")
|
||||
}
|
||||
found
|
||||
}
|
||||
}
|
||||
object SpecificBuild
|
||||
{
|
||||
import java.util.regex.Pattern.{compile,quote}
|
||||
val pattern = compile(quote(SpecificBuildPrefix) + """\s*(\S+)\s*(.*)""")
|
||||
def unapply(s: String) =
|
||||
{
|
||||
val m = pattern.matcher(s)
|
||||
if(m.matches)
|
||||
Some(m.group(1).trim, m.group(2).trim)
|
||||
else
|
||||
None
|
||||
}
|
||||
}
|
||||
def checkVersion(p: Project, version: String) =
|
||||
{
|
||||
try { p.getScalaInstance(version); true }
|
||||
catch { case e: xsbti.RetrieveException => p.log.error(e.getMessage); false }
|
||||
}
|
||||
object CrossBuild
|
||||
{
|
||||
def unapply(s: String) = if(s.startsWith(CrossBuildPrefix) && !s.startsWith(SpecificBuildPrefix)) Some(s.substring(1)) else None
|
||||
def apply(project: Project, action: String): Option[List[String]] =
|
||||
{
|
||||
val againstScalaVersions = project.crossScalaVersions
|
||||
if(againstScalaVersions.isEmpty)
|
||||
{
|
||||
Console.println("Project does not declare any Scala versions to cross-build against, building against current version...")
|
||||
Some(action :: Nil)
|
||||
}
|
||||
else
|
||||
{
|
||||
if( !againstScalaVersions.forall(v => checkVersion(project, v)) )
|
||||
None
|
||||
else
|
||||
{
|
||||
val actions =
|
||||
againstScalaVersions.toList.map(SpecificBuildPrefix + _ + " " + action) ::: // build against all versions
|
||||
(SpecificBuildPrefix + project.buildScalaVersion) :: // reset to the version before the cross-build
|
||||
Nil
|
||||
Some(actions)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
private def readLines(project: Project, file: File): Option[List[String]] =
|
||||
{
|
||||
try { Some(xsbt.FileUtilities.readLines(file)) }
|
||||
catch { case e: Exception =>
|
||||
project.log.trace(e)
|
||||
project.log.error("Error reading commands from file " + file.getAbsolutePath + ": " + e.toString)
|
||||
None
|
||||
}
|
||||
}
|
||||
private def prompt(baseProject: Project, project: Project): String =
|
||||
{
|
||||
// the times for evaluating the lazy vals here are a few hundred ms out of a 2s startup
|
||||
lazy val projectNames = baseProject.projectClosure.map(_.name)
|
||||
val prefixes = ContinuousExecutePrefix :: CrossBuildPrefix :: Nil
|
||||
lazy val scalaVersions = baseProject.crossScalaVersions ++ Seq(baseProject.defScalaVersion.value)
|
||||
lazy val methods = project.methods
|
||||
lazy val methodCompletions = new ExtraCompletions { def names = methods.keys.toList; def completions(name: String) = methods(name).completions }
|
||||
lazy val completors = new Completors(ProjectAction, projectNames, interactiveCommands, List(GetAction, SetAction), SpecificBuildPrefix, scalaVersions, prefixes, project.taskNames, project.propertyNames, methodCompletions)
|
||||
val reader = new LazyJLineReader(baseProject.historyPath, MainCompletor(completors), baseProject.log)
|
||||
reader.readLine("> ").getOrElse(ExitCommand)
|
||||
}
|
||||
|
||||
/** The name of the command that loads a console with access to the current project through the variable 'project'.*/
|
||||
val ProjectConsoleAction = "console-project"
|
||||
/** The name of the command that shows the current project and logging level of that project.*/
|
||||
val ShowCurrent = "current"
|
||||
/** The name of the command that shows all available actions.*/
|
||||
val ShowActions = "actions"
|
||||
/** The name of the command that sets the currently active project.*/
|
||||
val ProjectAction = "project"
|
||||
/** The name of the command that shows all available projects.*/
|
||||
val ShowProjectsAction = "projects"
|
||||
val ExitCommand = "exit"
|
||||
val QuitCommand = "quit"
|
||||
val InteractiveCommand = "shell"
|
||||
/** The list of lowercase command names that may be used to terminate the program.*/
|
||||
val TerminateActions: Iterable[String] = ExitCommand :: QuitCommand :: Nil
|
||||
/** The name of the command that sets the value of the property given as its argument.*/
|
||||
val SetAction = "set"
|
||||
/** The name of the command that gets the value of the property given as its argument.*/
|
||||
val GetAction = "get"
|
||||
/** The name of the command that displays the help message. */
|
||||
val HelpAction = "help"
|
||||
/** The command for reloading sbt.*/
|
||||
val RebootCommand = "reload"
|
||||
/** The name of the command that toggles logging stacktraces. */
|
||||
val TraceCommand = "trace"
|
||||
/** The name of the command that compiles all sources continuously when they are modified. */
|
||||
val ContinuousCompileCommand = "cc"
|
||||
/** The prefix used to identify a request to execute the remaining input on source changes.*/
|
||||
val ContinuousExecutePrefix = "~"
|
||||
/** The prefix used to identify a request to execute the remaining input across multiple Scala versions.*/
|
||||
val CrossBuildPrefix = "+"
|
||||
/** The prefix used to identify a request to execute the remaining input after the next space against the
|
||||
* Scala version between this prefix and the space (i.e. '++version action' means execute 'action' using
|
||||
* Scala version 'version'. */
|
||||
val SpecificBuildPrefix = "++"
|
||||
/** The prefix used to identify a file or local port to read commands from. */
|
||||
val FileCommandsPrefix = "<"
|
||||
/** The prefix used to identify the action to run after an error*/
|
||||
val FailureHandlerPrefix = "!"
|
||||
/** The prefix used to identify commands for managing processors.*/
|
||||
val ProcessorPrefix = "*"
|
||||
|
||||
/** The number of seconds between polling by the continuous compile command.*/
|
||||
val ContinuousCompilePollDelaySeconds = 1
|
||||
|
||||
/** The list of all available commands at the interactive prompt in addition to the tasks defined
|
||||
* by a project.*/
|
||||
protected def interactiveCommands: Iterable[String] = basicCommands.toList ++ logLevels.toList ++ TerminateActions
|
||||
/** The list of logging levels.*/
|
||||
private def logLevels: Iterable[String] = TreeSet.empty[String] ++ Level.levels.map(_.toString)
|
||||
/** The list of all interactive commands other than logging level.*/
|
||||
private def basicCommands: Iterable[String] = TreeSet(ShowProjectsAction, ShowActions, ShowCurrent, HelpAction,
|
||||
RebootCommand, TraceCommand, ContinuousCompileCommand, ProjectConsoleAction)
|
||||
|
||||
private def processAction(baseProject: Project, currentProject: Project, action: String, isInteractive: Boolean): Boolean =
|
||||
action match
|
||||
{
|
||||
case HelpAction => displayHelp(isInteractive); true
|
||||
case ShowProjectsAction => baseProject.projectClosure.foreach(listProject); true
|
||||
case ProjectConsoleAction =>
|
||||
showResult(Run.projectConsole(currentProject), currentProject.log)
|
||||
case _ =>
|
||||
if(action.startsWith(SetAction + " "))
|
||||
setProperty(currentProject, action.substring(SetAction.length + 1))
|
||||
else if(action.startsWith(GetAction + " "))
|
||||
getProperty(currentProject, action.substring(GetAction.length + 1))
|
||||
else if(action.startsWith(TraceCommand + " "))
|
||||
setTrace(currentProject, action.substring(TraceCommand.length + 1))
|
||||
else
|
||||
handleCommand(currentProject, action)
|
||||
}
|
||||
|
||||
private def printCmd(name:String, desc:String) = Console.println(" " + name + " : " + desc)
|
||||
val BatchHelpHeader = "You may execute any project action or method or one of the commands described below."
|
||||
val InteractiveHelpHeader = "You may execute any project action or one of the commands described below. Only one action " +
|
||||
"may be executed at a time in interactive mode and is entered by name, as it would be at the command line." +
|
||||
" Also, tab completion is available."
|
||||
private def displayHelp(isInteractive: Boolean)
|
||||
{
|
||||
Console.println(if(isInteractive) InteractiveHelpHeader else BatchHelpHeader)
|
||||
Console.println("Available Commands:")
|
||||
|
||||
printCmd("<action name>", "Executes the project specified action.")
|
||||
printCmd("<method name> <parameter>*", "Executes the project specified method.")
|
||||
printCmd("<processor label> <arguments>", "Runs the specified processor.")
|
||||
printCmd(ContinuousExecutePrefix + " <command>", "Executes the project specified action or method whenever source files change.")
|
||||
printCmd(FileCommandsPrefix + " file", "Executes the commands in the given file. Each command should be on its own line. Empty lines and lines beginning with '#' are ignored")
|
||||
printCmd(CrossBuildPrefix + " <command>", "Executes the project specified action or method for all versions of Scala defined in crossScalaVersions.")
|
||||
printCmd(SpecificBuildPrefix + "<version> <command>", "Changes the version of Scala building the project and executes the provided command. <command> is optional.")
|
||||
printCmd(ProcessorPrefix, "Prefix for commands for managing processors. Run '" + ProcessorPrefix + "help' for details.")
|
||||
printCmd(ShowActions, "Shows all available actions.")
|
||||
printCmd(RebootCommand, "Reloads sbt, picking up modifications to sbt.version or scala.version and recompiling modified project definitions.")
|
||||
printCmd(HelpAction, "Displays this help message.")
|
||||
printCmd(ShowCurrent, "Shows the current project, Scala version, and logging level.")
|
||||
printCmd(Level.levels.mkString(", "), "Set logging for the current project to the specified level.")
|
||||
printCmd(TraceCommand + " " + validTraceArguments, "Configures stack trace logging. " + traceExplanation)
|
||||
printCmd(ProjectAction + " <project name>", "Sets the currently active project.")
|
||||
printCmd(ShowProjectsAction, "Shows all available projects.")
|
||||
printCmd(TerminateActions.elements.mkString(", "), "Terminates the build.")
|
||||
printCmd(SetAction + " <property> <value>", "Sets the value of the property given as its argument.")
|
||||
printCmd(GetAction + " <property>", "Gets the value of the property given as its argument.")
|
||||
printCmd(ProjectConsoleAction, "Enters the Scala interpreter with the current project definition bound to the variable 'current' and all members imported.")
|
||||
if(!isInteractive)
|
||||
printCmd(InteractiveCommand, "Enters the sbt interactive shell")
|
||||
}
|
||||
private def listProject(p: Project) = printProject("\t", p)
|
||||
private def printProject(prefix: String, p: Project): Unit =
|
||||
Console.println(prefix + p.name + " " + p.version)
|
||||
|
||||
/** Handles the given command string provided at the command line. Returns false if there was an error*/
|
||||
private def handleCommand(project: Project, command: String): Boolean =
|
||||
{
|
||||
command match
|
||||
{
|
||||
case GetAction => getArgumentError(project.log)
|
||||
case SetAction => setArgumentError(project.log)
|
||||
case ProjectAction => setProjectError(project.log)
|
||||
case TraceCommand => setTraceError(project.log); true
|
||||
case ShowCurrent =>
|
||||
printProject("Current project is ", project)
|
||||
Console.println("Current Scala version is " + project.buildScalaVersion)
|
||||
Console.println("Current log level is " + project.log.getLevel)
|
||||
printTraceEnabled(project)
|
||||
true
|
||||
case ShowActions => showActions(project); true
|
||||
case Level(level) => setLevel(project, level); true
|
||||
case ContinuousCompileCommand => compileContinuously(project)
|
||||
case action if action.startsWith(ContinuousExecutePrefix) => executeContinuously(project, action.substring(ContinuousExecutePrefix.length).trim)
|
||||
case action => handleAction(project, action)
|
||||
}
|
||||
}
|
||||
private def showActions(project: Project): Unit = Console.println(project.taskAndMethodList)
|
||||
|
||||
// returns true if it succeeded
|
||||
private def handleAction(project: Project, action: String): Boolean =
|
||||
{
|
||||
def show(result: Option[String]): Boolean = showResult(result, project.log)
|
||||
val startTime = System.currentTimeMillis
|
||||
val result = withAction(project, action)( (name, params) => show(project.call(name, params)))( name => show(project.act(name)))
|
||||
printTime(project, startTime, "")
|
||||
result
|
||||
}
|
||||
// returns true if it succeeded
|
||||
private def showResult(result: Option[String], log: Logger): Boolean =
|
||||
{
|
||||
result match
|
||||
{
|
||||
case Some(errorMessage) => log.error(errorMessage); false
|
||||
case None => log.success("Successful."); true
|
||||
}
|
||||
}
|
||||
// true if the action exists
|
||||
private def checkAction(project: Project, actionString: String): Boolean =
|
||||
withAction(project, actionString)( (n,p) => true)( n => true)
|
||||
private def withAction(project: Project, actionString: String)(ifMethod: (String, Array[String]) => Boolean)(ifAction: String => Boolean): Boolean =
|
||||
{
|
||||
def didNotExist(taskType: String, name: String) =
|
||||
{
|
||||
project.log.error("No " + taskType + " named '" + name + "' exists.")
|
||||
project.log.info("Execute 'help' for a list of commands or 'actions' for a list of available project actions and methods.")
|
||||
false
|
||||
}
|
||||
impl.CommandParser.parse(actionString) match
|
||||
{
|
||||
case Left(errMsg) => project.log.error(errMsg); false
|
||||
case Right((name, parameters)) =>
|
||||
if(project.methods.contains(name))
|
||||
ifMethod(name, parameters.toArray)
|
||||
else if(!parameters.isEmpty)
|
||||
didNotExist("method", name)
|
||||
else if(project.deepTasks.contains(name))
|
||||
ifAction(name)
|
||||
else
|
||||
didNotExist("action", name)
|
||||
}
|
||||
}
|
||||
|
||||
/** Toggles whether stack traces are enabled.*/
|
||||
private def setTrace(project: Project, value: String): Boolean =
|
||||
{
|
||||
try
|
||||
{
|
||||
val newValue = if(value == "on") MaxInt else if(value == "off") -1 else if(value == "nosbt") 0 else value.toInt
|
||||
project.projectClosure.foreach(_.log.setTrace(newValue))
|
||||
printTraceEnabled(project)
|
||||
true
|
||||
}
|
||||
catch { case _: NumberFormatException => setTraceError(project.log) }
|
||||
}
|
||||
/** Prints a human-readable description of the current stack-trace setting of `project`'s log,
* mirroring the encoding used by `setTrace` (0 = no sbt frames, MaxInt = unlimited,
* otherwise a per-exception frame limit). */
private def printTraceEnabled(project: Project)
{
	def traceLevel(level: Int) = if(level == 0) " (no sbt stack elements)" else if(level == MaxInt) "" else " (maximum " + level + " stack elements per exception)"
	Console.println("Stack traces are " + (if(project.log.traceEnabled) "enabled" + traceLevel(project.log.getTrace) else "disabled"))
}
|
||||
/** Sets the logging level to `level` on `project` and every project in its closure,
* then echoes the newly effective level to the console. */
private def setLevel(project: Project, level: Level.Value)
{
	for(member <- project.projectClosure)
		member.log.setLevel(level)
	Console.println("Set log level to " + project.log.getLevel)
}
|
||||
/** Logs the wall-clock time elapsed since `startTime` to `project`'s log, rounded to the
* nearest second, prefixed by the label `s` (with a trailing space) when non-empty. */
private def printTime(project: Project, startTime: Long, s: String)
{
	// +500 rounds to the nearest second instead of truncating
	val elapsedSeconds = (System.currentTimeMillis() - startTime + 500) / 1000
	project.log.info("")
	val label = if(s.isEmpty) "" else s + " "
	project.log.info("Total " + label + "time: " + elapsedSeconds + " s, completed " + nowString)
}
|
||||
/** The current date and time, formatted with the default locale's medium date and time styles. */
private def nowString =
{
	import java.text.DateFormat
	DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM).format(new java.util.Date)
}
|
||||
/** Provides a partial message (intended to be appended after the property name) describing
* why the given property is undefined: never set, or set to an unresolvable value.
* Returns the empty string for a resolvable property. */
private def undefinedMessage(property: Project#UserProperty[_]): String =
{
	property.resolve match
	{
		case vu: UndefinedValue => " is not defined."
		case e: ResolutionException => " has invalid value: " + e.toString
		case _ => ""
	}
}
|
||||
/** Interactively prompts the user for values of the undefined properties in `properties`,
* recursing through the list. `first` is true the first time the head property is prompted,
* in which case the reason it is undefined is logged before prompting.
* Returns false when the user aborts input (EOF from the reader yields None); an invalid
* value re-prompts the same property. Non-user properties are skipped. */
private def fillUndefinedProperties(project: Project, properties: List[(String, Project#Property[_])], first: Boolean): Boolean =
{
	properties match
	{
		case (name, variable) :: tail =>
		{
			// None => quit; Some(true) => advance to next property; Some(false) => retry this property
			val shouldAdvanceOrQuit =
				variable match
				{
					case property: Project#UserProperty[_] =>
						if(first)
							project.log.error(" Property '" + name + "' " + undefinedMessage(property))
						// readLine returns None on end of input, which aborts the whole process
						for(newValue <- SimpleReader.readLine(" Enter new value for " + name + " : ")) yield
						{
							try
							{
								property.setStringValue(newValue)
								true
							}
							catch
							{
								// era-appropriate catch-all (NonFatal is unavailable in this Scala version);
								// setStringValue is expected to throw on an unparseable value
								case e =>
									project.log.error("Invalid value: " + e.getMessage)
									false
							}
						}
					case _ => Some(true)
				}
			shouldAdvanceOrQuit match
			{
				case Some(shouldAdvance) => fillUndefinedProperties(project, if(shouldAdvance) tail else properties, shouldAdvance)
				case None => false
			}
		}
		case Nil => true
	}
}
|
||||
/** Iterates over `projects`, prompting the user (via `fillUndefinedProperties`) for the value
* of each project's uninitialized properties. Each project's environment is saved after its
* prompts, even when the user aborted. Returns true only if every project was filled in. */
private def fillUndefinedProjectProperties(projects: List[Project]): Boolean =
{
	projects match
	{
		case project :: remaining =>
			val uninitialized = project.uninitializedProperties.toList
			if(uninitialized.isEmpty)
				fillUndefinedProjectProperties(remaining)
			else
			{
				project.log.error("Project in " + project.info.projectDirectory.getAbsolutePath + " has undefined properties.")
				// && short-circuits: remaining projects are not prompted once the user aborts
				val result = fillUndefinedProperties(project, uninitialized, true) && fillUndefinedProjectProperties(remaining)
				project.saveEnvironment()
				result
			}
		case Nil => true
	}
}
|
||||
/** Prints the value of the property named `propertyName` to the console.
* Looks in `project` first; when the project has no such property, falls back to a
* JVM system property of the same name. Returns true when a defined value was printed. */
private def getProperty(project: Project, propertyName: String): Boolean =
{
	if(propertyName.isEmpty)
	{
		project.log.error("No property name specified.")
		false
	}
	else
	{
		project.getPropertyNamed(propertyName) match
		{
			case Some(property) =>
				property.resolve match
				{
					case u: UndefinedValue => project.log.error("Value of property '" + propertyName + "' is undefined."); false
					case ResolutionException(m, e) => project.log.error(m); false
					case DefinedValue(value, isInherited, isDefault) => Console.println(value.toString); true
				}
			case None =>
				// fall back to JVM system properties
				val value = System.getProperty(propertyName)
				if(value == null)
					project.log.error("No property named '" + propertyName + "' is defined.")
				else
					Console.println(value)
				value != null
		}
	}
}
|
||||
/** Separates the space separated property name/value pair and stores the value in the user-defined property
* with the given name in the given project. If no such property exists, the value is stored in a system
* property. An omitted value is treated as the empty string. Changes to the Scala or sbt version
* properties only take effect after 'reload', which is noted to the user. Returns true on success. */
private def setProperty(project: Project, propertyNameAndValue: String): Boolean =
{
	// group 1: property name; group 2 (optional): everything after the first whitespace
	val m = """(\S+)(\s+\S.*)?""".r.pattern.matcher(propertyNameAndValue)
	if(m.matches())
	{
		val name = m.group(1)
		val newValue =
		{
			val v = m.group(2)
			if(v == null) "" else v.trim
		}
		def notePending(changed: String): Unit = Console.println(" Build will use " + changed + newValue + " after running 'reload' command or restarting sbt.")
		project.getPropertyNamed(name) match
		{
			case Some(property) =>
			{
				try
				{
					property.setStringValue(newValue)
					// version properties require a reload before they take effect
					property match
					{
						case project.defScalaVersion | project.buildScalaVersions => notePending("Scala ")
						case project.sbtVersion => notePending("sbt ")
						case _ => Console.println(" Set property '" + name + "' = '" + newValue + "'")
					}
					true
				}
				catch { case e =>
					// era-appropriate catch-all; setStringValue throws on an invalid value
					project.log.error("Error setting property '" + name + "' in " + project.environmentLabel + ": " + e.toString)
					false
				}
				// persist regardless of whether the set succeeded
				finally { project.saveEnvironment().foreach(msg => project.log.error("Error saving environment: " + msg)) }
			}
			case None =>
			{
				System.setProperty(name, newValue)
				project.log.info(" Set system property '" + name + "' = '" + newValue + "'")
				true
			}
		}
	}
	else
		setArgumentError(project.log)
}
|
||||
|
||||
/** Runs 'test-compile' continuously (the `~` / cc behavior), re-running on source changes. */
private def compileContinuously(project: Project) = executeContinuously(project, "test-compile")
|
||||
/** Repeatedly executes `action` whenever watched sources change, until the user interrupts
* by pressing enter (or another key the project designates as terminating).
* Returns false without watching when `action` does not exist. */
private def executeContinuously(project: Project, action: String) =
{
	// consumes buffered stdin one byte at a time; terminates the watch if any byte is a
	// terminate key (recursion continues draining until input is exhausted or a match is found)
	def shouldTerminate: Boolean = (System.in.available > 0) && (project.terminateWatch(System.in.read()) || shouldTerminate)
	val actionValid = checkAction(project, action)
	if(actionValid)
	{
		SourceModificationWatch.watchUntil(project, ContinuousCompilePollDelaySeconds)(shouldTerminate)
		{
			handleAction(project, action)
			Console.println("Waiting for source changes... (press enter to interrupt)")
		}
		// drain any remaining buffered input so it does not leak into the interactive prompt
		while (System.in.available() > 0) System.in.read()
	}
	actionValid
}
|
||||
|
||||
// User-facing help strings for the 'trace' command.
def validTraceArguments = "'on', 'nosbt', 'off', or <integer>"
def traceExplanation = "'nosbt' prints stack traces up to the first sbt frame. An integer gives the number of frames to show per exception."
// True if `s` (case-insensitively) is one of the commands that exit the interactive loop.
private def isTerminateAction(s: String) = TerminateActions.elements.contains(s.toLowerCase)
// Usage-error reporters for the built-in commands; each logs and returns false.
private def setTraceError(log: Logger) = logError(log)("Invalid arguments for 'trace': expected " + validTraceArguments + ".")
private def setArgumentError(log: Logger) = logError(log)("Invalid arguments for 'set': expected property name and new value.")
private def getArgumentError(log: Logger) = logError(log)("Invalid arguments for 'get': expected property name.")
private def setProjectError(log: Logger) = logError(log)("Invalid arguments for 'project': expected project name.")
// Logs `s` as an error and returns false, for use as a command result.
private def logError(log: Logger)(s: String) = { log.error(s); false }
|
||||
|
||||
/** Logs a throwable raised by a command. ProcessorExceptions carry a user-oriented message,
* so only their cause (if any) goes to the trace log; other throwables are traced whole
* and summarized via toString. */
private def logCommandError(log: Logger, e: Throwable) =
	e match
	{
		case pe: processor.ProcessorException =>
			if(pe.getCause ne null) log.trace(pe.getCause)
			log.error(e.getMessage)
		case e =>
			log.trace(e)
			log.error(e.toString)
	}
|
||||
}
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
|
||||
/** An exit request reported to the sbt launcher, with a required non-negative status code. */
private case class Exit(code: Int) extends xsbti.Exit
{
	require(code >= 0)
}
|
||||
/** A reboot request reported to the sbt launcher: relaunch `app` with the given Scala version,
* arguments, and base directory. The argument list is exposed as the Array the launcher
* interface requires. */
private class Reboot(val scalaVersion: String, argsList: List[String], val app: xsbti.ApplicationID, val baseDirectory: File) extends xsbti.Reboot
{
	def arguments = argsList.toArray
}
|
||||
/** An ApplicationID identical to `delegate` except that its version is `newVersion`.
* Used to relaunch the same application under a different version; all other
* members forward to the delegate unchanged. */
private class ApplicationID(delegate: xsbti.ApplicationID, newVersion: String) extends xsbti.ApplicationID
{
	def groupID = delegate.groupID
	def name = delegate.name
	def version = newVersion

	def mainClass = delegate.mainClass
	def mainComponents = delegate.mainComponents
	def crossVersioned = delegate.crossVersioned

	def classpathExtra = delegate.classpathExtra
}
|
||||
/** Control-flow exception requesting a project reload, carrying the arguments still to be
* processed and the build Scala version to use. fillInStackTrace is overridden to skip
* capturing a stack trace, since this exception is used for control flow, not error reporting. */
private final class ReloadException(val remainingArguments: List[String], val buildScalaVersion: Option[String]) extends RuntimeException
{
	override def fillInStackTrace = this
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** Utilities for reflectively obtaining Scala singleton (object) instances. */
object ModuleUtilities
{
	/** Returns the instance of the Scala object named `className`, loaded from `loader`.
	* scalac compiles `object Foo` to a class `Foo$` whose static `MODULE$` field
	* holds the singleton instance, so that field is read reflectively. */
	def getObject(className: String, loader: ClassLoader) =
	{
		val moduleClass = Class.forName(className + "$", true, loader)
		moduleClass.getField("MODULE$").get(null)
	}
}
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.util.regex.Pattern
|
||||
|
||||
/** A java.io.FileFilter with combinators for building compound filters:
* union (||), intersection (&&), difference (--), and negation (unary -). */
trait FileFilter extends java.io.FileFilter with NotNull
{
	def || (filter: FileFilter): FileFilter = new SimpleFileFilter( file => accept(file) || filter.accept(file) )
	def && (filter: FileFilter): FileFilter = new SimpleFileFilter( file => accept(file) && filter.accept(file) )
	def -- (filter: FileFilter): FileFilter = new SimpleFileFilter( file => accept(file) && !filter.accept(file) )
	def unary_- : FileFilter = new SimpleFileFilter( file => !accept(file) )
}
|
||||
/** A FileFilter that decides acceptance from the file name alone. Provides name-level
* combinators (|, &, -, unary -) that parallel the File-level ones on FileFilter
* but stay within NameFilter. */
trait NameFilter extends FileFilter with NotNull
{
	/** True if a file with the given name is accepted. */
	def accept(name: String): Boolean
	// the File-level accept is derived from the name-level one and cannot be overridden
	final def accept(file: File): Boolean = accept(file.getName)
	def | (filter: NameFilter): NameFilter = new SimpleFilter( name => accept(name) || filter.accept(name) )
	def & (filter: NameFilter): NameFilter = new SimpleFilter( name => accept(name) && filter.accept(name) )
	def - (filter: NameFilter): NameFilter = new SimpleFilter( name => accept(name) && !filter.accept(name) )
	override def unary_- : NameFilter = new SimpleFilter( name => !accept(name) )
}
|
||||
/** Accepts hidden files, excluding the current directory "." (which File.isHidden
* reports as hidden on Unix-like systems). */
object HiddenFileFilter extends FileFilter {
	def accept(file: File) = file.isHidden && file.getName != "."
}
|
||||
/** Accepts files that exist on disk. */
object ExistsFileFilter extends FileFilter {
	def accept(file: File) = file.exists
}
|
||||
/** Accepts directories. */
object DirectoryFilter extends FileFilter {
	def accept(file: File) = file.isDirectory
}
|
||||
/** A FileFilter defined by an arbitrary predicate on the File. */
class SimpleFileFilter(val acceptFunction: File => Boolean) extends FileFilter
{
	def accept(file: File) = acceptFunction(file)
}
|
||||
/** A NameFilter that accepts exactly the name `matchName`. */
class ExactFilter(val matchName: String) extends NameFilter
{
	def accept(name: String) = matchName == name
}
|
||||
/** A NameFilter defined by an arbitrary predicate on the name. */
class SimpleFilter(val acceptFunction: String => Boolean) extends NameFilter
{
	def accept(name: String) = acceptFunction(name)
}
|
||||
/** A NameFilter that accepts names fully matching the given regular expression. */
class PatternFilter(val pattern: Pattern) extends NameFilter
{
	def accept(name: String) = pattern.matcher(name).matches
}
|
||||
/** A NameFilter that accepts every name. */
object AllPassFilter extends NameFilter
{
	def accept(name: String) = true
}
|
||||
/** A NameFilter that rejects every name. */
object NothingFilter extends NameFilter
{
	def accept(name: String) = false
}
|
||||
|
||||
/** Constructs NameFilters from glob expressions in which '*' is the only wildcard.
* "*" alone accepts everything, an expression without '*' is an exact match, and any
* other expression is compiled to a regular expression with each literal segment quoted. */
object GlobFilter
{
	def apply(expression: String): NameFilter =
	{
		require(!expression.exists(java.lang.Character.isISOControl), "Control characters not allowed in filter expression.")
		if(expression == "*")
			AllPassFilter
		else if(expression.indexOf('*') < 0) // includes case where expression is empty
			new ExactFilter(expression)
		else
			// split on '*' (keeping trailing empty segments), quote each literal part, and join with ".*"
			new PatternFilter(Pattern.compile(expression.split("\\*", -1).map(quote).mkString(".*")))
	}
	// Pattern.quote would wrap an empty segment in \Q\E noise, so empty maps to empty;
	// literal newlines are rewritten to the two-character escape before quoting
	private def quote(s: String) = if(s.isEmpty) "" else Pattern.quote(s.replaceAll("\n", """\n"""))
}
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{File, FileOutputStream}
|
||||
import java.util.jar.{JarEntry, JarFile, JarOutputStream, Pack200}
|
||||
import scala.collection.Map
|
||||
import FileUtilities._
|
||||
|
||||
/** Applies and reverses pack200 compression on jar files. All methods follow the
* Option-as-error convention: None on success, Some(message) on failure. */
object Pack
{
	/** Packs `jarPath` to `out` using the default packer options. */
	def pack(jarPath: Path, out: Path, log: Logger): Option[String] = pack(jarPath, out, defaultPackerOptions, log)
	/** Packs `jarPath` to `out`, first installing `options` into the packer's property map. */
	def pack(jarPath: Path, out: Path, options: Map[String, String], log: Logger): Option[String] =
	{
		val packer = Pack200.newPacker
		val properties = new wrap.MutableMapWrapper(packer.properties)
		properties ++= options

		// open the input jar and output stream with managed lifetimes; inner None signals success
		OpenResource.jarFile(false).ioOption(jarPath.asFile, "applying pack200 compression to jar", log) { f =>
			writeStream(out.asFile, log) { stream =>
				packer.pack(f, stream)
				None
			}
		}
	}
	/** Unpacks the pack200 file `packedPath` into the jar `toJarPath`. */
	def unpack(packedPath: Path, toJarPath: Path, log: Logger): Option[String] =
	{
		val unpacker = Pack200.newUnpacker
		writeStream(toJarPath.asFile, log) { fileStream =>
			val jarOut = new JarOutputStream(fileStream)
			// trapUnitAndFinally converts exceptions to an error message and always closes jarOut
			Control.trapUnitAndFinally("Error unpacking '" + packedPath + "': ", log)
				{ unpacker.unpack(packedPath.asFile, jarOut); None }
				{ jarOut.close() }
		}
	}
	/** No options by default; callers supply Pack200 properties explicitly when needed. */
	def defaultPackerOptions: Map[String, String] = scala.collection.immutable.Map()
}
|
||||
|
||||
import java.net.URL
|
||||
/** Signs and verifies jars by shelling out to the `jarsigner` tool.
* This is somewhat of a mess and is not entirely correct. jarsigner doesn't work properly
* on scalaz and it is difficult to determine whether a jar is both signed and valid. */
object SignJar
{
	/** A command-line option group for jarsigner. `signOnly` marks options that are only
	* valid when signing and must be filtered out for verification. */
	final class SignOption private[SignJar](val toList: List[String], val signOnly: Boolean) extends NotNull
	{
		override def toString = toList.mkString(" ")
	}
	// Factory methods for the supported jarsigner options; the boolean is `signOnly`.
	def keyStore(url: URL) = new SignOption("-keystore" :: url.toExternalForm :: Nil, true)
	def signedJar(p: Path) = new SignOption("-signedjar" :: p.asFile.getAbsolutePath :: Nil, true)
	def verbose = new SignOption("-verbose" :: Nil, false)
	def sigFile(name: String) = new SignOption("-sigfile" :: name :: Nil, true)
	def storeType(t: String) = new SignOption("-storetype" :: t :: Nil, false)
	def provider(p: String) = new SignOption("-provider" :: p :: Nil, false)
	def providerName(p: String) = new SignOption("-providerName" :: p :: Nil, false)
	def storePassword(p: String) = new SignOption("-storepass" :: p :: Nil, true)
	def keyPassword(p: String) = new SignOption("-keypass" :: p :: Nil, true)

	private def VerifyOption = "-verify"

	/** Uses jarsigner to sign the given jar with the given key alias.
	* Returns None on success, Some(message) on a nonzero exit code. */
	def sign(jarPath: Path, alias: String, options: Seq[SignOption], log: Logger): Option[String] =
	{
		require(!alias.trim.isEmpty, "Alias cannot be empty")
		val arguments = options.toList.flatMap(_.toList) ::: jarPath.asFile.getAbsolutePath :: alias :: Nil
		execute("Signed " + jarPath, "signing", arguments, log)
	}
	/** Uses jarsigner to verify the given jar; sign-only options are dropped first. */
	def verify(jarPath: Path, options: Seq[SignOption], log: Logger): Option[String] =
	{
		val arguments = options.filter(!_.signOnly).toList.flatMap(_.toList) ::: VerifyOption :: jarPath.asFile.getAbsolutePath :: Nil
		execute("Verified " + jarPath, "verifying", arguments, log)
	}
	// Runs jarsigner with `arguments`, logging its output; exit code 0 is success.
	private def execute(successMessage: String, action: String, arguments: List[String], log: Logger): Option[String] =
	{
		val exitCode = Process(CommandName, arguments) ! log
		if(exitCode == 0)
		{
			log.debug(successMessage)
			None
		}
		else
			Some("Error " + action + " jar (exit code was " + exitCode + ".)")
	}

	private val CommandName = "jarsigner"
}
|
||||
|
|
@ -0,0 +1,494 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** This file provides the parallel execution engine of sbt. It is a fairly general module, with pluggable Schedulers and Strategies.
|
||||
*
|
||||
* There are three main componenets to the engine: Distributors, Schedulers, and Strategies.
|
||||
*
|
||||
* A Scheduler provides work that is ready to execute. The main type of Scheduler in sbt is a scheduler
|
||||
* of nodes in a directed, acyclic graph.. This type of scheduler provides work when its
|
||||
* dependencies have finished executing successfully. Another type of scheduler is a MultiScheduler, which draws work
|
||||
* from sub-schedulers.
|
||||
*
|
||||
* A Strategy is used by a Scheduler to select the work to process from the work that is ready. It is notified as work
|
||||
* becomes ready. It is requested to select work to process from the work that is ready. The main Strategy in sbt is the
|
||||
* OrderedStrategy, which prioritizes work according to some ordering defined by its constructor. The primary ordering
|
||||
* used in sbt is based on the longest length of the processing path that includes the node being ordered.
|
||||
*
|
||||
* A Distributor uses a Scheduler to obtain work according up to the maximum work allowed to run at once. It runs each
|
||||
* unit of work in its own Thread.
|
||||
**/
|
||||
|
||||
import java.util.concurrent.LinkedBlockingQueue
|
||||
import scala.collection.{immutable, mutable}
|
||||
import immutable.TreeSet
|
||||
|
||||
/** Interface to the Distributor/Scheduler system for running tasks with dependencies described by a directed acyclic graph.*/
object ParallelRunner
{
	/** Executes work for nodes in an acyclic directed graph with root node `node`. The name of a node is provided
	* by the `name` function, the work to perform for a node by `action`, and the logger to use for a node by `log`.
	* The maximum number of tasks to execute simultaneously is `maximumTasks`.
	* Returns a (possibly empty) list of failures, each with its message prefixed by the failing node's name. */
	def run[D <: Dag[D]](node: D, name: D => String, action: D => Option[String], maximumTasks: Int, log: D => Logger): List[WorkFailure[D]] =
	{
		val info = DagInfo(node)
		// Create a strategy that gives each node a uniform self cost and uses the maximum cost to execute it and the nodes that depend on it
		// to determine which node to run. The self cost could be modified to include more information about a node, such as the size of input files
		val strategy = defaultStrategy(info)
		val jobScheduler = CompoundScheduler(new DagScheduler(info, strategy), strategy)
		val distributor = new Distributor(jobScheduler, action, maximumTasks, log)
		val result = distributor.run().toList
		for( WorkFailure(work, message) <- result ) yield WorkFailure(work, "Error running " + name(work) + ": " + message)
	}
	/** A DagScheduler over the graph rooted at `node`, using the default max-path strategy. */
	def dagScheduler[D <: Dag[D]](node: D) =
	{
		val info = DagInfo(node)
		new DagScheduler(info, defaultStrategy(info))
	}
	// uniform self cost of 1 per node; see MaxPathStrategy for how costs order the work
	private def defaultStrategy[D <: Dag[D]](info: DagInfo[D]) = MaxPathStrategy((d: D) => 1, info)
	/** A scheduler with no work at all: immediately complete, never pending, no failures. */
	def emptyScheduler[D]: Scheduler[D] =
		new Scheduler[D]
		{
			/** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
			* is encapsulated in this object.*/
			def run: Run = new Run
			{
				def complete(d: D, result: Option[String]) {}
				def hasPending = false
				/**Returns true if this scheduler has no more work to be done, ever.*/
				def isComplete = true
				def next(max: Int) = Nil
				def failures = Nil
			}
		}
}
|
||||
/** Requests work from `scheduler` and processes it using `doWork`. This class limits the amount of work processing at any given time
* to `workers`. Each unit of work runs on its own Thread; completion is communicated back
* to the coordinating thread through a blocking queue, so only that thread mutates run state. */
final class Distributor[D](scheduler: Scheduler[D], doWork: D => Option[String], workers: Int, log: D => Logger) extends NotNull
{
	require(workers > 0)
	/** Runs all work to completion and returns the failures that occurred. */
	final def run(): Iterable[WorkFailure[D]] = (new Run).run()

	private final class Run extends NotNull
	{
		private[this] val schedule = scheduler.run
		/** The number of threads currently running. */
		private[this] var running = 0
		/** Pending notifications of completed work. */
		private[this] val complete = new java.util.concurrent.LinkedBlockingQueue[Done]

		// main loop: dispatch available work, then block for a completion, until idle with nothing pending
		private[Distributor] def run(): Iterable[WorkFailure[D]] =
		{
			next()
			if(isIdle && !schedule.hasPending) // test if all work is complete
				schedule.failures
			else
			{
				waitForCompletedWork() // wait for some work to complete
				run() // continue
			}
		}
		// true if the maximum number of worker threads are currently running
		private def atMaximum = running == workers
		private def availableWorkers = workers - running
		// true if no worker threads are currently running
		private def isIdle = running == 0
		// process more work
		private def next()
		{
			// if the maximum threads are being used, do nothing
			// if all work is complete or the scheduler is waiting for current work to complete, do nothing
			if(!atMaximum && schedule.hasPending)
			{
				val nextWork = schedule.next(availableWorkers)
				val nextSize = nextWork.size
				assume(nextSize <= availableWorkers, "Scheduler provided more work (" + nextSize + ") than allowed (" + availableWorkers + ")")
				assume(nextSize > 0 || !isIdle, "Distributor idle and the scheduler indicated work pending, but provided no work.")
				nextWork.foreach(process)
			}
		}
		// wait on the blocking queue `complete` until some work finishes and notify the scheduler
		private def waitForCompletedWork()
		{
			require(running > 0)
			val done = complete.take()
			running -= 1
			schedule.complete(done.data, done.result)
		}
		// start a new worker thread for `data`, accounting for it in `running` first
		private def process(data: D)
		{
			require(running + 1 <= workers)
			running += 1
			new Worker(data).start()
		}
		private class Worker(data: D) extends Thread with NotNull
		{
			// workers ignore interruption; termination is coordinated via the completion queue
			override def interrupt() {}
			override def run()
			{
				// trapUnit converts any exception from doWork into an error-message result
				val result = Control.trapUnit("", log(data))(doWork(data))
				complete.put( new Done(result, data) )
			}
		}
	}
	// a completed unit of work paired with its result (None = success)
	private final class Done(val result: Option[String], val data: D) extends NotNull
}
|
||||
/** A failed unit of work `work` together with its failure `message`;
* toString is just the message for direct display. */
final case class WorkFailure[D](work: D, message: String) extends NotNull
{
	override def toString = message
}
|
||||
/** Schedules work of type D. A Scheduler determines what work is ready to be processed.
* A Scheduler is itself immutable. It creates a mutable object for each scheduler run.*/
trait Scheduler[D] extends NotNull
{
	/** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
	* is encapsulated in this object.*/
	def run: Run
	trait Run extends NotNull
	{
		/** Notifies this scheduler that work has completed with the given result (Some with the error message or None if the work succeeded).*/
		def complete(d: D, result: Option[String]): Unit
		/** Returns true if there is any more work to be done, although remaining work can be blocked
		* waiting for currently running work to complete.*/
		def hasPending: Boolean
		/**Returns true if this scheduler has no more work to be done, ever.*/
		def isComplete: Boolean
		/** Returns up to 'max' units of work. `max` is always positive. The returned sequence cannot be empty if there is
		* no work currently being processed.*/
		def next(max: Int): Seq[D]
		/** A list of failures that occurred to this point, as reported to the `complete` method. */
		def failures: Iterable[WorkFailure[D]]
	}
}
|
||||
/** A Strategy selects the work to process from work that is ready to be processed.*/
private trait ScheduleStrategy[D] extends NotNull
{
	/** Starts a new run. The returned object is a new Run, representing a single strategy run. All state for the run
	* is handled through this object and is encapsulated in this object.*/
	def run: Run
	trait Run extends NotNull
	{
		/** Adds the given work to the list of work that is ready to run.*/
		def workReady(dep: D): Unit
		/** Returns true if there is work ready to be run. */
		def hasReady: Boolean
		/** Provides up to `max` units of work. `max` is always positive and this method is not called
		* if hasReady is false. The returned list cannot be empty if there is work ready to be run.*/
		def next(max: Int): List[D]
		/** If this strategy returns different work from `next` than is provided to `workReady`,
		* this method must map back to the original work.*/
		def reverseMap(dep: D): Iterable[D]
	}
}
|
||||
|
||||
/** A scheduler for nodes of a directed-acyclic graph. It requires the root of the graph
* and a strategy to select which available nodes to run on limited resources.
* A node becomes ready once all of its dependencies have completed successfully;
* a failure discards the node and everything that (transitively) depends on it. */
private[sbt] final class DagScheduler[D <: Dag[D]](info: DagInfo[D], strategy: ScheduleStrategy[D]) extends Scheduler[D]
{
	def run: Run = new Run
	{
		val infoRun = info.run
		val strategyRun = strategy.run

		// find nodes that are ready to be run (no dependencies)
		{
			val startReady = for( (key, value) <- infoRun.remainingDepsRun if(value.isEmpty)) yield key
			infoRun.remainingDepsRun --= startReady
			startReady.foreach(strategyRun.workReady)
		}

		val failures = new mutable.ListBuffer[WorkFailure[D]]
		def next(max: Int) = strategyRun.next(max)
		def complete(work: D, result: Option[String])
		{
			// the strategy may have substituted work; translate back to the scheduled node(s)
			for(originalWork <- strategyRun.reverseMap(work))
			{
				result match
				{
					// success: mark complete and feed newly-unblocked nodes back to the strategy
					case None => infoRun.complete(originalWork, strategyRun.workReady)
					// failure: drop the node and its transitive dependents, recording the failure
					case Some(errorMessage) =>
						infoRun.clear(originalWork)
						failures += WorkFailure(originalWork, errorMessage)
				}
			}
		}
		def isComplete = !strategyRun.hasReady && infoRun.reverseDepsRun.isEmpty
		// the strategy might not have any work ready if the remaining work needs currently executing work to finish first
		def hasPending = strategyRun.hasReady || !infoRun.remainingDepsRun.isEmpty
	}
}
|
||||
/** Builds an OrderedStrategy that prioritizes each node by the cost of the most expensive
* execution path through it: a node's cost is its own `selfCost` plus the maximum cost
* among the nodes that depend on it. */
private object MaxPathStrategy
{
	def apply[D <: Dag[D]](selfCost: D => Int, info: DagInfo[D]): ScheduleStrategy[D] =
	{
		val cost = // compute the cost of the longest execution path ending at each node
		{
			val cost = new mutable.HashMap[D, Int]
			// memoized recursion over the reverse-dependency graph; leaves contribute only selfCost
			def computeCost(work: D): Int = info.reverseDeps.getOrElse(work, immutable.Set.empty[D]).foldLeft(0)(_ max getCost(_)) + selfCost(work)
			def getCost(work: D): Int = cost.getOrElseUpdate(work, computeCost(work))
			info.remainingDeps.keys.foreach(getCost)
			wrap.Wrappers.readOnly(cost)
		}
		// create a function to compare units of work. This is not as simple as cost(a) compare cost(b) because it cannot return 0 for
		// unequal nodes (at least for the Ordered comparison)

		// 2.8.0 uses Ordering
		implicit val compareOrdering: Ordering[D] =
			new Ordering[D]
			{
				def compare(a: D, b: D) =
				{
					val base = cost(a) compare cost(b)
					if(base == 0)
						a.hashCode compare b.hashCode // this is required because TreeSet interprets 0 as equal
					else
						base
				}
			}
		// 2.7.x uses an implicit view to Ordered
		implicit val compare =
			(a: D) => new Ordered[D] {
				def compare(b: D) = compareOrdering.compare(a, b)
			}
		new OrderedStrategy(new TreeSet())
	}
}
|
||||
/** A strategy that adds work to a tree and selects the last key as the next work to be done.
* With a cost-based ordering (see MaxPathStrategy), "last" is the highest-cost ready node. */
private class OrderedStrategy[D](ready: TreeSet[D]) extends ScheduleStrategy[D]
{
	def run = new Run
	{
		// the run's mutable view of the ready set; the constructor TreeSet supplies the ordering
		private[this] var readyRun = ready
		def next(max: Int): List[D] = nextImpl(max, Nil)
		// repeatedly removes the last (highest) key until `remaining` items are taken or the set is empty
		private[this] def nextImpl(remaining: Int, accumulated: List[D]): List[D] =
		{
			if(remaining <= 0 || readyRun.isEmpty)
				accumulated
			else
			{
				val next = readyRun.lastKey
				readyRun -= next
				nextImpl(remaining - 1, next :: accumulated)
			}
		}
		def workReady(dep: D) { readyRun += dep }
		def hasReady = !readyRun.isEmpty
		// this strategy never substitutes work, so the reverse mapping is the identity
		def reverseMap(dep: D) = dep :: Nil
	}
}
|
||||
/** A class that represents state for a DagScheduler and that MaxPathStrategy uses to initialize an OrderedStrategy.
* `remainingDeps` maps each node to its not-yet-completed dependencies; `reverseDeps` maps
* each node to the nodes that depend on it. The immutable maps are the pristine graph;
* each Run gets its own mutable copies to consume. */
private final class DagInfo[D <: Dag[D]](val remainingDeps: immutable.Map[D, immutable.Set[D]],
	val reverseDeps: immutable.Map[D, immutable.Set[D]]) extends NotNull
{
	def run = new Run
	final class Run extends NotNull
	{
		// mutable working copies consumed as the run progresses
		val remainingDepsRun = DagInfo.mutableMap(remainingDeps)
		val reverseDepsRun = DagInfo.mutableMap(reverseDeps)
		/** Called when work does not complete successfully and so all work that (transitively) depends on the work
		* must be removed from the maps. */
		def clear(work: D)
		{
			remainingDepsRun -= work
			foreachReverseDep(work)(clear)
		}
		/** Called when work completes properly. `ready` is invoked for each unit of work
		* that is now ready to go (because it was only waiting for `work` to complete).*/
		def complete(work: D, ready: D => Unit)
		{
			def completed(dependsOnCompleted: D)
			{
				for(remainingDependencies <- remainingDepsRun.get(dependsOnCompleted))
				{
					remainingDependencies -= work
					if(remainingDependencies.isEmpty)
					{
						remainingDepsRun -= dependsOnCompleted
						ready(dependsOnCompleted)
					}
				}
			}
			foreachReverseDep(work)(completed)
		}
		// removes `work`'s reverse-dependency entry and applies `f` to each former dependent
		private def foreachReverseDep(work: D)(f: D => Unit) { reverseDepsRun.removeKey(work).foreach(_.foreach(f)) }
	}
}
|
||||
/** Constructs forward and reverse dependency map for the given Dag root node. */
private object DagInfo
{
	/** Constructs the reverse dependency map from the given Dag and
	* puts the forward dependencies into a map. Each reachable node is visited once;
	* memoization through `remainingDeps` makes the traversal safe for shared subgraphs. */
	def apply[D <: Dag[D]](root: D): DagInfo[D] =
	{
		val remainingDeps = new mutable.HashMap[D, immutable.Set[D]]
		val reverseDeps = new mutable.HashMap[D, mutable.Set[D]]
		def visitIfUnvisited(node: D): Unit = remainingDeps.getOrElseUpdate(node, processDependencies(node))
		def processDependencies(node: D): Set[D] =
		{
			val workDependencies = node.dependencies
			workDependencies.foreach(visitIfUnvisited)
			// record this node as a dependent of each of its dependencies
			for(dep <- workDependencies)
				reverseDeps.getOrElseUpdate(dep, new mutable.HashSet[D]) += node
			immutable.HashSet(workDependencies.toSeq: _*)
		}
		visitIfUnvisited(root)
		new DagInfo(immutable.HashMap(remainingDeps.toSeq : _*), immute(reverseDeps) )
	}
	/** Convert a mutable Map with mutable Sets for values to an immutable Map with immutable Sets for values. */
	private def immute[D](map: mutable.Map[D, mutable.Set[D]]): immutable.Map[D, immutable.Set[D]] =
	{
		val immutedSets = map.map { case (key, value) =>(key, immutable.HashSet(value.toSeq : _*)) }
		immutable.HashMap(immutedSets.toSeq :_*)
	}
	/** Convert an immutable Map with immutable Sets for values to a mutable Map with mutable Sets for values. */
	private def mutableMap[D](map: immutable.Map[D, immutable.Set[D]]): mutable.Map[D, mutable.Set[D]] =
	{
		val mutableSets = map.map { case (key, value) =>(key, mutable.HashSet(value.toSeq : _*)) }
		mutable.HashMap(mutableSets.toSeq :_*)
	}
}
|
||||
/** A scheduler that aggregates work from sub-schedulers. Each entry of `schedulers` pairs an
* initial sub-scheduler with the key handed back (through `detailedComplete`) to a client
* when that sub-scheduler finishes all of its work.*/
private final class MultiScheduler[D, T](schedulers: (Scheduler[D], T)*) extends Scheduler[D]
{
	/** Returns a Run instance that represents a scheduler run.*/
	def run = new MultiRun
	final class MultiRun extends Run
	{
		// maps in-flight work to the sub-scheduler run that produced it
		val owners = new mutable.HashMap[D, Scheduler[D]#Run]
		val failures = new mutable.ListBuffer[WorkFailure[D]]
		// active sub-scheduler runs paired with their completion keys
		val schedules = mutable.HashMap[Scheduler[D]#Run, T](schedulers.map { case (scheduler, completeKey) => (scheduler.run, completeKey)} : _*)
		/** Registers an additional sub-scheduler run with the given completion key. */
		def +=(schedule: Scheduler[D]#Run, completeKey: T): Unit = { schedules(schedule) = completeKey }

		def isComplete = schedules.keys.forall(_.isComplete)
		def hasPending = schedules.keys.exists(_.hasPending)
		def next(max: Int) = nextImpl(max, schedules.keys.toList, Nil)

		/** Gathers up to `max` units of work by polling each sub-scheduler run in turn,
		* recording this run as the owner of each unit handed out. */
		private def nextImpl(max: Int, remaining: List[Scheduler[D]#Run], acc: List[D]): Seq[D] =
			if(max == 0 || remaining.isEmpty)
				acc
			else
			{
				val current = remaining.head
				if(current.hasPending)
				{
					val produced = current.next(max).toList
					produced.foreach(work => owners.put(work, current))
					nextImpl(max - produced.size, remaining.tail, produced ::: acc)
				}
				else
					nextImpl(max, remaining.tail, acc)
			}

		def complete(work: D, result: Option[String]): Unit = { detailedComplete(work, result) }
		/** Completes `work` on the sub-scheduler run that owns it. When that run thereby
		* finishes, its failures are recorded, it is removed, and `Some((run, completeKey))`
		* is returned; otherwise None. */
		def detailedComplete(work: D, result: Option[String]) =
		{
			def finish(forOwner: Scheduler[D]#Run) =
			{
				forOwner.complete(work, result)
				if(forOwner.isComplete)
				{
					failures ++= forOwner.failures
					Some(forOwner, schedules.removeKey(forOwner).get)
				}
				else
					None
			}
			owners.removeKey(work).flatMap(finish)
		}
	}
}
|
||||
/** This scheduler allows a unit of work to provide nested work.
*
* When a unit of work that implements CompoundWork is returned for processing by `multi`, this
* scheduler asks it for the scheduler representing its nested work and adds that scheduler to
* `multi`. When the nested scheduler is finished providing work, a final scheduler is run.*/
private final class CompoundScheduler[D](multi: MultiScheduler[D, Option[FinalWork[D]]], finalWorkStrategy: ScheduleStrategy[D]) extends Scheduler[D]
{
	def run: Run = new Run
	{
		val multiRun = multi.run
		val strategyRun = finalWorkStrategy.run

		def isComplete = multiRun.isComplete && !strategyRun.hasReady
		def hasPending = strategyRun.hasReady || multiRun.hasPending || multiRun.schedules.values.exists(_.isDefined)
		/** Completes `work`. If that finishes a nested scheduler, its final work is queued
		* and the compound work is either marked ready (success) or failed (sub-task failure). */
		def complete(work: D, result: Option[String]): Unit =
		{
			for( (scheduler, Some(todo)) <- multiRun.detailedComplete(work, result) )
			{
				multiRun += (todo.doFinally.run, None)
				if(scheduler.failures.isEmpty)
					strategyRun workReady todo.compound
				else
					multiRun.complete(todo.compound, Some("One or more subtasks failed"))
			}
		}
		def failures = multiRun.failures
		def next(max: Int) = nextImpl(max, Nil)
		/** Drains ready final work first, then expands work coming from `multiRun`,
		* recursing until `max` units are gathered or nothing more is available. */
		private def nextImpl(max: Int, acc: List[D]): Seq[D] =
			if(max <= 0)
				acc
			else if(strategyRun.hasReady)
			{
				val ready = strategyRun.next(max)
				nextImpl(max - ready.size, ready ::: acc)
			}
			else if(multiRun.hasPending)
			{
				val multiWork = multiRun.next(max)
				if(multiWork.isEmpty)
					acc
				else
				{
					val expanded = multiWork.foldLeft(acc)(expand)
					nextImpl(max - (expanded.size - acc.size), expanded)
				}
			}
			else
				acc
		/** Compound work contributes its nested scheduler instead of itself;
		* plain work is accumulated directly. */
		private def expand(acc: List[D], work: D): List[D] =
			work match
			{
				case c: CompoundWork[D] =>
					val subWork = c.work
					addFinal(subWork.scheduler, new FinalWork(work, subWork.doFinally))
					acc
				case _ => work :: acc
			}
		private def addFinal(schedule: Scheduler[D], work: FinalWork[D]): Unit = { multiRun += (schedule.run, Some(work)) }
	}
}
|
||||
/** Factory that wraps a scheduler in a CompoundScheduler backed by a fresh MultiScheduler. */
private object CompoundScheduler
{
	def apply[D](scheduler: Scheduler[D], strategy: ScheduleStrategy[D]): Scheduler[D] =
	{
		val multi = new MultiScheduler[D, Option[FinalWork[D]]]( (scheduler, None) )
		new CompoundScheduler(multi, strategy)
	}
}
|
||||
/** Pairs a compound work unit (`compound`) with the scheduler (`doFinally`) for the work to run after its nested work completes. */
private final class FinalWork[D](val compound: D, val doFinally: Scheduler[D]) extends NotNull
|
||||
/** Represents nested work. The work provided by `scheduler` is processed first; the work
* provided by `doFinally` is processed after `scheduler` completes, whether or not
* `scheduler` succeeded.*/
final class SubWork[D] private (val scheduler: Scheduler[D], val doFinally: Scheduler[D]) extends NotNull
object SubWork
{
	def apply[D](scheduler: Scheduler[D], doFinally: Scheduler[D]): SubWork[D] = new SubWork(scheduler, doFinally)
	/** Nested work with an empty finalizer. */
	def apply[D](scheduler: Scheduler[D]): SubWork[D] = SubWork(scheduler, ParallelRunner.emptyScheduler)
	/** Nested work defined by a Dag node, with an empty finalizer. */
	def apply[D <: Dag[D]](node: D): SubWork[D] = SubWork(ParallelRunner.dagScheduler(node))
	/** Nested work defined by a Dag node, finalized by the work in the `doFinally` Dag. */
	def apply[D <: Dag[D]](node: D, doFinally: D): SubWork[D] = SubWork(ParallelRunner.dagScheduler(node), ParallelRunner.dagScheduler(doFinally))
}
|
||||
/** Work implementing this interface provides nested work to be done before the work itself is processed.*/
trait CompoundWork[D] extends NotNull
{
	/** The nested work to run, including any finalizer. */
	def work: SubWork[D]
}
|
||||
|
|
@ -0,0 +1,353 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import Path._
|
||||
import FileUtilities.wrapNull
|
||||
import java.io.File
|
||||
import scala.collection.mutable.{Set, HashSet}
|
||||
|
||||
/** A Path represents a file in a project.
* @see sbt.PathFinder*/
sealed abstract class Path extends PathFinder with NotNull
{
	/** Marks this path as a base directory. Copy and zip functions use the base directory
	* to determine the relative path used at the destination. For example, if
	*
	* <code>((a / b) ##) / x / y</code>
	*
	* is copied to directory 'd', the copied path is
	*
	* <code>d / x / y</code>
	*
	* The <code>relativePath</code> method returns the path relative to the base directory. */
	override def ## : Path = new BaseDirectory(this)
	// a Path, viewed as a finder, selects itself when its file exists
	private[sbt] def addTo(pathSet: Set[Path]): Unit =
	{
		if(asFile.exists)
			pathSet += this
	}
	override def / (component: String): Path =
		component match
		{
			case "." => this
			case c => new RelativePath(this, c)
		}
	/** True if and only if the file represented by this path exists.*/
	def exists = asFile.exists
	/** True if and only if the file represented by this path is a directory.*/
	def isDirectory = asFile.isDirectory
	/** The last modified time of the file represented by this path.*/
	def lastModified = asFile.lastModified
	/** True if and only if this path's file exists and the file for 'p' either does not
	* exist or was modified before this path's file.*/
	def newerThan(p: Path): Boolean = exists && (!p.exists || lastModified > p.lastModified)
	/** True if and only if this path's file does not exist or the file for 'p' exists and
	* was modified after this path's file.*/
	def olderThan(p: Path): Boolean = p newerThan this
	/** The file represented by this path.*/
	def asFile: File
	/** The file represented by this path converted to a <code>URL</code>.*/
	def asURL = asFile.toURI.toURL
	/** The string representation of this path relative to the base directory. The project
	* directory is the default base directory when none is set explicitly with <code>##</code>.*/
	lazy val relativePath: String = relativePathString(sep.toString)
	def relativePathString(separator: String): String
	final def projectRelativePath: String = projectRelativePathString(sep.toString)
	def projectRelativePathString(separator: String): String
	def absolutePath: String = asFile.getAbsolutePath
	private[sbt] def prependTo(s: String): String

	/** Equality of Paths is defined in terms of the underlying <code>File</code>.*/
	override final def equals(other: Any) =
		other match
		{
			case op: Path => asFile == op.asFile
			case _ => false
		}
	/** The hash code of a Path is that of the underlying <code>File</code>.*/
	override final def hashCode = asFile.hashCode
}
|
||||
/** A Path acting as the base directory for paths built on top of it: its relative path is empty. */
private final class BaseDirectory(private[sbt] val path: Path) extends Path
{
	// already a base directory, so ## is idempotent
	override def ## : Path = this
	override def toString = path.toString
	def asFile = path.asFile
	def relativePathString(separator: String) = ""
	def projectRelativePathString(separator: String) = path.projectRelativePathString(separator)
	private[sbt] def prependTo(s: String) = "." + sep + s
}
|
||||
/** A Path wrapping an arbitrary File, normalized to an absolute file. */
private[sbt] final class FilePath(file: File) extends Path
{
	lazy val asFile = absolute(file)
	override def toString = absolutePath
	// a plain file is its own base: the relative path is just the file name
	def relativePathString(separator: String) = asFile.getName
	def projectRelativePathString(separator: String) = relativePathString(separator)
	private[sbt] def prependTo(s: String) = absolutePath + sep + s
}
|
||||
// toRoot is the path between this and the root project path; it is used for toString
private[sbt] final class ProjectDirectory(file: File, toRoot: Option[Path]) extends Path
{
	def this(file: File) = this(file, None)
	lazy val asFile = absolute(file)
	override def toString = foldToRoot(_.toString, ".")
	// the project directory is the default base, so its relative paths are empty
	def relativePathString(separator: String) = ""
	def projectRelativePathString(separator: String) = ""
	private[sbt] def prependTo(s: String) = foldToRoot(_.prependTo(s), "." + sep + s)
	/** Applies `f` to the root path when present, otherwise returns `orElse`. */
	private[sbt] def foldToRoot[T](f: Path => T, orElse: T) = toRoot.map(f).getOrElse(orElse)
}
|
||||
/** A Path formed by appending a single, validated component to a parent Path. */
private[sbt] final class RelativePath(val parentPath: Path, val component: String) extends Path
{
	checkComponent(component)
	override def toString = parentPath prependTo component
	lazy val asFile = new File(parentPath.asFile, component)
	private[sbt] def prependTo(s: String) = parentPath prependTo (component + sep + s)
	def relativePathString(separator: String) = append(parentPath.relativePathString(separator), separator)
	def projectRelativePathString(separator: String) = append(parentPath.projectRelativePathString(separator), separator)
	/** Joins the parent's relative string with this component, omitting the separator
	* when the parent's relative string is empty. */
	private def append(parentRelative: String, separator: String) =
		if(parentRelative.isEmpty)
			component
		else
			parentRelative + separator + component
}
|
||||
object Path
{
	import java.io.File
	import File.pathSeparator

	// NOTE(review): System.getProperty may return null for an undefined property — TODO confirm callers only pass defined properties
	def fileProperty(name: String) = Path.fromFile(System.getProperty(name))
	def userHome = fileProperty("user.home")

	/** The given file with its URI normalized and made absolute. */
	def absolute(file: File) = new File(file.toURI.normalize).getAbsoluteFile
	/** Joins the absolute path String of each <code>Path</code> with the platform's path separator.*/
	def makeString(paths: Iterable[Path]): String = makeString(paths, pathSeparator)
	/** Joins the absolute path String of each <code>Path</code> with the given separator String.*/
	def makeString(paths: Iterable[Path], sep: String): String = paths.map(_.absolutePath).mkString(sep)

	/** Joins the relative path String of each <code>Path</code> with the platform's path separator.*/
	def makeRelativeString(paths: Iterable[Path]): String = paths.map(_.relativePathString(sep.toString)).mkString(pathSeparator)

	/** Splits `value` on path separators and resolves each non-empty piece against `projectPath`. */
	def splitString(projectPath: Path, value: String): Iterable[Path] =
		for(pathString <- FileUtilities.pathSplit(value) if pathString.length > 0) yield
			Path.fromString(projectPath, pathString)

	/** A <code>PathFinder</code> that always produces the empty set of <code>Path</code>s.*/
	def emptyPathFinder =
		new PathFinder
		{
			private[sbt] def addTo(pathSet: Set[Path]) {}
		}
	/** A <code>PathFinder</code> that selects the paths provided by the <code>paths</code> argument,
	* which is reevaluated on each call to the <code>PathFinder</code>'s <code>get</code> method. */
	def lazyPathFinder(paths: => Iterable[Path]): PathFinder =
		new PathFinder
		{
			private[sbt] def addTo(pathSet: Set[Path]) = pathSet ++= paths
		}
	/** A <code>PathFinder</code> over the (reevaluated) files produced by <code>files</code>. */
	def finder(files: => Iterable[File]): PathFinder = lazyPathFinder { fromFiles(files) }

	/** The separator character of the platform.*/
	val sep = java.io.File.separatorChar

	/** Checks that `c` is a legal path component: non-empty, free of forward and backward
	* slashes, and neither '.' nor '..'. Returns `c` when valid; throws otherwise. */
	def checkComponent(c: String): String =
	{
		require(c.length > 0, "Path component must not be empty")
		require(c.indexOf('/') == -1, "Path component '" + c + "' must not have forward slashes in it")
		require(c.indexOf('\\') == -1, "Path component '" + c + "' must not have backslashes in it")
		require(c != "..", "Path component cannot be '..'")
		require(c != ".", "Path component cannot be '.'")
		c
	}
	/** Resolves a path string (with '/' or '\' separators) against the given base path. */
	def fromString(basePath: Path, value: String): Path =
		if(value.isEmpty)
			basePath
		else
		{
			val components = value.split("""[/\\]""")
			components.foldLeft(basePath)( (path, component) => path / component )
		}
	/** The nearest enclosing base directory of `path`, if any. */
	def baseAncestor(path: Path): Option[Path] =
		path match
		{
			case pd: ProjectDirectory => None
			case fp: FilePath => None
			case rp: RelativePath => baseAncestor(rp.parentPath)
			case b: BaseDirectory => Some(b.path)
		}

	def relativize(basePath: Path, path: Path): Option[Path] = relativize(basePath, path.asFile)
	def relativize(basePath: Path, file: File): Option[Path] =
		basePathString(basePath) flatMap { baseString => relativize(basePath, baseString, file) }
	/** `file` as a Path under `basePath` when its absolute path starts with `basePathString`. */
	def relativize(basePath: Path, basePathString: String, file: File): Option[Path] =
	{
		val pathString = file.getAbsolutePath
		if(pathString.startsWith(basePathString))
			Some(fromString(basePath, pathString.substring(basePathString.length)))
		else
			None
	}
	/** The path of `file` relative to `baseFile`, when `baseFile` is a directory whose
	* absolute path prefixes that of `file`. */
	private[sbt] def relativize(baseFile: File, file: File): Option[String] =
	{
		val pathString = file.getAbsolutePath
		baseFileString(baseFile) flatMap { baseString =>
			if(pathString.startsWith(baseString))
				Some(pathString.substring(baseString.length))
			else
				None
		}
	}
	private[sbt] def basePathString(basePath: Path): Option[String] = baseFileString(basePath.asFile)
	/** The absolute path of `baseFile` with a trailing separator, or None when it is not a directory. */
	private def baseFileString(baseFile: File): Option[String] =
		if(baseFile.isDirectory)
		{
			val cp = baseFile.getAbsolutePath
			assert(cp.length > 0)
			if(cp.charAt(cp.length - 1) == File.separatorChar)
				Some(cp)
			else
				Some(cp + File.separatorChar)
		}
		else
			None
	def fromFile(file: String): Path = fromFile(new File(file))
	def fromFile(file: File): Path = new FilePath(file)
	def fromFiles(files: Iterable[File]): Iterable[Path] = files.map(fromFile)
}
|
||||
|
||||
/** A path finder constructs a set of paths. The set is computed by each call to
* <code>get</code>, so two calls may differ if the underlying filesystem has changed.*/
sealed abstract class PathFinder extends NotNull
{
	/** The union of the paths found by this <code>PathFinder</code> with the paths found by 'paths'.*/
	def +++(paths: PathFinder): PathFinder = new Paths(this, paths)
	/** Excludes all paths from <code>excludePaths</code> from the paths selected by this <code>PathFinder</code>.*/
	def ---(excludePaths: PathFinder): PathFinder = new ExcludePaths(this, excludePaths)
	/** Selects all descendents of paths selected by this finder whose name matches <code>filter</code>.*/
	def **(filter: FileFilter): PathFinder = new DescendentOrSelfPathFinder(this, filter)
	/** Selects all descendents of paths selected by this finder.*/
	def *** : PathFinder = **(AllPassFilter)
	/** Selects all immediate children of paths selected by this finder whose name matches <code>filter</code>.*/
	def *(filter: FileFilter): PathFinder = new ChildPathFinder(this, filter)
	/** Selects all immediate children of paths selected by this finder with name <code>literal</code>.*/
	def / (literal: String): PathFinder = new ChildPathFinder(this, new ExactFilter(literal))
	/** Selects all immediate children of paths selected by this finder with name <code>literal</code>.*/
	final def \ (literal: String): PathFinder = this / literal

	/** Makes the paths selected by this finder into base directories.
	* @see Path.##
	*/
	def ## : PathFinder = new BasePathFinder(this)

	/** Selects all descendent paths with a name matching <code>include</code> that do not
	* have an intermediate path with a name matching <code>intermediateExclude</code>.
	* Typical usage is:
	*
	* <code>descendentsExcept("*.jar", ".svn")</code>*/
	def descendentsExcept(include: FileFilter, intermediateExclude: FileFilter): PathFinder =
		(this ** include) --- (this ** intermediateExclude ** include)

	/** Evaluates this finder against the filesystem as it is at the time of the call.
	* If the filesystem changes, two calls to this method might differ.*/
	final def get: scala.collection.Set[Path] =
	{
		val pathSet = new HashSet[Path]
		addTo(pathSet)
		wrap.Wrappers.readOnly(pathSet)
	}
	/** A finder producing only the paths selected by this finder that satisfy `f`. */
	final def filter(f: Path => Boolean): PathFinder = Path.lazyPathFinder(get.filter(f))
	/** A finder producing the union of applying `f` to each path selected by this finder. */
	final def flatMap(f: Path => PathFinder): PathFinder = Path.lazyPathFinder(get.flatMap(p => f(p).get))
	final def getFiles: scala.collection.Set[File] = Set( get.map(_.asFile).toSeq : _*)
	final def getPaths: scala.collection.Set[String] = Set( get.map(_.absolutePath).toSeq : _*)
	final def getRelativePaths: scala.collection.Set[String] = Set( get.map(_.relativePath).toSeq : _*)
	// subclasses add the paths they select to the given mutable set
	private[sbt] def addTo(pathSet: Set[Path])

	final def absString = Path.makeString(get)
	final def relativeString = Path.makeRelativeString(get)
	override def toString = getRelativePaths.mkString("\n ", "\n ","")
}
|
||||
/** Applies <code>##</code> to every path produced by `base`. */
private class BasePathFinder(base: PathFinder) extends PathFinder
{
	private[sbt] def addTo(pathSet: Set[Path]): Unit =
		for(path <- base.get)
			pathSet += (path ##)
}
|
||||
/** Common functionality for finders that filter children of the paths from a parent finder. */
private abstract class FilterPath extends PathFinder with FileFilter
{
	def parent: PathFinder
	def filter: FileFilter
	final def accept(file: File) = filter.accept(file)

	/** Adds to `pathSet` each child of `path` accepted by this filter. */
	protected def handlePath(path: Path, pathSet: Set[Path]): Unit =
		for(matchedFile <- wrapNull(path.asFile.listFiles(this)))
			pathSet += path / matchedFile.getName
}
|
||||
/** Selects each path from `parent` that matches `filter`, together with all matching descendents. */
private class DescendentOrSelfPathFinder(val parent: PathFinder, val filter: FileFilter) extends FilterPath
{
	private[sbt] def addTo(pathSet: Set[Path]): Unit =
		for(path <- parent.get)
		{
			if(accept(path.asFile))
				pathSet += path
			addDescendents(path, pathSet)
		}
	/** Recursively adds matching children of `path` and of every subdirectory below it. */
	private def addDescendents(path: Path, pathSet: Set[Path]): Unit =
	{
		handlePath(path, pathSet)
		for(childDirectory <- wrapNull(path.asFile.listFiles(DirectoryFilter)))
			addDescendents(path / childDirectory.getName, pathSet)
	}
}
|
||||
/** Selects the immediate children of paths from `parent` that match `filter`. */
private class ChildPathFinder(val parent: PathFinder, val filter: FileFilter) extends FilterPath
{
	private[sbt] def addTo(pathSet: Set[Path]): Unit =
		for(path <- parent.get)
			handlePath(path, pathSet)
}
|
||||
/** The union of the paths selected by two finders. */
private class Paths(a: PathFinder, b: PathFinder) extends PathFinder
{
	private[sbt] def addTo(pathSet: Set[Path]): Unit =
	{
		a.addTo(pathSet)
		b.addTo(pathSet)
	}
}
|
||||
/** The paths selected by `include` minus those selected by `exclude`. */
private class ExcludePaths(include: PathFinder, exclude: PathFinder) extends PathFinder
{
	private[sbt] def addTo(pathSet: Set[Path]): Unit =
	{
		val included = new HashSet[Path]
		include.addTo(included)

		val excluded = new HashSet[Path]
		exclude.addTo(excluded)

		included --= excluded
		pathSet ++= included
	}
}
|
||||
|
|
@ -0,0 +1,147 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder}
|
||||
import java.io.{Closeable, File, IOException}
|
||||
import java.io.{BufferedReader, InputStream, InputStreamReader, OutputStream, PipedInputStream, PipedOutputStream}
|
||||
import java.net.URL
|
||||
|
||||
/** Methods for constructing simple commands that can then be combined. */
object Process
{
	implicit def apply(command: String): ProcessBuilder = apply(command, None)
	implicit def apply(command: Seq[String]): ProcessBuilder = apply(command.toArray, None)
	def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command :: arguments.toList, None)
	/** create ProcessBuilder with working dir set to path and extra environment variables */
	def apply(command: String, cwd: Path, extraEnv: (String,String)*): ProcessBuilder = apply(command, cwd.asFile, extraEnv : _*)
	/** create ProcessBuilder with working dir set to File and extra environment variables */
	def apply(command: String, cwd: File, extraEnv: (String,String)*): ProcessBuilder =
		apply(command, Some(cwd), extraEnv : _*)
	/** create ProcessBuilder with working dir optionally set to File and extra environment variables */
	def apply(command: String, cwd: Option[File], extraEnv: (String,String)*): ProcessBuilder =
	{
		// The command is split on whitespace only; CommandParser is not used because it
		// treats \ as an escape for ", which breaks on Windows.
		apply(command.split("""\s+"""), cwd, extraEnv : _*)
		/*CommandParser.parse(command) match {
			case Left(errorMsg) => error(errorMsg)
			case Right((cmd, args)) => apply(cmd :: args, cwd, extraEnv : _*)
		}*/
	}
	/** create ProcessBuilder with working dir optionally set to File and extra environment variables */
	def apply(command: Seq[String], cwd: Option[File], extraEnv: (String,String)*): ProcessBuilder =
	{
		val builder = new JProcessBuilder(command.toArray : _*)
		cwd.foreach(builder directory _)
		extraEnv.foreach { case (k, v) => builder.environment.put(k, v) }
		apply(builder)
	}
	implicit def apply(builder: JProcessBuilder): ProcessBuilder = new SimpleProcessBuilder(builder)
	implicit def apply(file: File): FilePartialBuilder = new FileBuilder(file)
	implicit def apply(url: URL): URLPartialBuilder = new URLBuilder(url)
	implicit def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim)
	implicit def applySeq[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = builders.map(convert)
	/** A builder that exits with code 0 when `value` is true and 1 otherwise. */
	def apply(value: Boolean): ProcessBuilder = apply(value.toString, if(value) 0 else 1)
	/** A builder named `name` whose exit code is the lazily evaluated `exitValue`. */
	def apply(name: String, exitValue: => Int): ProcessBuilder = new DummyProcessBuilder(name, exitValue)

	/** Concatenates the output of the given sources, in order. */
	def cat(file: SourcePartialBuilder, files: SourcePartialBuilder*): ProcessBuilder = cat(file :: files.toList)
	def cat(files: Seq[SourcePartialBuilder]): ProcessBuilder =
	{
		require(!files.isEmpty)
		files.map(_.cat).reduceLeft(_ #&& _)
	}
}
|
||||
|
||||
/** Mixed into builders whose output stream can be redirected. */
trait SourcePartialBuilder extends NotNull
{
	/** Writes the output stream of this process to the given file. */
	def #> (f: File): ProcessBuilder = toFile(f, false)
	/** Appends the output stream of this process to the given file. */
	def #>> (f: File): ProcessBuilder = toFile(f, true)
	/** Writes the output stream of this process to the given OutputStream. The
	* argument is call-by-name, so the stream is recreated, written, and closed each
	* time this process is executed. */
	def #>(out: => OutputStream): ProcessBuilder = #> (new OutputStreamBuilder(out))
	/** Pipes the output stream of this process into the input of `b`. */
	def #>(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(toSource, b, false)
	private def toFile(f: File, append: Boolean) = #> (new FileOutput(f, append))
	def cat = toSource
	protected def toSource: ProcessBuilder
}
|
||||
/** Mixed into builders whose input stream can be fed from a source. */
trait SinkPartialBuilder extends NotNull
{
	/** Reads the given file into the input stream of this process. */
	def #< (f: File): ProcessBuilder = #< (new FileInput(f))
	/** Reads the given URL into the input stream of this process. */
	def #< (f: URL): ProcessBuilder = #< (new URLInput(f))
	/** Reads the given InputStream into the input stream of this process. The
	* argument is call-by-name, so the stream is recreated, read, and closed each
	* time this process is executed. */
	def #<(in: => InputStream): ProcessBuilder = #< (new InputStreamBuilder(in))
	/** Pipes the output of `b` into the input stream of this process. */
	def #<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, toSink, false)
	protected def toSink: ProcessBuilder
}
|
||||
|
||||
/** A builder that acts only as a source of data (extends SourcePartialBuilder only); returned for URLs. */
trait URLPartialBuilder extends SourcePartialBuilder
|
||||
/** A builder for a file, usable both as a source and as a sink; the #<< variants append to the file. */
trait FilePartialBuilder extends SinkPartialBuilder with SourcePartialBuilder
{
	/** Appends the contents of the given file to this file. */
	def #<<(f: File): ProcessBuilder
	/** Appends the contents of the given URL to this file. */
	def #<<(u: URL): ProcessBuilder
	/** Appends the contents of the given (call-by-name) InputStream to this file. */
	def #<<(i: => InputStream): ProcessBuilder
	/** Appends the output of the given process to this file. */
	def #<<(p: ProcessBuilder): ProcessBuilder
}
|
||||
|
||||
/** Represents a process that is running or has finished running.
* It may be a compound process with several underlying native processes (such as `a #&& b`).*/
trait Process extends NotNull
{
	/** Blocks until this process exits and returns the exit code.*/
	def exitValue(): Int
	/** Destroys this process. */
	def destroy(): Unit
}
|
||||
/** Represents a runnable process. */
trait ProcessBuilder extends SourcePartialBuilder with SinkPartialBuilder
{
	/** Runs the process, blocks until it exits, and returns the exit code.
	* Standard output and error are sent to the console.*/
	def ! : Int
	/** Runs the process, blocks until it exits, and returns the exit code.
	* Standard output and error are sent to the given Logger.*/
	def !(log: Logger): Int
	/** Like `!`, but the newly started process also reads from standard input of the current process.*/
	def !< : Int
	/** Like `!(log)`, but the newly started process also reads from standard input of the current process.*/
	def !<(log: Logger) : Int
	/** Starts the process. Standard output and error are sent to the console.*/
	def run(): Process
	/** Starts the process. Standard output and error are sent to the given Logger.*/
	def run(log: Logger): Process
	/** Starts the process. I/O is handled by the given ProcessIO instance.*/
	def run(io: ProcessIO): Process
	/** Starts the process, sending standard output and error to the console. The newly
	* started process reads from standard input of the current process if `connectInput` is true.*/
	def run(connectInput: Boolean): Process
	/** Starts the process, sending standard output and error to the given Logger. The newly
	* started process reads from standard input of the current process if `connectInput` is true.*/
	def run(log: Logger, connectInput: Boolean): Process

	/** Constructs a command that runs this command first and then `other` if this command succeeds.*/
	def #&& (other: ProcessBuilder): ProcessBuilder
	/** Constructs a command that runs this command first and then `other` if this command does not succeed.*/
	def #|| (other: ProcessBuilder): ProcessBuilder
	/** Constructs a command that runs this command and pipes its output to `other`. `other` must be a simple command.*/
	def #| (other: ProcessBuilder): ProcessBuilder
	/** Constructs a command that runs this command and then `other`. The exit code is that of `other`.*/
	def ## (other: ProcessBuilder): ProcessBuilder

	/** True when this builder may appear on the left side of `#|`. */
	def canPipeTo: Boolean
}
|
||||
/** Bundles the three handlers for a process's standard streams: a writer for the process's
* standard input and consumers for its standard output and standard error.
* Each handler is invoked on its own, separate thread. */
final class ProcessIO(val writeInput: OutputStream => Unit, val processOutput: InputStream => Unit, val processError: InputStream => Unit) extends NotNull
{
	/** A copy of this ProcessIO with the standard-output handler replaced by `process`. */
	def withOutput(process: InputStream => Unit): ProcessIO =
		new ProcessIO(writeInput, process, processError)
	/** A copy of this ProcessIO with the standard-error handler replaced by `process`. */
	def withError(process: InputStream => Unit): ProcessIO =
		new ProcessIO(writeInput, processOutput, process)
	/** A copy of this ProcessIO with the standard-input writer replaced by `write`. */
	def withInput(write: OutputStream => Unit): ProcessIO =
		new ProcessIO(write, processOutput, processError)
}
|
||||
|
|
@ -0,0 +1,534 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah, David MacIver
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import xsbti.{AppProvider, ScalaProvider}
|
||||
import xsbt.{AnalyzingCompiler, ScalaInstance}
|
||||
import java.io.File
|
||||
import java.net.URLClassLoader
|
||||
import scala.collection._
|
||||
import FileUtilities._
|
||||
import Project._
|
||||
|
||||
trait Project extends TaskManager with Dag[Project] with BasicEnvironment
{
	/** The logger for this project definition. */
	final val log: Logger = logImpl
	/** Creates the project logger: a buffered wrapper over the logger from `info`, set to `defaultLoggingLevel`. */
	protected def logImpl: Logger =
	{
		val lg = new BufferedLogger(info.logger)
		lg.setLevel(defaultLoggingLevel)
		lg
	}
	/** The level the project logger is initialized to; override to change the default. */
	protected def defaultLoggingLevel = Level.Info

	/** Marker trait for options accepted by this project's actions. */
	trait ActionOption extends NotNull

	/** Basic project information. */
	def info: ProjectInfo
	/** The project name. */
	def name: String = projectName.value
	/** The project version. */
	def version: Version = projectVersion.value
	/** The project organization. */
	def organization: String = projectOrganization.value
	/** True if the project should cater to a quick throwaway project setup.*/
	def scratch = projectScratch.value

	final type ManagerType = Project
	final type ManagedTask = Project#Task
	/** The tasks declared on this project. */
	def tasks: Map[String, Task]
	/** The task methods declared on this project */
	def methods: Map[String, MethodTask]
	/** The names of all available tasks that may be called through `act`. These include
	* the names of the Tasks in `tasks` and those of all dependencies.*/
	def taskNames: Iterable[String] = deepTasks.keys.toList
	/** The names of all available method tasks that may be called through `call`. These
	* only include the names of the MethodTasks in `methods` and not those of dependencies.*/
	def methodNames: Iterable[String] = methods.keys.toList
	/** A description of all available method tasks in this project, but not of dependencies. */
	def methodList: String = descriptionList(methods)
	/** A description of all available tasks in this project and all dependencies. If there
	* are different tasks with the same name, only one will be included. */
	def taskList: String = descriptionList(deepTasks)

	/** Looks up the name under which `task` is declared on this project (reference equality), if any. */
	final def taskName(task: Task) = tasks.find( _._2 eq task ).map(_._1)
	/** A description of all available tasks in this project and all dependencies and all
	* available method tasks in this project, but not of dependencies. If there
	* are different tasks or methods with the same name, only one will be included. */
	def taskAndMethodList: String = descriptionList(tasksAndMethods)
	/** The actions and methods declared on this project. */
	final def tasksAndMethods: Map[String, Described] =
		immutable.TreeMap.empty[String, Described] ++ methods ++ tasks
	/** Formats one tab-indented "name: description" line per entry of `described`. */
	private def descriptionList(described: Map[String, Described]): String =
	{
		val buffer = new StringBuilder
		for((name, d) <- described)
			buffer.append("\t" + name + d.description.map(x => ": " + x).getOrElse("") + "\n")
		buffer.toString
	}
	/** Combines the method task maps of this project and all dependencies.*/
	private[sbt] def deepMethods: Map[String, Project#MethodTask] = deep(_.methods)
	/** Combines the task maps of this project and all dependencies.*/
	private[sbt] def deepTasks: Map[String, Project#Task] = deep(_.tasks)
	/** Merges `p`'s map over this project and its transitive dependencies; later (dependent) entries win on name clashes. */
	private def deep[T](p: Project => Map[String, T]): Map[String, T] =
	{
		var tasks: immutable.SortedMap[String,T] = new immutable.TreeMap[String, T]
		for(dependentProject <- topologicalSort)
			tasks ++= p(dependentProject).elements
		tasks
	}
	/** A map of names to projects for all subprojects of this project. These are typically explicitly
	* specified for the project and are different from those specified in the project constructor. The
	* main use within sbt is in ParentProject.*/
	def subProjects: Map[String, Project] = immutable.Map.empty
	/** This project plus all dependencies and subprojects, in topological order. */
	def projectClosure: List[Project] = Dag.topologicalSort(this)(p => p.dependencies ++ p.subProjects.values.toList)

	/** Invokes the method task `name` on this project with `parameters`; returns an error message on failure, None on success. */
	def call(name: String, parameters: Array[String]): Option[String] =
	{
		methods.get(name) match
		{
			case Some(method) =>run(method(parameters), name)
			case None => Some("Method '" + name + "' does not exist.")
		}
	}
	/** Runs `task` (possibly in parallel, per `parallelExecution`); collects distinct error messages, if any. */
	private def run(task: Project#Task, taskName: String): Option[String] =
		impl.RunTask(task, taskName, parallelExecution) match
		{
			case Nil => None
			case x => Some(Set(x: _*).mkString("\n"))
		}

	/** Executes the task with the given name. This involves executing the task for all
	* project dependencies (transitive) and then for this project. Not every dependency
	* must define a task with the given name. If this project and all dependencies
	* do not define a task with the given name, an error is generated indicating this.*/
	def act(name: String): Option[String] =
	{
		val ordered = topologicalSort
		val definedTasks = ordered.flatMap(_.tasks.get(name).toList)
		// A synthetic task depending on every non-interactive task named `name` across the closure.
		def virtualTask(name: String): Task = new Task(None, definedTasks.filter(!_.interactive), false, None)

		if(definedTasks.isEmpty)
			Some("Action '" + name + "' does not exist.")
		else
		{
			tasks.get(name) match
			{
				case None =>
					val virtual = virtualTask(name)
					// Sizes differ iff some defined task was interactive and got filtered out.
					if(virtual.dependencies.size == definedTasks.size)
						run(virtual, name)
					else
					{
						Some("Cannot run interactive action '" + name +
						"' defined on multiple subprojects (change to the desired project with 'project <name>').")
					}
				case Some(task) => run(task, name)
			}
		}
	}

	/** Logs the list of projects at the debug level.*/
	private def showBuildOrder(order: Iterable[Project])
	{
		log.debug("Project build order:")
		order.foreach(x => log.debug(" " + x.name) )
		log.debug("")
	}

	/** Converts a String to a path relative to the project directory of this project. */
	implicit def path(component: String): Path = info.projectPath / component
	/** Converts a String to a simple name filter. * has the special meaning: zero or more of any character */
	implicit def filter(simplePattern: String): NameFilter = GlobFilter(simplePattern)

	/** Loads the project at the given path and declares the project to have the given
	* dependencies. This method will configure the project according to the
	* project/ directory in the directory denoted by path.*/
	def project(path: Path, deps: Project*): Project = getProject(Project.loadProject(path, deps, Some(this), log, info.app, info.buildScalaVersion), path)

	/** Loads the project at the given path using the given name and inheriting this project's version.
	* The builder class is the default builder class, sbt.DefaultProject. The loaded project is declared
	* to have the given dependencies. Any project/build/ directory for the project is ignored.*/
	def project(path: Path, name: String, deps: Project*): Project = project(path, name, Project.DefaultBuilderClass, deps: _*)

	/** Loads the project at the given path using the given name and inheriting its version from this project.
	* The Project implementation used is given by builderClass. The dependencies are declared to be
	* deps. Any project/build/ directory for the project is ignored.*/
	def project[P <: Project](path: Path, name: String, builderClass: Class[P], deps: Project*): P =
	{
		// Guard against infinite recursion: a project constructing a subproject of its own class.
		require(builderClass != this.getClass, "Cannot recursively construct projects of same type: " + builderClass.getName)
		project(path, name, info => Project.constructProject(info, builderClass), deps: _*)
	}
	/** Loads the project at the given path using the given name and inheriting its version from this project.
	* The construct function is used to obtain the Project instance. Any project/build/ directory for the project
	* is ignored. The project is declared to have the dependencies given by deps.*/
	def project[P <: Project](path: Path, name: String, construct: ProjectInfo => P, deps: Project*): P =
		initialize(construct(ProjectInfo(path.asFile, deps, Some(this))(log, info.app, info.buildScalaVersion)), Some(new SetupInfo(name, None, None, false)), log)

	/** Initializes the project directories when a user has requested that sbt create a new project.*/
	def initializeDirectories() {}
	/** True if projects should be run in parallel, false if they should run sequentially.
	* This only has an effect for multi-projects. If this project has a parent, this value is
	* inherited from that parent project.*/
	def parallelExecution: Boolean =
		info.parent match
		{
			case Some(parent) => parent.parallelExecution
			case None => false
		}

	/** True if a project and its dependencies should be checked to ensure that their
	* output directories are not the same, false if they should not be checked. */
	def shouldCheckOutputDirectories = true

	/** The list of directories to which this project writes. This is used to verify that multiple
	* projects have not been defined with the same output directories. */
	def outputDirectories: Iterable[Path] = outputPath :: Nil
	/** The topmost project in this project's parent chain. */
	def rootProject = Project.rootProject(this)
	/** The path to the file that provides persistence for properties.*/
	final def envBackingPath = info.builderPath / Project.DefaultEnvBackingName
	/** The path to the file that provides persistence for history. */
	def historyPath: Option[Path] = Some(outputRootPath / ".history")
	/** The output path, with a Scala-version-specific subdirectory appended when cross-building is enabled. */
	def outputPath = crossPath(outputRootPath)
	/** The root output path, without any cross-building component. */
	def outputRootPath: Path = outputDirectoryName
	/** The name of the output directory ("target" by default). */
	def outputDirectoryName = DefaultOutputDirectoryName

	/** Unwraps a LoadResult, turning every failure case into a runtime error mentioning `path`. */
	private def getProject(result: LoadResult, path: Path): Project =
		result match
		{
			case LoadSetupDeclined => Predef.error("No project exists at path " + path)
			case lse: LoadSetupError => Predef.error("Error setting up new project at path " + path + " : " + lse.message)
			case err: LoadError => Predef.error("Error loading project at path " + path + " : " + err.message)
			case success: LoadSuccess => success.project
		}

	/** The property for the project's version. */
	final val projectVersion = property[Version]
	/** The property for the project's name. */
	final val projectName = propertyLocalF[String](NonEmptyStringFormat)
	/** The property for the project's organization. Defaults to the parent project's organization or the project name if there is no parent. */
	final val projectOrganization = propertyOptional[String](normalizedName, true)
	/** The property that defines the version of Scala to use with the project definition. This can be different
	* from the version of Scala used to build the project (current version used is buildScalaVersion, available are in buildScalaVersions).
	* This property is only read by `sbt` on startup and reload.*/
	final val defScalaVersion = propertyOptional[String](info.definitionScalaVersion)
	/** The property to specify the sbt revision to use.
	* Note that this can be a dynamic revision (see Ivy documentation for details on dynamic revisions).
	* Therefore, use `sbt.ComponentManager.version` and `timestamp` for actual version information. */
	final val sbtVersion = property[String]
	final val projectInitialize = propertyOptional[Boolean](false)
	final val projectScratch = propertyOptional[Boolean](false, true)
	/** The property that defines the versions of Scala to build this project against as a comma separated string. This can be
	* different from the version of Scala used to build and run the project definition (defined by defScalaVersion).
	* This property is only read by `sbt` on startup and reload. The definitive source for the version of Scala currently
	* being used is buildScalaVersion.*/
	final val buildScalaVersions = propertyOptional[String](defScalaVersion.value, true)
	/** The definitive source for the version of Scala being requested to *build* the project.
	* For the full version information, see buildScalaInstance.actualVersion.*/
	def buildScalaVersion = info.buildScalaVersion.getOrElse(crossScalaVersions.first)
	/** True when the actual build Scala version is in the 2.7.x series. */
	private[sbt] def isScala27 = buildScalaInstance.actualVersion.startsWith("2.7.")

	/** Manages sbt components, coordinated through the launcher's global lock. */
	def componentManager = new ComponentManager(info.launcher.globalLock, info.app.components, log)
	def buildScalaInstance = buildScalaInstance0
	/** Resolves the ScalaInstance for the build Scala version, interactively prompting for a
	* replacement version when retrieval of the default version fails. */
	final def buildScalaInstance0: ScalaInstance =
	{
		val scalaVersion = buildScalaVersion
		try { getScalaInstance(scalaVersion) }
		catch { case e: xsbti.RetrieveException if info.buildScalaVersion.isEmpty => // only catch the exception if this is the default Scala version
			log.error(e.getMessage)
			SimpleReader.readLine("\nProvide a new Scala version or press enter to exit: ") match
			{
				case Some(v) if v.length > 0=>
					buildScalaVersions() = replace(scalaVersion, v)
					saveEnvironment()
					buildScalaInstance0
				case _ => throw e
			}
		}
	}
	/** Replaces whole-word occurrences of `originalV` with `newV` in the buildScalaVersions property value. */
	private def replace(originalV: String, newV: String) = buildScalaVersions.value.replaceAll("""\b\Q""" + originalV + """\E\b""", newV)
	/** Finds a locally defined ScalaInstance for `version`, falling back to retrieval through the launcher. */
	def getScalaInstance(version: String) =
		localScalaInstances.find(_.version == version) getOrElse
			xsbt.ScalaInstance(version, info.launcher)
	/** Locally declared Scala instances of this project plus those inherited from the parent chain. */
	lazy val localScalaInstances: Seq[ScalaInstance] = localScala ++ info.parent.toList.flatMap(_.localScalaInstances)
	/** Override to declare locally installed Scala versions (see defineScala). */
	def localScala: Seq[ScalaInstance] = Nil
	/** The compiler used to build the project, based on the build Scala instance. */
	lazy val buildCompiler = new AnalyzingCompiler(buildScalaInstance, componentManager, log)
	/** Get a `ScalaInstance` for the Scala version with base directory `home`. The library and compiler jars are
	* assumed to be at `new File(home, "lib/scala-library.jar")` and `new File(home, "lib/scala-compiler.jar")`.
	* The label for this instance is determined by the version String in the `compiler.properties` file in `scala-compiler.jar`.*/
	def defineScala(home: File): ScalaInstance = ScalaInstance(home, info.launcher)
	/** Get a `ScalaInstance` for the Scala version with base directory `home`. The library and compiler jars are
	* assumed to be at `new File(home, "lib/scala-library.jar")` and `new File(home, "lib/scala-compiler.jar")`.
	* `version` is used as the label for this instance.*/
	def defineScala(version: String, home: File): ScalaInstance = ScalaInstance(version, home, info.launcher)

	/** If this project is cross-building, returns `base` with an additional path component containing the scala version
	* currently used to build the project. Otherwise, this returns `base`.
	* By default, cross-building is enabled when a project is loaded by the loader and crossScalaVersions is not empty.*/
	def crossPath(base: Path) = if(disableCrossPaths) base else base / crossString
	/** If modifying paths for cross-building is enabled, this returns ScalaVersion.currentString.
	* Otherwise, this returns the empty string. */
	def crossScalaVersionString: String = if(disableCrossPaths) "" else buildScalaVersion
	/** The name of the per-Scala-version output subdirectory, e.g. "scala_2.7.7". */
	private def crossString = "scala_" + buildScalaVersion

	/** True if crossPath should be the identity function.*/
	protected def disableCrossPaths = crossScalaVersions.isEmpty
	/** By default, this is the build.scala.versions property split around whitespace. This can be overridden directly if preferred.*/
	def crossScalaVersions: Seq[String] =
		info.parent match
		{
			case Some(p) => p.crossScalaVersions
			// reverse/removeDuplicates/reverse keeps the FIRST occurrence of each version, preserving declaration order.
			case None => buildScalaVersions.value.split("""\s+""").toList.reverse.removeDuplicates.reverse
		}
	/** A `PathFinder` that determines the files watched when an action is run with a preceeding ~ when this is the current
	* project. This project does not need to include the watched paths for projects that this project depends on.*/
	def watchPaths: PathFinder = Path.emptyPathFinder
	/** True when `key` should stop triggered (~) execution; 10 and 13 are LF and CR. */
	def terminateWatch(key: Int): Boolean = key == 10 || key == 13

	protected final override def parentEnvironment = info.parent

	// .* included because svn doesn't mark .svn hidden
	def defaultExcludes: FileFilter = (".*" - ".") || HiddenFileFilter
	/** Short for parent.descendentsExcept(include, defaultExcludes)*/
	def descendents(parent: PathFinder, include: FileFilter) = parent.descendentsExcept(include, defaultExcludes)
	override def toString = "Project " + projectName.get.getOrElse("at " + environmentLabel)

	/** The project name normalized by StringUtilities.normalize, used as the default organization. */
	def normalizedName = StringUtilities.normalize(name)
}
|
||||
/** The result of attempting to load a project definition (see Project.loadProject). */
private[sbt] sealed trait LoadResult extends NotNull
/** Successful load, carrying the loaded project. */
private[sbt] final class LoadSuccess(val project: Project) extends LoadResult
/** Loading an existing project failed with the given message. */
private[sbt] final class LoadError(val message: String) extends LoadResult
/** The user declined to create a new project when prompted. */
private[sbt] final object LoadSetupDeclined extends LoadResult
/** Setting up a new project failed with the given message. */
private[sbt] final class LoadSetupError(val message: String) extends LoadResult
|
||||
|
||||
object Project
{
	/** Name of the boot directory holding the jars needed for the build (under project/). */
	val BootDirectoryName = "boot"
	/** Default name of a project's output directory. */
	val DefaultOutputDirectoryName = "target"
	/** Name of the file backing persisted project properties. */
	val DefaultEnvBackingName = "build.properties"
	/** Fully qualified name of the default project definition class. */
	val DefaultBuilderClassName = "sbt.DefaultProject"
	/** The default project definition class, resolved eagerly at object initialization. */
	val DefaultBuilderClass = Class.forName(DefaultBuilderClassName).asSubclass(classOf[Project])

	/** The name of the directory for project definitions.*/
	val BuilderProjectDirectoryName = "build"
	/** The name of the directory for plugin definitions.*/
	val PluginProjectDirectoryName = "plugins"
	/** The name of the class that all projects must inherit from.*/
	val ProjectClassName = classOf[Project].getName

	/** The logger that should be used before the root project definition is loaded.*/
	private[sbt] def bootLogger =
	{
		val log = new ConsoleLogger
		log.setLevel(Level.Debug)
		log
	}

	/** True when running inside the sbt launcher (the launcher sets the `sbt.boot` system property). */
	private[sbt] def booted = java.lang.Boolean.getBoolean("sbt.boot")

	private[sbt] def loadProject(app: AppProvider): LoadResult = loadProject(app, None)
	/** Loads the project in the current working directory. */
	private[sbt] def loadProject(app: AppProvider, buildScalaVersion: Option[String]): LoadResult = loadProject(bootLogger, app, buildScalaVersion)
	/** Loads the project in the current working directory.*/
	private[sbt] def loadProject(log: Logger, app: AppProvider, buildScalaVersion: Option[String]): LoadResult =
		checkOutputDirectories(loadProject(new File("."), Nil, None, log, app, buildScalaVersion))
	/** Loads the project in the directory given by 'path' and with the given dependencies.*/
	private[sbt] def loadProject(path: Path, deps: Iterable[Project], parent: Option[Project], log: Logger, app: AppProvider, buildScalaVersion: Option[String]): LoadResult =
		loadProject(path.asFile, deps, parent, log, app, buildScalaVersion)
	/** Loads the project in the directory given by 'projectDirectory' and with the given dependencies.*/
	private[sbt] def loadProject(projectDirectory: File, deps: Iterable[Project], parent: Option[Project], log: Logger, app: AppProvider, buildScalaVersion: Option[String]): LoadResult =
	{
		val info = ProjectInfo(projectDirectory, deps, parent)(log, app, buildScalaVersion)
		// Translate setup results into load results, continuing with the actual load where possible.
		ProjectInfo.setup(info, log) match
		{
			case err: SetupError => new LoadSetupError(err.message)
			case SetupDeclined => LoadSetupDeclined
			case AlreadySetup => loadProject(info, None, log)
			case setup: SetupInfo => loadProject(info, Some(setup), log)
		}
	}
	/** Resolves the project definition class, constructs and initializes the project, mapping
	* reflection and other failures to LoadError/LoadSetupDeclined. */
	private def loadProject(info: ProjectInfo, setupInfo: Option[SetupInfo], log: Logger): LoadResult =
	{
		try
		{
			val result =
				for(builderClass <- getProjectDefinition(info, log).right) yield
					initialize(constructProject(info, builderClass), setupInfo, log)
			result.fold(new LoadError(_), new LoadSuccess(_))
		}
		catch
		{
			case ite: java.lang.reflect.InvocationTargetException =>
			{
				// Unwrap the constructor's own exception when present.
				val cause =
					if(ite.getCause == null) ite
					else ite.getCause
				errorLoadingProject(cause, log)
			}
			case nme: NoSuchMethodException => new LoadError("Constructor with one argument of type sbt.ProjectInfo required for project definition.")
			case e: Exception => errorLoadingProject(e, log)
		}
	}
	/** Maps a load-time exception to a LoadResult; retrieval failures mean the user declined setup. */
	private def errorLoadingProject(e: Throwable, log: Logger) =
		e match
		{
			case _: xsbti.RetrieveException => LoadSetupDeclined
			case _ =>
				log.trace(e)
				new LoadError("Error loading project: " + e.toString)
		}
	/** Loads the project for the given `info` and represented by an instance of 'builderClass'.*/
	private[sbt] def constructProject[P <: Project](info: ProjectInfo, builderClass: Class[P]): P =
		builderClass.getConstructor(classOf[ProjectInfo]).newInstance(info)
	/** Checks the project's dependencies, initializes its environment, and possibly its directories.*/
	private def initialize[P <: Project](p: P, setupInfo: Option[SetupInfo], log: Logger): P =
	{
		setupInfo match
		{
			case Some(setup) =>
			{
				p.projectName() = setup.name
				for(v <- setup.version)
					p.projectVersion() = v
				for(org <- setup.organization)
					p.projectOrganization() = org
				if(!setup.initializeDirectories)
					p.setEnvironmentModified(false)
				for(errorMessage <- p.saveEnvironment())
					log.error(errorMessage)
				if(setup.initializeDirectories)
					p.initializeDirectories()
			}
			case None =>
				// An existing project may request one-time directory initialization through its properties.
				if(p.projectInitialize.value)
				{
					p.initializeDirectories()
					p.projectInitialize() = false
					for(errorMessage <- p.saveEnvironment())
						log.error(errorMessage)
				}
		}
		val useName = p.projectName.get.getOrElse("at " + p.info.projectDirectory.getAbsolutePath)
		checkDependencies(useName, p.info.dependencies, log)
		p.buildScalaInstance // done so that build Scala version is initialized on project startup
		p
	}
	/** Determines the project definition class, compiling the build definition project when one exists.
	* Returns either an error message or the class to instantiate.
	* NOTE(review): the previous comment said this returns "the class name and the class loader",
	* which does not match the signature; doc corrected. */
	private def getProjectDefinition(info: ProjectInfo, buildLog: Logger): Either[String, Class[P] forSome { type P <: Project }] =
		getProjectBuilder(info, buildLog) match
		{
			case Some(builder) => buildProjectDefinition(builder)
			case None => Right(DefaultBuilderClass)
		}
	/** Compiles the build definition project and loads its declared project class, defaulting to DefaultBuilderClass. */
	private def buildProjectDefinition(builderProject: BuilderProject): Either[String, Class[P] forSome { type P <: Project }] =
		builderProject.compile.run.toLeft(()).right.flatMap { ignore =>
			builderProject.projectDefinition.right.map {
				case Some(definition) => getProjectClass[Project](definition, builderProject.projectClasspath, getClass.getClassLoader)
				case None => DefaultBuilderClass
			}
		}
	/** The classpath for a project definition: the builder project's classpath when one exists,
	* otherwise inherited from the parent chain or the sbt classpath itself. */
	private[sbt] def getProjectClasspath(project: Project): PathFinder =
		getProjectBuilder(project.info, project.log) match
		{
			case Some(builder) => builder.projectClasspath
			case _ if project.getClass == DefaultBuilderClass => project.info.sbtClasspath
			case _ =>
				project.info.parent match
				{
					case Some(p) => getProjectClasspath(p)
					case None => project.info.sbtClasspath
				}
		}
	/** Creates the BuilderProject for `info` when a project/build/ directory exists, otherwise None. */
	private[sbt] def getProjectBuilder(info: ProjectInfo, buildLog: Logger): Option[BuilderProject] =
	{
		if(info.builderProjectPath.asFile.isDirectory)
		{
			val builderInfo = ProjectInfo(info.builderProjectPath.asFile, Nil, None)(buildLog, info.app, Some(info.definitionScalaVersion))
			val builderProject = new BuilderProject(builderInfo, info.pluginsPath, buildLog)
			Some(builderProject)
		}
		else
			None
	}
	/** Verifies that the given list of project dependencies contains no nulls. The
	* String argument should be the project name with the dependencies.*/
	private def checkDependencies(forProject: String, deps: Iterable[Project], log: Logger)
	{
		for(nullDep <- deps.find(_ == null))
		{
			log.error("Project " + forProject + " had a null dependency. This is probably an initialization problem and might be due to a circular dependency.")
			throw new RuntimeException("Null dependency in project " + forProject)
		}
	}
	/** Verifies that output directories of the given project and all of its dependencies are
	* all different. No verification is done if the project overrides
	* 'shouldCheckOutputDirectories' to be false. The 'Project.outputDirectories' method is
	* used to determine a project's output directories. */
	private def checkOutputDirectories(result: LoadResult): LoadResult =
		result match
		{
			case success: LoadSuccess =>
				if(success.project.shouldCheckOutputDirectories)
					checkOutputDirectoriesImpl(success.project)
				else
					success
			case x => x
		}
	/** Verifies that output directories of the given project and all of its dependencies are
	* all different. The 'Project.outputDirectories' method is used to determine a project's
	* output directories. */
	private def checkOutputDirectoriesImpl(project: Project): LoadResult =
	{
		val projects = project.projectClosure
		import scala.collection.mutable.{HashMap, HashSet, Set}
		// Group projects by each output path they declare; any group of size > 1 is a conflict.
		val outputDirectories = new HashMap[Path, Set[Project]]
		for(p <- projects; path <- p.outputDirectories)
			outputDirectories.getOrElseUpdate(path, new HashSet[Project]) += p
		val shared = outputDirectories.filter(_._2.size > 1)
		if(shared.isEmpty)
			new LoadSuccess(project)
		else
		{
			val sharedString =
			{
				val s =
					for((path, projectsSharingPath) <- shared) yield
						projectsSharingPath.map(_.name).mkString(", ") + " share " + path
				s.mkString("\n\t")
			}
			new LoadError("The same directory is used for output for multiple projects:\n\t" + sharedString +
			"\n (If this is intentional, use 'override def shouldCheckOutputDirectories = false' in your project definition.)")
		}
	}
	import scala.reflect.Manifest
	/** Loads class `name` from `classpath` (with `additional` as the parent loader) and verifies it
	* is a subclass of P before returning it. */
	private[sbt] def getProjectClass[P <: Project](name: String, classpath: PathFinder, additional: ClassLoader)(implicit mf: Manifest[P]): Class[P] =
	{
		val loader =ClasspathUtilities.toLoader(classpath, additional)
		val builderClass = Class.forName(name, false, loader)
		val projectClass = mf.erasure
		require(projectClass.isAssignableFrom(builderClass), "Builder class '" + builderClass + "' does not extend " + projectClass.getName + ".")
		builderClass.asSubclass(projectClass).asInstanceOf[Class[P]]
	}

	/** Writes the project name and a separator to the project's log at the info level.*/
	def showProjectHeader(project: Project)
	{
		val projectHeader = "Project " + project.name
		project.log.info("")
		project.log.info(projectHeader)
		project.log.info("=" * projectHeader.length)
	}

	/** Walks the parent chain of `p` to its topmost ancestor. */
	def rootProject(p: Project): Project =
		p.info.parent match
		{
			case Some(parent) => rootProject(parent)
			case None => p
		}
}
|
||||
|
|
@ -0,0 +1,139 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import xsbti.{AppProvider, ScalaProvider}
|
||||
import FileUtilities._
|
||||
|
||||
/** Represents the minimal information necessary to construct a Project.
*
* `projectDirectory` is the base directory for the project (not the root project directory)
* `dependencies` are the Projects that this Project depends on.
* `parent` is the parent Project, or None if this is the root project.
* `log` is the Logger to use as a base for the default project Logger.
* `buildScalaVersion` contains the explicitly requested Scala version to use for building (as when using `+` or `++`) or None if the normal version should be used.
*/
final case class ProjectInfo(projectDirectory: File, dependencies: Iterable[Project], parent: Option[Project])
	(log: Logger, val app: AppProvider, val buildScalaVersion: Option[String]) extends NotNull
{
	/** The version of Scala running sbt.*/
	def definitionScalaVersion = app.scalaProvider.version
	/** The launcher instance that booted sbt.*/
	def launcher = app.scalaProvider.launcher

	/** A filtering wrapper over the base logger. */
	val logger = new FilterLogger(log)
	/** The base path for the project, preserving information to the root project directory.*/
	val projectPath: Path =
	{
		// Relativize against the parent's path so the root directory relationship is retained.
		val toRoot = parent.flatMap(p => Path.relativize(p.info.projectPath, projectDirectory))
		new ProjectDirectory(projectDirectory, toRoot)
	}
	/** The path to build information. The current location is `project/`.
	* Note: The directory used to be `metadata/`, hence the name of the constant in the implementation.
	* Note 2: Although it is called builderPath, it is not the path to the builder definition, which is `builderProjectPath`*/
	val builderPath = projectPath / ProjectInfo.MetadataDirectoryName
	/** The boot directory contains the jars needed for building the project, including Scala, sbt, processors and dependencies of these.*/
	def bootPath = builderPath / Project.BootDirectoryName
	/** The path to the build definition project. */
	def builderProjectPath = builderPath / Project.BuilderProjectDirectoryName
	/** The output directory of the build definition project. */
	def builderProjectOutputPath = builderProjectPath / Project.DefaultOutputDirectoryName
	/** The path to the plugin definition project. This declares the plugins to use for the build definition.*/
	def pluginsPath = builderPath / Project.PluginProjectDirectoryName
	/** The output directory of the plugin definition project. */
	def pluginsOutputPath = pluginsPath / Project.DefaultOutputDirectoryName
	/** The path to which the source code for plugins are extracted.*/
	def pluginsManagedSourcePath = pluginsPath / BasicDependencyPaths.DefaultManagedSourceDirectoryName
	/** The path to which plugins are retrieved.*/
	def pluginsManagedDependencyPath = pluginsPath / BasicDependencyPaths.DefaultManagedDirectoryName

	/** The classpath containing all jars comprising sbt, except for the launcher.*/
	def sbtClasspath = Path.finder(app.mainClasspath)
}
|
||||
|
||||
/** The possible outcomes of setting up a new project interactively. */
private[sbt] sealed trait SetupResult extends NotNull
/** The user declined to create a new project. */
private[sbt] final object SetupDeclined extends SetupResult
/** Setup failed; `message` explains why. */
private[sbt] final class SetupError(val message: String) extends SetupResult
/** A build directory already exists, so there is nothing to set up. */
private[sbt] final object AlreadySetup extends SetupResult
/** The information gathered from the user for creating a new project. */
private[sbt] final class SetupInfo(val name: String, val version: Option[Version], val organization: Option[String], val initializeDirectories: Boolean) extends SetupResult
|
||||
|
||||
object ProjectInfo {
  /** Name of the directory holding build information (`project/`).
   * The directory used to be `metadata/`, hence the constant's name. */
  val MetadataDirectoryName = "project"
  private val DefaultOrganization = "empty"

  /** Checks whether `info` already has a build directory; if it does not, interactively
   * prompts the user to create a new project.  Returns the outcome as a SetupResult. */
  def setup(info: ProjectInfo, log: Logger): SetupResult = {
    val builderDirectory = info.builderPath.asFile
    if (builderDirectory.exists) {
      if (builderDirectory.isDirectory)
        AlreadySetup
      else
        new SetupError("'" + builderDirectory.getAbsolutePath + "' is not a directory.")
    }
    else
      setupProject(info.projectDirectory, log)
  }

  /** Interactively gathers the name, organization, and version for a new project. */
  private def setupProject(projectDirectory: File, log: Logger): SetupResult = {
    if (confirmPrompt("No project found. Create new project?", false)) {
      val name = trim(SimpleReader.readLine("Project Name: "))
      if (name.isEmpty)
        new SetupError("Project not created: no name specified.")
      else {
        val organization = {
          val org = trim(SimpleReader.readLine("Organization [" + DefaultOrganization + "]: "))
          if (org.isEmpty) DefaultOrganization else org
        }
        readVersion(projectDirectory, log) match {
          case None => new SetupError("Project not created: no version specified.")
          case Some(version) =>
            if (verifyCreateProject(name, version, organization))
              new SetupInfo(name, Some(version), Some(organization), true)
            else
              SetupDeclined
        }
      }
    }
    else
      SetupDeclined
  }

  private def verifyCreateProject(name: String, version: Version, organization: String): Boolean =
    confirmPrompt("Create new project " + name + " " + version + " with organization " + organization + " ?", true)

  /** Asks `question` with a yes/no choice.  An empty answer selects the default. */
  private def confirmPrompt(question: String, defaultYes: Boolean) = {
    val choices = if (defaultYes) " (Y/n) " else " (y/N) "
    val answer = trim(SimpleReader.readLine(question + choices))
    val yes = "y" :: "yes" :: (if (defaultYes) List("") else Nil)
    yes.contains(answer.toLowerCase)
  }

  /** Reads a version string from the user, retrying on parse errors; None if left empty. */
  private def readVersion(projectDirectory: File, log: Logger): Option[Version] = {
    val version = trim(SimpleReader.readLine("Version: "))
    if (version.isEmpty)
      None
    else {
      Version.fromString(version) match {
        case Left(errorMessage) =>
          log.error("Invalid version: " + errorMessage)
          readVersion(projectDirectory, log)
        case Right(v) => Some(v)
      }
    }
  }

  /** Unwraps an optional line of input and strips surrounding whitespace.
   * Fix: this previously returned the string unmodified despite its name, so
   * whitespace-only input was accepted as a non-empty name/answer. */
  private def trim(s: Option[String]) = s.map(_.trim).getOrElse("")
}
|
||||
|
|
@ -0,0 +1,309 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** The artifact destinations required by the packaging actions. */
trait PackagePaths extends NotNull {
  /** Destination of the main jar produced by the package action. */
  def jarPath: Path
  /** Destination of the test jar. */
  def packageTestJar: Path
  /** Destination of the documentation jar. */
  def packageDocsJar: Path
  /** Destination of the main sources jar. */
  def packageSrcJar: Path
  /** Destination of the test sources jar. */
  def packageTestSrcJar: Path
  /** Destination of the whole-project zip. */
  def packageProjectZip: Path
}
|
||||
/** These are the paths required by BasicScalaProject. */
trait ScalaPaths extends PackagePaths {
  /** Selects all main sources. */
  def mainSources: PathFinder
  /** Selects all test sources. */
  def testSources: PathFinder
  def mainSourceRoots: PathFinder
  def testSourceRoots: PathFinder
  /** Selects all main resources. */
  def mainResources: PathFinder
  /** Selects all test resources. */
  def testResources: PathFinder

  def mainCompilePath: Path
  def testCompilePath: Path
  def mainAnalysisPath: Path
  def testAnalysisPath: Path
  def mainDocPath: Path
  def testDocPath: Path
  def graphSourcesPath: Path
  def graphPackagesPath: Path
  def mainResourcesOutputPath: Path
  def testResourcesOutputPath: Path

  /** Selects the classes compiled from the main sources. */
  def mainClasses: PathFinder
  /** Selects the classes compiled from the test sources. */
  def testClasses: PathFinder

  /** All paths packaged by the package action. */
  def packagePaths: PathFinder
  /** All paths packaged by the package-test action. */
  def packageTestPaths: PathFinder
  /** All sources packaged by the package-src action. */
  def packageSourcePaths: PathFinder
  /** All sources packaged by the package-test-src action. */
  def packageTestSourcePaths: PathFinder
  /** All paths packaged by the package-project action. */
  def packageProjectPaths: PathFinder

  /** The directories created when a user makes a new project from sbt. */
  protected def directoriesToCreate: List[Path]
  /** The directories a project writes to; used to check a project and its
   * dependencies for output collisions. */
  def outputDirectories: Iterable[Path]

  def artifactBaseName: String
}
|
||||
|
||||
/** Concrete source/resource/class selections for a Scala project, derived from the
 * abstract root paths declared here and in ScalaPaths. */
trait BasicScalaPaths extends Project with ScalaPaths {
  def mainResourcesPath: PathFinder
  def testResourcesPath: PathFinder
  def managedDependencyPath: Path
  def managedDependencyRootPath: Path
  def dependencyPath: Path

  /** Selects all files under `base` matching sourceExtensions. */
  protected def sources(base: PathFinder) = descendents(base, sourceExtensions)
  protected def sourceExtensions = "*.scala" | "*.java"

  def mainSources = {
    val fromRoots = sources(mainSourceRoots)
    // In 'scratch' mode, source files directly in the project directory are included too.
    if (scratch) fromRoots +++ (info.projectPath * sourceExtensions) else fromRoots
  }
  def testSources = sources(testSourceRoots)

  def mainResources = descendents(mainResourcesPath ##, "*")
  def testResources = descendents(testResourcesPath ##, "*")

  def mainClasses = (mainCompilePath ##) ** "*.class"
  def testClasses = (testCompilePath ##) ** "*.class"

  def packagePaths = mainClasses +++ mainResources
  def packageTestPaths = testClasses +++ testResources
  def packageSourcePaths = mainSources +++ mainResources
  def packageTestSourcePaths = testSources +++ testResources
  def packageProjectPaths = descendents((info.projectPath ##), "*") --- (packageProjectExcludes ** "*")

  /** Paths excluded from package-project: outputs, managed dependencies, and
   * boot/build-definition/plugin artifacts. */
  protected def packageProjectExcludes: PathFinder =
    outputRootPath +++ managedDependencyRootPath +++
    info.bootPath +++ info.builderProjectOutputPath +++
    info.pluginsOutputPath +++ info.pluginsManagedSourcePath +++ info.pluginsManagedDependencyPath

  override def outputDirectories = outputPath :: managedDependencyPath :: Nil
}
|
||||
|
||||
@deprecated trait BasicProjectPaths extends MavenStyleScalaPaths
/** Maven-style directory layout: sources under src/main and src/test, outputs under
 * outputPath.  Every directory name is an overridable `def` defaulting to the
 * constants in object BasicProjectPaths. */
trait MavenStyleScalaPaths extends BasicScalaPaths with BasicPackagePaths {
  import BasicProjectPaths._

  def outputPath: Path

  // Overridable directory-name components.
  def sourceDirectoryName = DefaultSourceDirectoryName
  def mainDirectoryName = DefaultMainDirectoryName
  def scalaDirectoryName = DefaultScalaDirectoryName
  def javaDirectoryName = DefaultJavaDirectoryName
  def resourcesDirectoryName = DefaultResourcesDirectoryName
  def testDirectoryName = DefaultTestDirectoryName
  def mainCompileDirectoryName = DefaultMainCompileDirectoryName
  def testCompileDirectoryName = DefaultTestCompileDirectoryName
  def docDirectoryName = DefaultDocDirectoryName
  def apiDirectoryName = DefaultAPIDirectoryName
  def graphDirectoryName = DefaultGraphDirectoryName
  def mainAnalysisDirectoryName = DefaultMainAnalysisDirectoryName
  def testAnalysisDirectoryName = DefaultTestAnalysisDirectoryName
  def mainResourcesOutputDirectoryName = DefautMainResourcesOutputDirectoryName
  def testResourcesOutputDirectoryName = DefautTestResourcesOutputDirectoryName

  def sourcePath = path(sourceDirectoryName)

  // Main configuration paths.
  def mainSourcePath = sourcePath / mainDirectoryName
  def mainScalaSourcePath = mainSourcePath / scalaDirectoryName
  def mainJavaSourcePath = mainSourcePath / javaDirectoryName
  def mainResourcesPath = mainSourcePath / resourcesDirectoryName
  def mainDocPath = docPath / mainDirectoryName / apiDirectoryName
  def mainCompilePath = outputPath / mainCompileDirectoryName
  def mainResourcesOutputPath = outputPath / mainResourcesOutputDirectoryName
  def mainAnalysisPath = outputPath / mainAnalysisDirectoryName

  // Test configuration paths.
  def testSourcePath = sourcePath / testDirectoryName
  def testJavaSourcePath = testSourcePath / javaDirectoryName
  def testScalaSourcePath = testSourcePath / scalaDirectoryName
  def testResourcesPath = testSourcePath / resourcesDirectoryName
  def testDocPath = docPath / testDirectoryName / apiDirectoryName
  def testCompilePath = outputPath / testCompileDirectoryName
  def testResourcesOutputPath = outputPath / testResourcesOutputDirectoryName
  def testAnalysisPath = outputPath / testAnalysisDirectoryName

  def docPath = outputPath / docDirectoryName
  def graphPath = outputPath / graphDirectoryName
  def graphPackagesPath = graphPath / "packages"
  def graphSourcesPath = graphPath / "sources"

  /** These are the directories that are created when a user makes a new project from sbt. */
  protected def directoriesToCreate: List[Path] =
    dependencyPath ::
    mainScalaSourcePath ::
    mainResourcesPath ::
    testScalaSourcePath ::
    testResourcesPath ::
    Nil

  def mainSourceRoots = (mainJavaSourcePath##) +++ (mainScalaSourcePath##)
  def testSourceRoots = (testJavaSourcePath##) +++ (testScalaSourcePath##)
}
|
||||
|
||||
/** Default artifact locations: every archive is placed in outputPath and named
 * from artifactBaseName plus an extension. */
trait BasicPackagePaths extends ScalaPaths with PackagePaths {
  def outputPath: Path

  def defaultJarBaseName: String = artifactBaseName
  def defaultJarName = defaultJarBaseName + ".jar"
  def jarPath = outputPath / defaultJarName
  def packageTestJar = defaultJarPath("-test.jar")
  def packageDocsJar = defaultJarPath("-docs.jar")
  def packageSrcJar = defaultJarPath("-src.jar")
  def packageTestSrcJar = defaultJarPath("-test-src.jar")
  def packageProjectZip = defaultJarPath("-project.zip")
  // Note: this uses artifactBaseName directly, so overriding defaultJarBaseName
  // affects jarPath only, not the other archives.
  def defaultJarPath(extension: String) = outputPath / (artifactBaseName + extension)
}
|
||||
|
||||
/** Default directory-name constants for the Maven-style layout. */
object BasicProjectPaths {
  val DefaultSourceDirectoryName = "src"
  val DefaultMainCompileDirectoryName = "classes"
  val DefaultTestCompileDirectoryName = "test-classes"
  val DefaultDocDirectoryName = "doc"
  val DefaultAPIDirectoryName = "api"
  val DefaultGraphDirectoryName = "graph"
  val DefaultMainAnalysisDirectoryName = "analysis"
  val DefaultTestAnalysisDirectoryName = "test-analysis"
  // The 'Defaut' (sic) spelling is kept as-is: these names are referenced elsewhere.
  val DefautMainResourcesOutputDirectoryName = "resources"
  val DefautTestResourcesOutputDirectoryName = "test-resources"

  val DefaultMainDirectoryName = "main"
  val DefaultScalaDirectoryName = "scala"
  val DefaultJavaDirectoryName = "java"
  val DefaultResourcesDirectoryName = "resources"
  val DefaultTestDirectoryName = "test"

  // Forwarders to the constants' new locations.
  def BootDirectoryName = Project.BootDirectoryName
  def DefaultManagedDirectoryName = BasicDependencyPaths.DefaultManagedDirectoryName
  def DefaultDependencyDirectoryName = BasicDependencyPaths.DefaultDependencyDirectoryName
}
|
||||
|
||||
/** Paths required by a web (WAR-producing) project. */
trait WebScalaPaths extends ScalaPaths {
  /** Where the exploded web application is assembled. */
  def temporaryWarPath: Path
  /** Files included in the web application. */
  def webappResources: PathFinder
  def jettyContextPath: String
  /** Destination of the packaged WAR. */
  def warPath: Path
}
|
||||
@deprecated trait WebProjectPaths extends MavenStyleWebScalaPaths
/** Maven-style defaults for web projects: webapp sources under the main source
 * directory, the exploded WAR and packaged WAR under outputPath. */
trait MavenStyleWebScalaPaths extends WebScalaPaths with MavenStyleScalaPaths {
  import WebProjectPaths._
  def temporaryWarPath = outputPath / webappDirectoryName
  def webappPath = mainSourcePath / webappDirectoryName
  def webappDirectoryName = DefaultWebappDirectoryName
  def jettyContextPath = DefaultJettyContextPath
  def defaultWarName = defaultJarBaseName + ".war"
  def warPath = outputPath / defaultWarName
  /** Additional files to include in the web application. */
  protected def extraWebappFiles: PathFinder = Path.emptyPathFinder
  def webappResources = descendents(webappPath ##, "*") +++ extraWebappFiles
}
|
||||
/** Default constants for web project layout. */
object WebProjectPaths {
  val DefaultWebappDirectoryName = "webapp"
  val DefaultJettyContextPath = "/"
}
|
||||
|
||||
/** Defines default output locations for a webstart project: the generated jnlp file,
 * the library directory, and an optional zip of the whole webstart output. */
trait WebstartPaths extends ScalaPaths {
  import WebstartPaths._

  def outputPath: Path
  def jnlpPath: Path

  def webstartOutputDirectory = outputPath / webstartDirectoryName

  def jnlpFile = webstartOutputDirectory / jnlpFileName
  def webstartLibDirectory = webstartOutputDirectory / webstartLibName
  def webstartZip: Option[Path] = Some(outputPath / webstartZipName)
  def jnlpResourcesPath = jnlpPath / BasicProjectPaths.DefaultResourcesDirectoryName

  def webstartLibName = DefaultWebstartLibName
  def webstartDirectoryName = DefaultWebstartDirectoryName

  def webstartZipName: String
  def jnlpFileName: String
}
|
||||
/** Default constants for webstart project layout. */
object WebstartPaths {
  val DefaultWebstartDirectoryName = "webstart"
  val DefaultJnlpName = "jnlp"
  val DefaultWebstartLibName = "lib"
}
|
||||
/** Maven-style defaults for webstart projects: jnlp sources under the main source
 * directory, artifact names derived from artifactBaseName. */
trait MavenStyleWebstartPaths extends WebstartPaths with MavenStyleScalaPaths {
  import WebstartPaths._
  def jnlpPath = mainSourcePath / DefaultJnlpName
  def webstartMainJar = jarPath
  def jnlpFileName = DefaultJnlpFileName
  def webstartZipName = artifactBaseName + ".zip"
  def DefaultJnlpFileName = artifactBaseName + ".jnlp"
}
|
||||
|
||||
/** Paths required to compile and run integration tests. */
trait IntegrationTestPaths extends NotNull {
  def integrationTestSources: PathFinder
  def integrationTestScalaSourceRoots: PathFinder
  def integrationTestResourcesPath: Path

  def integrationTestCompilePath: Path
  def integrationTestAnalysisPath: Path
}
|
||||
/** Derives integration-test source selection from a single Scala source directory. */
trait BasicIntegrationTestPaths extends IntegrationTestPaths {
  def integrationTestScalaSourcePath: Path
  def integrationTestScalaSourceRoots: PathFinder = integrationTestScalaSourcePath
  def integrationTestSources = sources(integrationTestScalaSourceRoots)
  /** Source-selection strategy, supplied by the mixing-in project. */
  protected def sources(base: PathFinder): PathFinder
}
|
||||
/** Maven-style layout for integration tests: sources under src/it, outputs under outputPath. */
trait MavenStyleIntegrationTestPaths extends BasicIntegrationTestPaths with MavenStyleScalaPaths {
  import IntegrationTestPaths._

  def integrationTestDirectoryName = DefaultIntegrationTestDirectoryName
  def integrationTestCompileDirectoryName = DefaultIntegrationTestCompileDirectoryName
  def integrationTestAnalysisDirectoryName = DefaultIntegrationTestAnalysisDirectoryName

  def integrationTestSourcePath = sourcePath / integrationTestDirectoryName
  def integrationTestScalaSourcePath = integrationTestSourcePath / scalaDirectoryName
  def integrationTestResourcesPath = integrationTestSourcePath / resourcesDirectoryName

  def integrationTestCompilePath = outputPath / integrationTestCompileDirectoryName
  def integrationTestAnalysisPath = outputPath / integrationTestAnalysisDirectoryName
}
|
||||
|
||||
/** Default constants for integration-test layout. */
object IntegrationTestPaths {
  val DefaultIntegrationTestDirectoryName = "it"
  val DefaultIntegrationTestCompileDirectoryName = "it-classes"
  val DefaultIntegrationTestAnalysisDirectoryName = "it-analysis"
}
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 David MacIver, Mark Harrah
|
||||
*/
|
||||
package sbt;
|
||||
|
||||
import scala.collection._
|
||||
|
||||
/** Reflection helpers used to discover `val` members of project definitions. */
object ReflectUtilities {
  /** Lowercases `name`, inserting `separator` before each character that was
   * upper case (e.g. "fooBar" with '-' becomes "foo-bar"). */
  def transformCamelCase(name: String, separator: Char) = {
    val result = new StringBuilder
    name.foreach { c =>
      if (java.lang.Character.isUpperCase(c)) {
        result += separator
        result += java.lang.Character.toLowerCase(c)
      }
      else
        result += c
    }
    result.toString
  }

  /** The given class followed by its superclasses, stopping at AnyRef
   * (or immediately for non-reference classes). */
  def ancestry(clazz: Class[_]): List[Class[_]] =
    if (clazz == classOf[AnyRef] || !classOf[AnyRef].isAssignableFrom(clazz)) List(clazz)
    else clazz :: ancestry(clazz.getSuperclass)

  /** All declared fields of `clazz` and its ancestors, keyed by field name. */
  def fields(clazz: Class[_]) =
    mutable.OpenHashMap(ancestry(clazz).flatMap(_.getDeclaredFields).map(f => (f.getName, f)): _*)

  /** Collects the `val` members of `self` assignable to `clazz`, keyed by name.
   * Only zero-argument methods backed by a field of the exact same type count.
   * Throws UninitializedVal if such a val is still null (an initialization-order bug). */
  def allValsC[T](self: AnyRef, clazz: Class[T]): Map[String, T] = {
    val collected = new mutable.OpenHashMap[String, T]
    val backingFields = fields(self.getClass)
    for (method <- self.getClass.getMethods)
      if (method.getParameterTypes.length == 0 && clazz.isAssignableFrom(method.getReturnType))
        for (field <- backingFields.get(method.getName) if field.getType == method.getReturnType) {
          val value = method.invoke(self).asInstanceOf[T]
          if (value == null) throw new UninitializedVal(method.getName, method.getDeclaringClass.getName)
          collected(method.getName) = value
        }
    collected
  }

  /** Type-inferred variant of allValsC, using a Manifest for the erased class. */
  def allVals[T](self: AnyRef)(implicit mt: scala.reflect.Manifest[T]): Map[String, T] =
    allValsC(self, mt.erasure).asInstanceOf[Map[String, T]]
}
|
||||
/** Thrown when a reflectively discovered `val` is null, which usually indicates an
 * initialization-order problem that a `lazy val` would fix. */
final class UninitializedVal(val valName: String, val className: String)
  extends RuntimeException("val " + valName + " in class " + className + " was null.\nThis is probably an initialization problem and a 'lazy val' should be used.")
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import xsbti.AppProvider
|
||||
import FileUtilities._
|
||||
|
||||
object Resources {
  /** Creates a Resources instance rooted at the classpath resource directory `basePath`,
   * which must start with '/' and resolve to an existing location on disk. */
  def apply(basePath: String) = {
    require(basePath.startsWith("/"))
    val resource = getClass.getResource(basePath)
    if (resource == null)
      error("Resource base directory '" + basePath + "' not on classpath.")
    else {
      val file = toFile(resource)
      if (file.exists)
        new Resources(file)
      else
        error("Resource base directory '" + basePath + "' does not exist.")
    }
  }
  // NOTE(review): appears unused within this object; kept as-is.
  private val LoadErrorPrefix = "Error loading initial project: "
}
|
||||
|
||||
/** Locates test/resource directories under `baseDirectory`, optionally copying one
 * into a temporary directory so callers may modify it. */
class Resources(val baseDirectory: File) {
  import Resources._

  /** Resolves `baseDirectory/group/name`.  The returned directory is not actually
   * read-only, but it should be treated that way. */
  def readOnlyResourceDirectory(group: String, name: String): Either[String, File] = {
    val groupDirectory = new File(baseDirectory, group)
    if (!groupDirectory.isDirectory)
      Left("Group '" + group + "' not found.")
    else {
      val resourceDirectory = new File(groupDirectory, name)
      if (resourceDirectory.isDirectory)
        Right(resourceDirectory)
      else
        Left("Resource directory '" + name + "' in group '" + group + "' not found.")
    }
  }

  /** Resolves the named resource directory and runs `withDirectory` against a
   * writable temporary copy of it. */
  def readWriteResourceDirectory[T](group: String, name: String, log: Logger)
    (withDirectory: File => Either[String, T]): Either[String, T] =
    readOnlyResourceDirectory(group, name).right.flatMap(dir => readWriteResourceDirectory(dir, log)(withDirectory))

  /** Copies `readOnly` into a fresh temporary directory and runs `withDirectory`
   * against the writable copy; the temporary directory is managed by doInTemporaryDirectory. */
  def readWriteResourceDirectory[T](readOnly: File, log: Logger)
    (withDirectory: File => Either[String, T]): Either[String, T] = {
    require(readOnly.isDirectory)
    def runInCopy(temporary: File): Either[String, T] = {
      val writableCopy = new File(temporary, readOnly.getName)
      FileUtilities.copyDirectory(readOnly, writableCopy, log).toLeft(()).right.flatMap { _ =>
        withDirectory(writableCopy)
      }
    }
    doInTemporaryDirectory(log)(runInCopy)
  }
}
|
||||
|
|
@ -0,0 +1,162 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah, Vesa Vilhonen
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings}
|
||||
import scala.tools.nsc.interpreter.InteractiveReader
|
||||
import scala.tools.nsc.reporters.Reporter
|
||||
import scala.tools.nsc.util.ClassPath
|
||||
|
||||
import java.io.File
|
||||
import java.net.{URL, URLClassLoader}
|
||||
import java.lang.reflect.Modifier.{isPublic, isStatic}
|
||||
|
||||
/** Strategy for running a main class against a classpath with the given arguments;
 * returns an error message on failure, None on success. */
trait ScalaRun {
  def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger): Option[String]
}
|
||||
/** Runs a main class in a forked JVM as configured by `config`. */
class ForkRun(config: ForkScalaRun) extends ScalaRun {
  def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger): Option[String] = {
    val scalaArguments = classpathOption(classpath) ::: mainClass :: options.toList
    // Fall back to logging the forked process's output when no strategy is configured.
    val strategy = config.outputStrategy.getOrElse(LoggedOutput(log))
    val exitCode = Fork.scala(config.javaHome, config.runJVMOptions, config.scalaJars, scalaArguments, config.workingDirectory, strategy)
    processExitCode(exitCode, "runner")
  }
  private def classpathOption(classpath: Iterable[Path]) = "-cp" :: Path.makeString(classpath) :: Nil
  /** Maps an exit code to None (success) or an error message. */
  private def processExitCode(exitCode: Int, label: String) =
    if (exitCode == 0) None
    else Some("Nonzero exit code returned from " + label + ": " + exitCode)
}
|
||||
/** Runs a main class in-process on a class loader built over the given classpath. */
class Run(instance: xsbt.ScalaInstance) extends ScalaRun {
  /** Runs the class 'mainClass' using the given classpath and options using the scala
   * runner, trapping calls to System.exit. */
  def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger) = {
    log.info("Running " + mainClass + " " + options.mkString(" "))

    def execute =
      try { run0(mainClass, classpath, options, log) }
      catch {
        // Unwrap reflection's wrapper so the user's own exception propagates.
        case e: java.lang.reflect.InvocationTargetException => throw e.getCause
      }

    Run.executeTrapExit(execute, log)
  }

  /** Invokes main on the freshly built loader, installing it as the thread's
   * context class loader for the duration of the call. */
  private def run0(mainClassName: String, classpath: Iterable[Path], options: Seq[String], log: Logger) {
    val loader = getLoader(classpath, log)
    val main = getMainMethod(mainClassName, loader)

    val currentThread = Thread.currentThread
    val previousLoader = currentThread.getContextClassLoader()
    currentThread.setContextClassLoader(loader)
    try { main.invoke(null, options.toArray[String].asInstanceOf[Array[String]]) }
    finally { currentThread.setContextClassLoader(previousLoader) }
  }

  /** Looks up a public, static main(Array[String]) method on `mainClassName`. */
  def getMainMethod(mainClassName: String, loader: ClassLoader) = {
    val mainClass = Class.forName(mainClassName, true, loader)
    val method = mainClass.getMethod("main", classOf[Array[String]])
    val modifiers = method.getModifiers
    if (!isPublic(modifiers)) throw new NoSuchMethodException(mainClassName + ".main is not public")
    if (!isStatic(modifiers)) throw new NoSuchMethodException(mainClassName + ".main is not static")
    method
  }

  /** A class loader over `classpath` whose parent is this Scala instance's loader. */
  def getLoader(classpath: Iterable[Path], log: Logger) = {
    val classpathURLs = classpath.toSeq.map(_.asURL).toArray
    log.debug(" Classpath:\n\t" + classpathURLs.mkString("\n\t"))
    new URLClassLoader(classpathURLs, instance.loader)
  }
}
|
||||
|
||||
/** This module is an interface to starting the scala interpreter or runner.*/
|
||||
/** This module is an interface to starting the scala interpreter or runner. */
object Run {
  def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger)(implicit runner: ScalaRun) =
    runner.run(mainClass, classpath, options, log)

  /** Executes the given function, trapping calls to System.exit; returns an error
   * message for a nonzero exit code. */
  private[sbt] def executeTrapExit(f: => Unit, log: Logger): Option[String] = {
    val exitCode = TrapExit(f, log)
    if (exitCode == 0) {
      log.debug("Exited with code 0")
      None
    }
    else
      Some("Nonzero exit code: " + exitCode)
  }

  /** Create a settings object and execute the provided function if the settings are created ok. */
  private def createSettings(log: Logger)(f: Settings => Option[String]) = {
    val command = new GenericRunnerCommand(Nil, message => log.error(message))
    if (command.ok) f(command.settings) else Some(command.usageMsg)
  }

  /** Starts a Scala interpreter session with 'project' bound to the value 'current'
   * and `import sbt._` / `import current._` executed. */
  def projectConsole(project: Project): Option[String] = {
    import project.log
    createSettings(log) { interpreterSettings =>
      createSettings(log) { compilerSettings =>
        log.info("Starting scala interpreter with project definition " + project.name + " ...")
        log.info("")
        Control.trapUnit("Error during session: ", log) {
          JLine.withJLine {
            val interpreterLoop = new ProjectInterpreterLoop(compilerSettings, project)
            executeTrapExit(interpreterLoop.main(interpreterSettings), log)
          }
        }
      }
    }
  }

  /** A custom InterpreterLoop that binds Project 'project' to the value 'current' and
   * interprets `import sbt._`, `import Process._`, and `import current._`.  To do this,
   * 1) the compiler uses a separate settings instance ('compilerSettings') whose classpath
   *    includes the Scala compiler/library jars and the project's compile classpath, and
   * 2) the interpreter's parent class loader is the loader that loaded the project, so the
   *    project instance itself can be bound to a variable in the interpreter. */
  private class ProjectInterpreterLoop(compilerSettings: Settings, project: Project) extends InterpreterLoop {
    override def createInterpreter() {
      val projectLoader = project.getClass.getClassLoader
      val classpath = Project.getProjectClasspath(project)
      val fullClasspath = classpath.get ++ Path.fromFiles(project.info.app.scalaProvider.jars)
      compilerSettings.classpath.value = Path.makeString(fullClasspath)
      project.log.debug(" console-project classpath:\n\t" + fullClasspath.mkString("\n\t"))

      in = InteractiveReader.createDefault()
      interpreter = new Interpreter(settings) {
        override protected def parentClassLoader = projectLoader
        override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
      }
      interpreter.setContextClassLoader()
      interpreter.bind("current", project.getClass.getName, project)
      interpreter.interpret("import sbt._")
      interpreter.interpret("import Process._")
      interpreter.interpret("import current._")
    }
  }
}
|
||||
|
|
@ -0,0 +1,443 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah, David MacIver, Josh Cough
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import FileUtilities._
|
||||
import java.io.File
|
||||
import java.util.jar.{Attributes, Manifest}
|
||||
import scala.collection.mutable.ListBuffer
|
||||
|
||||
/** Adds clean tasks: deleting a set of paths while optionally preserving some files
 * and clearing compilation analysis. */
trait Cleanable extends Project {
  trait CleanOption extends ActionOption
  /** Clears and saves the given analysis as part of cleaning. */
  case class ClearAnalysis(analysis: TaskAnalysis[_, _, _]) extends CleanOption
  /** Files matched by `paths` are saved before cleaning and restored afterwards. */
  case class Preserve(paths: PathFinder) extends CleanOption

  def cleanTask(paths: PathFinder, options: CleanOption*): Task =
    cleanTask(paths, options)
  def cleanTask(paths: PathFinder, options: => Seq[CleanOption]): Task =
    task {
      val cleanOptions = options
      val toPreserve = for (Preserve(preservePaths) <- cleanOptions; file <- preservePaths.get) yield file
      Control.thread(FileUtilities.preserve(toPreserve, log)) { preserved =>
        val cleanResult = FileUtilities.clean(paths.get, log)
        for (ClearAnalysis(analysis) <- cleanOptions) {
          analysis.clear()
          analysis.save()
        }
        val restoreResult = preserved.restore(log)
        cleanResult orElse restoreResult
      }
    }

  /** Removes plugin outputs, extracted plugin sources, and retrieved plugin dependencies. */
  lazy val cleanPlugins = cleanTask(info.pluginsOutputPath +++ info.pluginsManagedSourcePath +++ info.pluginsManagedDependencyPath)
}
|
||||
/** Basic compiler-option vocabulary shared by Scala projects. */
trait SimpleScalaProject extends ExecProject with Cleanable {
  /** A task that always fails with `message`. */
  def errorTask(message: String) = task { Some(message) }

  case class CompileOption(val asString: String) extends ActionOption
  case class JavaCompileOption(val asString: String) extends ActionOption

  val Deprecation = CompileOption(CompileOptions.Deprecation)
  val ExplainTypes = CompileOption("-explaintypes")
  val Optimize = CompileOption("-optimise")
  /** Alternate-spelling alias for Optimize. */
  def Optimise = Optimize
  val Verbose = CompileOption(CompileOptions.Verbose)
  val Unchecked = CompileOption(CompileOptions.Unchecked)
  val DisableWarnings = CompileOption("-nowarn")
  /** Selects the compiler backend target. */
  def target(target: Target.Value) = CompileOption("-target:" + target)
  object Target extends Enumeration {
    val Java1_5 = Value("jvm-1.5")
    val Java1_4 = Value("jvm-1.4")
    val Msil = Value("msil")
  }
}
|
||||
trait ScalaProject extends SimpleScalaProject with FileTasks with MultiTaskProject with Exec
|
||||
{
|
||||
import ScalaProject._
|
||||
|
||||
final case class MaxCompileErrors(val value: Int) extends CompileOption("") with ScaladocOption { def asList = Nil }
|
||||
trait PackageOption extends ActionOption
|
||||
trait TestOption extends ActionOption
|
||||
|
||||
case class TestSetup(setup: () => Option[String]) extends TestOption
|
||||
case class TestCleanup(cleanup: () => Option[String]) extends TestOption
|
||||
case class ExcludeTests(tests: Iterable[String]) extends TestOption
|
||||
case class TestListeners(listeners: Iterable[TestReportListener]) extends TestOption
|
||||
case class TestFilter(filterTest: String => Boolean) extends TestOption
|
||||
|
||||
// args for all frameworks
|
||||
def TestArgument(args: String*): TestArgument = TestArgument(None, args.toList)
|
||||
// args for a particular test framework
|
||||
def TestArgument(tf: TestFramework, args: String*): TestArgument = TestArgument(Some(tf), args.toList)
|
||||
|
||||
// None means apply to all, Some(tf) means apply to a particular framework only.
|
||||
case class TestArgument(framework: Option[TestFramework], args: List[String]) extends TestOption
|
||||
|
||||
case class JarManifest(m: Manifest) extends PackageOption
|
||||
{
|
||||
assert(m != null)
|
||||
}
|
||||
case class MainClass(mainClassName: String) extends PackageOption
|
||||
case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption
|
||||
case object Recursive extends PackageOption
|
||||
def ManifestAttributes(attributes: (String, String)*): ManifestAttributes =
|
||||
{
|
||||
val converted = for( (name,value) <- attributes ) yield (new Attributes.Name(name), value)
|
||||
new ManifestAttributes(converted : _*)
|
||||
}
|
||||
|
||||
|
||||
trait ScaladocOption extends ActionOption
|
||||
{
|
||||
def asList: List[String]
|
||||
}
|
||||
case class SimpleDocOption(optionValue: String) extends ScaladocOption
|
||||
{
|
||||
def asList = List(optionValue)
|
||||
}
|
||||
case class CompoundDocOption(label: String, value: String) extends ScaladocOption
|
||||
{
|
||||
def asList = List(label, value)
|
||||
}
|
||||
val LinkSource = SimpleDocOption("-linksource")
|
||||
val NoComment = SimpleDocOption("-nocomment")
|
||||
def access(access: Access.Value) = SimpleDocOption("-access:" + access)
|
||||
def documentBottom(bottomText: String) = CompoundDocOption("-bottom", bottomText)
|
||||
def documentCharset(charset: String) = CompoundDocOption("-charset", charset)
|
||||
def documentTitle(title: String) = CompoundDocOption(if(isScala27) "-doctitle" else "-doc-title", title)
|
||||
def documentFooter(footerText: String) = CompoundDocOption("-footer", footerText)
|
||||
def documentHeader(headerText: String) = CompoundDocOption("-header", headerText)
|
||||
def stylesheetFile(path: Path) = CompoundDocOption("-stylesheetfile", path.asFile.getAbsolutePath)
|
||||
def documentTop(topText: String) = CompoundDocOption("-top", topText)
|
||||
def windowTitle(title: String) = CompoundDocOption("-windowtitle", title)
|
||||
|
||||
object Access extends Enumeration
|
||||
{
|
||||
val Public = Value("public")
|
||||
val Default = Value("protected")
|
||||
val Private = Value("private")
|
||||
}
|
||||
|
||||
def javapTask(classpath: PathFinder, conditional: => CompileConditional, compilePath: Path) =
|
||||
task { args =>
|
||||
val cp = classpath +++ Path.fromFile(FileUtilities.scalaLibraryJar) +++ Path.fromFile(FileUtilities.scalaCompilerJar)
|
||||
execOut { Process("javap" :: "-classpath" :: Path.makeString(cp.get) :: args.toList) }
|
||||
} completeWith(classNames(conditional, compilePath))
|
||||
private def classNames(conditional: CompileConditional, compilePath: Path) =
|
||||
{
|
||||
val classes = conditional.analysis.allProducts.flatMap(path => Path.relativize(compilePath.asFile, path.asFile))
|
||||
classes.map(_.replace(java.io.File.separatorChar, '.').toList.dropRight(".class".length).mkString).toSeq
|
||||
}
|
||||
|
||||
def consoleTask(classpath: PathFinder): Task = consoleTask(classpath, "")
|
||||
def consoleTask(classpath: PathFinder, initialCommands: => String): Task =
|
||||
interactiveTask {
|
||||
(new Console(buildCompiler))(classpath.get, initialCommands, log)
|
||||
}
|
||||
|
||||
def runTask(mainClass: => Option[String], classpath: PathFinder, options: String*)(implicit runner: ScalaRun): Task =
|
||||
runTask(mainClass, classpath, options)
|
||||
def runTask(mainClass: => Option[String], classpath: PathFinder, options: => Seq[String])(implicit runner: ScalaRun): Task =
|
||||
task
|
||||
{
|
||||
mainClass match
|
||||
{
|
||||
case Some(main) => runner.run(main, classpath.get, options, log)
|
||||
case None => Some("No main class specified.")
|
||||
}
|
||||
}
|
||||
|
||||
def syncPathsTask(sources: PathFinder, destinationDirectory: Path): Task =
|
||||
task { FileUtilities.syncPaths(sources, destinationDirectory, log) }
|
||||
def syncTask(sourceDirectory: Path, destinationDirectory: Path): Task =
|
||||
task { FileUtilities.sync(sourceDirectory, destinationDirectory, log) }
|
||||
def copyTask(sources: PathFinder, destinationDirectory: Path): Task =
|
||||
task { FileUtilities.copy(sources.get, destinationDirectory, log).left.toOption }
|
||||
|
||||
def testTask(frameworks: Seq[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: TestOption*): Task =
|
||||
testTask(frameworks, classpath, analysis, options)
|
||||
def testTask(frameworks: Seq[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: => Seq[TestOption]): Task =
|
||||
{
|
||||
def work =
|
||||
{
|
||||
val (begin, work, end) = testTasks(frameworks, classpath, analysis, options)
|
||||
val beginTasks = begin.map(toTask).toSeq // test setup tasks
|
||||
val workTasks = work.map(w => toTask(w) dependsOn(beginTasks : _*)) // the actual tests
|
||||
val endTasks = end.map(toTask).toSeq // tasks that perform test cleanup and are run regardless of success of tests
|
||||
val endTask = task { None } named("test-cleanup") dependsOn(endTasks : _*)
|
||||
val rootTask = task { None } named("test-complete") dependsOn(workTasks.toSeq : _*) // the task that depends on all test subtasks
|
||||
SubWork[Project#Task](rootTask, endTask)
|
||||
}
|
||||
new CompoundTask(work)
|
||||
}
|
||||
private def toTask(testTask: NamedTestTask) = task(testTask.run()) named(testTask.name)
|
||||
|
||||
def graphSourcesTask(outputDirectory: Path, roots: PathFinder, analysis: => CompileAnalysis): Task =
|
||||
task { DotGraph.sources(analysis, outputDirectory, roots.get, log) }
|
||||
def graphPackagesTask(outputDirectory: Path, roots: PathFinder, analysis: => CompileAnalysis): Task =
|
||||
task { DotGraph.packages(analysis, outputDirectory, roots.get, log) }
|
||||
def scaladocTask(label: String, sources: PathFinder, outputDirectory: Path, classpath: PathFinder, options: ScaladocOption*): Task =
|
||||
scaladocTask(label, sources, outputDirectory, classpath, options)
|
||||
def scaladocTask(label: String, sources: PathFinder, outputDirectory: Path, classpath: PathFinder, options: => Seq[ScaladocOption]): Task =
|
||||
task
|
||||
{
|
||||
val optionsLocal = options
|
||||
val maxErrors = maximumErrors(optionsLocal)
|
||||
(new Scaladoc(maxErrors, buildCompiler))(label, sources.get, classpath.get, outputDirectory, optionsLocal.flatMap(_.asList), log)
|
||||
}
|
||||
|
||||
def packageTask(sources: PathFinder, outputDirectory: Path, jarName: => String, options: PackageOption*): Task =
|
||||
packageTask(sources, outputDirectory / jarName, options)
|
||||
def packageTask(sources: PathFinder, outputDirectory: Path, jarName: => String, options: => Seq[PackageOption]): Task =
|
||||
packageTask(sources: PathFinder, outputDirectory / jarName, options)
|
||||
def packageTask(sources: PathFinder, jarPath: => Path, options: PackageOption*): Task =
|
||||
packageTask(sources, jarPath, options)
|
||||
def packageTask(sources: PathFinder, jarPath: => Path, options: => Seq[PackageOption]): Task =
|
||||
fileTask("package", jarPath from sources)
|
||||
{
|
||||
import wrap.{MutableMapWrapper,Wrappers}
|
||||
/** Copies the mappings in a2 to a1, mutating a1. */
|
||||
def mergeAttributes(a1: Attributes, a2: Attributes)
|
||||
{
|
||||
for( (key, value) <- Wrappers.toList(a2))
|
||||
a1.put(key, value)
|
||||
}
|
||||
|
||||
val manifest = new Manifest
|
||||
var recursive = false
|
||||
for(option <- options)
|
||||
{
|
||||
option match
|
||||
{
|
||||
case JarManifest(mergeManifest) =>
|
||||
{
|
||||
mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes)
|
||||
val entryMap = new MutableMapWrapper(manifest.getEntries)
|
||||
for((key, value) <- Wrappers.toList(mergeManifest.getEntries))
|
||||
{
|
||||
entryMap.get(key) match
|
||||
{
|
||||
case Some(attributes) => mergeAttributes(attributes, value)
|
||||
case None => entryMap += (key, value)
|
||||
}
|
||||
}
|
||||
}
|
||||
case Recursive => recursive = true
|
||||
case MainClass(mainClassName) =>
|
||||
manifest.getMainAttributes.put(Attributes.Name.MAIN_CLASS, mainClassName)
|
||||
case ManifestAttributes(attributes @ _*) =>
|
||||
val main = manifest.getMainAttributes
|
||||
for( (name, value) <- attributes)
|
||||
main.put(name, value)
|
||||
case _ => log.warn("Ignored unknown package option " + option)
|
||||
}
|
||||
}
|
||||
val jarPathLocal = jarPath
|
||||
FileUtilities.clean(jarPathLocal :: Nil, log) orElse
|
||||
FileUtilities.jar(sources.get, jarPathLocal, manifest, recursive, log)
|
||||
}
|
||||
def zipTask(sources: PathFinder, outputDirectory: Path, zipName: => String): Task =
|
||||
zipTask(sources, outputDirectory / zipName)
|
||||
def zipTask(sources: PathFinder, zipPath: => Path): Task =
|
||||
fileTask("zip", zipPath from sources) { FileUtilities.zip(sources.get, zipPath, false, log) }
|
||||
def incrementVersionNumber()
|
||||
{
|
||||
projectVersion.get match
|
||||
{
|
||||
case Some(v: BasicVersion) =>
|
||||
{
|
||||
val newVersion = incrementImpl(v)
|
||||
log.info("Changing version to " + newVersion)
|
||||
projectVersion() = newVersion
|
||||
}
|
||||
case a => ()
|
||||
}
|
||||
}
|
||||
protected def incrementImpl(v: BasicVersion): Version = v.incrementMicro
|
||||
protected def testTasks(frameworks: Seq[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: => Seq[TestOption]) =
|
||||
{
|
||||
import scala.collection.mutable.HashSet
|
||||
import scala.collection.mutable.Map
|
||||
|
||||
val testFilters = new ListBuffer[String => Boolean]
|
||||
val excludeTestsSet = new HashSet[String]
|
||||
val setup, cleanup = new ListBuffer[() => Option[String]]
|
||||
val testListeners = new ListBuffer[TestReportListener]
|
||||
val testArgsByFramework = Map[TestFramework, ListBuffer[String]]()
|
||||
def frameworkArgs(framework: TestFramework): ListBuffer[String] =
|
||||
testArgsByFramework.getOrElseUpdate(framework, new ListBuffer[String])
|
||||
|
||||
for(option <- options)
|
||||
{
|
||||
option match
|
||||
{
|
||||
case TestFilter(include) => testFilters += include
|
||||
case ExcludeTests(exclude) => excludeTestsSet ++= exclude
|
||||
case TestListeners(listeners) => testListeners ++= listeners
|
||||
case TestSetup(setupFunction) => setup += setupFunction
|
||||
case TestCleanup(cleanupFunction) => cleanup += cleanupFunction
|
||||
/**
|
||||
* There are two cases here.
|
||||
* The first handles TestArguments in the project file, which
|
||||
* might have a TestFramework specified.
|
||||
* The second handles arguments to be applied to all test frameworks.
|
||||
* -- arguments from the project file that didnt have a framework specified
|
||||
* -- command line arguments (ex: test-only someClass -- someArg)
|
||||
* (currently, command line args must be passed to all frameworks)
|
||||
*/
|
||||
case TestArgument(Some(framework), args) => frameworkArgs(framework) ++= args
|
||||
case TestArgument(None, args) => frameworks.foreach { framework => frameworkArgs(framework) ++= args.toList }
|
||||
}
|
||||
}
|
||||
|
||||
if(excludeTestsSet.size > 0 && log.atLevel(Level.Debug))
|
||||
{
|
||||
log.debug("Excluding tests: ")
|
||||
excludeTestsSet.foreach(test => log.debug("\t" + test))
|
||||
}
|
||||
def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.testClassName) && testFilters.forall(filter => filter(test.testClassName))
|
||||
val tests = HashSet.empty[TestDefinition] ++ analysis.allTests.filter(includeTest)
|
||||
TestFramework.testTasks(frameworks, classpath.get, buildScalaInstance.loader, tests.toSeq, log,
|
||||
testListeners.readOnly, false, setup.readOnly, cleanup.readOnly, testArgsByFramework)
|
||||
}
|
||||
private def flatten[T](i: Iterable[Iterable[T]]) = i.flatMap(x => x)
|
||||
|
||||
protected def testQuickMethod(testAnalysis: CompileAnalysis, options: => Seq[TestOption])(toRun: (Seq[TestOption]) => Task) = {
|
||||
val analysis = testAnalysis.allTests.map(_.testClassName).toList
|
||||
multiTask(analysis) { (args, includeFunction) =>
|
||||
toRun(TestArgument(args:_*) :: TestFilter(includeFunction) :: options.toList)
|
||||
}
|
||||
}
|
||||
|
||||
protected final def maximumErrors[T <: ActionOption](options: Seq[T]) =
|
||||
(for( MaxCompileErrors(maxErrors) <- options) yield maxErrors).firstOption.getOrElse(DefaultMaximumCompileErrors)
|
||||
}
|
||||
|
||||
trait WebScalaProject extends ScalaProject
|
||||
{
|
||||
protected def packageWarAction(stagedWarPath: Path, ignore: PathFinder, outputWarPath: => Path, options: => Seq[PackageOption]): Task =
|
||||
packageTask(descendents(stagedWarPath ##, "*") --- ignore, outputWarPath, options)
|
||||
|
||||
@deprecated protected def prepareWebappTask(webappContents: PathFinder, warPath: => Path, classpath: PathFinder, extraJars: => Iterable[File]): Task =
|
||||
prepareWebappTask(webappContents, warPath, classpath, Path.lazyPathFinder(extraJars.map(Path.fromFile)))
|
||||
protected def prepareWebappTask(webappContents: PathFinder, warPath: => Path, classpath: PathFinder, extraJars: PathFinder): Task =
|
||||
prepareWebappTask(webappContents, warPath, classpath, extraJars, Path.emptyPathFinder)
|
||||
protected def prepareWebappTask(webappContents: PathFinder, warPath: => Path, classpath: PathFinder, extraJars: PathFinder, ignore: PathFinder): Task =
|
||||
task
|
||||
{
|
||||
val webInfPath = warPath / "WEB-INF"
|
||||
val webLibDirectory = webInfPath / "lib"
|
||||
val classesTargetDirectory = webInfPath / "classes"
|
||||
|
||||
val (libs, directories) = classpath.get.toList.partition(ClasspathUtilities.isArchive)
|
||||
val classesAndResources = descendents(Path.lazyPathFinder(directories) ##, "*")
|
||||
if(log.atLevel(Level.Debug))
|
||||
directories.foreach(d => log.debug(" Copying the contents of directory " + d + " to " + classesTargetDirectory))
|
||||
|
||||
import FileUtilities.{copy, copyFlat, copyFilesFlat, clean}
|
||||
(copy(webappContents.get, warPath, log).right flatMap { copiedWebapp =>
|
||||
copy(classesAndResources.get, classesTargetDirectory, log).right flatMap { copiedClasses =>
|
||||
copyFlat(libs, webLibDirectory, log).right flatMap { copiedLibs =>
|
||||
copyFilesFlat(extraJars.get.map(_.asFile), webLibDirectory, log).right flatMap { copiedExtraLibs =>
|
||||
{
|
||||
val toRemove = scala.collection.mutable.HashSet(((warPath ** "*") --- ignore).get.toSeq : _*)
|
||||
toRemove --= copiedWebapp
|
||||
toRemove --= copiedClasses
|
||||
toRemove --= copiedLibs
|
||||
toRemove --= copiedExtraLibs
|
||||
val (directories, files) = toRemove.toList.partition(_.isDirectory)
|
||||
if(log.atLevel(Level.Debug))
|
||||
files.foreach(r => log.debug("Pruning file " + r))
|
||||
val result =
|
||||
clean(files, true, log) orElse
|
||||
{
|
||||
val emptyDirectories = directories.filter(directory => directory.asFile.listFiles.isEmpty)
|
||||
if(log.atLevel(Level.Debug))
|
||||
emptyDirectories.foreach(r => log.debug("Pruning directory " + r))
|
||||
clean(emptyDirectories, true, log)
|
||||
}
|
||||
result.toLeft(())
|
||||
}
|
||||
}}}}).left.toOption
|
||||
}
|
||||
def jettyRunTask(jettyRun: JettyRunner) = task { jettyRun() }
|
||||
def jettyStopTask(jettyRun: JettyRunner) = task { jettyRun.stop(); None }
|
||||
}
|
||||
object ScalaProject
|
||||
{
|
||||
val DefaultMaximumCompileErrors = 100
|
||||
val AnalysisDirectoryName = "analysis"
|
||||
val MainClassKey = "Main-Class"
|
||||
val TestResourcesProperty = "sbt.test.resources"
|
||||
def optionsAsString(options: Seq[ScalaProject#CompileOption]) = options.map(_.asString).filter(!_.isEmpty)
|
||||
def javaOptionsAsString(options: Seq[ScalaProject#JavaCompileOption]) = options.map(_.asString)
|
||||
}
|
||||
trait MultiTaskProject extends Project
|
||||
{
|
||||
def multiTask(allTests: => List[String])(run: (Seq[String], String => Boolean) => Task): MethodTask = {
|
||||
|
||||
task { tests =>
|
||||
|
||||
val (testNames, separatorAndArgs) = tests.toList.span(! _.startsWith("--"))
|
||||
val testArgs = separatorAndArgs.drop(1)
|
||||
|
||||
def filterInclude =
|
||||
{
|
||||
val (exactFilters, testFilters) = testNames.toList.map(GlobFilter.apply).partition(_.isInstanceOf[ExactFilter])
|
||||
val includeTests = exactFilters.map(_.asInstanceOf[ExactFilter].matchName)
|
||||
val toCheck = scala.collection.mutable.HashSet(includeTests: _*)
|
||||
toCheck --= allTests
|
||||
|
||||
if(!toCheck.isEmpty && log.atLevel(Level.Warn))
|
||||
{
|
||||
log.warn("Test(s) not found:")
|
||||
toCheck.foreach(test => log.warn("\t" + test))
|
||||
}
|
||||
val includeTestsSet = Set(includeTests: _*)
|
||||
(test: String) => includeTestsSet.contains(test) || testFilters.exists(_.accept(test))
|
||||
}
|
||||
|
||||
val includeFunction =
|
||||
if(testNames.isEmpty)
|
||||
(test: String) => true
|
||||
else
|
||||
filterInclude
|
||||
run(testArgs, includeFunction)
|
||||
} completeWith allTests
|
||||
}
|
||||
}
|
||||
trait ExecProject extends Project
|
||||
{
|
||||
def execOut(p: => ProcessBuilder) =
|
||||
task
|
||||
{
|
||||
val exitValue = (p !)
|
||||
if(exitValue == 0)
|
||||
None
|
||||
else
|
||||
Some("Nonzero exit value: " + exitValue)
|
||||
}
|
||||
def execTask(buildCommand: => ProcessBuilder): Task =
|
||||
task
|
||||
{
|
||||
val command = buildCommand
|
||||
log.debug("Executing command " + command)
|
||||
val exitValue = command.run(log).exitValue() // don't buffer output
|
||||
if(exitValue == 0)
|
||||
None
|
||||
else
|
||||
Some("Nonzero exit value: " + exitValue)
|
||||
}
|
||||
}
|
||||
trait Exec extends SimpleScalaProject
|
||||
{
|
||||
lazy val sh = task { args => execOut { Process("sh" :: "-c" :: args.mkString(" ") :: Nil) } }
|
||||
lazy val exec = task { args => execOut { Process(args) } }
|
||||
}
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mikko Peltonen, Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
object SourceModificationWatch
|
||||
{
|
||||
def watchUntil(project: Project, pollDelaySec: Int)(terminationCondition: => Boolean)(onSourcesModified: => Unit)
|
||||
{
|
||||
def sourceFiles: Iterable[java.io.File] = sourcesFinder.get.map(_.asFile)
|
||||
def sourcesFinder: PathFinder = (Path.emptyPathFinder /: project.topologicalSort)(_ +++ _.watchPaths)
|
||||
def loop(lastCallbackCallTime: Long, previousFileCount: Int)
|
||||
{
|
||||
val (lastModifiedTime, fileCount) = sourceFiles.foldLeft((0L, 0)){(acc, file) => (Math.max(acc._1, file.lastModified), acc._2 + 1)}
|
||||
val newCallbackCallTime =
|
||||
// check if sources are modified
|
||||
if (lastModifiedTime > lastCallbackCallTime || previousFileCount != fileCount)
|
||||
{
|
||||
val now = System.currentTimeMillis
|
||||
onSourcesModified
|
||||
now
|
||||
}
|
||||
else
|
||||
lastCallbackCallTime
|
||||
Thread.sleep(pollDelaySec * 1000)
|
||||
if(!terminationCondition)
|
||||
loop(newCallbackCallTime, fileCount)
|
||||
}
|
||||
loop(0L, 0)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Tony Sloane
|
||||
*/
|
||||
package sbt
|
||||
|
||||
object StackTrace
|
||||
{
|
||||
def isSbtClass(name: String) = name.startsWith("sbt") || name.startsWith("xsbt")
|
||||
/**
|
||||
* Return a printable representation of the stack trace associated
|
||||
* with t. Information about t and its Throwable causes is included.
|
||||
* The number of lines to be included for each Throwable is configured
|
||||
* via d which should be greater than or equal to zero. If d is zero,
|
||||
* then all elements are included up to (but not including) the first
|
||||
* element that comes from sbt. If d is greater than zero, then up to
|
||||
* that many lines are included, where the line for the Throwable is
|
||||
* counted plus one line for each stack element. Less lines will be
|
||||
* included if there are not enough stack elements.
|
||||
*/
|
||||
def trimmed(t : Throwable, d : Int) : String = {
|
||||
require(d >= 0)
|
||||
val b = new StringBuilder ()
|
||||
|
||||
def appendStackTrace (t : Throwable, first : Boolean) {
|
||||
|
||||
val include : StackTraceElement => Boolean =
|
||||
if (d == 0)
|
||||
element => !isSbtClass(element.getClassName)
|
||||
else {
|
||||
var count = d - 1
|
||||
(_ => { count -= 1; count >= 0 })
|
||||
}
|
||||
|
||||
def appendElement (e : StackTraceElement) {
|
||||
b.append ("\tat ")
|
||||
b.append (e)
|
||||
b.append ('\n')
|
||||
}
|
||||
|
||||
if (!first)
|
||||
b.append ("Caused by: ")
|
||||
b.append (t)
|
||||
b.append ('\n')
|
||||
|
||||
val els = t.getStackTrace ()
|
||||
var i = 0
|
||||
while ((i < els.size) && include (els (i))) {
|
||||
appendElement (els (i))
|
||||
i += 1
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
appendStackTrace (t, true)
|
||||
var c = t
|
||||
while (c.getCause () != null) {
|
||||
c = c.getCause ()
|
||||
appendStackTrace (c, false)
|
||||
}
|
||||
b.toString ()
|
||||
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 David MacIver, Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import TaskManager._
|
||||
|
||||
trait Described extends NotNull
|
||||
{
|
||||
def description: Option[String]
|
||||
}
|
||||
trait TaskManager{
|
||||
type ManagerType >: this.type <: TaskManager
|
||||
type ManagedTask >: Task <: TaskManager#Task with Dag[ManagedTask]
|
||||
/** Creates a task that executes the given action when invoked.*/
|
||||
def task(action : => Option[String]) = new Task(None, Nil, false, action)
|
||||
/** An interactive task is one that is not executed across all dependent projects when
|
||||
* it is called directly. The dependencies of the task are still invoked across all dependent
|
||||
* projects, however. */
|
||||
def interactiveTask(action: => Option[String]) = new Task(None, Nil, true, action)
|
||||
/** Creates a method task that executes the given action when invoked. */
|
||||
def task(action: Array[String] => ManagedTask) = new MethodTask(None, action, Nil)
|
||||
|
||||
def taskName(t: Task): Option[String]
|
||||
final def taskNameString(task: Task): String = taskName(task).getOrElse(UnnamedName)
|
||||
|
||||
/** A method task is an action that has parameters. Note that it is not a Task, though,
|
||||
* because it requires arguments to perform its work. It therefore cannot be a dependency of
|
||||
* a Task..*/
|
||||
final class MethodTask(val description: Option[String], action: Array[String] => ManagedTask, getCompletions: => Seq[String]) extends Described
|
||||
{
|
||||
/** Creates a new method task, identical to this method task, except with thE[String]e given description.*/
|
||||
def describedAs(description : String) = new MethodTask(Some(description), action, getCompletions)
|
||||
/** Invokes this method task with the given arguments.*/
|
||||
def apply(arguments: Array[String]) = action(arguments)
|
||||
def manager: ManagerType = TaskManager.this
|
||||
def completeWith(add: => Seq[String]) = new MethodTask(description, action, add)
|
||||
def completions = getCompletions
|
||||
}
|
||||
|
||||
sealed class Task(val explicitName: Option[String], val description : Option[String], val dependencies : List[ManagedTask],
|
||||
val interactive: Boolean, action : => Option[String]) extends Dag[ManagedTask] with Described
|
||||
{
|
||||
def this(description : Option[String], dependencies : List[ManagedTask], interactive: Boolean, action : => Option[String]) =
|
||||
this(None, description, dependencies, interactive, action)
|
||||
checkTaskDependencies(dependencies)
|
||||
def manager: ManagerType = TaskManager.this
|
||||
def name = explicitName.getOrElse(taskNameString(this))
|
||||
private[sbt] def implicitName = taskName(this)
|
||||
def named(name: String) = construct(Some(name), description,dependencies, interactive, action)
|
||||
override def toString = "Task " + name
|
||||
|
||||
/** Creates a new task, identical to this task, except with the additional dependencies specified.*/
|
||||
def dependsOn(tasks : ManagedTask*) = setDependencies(tasks.toList ::: dependencies)
|
||||
private[sbt] def setDependencies(dependencyList: List[ManagedTask]) =
|
||||
{
|
||||
checkTaskDependencies(dependencyList)
|
||||
construct(explicitName, description, dependencyList, interactive, action)
|
||||
}
|
||||
/** Creates a new task, identical to this task, except with the given description.*/
|
||||
def describedAs(description : String) = construct(explicitName, Some(description), dependencies, interactive, action);
|
||||
private[sbt] def invoke = action;
|
||||
|
||||
final def setInteractive = construct(explicitName, description, dependencies, true, action)
|
||||
final def run = runSequentially(topologicalSort)
|
||||
final def runDependenciesOnly = runSequentially(topologicalSort.dropRight(1))
|
||||
private def runSequentially(tasks: List[ManagedTask]) = Control.lazyFold(tasks)(_.invoke)
|
||||
|
||||
def &&(that : Task) =
|
||||
construct(explicitName, None, dependencies ::: that.dependencies, interactive || that.interactive, this.invoke.orElse(that.invoke))
|
||||
|
||||
protected def construct(explicitName: Option[String], description: Option[String], dependencies: List[ManagedTask], interactive: Boolean,
|
||||
action : => Option[String]): Task = new Task(explicitName, description, dependencies, interactive, action)
|
||||
}
|
||||
final class CompoundTask private (explicitName: Option[String], description : Option[String], dependencies : List[ManagedTask], interactive: Boolean,
|
||||
action : => Option[String], createWork: => SubWork[Project#Task]) extends Task(description, dependencies, interactive, action)
|
||||
with CompoundWork[Project#Task]
|
||||
{
|
||||
def this(createWork: => SubWork[Project#Task]) = this(None, None, Nil, false, None, createWork)
|
||||
override protected def construct(explicitName: Option[String], description: Option[String], dependencies: List[ManagedTask],
|
||||
interactive: Boolean, action : => Option[String]) = new CompoundTask(explicitName, description, dependencies, interactive, action, createWork)
|
||||
def work = createWork
|
||||
}
|
||||
def dynamic(createTask: => Project#Task) = new CompoundTask(SubWork[Project#Task](checkDynamic(createTask)))
|
||||
@deprecated def compoundTask(createTask: => Project#Task) = dynamic(createTask)
|
||||
/** Verifies that the given dynamically created task does not depend on any statically defined tasks.
|
||||
* Returns the task if it is valid.*/
|
||||
private def checkDynamic(task: Project#Task) =
|
||||
{
|
||||
for(t <- task.topologicalSort if !(t eq task); staticName <- t.implicitName)
|
||||
error("Dynamic task " + task.name + " depends on static task " + staticName)
|
||||
task
|
||||
}
|
||||
private def checkTaskDependencies(dependencyList: List[ManagedTask])
|
||||
{
|
||||
val nullDependencyIndex = dependencyList.findIndexOf(_ == null)
|
||||
require(nullDependencyIndex < 0, "Dependency (at index " + nullDependencyIndex + ") is null. This may be an initialization issue or a circular dependency.")
|
||||
val interactiveDependencyIndex = dependencyList.findIndexOf(_.interactive)
|
||||
require(interactiveDependencyIndex < 0, "Dependency (at index " + interactiveDependencyIndex + ") is interactive. Interactive tasks cannot be dependencies.")
|
||||
}
|
||||
}
|
||||
object TaskManager
|
||||
{
|
||||
val UnnamedName = "<anonymous>"
|
||||
}
|
||||
|
|
@ -0,0 +1,212 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Steven Blundy, Mark Harrah, Josh Cough
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.net.URLClassLoader
|
||||
import org.scalatools.testing.{TestFingerprint => Fingerprint, Framework,
|
||||
Runner, Logger=>TLogger, EventHandler, Event}
|
||||
|
||||
object Result extends Enumeration
|
||||
{
|
||||
val Error, Passed, Failed = Value
|
||||
}
|
||||
|
||||
object TestFrameworks
|
||||
{
|
||||
val ScalaCheck = new TestFramework("org.scalacheck.ScalaCheckFramework")
|
||||
val ScalaTest = new TestFramework("org.scalatest.tools.ScalaTestFramework")
|
||||
val Specs = new TestFramework("org.specs.runner.SpecsFramework")
|
||||
val JUnit = new TestFramework("com.novocode.junit.JUnitFramework")
|
||||
// These are compatibility frameworks included in the 'test-compat' library
|
||||
val ScalaCheckCompat = new TestFramework("sbt.impl.ScalaCheckFramework")
|
||||
val ScalaTestCompat = new TestFramework("sbt.impl.ScalaTestFramework")
|
||||
val SpecsCompat = new TestFramework("sbt.impl.SpecsFramework")
|
||||
}
|
||||
|
||||
class TestFramework(val implClassName: String) extends NotNull
|
||||
{
|
||||
def create(loader: ClassLoader, log: Logger): Option[Framework] =
|
||||
{
|
||||
try { Some(Class.forName(implClassName, true, loader).newInstance.asInstanceOf[Framework]) }
|
||||
catch { case e: ClassNotFoundException => log.debug("Framework implementation '" + implClassName + "' not present."); None }
|
||||
}
|
||||
}
|
||||
|
||||
final class TestRunner(framework: Framework, loader: ClassLoader, listeners: Seq[TestReportListener], log: Logger) extends NotNull
|
||||
{
|
||||
private[this] val delegate = framework.testRunner(loader, listeners.flatMap(_.contentLogger).toArray)
|
||||
final def run(testDefinition: TestDefinition, args: Seq[String]): Result.Value =
|
||||
{
|
||||
log.debug("Running " + testDefinition + " with arguments " + args.mkString(", "))
|
||||
val testClass = testDefinition.testClassName
|
||||
def runTest() =
|
||||
{
|
||||
// here we get the results! here is where we'd pass in the event listener
|
||||
val results = new scala.collection.mutable.ListBuffer[Event]
|
||||
val handler = new EventHandler { def handle(e:Event){ results += e } }
|
||||
delegate.run(testClass, testDefinition, handler, args.toArray)
|
||||
val event = TestEvent(results)
|
||||
safeListenersCall(_.testEvent( event ))
|
||||
event.result
|
||||
}
|
||||
|
||||
safeListenersCall(_.startGroup(testClass))
|
||||
try
|
||||
{
|
||||
val result = runTest().getOrElse(Result.Passed)
|
||||
safeListenersCall(_.endGroup(testClass, result))
|
||||
result
|
||||
}
|
||||
catch
|
||||
{
|
||||
case e =>
|
||||
safeListenersCall(_.endGroup(testClass, e))
|
||||
Result.Error
|
||||
}
|
||||
}
|
||||
|
||||
protected def safeListenersCall(call: (TestReportListener) => Unit): Unit =
|
||||
TestFramework.safeForeach(listeners, log)(call)
|
||||
}
|
||||
|
||||
final class NamedTestTask(val name: String, action: => Option[String]) extends NotNull { def run() = action }
|
||||
|
||||
/** Builds the setup / run / cleanup task lists used to execute tests discovered for the
 *  registered test frameworks, and constructs the class loader tests run under. */
object TestFramework
{
//	def runTests(frameworks: Seq[TestFramework], classpath: Iterable[Path], scalaLoader: ClassLoader,
//		tests: Seq[TestDefinition], testArgs: Seq[String], log: Logger, listeners: Seq[TestReportListener]) =
//	{
//		val (start, runTests, end) = testTasks(frameworks, classpath, scalaLoader, tests, log, listeners, true, Nil, Nil, Nil)
//		def run(tasks: Iterable[NamedTestTask]) = tasks.foreach(_.run())
//		run(start)
//		run(runTests)
//		run(end)
//	}

	// package prefixes hidden from the test class loader so that the compiler's own jars
	// do not leak into the tests' classpath
	private val ScalaCompilerJarPackages = "scala.tools.nsc." :: "jline." :: "ch.epfl.lamp." :: Nil

	// names for the synthetic start/finish tasks that bracket the individual test tasks
	private val TestStartName = "test-start"
	private val TestFinishName = "test-finish"

	/** Applies `f` to each element, trapping and logging any exception so one failing
	 *  element does not prevent the rest from being processed (via Control.trapAndLog). */
	private[sbt] def safeForeach[T](it: Iterable[T], log: Logger)(f: T => Unit): Unit =
		it.foreach(i => Control.trapAndLog(log){ f(i) } )

	import scala.collection.{immutable, Map, Set}

	/** Produces the three phases of a test run as (setup tasks, test tasks, cleanup tasks).
	 *  `endErrorsEnabled` controls whether failures/errors surface as an error message from
	 *  the final task.  When no tests map to any framework, only trivial start/finish tasks
	 *  are returned. */
	def testTasks(frameworks: Seq[TestFramework],
		classpath: Iterable[Path],
		scalaLoader: ClassLoader,
		tests: Seq[TestDefinition],
		log: Logger,
		listeners: Seq[TestReportListener],
		endErrorsEnabled: Boolean,
		setup: Iterable[() => Option[String]],
		cleanup: Iterable[() => Option[String]],
		testArgsByFramework: Map[TestFramework, Seq[String]]):
			(Iterable[NamedTestTask], Iterable[NamedTestTask], Iterable[NamedTestTask]) =
	{
		val loader = createTestLoader(classpath, scalaLoader)
		// instantiate each framework in the test loader, pairing it with its arguments;
		// frameworks that fail to load (create returns nothing) are dropped
		val arguments = immutable.Map() ++
			( for(framework <- frameworks; created <- framework.create(loader, log)) yield
				(created, testArgsByFramework.getOrElse(framework, Nil)) )

		val mappedTests = testMap(arguments.keys.toList, tests, arguments)
		if(mappedTests.isEmpty)
			(new NamedTestTask(TestStartName, None) :: Nil, Nil, new NamedTestTask(TestFinishName, { log.info("No tests to run."); None }) :: Nil )
		else
			createTestTasks(loader, mappedTests, log, listeners, endErrorsEnabled, setup, cleanup)
	}

	/** Assigns each test definition to the first framework whose fingerprints match it
	 *  (same isModule flag and superclass name).  Each test is assigned at most once. */
	private def testMap(frameworks: Seq[Framework], tests: Seq[TestDefinition], args: Map[Framework, Seq[String]]):
		immutable.Map[Framework, (Set[TestDefinition], Seq[String])] =
	{
		import scala.collection.mutable.{HashMap, HashSet, Set}
		val map = new HashMap[Framework, Set[TestDefinition]]
		def assignTests(): Unit =
		{
			for(test <- tests if !map.values.exists(_.contains(test)))
			{
				def isTestForFramework(framework: Framework) = framework.tests.exists(matches)
				def matches(fingerprint: Fingerprint) =
					(fingerprint.isModule == test.isModule) &&
					fingerprint.superClassName == test.superClassName

				for(framework <- frameworks.find(isTestForFramework))
					map.getOrElseUpdate(framework, new HashSet[TestDefinition]) += test
			}
		}
		if(!frameworks.isEmpty)
			assignTests()
		(immutable.Map() ++ map) transform { (framework, tests) => (tests, args(framework)) }
	}

	/** Wraps each setup/cleanup thunk in a NamedTestTask numbered from 1. */
	private def createTasks(work: Iterable[() => Option[String]], baseName: String) =
		work.toList.zipWithIndex.map{ case (work, index) => new NamedTestTask(baseName + " " + (index+1), work()) }

	/** Builds the actual task lists: a start task that notifies TestsListeners, one task per
	 *  test definition, and an end task that reports the aggregate result. */
	private def createTestTasks(loader: ClassLoader, tests: Map[Framework, (Set[TestDefinition], Seq[String])], log: Logger,
		listeners: Seq[TestReportListener], endErrorsEnabled: Boolean, setup: Iterable[() => Option[String]],
		cleanup: Iterable[() => Option[String]]) =
	{
		val testsListeners = listeners.filter(_.isInstanceOf[TestsListener]).map(_.asInstanceOf[TestsListener])
		def foreachListenerSafe(f: TestsListener => Unit): Unit = safeForeach(testsListeners, log)(f)

		import Result.{Error,Passed,Failed}
		// aggregate result of the whole run; once Error, stays Error (worst result wins)
		object result
		{
			private[this] var value: Result.Value = Passed
			def apply() = synchronized { value }
			def update(v: Result.Value): Unit = synchronized { if(value != Error) value = v }
		}
		val startTask = new NamedTestTask(TestStartName, {foreachListenerSafe(_.doInit); None}) :: createTasks(setup, "Test setup")
		val testTasks =
			tests flatMap { case (framework, (testDefinitions, testArgs)) =>

				val runner = new TestRunner(framework, loader, listeners, log)
				for(testDefinition <- testDefinitions) yield
				{
					// runs one test with the test class loader installed as the thread's
					// context loader, restoring the previous loader afterwards
					def runTest() =
					{
						val oldLoader = Thread.currentThread.getContextClassLoader
						Thread.currentThread.setContextClassLoader(loader)
						try {
							runner.run(testDefinition, testArgs) match
							{
								case Error => result() = Error; Some("ERROR occurred during testing.")
								case Failed => result() = Failed; Some("Test FAILED")
								case _ => None
							}
						}
						finally {
							Thread.currentThread.setContextClassLoader(oldLoader)
						}
					}
					// NamedTestTask's action is by-name, so runTest() executes only when the task runs
					new NamedTestTask(testDefinition.testClassName, runTest())
				}
			}
		// notifies listeners of completion and converts the aggregate result into the
		// final task's optional error message
		def end() =
		{
			foreachListenerSafe(_.doComplete(result()))
			result() match
			{
				case Error => if(endErrorsEnabled) Some("ERROR occurred during testing.") else None
				case Failed => if(endErrorsEnabled) Some("One or more tests FAILED.") else None
				case Passed =>
				{
					log.info(" ")
					log.info("All tests PASSED.")
					None
				}
			}
		}
		val endTask = new NamedTestTask(TestFinishName, end() ) :: createTasks(cleanup, "Test cleanup")
		(startTask, testTasks, endTask)
	}

	/** Creates the loader tests run in: the compiler jars are filtered out of `scalaLoader`,
	 *  the test-interface (org.scalatools.testing) classes always come from sbt's own loader
	 *  (so framework and sbt share those types), and the project classpath sits on top. */
	def createTestLoader(classpath: Iterable[Path], scalaLoader: ClassLoader): ClassLoader =
	{
		val filterCompilerLoader = new FilteredLoader(scalaLoader, ScalaCompilerJarPackages)
		val interfaceFilter = (name: String) => name.startsWith("org.scalatools.testing.")
		val notInterfaceFilter = (name: String) => !interfaceFilter(name)
		val dual = new xsbt.DualLoader(filterCompilerLoader, notInterfaceFilter, x => true, getClass.getClassLoader, interfaceFilter, x => false)
		new URLClassLoader(classpath.map(_.asURL).toSeq.toArray, dual)
	}
}
|
||||
|
|
@ -0,0 +1,110 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Steven Blundy, Mark Harrah
|
||||
*/
|
||||
|
||||
package sbt
|
||||
|
||||
import org.scalatools.testing.{Logger => TLogger, Event => TEvent, Result => TResult}
|
||||
|
||||
/** Receives notifications as tests execute.  Implementations are registered with the test
 *  framework and called back for each group (typically one test class) and each test event. */
trait TestReportListener
{
	/** called for each class or equivalent grouping */
	def startGroup(name: String)
	/** called for each test method or equivalent */
	def testEvent(event: TestEvent)
	/** called if there was an error during test: the group `name` aborted with `t` */
	def endGroup(name: String, t: Throwable)
	/** called if test completed, with the group's overall `result` */
	def endGroup(name: String, result: Result.Value)
	/** Used by the test framework for logging test results; None (the default) means
	 *  this listener provides no logger */
	def contentLogger: Option[TLogger] = None
}
|
||||
|
||||
/** A TestReportListener additionally notified when the test run as a whole starts and ends. */
trait TestsListener extends TestReportListener
{
	/** called once, at beginning of the test run. */
	def doInit
	/** called once, at end of the test run, with its overall result. */
	def doComplete(finalResult: Result.Value)
}
|
||||
|
||||
/** The outcome of running a test or test group, optionally carrying the raw
 *  test-interface events that produced it. */
abstract class TestEvent extends NotNull
{
	/** The aggregate result, if one could be determined. */
	def result: Option[Result.Value]
	/** The individual framework-reported events backing this result (empty by default). */
	def detail: Seq[TEvent] = Nil
}
|
||||
/** Factory for collapsing raw test-interface events into a single TestEvent. */
object TestEvent
{
	/** Folds `events` into one TestEvent whose result is the worst individual outcome:
	 *  any Error dominates, then any Failure, otherwise Passed.  The raw events are
	 *  preserved as `detail`. */
	def apply(events: Seq[TEvent]): TestEvent =
	{
		def worse(current: Result.Value, event: TEvent): Result.Value =
			if(current == Result.Error || event.result == TResult.Error) Result.Error
			else if(current == Result.Failed || event.result == TResult.Failure) Result.Failed
			else Result.Passed

		val overall = events.foldLeft[Result.Value](Result.Passed)(worse)
		new TestEvent {
			val result = Some(overall)
			override val detail = events
		}
	}
}
|
||||
|
||||
/** Creates test-interface loggers backed by an sbt Logger. */
object TestLogger
{
	/** A TestLogger listener whose output is forwarded to `logger`. */
	def apply(logger: sbt.Logger): TestLogger = new TestLogger(wrap(logger))
	/** Adapts an sbt Logger to the org.scalatools.testing.Logger interface. */
	def wrap(logger: sbt.Logger): TLogger =
		new TLogger
		{
			// single forwarding point into the underlying sbt logger
			private def forward(level: Level.Value, message: String) = logger.log(level, message)
			def error(s: String) = forward(Level.Error, s)
			def warn(s: String) = forward(Level.Warn, s)
			def info(s: String) = forward(Level.Info, s)
			def debug(s: String) = forward(Level.Debug, s)
			def trace(t: Throwable) = logger.trace(t)
			def ansiCodesSupported() = logger.ansiCodesSupported
		}
}
|
||||
/** A TestsListener that tallies test events by outcome and logs per-group failures and
 *  a final summary line through the given test-interface logger. */
class TestLogger(val log: TLogger) extends TestsListener
{
	// running tallies for the current run; reset by doInit
	protected var skipped, errors, passed, failures = 0

	def startGroup(name: String) {}
	/** Counts each raw event attached to `event` toward the totals. */
	def testEvent(event: TestEvent): Unit = event.detail.foreach(count)
	/** Called when a group could not be run at all: log the cause and an error message. */
	def endGroup(name: String, t: Throwable)
	{
		log.trace(t)
		log.error("Could not run test " + name + ": " + t.toString)
	}
	def endGroup(name: String, result: Result.Value) {}
	/** Adds a single test-interface event to the appropriate tally. */
	protected def count(event: TEvent): Unit =
	{
		event.result match
		{
			case TResult.Error => errors +=1
			case TResult.Success => passed +=1
			case TResult.Failure => failures +=1
			case TResult.Skipped => skipped += 1
		}
	}
	/** called once, at beginning: resets all counters for a fresh run. */
	def doInit
	{
		failures = 0
		errors = 0
		passed = 0
		skipped = 0
	}
	/** called once, at end: logs the overall result followed by the counts. */
	def doComplete(finalResult: Result.Value): Unit =
	{
		val totalCount = failures + errors + skipped + passed
		val postfix = ": Total " + totalCount + ", Failed " + failures + ", Errors " + errors + ", Passed " + passed + ", Skipped " + skipped
		finalResult match
		{
			// `postfix` already begins with ": ", so labels must not append their own colon.
			// (Previously "Passed: " / "Failed: " produced doubled separators like
			// "Passed: : Total ...", inconsistent with the Error branch.)
			case Result.Error => log.error("Error" + postfix)
			case Result.Passed => log.info("Passed" + postfix)
			case Result.Failed => log.error("Failed" + postfix)
		}
	}
	override def contentLogger: Option[TLogger] = Some(log)
}
|
||||
|
|
@ -0,0 +1,249 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 Mark Harrah
|
||||
*
|
||||
* Partially based on exit trapping in Nailgun by Pete Kirkham,
|
||||
* copyright 2004, Martian Software, Inc
|
||||
* licensed under Apache 2.0 License.
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import scala.collection.Set
|
||||
import scala.reflect.Manifest
|
||||
|
||||
/** This provides functionality to catch System.exit calls to prevent the JVM from terminating.
 * This is useful for executing user code that may call System.exit, but actually exiting is
 * undesirable. This file handles the call to exit by disposing all top-level windows and interrupting
 * all user started threads. It does not stop the threads and does not call shutdown hooks. It is
 * therefore inappropriate to use this with code that requires shutdown hooks or creates threads that
 * do not terminate. This category of code should only be called by forking the JVM. */
object TrapExit
{
	/** Executes the given thunk in a context where System.exit(code) throws
	* a custom SecurityException, which is then caught and the exit code returned.
	* Otherwise, 0 is returned. No other exceptions are handled by this method.*/
	def apply(execute: => Unit, log: Logger): Int =
	{
		log.debug("Starting sandboxed run...")

		/** Take a snapshot of the threads that existed before execution in order to determine
		* the threads that were created by 'execute'.*/
		val originalThreads = allThreads
		val code = new ExitCode
		def executeMain =
			try { execute }
			catch
			{
				// let the trapped-exit marker propagate untouched so ExitHandler can read the code
				case e: TrapExitSecurityException => throw e
				// NOTE(review): this bare pattern catches all Throwables, including fatal
				// errors such as OutOfMemoryError (pre-NonFatal-era Scala idiom)
				case x =>
					code.set(1) //exceptions in the main thread cause the exit code to be 1
					throw x
			}
		// user code runs in its own thread group so its uncaught exceptions reach ExitHandler
		val customThreadGroup = new ExitThreadGroup(new ExitHandler(Thread.getDefaultUncaughtExceptionHandler, originalThreads, code, log))
		val executionThread = new Thread(customThreadGroup, "run-main") { override def run() { executeMain } }

		val originalSecurityManager = System.getSecurityManager
		try
		{
			val newSecurityManager = new TrapExitSecurityManager(originalSecurityManager, customThreadGroup)
			System.setSecurityManager(newSecurityManager)

			executionThread.start()

			log.debug("Waiting for threads to exit or System.exit to be called.")
			waitForExit(originalThreads, log)
			log.debug("Interrupting remaining threads (should be all daemons).")
			interruptAll(originalThreads) // should only be daemon threads left now
			log.debug("Sandboxed run complete..")
			code.value.getOrElse(0)
		}
		finally { System.setSecurityManager(originalSecurityManager) }
	}
	// wait for all non-daemon threads to terminate
	// (re-scans until a pass finds only daemon threads, since joined threads may have spawned new ones)
	private def waitForExit(originalThreads: Set[Thread], log: Logger)
	{
		var daemonsOnly = true
		processThreads(originalThreads, thread =>
			if(!thread.isDaemon)
			{
				daemonsOnly = false
				waitOnThread(thread, log)
			}
		)
		if(!daemonsOnly)
			waitForExit(originalThreads, log)
	}
	/** Waits for the given thread to exit. */
	private def waitOnThread(thread: Thread, log: Logger)
	{
		log.debug("Waiting for thread " + thread.getName + " to exit")
		thread.join
		log.debug("\tThread " + thread.getName + " exited.")
	}
	/** Returns the exit code of the System.exit that caused the given Exception, or rethrows the exception
	* if its cause was not calling System.exit.*/
	private def exitCode(e: Throwable) =
		withCause[TrapExitSecurityException, Int](e)
		{exited => exited.exitCode}
		{other => throw other}
	/** Recurses into the causes of the given exception looking for a cause of type CauseType. If one is found, `withType` is called with that cause.
	* If not, `notType` is called with the root cause.*/
	private def withCause[CauseType <: Throwable, T](e: Throwable)(withType: CauseType => T)(notType: Throwable => T)(implicit mf: Manifest[CauseType]): T =
	{
		val clazz = mf.erasure
		if(clazz.isInstance(e))
			withType(e.asInstanceOf[CauseType])
		else
		{
			val cause = e.getCause
			if(cause == null)
				notType(e)
			else
				withCause(cause)(withType)(notType)(mf)
		}
	}

	/** Returns all threads that are not in the 'system' thread group and are not the AWT implementation
	* thread (AWT-XAWT, AWT-Windows, ...)*/
	private def allThreads: Set[Thread] =
	{
		val allThreads = wrap.Wrappers.toList(Thread.getAllStackTraces.keySet)
		val threads = new scala.collection.mutable.HashSet[Thread]
		for(thread <- allThreads if !isSystemThread(thread))
			threads += thread
		threads
	}
	/** Returns true if the given thread is in the 'system' thread group and is an AWT thread other than
	* AWT-EventQueue or AWT-Shutdown.*/
	private def isSystemThread(t: Thread) =
	{
		val name = t.getName
		if(name.startsWith("AWT-"))
			!(name.startsWith("AWT-EventQueue") || name.startsWith("AWT-Shutdown"))
		else
		{
			val group = t.getThreadGroup
			(group != null) && (group.getName == "system")
		}
	}
	/** Calls the provided function for each thread in the system as provided by the
	* allThreads function except those in ignoreThreads.*/
	private def processThreads(ignoreThreads: Set[Thread], process: Thread => Unit)
	{
		allThreads.filter(thread => !ignoreThreads.contains(thread)).foreach(process)
	}
	/** Handles System.exit by disposing all frames and calling interrupt on all user threads */
	private def stopAll(originalThreads: Set[Thread])
	{
		disposeAllFrames()
		interruptAll(originalThreads)
	}
	private def disposeAllFrames()
	{
		val allFrames = java.awt.Frame.getFrames
		if(allFrames.length > 0)
		{
			allFrames.foreach(_.dispose) // dispose all top-level windows, which will cause the AWT-EventQueue-* threads to exit
			Thread.sleep(2000) // AWT Thread doesn't exit immediately, so wait to interrupt it
		}
	}
	// interrupt all threads that appear to have been started by the user
	private def interruptAll(originalThreads: Set[Thread]): Unit =
		processThreads(originalThreads, safeInterrupt)
	// interrupts the given thread, but first replaces the exception handler so that the InterruptedException is not printed
	private def safeInterrupt(thread: Thread)
	{
		if(!thread.getName.startsWith("AWT-"))
		{
			thread.setUncaughtExceptionHandler(new TrapInterrupt(thread.getUncaughtExceptionHandler))
			thread.interrupt
		}
	}
	// an uncaught exception handler that swallows InterruptedExceptions and otherwise defers to originalHandler
	private final class TrapInterrupt(originalHandler: Thread.UncaughtExceptionHandler) extends Thread.UncaughtExceptionHandler
	{
		def uncaughtException(thread: Thread, e: Throwable)
		{
			withCause[InterruptedException, Unit](e)
			{interrupted => ()}
			{other => originalHandler.uncaughtException(thread, e) }
			// restore the original handler so this trap applies only once
			thread.setUncaughtExceptionHandler(originalHandler)
		}
	}
	/** An uncaught exception handler that delegates to the original uncaught exception handler except when
	* the cause was a call to System.exit (which generated a SecurityException)*/
	private final class ExitHandler(originalHandler: Thread.UncaughtExceptionHandler, originalThreads: Set[Thread], codeHolder: ExitCode, log: Logger) extends Thread.UncaughtExceptionHandler
	{
		def uncaughtException(t: Thread, e: Throwable)
		{
			try
			{
				codeHolder.set(exitCode(e)) // will rethrow e if it was not because of a call to System.exit
				stopAll(originalThreads)
			}
			catch
			{
				// not a trapped exit: log the original exception and defer to the prior handler
				case _ =>
					log.trace(e)
					originalHandler.uncaughtException(t, e)
			}
		}
	}
	private final class ExitThreadGroup(handler: Thread.UncaughtExceptionHandler) extends ThreadGroup("trap.exit")
	{
		override def uncaughtException(t: Thread, e: Throwable) = handler.uncaughtException(t, e)
	}
}
|
||||
/** Thread-safe, write-once holder for an exit code: the first code recorded wins and
 *  later calls to `set` are ignored. */
private final class ExitCode extends NotNull
{
	private var code: Option[Int] = None
	/** Records `c` only when no code has been recorded yet. */
	def set(c: Int): Unit = synchronized { if(code.isEmpty) code = Some(c) }
	/** The first recorded code, or None if `set` was never called. */
	def value: Option[Int] = synchronized { code }
}
|
||||
/////// These two classes are based on similar classes in Nailgun
|
||||
/** A custom SecurityManager to disallow System.exit.  An actual exit attempt is converted
 *  into a TrapExitSecurityException carrying the requested status; all other permission
 *  checks are delegated to `delegateManager` when it is non-null.  `group` is returned as
 *  the thread group so new threads join the sandbox's group. */
private final class TrapExitSecurityManager(delegateManager: SecurityManager, group: ThreadGroup) extends SecurityManager
{
	import java.security.Permission
	override def checkExit(status: Int)
	{
		val stack = Thread.currentThread.getStackTrace
		// if the stack is unavailable, conservatively treat this as a real exit and trap it
		if(stack == null || stack.exists(isRealExit))
			throw new TrapExitSecurityException(status)
	}
	/** This ensures that only actual calls to exit are trapped and not just calls to check if exit is allowed.*/
	private def isRealExit(element: StackTraceElement): Boolean =
		element.getClassName == "java.lang.Runtime" && element.getMethodName == "exit"
	override def checkPermission(perm: Permission)
	{
		if(delegateManager != null)
			delegateManager.checkPermission(perm)
	}
	override def checkPermission(perm: Permission, context: AnyRef)
	{
		if(delegateManager != null)
			delegateManager.checkPermission(perm, context)
	}
	override def getThreadGroup = group
}
|
||||
/** A custom SecurityException that tries not to be caught: the usual Throwable accessors
 *  are overridden to rethrow this exception until `allowAccess` is called, so generic
 *  exception handlers in user code are unlikely to swallow the trapped exit. */
private final class TrapExitSecurityException(val exitCode: Int) extends SecurityException
{
	private var accessAllowed = false
	/** Enables normal Throwable behavior; intended for the sandbox's own handler. */
	def allowAccess
	{
		accessAllowed = true
	}
	override def printStackTrace = ifAccessAllowed(super.printStackTrace)
	override def toString = ifAccessAllowed(super.toString)
	override def getCause = ifAccessAllowed(super.getCause)
	override def getMessage = ifAccessAllowed(super.getMessage)
	override def fillInStackTrace = ifAccessAllowed(super.fillInStackTrace)
	override def getLocalizedMessage = ifAccessAllowed(super.getLocalizedMessage)
	// evaluates `f` only when access has been granted; otherwise rethrows this exception
	private def ifAccessAllowed[T](f: => T): T =
	{
		if(accessAllowed)
			f
		else
			throw this
	}
}
|
||||
|
|
@ -0,0 +1,213 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** A trait that provides a task for updating sbt.  Mix into a Project to expose the
 *  `sbt-update` task implemented by the Update class below. */
trait UpdateSbt extends Project
{
	/** The first argument is the version to update to and is mandatory.
	* The second argument is the location of the launcher jar. If omitted, the launcher used to launch the currently running instance of sbt is used.*/
	lazy val sbtUpdate = task { args => task { (new Update(this))(args) } } describedAs("Updates the version of sbt used to build this project and updates the launcher jar.")
}
|
||||
|
||||
import java.io.{File, InputStream, IOException}
|
||||
import java.net.{HttpURLConnection, URL}
|
||||
import HttpURLConnection.{HTTP_NOT_FOUND , HTTP_OK}
|
||||
import SimpleReader.readLine
|
||||
import xsbt.FileUtilities.{classLocationFile, copyFile, readLines, transfer, unzip, withTemporaryDirectory, write, zip}
|
||||
import xsbt.PathMapper.relativeTo
|
||||
import xsbt.Paths._
|
||||
import xsbt.OpenResource.{fileOutputStream, urlInputStream}
|
||||
|
||||
/** Implements the interactive sbt-update task: validates the requested version, records it
 *  in the project's 'sbt.version', and downloads or rewrites the launcher jar. */
private class Update(project: Project)
{
	val info = project.info
	val app = info.app
	val log = project.log

	/** The location of the jar used to launch the currently running instance of sbt.*/
	lazy val launcherJar = classLocationFile[xsbti.AppProvider]
	/** A temporary jar file to use in the given directory. */
	def tempJar(dir: File) = dir / launcherJar.getName

	/** Implementation of the sbt-update task: reads arguments and hands off to the other `apply`.*/
	def apply(args: Array[String]): Option[String] =
		args match
		{
			case Array(version) if validVersion(version) => apply(version, None)
			case Array(version, temporaryJar) if validVersion(version) => apply(version, Some(new File(temporaryJar) getAbsoluteFile))
			case _ => Some("Expected '<version>' or '<version> <new-launcher-file>', got '" + args.mkString(" ") + "'")
		}

	/** A version is acceptable as long as it is not empty or whitespace-only. */
	def validVersion(version: String) = !version.trim.isEmpty

	/** Implementation of the sbt-update task after arguments have been checked. Gives user a chance to cancel and continues with `doUpdate`.*/
	def apply(version: String, temporaryJar: Option[File]): Option[String] =
	{
		readLine("Updating the sbt version requires a restart. Continue? (Y/n) ") match
		{
			case Some(line) if(isYes(line)) => doUpdate(version, temporaryJar)
			case _ => Some("Update declined.")
		}
	}
	/** Implementation of the sbt-update task: high-level control after initial verification.*/
	def doUpdate(version: String, temporaryJar: Option[File]): Option[String] =
	{
		retrieveNewVersion(version)
		log.info("Version is valid. Setting 'sbt.version' to " + version + "...")
		setNewVersion(version)

		log.info("'sbt.version' updated.")
		if(temporaryJar.isDefined || updateInPlace(version))
		{
			log.info("Downloading new launcher ...")

			if(downloadLauncher(version, temporaryJar))
				log.info("Downloaded launcher.")
			else
				tryUpdateLauncher(version, temporaryJar)
		}
		else
			log.info("Launcher update declined.")

		log.info("Please restart sbt.")
		// NOTE: terminates the JVM here; the None below is never reached in normal operation
		System.exit(0)
		None
	}
	/** Updates 'sbt.version' in `project/build.properties`.*/
	def setNewVersion(version: String)
	{
		project.sbtVersion() = version
		project.saveEnvironment()
	}
	/** Retrieves the given `version` of sbt in order to verify the version is valid.*/
	def retrieveNewVersion(version: String)
	{
		val newAppID = changeVersion(app.id, version)
		log.info("Checking repositories for sbt " + version + " ...")
		app.scalaProvider.app(newAppID)
	}
	/** Asks the user whether the current launcher should be overwritten. Called when no file is explicitly specified as an argument. */
	def updateInPlace(version: String) =
	{
		val input = readLine(" The current launcher (" + launcherJar + ") will be updated in place. Continue? (Y/n) ")
		isYes(input)
	}
	/** True when the user entered something and it reads as an affirmative answer. */
	def isYes(line: Option[String]): Boolean = line.filter(isYes).isDefined

	/** Updates the launcher as in `updateLauncher` but performs various checks and logging around it. */
	def tryUpdateLauncher(version: String, temporaryJar: Option[File])
	{
		log.warn("No launcher found for '" + version + "'")
		def promptStart = if(temporaryJar.isDefined) " Copy current launcher but with " else " Modify current launcher to use "
		val input = readLine(promptStart + version + " as the default for new projects? (Y/n) ")
		val updated = isYes(input)
		if(updated) updateLauncher(version, temporaryJar)

		def extra = if(temporaryJar.isDefined) " at " + temporaryJar.get + "." else "."
		log.info(if(updated) "Launcher updated" + extra else "Launcher not updated.")
	}
	/** The jar to copy/download to. If `userJar` is not defined, it is a temporary file in `tmpDir` that should then be moved to the current launcher file.
	* If it is defined and is a directory, the jar is defined in that directory. If it is a file, that file is returned. */
	def targetJar(tmpDir: File, userJar: Option[File]): File =
		userJar match { case Some(file) => if(file.isDirectory) tempJar(file) else file; case None => tempJar(tmpDir) }

	/** Gets the given `version` of the launcher from Google Code. If `userProvidedJar` is defined,
	* this updated launcher is downloaded there, otherwise it overwrites the current launcher.
	* NOTE(review): the jar is fetched over plain HTTP with no checksum/signature verification. */
	def downloadLauncher(version: String, userProvidedJar: Option[File]): Boolean =
	{
		def getLauncher(tmp: File): Boolean =
		{
			val temporaryJar = targetJar(tmp, userProvidedJar)
			temporaryJar.getParentFile.mkdirs()
			val url = launcherURL(version)
			val connection = url.openConnection.asInstanceOf[HttpURLConnection]
			// redirects disabled so a "not found" redirect page is not mistaken for the jar
			connection.setInstanceFollowRedirects(false)

			def download(in: InputStream): Unit = fileOutputStream(false)(temporaryJar) { out => transfer(in, out) }
			def checkAndRetrieve(in: InputStream): Boolean = (connection.getResponseCode == HTTP_OK) && { download(in); true }
			// a 404 means the launcher for this version simply doesn't exist; other IO errors propagate
			def handleError(e: IOException) = if(connection.getResponseCode == HTTP_NOT_FOUND ) false else throw e
			def retrieve() =
			{
				val in = connection.getInputStream
				try { checkAndRetrieve(in) } finally { in.close() }
			}

			val success = try { retrieve() } catch { case e: IOException => handleError(e)} finally { connection.disconnect() }
			if(success && userProvidedJar.isEmpty)
				move(temporaryJar, launcherJar)
			success
		}
		withTemporaryDirectory(getLauncher)
	}
	/** The location of the launcher for the given version, if it exists. */
	def launcherURL(version: String): URL =
		new URL("http://simple-build-tool.googlecode.com/files/sbt-launch-" + version + ".jar")

	/** True iff the given user input is empty, 'y' or 'yes' (case-insensitive).*/
	def isYes(line: String) =
	{
		val lower = line.toLowerCase
		lower.isEmpty || lower == "y" || lower == "yes"
	}
	/** Copies the current launcher but with the default 'sbt.version' set to `version`. If `userProvidedJar` is defined,
	* the updated launcher is copied there, otherwise the copy overwrites the current launcher. */
	def updateLauncher(version: String, userProvidedJar: Option[File])
	{
		// unzip the current launcher, patch sbt.boot.properties, and rezip into newJar
		def makeUpdated(base: File, newJar: File)
		{
			val files = unzip(launcherJar, base)
			updateBootProperties(files, version)
			zip(relativeTo(base)( files ), newJar)
		}
		def updateLauncher(tmp: File)
		{
			val basePath = tmp / "launcher-jar"
			val temporaryJar = targetJar(tmp, userProvidedJar)
			makeUpdated(basePath, temporaryJar)
			if(userProvidedJar.isEmpty)
				move(temporaryJar, launcherJar)
		}

		withTemporaryDirectory(updateLauncher)
	}

	/** Copies the `src` file to the `dest` file, preferably by renaming. The `src` file may or may not be removed.*/
	def move(src: File, dest: File)
	{
		val renameSuccess = src renameTo dest
		if(!renameSuccess)
			copyFile(src, dest)
	}

	/** Updates the default value used for 'sbt.version' in the 'sbt.boot.properties' file in the launcher `files` to be `version`.*/
	def updateBootProperties(files: Iterable[File], version: String): Unit =
		files.find(_.getName == "sbt.boot.properties").foreach(updateBootProperties(version))
	/** Updates the default value used for 'sbt.version' in the given `file` to be `version`.*/
	def updateBootProperties(version: String)(file: File)
	{
		val newContent = readLines(file) map updateSbtVersion(version)
		write(file, newContent.mkString("\n"))
	}

	/** If the given `line` is the 'sbt.version' configuration, update it to use the `newVersion`.*/
	def updateSbtVersion(newVersion: String)(line: String) =
		if(line.trim.startsWith("sbt.version")) sbtVersion(newVersion) else line

	/** The configuration line that defines the 'sbt.version' property, using the provided `version` for defaults.*/
	def sbtVersion(version: String) =
		"	sbt.version: quick=set(" + version + "), new=prompt(sbt version)[" + version + "], fill=prompt(sbt version)[" + version + "]"

	/** Copies the given ApplicationID but with the specified version.*/
	def changeVersion(appID: xsbti.ApplicationID, versionA: String): xsbti.ApplicationID =
		new xsbti.ApplicationID {
			def groupID = appID.groupID
			def name = appID.name
			def version = versionA
			def mainClass = appID.mainClass
			def mainComponents = appID.mainComponents
			def crossVersioned = appID.crossVersioned
			def classpathExtra = appID.classpathExtra
		}
}
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/** A project/artifact version: either a parsed numeric BasicVersion or an
 *  uninterpreted OpaqueVersion. */
sealed trait Version extends NotNull
|
||||
/** A version of the form major[.minor[.micro]][-extra] (e.g. 1.2.3-SNAPSHOT).
 *  `micro` may only be defined when `minor` is; all numeric parts must be nonnegative
 *  and `extra`, when present, must be non-empty with no control characters. */
case class BasicVersion(major: Int, minor: Option[Int], micro: Option[Int], extra: Option[String]) extends Version
{
	import Version._
	require(major >= 0, "Major revision must be nonnegative.")
	require(minor.isDefined || micro.isEmpty, "Cannot define micro revision without defining minor revision.")
	requirePositive(minor)
	requirePositive(micro)
	require(isValidExtra(extra))

	/** This version with the micro revision incremented (missing minor/micro count as 0). */
	def incrementMicro = BasicVersion(major, minor orElse Some(0), increment(micro), extra)
	/** This version with the minor revision incremented (a missing minor counts as 0). */
	def incrementMinor = BasicVersion(major, increment(minor), micro, extra)
	/** This version with the major revision incremented. */
	def incrementMajor = BasicVersion(major + 1, minor, micro, extra)
	/** A copy of this version with the extra qualifier replaced by `newExtra`. */
	def withExtra(newExtra: Option[String]) = BasicVersion(major, minor, micro, newExtra)

	override def toString =
	{
		// micro can only be present when minor is (enforced above), so joining the
		// defined numeric parts with '.' reproduces major[.minor[.micro]]
		val numeric = (major :: minor.toList ::: micro.toList).mkString(".")
		extra match
		{
			case Some(qualifier) => numeric + "-" + qualifier
			case None => numeric
		}
	}
}
|
||||
/** A version kept as an uninterpreted, non-empty string (used when the value does not
 *  parse as a BasicVersion or contains an invalid extra qualifier). */
case class OpaqueVersion(value: String) extends Version
{
	require(!value.trim.isEmpty)
	override def toString = value
}
|
||||
/** Parsing and validation helpers for Version. */
object Version
{
	private[sbt] def increment(i: Option[Int]) = Some(i.getOrElse(0) + 1)
	private[sbt] def requirePositive(i: Option[Int]) { i.foreach(x => require(x >= 0)) }

	import java.util.regex.Pattern
	/** Matches major[.minor[.micro]][-extra]. */
	val versionPattern = Pattern.compile("""(\d+)(?:\.(\d+)(?:\.(\d+))?)?(?:-(.+))?""")

	/** Parses `v` into a Version.  Only an empty (after trimming) string is an error;
	 *  anything that does not match the numeric pattern becomes an OpaqueVersion. */
	def fromString(v: String): Either[String, Version] =
	{
		val trimmed = v.trim
		if(trimmed.isEmpty)
			Left("Version cannot be empty.")
		else
			Right(parseNonEmpty(trimmed))
	}
	// `trimmed` is known to be non-empty here
	private def parseNonEmpty(trimmed: String): Version =
	{
		val matcher = versionPattern.matcher(trimmed)
		if(matcher.matches)
		{
			// Option(...) maps a null (absent) group to None
			def grp(index: Int) = Option(matcher.group(index))
			val extra = grp(4)
			if(isValidExtra(extra))
				BasicVersion(matcher.group(1).toInt, grp(2).map(_.toInt), grp(3).map(_.toInt), extra)
			else
				OpaqueVersion(trimmed)
		}
		else
			OpaqueVersion(trimmed)
	}
	/** True when the extra qualifier is absent or valid as a string. */
	def isValidExtra(e: Option[String]): Boolean = e.map(isValidExtra).getOrElse(true)
	/** True when `s` is non-blank and contains no ISO control characters. */
	def isValidExtra(s: String): Boolean = !(s.trim.isEmpty || s.exists(java.lang.Character.isISOControl))
}
|
||||
|
|
@ -0,0 +1,150 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.{URL, URLClassLoader}
|
||||
import scala.xml.NodeSeq
|
||||
|
||||
/** Default settings shared by Jetty runner configurations. */
object JettyRunner {
	/** Port used when a configuration does not specify one. */
	val DefaultPort = 8080
	/** Scan interval used when a configuration does not specify one
	 * (presumably seconds -- confirm against the LazyJettyRun implementations). */
	val DefaultScanInterval = 3
}
|
||||
/** Manages a single embedded Jetty instance: starts it via apply(), and supports stop() and reload().
 * Registers itself as an exit hook so a running server is stopped when sbt exits.
 * Jetty's own classes are loaded through an isolated class loader so that either a Jetty 6 or
 * Jetty 7 implementation can be selected at runtime. */
class JettyRunner(configuration: JettyConfiguration) extends ExitHook
{
	ExitHooks.register(this)

	def name = "jetty-shutdown"
	def runBeforeExiting() { stop() }
	// the currently running server, if any
	private var running: Option[Stoppable] = None
	private def started(s: Stoppable) { running = Some(s) }
	/** Stops the running server, if any.  Safe to call when nothing is running. */
	def stop()
	{
		running.foreach(_.stop())
		running = None
	}
	/** Asks the running server (if any) to reload the web application. */
	def reload() = running.foreach(_.reload())
	/** Starts Jetty.  Returns None on success or Some(error message) on failure,
	 * including the case where an instance is already running. */
	def apply(): Option[String] =
	{
		import configuration._
		def runJetty() =
		{
			val baseLoader = this.getClass.getClassLoader
			val classpathURLs = jettyClasspath.get.map(_.asURL).toSeq
			val jettyParentLoader = configuration match { case d: DefaultJettyConfiguration => d.parentLoader; case _ => ClassLoader.getSystemClassLoader }
			val jettyLoader: ClassLoader = new java.net.URLClassLoader(classpathURLs.toArray, jettyParentLoader)

			// partition class loading: Jetty packages come from jettyLoader, everything else from the sbt loader
			val jettyFilter = (name: String) => name.startsWith("org.mortbay.") || name.startsWith("org.eclipse.jetty.")
			val notJettyFilter = (name: String) => !jettyFilter(name)

			val dual = new xsbt.DualLoader(baseLoader, notJettyFilter, x => true, jettyLoader, jettyFilter, x => false)

			// loads the version-specific runner object through a loader that sees both sbt and Jetty classes
			def createRunner(implClassName: String) =
			{
				val lazyLoader = new LazyFrameworkLoader(implClassName, Array(FileUtilities.classLocation[Stoppable].toURI.toURL), dual, baseLoader)
				ModuleUtilities.getObject(implClassName, lazyLoader).asInstanceOf[JettyRun]
			}
			// try the Jetty 6 implementation first; fall back to Jetty 7 when Jetty 6 classes are absent
			val runner = try { createRunner(implClassName6) } catch { case e: NoClassDefFoundError => createRunner(implClassName7) }
			runner(configuration, jettyLoader)
		}

		if(running.isDefined)
			Some("This instance of Jetty is already running.")
		else
		{
			try
			{
				started(runJetty())
				None
			}
			catch
			{
				// NoClassDefFoundError from both runners means Jetty itself was not on the provided classpath
				case e: NoClassDefFoundError => runError(e, "Jetty and its dependencies must be on the " + classpathName + " classpath: ", log)
				case e => runError(e, "Error running Jetty: ", log)
			}
		}
	}
	// fully qualified names of the version-specific runner objects (defined in separate source files)
	private val implClassName6 = "sbt.jetty.LazyJettyRun6"
	private val implClassName7 = "sbt.jetty.LazyJettyRun7"

	// logs the full stack trace and converts the error into Some(message)
	private def runError(e: Throwable, messageBase: String, log: Logger) =
	{
		log.trace(e)
		Some(messageBase + e.toString)
	}
}
|
||||
|
||||
/** Handle to a started Jetty instance; implemented by the version-specific runners. */
private trait Stoppable
{
	def stop(): Unit
	def reload(): Unit
}
/** Entry point implemented by the version-specific (Jetty 6 / Jetty 7) runner objects. */
private trait JettyRun
{
	def apply(configuration: JettyConfiguration, jettyLoader: ClassLoader): Stoppable
}
/** Base configuration for running Jetty. */
sealed trait JettyConfiguration extends NotNull
{
	/** The classpath to get Jetty from. */
	def jettyClasspath: PathFinder
	// name of the classpath, used only in error messages
	def classpathName: String
	def log: Logger
}
/** Configuration for the common case: run a single web application at a context path. */
trait DefaultJettyConfiguration extends JettyConfiguration
{
	def war: Path
	// directories passed to the runner along with scanInterval -- presumably watched for
	// changes that trigger a redeploy; confirm in the LazyJettyRun implementations
	def scanDirectories: Seq[File]
	def scanInterval: Int

	def contextPath: String
	def port: Int
	/** The classpath containing the classes, jars, and resources for the web application. */
	def classpath: PathFinder
	/** The parent class loader for the isolated Jetty class loader. */
	def parentLoader: ClassLoader
}
/** Configuration for full control: Jetty is configured from XML files and/or inline XML. */
abstract class CustomJettyConfiguration extends JettyConfiguration
{
	def jettyConfigurationFiles: Seq[File] = Nil
	def jettyConfigurationXML: NodeSeq = NodeSeq.Empty
}
|
||||
|
||||
/** Adapts an sbt Logger to the shape of Jetty's logging interface; version-specific subclasses
 * implement the actual Jetty Logger interface and inherit these delegating methods.
 * Jetty messages may contain "{}" placeholders for up to two arguments; 'format' substitutes them. */
private class JettyLoggerBase(delegate: Logger)
{
	def getName = "JettyLogger"
	def isDebugEnabled = delegate.atLevel(Level.Debug)
	def setDebugEnabled(enabled: Boolean) = delegate.setLevel(if(enabled) Level.Debug else Level.Info)

	def info(msg: String) { delegate.info(msg) }
	// fixed: previously forwarded debug messages to delegate.warn, unlike the 3-argument
	// debug overload below, which correctly forwards to delegate.debug
	def debug(msg: String) { delegate.debug(msg) }
	def warn(msg: String) { delegate.warn(msg) }
	def info(msg: String, arg0: AnyRef, arg1: AnyRef) { delegate.info(format(msg, arg0, arg1)) }
	def debug(msg: String, arg0: AnyRef, arg1: AnyRef) { delegate.debug(format(msg, arg0, arg1)) }
	def warn(msg: String, arg0: AnyRef, arg1: AnyRef) { delegate.warn(format(msg, arg0, arg1)) }
	def warn(msg: String, th: Throwable)
	{
		delegate.warn(msg)
		delegate.trace(th)
	}
	def debug(msg: String, th: Throwable)
	{
		delegate.debug(msg)
		delegate.trace(th)
	}
	/** Substitutes up to two "{}" placeholders in 'msg' with arg0 and arg1 (null renders as "").
	 * Text after the second placeholder is preserved; with no placeholders, only the text before
	 * any would-be placeholder is returned unchanged. */
	private def format(msg: String, arg0: AnyRef, arg1: AnyRef) =
	{
		def toString(arg: AnyRef) = if(arg == null) "" else arg.toString
		// limit 3: at most two placeholders are substituted
		val pieces = msg.split("""\{\}""", 3)
		if(pieces.length == 1)
			pieces(0)
		else
		{
			val base = pieces(0) + toString(arg0) + pieces(1)
			if(pieces.length == 2)
				base
			else
				base + toString(arg1) + pieces(2)
		}
	}
}
|
||||
|
|
@ -0,0 +1,285 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import scala.xml.{Elem, NodeSeq}
|
||||
import Control._
|
||||
|
||||
/** Defines the configurable parameters for the webstart task. */
trait WebstartOptions extends NotNull
{
	/** The main jar to use for webstart. */
	def webstartMainJar: Path
	/** The location to put all generated files for webstart. */
	def webstartOutputDirectory: Path
	/** Generates the .jnlp file using the provided resource descriptions. Each resource description
	 * provides the path of the jar relative to 'webstartOutputDirectory' and whether or not
	 * it is the main jar. */
	def jnlpXML(jars: Seq[WebstartJarResource]): Elem
	/** The location to write the .jnlp file to. It must be in 'webstartOutputDirectory'. */
	def jnlpFile: Path
	/** The location to put all jars that are not the main jar. It must be in 'webstartOutputDirectory'. */
	def webstartLibDirectory: Path
	/** The libraries needed for webstart. Note that only jars are used; directories are discarded. */
	def webstartLibraries: PathFinder
	/** Libraries external to the project needed for webstart. This is mainly for scala libraries. */
	def webstartExtraLibraries: PathFinder
	/** Resources to copy to the webstart output directory. */
	def webstartResources: PathFinder
	/** If defined, this specifies where to create a zip of the webstart output directory. It cannot be
	 * in the output directory. */
	def webstartZip: Option[Path]

	/** If defined, configures signing of jars. All jars (main and libraries) are signed using
	 * this configuration. */
	def webstartSignConfiguration: Option[SignConfiguration]
	/** If true, pack200 compression is applied to all jars (main and libraries). A version of each jar
	 * without pack200 compression is still created in the webstart output directory. */
	def webstartPack200: Boolean
	/** If true, gzip compression will be applied to all jars. If pack200 compression is enabled,
	 * gzip compression is also applied to the archives with pack200 compression. A version of
	 * each file without gzip compression is still created in the webstart output directory. */
	def webstartGzip: Boolean
}
|
||||
/** Describes a jar included in the webstart distribution.
 * @param name the filename of the jar
 * @param href the path of the jar relative to the webstart output directory
 * @param isMain true only for the main jar */
final class WebstartJarResource(val name: String, val href: String, val isMain: Boolean) extends NotNull
/** Configuration for signing jars: the key alias plus additional sign options (see SignJar). */
final class SignConfiguration(val alias: String, val options: Seq[SignJar.SignOption]) extends NotNull
|
||||
/** A scala project that produces a webstart distribution. */
trait WebstartScalaProject extends ScalaProject
{
	import WebstartScalaProject._
	/** Creates a task that produces a webstart distribution using the given options.
	 * The task signs / pack200s / gzips the main jar and library jars as configured, writes the
	 * .jnlp file, copies resources into the output directory, prunes files no longer produced,
	 * and optionally zips the result. */
	def webstartTask(options: WebstartOptions) =
		task
		{
			import options._
			FileUtilities.createDirectories(webstartOutputDirectory :: webstartLibDirectory :: Nil, log) // ignore errors
			verifyOptions(options)

			// converts an absolute jar path into a path relative to the output directory (used for jnlp hrefs)
			def relativize(jar: Path) = Path.relativize(webstartOutputDirectory ##, jar) getOrElse
				error("Jar (" + jar + ") was not in webstart output directory (" + webstartOutputDirectory + ").")
			// processes each jar according to the sign/pack200/gzip settings, accumulating every produced
			// path; lazyFold short-circuits on the first Left (error message)
			def signAndPack(jars: List[Path], targetDirectory: Path): Either[String, List[Path]] =
			{
				lazyFold(jars, Nil: List[Path])
				{ (allJars, jar) =>
					val signPackResult =
						webstartSignConfiguration match
						{
							case Some(config) =>
								if(webstartPack200)
									signAndPack200(jar, config, targetDirectory, log)
								else
									signOnly(jar, config, targetDirectory, log)
							case None =>
								if(webstartPack200)
									pack200Only(jar, targetDirectory, log)
								else
									copyJar(jar, targetDirectory, log).right.map(jars => new Jars(jars, Nil))
						}
					// when pack200 is enabled, the gzip step deletes its uncompressed input afterwards
					val deleteOriginal = webstartPack200
					signPackResult.right flatMap { addJars =>
						if(webstartGzip)
							Control.lazyFold(addJars.gzippable, addJars.allJars ::: allJars)
								{ (accumulate, jar) => gzipJar(jar, deleteOriginal, log).right.map(_ ::: accumulate) }
						else
							Right(addJars.allJars ::: allJars)
					}
				}
			}

			import FileUtilities._

			// only actual archives are considered; directories on the classpath are discarded
			val jars = (webstartLibraries +++ webstartExtraLibraries).get.filter(ClasspathUtilities.isArchive)
			def process(jars: Iterable[Path]) = for(jar <- jars if jar.asFile.getName.endsWith(".jar")) yield relativize(jar)

			// 'thread' chains Either-producing steps, stopping at the first error
			thread(signAndPack(webstartMainJar :: Nil, webstartOutputDirectory)) { mainJars =>
				thread(signAndPack(jars.toList, webstartLibDirectory)) { libJars =>
					writeXML(jnlpXML(jarResources(process(mainJars), process(libJars))), jnlpFile, log) orElse
					thread(copy(webstartResources.get, webstartOutputDirectory, log)) { copiedResources =>
						// anything in the output directory not listed in 'keep' is deleted by prune
						val keep = jnlpFile +++ Path.lazyPathFinder(mainJars ++ libJars ++ copiedResources) +++
							webstartOutputDirectory +++ webstartLibDirectory
						prune(webstartOutputDirectory, keep.get, log) orElse
						webstartZip.flatMap( zipPath => zip(List(webstartOutputDirectory ##), zipPath, true, log) )
					}
				}
			}
		}
	/** Creates default XML elements for a JNLP file for the given resources. */
	protected def defaultElements(resources: Seq[WebstartJarResource]): NodeSeq = NodeSeq.fromSeq(resources.map(defaultElement))
	/** Creates a default XML element for a JNLP file for the given resource. */
	protected def defaultElement(resource: WebstartJarResource): Elem =
		<jar href={resource.href} main={resource.isMain.toString}/>

}
|
||||
/** Collects the jar paths produced for one input jar, split by whether the gzip step applies to them. */
private class Jars(val gzippable: List[Path], val nonGzippable: List[Path]) extends NotNull
{
	def allJars = gzippable ::: nonGzippable
}
|
||||
/** Implementation helpers for the webstart task. */
private object WebstartScalaProject
{
	import FileTasks.{runOption, wrapProduct, wrapProducts}
	/** Changes the extension of the Path of the given jar by appending newExtension to its filename.
	 * Paths that are not RelativePaths are returned unchanged. */
	private def appendExtension(jar: Path, newExtension: String) =
		jar match
		{
			case rp: RelativePath => rp.parentPath / (rp.component + newExtension)
			case x => x
		}
	private def gzipJarPath(jar: Path) = appendExtension(jar, ".gz")
	private def packPath(jar: Path) = appendExtension(jar, ".pack")
	// signs 'jar' into targetDirectory without pack200; skipped when the target is up to date (runOption)
	private def signOnly(jar: Path, signConfiguration: SignConfiguration, targetDirectory: Path, log: Logger) =
	{
		val targetJar = targetDirectory / jar.asFile.getName
		runOption("sign", targetJar from jar, log) {
			log.debug("Signing " + jar)
			signAndVerify(jar, signConfiguration, targetJar, log)
		}.toLeft(new Jars(targetJar :: Nil, Nil))
	}
	// signs 'jar' (output directed to 'targetJar' via the signedJar option) and then verifies.
	// NOTE(review): verification is run against 'jar' (the input), not 'targetJar' (the signed
	// output) -- confirm against SignJar's semantics.
	private def signAndVerify(jar: Path, signConfiguration: SignConfiguration, targetJar: Path, log: Logger) =
	{
		import SignJar._
		sign(jar, signConfiguration.alias, signedJar(targetJar) :: signConfiguration.options.toList, log) orElse
			verify(jar, signConfiguration.options, log).map(err => "Signed jar failed verification: " + err)
	}
	// gzips 'jar' next to itself (adding .gz); optionally deletes the uncompressed input afterwards
	private def gzipJar(jar: Path, deleteOriginal: Boolean, log: Logger) =
	{
		val gzipJar = gzipJarPath(jar)
		runOption("gzip", gzipJar from jar, log)
		{
			log.debug("Gzipping " + jar)
			FileUtilities.gzip(jar, gzipJar, log) orElse
				(if(deleteOriginal) FileUtilities.clean(jar :: Nil, true, log) else None)
		}.toLeft(gzipJar :: Nil)
	}
	/** Properly performs both signing and pack200 compression and verifies the result. This method only does anything if
	 * its outputs are out of date with respect to 'jar'. Note that it does not determine if the signing configuration has changed.
	 * See java.util.jar.Pack200 for more information. */
	private def signAndPack200(jar: Path, signConfiguration: SignConfiguration, targetDirectory: Path, log: Logger) =
	{
		val signedJar = targetDirectory / jar.asFile.getName
		val packedJar = packPath(signedJar)
		import signConfiguration._

		runOption("sign and pack200", List(packedJar, signedJar) from jar, log) {
			log.debug("Applying pack200 compression and signing " + jar)
			signAndPack(jar, signedJar, packedJar, alias, options, log) orElse
				signAndVerify(jar, signConfiguration, signedJar, log)
		}.toLeft(new Jars(packedJar :: Nil, signedJar :: Nil))
	}
	/** Properly performs both signing and pack200 compression and verifies the result.
	 * The pack/unpack round trip before signing, followed by repacking, is presumably the
	 * normalization needed so the signature survives pack200 compression -- see
	 * java.util.jar.Pack200 for more information. */
	private def signAndPack(jarPath: Path, signedPath: Path, out: Path, alias: String, options: Seq[SignJar.SignOption], log: Logger): Option[String] =
	{
		import Pack._
		import SignJar._
		pack(jarPath, out, log) orElse
		unpack(out, signedPath, log) orElse
		sign(signedPath, alias, options, log) orElse
		pack(signedPath, out, log) orElse
		unpack(out, signedPath, log) orElse
		verify(signedPath, options, log)
	}
	// applies pack200 compression only (no signing); the uncompressed jar is also copied to the target directory
	private def pack200Only(jar: Path, targetDirectory: Path, log: Logger) =
	{
		val targetJar = targetDirectory / jar.asFile.getName
		val packedJar = packPath(targetJar)
		val packResult =
			runOption("pack200", packedJar from jar, log)
			{
				log.debug("Applying pack200 compression to " + jar)
				Pack.pack(jar, packedJar, log)
			}
		packResult match
		{
			case Some(err) => Left(err)
			// the packed jar is not gzip-eligible here; the plain copy is
			case None => copyJar(jar, targetDirectory, log).right.map(jars => new Jars(packedJar :: Nil, jars))
		}
	}
	// plain copy into the target directory, skipped when up to date
	private def copyJar(jar: Path, targetDirectory: Path, log: Logger) =
	{
		val targetJar = targetDirectory / jar.asFile.getName
		runOption("copy jar", targetJar from jar, log)( FileUtilities.copyFile(jar, targetJar, log) ).toLeft(targetJar :: Nil)
	}
	/** Writes the XML string 'xmlString' to the file 'outputPath'. */
	private def writeXML(xmlString: String, outputPath: Path, log: Logger): Option[String] =
		FileUtilities.write(outputPath.asFile, xmlString, log)
	/** Writes the XML element to the file 'outputPath', but only when its content hash differs
	 * from the existing file's hash (so an unchanged jnlp file is not rewritten). */
	private def writeXML(xml: Elem, outputPath: Path, log: Logger): Option[String] =
	{
		val xmlString =
		{
			import scala.xml.Utility
			// cross-building hack: supplies a toXML(Elem, Boolean) for Scala 2.8, where
			// Utility.toXML no longer has that signature
			object WithToXML {
				def toXML(xml: Elem, stripComments: Boolean) = Utility.toXML(xml).toString // this will only be called for 2.8, which defaults to stripComments= false, unlike 2.7
			}
			implicit def another28Hack(any: AnyRef) = WithToXML
			scala.xml.Utility.toXML(xml, false) // 2.8 doesn't have this method anymore, so the above implicit will kick in for 2.8 only
		}
		if(!outputPath.exists)
		{
			log.debug("JNLP file did not exist, writing inline XML to " + outputPath)
			writeXML(xmlString, outputPath, log)
		}
		else
		{
			val result =
				for( xmlHash <- Hash(xmlString, log).right; fileHash <- Hash(outputPath, log).right ) yield
				{
					if(xmlHash deepEquals fileHash)
					{
						log.debug("JNLP file " + outputPath + " uptodate.")
						None
					}
					else
					{
						log.debug("Inline JNLP XML modified, updating file " + outputPath + ".")
						writeXML(xmlString, outputPath, log)
					}
				}
			// a hashing error is the result; otherwise propagate the inner write result
			result.fold(err => Some(err), x => x)
		}
	}
	private def jarResource(isMain: Boolean)(jar: Path): WebstartJarResource =
		new WebstartJarResource(jar.asFile.getName, jar.relativePathString("/"), isMain)
	private def jarResources(mainJars: Iterable[Path], libraries: Iterable[Path]): Seq[WebstartJarResource] =
		mainJars.map(jarResource(true)).toList ::: libraries.map(jarResource(false)).toList

	/** True iff 'directory' is an ancestor (strictly) of 'check'. */
	private def isInDirectory(directory: Path, check: Path) = Path.relativize(directory, check).isDefined && directory != check
	/** Checks the paths in the given options for validity. See the documentation for WebstartOptions. */
	private def verifyOptions(options: WebstartOptions)
	{
		import options._
		require(isInDirectory(webstartOutputDirectory, webstartLibDirectory),
			"Webstart dependency directory (" + webstartLibDirectory + ") must be a subdirectory of webstart output directory (" +
				webstartOutputDirectory + ").")
		require(isInDirectory(webstartOutputDirectory, jnlpFile), "Webstart JNLP file output location (" + jnlpFile +
			") must be in the webstart output directory (" + webstartOutputDirectory + ").")
		for(wz <- webstartZip)
			require(!isInDirectory(webstartOutputDirectory, wz),
				"Webstart output zip location (" + wz + " cannot be in webstart output directory (" + webstartOutputDirectory + ").")
	}
}
|
||||
/** The default extension point for a webstart project. There is one method that is required to be defined: jnlpXML.
 * 'webstartSignConfiguration', 'webstartPack200', and 'webstartGzip' are methods of interest. */
abstract class DefaultWebstartProject(val info: ProjectInfo) extends BasicWebstartProject with MavenStyleWebstartPaths
/** Defines default implementations of all methods in WebstartOptions except for jnlpXML. packageAction is overridden
 * to create a webstart distribution after the normal package operation. */
abstract class BasicWebstartProject extends BasicScalaProject with WebstartScalaProject with WebstartOptions with WebstartPaths
{
	// jars are not signed by default
	def webstartSignConfiguration: Option[SignConfiguration] = None

	def webstartExtraLibraries = mainDependencies.scalaJars
	def webstartLibraries = publicClasspath +++ jarsOfProjectDependencies
	def webstartResources = descendents(jnlpResourcesPath ##, AllPassFilter)

	// both pack200 and gzip compression are enabled by default
	def webstartPack200 = true
	def webstartGzip = true

	// run the normal package action, then build the webstart distribution from its output
	override def packageAction = super.packageAction && webstartTask(this)
}
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.classfile
|
||||
import sbt._
|
||||
|
||||
import scala.collection.mutable
|
||||
import mutable.{ArrayBuffer, Buffer}
|
||||
import java.io.File
|
||||
|
||||
/** Post-compilation analysis: maps newly produced .class files back to their sources and extracts
 * generated-class, main-method, and dependency information, reporting it to an AnalysisCallback. */
private[sbt] object Analyze
{
	/** Runs 'compile' and, if it succeeds, analyzes the class files newly created under
	 * 'outputDirectory'.  Returns the compilation error message, the analysis error message,
	 * or None on success.
	 * (Note: 'basePath' and the type parameter T are not referenced in this implementation.) */
	def apply[T](basePath: Path, outputDirectory: Path, sources: Iterable[Path], roots: Iterable[Path], log: Logger)
		(allProducts: => scala.collection.Set[Path], analysis: AnalysisCallback, loader: ClassLoader)
		(compile: => Option[String]): Option[String] =
	{
		val sourceSet = Set(sources.toSeq : _*)
		val classesFinder = outputDirectory ** GlobFilter("*.class")
		// snapshot of class files present before compiling, used to identify the new ones afterwards
		val existingClasses = classesFinder.get

		// runs after compilation
		def analyze()
		{
			val allClasses = Set(classesFinder.get.toSeq : _*)
			val newClasses = allClasses -- existingClasses -- allProducts

			val productToSource = new mutable.HashMap[Path, Path]
			val sourceToClassFiles = new mutable.HashMap[Path, Buffer[ClassFile]]

			// parse class files and assign classes to sources. This must be done before dependencies, since the information comes
			// as class->class dependencies that must be mapped back to source->class dependencies using the source+class assignment
			for(newClass <- newClasses;
				path <- Path.relativize(outputDirectory, newClass);
				classFile = Parser(newClass.asFile, log);
				sourceFile <- classFile.sourceFile;
				source <- guessSourcePath(sourceSet, roots, classFile, log))
			{
				analysis.beginSource(source)
				analysis.generatedClass(source, path)
				productToSource(path) = source
				sourceToClassFiles.getOrElseUpdate(source, new ArrayBuffer[ClassFile]) += classFile
			}

			// get class to class dependencies and map back to source to class dependencies
			for( (source, classFiles) <- sourceToClassFiles )
			{
				// a top-level class with a public static main(String[]) is reported as an application
				for(classFile <- classFiles if isTopLevel(classFile);
					method <- classFile.methods; if method.isMain)
					analysis.foundApplication(source, classFile.className)
				// classifies one referenced type as a source, product, external class, or jar dependency
				def processDependency(tpe: String)
				{
					Control.trapAndLog(log)
					{
						val loaded =
							try { Some(Class.forName(tpe, false, loader)) }
							catch { case e => log.warn("Problem processing dependencies of source " + source + " : " +e.toString); None }
						for(clazz <- loaded; file <- Control.convertException(FileUtilities.classLocationFile(clazz)).right)
						{
							if(file.isDirectory)
							{
								val resolved = resolveClassFile(file, tpe)
								assume(resolved.exists, "Resolved class file " + resolved + " from " + source + " did not exist")
								val resolvedPath = Path.fromFile(resolved)
								if(Path.fromFile(file) == outputDirectory)
								{
									// dependency on a class produced by this same compilation run
									productToSource.get(resolvedPath) match
									{
										case Some(dependsOn) => analysis.sourceDependency(dependsOn, source)
										case None => analysis.productDependency(resolvedPath, source)
									}
								}
								else
									analysis.classDependency(resolved, source)
							}
							else
								analysis.jarDependency(file, source)
						}
					}
				}

				classFiles.flatMap(_.types).foreach(processDependency)
				analysis.endSource(source)
			}
		}

		// analysis runs only when compilation succeeded; its own failure becomes the returned error
		compile orElse Control.convertErrorMessage(log)(analyze()).left.toOption
	}
	// builds the File for a class name under a class directory, e.g. a.b.C -> dir/a/b/C.class
	private def resolveClassFile(file: File, className: String): File = (file /: (className.replace('.','/') + ".class").split("/"))(new File(_, _))
	/** Finds the source path(s) under 'roots' matching the class file's package and SourceFile
	 * attribute (falling back to SimpleName.java when the attribute is absent), restricted to the
	 * known 'sources'.  Warns when no candidate or multiple candidates are found. */
	private def guessSourcePath(sources: scala.collection.Set[Path], roots: Iterable[Path], classFile: ClassFile, log: Logger) =
	{
		val classNameParts = classFile.className.split("""\.""")
		val lastIndex = classNameParts.length - 1
		val pkg = classNameParts.take(lastIndex)
		val simpleClassName = classNameParts(lastIndex)
		// without a SourceFile attribute, assume a .java source named after the top-level class
		val sourceFileName = classFile.sourceFile.getOrElse(simpleClassName.takeWhile(_ != '$').mkString("", "", ".java"))
		val relativeSourceFile = (pkg ++ (sourceFileName :: Nil)).mkString("/")
		val candidates = roots.map(root => Path.fromString(root, relativeSourceFile)).filter(sources.contains).toList
		candidates match
		{
			case Nil => log.warn("Could not determine source for class " + classFile.className)
			case head :: Nil => ()
			case _ =>log.warn("Multiple sources matched for class " + classFile.className + ": " + candidates.mkString(", "))
		}
		candidates
	}
	// nested and synthetic classes contain '$' in their binary name
	private def isTopLevel(classFile: ClassFile) = classFile.className.indexOf('$') < 0
}
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.classfile
|
||||
|
||||
import Constants._
|
||||
import java.io.File
|
||||
|
||||
/** Parsed representation of a JVM .class file (see the JVM class file format specification).
 * Instances are produced by sbt.classfile.Parser. */
private[sbt] trait ClassFile
{
	val majorVersion: Int
	val minorVersion: Int
	// path of the file this was parsed from
	val fileName: String
	// binary class name with dots (e.g. "a.b.C$D")
	val className: String
	val superClassName: String
	val interfaceNames: Array[String]
	val accessFlags: Int
	// indexed by constant pool index; the slot after a wide (long/double) entry is left null by the parser
	val constantPool: Array[Constant]
	val fields: Array[FieldOrMethodInfo]
	val methods: Array[FieldOrMethodInfo]
	val attributes: Array[AttributeInfo]
	// value of the SourceFile attribute, when present
	val sourceFile: Option[String]
	/** All type names referenced by this class (from field/method descriptors and class constants). */
	def types: Set[String]
	/** Decodes the given attribute's payload as a constant pool UTF8 string. */
	def stringValue(a: AttributeInfo): String
}
|
||||
|
||||
/** One constant pool entry.  Which fields are meaningful depends on 'tag':
 * class/string entries use nameIndex; field/method/name-and-type entries use nameIndex and
 * typeIndex; literal entries (int/float/long/double/UTF8) carry their parsed 'value'.
 * Unused index fields are -1. */
private[sbt] final case class Constant(tag: Byte, nameIndex: Int, typeIndex: Int, value: Option[AnyRef]) extends NotNull
{
	def this(tag: Byte, nameIndex: Int, typeIndex: Int) = this(tag, nameIndex, typeIndex, None)
	def this(tag: Byte, nameIndex: Int) = this(tag, nameIndex, -1)
	def this(tag: Byte, value: AnyRef) = this(tag, -1, -1, Some(value))
	// long and double constants occupy two constant pool slots
	def wide = tag == ConstantLong || tag == ConstantDouble
}
/** A field or method entry.  'name' and 'descriptor' are None when the parser found no valid index. */
private[sbt] final case class FieldOrMethodInfo(accessFlags: Int, name: Option[String], descriptor: Option[String], attributes: RandomAccessSeq[AttributeInfo]) extends NotNull
{
	def isStatic = (accessFlags&ACC_STATIC)== ACC_STATIC
	def isPublic = (accessFlags&ACC_PUBLIC)==ACC_PUBLIC
	// a standard application entry point: public static with descriptor (String[])V
	def isMain = isPublic && isStatic && descriptor.filter(_ == "([Ljava/lang/String;)V").isDefined
}
/** A raw attribute: its resolved name (when available) and its undecoded payload bytes. */
private[sbt] final case class AttributeInfo(name: Option[String], value: Array[Byte]) extends NotNull
{
	def isNamed(s: String) = name.filter(s == _).isDefined
	def isSignature = isNamed("Signature")
	def isSourceFile = isNamed("SourceFile")
}
|
||||
/** Constants from the JVM class file format specification used by the parser. */
private[sbt] object Constants {
	// access flags (only the subset the parser needs)
	final val ACC_STATIC = 0x0008
	final val ACC_PUBLIC = 0x0001

	// magic number at the start of every class file
	final val JavaMagic = 0xCAFEBABE

	// constant pool entry tags
	final val ConstantUTF8 = 1
	final val ConstantUnicode = 2
	final val ConstantInteger = 3
	final val ConstantFloat = 4
	final val ConstantLong = 5
	final val ConstantDouble = 6
	final val ConstantClass = 7
	final val ConstantString = 8
	final val ConstantField = 9
	final val ConstantMethod = 10
	final val ConstantInterfaceMethod = 11
	final val ConstantNameAndType = 12

	// marker character introducing an object type in a descriptor
	final val ClassDescriptor = 'L'
}
|
||||
|
|
@ -0,0 +1,177 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.classfile
|
||||
import sbt._
|
||||
|
||||
import java.io.{DataInputStream, File, InputStream}
|
||||
|
||||
// Translation of jdepend.framework.ClassFileParser by Mike Clark, Clarkware Consulting, Inc.
|
||||
// BSD Licensed
|
||||
//
|
||||
// Note that unlike the rest of sbt, some things might be null.
|
||||
|
||||
import Constants._
|
||||
|
||||
private[sbt] object Parser
|
||||
{
|
||||
def apply(file: File, log: Logger): ClassFile = FileUtilities.readStreamValue(file, log)(parse(file.getCanonicalPath, log)).right.get
|
||||
private def parse(fileName: String, log: Logger)(is: InputStream): Either[String, ClassFile] = Right(parseImpl(fileName, is, log))
|
||||
private def parseImpl(filename: String, is: InputStream, log: Logger): ClassFile =
|
||||
{
|
||||
val in = new DataInputStream(is)
|
||||
new ClassFile
|
||||
{
|
||||
assume(in.readInt() == JavaMagic, "Invalid class file: " + fileName)
|
||||
|
||||
val fileName = filename
|
||||
val minorVersion: Int = in.readUnsignedShort()
|
||||
val majorVersion: Int = in.readUnsignedShort()
|
||||
|
||||
val constantPool = parseConstantPool(in)
|
||||
val accessFlags: Int = in.readUnsignedShort()
|
||||
|
||||
val className = getClassConstantName(in.readUnsignedShort())
|
||||
val superClassName = getClassConstantName(in.readUnsignedShort())
|
||||
val interfaceNames = array(in.readUnsignedShort())(getClassConstantName(in.readUnsignedShort()))
|
||||
|
||||
val fields = readFieldsOrMethods()
|
||||
val methods = readFieldsOrMethods()
|
||||
|
||||
val attributes = array(in.readUnsignedShort())(parseAttribute())
|
||||
|
||||
lazy val sourceFile =
|
||||
for(sourceFileAttribute <- attributes.find(_.isSourceFile)) yield
|
||||
toUTF8(entryIndex(sourceFileAttribute))
|
||||
|
||||
def stringValue(a: AttributeInfo) = toUTF8(entryIndex(a))
|
||||
|
||||
private def readFieldsOrMethods() = array(in.readUnsignedShort())(parseFieldOrMethodInfo())
|
||||
private def toUTF8(entryIndex: Int) =
|
||||
{
|
||||
val entry = constantPool(entryIndex)
|
||||
assume(entry.tag == ConstantUTF8, "Constant pool entry is not a UTF8 type: " + entryIndex)
|
||||
entry.value.get.asInstanceOf[String]
|
||||
}
|
||||
private def getClassConstantName(entryIndex: Int) =
|
||||
{
|
||||
val entry = constantPool(entryIndex)
|
||||
if(entry == null) ""
|
||||
else slashesToDots(toUTF8(entry.nameIndex))
|
||||
}
|
||||
private def toString(index: Int) =
|
||||
{
|
||||
if(index <= 0) None
|
||||
else Some(toUTF8(index))
|
||||
}
|
||||
private def parseFieldOrMethodInfo() =
|
||||
new FieldOrMethodInfo(in.readUnsignedShort(), toString(in.readUnsignedShort()), toString(in.readUnsignedShort()),
|
||||
array(in.readUnsignedShort())(parseAttribute()) )
|
||||
private def parseAttribute() =
|
||||
{
|
||||
val nameIndex = in.readUnsignedShort()
|
||||
val name = if(nameIndex == -1) None else Some(toUTF8(nameIndex))
|
||||
val value = array(in.readInt())(in.readByte())
|
||||
new AttributeInfo(name, value)
|
||||
}
|
||||
|
||||
def types = Set((fieldTypes ++ methodTypes ++ classConstantReferences) : _*)
|
||||
|
||||
private def getTypes(fieldsOrMethods: Array[FieldOrMethodInfo]) =
|
||||
fieldsOrMethods.flatMap { fieldOrMethod =>
|
||||
descriptorToTypes(fieldOrMethod.descriptor)
|
||||
}
|
||||
|
||||
private def fieldTypes = getTypes(fields)
|
||||
private def methodTypes = getTypes(methods)
|
||||
|
||||
private def classConstantReferences =
|
||||
constants.flatMap { constant =>
|
||||
constant.tag match
|
||||
{
|
||||
case ConstantClass =>
|
||||
val name = toUTF8(constant.nameIndex)
|
||||
if(name.startsWith("["))
|
||||
descriptorToTypes(Some(name))
|
||||
else
|
||||
slashesToDots(name) :: Nil
|
||||
case _ => Nil
|
||||
}
|
||||
}
|
||||
private def constants =
|
||||
{
|
||||
def next(i: Int, list: List[Constant]): List[Constant] =
|
||||
{
|
||||
if(i < constantPool.length)
|
||||
{
|
||||
val constant = constantPool(i)
|
||||
next(if(constant.wide) i+2 else i+1, constant :: list)
|
||||
}
|
||||
else
|
||||
list
|
||||
}
|
||||
next(1, Nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
private def array[T](size: Int)(f: => T)(implicit mf: scala.reflect.Manifest[T]) = Array.fromFunction(i => f)(size)
|
||||
private def parseConstantPool(in: DataInputStream) =
|
||||
{
|
||||
val constantPoolSize = in.readUnsignedShort()
|
||||
val pool = new Array[Constant](constantPoolSize)
|
||||
|
||||
def parse(i: Int): Unit =
|
||||
if(i < constantPoolSize)
|
||||
{
|
||||
val constant = getConstant(in)
|
||||
pool(i) = constant
|
||||
parse( if(constant.wide) i+2 else i+1 )
|
||||
}
|
||||
|
||||
parse(1)
|
||||
pool
|
||||
}
|
||||
|
||||
private def getConstant(in: DataInputStream) =
|
||||
{
|
||||
val tag = in.readByte()
|
||||
tag match
|
||||
{
|
||||
case ConstantClass | ConstantString => new Constant(tag, in.readUnsignedShort())
|
||||
case ConstantField | ConstantMethod | ConstantInterfaceMethod | ConstantNameAndType =>
|
||||
new Constant(tag, in.readUnsignedShort(), in.readUnsignedShort())
|
||||
case ConstantInteger => new Constant(tag, new java.lang.Integer(in.readInt()))
|
||||
case ConstantFloat => new Constant(tag, new java.lang.Float(in.readFloat()))
|
||||
case ConstantLong => new Constant(tag, new java.lang.Long(in.readLong()))
|
||||
case ConstantDouble => new Constant(tag, new java.lang.Double(in.readDouble()))
|
||||
case ConstantUTF8 => new Constant(tag, in.readUTF())
|
||||
case _ => error("Unknown constant: " + tag)
|
||||
}
|
||||
}
|
||||
|
||||
private def toInt(v: Byte) = if(v < 0) v + 256 else v.toInt
|
||||
private def entryIndex(a: AttributeInfo) =
|
||||
{
|
||||
val Array(v0, v1) = a.value
|
||||
toInt(v0) * 256 + toInt(v1)
|
||||
}
|
||||
|
||||
private def slashesToDots(s: String) = s.replace('/', '.')
|
||||
|
||||
private def descriptorToTypes(descriptor: Option[String]) =
|
||||
{
|
||||
def toTypes(descriptor: String, types: List[String]): List[String] =
|
||||
{
|
||||
val startIndex = descriptor.indexOf(ClassDescriptor)
|
||||
if(startIndex < 0)
|
||||
types
|
||||
else
|
||||
{
|
||||
val endIndex = descriptor.indexOf(';', startIndex+1)
|
||||
val tpe = slashesToDots(descriptor.substring(startIndex + 1, endIndex))
|
||||
toTypes(descriptor.substring(endIndex), tpe :: types)
|
||||
}
|
||||
}
|
||||
toTypes(descriptor.getOrElse(""), Nil)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.impl
|
||||
|
||||
import scala.util.parsing.combinator.Parsers
|
||||
import scala.util.parsing.input.CharSequenceReader
|
||||
import scala.util.parsing.input.CharArrayReader.EofCh
|
||||
|
||||
/** Parses a command of the form:
|
||||
* identifier argument*
|
||||
* where argument may be quoted to include spaces and
|
||||
* quotes and backslashes should be escaped.
|
||||
*/
|
||||
object Arguments
|
||||
{
|
||||
def apply(commandString: String): Either[String, (String, List[String])] =
|
||||
CommandParser.parse(commandString)
|
||||
}
|
||||
|
||||
/* Most of the complexity is for error handling.*/
|
||||
private[sbt] object CommandParser extends Parsers
|
||||
{
|
||||
type Elem = Char
|
||||
def parse(commandString: String): Either[String, (String, List[String])] =
|
||||
{
|
||||
command(new CharSequenceReader(commandString.trim, 0)) match
|
||||
{
|
||||
case Success(id ~ args, next) => Right((id, args))
|
||||
case err: NoSuccess =>
|
||||
{
|
||||
val pos = err.next.pos
|
||||
Left("Could not parse command: (" + pos.line + "," + pos.column + "): " + err.msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
def command = phrase(identifier ~! (argument*))
|
||||
def identifier = unquoted | err("Expected identifier")
|
||||
def argument = ( (whitespaceChar+) ~> (unquoted | quoted) )
|
||||
|
||||
def unquoted: Parser[String] = ((unquotedChar ~! (unquotedMainChar*)) ^^ { case a ~ tail => (a :: tail).mkString("") })
|
||||
def quoted: Parser[String] = quote ~> quotedChars <~ (quote | err("Missing closing quote character"))
|
||||
|
||||
def quotedChars: Parser[String] = (escape | nonescapeChar)*
|
||||
def escape: Parser[Char] = backslash ~> (escapeChar | err("Illegal escape"))
|
||||
def escapeChar: Parser[Char] = quote | backslash
|
||||
def nonescapeChar: Parser[Char] = elem("", ch => !isEscapeChar(ch) && ch != EofCh)
|
||||
def unquotedChar: Parser[Char] = elem("", ch => !isEscapeChar(ch) && !Character.isWhitespace(ch) && ch != EofCh)
|
||||
def unquotedMainChar: Parser[Char] = unquotedChar | (errorIfEscape ~> failure(""))
|
||||
|
||||
private def errorIfEscape = (not(quote) | err("Unexpected quote character")) ~>
|
||||
(not(backslash) | err("Escape sequences can only occur in a quoted argument"))
|
||||
|
||||
private def isEscapeChar(ch: Char) = ch == '\\' || ch == '"'
|
||||
|
||||
def quote: Parser[Char] = '"'
|
||||
def backslash: Parser[Char] = '\\'
|
||||
def whitespaceChar: Parser[Char] = elem("whitespace", ch => Character.isWhitespace(ch))
|
||||
|
||||
private implicit def toString(p: Parser[List[Char]]): Parser[String] = p ^^ {_ mkString "" }
|
||||
}
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
|
||||
package sbt.impl
|
||||
import sbt._
|
||||
|
||||
import java.util.Properties
|
||||
import java.io.{File, FileInputStream, FileOutputStream, InputStream, OutputStream}
|
||||
import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set}
|
||||
|
||||
private[sbt] object PropertiesUtilities
|
||||
{
|
||||
def write(properties: Properties, label: String, to: Path, log: Logger) =
|
||||
FileUtilities.writeStream(to.asFile, log)(output => { properties.store(output, label); None })
|
||||
def load(properties: Properties, from: Path, log: Logger): Option[String] =
|
||||
{
|
||||
val file = from.asFile
|
||||
if(file.exists)
|
||||
FileUtilities.readStream(file, log)( input => { properties.load(input); None })
|
||||
else
|
||||
None
|
||||
}
|
||||
def propertyNames(properties: Properties): Iterable[String] =
|
||||
wrap.Wrappers.toList(properties.propertyNames).map(_.toString)
|
||||
}
|
||||
|
||||
private[sbt] object MapUtilities
|
||||
{
|
||||
def write[Key, Value](map: Map[Key, Value], label: String, to: Path, log: Logger)(implicit keyFormat: Format[Key], valueFormat: Format[Value]): Option[String] =
|
||||
{
|
||||
val properties = new Properties
|
||||
map foreach { pair => properties.setProperty(keyFormat.toString(pair._1), valueFormat.toString(pair._2)) }
|
||||
PropertiesUtilities.write(properties, label, to, log)
|
||||
}
|
||||
def read[Key, Value](map: Map[Key, Value], from: Path, log: Logger)(implicit keyFormat: Format[Key], valueFormat: Format[Value]): Option[String] =
|
||||
{
|
||||
map.clear
|
||||
val properties = new Properties
|
||||
PropertiesUtilities.load(properties, from, log) orElse
|
||||
{
|
||||
for(name <- PropertiesUtilities.propertyNames(properties))
|
||||
map.put( keyFormat.fromString(name), valueFormat.fromString(properties.getProperty(name)))
|
||||
None
|
||||
}
|
||||
}
|
||||
def all[Key, Value](map: Map[Key, Set[Value]]): Iterable[Value] =
|
||||
map.values.toList.flatMap(set => set.toList)
|
||||
|
||||
def readOnlyIterable[Key, Value](i: Map[Key, Set[Value]]): Iterable[(Key, scala.collection.Set[Value])] =
|
||||
for( (key, set) <- i.elements.toList) yield (key, wrap.Wrappers.readOnly(set))//.readOnly)
|
||||
|
||||
def mark[Key, Value](source: Key, map: Map[Key, Set[Value]])
|
||||
{
|
||||
if(!map.contains(source))
|
||||
map.put(source, new HashSet[Value])
|
||||
}
|
||||
def add[Key, Value](key: Key, value: Value, map: Map[Key, Set[Value]]): Unit =
|
||||
map.getOrElseUpdate(key, new HashSet[Value]) + value
|
||||
}
|
||||
|
|
@ -0,0 +1,415 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah, Vesa Vilhonen
|
||||
*/
|
||||
package sbt
|
||||
|
||||
import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder}
|
||||
import java.io.{BufferedReader, Closeable, InputStream, InputStreamReader, IOException, OutputStream, PrintStream}
|
||||
import java.io.{FilterInputStream, FilterOutputStream, PipedInputStream, PipedOutputStream}
|
||||
import java.io.{File, FileInputStream, FileOutputStream}
|
||||
import java.net.URL
|
||||
|
||||
import scala.concurrent.SyncVar
|
||||
|
||||
/** Runs provided code in a new Thread and returns the Thread instance. */
|
||||
private object Spawn
|
||||
{
|
||||
def apply(f: => Unit): Thread = apply(f, false)
|
||||
def apply(f: => Unit, daemon: Boolean): Thread =
|
||||
{
|
||||
val thread = new Thread() { override def run() = { f } }
|
||||
thread.setDaemon(daemon)
|
||||
thread.start()
|
||||
thread
|
||||
}
|
||||
}
|
||||
private object Future
|
||||
{
|
||||
def apply[T](f: => T): () => T =
|
||||
{
|
||||
val result = new SyncVar[Either[Throwable, T]]
|
||||
def run: Unit =
|
||||
try { result.set(Right(f)) }
|
||||
catch { case e: Exception => result.set(Left(e)) }
|
||||
Spawn(run)
|
||||
() =>
|
||||
result.get match
|
||||
{
|
||||
case Right(value) => value
|
||||
case Left(exception) => throw exception
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private object BasicIO
|
||||
{
|
||||
def apply(log: Logger, withIn: Boolean) = new ProcessIO(input(withIn), processFully(log, Level.Info), processFully(log, Level.Error))
|
||||
|
||||
def ignoreOut = (i: OutputStream) => ()
|
||||
val BufferSize = 8192
|
||||
def close(c: java.io.Closeable) = try { c.close() } catch { case _: java.io.IOException => () }
|
||||
def processFully(log: Logger, level: Level.Value)(i: InputStream) { processFully(line => log.log(level, line))(i) }
|
||||
def processFully(processLine: String => Unit)(i: InputStream)
|
||||
{
|
||||
val reader = new BufferedReader(new InputStreamReader(i))
|
||||
processLinesFully(processLine)(reader.readLine)
|
||||
}
|
||||
def processLinesFully(processLine: String => Unit)(readLine: () => String)
|
||||
{
|
||||
def readFully()
|
||||
{
|
||||
val line = readLine()
|
||||
if(line != null)
|
||||
{
|
||||
processLine(line)
|
||||
readFully()
|
||||
}
|
||||
}
|
||||
readFully()
|
||||
}
|
||||
def connectToIn(o: OutputStream) { transferFully(System.in, o) }
|
||||
def input(connect: Boolean): OutputStream => Unit = if(connect) connectToIn else ignoreOut
|
||||
def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput))
|
||||
def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, transferFully(_, System.out), transferFully(_, System.err))
|
||||
|
||||
def transferFully(in: InputStream, out: OutputStream): Unit =
|
||||
try { transferFullyImpl(in, out) }
|
||||
catch { case _: InterruptedException => () }
|
||||
|
||||
private[this] def transferFullyImpl(in: InputStream, out: OutputStream)
|
||||
{
|
||||
val continueCount = 1//if(in.isInstanceOf[PipedInputStream]) 1 else 0
|
||||
val buffer = new Array[Byte](BufferSize)
|
||||
def read
|
||||
{
|
||||
val byteCount = in.read(buffer)
|
||||
if(byteCount >= continueCount)
|
||||
{
|
||||
out.write(buffer, 0, byteCount)
|
||||
out.flush()
|
||||
read
|
||||
}
|
||||
}
|
||||
read
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private abstract class AbstractProcessBuilder extends ProcessBuilder with SinkPartialBuilder with SourcePartialBuilder
|
||||
{
|
||||
def #&&(other: ProcessBuilder): ProcessBuilder = new AndProcessBuilder(this, other)
|
||||
def #||(other: ProcessBuilder): ProcessBuilder = new OrProcessBuilder(this, other)
|
||||
def #|(other: ProcessBuilder): ProcessBuilder =
|
||||
{
|
||||
require(other.canPipeTo, "Piping to multiple processes is not supported.")
|
||||
new PipedProcessBuilder(this, other, false)
|
||||
}
|
||||
def ##(other: ProcessBuilder): ProcessBuilder = new SequenceProcessBuilder(this, other)
|
||||
|
||||
protected def toSource = this
|
||||
protected def toSink = this
|
||||
|
||||
def run(): Process = run(false)
|
||||
def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput))
|
||||
def run(log: Logger): Process = run(log, false)
|
||||
def run(log: Logger, connectInput: Boolean): Process = run(BasicIO(log, connectInput))
|
||||
|
||||
def ! = run(false).exitValue()
|
||||
def !< = run(true).exitValue()
|
||||
def !(log: Logger) = runBuffered(log, false)
|
||||
def !<(log: Logger) = runBuffered(log, true)
|
||||
private[this] def runBuffered(log: Logger, connectInput: Boolean) =
|
||||
{
|
||||
val log2 = new BufferedLogger(log)
|
||||
log2.bufferAll { run(log2, connectInput).exitValue() }
|
||||
}
|
||||
def !(io: ProcessIO) = run(io).exitValue()
|
||||
|
||||
def canPipeTo = false
|
||||
}
|
||||
|
||||
private[sbt] class URLBuilder(url: URL) extends URLPartialBuilder with SourcePartialBuilder
|
||||
{
|
||||
protected def toSource = new URLInput(url)
|
||||
}
|
||||
private[sbt] class FileBuilder(base: File) extends FilePartialBuilder with SinkPartialBuilder with SourcePartialBuilder
|
||||
{
|
||||
protected def toSource = new FileInput(base)
|
||||
protected def toSink = new FileOutput(base, false)
|
||||
def #<<(f: File): ProcessBuilder = #<<(new FileInput(f))
|
||||
def #<<(u: URL): ProcessBuilder = #<<(new URLInput(u))
|
||||
def #<<(s: => InputStream): ProcessBuilder = #<<(new InputStreamBuilder(s))
|
||||
def #<<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, new FileOutput(base, true), false)
|
||||
}
|
||||
|
||||
private abstract class BasicBuilder extends AbstractProcessBuilder
|
||||
{
|
||||
protected[this] def checkNotThis(a: ProcessBuilder) = require(a != this, "Compound process '" + a + "' cannot contain itself.")
|
||||
final def run(io: ProcessIO): Process =
|
||||
{
|
||||
val p = createProcess(io)
|
||||
p.start()
|
||||
p
|
||||
}
|
||||
protected[this] def createProcess(io: ProcessIO): BasicProcess
|
||||
}
|
||||
private abstract class BasicProcess extends Process
|
||||
{
|
||||
def start(): Unit
|
||||
}
|
||||
|
||||
private abstract class CompoundProcess extends BasicProcess
|
||||
{
|
||||
def destroy() { destroyer() }
|
||||
def exitValue() = getExitValue().getOrElse(error("No exit code: process destroyed."))
|
||||
|
||||
def start() = getExitValue
|
||||
|
||||
protected lazy val (getExitValue, destroyer) =
|
||||
{
|
||||
val code = new SyncVar[Option[Int]]()
|
||||
code.set(None)
|
||||
val thread = Spawn(code.set(runAndExitValue()))
|
||||
|
||||
(
|
||||
Future { thread.join(); code.get },
|
||||
() => thread.interrupt()
|
||||
)
|
||||
}
|
||||
|
||||
/** Start and block until the exit value is available and then return it in Some. Return None if destroyed (use 'run')*/
|
||||
protected[this] def runAndExitValue(): Option[Int]
|
||||
|
||||
protected[this] def runInterruptible[T](action: => T)(destroyImpl: => Unit): Option[T] =
|
||||
{
|
||||
try { Some(action) }
|
||||
catch { case _: InterruptedException => destroyImpl; None }
|
||||
}
|
||||
}
|
||||
|
||||
private abstract class SequentialProcessBuilder(a: ProcessBuilder, b: ProcessBuilder, operatorString: String) extends BasicBuilder
|
||||
{
|
||||
checkNotThis(a)
|
||||
checkNotThis(b)
|
||||
override def toString = " ( " + a + " " + operatorString + " " + b + " ) "
|
||||
}
|
||||
private class PipedProcessBuilder(first: ProcessBuilder, second: ProcessBuilder, toError: Boolean) extends SequentialProcessBuilder(first, second, if(toError) "#|!" else "#|")
|
||||
{
|
||||
override def createProcess(io: ProcessIO) = new PipedProcesses(first, second, io, toError)
|
||||
}
|
||||
private class AndProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#&&")
|
||||
{
|
||||
override def createProcess(io: ProcessIO) = new AndProcess(first, second, io)
|
||||
}
|
||||
private class OrProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#||")
|
||||
{
|
||||
override def createProcess(io: ProcessIO) = new OrProcess(first, second, io)
|
||||
}
|
||||
private class SequenceProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "##")
|
||||
{
|
||||
override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io)
|
||||
}
|
||||
|
||||
private class SequentialProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO, evaluateSecondProcess: Int => Boolean) extends CompoundProcess
|
||||
{
|
||||
protected[this] override def runAndExitValue() =
|
||||
{
|
||||
val first = a.run(io)
|
||||
runInterruptible(first.exitValue)(first.destroy()) flatMap
|
||||
{ codeA =>
|
||||
if(evaluateSecondProcess(codeA))
|
||||
{
|
||||
val second = b.run(io)
|
||||
runInterruptible(second.exitValue)(second.destroy())
|
||||
}
|
||||
else
|
||||
Some(codeA)
|
||||
}
|
||||
}
|
||||
}
|
||||
private class AndProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, _ == 0)
|
||||
private class OrProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, _ != 0)
|
||||
private class ProcessSequence(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, ignore => true)
|
||||
|
||||
|
||||
private class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess
|
||||
{
|
||||
protected[this] override def runAndExitValue() =
|
||||
{
|
||||
val currentSource = new SyncVar[Option[InputStream]]
|
||||
val pipeOut = new PipedOutputStream
|
||||
val source = new PipeSource(currentSource, pipeOut, a.toString)
|
||||
source.start()
|
||||
|
||||
val pipeIn = new PipedInputStream(pipeOut)
|
||||
val currentSink = new SyncVar[Option[OutputStream]]
|
||||
val sink = new PipeSink(pipeIn, currentSink, b.toString)
|
||||
sink.start()
|
||||
|
||||
def handleOutOrError(fromOutput: InputStream) = currentSource.put(Some(fromOutput))
|
||||
|
||||
val firstIO =
|
||||
if(toError)
|
||||
defaultIO.withError(handleOutOrError)
|
||||
else
|
||||
defaultIO.withOutput(handleOutOrError)
|
||||
val secondIO = defaultIO.withInput(toInput => currentSink.put(Some(toInput)) )
|
||||
|
||||
val second = b.run(secondIO)
|
||||
val first = a.run(firstIO)
|
||||
try
|
||||
{
|
||||
runInterruptible {
|
||||
first.exitValue
|
||||
currentSource.put(None)
|
||||
currentSink.put(None)
|
||||
val result = second.exitValue
|
||||
result
|
||||
} {
|
||||
first.destroy()
|
||||
second.destroy()
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
BasicIO.close(pipeIn)
|
||||
BasicIO.close(pipeOut)
|
||||
}
|
||||
}
|
||||
}
|
||||
private class PipeSource(currentSource: SyncVar[Option[InputStream]], pipe: PipedOutputStream, label: => String) extends Thread
|
||||
{
|
||||
final override def run()
|
||||
{
|
||||
currentSource.get match
|
||||
{
|
||||
case Some(source) =>
|
||||
try { BasicIO.transferFully(source, pipe) }
|
||||
catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() }
|
||||
finally
|
||||
{
|
||||
BasicIO.close(source)
|
||||
currentSource.unset()
|
||||
}
|
||||
run()
|
||||
case None =>
|
||||
currentSource.unset()
|
||||
BasicIO.close(pipe)
|
||||
}
|
||||
}
|
||||
}
|
||||
private class PipeSink(pipe: PipedInputStream, currentSink: SyncVar[Option[OutputStream]], label: => String) extends Thread
|
||||
{
|
||||
final override def run()
|
||||
{
|
||||
currentSink.get match
|
||||
{
|
||||
case Some(sink) =>
|
||||
try { BasicIO.transferFully(pipe, sink) }
|
||||
catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() }
|
||||
finally
|
||||
{
|
||||
BasicIO.close(sink)
|
||||
currentSink.unset()
|
||||
}
|
||||
run()
|
||||
case None =>
|
||||
currentSink.unset()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] class DummyProcessBuilder(override val toString: String, exitValue : => Int) extends AbstractProcessBuilder
|
||||
{
|
||||
override def run(io: ProcessIO): Process = new DummyProcess(exitValue)
|
||||
override def canPipeTo = true
|
||||
}
|
||||
/** A thin wrapper around a java.lang.Process. `ioThreads` are the Threads created to do I/O.
|
||||
* The implementation of `exitValue` waits until these threads die before returning. */
|
||||
private class DummyProcess(action: => Int) extends Process
|
||||
{
|
||||
private[this] val exitCode = Future(action)
|
||||
override def exitValue() = exitCode()
|
||||
override def destroy() {}
|
||||
}
|
||||
/** Represents a simple command without any redirection or combination. */
|
||||
private[sbt] class SimpleProcessBuilder(p: JProcessBuilder) extends AbstractProcessBuilder
|
||||
{
|
||||
override def run(io: ProcessIO): Process =
|
||||
{
|
||||
val process = p.start() // start the external process
|
||||
import io.{writeInput, processOutput, processError}
|
||||
// spawn threads that process the input, output, and error streams using the functions defined in `io`
|
||||
val inThread = Spawn(writeInput(process.getOutputStream), true)
|
||||
val outThread = Spawn(processOutput(process.getInputStream))
|
||||
val errorThread =
|
||||
if(!p.redirectErrorStream)
|
||||
Spawn(processError(process.getErrorStream)) :: Nil
|
||||
else
|
||||
Nil
|
||||
new SimpleProcess(process, inThread, outThread :: errorThread)
|
||||
}
|
||||
override def toString = p.command.toString
|
||||
override def canPipeTo = true
|
||||
}
|
||||
/** A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the
|
||||
* output and error streams of the process. `inputThread` is the Thread created to write to the input stream of
|
||||
* the process.
|
||||
* The implementation of `exitValue` interrupts `inputThread` and then waits until all I/O threads die before
|
||||
* returning. */
|
||||
private class SimpleProcess(p: JProcess, inputThread: Thread, outputThreads: List[Thread]) extends Process
|
||||
{
|
||||
override def exitValue() =
|
||||
{
|
||||
try { p.waitFor() }// wait for the process to terminate
|
||||
finally { inputThread.interrupt() } // we interrupt the input thread to notify it that it can terminate
|
||||
outputThreads.foreach(_.join()) // this ensures that all output is complete before returning (waitFor does not ensure this)
|
||||
p.exitValue()
|
||||
}
|
||||
override def destroy() =
|
||||
{
|
||||
try { p.destroy() }
|
||||
finally { inputThread.interrupt() }
|
||||
}
|
||||
}
|
||||
|
||||
private class FileOutput(file: File, append: Boolean) extends OutputStreamBuilder(new FileOutputStream(file, append), file.getAbsolutePath)
|
||||
private class URLInput(url: URL) extends InputStreamBuilder(url.openStream, url.toString)
|
||||
private class FileInput(file: File) extends InputStreamBuilder(new FileInputStream(file), file.getAbsolutePath)
|
||||
|
||||
import Uncloseable.protect
|
||||
private class OutputStreamBuilder(stream: => OutputStream, label: String) extends ThreadProcessBuilder(label, _.writeInput(protect(stream)))
|
||||
{
|
||||
def this(stream: => OutputStream) = this(stream, "<output stream>")
|
||||
}
|
||||
private class InputStreamBuilder(stream: => InputStream, label: String) extends ThreadProcessBuilder(label, _.processOutput(protect(stream)))
|
||||
{
|
||||
def this(stream: => InputStream) = this(stream, "<input stream>")
|
||||
}
|
||||
|
||||
private abstract class ThreadProcessBuilder(override val toString: String, runImpl: ProcessIO => Unit) extends AbstractProcessBuilder
|
||||
{
|
||||
override def run(io: ProcessIO): Process =
|
||||
{
|
||||
val success = new SyncVar[Boolean]
|
||||
success.put(false)
|
||||
new ThreadProcess(Spawn {runImpl(io); success.set(true) }, success)
|
||||
}
|
||||
}
|
||||
private final class ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process
|
||||
{
|
||||
override def exitValue() =
|
||||
{
|
||||
thread.join()
|
||||
if(success.get) 0 else 1
|
||||
}
|
||||
override def destroy() { thread.interrupt() }
|
||||
}
|
||||
|
||||
object Uncloseable
|
||||
{
|
||||
def apply(in: InputStream): InputStream = new FilterInputStream(in) { override def close() {} }
|
||||
def apply(out: OutputStream): OutputStream = new FilterOutputStream(out) { override def close() {} }
|
||||
def protect(in: InputStream): InputStream = if(in eq System.in) Uncloseable(in) else in
|
||||
def protect(out: OutputStream): OutputStream = if( (out eq System.out) || (out eq System.err)) Uncloseable(out) else out
|
||||
}
|
||||
|
|
@ -0,0 +1,154 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.impl
|
||||
import sbt._
|
||||
|
||||
import scala.collection.{immutable, mutable}
|
||||
import scala.collection.Map
|
||||
import sbt.wrap.Wrappers.identityMap
|
||||
|
||||
private[sbt] object RunTask
|
||||
{
|
||||
final type Task = Project#Task
|
||||
def apply(root: Task, rootName: String): List[WorkFailure[Task]] = apply(root, rootName, true)
|
||||
def apply(root: Task, rootName: String, parallelExecution: Boolean): List[WorkFailure[Task]] =
|
||||
apply(root, rootName, if(parallelExecution) Runtime.getRuntime.availableProcessors else 1)
|
||||
def apply(root: Task, rootName: String, maximumTasks: Int): List[WorkFailure[Task]] = (new RunTask(root, rootName, maximumTasks)).run()
|
||||
}
|
||||
import RunTask._
|
||||
private final class RunTask(root: Task, rootName: String, maximumTasks: Int) extends NotNull
|
||||
{
|
||||
require(maximumTasks >= 1)
|
||||
def parallel = maximumTasks > 1
|
||||
def multiProject = allProjects.size >= 2
|
||||
def run(): List[WorkFailure[Task]] =
|
||||
{
|
||||
try
|
||||
{
|
||||
runTasksExceptRoot() match
|
||||
{
|
||||
case Nil =>
|
||||
val result = runTask(root, rootName)
|
||||
result.map( errorMessage => WorkFailure(root, "Error running " + rootName + ": " + errorMessage) ).toList
|
||||
case failures => failures
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
for(project <- allProjects; saveError <- project.saveEnvironment)
|
||||
project.log.warn("Could not save properties for project " + project.name + ": " + saveError)
|
||||
}
|
||||
}
|
||||
// This runs all tasks except the root.task.
|
||||
// It uses a buffered logger in record mode to ensure that all output for a given task is consecutive
|
||||
// it ignores the root task so that the root task may be run with buffering disabled so that the output
|
||||
// occurs without delay.
|
||||
private def runTasksExceptRoot() =
|
||||
ParallelRunner.run(expandedRoot, expandedTaskName, runIfNotRoot, maximumTasks, (t: Task) => t.manager.log)
|
||||
private def withBuffered(f: BufferedLogger => Unit)
|
||||
{
|
||||
for(buffered <- bufferedLoggers)
|
||||
Control.trap(f(buffered))
|
||||
}
|
||||
/** Will be called in its own thread. Runs the given task if it is not the root task.*/
|
||||
private def runIfNotRoot(action: Task): Option[String] =
|
||||
{
|
||||
if(isRoot(action))
|
||||
None
|
||||
else
|
||||
runTask(action, expandedTaskName(action))
|
||||
}
|
||||
private val headerStart = System.getProperty("sbt.start.delimiter", "==")
|
||||
private val headerEnd = System.getProperty("sbt.end.delimiter", "==")
|
||||
private def isRoot(t: Task) = t == expandedRoot
|
||||
/** Will be called in its own thread except for the root task. */
|
||||
private def runTask(action: Task, actionName: String): Option[String] =
|
||||
{
|
||||
val label = if(multiProject) (action.manager.name + " / " + actionName) else actionName
|
||||
def banner(event: ControlEvent.Value, firstSeparator: String, secondSeparator: String) =
|
||||
Control.trap(action.manager.log.control(event, firstSeparator + " " + label + " " + secondSeparator))
|
||||
val buffered = parallel && !isRoot(action)
|
||||
if(buffered)
|
||||
banner(ControlEvent.Start, "\n ", "...")
|
||||
def doRun() =
|
||||
{
|
||||
banner(ControlEvent.Header, "\n" + headerStart, headerStart)
|
||||
try { action.invoke }
|
||||
catch { case e: Exception => action.manager.log.trace(e); Some(e.toString) }
|
||||
finally { banner(ControlEvent.Finish, headerEnd, headerEnd) }
|
||||
}
|
||||
|
||||
if(buffered)
|
||||
bufferLogging(action, doRun())
|
||||
else
|
||||
doRun()
|
||||
}
|
||||
private def bufferLogging[T](action: Task, f: => T) =
|
||||
bufferedLogger(action.manager) match
|
||||
{
|
||||
case Some(buffered) => buffered.buffer { f }
|
||||
case None => f
|
||||
}
|
||||
|
||||
/* Most of the following is for implicitly adding dependencies (see the expand method)*/
|
||||
private val projectDependencyCache = identityMap[Project, Iterable[Project]]
|
||||
private def dependencies(project: Project) = projectDependencyCache.getOrElseUpdate(project, project.topologicalSort.dropRight(1))
|
||||
|
||||
private val expandedCache = identityMap[Task, Task]
|
||||
private def expanded(task: Task): Task = expandedCache.getOrElseUpdate(task, expandImpl(task))
|
||||
|
||||
private val expandedTaskNameCache = identityMap[Task, String]
|
||||
private def expandedTaskName(task: Task) =
|
||||
if(task == expandedRoot)
|
||||
rootName
|
||||
else
|
||||
expandedTaskNameCache.getOrElse(task, task.name)
|
||||
|
||||
private val nameToTaskCache = identityMap[Project, Map[String, Task]]
|
||||
private def nameToTaskMap(project: Project): Map[String, Task] = nameToTaskCache.getOrElseUpdate(project, project.tasks)
|
||||
private def taskForName(project: Project, name: String): Option[Task] = nameToTaskMap(project).get(name)
|
||||
|
||||
private val taskNameCache = identityMap[Project, Map[Task, String]]
|
||||
private def taskName(task: Task) =
|
||||
{
|
||||
val project = task.manager
|
||||
taskNameCache.getOrElseUpdate(project, taskNameMap(project)).get(task)
|
||||
}
|
||||
|
||||
private val expandedRoot = expand(root)
|
||||
private val allTasks = expandedRoot.topologicalSort
|
||||
private val allProjects = Set(allTasks.map(_.manager).toSeq : _*)
|
||||
private val bufferedLoggers = if(parallel) allProjects.toList.flatMap(bufferedLogger) else Nil
|
||||
|
||||
/** Adds implicit dependencies, which are tasks with the same name in the project dependencies
|
||||
* of the enclosing project of the task.*/
|
||||
private def expand(root: Task): Task = expanded(root)
|
||||
private def expandImpl(task: Task): Task =
|
||||
{
|
||||
val nameOption = taskName(task)
|
||||
val explicitDependencies = task.dependencies
|
||||
val implicitDependencies = nameOption.map(name => dependencies(task.manager).flatMap(noninteractiveTask(name)) ).getOrElse(Nil)
|
||||
val allDependencies = mutable.HashSet( (explicitDependencies ++ implicitDependencies).toSeq : _* )
|
||||
val expandedTask = task.setDependencies(allDependencies.toList.map(expanded))
|
||||
nameOption.foreach(name => expandedTaskNameCache(expandedTask) = name)
|
||||
expandedTask
|
||||
}
|
||||
private def noninteractiveTask(name: String)(project: Project): Option[Task] =
|
||||
taskForName(project, name) flatMap { task =>
|
||||
if(task.interactive)
|
||||
{
|
||||
project.log.debug("Not including task " + name + " in project " + project.name + ": interactive tasks can only be run directly.")
|
||||
None
|
||||
}
|
||||
else
|
||||
Some(task)
|
||||
}
|
||||
private def taskNameMap(project: Project) = mutable.Map(nameToTaskMap(project).map(_.swap).toSeq : _*)
|
||||
private def bufferedLogger(project: Project): Option[BufferedLogger] =
|
||||
project.log match
|
||||
{
|
||||
case buffered: BufferedLogger => Some(buffered)
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.impl
|
||||
import sbt._
|
||||
|
||||
private[sbt] object SelectMainClass
|
||||
{
|
||||
def apply(promptIfMultipleChoices: Boolean, mainClasses: List[String]) =
|
||||
{
|
||||
mainClasses match
|
||||
{
|
||||
case Nil => None
|
||||
case head :: Nil => Some(head)
|
||||
case multiple =>
|
||||
if(promptIfMultipleChoices)
|
||||
{
|
||||
println("\nMultiple main classes detected, select one to run:\n")
|
||||
for( (className, index) <- multiple.zipWithIndex )
|
||||
println(" [" + (index+1) + "] " + className)
|
||||
val line = trim(SimpleReader.readLine("\nEnter number: "))
|
||||
println("")
|
||||
toInt(line, multiple.length) map multiple.apply
|
||||
}
|
||||
else
|
||||
None
|
||||
}
|
||||
}
|
||||
private def trim(s: Option[String]) = s.getOrElse("")
|
||||
private def toInt(s: String, size: Int) =
|
||||
try
|
||||
{
|
||||
val i = s.toInt
|
||||
if(i > 0 && i <= size)
|
||||
Some(i-1)
|
||||
else
|
||||
{
|
||||
println("Number out of range: was " + i + ", expected number between 1 and " + size)
|
||||
None
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
case nfe: NumberFormatException =>
|
||||
println("Invalid number: " + nfe.toString)
|
||||
None
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
|
||||
/* The following implements the simple syntax for storing test definitions.
|
||||
* The syntax is:
|
||||
*
|
||||
* definition := isModule? className separator className
|
||||
* isModule := '<module>'
|
||||
* separator := '<<'
|
||||
*/
|
||||
|
||||
import scala.util.parsing.combinator._
|
||||
|
||||
import TestParser._
|
||||
/** Represents a test implemented by 'testClassName' of type 'superClassName'.*/
|
||||
final case class TestDefinition(isModule: Boolean, testClassName: String, superClassName: String) extends org.scalatools.testing.TestFingerprint with NotNull
|
||||
{
|
||||
override def toString =
|
||||
(if(isModule) IsModuleLiteral else "") + testClassName + SubSuperSeparator + superClassName
|
||||
}
|
||||
final class TestParser extends RegexParsers with NotNull
|
||||
{
|
||||
def test: Parser[TestDefinition] =
|
||||
( isModule ~! className ~! SubSuperSeparator ~! className ) ^^
|
||||
{ case module ~ testName ~ SubSuperSeparator ~ superName => TestDefinition(module, testName.trim, superName.trim) }
|
||||
def isModule: Parser[Boolean] = (IsModuleLiteral?) ^^ (_.isDefined)
|
||||
def className: Parser[String] = ClassNameRegexString.r
|
||||
|
||||
def parse(testDefinitionString: String): Either[String, TestDefinition] =
|
||||
{
|
||||
def parseError(msg: String) = Left("Could not parse test definition '" + testDefinitionString + "': " + msg)
|
||||
parseAll(test, testDefinitionString) match
|
||||
{
|
||||
case Success(result, next) => Right(result)
|
||||
case err: NoSuccess => parseError(err.msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
object TestParser
|
||||
{
|
||||
val IsModuleLiteral = "<module>"
|
||||
val SubSuperSeparator = "<<"
|
||||
val ClassNameRegexString = """[^<]+"""
|
||||
def parse(testDefinitionString: String): Either[String, TestDefinition] = (new TestParser).parse(testDefinitionString)
|
||||
}
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.impl
|
||||
import sbt._
|
||||
|
||||
import java.io.File
|
||||
import scala.collection.mutable.{HashMap, Map}
|
||||
|
||||
/** Only intended to be used once per instance. */
|
||||
private[sbt] class TestStatusReporter(path: Path, log: Logger) extends TestsListener
|
||||
{
|
||||
private lazy val succeeded: Map[String, Long] = TestStatus.read(path, log)
|
||||
|
||||
def doInit {}
|
||||
def startGroup(name: String) { succeeded removeKey name }
|
||||
def testEvent(event: TestEvent) {}
|
||||
def endGroup(name: String, t: Throwable) {}
|
||||
def endGroup(name: String, result: Result.Value)
|
||||
{
|
||||
if(result == Result.Passed)
|
||||
succeeded(name) = System.currentTimeMillis
|
||||
}
|
||||
def doComplete(finalResult: Result.Value) { complete() }
|
||||
def doComplete(t: Throwable) { complete() }
|
||||
|
||||
private def complete()
|
||||
{
|
||||
TestStatus.write(succeeded, "Successful Tests", path, log)
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] class TestQuickFilter(testAnalysis: CompileAnalysis, failedOnly: Boolean, path: Path, log: Logger) extends (String => Boolean) with NotNull
|
||||
{
|
||||
private lazy val exclude = TestStatus.read(path, log)
|
||||
private lazy val map = testAnalysis.testSourceMap
|
||||
def apply(test: String) =
|
||||
exclude.get(test) match
|
||||
{
|
||||
case None => true // include because this test has not been run or did not succeed
|
||||
case Some(lastSuccessTime) => // succeeded the last time it was run
|
||||
if(failedOnly)
|
||||
false // don't include because the last time succeeded
|
||||
else
|
||||
testAnalysis.products(map(test)) match
|
||||
{
|
||||
case None => true
|
||||
case Some(products) => products.exists(lastSuccessTime <= _.lastModified) // include if the test is newer than the last run
|
||||
}
|
||||
}
|
||||
}
|
||||
private object TestStatus
|
||||
{
|
||||
import java.util.Properties
|
||||
def read(path: Path, log: Logger): Map[String, Long] =
|
||||
{
|
||||
val map = new HashMap[String, Long]
|
||||
val properties = new Properties
|
||||
logError(PropertiesUtilities.load(properties, path, log), "loading", log)
|
||||
for(test <- PropertiesUtilities.propertyNames(properties))
|
||||
map.put(test, properties.getProperty(test).toLong)
|
||||
map
|
||||
}
|
||||
def write(map: Map[String, Long], label: String, path: Path, log: Logger)
|
||||
{
|
||||
val properties = new Properties
|
||||
for( (test, lastSuccessTime) <- map)
|
||||
properties.setProperty(test, lastSuccessTime.toString)
|
||||
logError(PropertiesUtilities.write(properties, label, path, log), "writing", log)
|
||||
}
|
||||
private def logError(result: Option[String], action: String, log: Logger)
|
||||
{
|
||||
result.foreach(msg => log.error("Error " + action + " test status: " + msg))
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,140 @@
|
|||
package sbt.jetty
|
||||
|
||||
import java.io.File
|
||||
import java.net.URL
|
||||
|
||||
/* This class starts Jetty.
|
||||
* NOTE: DO NOT actively use this class. You will see NoClassDefFoundErrors if you fail
|
||||
* to do so. Only use its name in JettyRun for reflective loading. This allows using
|
||||
* the Jetty libraries provided on the project classpath instead of requiring them to be
|
||||
* available on sbt's classpath at startup.
|
||||
*/
|
||||
private object LazyJettyRun${jetty.version} extends JettyRun
|
||||
{
|
||||
${jetty.imports}
|
||||
|
||||
import java.lang.ref.{Reference, WeakReference}
|
||||
|
||||
// Jetty classes must be loaded on initialization in order for the version detection code in WebApp to work properly
|
||||
// this forces them to be loaded- otherwise, it is possible they aren't loaded until 'apply' is called
|
||||
private[this] val forceJettyLoad = classOf[Server]
|
||||
|
||||
val DefaultMaxIdleTime = 30000
|
||||
|
||||
def apply(configuration: JettyConfiguration, jettyLoader: ClassLoader): Stoppable =
|
||||
{
|
||||
val oldLog = Log.getLog
|
||||
Log.setLog(new JettyLogger(configuration.log))
|
||||
val server = new Server
|
||||
|
||||
def configureScanner(listener: Scanner.BulkListener, scanDirectories: Seq[File], scanInterval: Int) =
|
||||
{
|
||||
if(scanDirectories.isEmpty)
|
||||
None
|
||||
else
|
||||
{
|
||||
configuration.log.debug("Scanning for changes to: " + scanDirectories.mkString(", "))
|
||||
val scanner = new Scanner
|
||||
val list = new java.util.ArrayList[File]
|
||||
scanDirectories.foreach(x => list.add(x))
|
||||
scanner.setScanDirs(list)
|
||||
scanner.setRecursive(true)
|
||||
scanner.setScanInterval(scanInterval)
|
||||
scanner.setReportExistingFilesOnStartup(false)
|
||||
scanner.addListener(listener)
|
||||
scanner.start()
|
||||
Some(new WeakReference(scanner))
|
||||
}
|
||||
}
|
||||
|
||||
val (listener, scanner) =
|
||||
configuration match
|
||||
{
|
||||
case c: DefaultJettyConfiguration =>
|
||||
import c._
|
||||
configureDefaultConnector(server, port)
|
||||
def classpathURLs = classpath.get.map(_.asURL).toSeq
|
||||
val webapp = new WebAppContext(war.absolutePath, contextPath)
|
||||
|
||||
def createLoader =
|
||||
{
|
||||
// Jetty treats WebAppClassLoader subclasses specially and we need this special behavior.
|
||||
// However, Jetty adds extra classpath components directly using 'addURL'.
|
||||
// We only want the 'urls' we provide in the constructor, so 'addURL' is overridden to do nothing.
|
||||
class SbtWebAppLoader(urls: Seq[URL]) extends WebAppClassLoader(jettyLoader, webapp)
|
||||
{
|
||||
urls.foreach(super.addURL)
|
||||
override def addURL(u: URL) = {}
|
||||
}
|
||||
new SbtWebAppLoader(classpathURLs)
|
||||
}
|
||||
def setLoader() = webapp.setClassLoader(createLoader)
|
||||
|
||||
setLoader()
|
||||
server.setHandler(webapp)
|
||||
|
||||
val listener = new Scanner.BulkListener with Reload {
|
||||
def reloadApp() = reload(server, setLoader(), log)
|
||||
def filesChanged(files: java.util.List[_]) { reloadApp() }
|
||||
}
|
||||
(Some(listener), configureScanner(listener, c.scanDirectories, c.scanInterval))
|
||||
case c: CustomJettyConfiguration =>
|
||||
for(x <- c.jettyConfigurationXML)
|
||||
(new XmlConfiguration(x.toString)).configure(server)
|
||||
for(file <- c.jettyConfigurationFiles)
|
||||
(new XmlConfiguration(file.toURI.toURL)).configure(server)
|
||||
(None, None)
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
server.start()
|
||||
new StopServer(new WeakReference(server), listener.map(new WeakReference(_)), scanner, oldLog)
|
||||
}
|
||||
catch { case e => server.stop(); throw e }
|
||||
}
|
||||
private def configureDefaultConnector(server: Server, port: Int)
|
||||
{
|
||||
val defaultConnector = new SelectChannelConnector
|
||||
defaultConnector.setPort(port)
|
||||
defaultConnector.setMaxIdleTime(DefaultMaxIdleTime)
|
||||
server.addConnector(defaultConnector)
|
||||
}
|
||||
trait Reload { def reloadApp(): Unit }
|
||||
private class StopServer(serverReference: Reference[Server], reloadReference: Option[Reference[Reload]], scannerReferenceOpt: Option[Reference[Scanner]], oldLog: JLogger) extends Stoppable
|
||||
{
|
||||
def reload(): Unit = on(reloadReference)(_.reloadApp())
|
||||
private def on[T](refOpt: Option[Reference[T]])(f: T => Unit): Unit = refOpt.foreach(ref => onReferenced(ref.get)(f))
|
||||
private def onReferenced[T](t: T)(f: T => Unit): Unit = if(t == null) () else f(t)
|
||||
def stop()
|
||||
{
|
||||
onReferenced(serverReference.get)(_.stop())
|
||||
on(scannerReferenceOpt)(_.stop())
|
||||
Log.setLog(oldLog)
|
||||
}
|
||||
}
|
||||
private def reload(server: Server, reconfigure: => Unit, log: Logger)
|
||||
{
|
||||
log.info("Reloading web application...")
|
||||
val handlers = wrapNull(server.getHandlers, server.getHandler)
|
||||
log.debug("Stopping handlers: " + handlers.mkString(", "))
|
||||
handlers.foreach(_.stop)
|
||||
log.debug("Reconfiguring...")
|
||||
reconfigure
|
||||
log.debug("Restarting handlers: " + handlers.mkString(", "))
|
||||
handlers.foreach(_.start)
|
||||
log.info("Reload complete.")
|
||||
}
|
||||
private def wrapNull(a: Array[Handler], b: Handler) =
|
||||
(a, b) match
|
||||
{
|
||||
case (null, null) => Nil
|
||||
case (null, notB) => notB :: Nil
|
||||
case (notA, null) => notA.toList
|
||||
case (notA, notB) => notB :: notA.toList
|
||||
}
|
||||
private class JettyLogger(delegate: Logger) extends JettyLoggerBase(delegate) with JLogger
|
||||
{
|
||||
def getLogger(name: String) = this
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
|
||||
import org.mortbay.jetty.{Handler, Server}
|
||||
import org.mortbay.jetty.nio.SelectChannelConnector
|
||||
import org.mortbay.jetty.webapp.{WebAppClassLoader, WebAppContext}
|
||||
import org.mortbay.log.{Log, Logger => JLogger}
|
||||
import org.mortbay.util.Scanner
|
||||
import org.mortbay.xml.XmlConfiguration
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
|
||||
import org.eclipse.jetty.server.{Server, Handler}
|
||||
import org.eclipse.jetty.server.nio.SelectChannelConnector
|
||||
import org.eclipse.jetty.webapp.{WebAppClassLoader, WebAppContext}
|
||||
import org.eclipse.jetty.util.log.{Log, Logger => JLogger}
|
||||
import org.eclipse.jetty.util.Scanner
|
||||
import org.eclipse.jetty.xml.XmlConfiguration
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
/** Parses and executes a command (connects a parser to a runner). */
|
||||
class CommandRunner(parser: CommandParsing, execute: Executing)
|
||||
{
|
||||
def apply(processorCommand: String): Unit =
|
||||
parser.parseCommand(processorCommand) match
|
||||
{
|
||||
case Left(err) => throw new ProcessorException(err)
|
||||
case Right(command) => execute(command)
|
||||
}
|
||||
}
|
||||
object CommandRunner
|
||||
{
|
||||
/** Convenience method for constructing a CommandRunner with the minimal information required.*/
|
||||
def apply(manager: Manager, defParser: DefinitionParser, prefix: String, log: Logger): CommandRunner =
|
||||
{
|
||||
val parser = new CommandParser(defaultErrorMessage(prefix), defParser)
|
||||
val info = new InfoImpl(manager, prefix, parser, System.out.println)
|
||||
val execute = new Execute(manager, info, log)
|
||||
new CommandRunner(parser, execute)
|
||||
}
|
||||
def defaultErrorMessage(prefix: String) =
|
||||
"Invalid processor command. Run " + prefix + "help to see valid commands."
|
||||
}
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
/** Executes a parsed command. */
|
||||
class Execute(manager: Manager, info: Info, log: Logger) extends Executing
|
||||
{
|
||||
def apply(command: Command): Unit =
|
||||
command match
|
||||
{
|
||||
case dr: DefineRepository =>
|
||||
manager.defineRepository(dr.repo)
|
||||
log.info("Defined new processor repository '" + dr.repo + "'")
|
||||
case dp: DefineProcessor =>
|
||||
manager.defineProcessor(dp.pdef)
|
||||
log.info("Defined new processor '" + dp.pdef + "'")
|
||||
case rd: RemoveDefinition =>
|
||||
val removed = manager.removeDefinition(rd.label)
|
||||
log.info("Removed '" + removed + "'")
|
||||
case Help => info.help()
|
||||
case Show => info.show()
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
class Handler(baseProject: Project) extends NotNull
|
||||
{
|
||||
def unapply(line: String): Option[ParsedProcessor] =
|
||||
line.split("""\s+""", 2) match
|
||||
{
|
||||
case Array(label @ GetProcessor(processor), args @ _*) => Some( new ParsedProcessor(label, processor, args.mkString) )
|
||||
case _ => None
|
||||
}
|
||||
private object GetProcessor
|
||||
{
|
||||
def unapply(name: String): Option[Processor] =
|
||||
manager.processorDefinition(name).flatMap(manager.processor)
|
||||
}
|
||||
|
||||
def lock = baseProject.info.launcher.globalLock
|
||||
|
||||
lazy val scalaVersion = baseProject.defScalaVersion.value
|
||||
lazy val base = baseProject.info.bootPath / ("scala-" + scalaVersion) / "sbt-processors"
|
||||
lazy val persistBase = Path.userHome / ".ivy2" / "sbt"
|
||||
|
||||
def retrieveLockFile = base / lockName
|
||||
def persistLockFile = persistBase / lockName
|
||||
def lockName = "processors.lock"
|
||||
def definitionsFile = persistBase / "processors.properties"
|
||||
def files = new ManagerFiles(base.asFile, retrieveLockFile.asFile, definitionsFile.asFile)
|
||||
|
||||
lazy val defParser = new DefinitionParser
|
||||
lazy val manager = new ManagerImpl(files, scalaVersion, new Persist(lock, persistLockFile.asFile, defParser), baseProject.log)
|
||||
}
|
||||
class ParsedProcessor(val label: String, val processor: Processor, val arguments: String) extends NotNull
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
class InfoImpl(manager: Manager, prefix: String, parser: CommandParser, print: String => Unit) extends Info
|
||||
{
|
||||
def show()
|
||||
{
|
||||
print("Processors:\n\t" + manager.processors.values.mkString("\n\t"))
|
||||
print("\nProcessor repositories:\n\t" + manager.repositories.values.mkString("\n\t"))
|
||||
}
|
||||
def help()
|
||||
{
|
||||
import parser.{ShowCommand, HelpCommand, ProcessorCommand, RemoveCommand, RepositoryCommand}
|
||||
val usage =
|
||||
(HelpCommand -> "Display this help message") ::
|
||||
(ShowCommand -> "Display defined processors and repositories") ::
|
||||
(ProcessorCommand -> "Define 'label' to be the processor with the given ID") ::
|
||||
(RepositoryCommand -> "Add a repository for searching for processors") ::
|
||||
(RemoveCommand -> "Undefine the repository or processor with the given 'label'") ::
|
||||
Nil
|
||||
|
||||
print("Processor management commands:\n " + (usage.map{ case (c,d) => prefix + "" + c + " " + d}).mkString("\n "))
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
import java.io.File
|
||||
import java.net.{URL, URLClassLoader}
|
||||
import xsbt.FileUtilities.read
|
||||
import xsbt.OpenResource.urlInputStream
|
||||
import xsbt.Paths._
|
||||
import xsbt.GlobFilter._
|
||||
|
||||
import ProcessorException.error
|
||||
|
||||
class Loader extends NotNull
|
||||
{
|
||||
def classNameResource = "sbt.processor"
|
||||
def getProcessor(directory: File): Either[Throwable, Processor] = getProcessor( getLoader(directory) )
|
||||
private def getProcessor(loader: ClassLoader): Either[Throwable, Processor] =
|
||||
{
|
||||
val resource = loader.getResource(classNameResource)
|
||||
if(resource eq null) Left(new ProcessorException("Processor existed but did not contain '" + classNameResource + "' descriptor."))
|
||||
else loadProcessor(loader, resource)
|
||||
}
|
||||
private def loadProcessor(loader: ClassLoader, resource : URL): Either[Throwable, Processor] =
|
||||
try { Right(loadProcessor(loader, className(resource))) }
|
||||
catch { case e: Exception => Left(e) }
|
||||
|
||||
private def loadProcessor(loader: ClassLoader, className: String): Processor =
|
||||
{
|
||||
val processor = Class.forName(className, true, loader).newInstance
|
||||
classOf[Processor].cast(processor)
|
||||
}
|
||||
private def className(resource: URL): String = urlInputStream(resource) { in => read(in).trim }
|
||||
private def getLoader(dir: File) =
|
||||
{
|
||||
val jars = dir ** "*.jar"
|
||||
val jarURLs = jars.files.toArray[File].map(_.toURI.toURL)
|
||||
new URLClassLoader(jarURLs, getClass.getClassLoader)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,84 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
import java.io.File
|
||||
import xsbt.Paths._
|
||||
import ProcessorException.error
|
||||
|
||||
/** Files needed by ManagerImpl.
|
||||
* `retrieveBaseDirectory` is the directory that processors are retrieved under.
|
||||
* `retrieveLockFile` is used to synchronize access to that directory.
|
||||
* `definitionsFile` is the file to save repository and processor definitions to. It is usually per-user instead of per-project.*/
|
||||
class ManagerFiles(val retrieveBaseDirectory: File, val retrieveLockFile: File, val definitionsFile: File)
|
||||
|
||||
class ManagerImpl(files: ManagerFiles, scalaVersion: String, persist: Persist, log: Logger) extends Manager
|
||||
{
|
||||
import files._
|
||||
|
||||
def processorDefinition(label: String): Option[ProcessorDefinition] = processors.get(label)
|
||||
def processor(pdef: ProcessorDefinition): Option[Processor] =
|
||||
{
|
||||
def tryProcessor: Either[Throwable, Processor] =
|
||||
(new Loader).getProcessor( retrieveDirectory(pdef) )
|
||||
|
||||
// try to load the processor. It will succeed here if the processor has already been retrieved
|
||||
tryProcessor.left.flatMap { _ =>
|
||||
// if it hasn't been retrieved, retrieve the processor and its dependencies
|
||||
retrieveProcessor(pdef)
|
||||
// try to load the processor now that it has been retrieved
|
||||
tryProcessor.left.map { // if that fails, log a warning
|
||||
case p: ProcessorException => log.warn(p.getMessage)
|
||||
case t => log.trace(t); log.warn(t.toString)
|
||||
}
|
||||
}.right.toOption
|
||||
}
|
||||
def defineProcessor(p: ProcessorDefinition)
|
||||
{
|
||||
checkExisting(p)
|
||||
retrieveProcessor(p)
|
||||
add(p)
|
||||
}
|
||||
def defineRepository(r: RepositoryDefinition)
|
||||
{
|
||||
checkExisting(r)
|
||||
add(r)
|
||||
}
|
||||
def removeDefinition(label: String): Definition =
|
||||
definitions.removeKey(label) match
|
||||
{
|
||||
case Some(removed) =>
|
||||
saveDefinitions()
|
||||
removed
|
||||
case None => error("Label '" + label + "' not defined.")
|
||||
}
|
||||
|
||||
private def retrieveProcessor(p: ProcessorDefinition): Unit =
|
||||
{
|
||||
val resolvers = repositories.values.toList.map(toResolver)
|
||||
val module = p.toModuleID(scalaVersion)
|
||||
( new Retrieve(retrieveDirectory(p), module, persist.lock, retrieveLockFile, resolvers, log) ).retrieve()
|
||||
}
|
||||
private def add(d: Definition)
|
||||
{
|
||||
definitions(d.label) = d
|
||||
saveDefinitions()
|
||||
}
|
||||
|
||||
private lazy val definitions = loadDefinitions(definitionsFile)
|
||||
def repositories = Map() ++ partialMap(definitions) { case (label, d: RepositoryDefinition) => (label, d) }
|
||||
def processors = Map() ++ partialMap(definitions) { case (label, p: ProcessorDefinition) => (label, p) }
|
||||
|
||||
private def checkExisting(p: Definition): Unit = definitions.get(p.label) map { d => error ("Label '" + p.label + "' already in use: " + d) }
|
||||
private def partialMap[T,S](i: Iterable[T])(f: PartialFunction[T,S]) = i.filter(f.isDefinedAt).map(f)
|
||||
private def toResolver(repo: RepositoryDefinition): Resolver = new MavenRepository(repo.label, repo.url)
|
||||
|
||||
def retrieveDirectory(p: ProcessorDefinition) = retrieveBaseDirectory / p.group / p.module / p.rev
|
||||
|
||||
private def saveDefinitions(): Unit = saveDefinitions(definitionsFile)
|
||||
private def saveDefinitions(file: File): Unit = persist.save(file)(definitions.values.toList)
|
||||
private def loadDefinitions(file: File): scala.collection.mutable.Map[String, Definition] =
|
||||
scala.collection.mutable.HashMap( (if(file.exists) rawLoad(file) else Nil) : _*)
|
||||
private def rawLoad(file: File): Seq[(String, Definition)] = persist.load(definitionsFile).map { d => (d.label, d) }
|
||||
}
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
/** Parses commands. `errorMessage` is the String used when a command is invalid.
|
||||
* There is no detailed error reporting.
|
||||
* Input Strings are assumed to be trimmed.*/
|
||||
class CommandParser(errorMessage: String, defParser: DefinitionParsing) extends CommandParsing
|
||||
{
|
||||
def parseCommand(line: String): Either[String, Command] =
|
||||
defParser.parseDefinition(line) match
|
||||
{
|
||||
case Some(p: ProcessorDefinition) => Right(new DefineProcessor(p))
|
||||
case Some(r: RepositoryDefinition) => Right(new DefineRepository(r))
|
||||
case None => parseOther(line)
|
||||
}
|
||||
|
||||
def parseOther(line: String) =
|
||||
line match
|
||||
{
|
||||
case RemoveRegex(label) => Right(new RemoveDefinition(label))
|
||||
case HelpCommand | "" => Right(Help)
|
||||
case ShowCommand => Right(Show)
|
||||
case _ => Left(errorMessage)
|
||||
}
|
||||
|
||||
val ShowCommand = "show"
|
||||
val HelpCommand = "help"
|
||||
val ProcessorCommand = "<label> is <group> <module> <rev>"
|
||||
val RepositoryCommand = "<label> at <url>"
|
||||
val RemoveCommand = "remove <label>"
|
||||
|
||||
val RemoveRegex = """remove\s+(\w+)""".r
|
||||
}
|
||||
|
||||
/** Parses the String representation of definitions.*/
|
||||
class DefinitionParser extends DefinitionParsing
|
||||
{
|
||||
def parseDefinition(line: String): Option[Definition] =
|
||||
line match
|
||||
{
|
||||
case ProcessorRegex(label, group, name, rev) => Some( new ProcessorDefinition(label, group, name, rev) )
|
||||
case RepositoryRegex(label, url) => Some( new RepositoryDefinition(label, url) )
|
||||
case _ => None
|
||||
}
|
||||
|
||||
val ProcessorRegex = """(\w+)\s+is\s+(\S+)\s+(\S+)\s+(\S+)""".r
|
||||
val RepositoryRegex = """(\w+)\s+at\s+(\S+)""".r
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
import java.io.File
|
||||
|
||||
import xsbt.FileUtilities.{readLines, write}
|
||||
|
||||
// lock file should be for synchronizing access to the persisted files
|
||||
class Persist(val lock: xsbti.GlobalLock, lockFile: File, defParser: DefinitionParser) extends Persisting
|
||||
{
|
||||
private def withDefinitionsLock[T](f: => T): T = lock(lockFile,Callable(f))
|
||||
|
||||
def save(file: File)(definitions: Iterable[Definition])
|
||||
{
|
||||
val lines = definitions.mkString(LineSeparator)
|
||||
withDefinitionsLock { write(file, lines) }
|
||||
}
|
||||
def load(file: File): Seq[Definition] =
|
||||
{
|
||||
def parseLine(line: String) = defParser.parseDefinition(line).toList
|
||||
withDefinitionsLock { if(file.exists) readLines(file) else Nil } flatMap(parseLine)
|
||||
}
|
||||
private final val LineSeparator = System.getProperty("line.separator", "\n")
|
||||
}
|
||||
|
|
@ -0,0 +1,148 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
import java.io.File
|
||||
|
||||
/** An interface for code that operates on an sbt `Project`.*/
|
||||
trait Processor extends NotNull
|
||||
{
|
||||
/** Apply this processor's action to the given `project`.
|
||||
* The arguments are passed unparsed as a single String `args`.
|
||||
* The return value optionally provides additional commands to run, such as 'reload'.
|
||||
* Note: `project` is not necessarily the root project. To get the root project, use `project.rootProject`.
|
||||
* The `label` used to call the processor is provided to allow recursing.*/
|
||||
def apply(label: String, project: Project, onFailure: Option[String], args: String): ProcessorResult
|
||||
}
|
||||
/** An interface for code that operates on an sbt `Project` but doesn't need to modify command processing.*/
|
||||
abstract class BasicProcessor extends Processor
|
||||
{
|
||||
/** Apply this processor's action to the given `project`.
|
||||
* The arguments are passed unparsed as a single String `args`.
|
||||
* Note: `project` is not necessarily the root project. To get the root project, use `project.rootProject`.*/
|
||||
def apply(project: Project, args: String): Unit
|
||||
|
||||
override final def apply(label: String, project: Project, onFailure: Option[String], args: String): ProcessorResult =
|
||||
{
|
||||
apply(project, args)
|
||||
new Success(project, onFailure)
|
||||
}
|
||||
}
|
||||
|
||||
/** The result of a Processor run.*/
|
||||
sealed trait ProcessorResult extends NotNull
|
||||
/* Processor success.
|
||||
* `insertArgs` allows the Processor to insert additional commands to run.
|
||||
* These commands are run before pending commands.
|
||||
*
|
||||
* For example, consider a Processor bound to 'cleanCompile' that returns
|
||||
* `ProcessorResult("clean", "compile")`
|
||||
*
|
||||
* If a user runs:
|
||||
* `sbt a cleanCompile b `
|
||||
* This runs `a`, `cleanCompile`, `clean`, `compile`, and finally `b`.
|
||||
* Commands are processed as if they were entered at the prompt or from the command line.*/
|
||||
final class Success(val project: Project, val onFailure: Option[String], insertArgs: String*) extends ProcessorResult
|
||||
{
|
||||
val insertArguments = insertArgs.toList
|
||||
}
|
||||
final class Exit(val code: Int) extends ProcessorResult
|
||||
final class Reload(insertArgs: String*) extends ProcessorResult
|
||||
{
|
||||
val insertArguments = insertArgs.toList
|
||||
}
|
||||
|
||||
/** Manages the processor and repository definitions.*/
|
||||
trait Manager extends NotNull
|
||||
{
|
||||
def defineProcessor(pdef: ProcessorDefinition)
|
||||
def removeDefinition(label: String): Definition
|
||||
def defineRepository(repo: RepositoryDefinition)
|
||||
def processor(pdef: ProcessorDefinition): Option[Processor]
|
||||
def processorDefinition(label: String): Option[ProcessorDefinition]
|
||||
|
||||
def processors: Map[String, ProcessorDefinition]
|
||||
def repositories: Map[String, RepositoryDefinition]
|
||||
}
|
||||
|
||||
/** Executes a parsed command. */
|
||||
trait Executing extends NotNull
|
||||
{
|
||||
def apply(command: Command)
|
||||
}
|
||||
/** Prints information about processors. */
|
||||
trait Info extends NotNull
|
||||
{
|
||||
/** Prints available processors and defined repositories.*/
|
||||
def show()
|
||||
/** Prints usage of processor management commands.*/
|
||||
def help()
|
||||
}
|
||||
|
||||
/** Parses a command String */
|
||||
trait CommandParsing extends NotNull
|
||||
{
|
||||
/** Parses a command String that has been preprocessed.
|
||||
* It should have any prefix (like the * used by Main) removed
|
||||
* and whitespace trimmed
|
||||
*
|
||||
* If parsing is successful, a `Command` instance is returned wrapped in `Right`.
|
||||
* Otherwise, an error message is returned wrapped in `Left`.*/
|
||||
def parseCommand(line: String): Either[String, Command]
|
||||
}
|
||||
/** Parses a definition `String`.*/
|
||||
trait DefinitionParsing extends NotNull
|
||||
{
|
||||
/** Parses the given definition `String`.
|
||||
* The result is wrapped in `Some` if successful, or `None` if the string is not of the correct form. */
|
||||
def parseDefinition(line: String): Option[Definition]
|
||||
}
|
||||
/** Handles serializing `Definition`s.*/
|
||||
trait Persisting extends NotNull
|
||||
{
|
||||
def save(file: File)(definitions: Iterable[Definition])
|
||||
def load(file: File): Seq[Definition]
|
||||
}
|
||||
|
||||
sealed trait Definition extends NotNull
|
||||
{
|
||||
def label: String
|
||||
}
|
||||
final class ProcessorDefinition(val label: String, val group: String, val module: String, val rev: String) extends Definition
|
||||
{
|
||||
override def toString = Seq(label, "is", group, module, rev).mkString(" ")
|
||||
def idString = Seq(group, module, rev).mkString(" ")
|
||||
def toModuleID(scalaVersion: String) = ModuleID(group, module + "_" + scalaVersion, rev)
|
||||
}
|
||||
// maven-style repositories only right now
|
||||
final class RepositoryDefinition(val label: String, val url: String) extends Definition
|
||||
{
|
||||
override def toString = Seq(label, "at", url).mkString(" ")
|
||||
}
|
||||
|
||||
/** Data type representing a runnable command related to processor management.*/
|
||||
sealed trait Command extends NotNull
|
||||
/** A command to add the given processor definition. */
|
||||
final class DefineProcessor(val pdef: ProcessorDefinition) extends Command
|
||||
/** A command to remove the processor or repository definition currently associated with the given `label`.
|
||||
* If the definition is associated with other labels, those are not affected.*/
|
||||
final class RemoveDefinition(val label: String) extends Command
|
||||
/** A command to register the given repository to be used for obtaining `Processor`s. */
|
||||
final class DefineRepository(val repo: RepositoryDefinition) extends Command
|
||||
/** A command to show help for processor management command usage. */
|
||||
object Help extends Command
|
||||
/** A command to show available processors and repositories.*/
|
||||
object Show extends Command
|
||||
|
||||
/** An exception used when a `Processor` wants to terminate with an error message,
 * but the stack trace is not important.
 * If a `cause` is provided, its stack trace is assumed to be important. */
final class ProcessorException(val message: String, cause: Throwable) extends RuntimeException(message, cause) {
  /** Creates an exception carrying only a message, with no underlying cause. */
  def this(message: String) = this(message, null)
}
|
||||
/** Convenience constructors for raising a [[ProcessorException]]. */
object ProcessorException {
  /** Throws a ProcessorException with the given message and no cause.
   * (The one-argument class constructor forwards a null cause, so delegating
   * to the two-argument overload is equivalent.) */
  def error(msg: String): Nothing = error(msg, null)
  /** Throws a ProcessorException with the given message and underlying cause `t`. */
  def error(msg: String, t: Throwable): Nothing = throw new ProcessorException(msg, t)
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.processor
|
||||
|
||||
import java.io.File
|
||||
|
||||
/** Resolves and downloads the processor `module` into `retrieveDirectory` via Ivy,
 * serializing access with `lock` held on `lockFile`.
 *
 * NOTE(review): the meaning of the `IvyScala("", Nil, false, true)` arguments is not
 * visible from this file — confirm against the IvyScala definition. */
class Retrieve(retrieveDirectory: File, module: ModuleID, lock: xsbti.GlobalLock, lockFile: File, repositories: Seq[Resolver], log: IvyLogger) extends NotNull {
  /** Performs the resolution and retrieval of `module`. */
  def retrieve() {
    val ivyPaths = new IvyPaths(retrieveDirectory, None)
    val scalaConf = new IvyScala("", Nil, false, true)
    val allResolvers = Resolver.withDefaultResolvers(repositories) // TODO: move this somewhere under user control
    val ivyConf = new InlineIvyConfiguration(ivyPaths, allResolvers, Nil, Nil, Some(lock), log)
    val moduleConf = new InlineConfiguration(thisID, module :: Nil, scala.xml.NodeSeq.Empty, Nil, None, Some(scalaConf), false)
    val updateConf = new UpdateConfiguration(retrieveDirectory, retrievePattern, true, true)
    val ivySbt = new IvySbt(ivyConf)
    val ivyModule = new ivySbt.Module(moduleConf)

    // hold the global lock while Ivy updates the retrieve directory
    lock(lockFile, Callable { IvyActions.update(ivyModule, updateConf) })
  }
  /** Identity under which the retrieval itself runs. */
  def thisID = ModuleID("org.scala-tools.sbt", "retrieve-processor", "1.0")
  /** Ivy pattern determining where retrieved artifacts are placed. */
  def retrievePattern = "[artifact](-[revision])(-[classifier]).[ext]"
}
|
||||
|
||||
/** Lifts a by-name value into a `java.util.concurrent.Callable`;
 * the argument is re-evaluated on every `call()`. */
object Callable {
  def apply[T](f: => T): java.util.concurrent.Callable[T] =
    new java.util.concurrent.Callable[T] {
      def call(): T = f
    }
}
|
||||
|
|
@ -0,0 +1,123 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.wrap
|
||||
|
||||
// This file exists for compatibility between Scala 2.7.x and 2.8.0
|
||||
|
||||
import java.util.{Map => JMap, Set => JSet}
|
||||
|
||||
/** Collection helpers shared for compatibility between Scala 2.7.x and 2.8.0. */
private[sbt] object Wrappers {
  /** Builds a `java.util.HashMap` populated with `pairs`. */
  def javaMap[K,V](pairs: (K,V)*) = {
    val m = basicMap[K,V]
    for(pair <- pairs)
      m(pair._1) = pair._2
    m.underlying
  }
  def basicMap[K,V] = new MutableMapWrapper(new java.util.HashMap[K,V])
  def identityMap[K,V] = new MutableMapWrapper(new java.util.IdentityHashMap[K,V])
  def weakMap[K,V] = new MutableMapWrapper(new java.util.WeakHashMap[K,V])
  /** Converts a Java map to a List of pairs in entry-set iteration order. */
  def toList[K,V](s: java.util.Map[K,V]): List[(K,V)] = toList(s.entrySet).map(e => (e.getKey, e.getValue))
  def toList[T](s: java.util.Collection[T]): List[T] = toList(s.iterator)
  /** Drains the iterator into a List, preserving iteration order. */
  def toList[T](s: java.util.Iterator[T]): List[T] = {
    var reversed: List[T] = Nil
    while(s.hasNext)
      reversed = s.next() :: reversed
    reversed.reverse
  }
  /** Drains the enumeration into a List, preserving enumeration order. */
  def toList[T](s: java.util.Enumeration[T]): List[T] = {
    var reversed: List[T] = Nil
    while(s.hasMoreElements)
      reversed = s.nextElement() :: reversed
    reversed.reverse
  }
  // the .readOnly conversions are intentionally left disabled for cross-version compatibility;
  // these widen the static type only and do not copy
  def readOnly[K,V](map: scala.collection.mutable.Map[K,V]): scala.collection.Map[K,V] = map//.readOnly
  def readOnly[T](set: scala.collection.mutable.Set[T]): scala.collection.Set[T] = set//.readOnly
  def readOnly[T](buffer: scala.collection.mutable.Buffer[T]): Seq[T] = buffer//.readOnly
}
|
||||
|
||||
/** Minimal iterable abstraction used by the wrapper collections in this file. */
private[sbt] sealed abstract class Iterable[T] extends NotNull {
  /** Applies `f` to each element, in `toList` order. */
  def foreach(f: T => Unit) = { for(t <- toList) f(t) }
  def toList: List[T]
}
|
||||
/** Mixin for collections supporting element removal. */
private[sbt] sealed trait Removable[T] extends NotNull {
  def -=(t: T): Unit
  /** Removes every element of `all`. */
  def --=(all: Iterable[T]) { for(t <- all) this -= t }
  /** Removes every element of `all`. */
  def --=(all: scala.Iterable[T]) { for(t <- all) this -= t }
}
|
||||
/** Mixin for collections supporting element addition. */
private[sbt] sealed trait Addable[T] extends NotNull {
  def +=(t: T): Unit
  /** Adds every element of `all`. */
  def ++=(all: Iterable[T]) { for(t <- all) this += t }
  /** Adds every element of `all`. */
  def ++=(all: scala.Iterable[T]) { for(t <- all) this += t }
}
|
||||
/** Minimal set abstraction: an Iterable with a membership test. */
private[sbt] sealed abstract class Set[T] extends Iterable[T] {
  def contains(t: T): Boolean
}
|
||||
/** Read-only view of a `java.util.Set`. */
private[sbt] sealed class SetWrapper[T](val underlying: JSet[T]) extends Set[T] {
  def contains(t: T) = underlying.contains(t)
  def toList = Wrappers.toList(underlying.iterator)
}
|
||||
/** Mutable view of a `java.util.Set` supporting add and remove. */
private[sbt] final class MutableSetWrapper[T](wrapped: JSet[T]) extends SetWrapper[T](wrapped) with Addable[T] with Removable[T] {
  def +=(t: T): Unit = { underlying.add(t) }
  def -=(t: T): Unit = { underlying.remove(t) }
  /** Returns this set widened to the read-only interface (no copy is made). */
  def readOnly: Set[T] = this
}
|
||||
/** Minimal map abstraction over key/value lookup. */
private[sbt] sealed abstract class Map[K,V] extends Iterable[(K,V)] {
  def apply(key: K): V
  def get(key: K): Option[V]
  def containsKey(key: K): Boolean
  /** Looks up `key`, evaluating `default` only when the key is absent. */
  final def getOrElse[V2 >: V](key: K, default: => V2): V2 = get(key).getOrElse(default)
}
|
||||
/** Read-only view of a `java.util.Map`.
 * NOTE(review): `apply` returns the raw result of `underlying.get`, which is null
 * for a missing key — callers are expected to use `get` for safe lookup. */
private[sbt] sealed abstract class MapWrapper[K,V](val underlying: JMap[K,V]) extends Map[K,V] {
  final def apply(key: K) = underlying.get(key)
  /** Safe lookup: maps the underlying null-for-missing convention to Option. */
  final def get(key: K) =
    underlying.get(key) match {
      case null => None
      case value => Some(value)
    }
  final def containsKey(key: K) = underlying.containsKey(key)
  final def toList = Wrappers.toList(underlying)
  /** All values, in `toList` (entry-set) order. */
  final def values = toList.map(_._2)
}
|
||||
/** Mutable view of a `java.util.Map` supporting update and removal. */
private[sbt] sealed class MutableMapWrapper[K,V](wrapped: JMap[K,V]) extends MapWrapper[K,V](wrapped) with Removable[K] with Addable[(K,V)] {
  /** Returns the value for `key`; if absent, computes `default`, stores it, and returns it.
   * `default` is evaluated at most once. */
  final def getOrElseUpdate(key: K, default: => V): V =
    get(key).getOrElse {
      val newValue = default
      underlying.put(key, newValue)
      newValue
    }
  final def clear() = underlying.clear()
  final def update(key: K, value: V) { underlying.put(key, value) }
  final def +=(pair: (K, V)) { update(pair._1, pair._2) }
  final def -=(key: K) { underlying.remove(key) }
  /** Removes `key`, returning the previously associated value (null if none). */
  final def remove(key: K) = underlying.remove(key)
  /** Returns this map widened to the read-only interface (no copy is made). */
  final def readOnly: Map[K,V] = this
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue