Initial xsbt commit

This commit is contained in:
Mark Harrah 2009-08-16 14:29:08 -04:00
commit 65fc0e0453
38 changed files with 2731 additions and 0 deletions

58
cache/Cache.scala vendored Normal file
View File

@@ -0,0 +1,58 @@
package xsbt
import sbinary.{CollectionTypes, Format, JavaFormats}
import java.io.File
/** A cache for a computation from an input `I` to an output `O`, backed by a file.
* Applying the cache yields either `Left(previousOutput)` when the cached input is
* up to date, or `Right(store)`, where `store` must be invoked with the freshly
* computed output to persist it. */
trait Cache[I,O]
{
def apply(file: File)(i: I): Either[O, O => Unit]
}
/** Aggregates the sbinary serialization formats (collection and java.io formats)
* used by the standard cache implicits. */
trait SBinaryFormats extends CollectionTypes with JavaFormats with NotNull
{
//TODO: add basic types minus FileFormat
}
/** Entry point and implicit scope for task caching.  Mixing in the basic, sbinary,
* and HList implicits means `import Cache._` brings all standard instances into scope. */
object Cache extends BasicCacheImplicits with SBinaryFormats with HListCacheImplicits
{
// summoners for explicitly locating an implicit cache instance
def cache[I,O](implicit c: Cache[I,O]): Cache[I,O] = c
def outputCache[O](implicit c: OutputCache[O]): OutputCache[O] = c
def inputCache[O](implicit c: InputCache[O]): InputCache[O] = c
// adapt a cache for `DI`/`DO` into one for `I`/`O` via the given conversions
def wrapInputCache[I,DI](implicit convert: I => DI, base: InputCache[DI]): InputCache[I] =
new WrappedInputCache(convert, base)
def wrapOutputCache[O,DO](implicit convert: O => DO, reverse: DO => O, base: OutputCache[DO]): OutputCache[O] =
new WrappedOutputCache[O,DO](convert, reverse, base)
/* Note: Task[O] { type Input = I } is written out because ITask[I,O] did not work (type could not be inferred properly) with a task
* with an HList input.*/
/** Wraps `task` so its result is cached in `file`: an up-to-date cache short-circuits
* the task and returns the stored output.
* NOTE(review): assumes every Task is an `M`; the `M[I,O,_]` pattern is unchecked due
* to erasure and a non-M task would fail with a MatchError — confirm this invariant. */
def apply[I,O](task: Task[O] { type Input = I }, file: File)(implicit cache: Cache[I,O]): Task[O] { type Input = I } =
task match { case m: M[I,O,_] =>
new M[I,O,Result[O]](None)(m.dependencies)(m.extract)(computeWithCache(m, cache, file))
}
/** Consults the cache: a hit yields the stored value; on a miss the original
* computation runs and its output is recorded through the `store` callback. */
private def computeWithCache[I,O](m: M[I,O,_], cache: Cache[I,O], file: File)(in: I): Result[O] =
cache(file)(in) match
{
case Left(value) => Value(value)
case Right(store) => NewTask(m.map { out => store(out); out })
}
}
/** Standard implicit cache constructions for any type with an sbinary Format. */
trait BasicCacheImplicits extends NotNull
{
// input cache that serializes with `format` and compares old/new inputs with `equiv`
implicit def basicInputCache[I](implicit format: Format[I], equiv: Equiv[I]): InputCache[I] =
new BasicInputCache(format, equiv)
implicit def basicOutputCache[O](implicit format: Format[O]): OutputCache[O] =
new BasicOutputCache(format)
// combine independent input and output caches into a complete Cache
implicit def ioCache[I,O](implicit input: InputCache[I], output: OutputCache[O]): Cache[I,O] =
new SeparatedCache(input, output)
// fallback comparison: universal equality (allocates a fresh instance per summon)
implicit def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b }
}
/** Implicit cache instances for HLists, built inductively: HNil is the base case and
* HCons composes a head cache with a tail cache. */
trait HListCacheImplicits extends HLists
{
implicit def hConsInputCache[H,T<:HList](implicit headCache: InputCache[H], tailCache: InputCache[T]): InputCache[HCons[H,T]] =
new HConsInputCache(headCache, tailCache)
implicit lazy val hNilInputCache: InputCache[HNil] = new HNilInputCache
implicit def hConsOutputCache[H,T<:HList](implicit headCache: OutputCache[H], tailCache: OutputCache[T]): OutputCache[HCons[H,T]] =
new HConsOutputCache(headCache, tailCache)
implicit lazy val hNilOutputCache: OutputCache[HNil] = new HNilOutputCache
}

74
cache/FileInfo.scala vendored Normal file
View File

@@ -0,0 +1,74 @@
package xsbt
import java.io.{File, IOException}
import sbinary.{DefaultProtocol, Format}
import DefaultProtocol._
import Function.tupled
/** A snapshot of a file used to detect whether it has changed. */
sealed trait FileInfo extends NotNull
{
val file: File
}
/** A snapshot that records a hash of the file's content. */
sealed trait HashFileInfo extends FileInfo
{
val hash: List[Byte]
}
/** A snapshot that records the file's last modification time. */
sealed trait ModifiedFileInfo extends FileInfo
{
val lastModified: Long
}
/** A snapshot recording both the content hash and the last modification time. */
sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo
// private concrete implementations; constructed only via the FileInfo styles below
private final case class FileHash(file: File, hash: List[Byte]) extends HashFileInfo
private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo
private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo
/** Factory styles for file snapshots.  Each style chooses what to record about a file
 * (hash, modification time, or both) and supplies the sbinary format and cache
 * implicits for that snapshot type. */
object FileInfo
{
	/** A way of snapshotting a File as an `F`, with implicit conversions in both
	 * directions and the serialization format and caches for `F`. */
	sealed trait Style[F <: FileInfo] extends NotNull
	{
		implicit def apply(file: File): F
		implicit def unapply(info: F): File = info.file
		implicit val format: Format[F]
		import Cache._
		// cache Files by first converting them to this style's snapshot type
		implicit def infoInputCache: InputCache[File] = wrapInputCache[File,F]
		implicit def infoOutputCache: OutputCache[File] = wrapOutputCache[File,F]
	}
	/** Records both the content hash and the last-modified time. */
	object full extends Style[HashModifiedFileInfo]
	{
		implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified)
		def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified)
		implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), tupled(make _))
	}
	/** Records only the content hash. */
	object hash extends Style[HashFileInfo]
	{
		implicit def apply(file: File): HashFileInfo = make(file, computeHash(file).toList)
		def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash)
		implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), tupled(make _))
		// A hashing failure (e.g. unreadable/missing file) is deliberately treated as an
		// empty hash rather than an error, so such files always appear changed.
		private def computeHash(file: File) = try { Hash(file) } catch { case _: Exception => Nil }
	}
	/** Records only the last-modified time. */
	object lastModified extends Style[ModifiedFileInfo]
	{
		implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified)
		def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified)
		implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), tupled(make _))
	}
}
/** An immutable set of file snapshots.  The constructor is private so instances are
* only created through the styles in the companion object. */
final case class FilesInfo[F <: FileInfo] private(files: Set[F]) extends NotNull
object FilesInfo
{
/** Describes how to snapshot a collection of files and how to (de)serialize the result. */
sealed trait Style[F <: FileInfo] extends NotNull
{
implicit def apply(files: Iterable[File]): FilesInfo[F]
implicit val format: Format[FilesInfo[F]]
}
private final class BasicStyle[F <: FileInfo](fileStyle: FileInfo.Style[F])(implicit infoFormat: Format[F]) extends Style[F]
{
// absolutize and deduplicate the files before snapshotting each one
implicit def apply(files: Iterable[File]) = FilesInfo( (Set() ++ files.map(_.getAbsoluteFile)).map(fileStyle.apply) )
implicit val format: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs))
}
// one collection style per single-file style
lazy val full: Style[HashModifiedFileInfo] = new BasicStyle(FileInfo.full)(FileInfo.full.format)
lazy val hash: Style[HashFileInfo] = new BasicStyle(FileInfo.hash)(FileInfo.hash.format)
lazy val lastModified: Style[ModifiedFileInfo] = new BasicStyle(FileInfo.lastModified)(FileInfo.lastModified.format)
}

44
cache/HListCache.scala vendored Normal file
View File

@@ -0,0 +1,44 @@
package xsbt
import java.io.{InputStream,OutputStream}
import metascala.HLists.{HCons,HList,HNil}
/** Input cache for the empty HList: delegates to NoInputCache, so HNil is always up to date. */
class HNilInputCache extends NoInputCache[HNil]
/** Input cache for HCons[H,T], composed from caches for the head and the tail.
* The head is read/written before the tail, so the stream layout is the
* concatenation of the two component layouts. */
class HConsInputCache[H,T <: HList](val headCache: InputCache[H], val tailCache: InputCache[T]) extends InputCache[HCons[H,T]]
{
def uptodate(in: HCons[H,T])(cacheStream: InputStream) =
{
// lazy so each component is only read from the stream when actually needed.
// NOTE(review): if the head is out of date, `&&` short-circuits and `tailResult`
// is first forced inside `update`, which reads `cacheStream` at that later
// point — confirm the input stream is still open when that happens.
lazy val headResult = headCache.uptodate(in.head)(cacheStream)
lazy val tailResult = tailCache.uptodate(in.tail)(cacheStream)
new CacheResult
{
lazy val uptodate = headResult.uptodate && tailResult.uptodate
def update(outputStream: OutputStream) =
{
// write in the same order the components are read: head, then tail
headResult.update(outputStream)
tailResult.update(outputStream)
}
}
}
def force(in: HCons[H,T])(cacheStream: OutputStream) =
{
headCache.force(in.head)(cacheStream)
tailCache.force(in.tail)(cacheStream)
}
}
/** Output cache for the empty HList: stores nothing and always loads HNil. */
class HNilOutputCache extends NoOutputCache[HNil](HNil)
/** Output cache for a non-empty HList, composed from caches for the head and tail.
 * The head component is always written before the tail, and `loadCached` reads back
 * in the same order, so the stream layout is the concatenation of the two parts. */
class HConsOutputCache[H,T <: HList](val headCache: OutputCache[H], val tailCache: OutputCache[T]) extends OutputCache[HCons[H,T]]
{
	def loadCached(cacheStream: InputStream) =
	{
		// read order must mirror the write order in `update`: head first, then tail
		val loadedHead = headCache.loadCached(cacheStream)
		val loadedTail = tailCache.loadCached(cacheStream)
		HCons(loadedHead, loadedTail)
	}
	def update(out: HCons[H,T])(cacheStream: OutputStream)
	{
		// write the head component, then the tail component
		headCache.update(out.head)(cacheStream)
		tailCache.update(out.tail)(cacheStream)
	}
}

19
cache/NoCache.scala vendored Normal file
View File

@@ -0,0 +1,19 @@
package xsbt
import java.io.{InputStream,OutputStream}
/** An input cache that never invalidates: every input is reported up to date and
 * nothing is ever written to the cache stream. */
class NoInputCache[T] extends InputCache[T]
{
	def uptodate(in: T)(cacheStream: InputStream) =
	{
		// a result that always reports "current" and performs no writes
		val alwaysCurrent = new CacheResult {
			def uptodate = true
			def update(outputStream: OutputStream) {}
		}
		alwaysCurrent
	}
	def force(in: T)(outputStream: OutputStream) {}
}
/** An output cache that stores nothing: loading evaluates the by-name `create`
* value each time and updating is a no-op. */
class NoOutputCache[O](create: => O) extends OutputCache[O]
{
def loadCached(cacheStream: InputStream) = create
def update(out: O)(cacheStream: OutputStream) {}
}

80
cache/SeparatedCache.scala vendored Normal file
View File

@@ -0,0 +1,80 @@
package xsbt
import sbinary.Format
import sbinary.JavaIO._
import java.io.{File, InputStream, OutputStream}
/** The result of checking a cached input for validity. */
trait CacheResult
{
/** True when the previously cached input matches the current input. */
def uptodate: Boolean
/** Writes the current input to `stream`, refreshing the cached contents. */
def update(stream: OutputStream): Unit
}
/** The input side of a cache: validity checking and (re)writing inputs. */
trait InputCache[I] extends NotNull
{
def uptodate(in: I)(cacheStream: InputStream): CacheResult
def force(in: I)(cacheStream: OutputStream): Unit
}
/** The output side of a cache: loading and storing computed outputs. */
trait OutputCache[O] extends NotNull
{
def loadCached(cacheStream: InputStream): O
def update(out: O)(cacheStream: OutputStream): Unit
}
/** A Cache assembled from independent input and output caches.  The backing file
* stores the serialized input followed by the serialized output. */
class SeparatedCache[I,O](input: InputCache[I], output: OutputCache[O]) extends Cache[I,O]
{
// NOTE(review): any exception (including a missing cache file on first run) falls
// back to a full rewrite of the cache; consider whether this broad catch should
// be narrowed to IOException.
def apply(file: File)(in: I) =
try { applyImpl(file, in) }
catch { case _: Exception => Right(update(file)(in)) }
protected def applyImpl(file: File, in: I) =
{
OpenResource.fileInputStream(file) { stream =>
val cache = input.uptodate(in)(stream)
// rewrites the whole cache file: the current input first, then the new output.
// NOTE(review): the caller may invoke this closure after the enclosing input
// stream is closed; `cache.update` must not lazily read from `stream` at that
// point (see HConsInputCache) — confirm.
lazy val doUpdate = (result: O) =>
{
OpenResource.fileOutputStream(false)(file) { stream =>
cache.update(stream)
output.update(result)(stream)
}
}
if(cache.uptodate)
try { Left(output.loadCached(stream)) }
catch { case _: Exception => Right(doUpdate) }
else
Right(doUpdate)
}
}
/** Unconditionally rewrites the cache file with `in` and `out`. */
protected def update(file: File)(in: I)(out: O)
{
OpenResource.fileOutputStream(false)(file) { stream =>
input.force(in)(stream)
output.update(out)(stream)
}
}
}
/** An OutputCache that (de)serializes the output value with the given sbinary `format`. */
class BasicOutputCache[O](val format: Format[O]) extends OutputCache[O]
{
def loadCached(cacheStream: InputStream): O = format.reads(cacheStream)
def update(out: O)(cacheStream: OutputStream): Unit = format.writes(cacheStream, out)
}
/** An InputCache that serializes inputs with `format` and compares the previous
* input against the current one with `equiv`. */
class BasicInputCache[I](val format: Format[I], val equiv: Equiv[I]) extends InputCache[I]
{
def uptodate(in: I)(cacheStream: InputStream) =
{
// eagerly read the previously cached input so the stream position advances
// past this cache's section before any later cache reads from it
val loaded = format.reads(cacheStream)
new CacheResult
{
val uptodate = equiv.equiv(in, loaded)
def update(outputStream: OutputStream) = force(in)(outputStream)
}
}
/** Unconditionally writes `in` to the stream. */
def force(in: I)(outputStream: OutputStream) = format.writes(outputStream, in)
}
/** Adapts an InputCache for `DI` into one for `I` by converting each input with
 * `convert` before delegating to `base`. */
class WrappedInputCache[I,DI](val convert: I => DI, val base: InputCache[DI]) extends InputCache[I]
{
	def uptodate(in: I)(cacheStream: InputStream) =
	{
		val delegateInput = convert(in)
		base.uptodate(delegateInput)(cacheStream)
	}
	def force(in: I)(outputStream: OutputStream) =
	{
		val delegateInput = convert(in)
		base.force(delegateInput)(outputStream)
	}
}
/** Adapts an OutputCache for `DO` into one for `O`: values are converted with
 * `convert` on store and mapped back with `reverse` on load. */
class WrappedOutputCache[O,DO](val convert: O => DO, val reverse: DO => O, val base: OutputCache[DO]) extends OutputCache[O]
{
	def loadCached(cacheStream: InputStream): O =
	{
		val delegateValue = base.loadCached(cacheStream)
		reverse(delegateValue)
	}
	def update(out: O)(cacheStream: OutputStream): Unit =
	{
		val delegateValue = convert(out)
		base.update(delegateValue)(cacheStream)
	}
}

BIN
cache/lib/sbinary-0.3-alpha.jar vendored Normal file

Binary file not shown.

21
cache/src/test/scala/CacheTest.scala vendored Normal file
View File

@@ -0,0 +1,21 @@
package xsbt
import java.io.File
/** Ad hoc manual exercise of the cache machinery with plain and HList-input tasks.
* NOTE(review): the `file` parameter is unused, and the Properties extension is
* commented out, so this is not wired into any test framework. */
object CacheTest// extends Properties("Cache test")
{
import Task._
import Cache._
import FileInfo.hash._
def checkFormattable(file: File)
{
// a task whose result (the file length) is cached in /tmp/length-cache
val createTask = Task { new File("test") }
val lengthTask = createTask map { f => println("File length: " + f.length); f.length }
val cached = Cache(lengthTask, new File("/tmp/length-cache"))
// combine tasks into an HList input and cache the combined result as well
val cTask = (createTask :: cached :: TNil) map { case (file :: len :: HNil) => println("File: " + file + " length: " + len); len :: file :: HNil }
val cachedC = Cache(cTask, new File("/tmp/c-cache"))
TaskRunner(cachedC).left.foreach(_.foreach(f => f.exception.printStackTrace))
}
}

100
ivy/ConvertResolver.scala Normal file
View File

@@ -0,0 +1,100 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import org.apache.ivy.{core,plugins}
import core.module.id.ModuleRevisionId
import plugins.resolver.{ChainResolver, DependencyResolver, IBiblioResolver}
import plugins.resolver.{AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver}
private object ConvertResolver
{
/** Converts the given sbt resolver into an Ivy resolver.
* NOTE(review): the match covers the repository types visible here and relies on
* `Resolver` being a sealed hierarchy; an unknown subtype would raise a MatchError. */
def apply(r: Resolver) =
{
r match
{
case repo: MavenRepository =>
{
val resolver = new IBiblioResolver
initializeMavenStyle(resolver, repo.name, repo.root)
resolver
}
case JavaNet1Repository =>
{
// Thanks to Matthias Pfau for posting how to use the Maven 1 repository on java.net with Ivy:
// http://www.nabble.com/Using-gradle-Ivy-with-special-maven-repositories-td23775489.html
val resolver = new IBiblioResolver { override def convertM2IdForResourceSearch(mrid: ModuleRevisionId) = mrid }
initializeMavenStyle(resolver, JavaNet1Repository.name, "http://download.java.net/maven/1/")
resolver.setPattern("[organisation]/[ext]s/[module]-[revision](-[classifier]).[ext]")
resolver
}
case repo: SshRepository =>
{
val resolver = new SshResolver
initializeSSHResolver(resolver, repo)
// apply the configured unix permissions to published files, when given
repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm))
resolver
}
case repo: SftpRepository =>
{
val resolver = new SFTPResolver
initializeSSHResolver(resolver, repo)
resolver
}
case repo: FileRepository =>
{
val resolver = new FileSystemResolver
resolver.setName(repo.name)
initializePatterns(resolver, repo.patterns)
import repo.configuration.{isLocal, isTransactional}
resolver.setLocal(isLocal)
isTransactional.foreach(value => resolver.setTransactional(value.toString))
resolver
}
case repo: URLRepository =>
{
val resolver = new URLResolver
resolver.setName(repo.name)
initializePatterns(resolver, repo.patterns)
resolver
}
}
}
/** Applies the name, root URL, and Maven 2 compatibility common to Maven-style resolvers. */
private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String)
{
resolver.setName(name)
resolver.setM2compatible(true)
resolver.setRoot(root)
}
/** Applies the configuration common to ssh and sftp resolvers: name, patterns, and connection. */
private def initializeSSHResolver(resolver: AbstractSshBasedResolver, repo: SshBasedRepository)
{
resolver.setName(repo.name)
resolver.setPassfile(null)
initializePatterns(resolver, repo.patterns)
initializeConnection(resolver, repo.connection)
}
/** Copies the host, port, and authentication settings onto the resolver. */
private def initializeConnection(resolver: AbstractSshBasedResolver, connection: RepositoryHelpers.SshConnection)
{
import resolver._
import connection._
hostname.foreach(setHost)
port.foreach(setPort)
authentication foreach
{
case RepositoryHelpers.PasswordAuthentication(user, password) =>
setUser(user)
setUserPassword(password)
case RepositoryHelpers.KeyFileAuthentication(file, password) =>
setKeyFile(file)
setKeyFilePassword(password)
}
}
/** Registers the ivy and artifact patterns and Maven compatibility on a pattern-based resolver. */
private def initializePatterns(resolver: AbstractPatternsBasedResolver, patterns: RepositoryHelpers.Patterns)
{
resolver.setM2compatible(patterns.isMavenCompatible)
patterns.ivyPatterns.foreach(resolver.addIvyPattern)
patterns.artifactPatterns.foreach(resolver.addArtifactPattern)
}
}

35
ivy/CustomXmlParser.scala Normal file
View File

@@ -0,0 +1,35 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import java.io.ByteArrayInputStream
import java.net.URL
import org.apache.ivy.{core, plugins}
import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor}
import core.settings.IvySettings
import plugins.parser.xml.XmlModuleDescriptorParser
import plugins.repository.Resource
import plugins.repository.url.URLResource
/** Subclasses the default Ivy file parser in order to provide access to protected methods.*/
private object CustomXmlParser extends XmlModuleDescriptorParser with NotNull
{
import XmlModuleDescriptorParser.Parser
class CustomParser(settings: IvySettings) extends Parser(CustomXmlParser, settings) with NotNull
{
/** Points the parser at `url`, setting both the resource and the input source. */
def setSource(url: URL) =
{
super.setResource(new URLResource(url))
super.setInput(url)
}
/** Parses from an in-memory Ivy file given as raw bytes. */
def setInput(bytes: Array[Byte]) { setInput(new ByteArrayInputStream(bytes)) }
/** Overridden because the super implementation overwrites the module descriptor.*/
override def setResource(res: Resource) {}
// the following overrides only widen visibility of protected members so that
// IvySbt/IvyActions can drive the parser directly
override def setMd(md: DefaultModuleDescriptor) = super.setMd(md)
override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = super.parseDepsConfs(confs, dd)
override def getDefaultConf = super.getDefaultConf
override def setDefaultConf(conf: String) = super.setDefaultConf(conf)
}
}

307
ivy/Ivy.scala Normal file
View File

@@ -0,0 +1,307 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import Artifact.{defaultExtension, defaultType}
import java.io.File
import org.apache.ivy.{core, plugins, util, Ivy}
import core.cache.DefaultRepositoryCacheManager
import core.module.descriptor.{DefaultArtifact, DefaultDependencyArtifactDescriptor, MDArtifact}
import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor}
import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId}
import core.settings.IvySettings
import plugins.matcher.PatternMatcher
import plugins.parser.m2.PomModuleDescriptorParser
import plugins.resolver.ChainResolver
import util.Message
/** Wraps a configured Ivy instance and provides safe (locked, logger-managed) access to it. */
final class IvySbt(configuration: IvyConfiguration)
{
import configuration._
/** ========== Configuration/Setup ============
* This part configures the Ivy instance by first creating the logger interface to ivy, then IvySettings, and then the Ivy instance.
* These are lazy so that they are loaded within the right context. This is important so that no Ivy XML configuration needs to be loaded,
* saving some time. This is necessary because Ivy has global state (IvyContext, Message, DocumentBuilder, ...).
*/
private lazy val logger = new IvyLoggerInterface(log)
// runs `f` with our logger installed as Ivy's global default, restoring the previous one afterwards
private def withDefaultLogger[T](f: => T): T =
IvySbt.synchronized // Ivy is not thread-safe. In particular, it uses a static DocumentBuilder, which is not thread-safe
{
val originalLogger = Message.getDefaultLogger
Message.setDefaultLogger(logger)
try { f }
finally { Message.setDefaultLogger(originalLogger) }
}
// IvySettings built from the inline resolvers, or autodetected when none were given
private lazy val settings =
{
val is = new IvySettings
is.setBaseDir(paths.baseDirectory)
IvySbt.configureCache(is, paths.cacheDirectory)
if(resolvers.isEmpty)
autodetectConfiguration(is)
else
IvySbt.setResolvers(is, resolvers, log)
is
}
private lazy val ivy =
{
val i = Ivy.newInstance(settings)
i.getLoggerEngine.pushLogger(logger)
i
}
/** Called to configure Ivy when inline resolvers are not specified.
* This will configure Ivy with an 'ivysettings.xml' file if there is one or else use default resolvers.*/
private def autodetectConfiguration(settings: IvySettings)
{
log.debug("Autodetecting configuration.")
val defaultIvyConfigFile = IvySbt.defaultIvyConfiguration(paths.baseDirectory)
if(defaultIvyConfigFile.canRead)
settings.load(defaultIvyConfigFile)
else
IvySbt.setResolvers(settings, Resolver.withDefaultResolvers(Nil), log)
}
/** ========== End Configuration/Setup ============*/
/** Uses the configured Ivy instance within a safe context.*/
def withIvy[T](f: Ivy => T): T =
withDefaultLogger
{
ivy.pushContext()
try { f(ivy) }
finally { ivy.popContext() }
}
/** A module (its descriptor and default configuration) resolved against this Ivy instance. */
final class Module(val moduleConfiguration: ModuleConfiguration) extends NotNull
{
def logger = configuration.log
def withModule[T](f: (Ivy,DefaultModuleDescriptor,String) => T): T =
withIvy[T] { ivy => f(ivy, moduleDescriptor, defaultConfig) }
import moduleConfiguration._
// lazily build the descriptor: either autodetected from pom.xml/ivy.xml or from the inline configuration
private lazy val (moduleDescriptor: DefaultModuleDescriptor, defaultConfig: String) =
{
val (baseModule, baseConfiguration) =
if(isUnconfigured)
autodetectDependencies(IvySbt.toID(module))
else
configureModule
ivyScala.foreach(IvyScala.checkModule(baseModule, baseConfiguration))
// register the "m" namespace used for extra attributes such as m:classifier
baseModule.getExtraAttributesNamespaces.asInstanceOf[java.util.Map[String,String]].put("m", "m")
(baseModule, baseConfiguration)
}
// builds the descriptor from inline Scala/XML configuration
private def configureModule =
{
val moduleID = newConfiguredModuleID
val defaultConf = defaultConfiguration getOrElse Configurations.config(ModuleDescriptor.DEFAULT_CONFIGURATION)
log.debug("Using inline dependencies specified in Scala" + (if(ivyXML.isEmpty) "." else " and XML."))
val parser = IvySbt.parseIvyXML(ivy.getSettings, IvySbt.wrapped(module, ivyXML), moduleID, defaultConf.name, validate)
IvySbt.addArtifacts(moduleID, artifacts)
IvySbt.addDependencies(moduleID, dependencies, parser)
IvySbt.addMainArtifact(moduleID)
(moduleID, parser.getDefaultConf)
}
private def newConfiguredModuleID =
{
val mod = new DefaultModuleDescriptor(IvySbt.toID(module), "release", null, false)
mod.setLastModified(System.currentTimeMillis)
configurations.foreach(config => mod.addConfiguration(IvySbt.toIvyConfiguration(config)))
mod
}
/** Parses the given Maven pom 'pomFile'.*/
private def readPom(pomFile: File) =
{
val md = PomModuleDescriptorParser.getInstance.parseDescriptor(settings, toURL(pomFile), validate)
(IvySbt.toDefaultModuleDescriptor(md), "compile")
}
/** Parses the given Ivy file 'ivyFile'.*/
private def readIvyFile(ivyFile: File) =
{
val url = toURL(ivyFile)
val parser = new CustomXmlParser.CustomParser(settings)
parser.setValidate(validate)
parser.setSource(url)
parser.parse()
val md = parser.getModuleDescriptor()
(IvySbt.toDefaultModuleDescriptor(md), parser.getDefaultConf)
}
private def toURL(file: File) = file.toURI.toURL
/** Called to determine dependencies when the dependency manager is SbtManager and no inline dependencies (Scala or XML)
* are defined. It will try to read from pom.xml first and then ivy.xml if pom.xml is not found. If neither is found,
* Ivy is configured with defaults.*/
private def autodetectDependencies(module: ModuleRevisionId) =
{
log.debug("Autodetecting dependencies.")
val defaultPOMFile = IvySbt.defaultPOM(paths.baseDirectory)
if(defaultPOMFile.canRead)
readPom(defaultPOMFile)
else
{
val defaultIvy = IvySbt.defaultIvyFile(paths.baseDirectory)
if(defaultIvy.canRead)
readIvyFile(defaultIvy)
else
{
val defaultConf = ModuleDescriptor.DEFAULT_CONFIGURATION
log.warn("No dependency configuration found, using defaults.")
val moduleID = DefaultModuleDescriptor.newDefaultInstance(module)
IvySbt.addMainArtifact(moduleID)
IvySbt.addDefaultArtifact(defaultConf, moduleID)
(moduleID, defaultConf)
}
}
}
}
}
/** Helper constants and conversions between sbt's configuration model and Ivy's. */
private object IvySbt
{
val DefaultIvyConfigFilename = "ivysettings.xml"
val DefaultIvyFilename = "ivy.xml"
val DefaultMavenFilename = "pom.xml"
private def defaultIvyFile(project: File) = new File(project, DefaultIvyFilename)
private def defaultIvyConfiguration(project: File) = new File(project, DefaultIvyConfigFilename)
private def defaultPOM(project: File) = new File(project, DefaultMavenFilename)
/** Sets the resolvers for 'settings' to 'resolvers'. This is done by creating a new chain and making it the default. */
private def setResolvers(settings: IvySettings, resolvers: Seq[Resolver], log: IvyLogger)
{
val newDefault = new ChainResolver
newDefault.setName("sbt-chain")
newDefault.setReturnFirst(true)
newDefault.setCheckmodified(true)
resolvers.foreach(r => newDefault.add(ConvertResolver(r)))
settings.addResolver(newDefault)
settings.setDefaultResolver(newDefault.getName)
log.debug("Using repositories:\n" + resolvers.mkString("\n\t"))
}
/** Installs a repository cache manager rooted at `dir` (or Ivy's default location)
* that treats -SNAPSHOT revisions as changing. */
private def configureCache(settings: IvySettings, dir: Option[File])
{
val cacheDir = dir.getOrElse(settings.getDefaultRepositoryCacheBasedir())
val manager = new DefaultRepositoryCacheManager("default-cache", settings, cacheDir)
manager.setUseOrigin(true)
manager.setChangingMatcher(PatternMatcher.REGEXP);
manager.setChangingPattern(".*-SNAPSHOT");
settings.setDefaultRepositoryCacheManager(manager)
}
/** Converts an sbt Configuration to the corresponding Ivy Configuration. */
private def toIvyConfiguration(configuration: Configuration) =
{
import org.apache.ivy.core.module.descriptor.{Configuration => IvyConfig}
import IvyConfig.Visibility._
import configuration._
new IvyConfig(name, if(isPublic) PUBLIC else PRIVATE, description, extendsConfigs.map(_.name).toArray, transitive, null)
}
// registers the module's default artifact (used when no configuration files were found)
private def addDefaultArtifact(defaultConf: String, moduleID: DefaultModuleDescriptor) =
moduleID.addArtifact(defaultConf, new MDArtifact(moduleID, moduleID.getModuleRevisionId.getName, defaultType, defaultExtension))
/** Adds the ivy.xml main artifact. */
private def addMainArtifact(moduleID: DefaultModuleDescriptor)
{
val artifact = DefaultArtifact.newIvyArtifact(moduleID.getResolvedModuleRevisionId, moduleID.getPublicationDate)
moduleID.setModuleArtifact(artifact)
moduleID.check()
}
/** Converts the given sbt module id into an Ivy ModuleRevisionId.*/
private def toID(m: ModuleID) =
{
import m._
ModuleRevisionId.newInstance(organization, name, revision)
}
/** Converts an sbt Artifact to an Ivy MDArtifact attached to the given configurations. */
private def toIvyArtifact(moduleID: ModuleDescriptor, a: Artifact, configurations: Iterable[String]): MDArtifact =
{
val artifact = new MDArtifact(moduleID, a.name, a.`type`, a.extension, null, extra(a))
configurations.foreach(artifact.addConfiguration)
artifact
}
// extra attributes map carrying the classifier (in the "m" namespace), or null when absent
private def extra(artifact: Artifact) = artifact.classifier.map(c => javaMap("m:classifier" -> c)).getOrElse(null)
/** Small helper for building a java.util.Map from pairs (Ivy's APIs require Java maps). */
private object javaMap
{
import java.util.{HashMap, Map}
def apply[K,V](pairs: (K,V)*): Map[K,V] =
{
val map = new HashMap[K,V]
pairs.foreach { case (key, value) => map.put(key, value) }
map
}
}
/** Creates a full ivy file for 'module' using the 'content' XML as the part after the &lt;info&gt;...&lt;/info&gt; section. */
private def wrapped(module: ModuleID, content: scala.xml.NodeSeq) =
{
import module._
<ivy-module version="2.0">
<info organisation={organization} module={name} revision={revision}/>
{content}
</ivy-module>
}
/** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */
private def parseIvyXML(settings: IvySettings, xml: scala.xml.NodeSeq, moduleID: DefaultModuleDescriptor, defaultConfiguration: String, validate: Boolean): CustomXmlParser.CustomParser =
parseIvyXML(settings, xml.toString, moduleID, defaultConfiguration, validate)
/** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */
private def parseIvyXML(settings: IvySettings, xml: String, moduleID: DefaultModuleDescriptor, defaultConfiguration: String, validate: Boolean): CustomXmlParser.CustomParser =
{
val parser = new CustomXmlParser.CustomParser(settings)
parser.setMd(moduleID)
parser.setDefaultConf(defaultConfiguration)
parser.setValidate(validate)
parser.setInput(xml.getBytes)
parser.parse()
parser
}
/** This method is used to add inline dependencies to the provided module. */
def addDependencies(moduleID: DefaultModuleDescriptor, dependencies: Iterable[ModuleID], parser: CustomXmlParser.CustomParser)
{
for(dependency <- dependencies)
{
val dependencyDescriptor = new DefaultDependencyDescriptor(moduleID, toID(dependency), false, dependency.isChanging, dependency.isTransitive)
dependency.configurations match
{
case None => // The configuration for this dependency was not explicitly specified, so use the default
parser.parseDepsConfs(parser.getDefaultConf, dependencyDescriptor)
case Some(confs) => // The configuration mapping (looks like: test->default) was specified for this dependency
parser.parseDepsConfs(confs, dependencyDescriptor)
}
// attach any explicitly listed artifacts to every configuration of the dependency
for(artifact <- dependency.explicitArtifacts)
{
import artifact.{name, classifier, `type`, extension, url}
val extraMap = extra(artifact)
val ivyArtifact = new DefaultDependencyArtifactDescriptor(dependencyDescriptor, name, `type`, extension, url.getOrElse(null), extraMap)
for(conf <- dependencyDescriptor.getModuleConfigurations)
dependencyDescriptor.addDependencyArtifact(conf, ivyArtifact)
}
moduleID.addDependency(dependencyDescriptor)
}
}
/** This method is used to add inline artifacts to the provided module. */
def addArtifacts(moduleID: DefaultModuleDescriptor, artifacts: Iterable[Artifact])
{
val allConfigurations = moduleID.getPublicConfigurationsNames
for(artifact <- artifacts)
{
// an artifact with no explicit configurations belongs to all public configurations
val configurationStrings =
{
val artifactConfigurations = artifact.configurations
if(artifactConfigurations.isEmpty)
allConfigurations
else
artifactConfigurations.map(_.name)
}
val ivyArtifact = toIvyArtifact(moduleID, artifact, configurationStrings)
configurationStrings.foreach(configuration => moduleID.addArtifact(configuration, ivyArtifact))
}
}
/** This code converts the given ModuleDescriptor to a DefaultModuleDescriptor by casting or generating an error.
* Ivy 2.0.0 always produces a DefaultModuleDescriptor. */
private def toDefaultModuleDescriptor(md: ModuleDescriptor) =
md match
{
case dmd: DefaultModuleDescriptor => dmd
case _ => error("Unknown ModuleDescriptor type.")
}
}

138
ivy/IvyActions.scala Normal file
View File

@@ -0,0 +1,138 @@
package xsbt
import java.io.File
import org.apache.ivy.{core, plugins, util, Ivy}
import core.cache.DefaultRepositoryCacheManager
import core.LogOptions
import core.deliver.DeliverOptions
import core.module.descriptor.{DefaultArtifact, DefaultDependencyArtifactDescriptor, MDArtifact}
import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor, DependencyDescriptor, ModuleDescriptor}
import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId}
import core.publish.PublishOptions
import core.resolve.ResolveOptions
import core.retrieve.RetrieveOptions
import plugins.parser.m2.{PomModuleDescriptorParser,PomModuleDescriptorWriter}
/** Settings for the update (resolve + retrieve) action: where and with what pattern
* to copy resolved artifacts, whether to delete stale files, and log verbosity. */
final class UpdateConfiguration(val retrieveDirectory: File, val outputPattern: String, val synchronize: Boolean, val quiet: Boolean) extends NotNull
object IvyActions
{
/** Clears the Ivy cache, as configured by 'config'. */
def cleanCache(ivy: IvySbt) = ivy.withIvy { _.getSettings.getRepositoryCacheManagers.foreach(_.clean()) }
/** Creates a Maven pom from the given Ivy configuration*/
def makePom(module: IvySbt#Module, extraDependencies: Iterable[ModuleID], configurations: Option[Iterable[Configuration]], output: File)
{
module.withModule { (ivy, md, default) =>
addLateDependencies(ivy, md, default, extraDependencies)
val pomModule = keepConfigurations(md, configurations)
PomModuleDescriptorWriter.write(pomModule, DefaultConfigurationMapping, output)
module.logger.info("Wrote " + output.getAbsolutePath)
}
}
// todo: correct default configuration for extra dependencies
private def addLateDependencies(ivy: Ivy, module: DefaultModuleDescriptor, defaultConfiguration: String, extraDependencies: Iterable[ModuleID])
{
val parser = new CustomXmlParser.CustomParser(ivy.getSettings)
parser.setMd(module)
val defaultConf = if(defaultConfiguration.contains("->")) defaultConfiguration else (defaultConfiguration + "->default(compile)")
parser.setDefaultConf(defaultConf)
IvySbt.addDependencies(module, extraDependencies, parser)
}
private def getConfigurations(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]) =
configurations match
{
case Some(confs) => confs.map(_.name).toList.toArray
case None => module.getPublicConfigurationsNames
}
/** Retain dependencies only with the configurations given, or all public configurations of `module` if `configurations` is None.
* This currently only preserves the information required by makePom*/
private def keepConfigurations(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]): ModuleDescriptor =
{
val keepConfigurations = getConfigurations(module, configurations)
val keepSet = Set(keepConfigurations.toSeq : _*)
def translate(dependency: DependencyDescriptor) =
{
val keep = dependency.getModuleConfigurations.filter(keepSet.contains)
if(keep.isEmpty)
None
else // TODO: translate the dependency to contain only configurations to keep
Some(dependency)
}
val newModule = new DefaultModuleDescriptor(module.getModuleRevisionId, "", null)
newModule.setHomePage(module.getHomePage)
for(dependency <- module.getDependencies; translated <- translate(dependency))
newModule.addDependency(translated)
newModule
}
def deliver(module: IvySbt#Module, status: String, deliverIvyPattern: String, extraDependencies: Iterable[ModuleID], configurations: Option[Iterable[Configuration]], quiet: Boolean)
{
module.withModule { case (ivy, md, default) =>
addLateDependencies(ivy, md, default, extraDependencies)
resolve(quiet)(ivy, md, default) // todo: set download = false for resolve
val revID = md.getModuleRevisionId
val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status)
options.setConfs(getConfigurations(md, configurations))
ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options)
}
}
// todo: map configurations, extra dependencies
/** Publishes the artifacts of `module` to the resolver named `resolverName`.
* `srcArtifactPatterns` locate the artifacts to publish; `deliveredIvyPattern`, when defined,
* locates the Ivy file to publish alongside them. */
def publish(module: IvySbt#Module, resolverName: String, srcArtifactPatterns: Iterable[String], deliveredIvyPattern: Option[String], configurations: Option[Iterable[Configuration]])
{
	module.withModule { case (ivy, md, default) =>
		val options = (new PublishOptions).setOverwrite(true)
		deliveredIvyPattern.foreach(options.setSrcIvyPattern)
		options.setConfs(getConfigurations(md, configurations))
		// Ivy's publish API takes a java.util Collection of source patterns
		val patterns = new java.util.ArrayList[String]
		for(pattern <- srcArtifactPatterns)
			patterns.add(pattern)
		ivy.publish(md.getModuleRevisionId, patterns, resolverName, options)
	}
}
/** Resolves and retrieves the dependencies of `module`.  `configuration` supplies the
* resolution/retrieval settings: quiet logging, synchronization, the retrieve directory,
* and the output pattern used to lay out retrieved files. */
def update(module: IvySbt#Module, configuration: UpdateConfiguration)
{
	module.withModule { case (ivy, md, default) =>
		import configuration._
		resolve(quiet)(ivy, md, default)
		val retrieveOptions = new RetrieveOptions
		retrieveOptions.setSync(synchronize)
		val base = retrieveDirectory.getAbsolutePath
		// ensure exactly one separator between the retrieve directory and the output pattern
		val prefix = if(base.endsWith(File.separator)) base else base + File.separatorChar
		ivy.retrieve(md.getModuleRevisionId, prefix + outputPattern, retrieveOptions)
	}
}
/** Runs Ivy resolution for `module`, logging only downloads when `quiet` is set and failing
* with the distinct problem messages when the resolution report contains errors. */
private def resolve(quiet: Boolean)(ivy: Ivy, module: DefaultModuleDescriptor, defaultConf: String) =
{
	val resolveOptions = new ResolveOptions
	if(quiet)
		resolveOptions.setLog(LogOptions.LOG_DOWNLOAD_ONLY)
	val report = ivy.resolve(module, resolveOptions)
	if(report.hasError)
	{
		// deduplicate repeated problem messages before failing
		val distinctMessages = Set(report.getAllProblemMessages.toArray: _*)
		error(distinctMessages.mkString("\n"))
	}
}
}
/** Maps Ivy configuration names to a Maven scope for pom generation: the first standard
* Maven configuration appearing in `confs` wins; otherwise the first name is used directly,
* and the default configuration (or no configurations at all) maps to no scope. */
private object DefaultConfigurationMapping extends PomModuleDescriptorWriter.ConfigurationScopeMapping(new java.util.HashMap)
{
	override def getScope(confs: Array[String]) =
	{
		val standard = Configurations.defaultMavenConfigurations.find(conf => confs.contains(conf.name))
		standard match
		{
			case Some(conf) => conf.name
			case None =>
				if(confs.isEmpty || confs(0) == Configurations.Default.name) null
				else confs(0)
		}
	}
	/** A dependency is optional when it has no configurations or only the 'optional' configuration. */
	override def isOptional(confs: Array[String]) = confs.isEmpty || (confs.length == 1 && confs(0) == Configurations.Optional.name)
}

View File

@ -0,0 +1,33 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import java.io.File
import scala.xml.NodeSeq
/** Filesystem locations used to configure Ivy: the project base directory and an optional cache directory. */
final class IvyPaths(val baseDirectory: File, val cacheDirectory: Option[File]) extends NotNull
/** Overall Ivy configuration: the paths to operate in, the resolvers to use, and the logger Ivy messages go to. */
final class IvyConfiguration(val paths: IvyPaths, val resolvers: Seq[Resolver], val log: IvyLogger) extends NotNull
/** Inline dependency management configuration for a single module. */
final class ModuleConfiguration(val module: ModuleID, val dependencies: Iterable[ModuleID], val ivyXML: NodeSeq,
	val configurations: Iterable[Configuration], val defaultConfiguration: Option[Configuration], val ivyScala: Option[IvyScala],
	val artifacts: Iterable[Artifact], val validate: Boolean) extends NotNull
{
	// true when no inline information beyond the module ID itself was provided
	def isUnconfigured = dependencies.isEmpty && ivyXML.isEmpty && configurations.isEmpty &&
		defaultConfiguration.isEmpty && artifacts.isEmpty
}
object ModuleConfiguration
{
	/** The configurations to define for a module: the explicitly provided ones when nonempty;
	* otherwise, configurations derived from `defaultConfiguration` — 'default' only for the
	* default Ivy configuration, the standard Maven configurations for the default Maven
	* configuration, and none in any other case. */
	def configurations(explicitConfigurations: Iterable[Configuration], defaultConfiguration: Option[Configuration]) =
		if(!explicitConfigurations.isEmpty)
			explicitConfigurations
		else
			defaultConfiguration match
			{
				case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil
				case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations
				case _ => Nil
			}
}

360
ivy/IvyInterface.scala Normal file
View File

@ -0,0 +1,360 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import java.io.File
import java.net.{URI, URL}
import scala.xml.NodeSeq
import org.apache.ivy.plugins.resolver.IBiblioResolver
import org.apache.ivy.util.url.CredentialsStore
/** Identifies a managed dependency by organization, name, and revision, with an optional
* configuration mapping, changing/transitive flags, and explicitly listed artifacts. */
final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean, explicitArtifacts: Seq[Artifact]) extends NotNull
{
	override def toString = organization + ":" + name + ":" + revision
	// () required for chaining
	def notTransitive() = intransitive()
	/** Returns a copy of this dependency with isTransitive = false. */
	def intransitive() = ModuleID(organization, name, revision, configurations, isChanging, false, explicitArtifacts)
	/** Returns a copy of this dependency with isChanging = true. */
	def changing() = ModuleID(organization, name, revision, configurations, true, isTransitive, explicitArtifacts)
	/** Adds an explicit artifact located at `url` (type/extension inferred from the URL; see Artifact.apply). */
	def from(url: String) = artifacts(Artifact(name, new URL(url)))
	/** Adds an explicit artifact with the given classifier. */
	def classifier(c: String) = artifacts(Artifact(name, c))
	/** Returns a copy with `newArtifacts` prepended to the explicit artifacts. */
	def artifacts(newArtifacts: Artifact*) = ModuleID(organization, name, revision, configurations, isChanging, isTransitive, newArtifacts ++ explicitArtifacts)
}
object ModuleID
{
	/** Constructs a ModuleID with no configuration mapping; not changing, transitive, no explicit artifacts. */
	def apply(organization: String, name: String, revision: String): ModuleID = ModuleID(organization, name, revision, None)
	/** Constructs a non-changing, transitive ModuleID with the given configuration mapping. */
	def apply(organization: String, name: String, revision: String, configurations: Option[String]): ModuleID =
		ModuleID(organization, name, revision, configurations, false, true)
	/** Constructs a ModuleID with no explicit artifacts. */
	def apply(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean): ModuleID =
		ModuleID(organization, name, revision, configurations, isChanging, isTransitive, Nil)
}
/** The base type of all sbt resolver/repository descriptions. */
sealed trait Resolver extends NotNull
{
	/** The name identifying this resolver. */
	def name: String
}
/** A Maven-style repository rooted at the URL `root`. */
sealed case class MavenRepository(name: String, root: String) extends Resolver
{
	override def toString = name + ": " + root
}
/** Value types used to describe pattern-based, filesystem, and ssh/sftp repositories. */
object RepositoryHelpers
{
	/** The Ivy file and artifact patterns of a repository and whether it is also Maven-compatible. */
	final case class Patterns(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean) extends NotNull
	{
		private[xsbt] def mavenStyle(): Patterns = Patterns(ivyPatterns, artifactPatterns, true)
		// new patterns are placed before the existing ones
		private[xsbt] def withIvys(patterns: Seq[String]): Patterns = Patterns(patterns ++ ivyPatterns, artifactPatterns, isMavenCompatible)
		private[xsbt] def withArtifacts(patterns: Seq[String]): Patterns = Patterns(ivyPatterns, patterns ++ artifactPatterns, isMavenCompatible)
	}
	/** Connection information for an ssh-based repository; authentication, hostname, and port are all optional. */
	final case class SshConnection(authentication: Option[SshAuthentication], hostname: Option[String], port: Option[Int]) extends NotNull
	{
		/** Returns a copy of this connection with the given authentication. */
		def copy(authentication: Option[SshAuthentication]) = SshConnection(authentication, hostname, port)
	}
	/** Configuration specific to an Ivy filesystem resolver. */
	final case class FileConfiguration(isLocal: Boolean, isTransactional: Option[Boolean]) extends NotNull
	{
		def transactional() = FileConfiguration(isLocal, Some(true))
		def nontransactional() = FileConfiguration(isLocal, Some(false))
		def nonlocal() = FileConfiguration(false, isTransactional)
	}
	/** How an ssh connection authenticates: user/password or a key file with its password. */
	sealed trait SshAuthentication extends NotNull
	final case class PasswordAuthentication(user: String, password: String) extends SshAuthentication
	final case class KeyFileAuthentication(keyfile: File, password: String) extends SshAuthentication
}
import RepositoryHelpers.{Patterns, SshConnection, FileConfiguration}
import RepositoryHelpers.{KeyFileAuthentication, PasswordAuthentication, SshAuthentication}
/** sbt interface to an Ivy repository based on patterns, which is most Ivy repositories.*/
sealed abstract class PatternsBasedRepository extends Resolver
{
	/** The concrete repository type returned by the fluent configuration methods below. */
	type RepositoryType <: PatternsBasedRepository
	/** Should be implemented to create a new copy of this repository but with `patterns` as given.*/
	protected def copy(patterns: Patterns): RepositoryType
	/** The object representing the configured patterns for this repository. */
	def patterns: Patterns
	/** Enables maven 2 compatibility for this repository. */
	def mavenStyle() = copy(patterns.mavenStyle())
	/** Adds the given patterns for resolving/publishing Ivy files.*/
	def ivys(ivyPatterns: String*): RepositoryType = copy(patterns.withIvys(ivyPatterns))
	/** Adds the given patterns for resolving/publishing artifacts.*/
	def artifacts(artifactPatterns: String*): RepositoryType = copy(patterns.withArtifacts(artifactPatterns))
}
/** sbt interface for an Ivy filesystem repository. More convenient construction is done using Resolver.file. */
final case class FileRepository(name: String, configuration: FileConfiguration, patterns: Patterns) extends PatternsBasedRepository
{
	type RepositoryType = FileRepository
	protected def copy(patterns: Patterns): FileRepository = FileRepository(name, configuration, patterns)
	// replaces only the file configuration
	private def copy(configuration: FileConfiguration) = FileRepository(name, configuration, patterns)
	/** Returns a copy of this repository marked transactional (see FileConfiguration). */
	def transactional() = copy(configuration.transactional())
	/** Returns a copy of this repository marked as not local (see FileConfiguration). */
	def nonlocal() = copy(configuration.nonlocal())
}
/** sbt interface for an Ivy URL repository. More convenient construction is done using Resolver.url. */
final case class URLRepository(name: String, patterns: Patterns) extends PatternsBasedRepository
{
	type RepositoryType = URLRepository
	protected def copy(patterns: Patterns): URLRepository = URLRepository(name, patterns)
}
/** sbt interface for an Ivy ssh-based repository (ssh and sftp). Requires the JSch library. */
sealed abstract class SshBasedRepository extends PatternsBasedRepository
{
	type RepositoryType <: SshBasedRepository
	/** Should be implemented to create a new copy of this repository but with `connection` as given. */
	protected def copy(connection: SshConnection): RepositoryType
	// replaces only the authentication part of the connection
	private def copy(authentication: SshAuthentication): RepositoryType = copy(connection.copy(Some(authentication)))
	/** The object representing the configured ssh connection for this repository. */
	def connection: SshConnection
	/** Configures this to use the specified user name and password when connecting to the remote repository. */
	def as(user: String, password: String): RepositoryType = copy(new PasswordAuthentication(user, password))
	/** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. */
	def as(keyfile: File, password: String): RepositoryType = copy(new KeyFileAuthentication(keyfile, password))
}
/** sbt interface for an Ivy repository over ssh. More convenient construction is done using Resolver.ssh. */
final case class SshRepository(name: String, connection: SshConnection, patterns: Patterns, publishPermissions: Option[String]) extends SshBasedRepository
{
	type RepositoryType = SshRepository
	protected def copy(patterns: Patterns): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
	protected def copy(connection: SshConnection): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
	/** Defines the permissions to set when publishing to this repository. */
	def withPermissions(publishPermissions: String): SshRepository = withPermissions(Some(publishPermissions))
	/** Defines the permissions to set when publishing to this repository; None uses the resolver's default. */
	def withPermissions(publishPermissions: Option[String]): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
}
/** sbt interface for an Ivy repository over sftp. More convenient construction is done using Resolver.sftp. */
final case class SftpRepository(name: String, connection: SshConnection, patterns: Patterns) extends SshBasedRepository
{
	type RepositoryType = SftpRepository
	protected def copy(patterns: Patterns): SftpRepository = SftpRepository(name, connection, patterns)
	protected def copy(connection: SshConnection): SftpRepository = SftpRepository(name, connection, patterns)
}
import Resolver._
/** The Scala-Tools releases repository (root: Resolver.ScalaToolsReleasesRoot). */
object ScalaToolsReleases extends MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot)
/** The Scala-Tools snapshots repository (root: Resolver.ScalaToolsSnapshotsRoot). */
object ScalaToolsSnapshots extends MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot)
/** The repository at Ivy's default Maven2 root (IBiblioResolver.DEFAULT_M2_ROOT). */
object DefaultMavenRepository extends MavenRepository("public", IBiblioResolver.DEFAULT_M2_ROOT)
/** The java.net Maven 1 repository, identified only by its name. */
object JavaNet1Repository extends Resolver
{
	def name = "java.net Maven1 Repository"
}
/** Constants and factory methods for constructing resolvers. */
object Resolver
{
	val ScalaToolsReleasesName = "Scala-Tools Maven2 Repository"
	val ScalaToolsSnapshotsName = "Scala-Tools Maven2 Snapshots Repository"
	val ScalaToolsReleasesRoot = "http://scala-tools.org/repo-releases"
	val ScalaToolsSnapshotsRoot = "http://scala-tools.org/repo-snapshots"
	/** Surrounds `userResolvers` with the standard resolvers; Maven Central and Scala-Tools releases are both included. */
	def withDefaultResolvers(userResolvers: Seq[Resolver]): Seq[Resolver] =
		withDefaultResolvers(userResolvers, true)
	/** Surrounds `userResolvers` with the standard resolvers; Scala-Tools releases is included only when `scalaTools` is true. */
	def withDefaultResolvers(userResolvers: Seq[Resolver], scalaTools: Boolean): Seq[Resolver] =
		withDefaultResolvers(userResolvers, true, scalaTools)
	/** Prepends the default local repository to `userResolvers` and appends DefaultMavenRepository and
	* ScalaToolsReleases when the corresponding flag is true. */
	def withDefaultResolvers(userResolvers: Seq[Resolver], mavenCentral: Boolean, scalaTools: Boolean): Seq[Resolver] =
		Seq(Resolver.defaultLocal) ++
		userResolvers ++
		single(DefaultMavenRepository, mavenCentral)++
		single(ScalaToolsReleases, scalaTools)
	// wraps `value` in a one-element Seq when `nonEmpty` is true, Nil otherwise
	private def single[T](value: T, nonEmpty: Boolean): Seq[T] = if(nonEmpty) Seq(value) else Nil
	/** A base class for defining factories for interfaces to Ivy repositories that require a hostname, port, and patterns. */
	sealed abstract class Define[RepositoryType <: SshBasedRepository] extends NotNull
	{
		/** Subclasses should implement this method to construct the repository from its name, connection, and patterns. */
		protected def construct(name: String, connection: SshConnection, patterns: Patterns): RepositoryType
		/** Constructs this repository type with the given `name`. `basePatterns` are the initial patterns to use. A ManagedProject
		* has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, None, None, None)
		/** Constructs this repository type with the given `name` and `hostname`. `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), None, None)
		/** Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial
		* patterns will be resolved. `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), None, Some(basePath))
		/** Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String, port: Int)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), Some(port), None)
		/** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
		* patterns will be resolved. `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String, port: Int, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), Some(port), Some(basePath))
		/** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
		* patterns will be resolved. `basePatterns` are the initial patterns to use. All but the `name` are optional (use None).
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])(implicit basePatterns: Patterns): RepositoryType =
			construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns))
	}
	/** A factory to construct an interface to an Ivy SSH resolver.*/
	object ssh extends Define[SshRepository]
	{
		protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SshRepository(name, connection, patterns, None)
	}
	/** A factory to construct an interface to an Ivy SFTP resolver.*/
	object sftp extends Define[SftpRepository]
	{
		protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SftpRepository(name, connection, patterns)
	}
	/** A factory to construct an interface to an Ivy filesystem resolver. */
	object file
	{
		/** Constructs a file resolver with the given name. The patterns to use must be explicitly specified
		* using the `ivys` or `artifacts` methods on the constructed resolver object.*/
		def apply(name: String): FileRepository = FileRepository(name, defaultFileConfiguration, ivyStylePatterns)
		/** Constructs a file resolver with the given name and base directory. */
		def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository =
			baseRepository(baseDirectory.toURI)(FileRepository(name, defaultFileConfiguration, _))
	}
	/** A factory to construct an interface to an Ivy URL resolver. */
	object url
	{
		/** Constructs a URL resolver with the given name. The patterns to use must be explicitly specified
		* using the `ivys` or `artifacts` methods on the constructed resolver object.*/
		def apply(name: String): URLRepository = URLRepository(name, ivyStylePatterns)
		/** Constructs a URL resolver with the given name and base URL. */
		def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository =
			baseRepository(baseURL.toURI)(URLRepository(name, _))
	}
	// constructs a repository with the base patterns resolved against the (normalized) base URI
	private def baseRepository[T](baseURI: java.net.URI)(construct: Patterns => T)(implicit basePatterns: Patterns): T =
		construct(resolvePatterns(baseURI.normalize, basePatterns))
	/** If `base` is None, `patterns` is returned unchanged.
	* Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base. */
	private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns =
		base match
		{
			case Some(path) => resolvePatterns(pathURI(path), patterns)
			case None => patterns
		}
	/** Resolves the ivy file and artifact patterns in `patterns` against the given base. */
	private def resolvePatterns(base: URI, basePatterns: Patterns): Patterns =
	{
		def resolve(pattern: String) = base.resolve(pathURI(pattern)).getPath
		def resolveAll(patterns: Seq[String]) = patterns.map(resolve)
		Patterns(resolveAll(basePatterns.ivyPatterns), resolveAll(basePatterns.artifactPatterns), basePatterns.isMavenCompatible)
	}
	/** Constructs a `URI` with the path component set to `path` and the other components set to null.*/
	private def pathURI(path: String) = new URI(null, null, path, null)
	def defaultFileConfiguration = FileConfiguration(true, None)
	def mavenStylePatterns = Patterns(Nil, mavenStyleBasePattern :: Nil, true)
	def ivyStylePatterns = Patterns(Nil, Nil, false)
	def defaultPatterns = mavenStylePatterns
	def mavenStyleBasePattern = "[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]"
	def localBasePattern = "[organisation]/[module]/[revision]/[type]s/[artifact].[ext]"
	def userRoot = System.getProperty("user.home")
	def userMavenRoot = userRoot + "/.m2/repository/"
	def userIvyRoot = userRoot + "/.ivy2/"
	/** The user's local Ivy file repository (~/.ivy2/local). */
	def defaultLocal = defaultUserFileRepository("local")
	/** The user's shared Ivy file repository (~/.ivy2/shared). */
	def defaultShared = defaultUserFileRepository("shared")
	// a file repository under the user's Ivy root using the local Ivy-style patterns
	def defaultUserFileRepository(id: String) = file(id, new File(userIvyRoot, id))(defaultIvyPatterns)
	def defaultIvyPatterns =
	{
		val pList = List(localBasePattern)
		Patterns(pList, pList, false)
	}
}
/** The standard dependency configurations and a factory (`config`) for custom ones. */
object Configurations
{
	def config(name: String) = new Configuration(name)
	/** The configurations used for Maven-style dependency management. */
	def defaultMavenConfigurations = Compile :: Runtime :: Test :: Provided :: System :: Optional :: Sources :: Javadoc :: Nil
	lazy val Default = config("default")
	lazy val Compile = config("compile")
	// `hide` makes the configuration non-public (isPublic = false)
	lazy val IntegrationTest = config("it") hide
	lazy val Provided = config("provided")
	lazy val Javadoc = config("javadoc")
	lazy val Runtime = config("runtime")
	lazy val Test = config("test") hide
	lazy val Sources = config("sources")
	lazy val System = config("system")
	lazy val Optional = config("optional")
	lazy val CompilerPlugin = config("plugin") hide
	// marker configurations selecting the default configuration mapping for each style
	private[xsbt] val DefaultMavenConfiguration = defaultConfiguration(true)
	private[xsbt] val DefaultIvyConfiguration = defaultConfiguration(false)
	private[xsbt] def DefaultConfiguration(mavenStyle: Boolean) = if(mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration
	/** The default configuration mapping: "compile->default(compile)" for Maven style, "default->default(compile)" for Ivy style. */
	private[xsbt] def defaultConfiguration(mavenStyle: Boolean) =
	{
		val base = if(mavenStyle) Configurations.Compile else Configurations.Default
		config(base.name + "->default(compile)")
	}
	// keeps one Configuration per name; for duplicate names, a later entry replaces an earlier one
	private[xsbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*)
}
/** Represents an Ivy configuration. */
final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean) extends NotNull
{
	require(name != null && !name.isEmpty)
	require(description != null)
	/** Creates a public, transitive configuration with no description that extends no other configurations. */
	def this(name: String) = this(name, "", true, Nil, true)
	def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive)
	/** Returns a copy with `configs` prepended to the configurations this one extends. */
	def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive)
	def notTransitive = intransitive
	/** Returns a copy with transitive = false. */
	def intransitive = Configuration(name, description, isPublic, extendsConfigs, false)
	/** Returns a copy with isPublic = false. */
	def hide = Configuration(name, description, false, extendsConfigs, transitive)
	override def toString = name
}
/** A dependency artifact: name, type, extension, optional classifier, the configurations it belongs to, and an optional explicit URL. */
final case class Artifact(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL]) extends NotNull
object Artifact
{
	val defaultExtension = "jar"
	val defaultType = "jar"
	/** An artifact with the default type and extension and no classifier, configurations, or URL. */
	def apply(name: String): Artifact = Artifact(name, defaultType, defaultExtension, None, Nil, None)
	/** An artifact with the given classifier and the default type and extension. */
	def apply(name: String, classifier: String): Artifact = Artifact(name, defaultType, defaultExtension, Some(classifier), Nil, None)
	/** An artifact with an explicit type and extension and no classifier, configurations, or URL. */
	def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, None, Nil, None)
	/** An artifact located at `url`; the text after the last '.' in the URL (when present) is used
	* for both the type and the extension, falling back to the defaults otherwise. */
	def apply(name: String, url: URL): Artifact = Artifact(name, extract(url, defaultType), extract(url, defaultExtension), None, Nil, Some(url))
	// the substring after the last '.' of the URL, or `default` when there is no '.'
	private[this] def extract(url: URL, default: String) =
	{
		val full = url.toString
		val dot = full.lastIndexOf('.')
		if(dot < 0) default else full.substring(dot + 1)
	}
}
/*
object Credentials
{
/** Add the provided credentials to Ivy's credentials cache.*/
def add(realm: String, host: String, userName: String, passwd: String): Unit =
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log)
/** Load credentials from the given file into Ivy's credentials cache.*/
def apply(path: Path, log: Logger)
{
val msg =
if(path.exists)
{
val properties = new scala.collection.mutable.HashMap[String, String]
def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path)
impl.MapUtilities.read(properties, path, log) orElse
{
List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
{
case (Nil, List(realm, host, user, pass)) => add(realm, host, user, pass); None
case (errors, _) => Some(errors.mkString("\n"))
}
}
}
else
Some("Credentials file " + path + " does not exist")
msg.foreach(x => log.warn(x))
}
private[this] val RealmKeys = List("realm")
private[this] val HostKeys = List("host", "hostname")
private[this] val UserKeys = List("user", "user.name", "username")
private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd")
}*/

54
ivy/IvyLogger.scala Normal file
View File

@ -0,0 +1,54 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import org.apache.ivy.util.{Message, MessageLogger}
/** The logging interface sbt supplies to Ivy.  Messages are by-name so they are
* evaluated only if the implementation uses them. */
trait IvyLogger
{
	def info(msg: => String)
	def debug(msg: => String)
	def warn(msg: => String)
	def error(msg: => String)
	def verbose(msg: => String)
}
/** Interface to Ivy logging: adapts Ivy's MessageLogger to the backing IvyLogger. */
private final class IvyLoggerInterface(logger: IvyLogger) extends MessageLogger
{
	def rawlog(msg: String, level: Int) = log(msg, level)
	/** Dispatches an Ivy message to the appropriate IvyLogger method based on `level`. */
	def log(msg: String, level: Int)
	{
		import Message.{MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR}
		level match
		{
			case MSG_DEBUG => debug(msg)
			case MSG_VERBOSE => verbose(msg)
			case MSG_INFO => info(msg)
			case MSG_WARN => warn(msg)
			case MSG_ERR => error(msg)
			// be robust to unrecognized levels instead of throwing a MatchError
			case _ => info(msg)
		}
	}
	def debug(msg: String) = logger.debug(msg)
	def verbose(msg: String) = logger.verbose(msg)
	// deprecation messages from Ivy are forwarded as warnings
	def deprecated(msg: String) = warn(msg)
	def info(msg: String) = logger.info(msg)
	def rawinfo(msg: String) = info(msg)
	def warn(msg: String) = logger.warn(msg)
	def error(msg: String) = logger.error(msg)
	private def emptyList = java.util.Collections.emptyList[T forSome { type T}]
	// NOTE(review): problems/warnings/errors are not accumulated here; presumably Ivy
	// tolerates the empty lists for sbt's usage — confirm against MessageLogger's contract
	def getProblems = emptyList
	def getWarns = emptyList
	def getErrors = emptyList
	def clearProblems = ()
	def sumupProblems = ()
	def progress = ()
	def endProgress = ()
	def endProgress(msg: String) = info(msg)
	def isShowProgress = false
	def setShowProgress(progress: Boolean) {}
}

74
ivy/IvyScala.scala Normal file
View File

@ -0,0 +1,74 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import java.util.Collections
import scala.collection.mutable.HashSet
import org.apache.ivy.{core, plugins}
import core.module.descriptor.{DefaultExcludeRule, ExcludeRule}
import core.module.descriptor.{DefaultModuleDescriptor, ModuleDescriptor}
import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId}
import plugins.matcher.ExactPatternMatcher
/** Settings for sbt's handling of a module's Scala dependencies (consumed by IvyScala.checkModule). */
final class IvyScala(val scalaVersion: String, val configurations: Iterable[Configuration], val checkExplicit: Boolean, val filterImplicit: Boolean) extends NotNull
private object IvyScala
{
	val ScalaOrganization = "org.scala-lang"
	val ScalaLibraryID = "scala-library"
	val ScalaCompilerID = "scala-compiler"
	/** Performs checks/adds filters on Scala dependencies (if enabled in IvyScala). */
	def checkModule(module: DefaultModuleDescriptor, conf: String)(check: IvyScala)
	{
		// note: `conf` is currently unused by this method
		if(check.checkExplicit)
			checkDependencies(module, check.scalaVersion, check.configurations)
		if(check.filterImplicit)
			excludeScalaJars(module, check.configurations)
	}
	/** Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the
	* dependencies matches scalaVersion. */
	private def checkDependencies(module: ModuleDescriptor, scalaVersion: String, configurations: Iterable[Configuration])
	{
		val configSet = configurationSet(configurations)
		for(dep <- module.getDependencies.toList)
		{
			val id = dep.getDependencyRevisionId
			if(id.getOrganisation == ScalaOrganization && id.getRevision != scalaVersion && dep.getModuleConfigurations.exists(configSet.contains))
				error("Different Scala version specified in dependency ("+ id.getRevision + ") than in project (" + scalaVersion + ").")
		}
	}
	// Configuration.toString is its name, so this builds the set of configuration names
	private def configurationSet(configurations: Iterable[Configuration]) = HashSet(configurations.map(_.toString).toSeq : _*)
	/** Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is
	* done because normally these jars are already on the classpath and cannot/should not be overridden. The version
	* of Scala to use is done by setting scala.version in the project definition. */
	private def excludeScalaJars(module: DefaultModuleDescriptor, configurations: Iterable[Configuration])
	{
		// the configuration names the exclude rules apply to: all of the module's configurations
		// when `configurations` is empty, otherwise the intersection of the two
		val configurationNames =
		{
			val names = module.getConfigurationsNames
			if(configurations.isEmpty)
				names
			else
			{
				val configSet = configurationSet(configurations)
				// NOTE(review): this appears to rely on scala.collection.mutable.Set.intersect
				// mutating configSet in place (Scala 2.7 behavior); the return value is discarded — confirm
				configSet.intersect(HashSet(names : _*))
				configSet.toArray
			}
		}
		def excludeScalaJar(name: String): Unit =
			module.addExcludeRule(excludeRule(ScalaOrganization, name, configurationNames))
		excludeScalaJar(ScalaLibraryID)
		excludeScalaJar(ScalaCompilerID)
	}
	/** Creates an ExcludeRule that excludes artifacts with the given module organization and name for
	* the given configurations. */
	private def excludeRule(organization: String, name: String, configurationNames: Iterable[String]): ExcludeRule =
	{
		val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", "*", "*")
		val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, Collections.emptyMap[AnyRef,AnyRef])
		configurationNames.foreach(rule.addConfiguration)
		rule
	}
}

32
notes Normal file
View File

@ -0,0 +1,32 @@
Goals/Guidelines for xsbt
=====
As usual:
- Immutable interfaces
- Typesafe
- Robust, flexible API
Task engine
- method tasks will be normal tasks that pull the command line from a CommandLine task
- possibly have per task logging, including configuration (e.g. 'debug compile')
- unnamed tasks log to parent task
- in parallel, optionally one task always logging
- boot interface contains static final int version = N that main xsbt can use to check if it can be loaded by that version (a lower bound check)
- main xsbt has static final int version = N that boot can use to check if it can load that version (a lower bound check)
- Have Interfaces subproject that depends on no other project and defines interfaces in package xsbti. They are written in Java and cannot refer to Scala classes (compileOrder = JavaThenScala). These interfaces are loaded by the root loader and can be used to pass objects across ClassLoader boundaries.
- launcher/main interface is not static (no system properties!)
- simple, well-defined ClassLoaders
- use Exceptions instead of Option/Either
- every component gets its own subproject
- can use any version of compiler/Scala that is source compatible
- requires CrossLogger that can interface across ClassLoader boundaries with reflection
- Logger passed by implicit parameter
- build using normal cross-build conventions
- compiler: raw interface (no dependency analysis) or with dependency analysis
- compiler: can specify scala-library.jar and scala-compiler.jar + version instead of retrieving the ClassLoader
- minimal dependence on main xsbt logger from subcomponents: use thin interface for subcomponents and implement interface in separate files in main xsbt
Dependency Management
- drop explicit managers
- resolvers are completely defined in project definition (use Resolver.withDefaultResolvers)
- configurations completely defined within project (use ModuleConfiguration.configurations)

7
project/build.properties Normal file
View File

@ -0,0 +1,7 @@
#Project Properties
#Sat Aug 15 11:30:36 EDT 2009
project.name=xsbt
project.organization=org.scala-tools.sbt
sbt.version=0.5.3-p1
scala.version=2.7.5
project.version=0.7

35
project/build/XSbt.scala Normal file
View File

@ -0,0 +1,35 @@
import sbt._
/** The sbt project definition for building xsbt itself: one subproject per component. */
class XSbt(info: ProjectInfo) extends ParentProject(info)
{
	def utilPath = path("util")
	val controlSub = project(utilPath / "control", "Control", new Base(_))
	val collectionSub = project(utilPath / "collection", "Collections", new Base(_))
	val ioSub = project(utilPath / "io", "IO", new Base(_),controlSub)
	val classpathSub = project(utilPath / "classpath", "Classpath", new Base(_))
	val ivySub = project("ivy", "Ivy", new IvyProject(_))
	val logSub = project(utilPath / "log", "Logging", new Base(_))
	val taskSub = project("tasks", "Tasks", new TaskProject(_), controlSub, collectionSub)
	val cacheSub = project("cache", "Cache", new CacheProject(_), taskSub, ioSub)
	override def parallelExecution = true
	/** The tasks subproject; uses ScalaCheck for its tests. */
	class TaskProject(info: ProjectInfo) extends Base(info)
	{
		val sc = "org.scala-tools.testing" % "scalacheck" % "1.5" % "test->default"
	}
	class CacheProject(info: ProjectInfo) extends Base(info)
	{
		//override def compileOptions = super.compileOptions ++ List(Unchecked,ExplainTypes, CompileOption("-Xlog-implicits"))
	}
	/** Settings common to all subprojects. */
	class Base(info: ProjectInfo) extends DefaultProject(info) with AssemblyProject
	{
		override def scratch = true
	}
	/** The Ivy integration subproject; depends on Apache Ivy. */
	class IvyProject(info: ProjectInfo) extends Base(info)
	{
		val ivy = "org.apache.ivy" % "ivy" % "2.0.0"
	}
}

190
tasks/ParallelRunner.scala Normal file
View File

@ -0,0 +1,190 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package xsbt
/** This file provides the parallel execution engine of sbt. It is a fairly general module, with pluggable Schedulers and Strategies.
*
* There are three main components to the engine: Distributors, Schedulers, and Strategies.
*
* A Scheduler provides work that is ready to execute.
*
* A Strategy is used by a Scheduler to select the work to process from the work that is ready. It is notified as work
* becomes ready. It is requested to select work to process from the work that is ready.
*
* A Distributor uses a Scheduler to obtain work up to the maximum work allowed to run at once. It runs each
* unit of work in its own Thread. It then returns the work and either the computed value or the error that occurred.
*
* The Scheduler and Strategy are called from the main thread and therefore do not need to be thread-safe.
**/
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.{immutable, mutable}
import immutable.TreeSet
/** Processes a single unit of work of type `Work[A]`, producing a `Result[A]`. Implementations supply the actual execution logic. */
trait Compute[Work[_],Result[_]] { def apply[A](w: Work[A]): Result[A] }
/** Requests work from `scheduler` and processes it using `compute`. This class limits the amount of work processing at any given time
* to `workers`. Each unit of work runs on its own worker Thread; the scheduler and strategy are only ever
* touched from the coordinating (main) thread, so they need not be thread-safe. */
final class Distributor[O,Work[_],Result[_]](val scheduler: Scheduler[O,Work,Result], compute: Compute[Work,Result], workers: Int) extends NotNull
{
	require(workers > 0)
	/** Runs the scheduler to completion and returns its final result. Each call creates a fresh Run with fresh workers. */
	final def run() = (new Run).run()
	/** Per-invocation state: worker threads, the running count, and the completion queue. */
	private final class Run extends NotNull
	{
		import java.util.concurrent.LinkedBlockingQueue
		private[this] val schedule = scheduler.run
		/** The number of threads currently running. */
		private[this] var running = 0
		/** Pending notifications of completed work. */
		private[this] val complete = new LinkedBlockingQueue[Done[_]]
		private[Distributor] def run(): O =
		{
			// Main loop: hand out ready work, then block until something finishes; repeat until the scheduler is drained.
			def runImpl(): O =
			{
				next()
				if(isIdle && !schedule.hasPending) // test if all work is complete
				{
					assume(schedule.isComplete, "Distributor idle and the scheduler indicated no work pending, but scheduler indicates it is not complete.")
					schedule.result
				}
				else
				{
					waitForCompletedWork() // wait for some work to complete
					runImpl() // continue
				}
			}
			// Workers are always told to shut down, even if the run fails.
			try { runImpl() }
			finally { shutdown() }
		}
		/** Sends the poison pill (None) to every worker so its thread exits. */
		private def shutdown(): Unit = all.foreach(_.work.put(None))
		// true if the maximum number of worker threads are currently running
		private def atMaximum = running == workers
		private def availableWorkers = workers - running
		// true if no worker threads are currently running
		private def isIdle = running == 0
		// process more work
		private def next()
		{
			// if the maximum threads are being used, do nothing
			// if all work is complete or the scheduler is waiting for current work to complete, do nothing
			if(!atMaximum && schedule.hasPending)
			{
				val nextWork = schedule.next(availableWorkers)
				val nextSize = nextWork.size
				assume(nextSize <= availableWorkers, "Scheduler provided more work (" + nextSize + ") than allowed (" + availableWorkers + ")")
				// If idle, the scheduler must provide something or the run would deadlock.
				assume(nextSize > 0 || !isIdle, "Distributor idle and the scheduler indicated work pending, but provided no work.")
				nextWork.foreach(work => process(work))
			}
		}
		// wait on the blocking queue `complete` until some work finishes and notify the scheduler
		private def waitForCompletedWork()
		{
			assume(running > 0)
			val done = complete.take()
			running -= 1
			notifyScheduler(done)
		}
		private def notifyScheduler[T](done: Done[T]): Unit = schedule.complete(done.work, done.result)
		/** Dispatches one unit of work to an idle worker (blocks until one is available, though `running < workers` implies one is). */
		private def process[T](work: Work[T])
		{
			assume(running + 1 <= workers)
			running += 1
			available.take().work.put(Some(work))
		}
		private[this] val all = List.tabulate(workers, i => new Worker)
		// Queue of idle workers; a worker re-enqueues itself after finishing a unit of work.
		private[this] val available =
		{
			val q = new LinkedBlockingQueue[Worker]
			all.foreach(q.put)
			q
		}
		private final class Worker extends Thread with NotNull
		{
			// NOTE: first access to `work` starts the thread as a side effect of the lazy initializer.
			lazy val work =
			{
				start()
				new LinkedBlockingQueue[Option[Work[_]]]
			}
			override def run()
			{
				// Runs `compute` and reports the outcome (value or caught Throwable) back on the shared `complete` queue.
				def processData[T](data: Work[T])
				{
					val result = ErrorHandling.wideConvert(compute(data))
					complete.put( new Done(result, data) )
				}
				// Blocks for work; Some(data) is a job, None is the shutdown signal.
				def runImpl()
				{
					work.take() match
					{
						case Some(data) =>
							processData(data)
							available.put(this)
							runImpl()
						case None => ()
					}
				}
				try { runImpl() }
				catch { case e: InterruptedException => () }
			}
		}
	}
	/** Pairs a completed unit of work with its outcome for delivery back to the coordinating thread. */
	private final class Done[T](val result: Either[Throwable, Result[T]], val work: Work[T]) extends NotNull
}
/** Schedules work of type Work that produces results of type Result. A Scheduler determines what work is ready to be processed.
* A Scheduler is itself immutable. It creates a mutable object for each scheduler run.
* All Run methods are invoked only from the Distributor's coordinating thread, so a Run need not be thread-safe. */
trait Scheduler[O,Work[_],Result[_]] extends NotNull
{
	/** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run
	* is encapsulated in this object.*/
	def run: Run
	trait Run extends NotNull
	{
		/** Notifies this scheduler that work has completed with the given result.*/
		def complete[A](d: Work[A], result: Either[Throwable,Result[A]]): Unit
		/** Returns true if there is any more work to be done, although remaining work can be blocked
		* waiting for currently running work to complete.*/
		def hasPending: Boolean
		/**Returns true if this scheduler has no more work to be done, ever.*/
		def isComplete: Boolean
		/** Returns up to 'max' units of work. `max` is always positive. The returned sequence must not be
		* empty when no work is currently being processed (otherwise the run would deadlock).*/
		def next(max: Int): Seq[Work[_]]
		/** The final result after all work has completed. */
		def result: O
	}
}
/** A Strategy selects the work to process from work that is ready to be processed.*/
trait ScheduleStrategy[D] extends NotNull
{
	/** Starts a new run. The returned object is a new Run, representing a single strategy run. All state for the run
	* is handled through this object and is encapsulated in this object.*/
	def run: Run
	trait Run extends NotNull
	{
		/** Adds the given work to the list of work that is ready to run.*/
		def workReady(dep: D): Unit
		/** Returns true if there is work ready to be run. */
		def hasReady: Boolean
		/** Provides up to `max` units of work. `max` is always positive and this method is not called
		* if hasReady is false. The returned list cannot be empty if there is work ready to be run.*/
		def next(max: Int): List[D]
	}
}
/** The simplest ScheduleStrategy: ready work is kept on a stack, so the most recently readied work is handed out first. */
final class SimpleStrategy[D] extends ScheduleStrategy[D]
{
	def run = new Run
	{
		// Stack of ready work; `workReady` pushes, `next` pops up to `max` entries.
		private var pending: List[D] = Nil
		def workReady(dep: D) { pending = dep :: pending }
		def hasReady = !pending.isEmpty
		def next(max: Int): List[D] =
		{
			val (handedOut, stillPending) = pending.splitAt(max)
			pending = stillPending
			handedOut
		}
	}
}

145
tasks/Task.scala Normal file
View File

@ -0,0 +1,145 @@
package xsbt
import Task.{mapTask,bindTask, ITask}
import scala.collection.{mutable,immutable}
/** A unit of work producing a value of type O. `Input` is the type extracted from the results of its dependencies.
* Identity-based equality (via Identity) is essential: tasks are used as map/set keys throughout the scheduler. */
sealed abstract class Task[O] extends Identity
{
	type Input
	def dependencies: TreeHashSet[Task[_]] // IMPORTANT!! immutable.HashSet is NOT suitable. It has issues with multi-threaded access
	/** Produces a new task applying `f` to this task's result. */
	def map[N](f: O => N): ITask[O,N]
	/** Produces a new task that runs the task returned by `f` applied to this task's result. */
	def bind[N](f: O => Task[N]): ITask[O,N]
	/** Adds the given tasks as dependencies of this task. */
	def dependsOn(addDependencies: Task[_]*): ITask[Input,O]
	/** Assigns a diagnostic name; fails if already named. */
	def named(s: String): ITask[Input,O]
}
/** The sole concrete Task implementation. `extract` pulls this task's input I from completed dependency results;
* `compute` produces either a Value or a NewTask (continuation) from that input. */
private final class M[I,O,R <: Result[O]](name: Option[String])
	(val dependencies: TreeHashSet[Task[_]])(val extract: Results => I)(val compute: I => R) extends Task[O]
{
	type Input = I
	// Convenience constructor: unnamed, dependencies given as varargs.
	def this(dependencies: Task[_]*)(extract: Results => I)(compute: I => R) =
		this(None)(TreeHashSet(dependencies: _*))(extract)(compute)
	final def dependsOn(addDependencies: Task[_]*) = new M(name)(dependencies ++ addDependencies)(extract)(compute)
	// map/bind treat this task as the single dependency of the derived task; `_(this)` extracts this task's result.
	final def map[N](f: O => N) = mapTask(this)(_(this))(f)
	final def bind[N](f: O => Task[N]) = bindTask(this)(_(this))(f)
	final def named(s: String) =
		name match
		{
			case Some(n) => error("Cannot rename task already named '" + n + "'. (Tried to rename it to '" + s + "')")
			case None => new M(Some(s))(dependencies)(extract)(compute)
		}
	final override def toString = "Task " + name.getOrElse("<anon>")
}
/** Base class forcing reference equality and identity hashing: each instance is equal only to itself.
* Required so distinct but structurally-equal tasks stay distinct as map/set keys. */
abstract class Identity extends NotNull
{
	final override def equals(other: Any) = other match { case ref: AnyRef => ref eq this; case _ => false }
	final override def hashCode = System.identityHashCode(this)
}
/** Read-only view of completed task results, used by a task's `extract` function to pull dependency values. */
private trait Results extends NotNull
{
	def apply[O](task: Task[O]): O
	def contains(task: Task[_]): Boolean
}
/** Outcome of computing a task: either a finished Value or a NewTask continuation that must itself be run. */
private sealed trait Result[O] extends NotNull
private final case class NewTask[O](t: Task[O]) extends Result[O]
private final case class Value[O](t: O) extends Result[O]
/** Task constructors plus implicit builder syntax for combining tasks (iterables, tuples, and HLists). */
object Task
{
	/** A Task whose dependency-extracted input type is statically known to be I. */
	type ITask[I,O] = Task[O] { type Input = I }
	import Function.tupled
	/** A leaf task: no dependencies, evaluates `o` by name when run. */
	def apply[O](o: => O): ITask[Unit,O] =
		new M[Unit,O,Value[O]]()(r => ())( u => Value(o) )
	/** A task whose computation yields another task to run (a continuation). */
	def bindTask[I,O](dependencies: Task[_]*)(extract: Results => I)(compute: I => Task[O]): ITask[I,O] =
		new M[I,O,NewTask[O]](dependencies : _*)(extract)(in => NewTask(compute(in)))
	/** A task whose computation yields a plain value. */
	def mapTask[I,O](dependencies: Task[_]*)(extract: Results => I)(compute: I => O): ITask[I,O] =
		new M[I,O,Value[O]](dependencies : _*)(extract)(in => Value(compute(in)))
	// Accessors used by the scheduler; M is the only Task subclass, so these matches are exhaustive.
	private[xsbt] def extract[I,O](t: ITask[I,O], results: Results): I = t match { case m: M[I,O,_] => m.extract(results) }
	private[xsbt] def compute[I,O](t: ITask[I,O], input: I): Result[O] = t match { case m: M[I,O,_] => m.compute(input) }
	implicit def iterableToForkBuilder[A](t: Iterable[A]): ForkBuilderIterable[A] = new ForkBuilderIterable(t)
	/** Syntax for turning a plain collection into parallel tasks. */
	final class ForkBuilderIterable[A] private[Task](a: Iterable[A]) extends NotNull
	{
		/** Wraps each element in its own task applying `f`. */
		def fork[X](f: A => X): Iterable[ITask[Unit,X]] = a.map(x => Task(f(x)))
		/** Pairwise-reduces the elements as a tree of tasks. */
		def reduce(f: (A,A) => A): Task[A] = fork(x => x) reduce(f)
	}
	implicit def iterableToBuilder[O](t: Iterable[Task[O]]): BuilderIterable[O] = new BuilderIterable(t)
	/** Syntax for combining a collection of tasks. */
	final class BuilderIterable[O] private[Task](a: Iterable[Task[O]]) extends NotNull
	{
		//def mapBind[X](f: O => Task[_,X]): Iterable[Task[O,XO]] = a.map(_.bind(f))
		/** A task producing all results once every task in the collection completes. */
		def join: Task[Iterable[O]] = join(identity[O])
		def join[X](f: O => X): Task[Iterable[X]] = mapTask(a.toSeq: _*)( r => a.map(t => r(t)) )(_.map(f))
		//def bindJoin[X](f: O => Task[_,X]): Task[Iterable[X],Iterable[X]] = mapBind(f).join
		/** Balanced pairwise reduction: builds a tree of 2-ary map tasks so reductions can run in parallel. */
		def reduce(f: (O,O) => O): Task[O] =
		{
			// One pass: combine adjacent pairs, roughly halving the list.
			def reduce2(list: List[Task[O]], accumulate: List[Task[O]]): List[Task[O]] =
				list match
				{
					case Nil => accumulate
					case x :: Nil => x :: accumulate
					case xa :: xb :: tail => reduce2(tail, ( (xa, xb) map f ) :: accumulate )
				}
			// Repeat passes until a single task remains; fails on an empty input.
			def reduce1(list: List[Task[O]]): Task[O] =
				list match
				{
					case Nil => error("Empty list")
					case x :: Nil => x
					case _ => reduce1(reduce2(list, Nil))
				}
			reduce1(a.toList)
		}
	}
	import metascala.HLists.{HList,HNil,HCons}
	/** Heterogeneous list of tasks; `HListType` is the HList of their result types. */
	sealed trait TList
	{
		type Head
		type Tail <: TList
		type HListType <: HList
		def tasks: List[Task[_]]
		/** Extracts all task results into the corresponding HList. */
		def get(results: Results): HListType
	}
	final class TNil extends TList
	{
		type Head = Nothing
		type Tail = TNil
		type HListType = HNil
		def ::[A](t: Task[A]) = TCons[A,HNil,TNil](t, this)
		def tasks = Nil
		def get(results: Results) = HNil
	}
	/** Cons cell: the refinement `T <: TList { type HListType = HL }` ties the tail's result HList to HL. */
	final case class TCons[H, HL <: HList, T <: TList { type HListType = HL}](head: Task[H], tail: T) extends TList
	{
		type Head = H
		type Tail = T
		type This = TCons[H,HL,T]
		type HListType = HCons[H,HL]
		def ::[A](t: Task[A]) = TCons[A,HListType,This](t, this)
		def tasks = head :: tail.tasks
		def get(results: Results) = HCons(results(head), tail.get(results))
		def map[X](f: HListType => X): ITask[HListType,X] = mapTask(tasks: _*)(get)(f)
		def bind[X](f: HListType => Task[X]): ITask[HListType,X] = bindTask(tasks: _*)(get)(f)
		def join: ITask[HListType,HListType] = map(identity[HListType])
	}
	val TNil = new TNil
	/** Builder syntax for pairs of tasks. */
	implicit def twoToBuilder[A,B](t: (Task[A], Task[B]) ): Builder2[A,B] =
		t match { case (a,b) => new Builder2(a,b) }
	final class Builder2[A,B] private[Task](a: Task[A], b: Task[B]) extends NotNull
	{
		def map[X](f: (A,B) => X): ITask[(A,B),X] = mapTask(a,b)(r => (r(a), r(b)))(tupled(f))
		def bind[X](f: (A,B) => Task[X]): ITask[(A,B),X] = bindTask(a,b)( r => (r(a), r(b)) )(tupled(f))
	}
	/** Builder syntax for triples of tasks. */
	implicit def threeToBuilder[A,B,C](t: (Task[A], Task[B], Task[C])): Builder3[A,B,C] = t match { case (a,b,c) => new Builder3(a,b,c) }
	final class Builder3[A,B,C] private[Task](a: Task[A], b: Task[B], c: Task[C]) extends NotNull
	{
		def map[X](f: (A,B,C) => X): ITask[(A,B,C),X] = mapTask(a,b,c)( r => (r(a), r(b), r(c)) )(tupled(f))
		def bind[X](f: (A,B,C) => Task[X]): ITask[(A,B,C),X] = bindTask(a,b,c)( r => (r(a), r(b), r(c)) )(tupled(f))
	}
}

33
tasks/TaskListener.scala Normal file
View File

@ -0,0 +1,33 @@
package xsbt
/** Receives task lifecycle events from the scheduler. Callbacks are invoked from the coordinating thread. */
trait TaskListener extends NotNull
{
	/** A task was added to the execution graph. */
	def added(t: Task[_]): Unit
	/** A task's dependencies completed; it is ready to run. */
	def runnable(t: Task[_]): Unit
	/** A task was handed to a worker. */
	def running(t: Task[_]): Unit
	/** `caller`'s computation produced `t` as a continuation. */
	def calling(caller: Task[_], t: Task[_]): Unit
	/** The continuation `t` finished on behalf of `caller`. */
	def called(caller: Task[_], t: Task[_]): Unit
	/** A task finished; `value` is None when it was cancelled or failed. */
	def completed[T](t: Task[T], value: Option[T]): Unit
	/** A task's computation threw `exception`. */
	def failed[T](t: Task[T], exception: Throwable): Unit
}
/** A TaskListener whose callbacks all do nothing; subclass and override only the events of interest. */
class BasicTaskListener extends TaskListener
{
	def added(t: Task[_]): Unit = ()
	def runnable(t: Task[_]): Unit = ()
	def running(t: Task[_]): Unit = ()
	def calling(caller: Task[_], t: Task[_]): Unit = ()
	def called(caller: Task[_], t: Task[_]): Unit = ()
	def completed[T](t: Task[T], value: Option[T]): Unit = ()
	def failed[T](t: Task[T], exception: Throwable): Unit = ()
}
/** A TaskListener that traces every lifecycle event to standard output. */
class DebugTaskListener extends TaskListener
{
	def added(t: Task[_]): Unit = debug("Added " + t)
	def runnable(t: Task[_]): Unit = debug("Runnable " + t)
	def running(t: Task[_]): Unit = debug("Running " + t)
	def calling(caller: Task[_], t: Task[_]): Unit = debug(caller + " calling " + t)
	def called(caller: Task[_], t: Task[_]): Unit = debug(caller + " called " + t)
	def completed[T](t: Task[T], value: Option[T]): Unit = debug("Completed " + t + " with " + value)
	def failed[T](t: Task[T], exception: Throwable): Unit =
	{
		debug("Failed " + t + " with " + exception.toString)
		exception.printStackTrace
	}
	// Single sink for all trace output.
	private def debug(msg: String): Unit = println(msg)
}

17
tasks/TaskRunner.scala Normal file
View File

@ -0,0 +1,17 @@
package xsbt
/** Entry point for running a task graph: wires together the scheduler, strategy, and distributor. */
object TaskRunner
{
	/** Runs `node` using one worker per available processor. */
	def apply[T](node: Task[T]): Either[ List[WorkFailure[Task[_]]] , T ] = apply(node, Runtime.getRuntime.availableProcessors)
	/** Executes work for nodes in a directed acyclic graph with root node `node`.
	* The maximum number of tasks to execute simultaneously is `maximumTasks`.
	* Returns Right(result) on success or Left(failures) when any task failed. */
	def apply[T](node: Task[T], maximumTasks: Int): Either[ List[WorkFailure[Task[_]]] , T ] =
	{
		require(maximumTasks > 0)
		// Executing a Work item simply invokes the task's compute function with its extracted input.
		val compute = new Compute[Work.Job, Result] { def apply[A](w: Work.Job[A]) = w.apply }
		val strategy = new SimpleStrategy[Work[_,_]]
		val scheduler = new TaskScheduler(node, strategy, new BasicTaskListener)
		val distributor = new Distributor[ Either[ List[WorkFailure[Task[_]]], T ] , Work.Job, Result](scheduler, compute, maximumTasks)
		distributor.run()
	}
}

243
tasks/TaskScheduler.scala Normal file
View File

@ -0,0 +1,243 @@
package xsbt
import scala.collection.{immutable,mutable}
import Task.ITask
/** Records that the unit of work `work` failed with `exception`. */
final case class WorkFailure[D](work: D, exception: Throwable) extends NotNull
{
	def map[C](f: D => C) = WorkFailure(f(work), exception)
}
/** Scheduler for Task graphs. Tracks dependencies in both directions, handles dynamic `bind` continuations
* via a call stack (CalledByMap), detects circular dependencies, and propagates failures to dependent tasks.
* All mutable state lives inside Run, which is only ever accessed from the Distributor's coordinating thread. */
private final class TaskScheduler[O](root: Task[O], strategy: ScheduleStrategy[Work[_,_]], newListener: => TaskListener)
	extends Scheduler[ Either[ List[WorkFailure[Task[_]]], O ], Work.Job, Result]
{
	def run = new Run
	{
		val listener = newListener
		def result =
		{
			// All bookkeeping must be drained before the result is requested.
			assume(reverseDeps.isEmpty)
			assume(forwardDeps.isEmpty)
			assume(calls.isEmpty)
			assume(!strategyRun.hasReady)
			if(failureReports.isEmpty)
				Right(completed(root))
			else
				Left(failureReports.toList)
		}
		def next(max: Int) =
		{
			val running = strategyRun.next(max)
			running.foreach(r => listener.running(r.source))
			running
		}
		def isComplete = reverseDeps.isEmpty
		def hasPending = strategyRun.hasReady || !forwardDeps.isEmpty
		/** Records the outcome of a finished unit of work and updates the graph accordingly. */
		def complete[A](work: Work.Job[A], result: Either[Throwable,Result[A]]): Unit =
		{
			val task = work.source
			result match
			{
				case Left(err) =>
					failureReports += WorkFailure(task, err)
					listener.failed(task, err)
					retire(task, None)
					assert(failed.contains(task))
				case Right(value) =>
					success(task, value)
					assert(completed.contains(task) || (calls.isCalling(task) && !reverseDeps.isEmpty) || failed.contains(task))
			}
			assert(calls.isCalling(task) || !reverseDeps.contains(task))
			assert(!forwardDeps.contains(task))
		}
		private def newDepMap = new mutable.HashMap[Task[_], mutable.Set[Task[_]]]
		// reverseDeps(d) = tasks that depend on d; forwardDeps(t) = d's that t still waits on.
		private val reverseDeps = newDepMap
		private val forwardDeps = newDepMap
		private val calls = new CalledByMap
		private val completed = new ResultMap
		private val strategyRun = strategy.run
		private val failed = new mutable.HashSet[Task[_]]
		private val failureReports = new mutable.ArrayBuffer[WorkFailure[Task[_]]]
		// Initializer block: seed the graph with the root task.
		{
			val initialized = addGraph(root, root) // TODO: replace second root with something better? (it is ignored here anyway)
			assert(initialized)
		}
		/** Extracts the task's input from completed dependencies and hands it to the strategy as runnable work. */
		private def addReady[O](m: Task[O])
		{
			def add[I](m: ITask[I,O])
			{
				val input = Task.extract(m, completed)
				strategyRun.workReady(new Work(m, input))
				listener.runnable(m)
			}
			assert(!forwardDeps.contains(m), m)
			assert(reverseDeps.contains(m), m)
			assert(!completed.contains(m), m)
			assert(!calls.isCalling(m), m)
			assert(m.dependencies.forall(completed.contains), "Could not find result for dependency of ready task " + m)
			add(m: ITask[_,O])
		}
		// context called node
		/** Adds `node` (and transitively its dependencies) to the graph; returns false if it cannot run. */
		private def addGraph(node: Task[_], context: Task[_]): Boolean =
		{
			if(failed(node)) // node already failed
				false
			else if(calls.isCalling(node)) // node is waiting for a called task to complete, so we need to check for circular dependencies
			{
				if(calls.isCallerOf(node, context)) // if node called context, this is a circular dependency and is invalid
				{
					failureReports += WorkFailure(node, CircularDependency(node, context))
					false
				}
				else
					true
			}
			else if(reverseDeps.contains(node) || completed.contains(node)) // node is either already added and is waiting for dependencies to complete or it has completed
				true
			else // node has never been added
				newAdd(node, context)
		}
		/** First-time addition of `node`: either makes it ready immediately or records its pending dependencies. */
		private def newAdd(node: Task[_], context: Task[_]): Boolean =
		{
			val deps = node.dependencies.filter(dep => !completed.contains(dep))
			def finishAdding() =
			{
				listener.added(node)
				true
			}
			if(deps.isEmpty) // node is ready to be run
			{
				reverseDeps(node) = new mutable.HashSet[Task[_]]
				addReady(node)
				finishAdding()
			}
			else if(deps.forall(dep => addGraph(dep,context))) // node requires dependencies to be added successfully and will then wait for them to complete before running
			{
				for(dep <- node.dependencies if !(completed.contains(dep) || reverseDeps.contains(dep) || calls.isCalling(dep)))
					error("Invalid dependency state: (completed=" + completed.contains(dep) + ", reverse=" + reverseDeps.contains(dep) + ", calling=" + calls.isCalling(dep) + ") for " + dep)
				reverseDeps(node) = new mutable.HashSet[Task[_]]
				deps.foreach(dep => reverseDeps(dep) += node) // mark this node as depending on its dependencies
				forwardDeps(node) = mutable.HashSet(deps.toSeq : _*)
				finishAdding()
			}
			else // a dependency could not be added, so this node will fail as well.
			{
				failed += node
				false
			}
		}
		/** Finalizes `m` with a value (success) or None (failure/cancellation), then unwinds any pending call stack. */
		private def retire[O](m: Task[O], value: Option[O])
		{
			value match
			{
				case Some(v) => completed(m) = v // map the task to its value
				case None => failed += m // mark the task as failed. complete has already recorded the error message for the original cause
			}
			updateCurrentGraph(m, value.isDefined) // update forward and reverse dependency maps and propagate the change to depending tasks
			listener.completed(m, value)
			calls.remove(m) match // unwind the call stack
			{
				case Some(c) =>
					listener.called(c, m)
					retire(c, value)
				case None => ()
			}
		}
		/** Removes `m` from the dependency maps; makes dependents ready on success or cancels them on failure. */
		private def updateCurrentGraph[O](m: Task[O], success: Boolean)
		{
			if(!success)
			{
				// clear m from the forward dependency map
				// for each dependency d of m, remove m from the set of tasks that depend on d
				for(depSet <- forwardDeps.removeKey(m); dep <- depSet; reverseSet <- reverseDeps.get(dep))
					reverseSet -= m
			}
			// m is complete, so remove its entry from reverseDeps and update all tasks that depend on m
			for(mReverseDeps <- reverseDeps.removeKey(m); dependsOnM <- mReverseDeps)
			{
				if(success)
				{
					val on = forwardDeps(dependsOnM)
					on -= m // m has completed, so remove it from the set of tasks that must complete before 'on' can run
					if(on.isEmpty) // m was the last dependency of on, so make it runnable
					{
						forwardDeps.removeKey(dependsOnM)
						addReady(dependsOnM)
					}
				}
				else // cancel dependsOnM because dependency (m) failed
					retire(dependsOnM, None)
			}
		}
		/** Handles a successful computation: a Value retires the task; a NewTask pushes a continuation onto the call stack. */
		private def success[O](task: Task[O], value: Result[O]): Unit =
			value match
			{
				case NewTask(t) =>
					if(t == task) // a task returning itself would never complete
					{
						failureReports += WorkFailure(t, CircularDependency(t, task))
						retire(task, None)
					}
					else if(addGraph(t, task))
					{
						calls(t) = task
						listener.calling(task, t)
					}
					else
						retire(task, None)
				case Value(v) => retire(task, Some(v))
			}
	}
}
/** Reported when a task's continuation refers back to a task already on its calling stack. */
final case class CircularDependency(node: Task[_], context: Task[_])
	extends RuntimeException("Task " + context + " provided task " + node + " already in calling stack")
/** Tracks the "called by" relation created when a task's computation returns a new task (bind).
* `callMap` maps a called task to its caller; `calling` is the set of tasks currently waiting on a call. */
private final class CalledByMap extends NotNull
{
	private[this] val calling = new mutable.HashSet[Task[_]]
	private[this] val callMap = new mutable.HashMap[Task[_], Task[_]]
	/** Records that `by` is suspended waiting on `called`. */
	def update[O](called: Task[O], by: Task[O])
	{
		calling += by
		callMap(called) = by
	}
	/** Walks the caller chain from `frame` upward; true if `check` appears on it (used for cycle detection). */
	final def isCallerOf(check: Task[_], frame: Task[_]): Boolean =
	{
		if(check eq frame) true
		else
			callMap.get(frame) match
			{
				case Some(nextFrame) => isCallerOf(check, nextFrame)
				case None => false
			}
	}
	def isEmpty = calling.isEmpty && callMap.isEmpty
	def isCalled(task: Task[_]): Boolean = callMap.contains(task)
	def isCalling(caller: Task[_]): Boolean = calling(caller)
	/** Pops `called` from the map, returning its caller if one was suspended on it.
	* The cast is justified by `update`, which only pairs tasks of the same result type. */
	def remove[O](called: Task[O]): Option[Task[O]] =
		for(caller <- callMap.removeKey(called)) yield
		{
			calling -= caller
			caller.asInstanceOf[Task[O]]
		}
}
/** Mutable store of completed task results. The cast in `apply` is safe because `update` only pairs a Task[O] with an O. */
private final class ResultMap(private val map: mutable.HashMap[Task[_], Any]) extends Results
{
	def this() = this(new mutable.HashMap)
	def update[O](task: Task[O], value: O) { map(task) = value }
	def apply[O](task: Task[O]): O = map(task).asInstanceOf[O]
	def contains(task: Task[_]) = map.contains(task)
}
/** A task paired with its already-extracted input, ready for a worker to execute. Identity-based for use as a key. */
private final class Work[I,O](val source: ITask[I,O], input: I) extends Identity with NotNull
{
	final def apply = Task.compute(source, input)
}
private object Work
{
	/** Work viewed by result type only; the input type is hidden. */
	type Job[A] = Work[_,A]
}

View File

@ -0,0 +1,17 @@
//import metascala.HLists._
import xsbt.{HLists,Task}
import HLists._
import Task._
/** This test just verifies that the HList support compiles.*/
object TListCompileTest
{
	val n = Task(1)
	val s = Task("3")
	val t = Task(true)
	// Extract one element from a heterogeneous task list via pattern match.
	val mapped = (n :: s :: t :: TNil) map { case n :: s :: t :: HNil => n }
	// Bind produces a new TList-joined task from the extracted values.
	val bound = (n :: s :: t :: TNil) bind { case n :: s :: t :: HNil => (Task(n*4) :: Task("Hi " + t) :: TNil).join }
	val plusOne = mapped map { _ + 1 }
	// Fork a dynamic number of tasks based on a prior task's result.
	val forkN = plusOne bind { count => (0 until count) fork { i => Task(println(i)) } join }
}

View File

@ -0,0 +1,63 @@
import xsbt._
import org.scalacheck._
import Prop._
/** Properties checking that the runner rejects circular bind chains but allows repeated, noncircular references. */
object TaskRunnerCircularTest extends Properties("TaskRunner Circular")
{
	specify("Catches circular references", (intermediate: Int, workers: Int) =>
		(workers > 0 && intermediate >= 0) ==> checkCircularReferences(intermediate, workers)
	)
	/*specify("Check root complete", (intermediate: Int, workers: Int) =>
		(workers > 0 && intermediate >= 0) ==> checkRootComplete(intermediate, workers)
	)*/
	specify("Allows noncircular references", (intermediate: Int, workers: Int) =>
		(workers > 0 && intermediate >= 0) ==> allowedReference(intermediate, workers)
	)
	/** Rebinding back to `top` after it completed is legal; expects final result 0. */
	final def allowedReference(intermediate: Int, workers: Int) =
	{
		val top = Task(intermediate) named("top")
		def iterate(task: Task[Int]): Task[Int] =
			task bind { t =>
				if(t <= 0)
					top
				else
					iterate(Task(t-1) named (t-1).toString)
			}
		try { checkResult(TaskRunner(iterate(top), workers), 0) }
		catch { case e: CircularDependency => ("Unexpected exception: " + e) |: false }
	}
	/** Builds a chain whose innermost bind returns the outermost task, forming a cycle the runner must report. */
	final def checkCircularReferences(intermediate: Int, workers: Int) =
	{
		lazy val top = iterate(Task(intermediate) named"bottom", intermediate)
		def iterate(task: Task[Int], i: Int): Task[Int] =
		{
			lazy val it: Task[Int] =
				task bind { t =>
					if(t <= 0)
						top
					else
						iterate(Task(t-1) named (t-1).toString, i-1)
				} named("it_" + i)
			it
		}
		// Passes iff at least one reported failure is a CircularDependency.
		TaskRunner(top, workers).fold(_.exists(_.exception.isInstanceOf[CircularDependency]), x => false)
	}
	// Currently unused (corresponding specify is commented out above).
	final def checkRootComplete(intermediate: Int, workers: Int) =
	{
		val top = Task(intermediate)
		def iterate(task: Task[Int]): Task[Int] =
		{
			lazy val it: Task[Int] =
				task bind { t =>
					if(t <= 0)
						it
					else
						iterate(Task(t-1) named (t-1).toString)
				} named("it")
			it
		}
		try { TaskRunner(iterate(top), workers); false }
		catch { case e: CircularDependency => true }
	}
}

View File

@ -0,0 +1,32 @@
import xsbt._
import org.scalacheck._
import Prop._
import Task._
import Math.abs
/** Properties exercising fork/join/reduce combinators under varying worker counts. */
object TaskRunnerForkTest extends Properties("TaskRunner Fork")
{
	specify("fork m tasks and wait for all to complete", (m: Int, workers: Int) =>
		(workers > 0 && m >= 0) ==> {
			val values = (0 until m).toList
			// NOTE(review): the Prop built by checkResult is discarded and `true` is returned, so this only
			// verifies the run terminates; also the forked tasks yield Unit while `values` holds Ints — verify intent.
			checkResult(TaskRunner(values.fork(f => () ).join.map(_.toList),workers), values)
			true
		}
	)
	specify("Double join", (a: Int, b: Int, workers: Int) =>
		(workers > 0) ==> { runDoubleJoin(abs(a),abs(b),workers); true }
	)
	/** Joins b tasks inside each of a outer tasks, then joins the outer tasks; checks only for clean termination. */
	def runDoubleJoin(a: Int, b: Int, workers: Int)
	{
		def inner(i: Int) = List.range(0, b).map(j => Task(j) named(j.toString)).join.named("Join " + i)
		TaskRunner( List.range(0,a).map(inner).join.named("Outermost join"), workers)
	}
	specify("fork and reduce", (m: List[Int], workers: Int) => {
		(workers > 0 && !m.isEmpty) ==> {
			val expected = m.reduceLeft(_+_)
			// Parallel tree-reduce must agree with sequential reduceLeft (sum is associative).
			checkResult(TaskRunner( m.reduce(_ + _), workers), expected)
		}
	}
	)
}

View File

@ -0,0 +1,52 @@
import xsbt._
import org.scalacheck._
import Prop._
/** Core TaskRunner properties: plain tasks, static dependencies, map, chained map, and bind. */
object TaskRunnerSpec extends Properties("TaskRunner")
{
	specify("evaluates simple task", (i: Int, workers: Int) =>
		(workers > 0) ==> {
			("Workers: " + workers) |:
			checkResult(TaskRunner(Task(i), workers), i)
		}
	)
	specify("evaluates simple static graph", (i: Int, workers: Int) =>
		(workers > 0) ==> {
			("Workers: " + workers) |:
			{
				// Extra dependencies of unrelated types must run but not affect the result.
				def result = TaskRunner(Task(i) dependsOn(Task(false),Task("a")), workers)
				checkResult(result, i)
			}
		}
	)
	specify("evaluates simple mapped task", (i: Int, times: Int, workers: Int) =>
		(workers > 0) ==> {
			("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
			{
				def result = TaskRunner(Task(i).map(_*times), workers)
				checkResult(result, i*times)
			}
		}
	)
	specify("evaluates chained mapped task", (i: Int, times: Int, workers: Int) =>
		(workers > 0 && times >= 0) ==> {
			("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
			{
				// Folds `times` map steps onto one task; result should be i added `times` times.
				val initial = Task(0) map(identity[Int])
				def task = ( initial /: (0 until times) )( (t,ignore) => t.map(_ + i))
				checkResult(TaskRunner(task, workers), i*times)
			}
		}
	)
	specify("evaluates simple bind", (i: Int, times: Int, workers: Int) =>
		(workers > 0) ==> {
			("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
			{
				def result = TaskRunner(Task(i).bind(x => Task(x*times)), workers)
				checkResult(result, i*times)
			}
		}
	)
}

View File

@ -0,0 +1,42 @@
import xsbt._
import org.scalacheck._
import Prop._
/** Checks that an iterative bind chain computes Fibonacci numbers identically to a direct recursive computation. */
object TaskRunnerCallTest extends Properties("TaskRunner Call")
{
	specify("calculates fibonacci", (i: Int, workers: Int) =>
		(workers > 0 && i > 0) ==> {
			val f = fibDirect(i)
			("Workers: " + workers) |: ("i: " + i) |: ("fib(i): " + f) |:
			{
				def result = TaskRunner( fibTask(i), workers)
				("Result: " + result) |: (result == Right(f))
			}
		}
	)
	/** Builds fib(i) as a chain of bind continuations carrying (index, x1, x2) state. */
	final def fibTask(i: Int) =
	{
		require(i > 0)
		lazy val next: (Int,Int,Int) => Task[Int] =
			(index, x1, x2) =>
			{
				if(index == i)
					Task(x2)
				else
					iterate( (index+1, x2, x1+x2) )
			}
		def iterate(iteration: (Int,Int,Int)) = Task( iteration ) bind Function.tupled(next)
		iterate( (1, 0, 1) )
	}
	/** Reference implementation: iterative Fibonacci without tasks. */
	final def fibDirect(i: Int): Int =
	{
		require(i > 0)
		def build(index: Int, x1: Int, x2: Int): Int =
			if(index == i)
				x2
			else
				build(index+1, x2, x1+x2)
		build(1, 0, 1)
	}
}

View File

@ -0,0 +1,43 @@
import xsbt._
import org.scalacheck._
import Prop._
/** Checks a task-based parallel quicksort against java.util.Arrays.sort. */
object TaskRunnerSortTest extends Properties("TaskRunnerSort")
{
	specify("sort", (a: Array[Int], workers: Int) =>
		(workers > 0) ==> {
			val sorted = a.toArray
			java.util.Arrays.sort(sorted)
			("Workers: " + workers) |: ("Array: " + a.toList) |:
			{
				def result = TaskRunner( sort(a.toArray), workers)
				checkResult(result.right.map(_.toList), sorted.toList)
			}
		}
	)
	/** Sequential quicksort reference (first element as pivot); currently unused by the property above. */
	final def sortDirect(a: RandomAccessSeq[Int]): RandomAccessSeq[Int] =
	{
		if(a.length < 2)
			a
		else
		{
			val pivot = a(0)
			val (lt,gte) = a.projection.drop(1).partition(_ < pivot)
			sortDirect(lt) ++ List(pivot) ++ sortDirect(gte)
		}
	}
	/** Task-based quicksort: the two partitions are sorted as independent tasks and merged via the pair builder. */
	final def sort(a: RandomAccessSeq[Int]): Task[RandomAccessSeq[Int]] =
	{
		if(a.length < 2)
			Task(a)
		else
		{
			Task(a) bind { a =>
				val pivot = a(0)
				val (lt,gte) = a.projection.drop(1).partition(_ < pivot)
				(sort(lt), sort(gte)) map { (l,g) => l ++ List(pivot) ++ g }
			}
		}
	}
}

View File

@ -0,0 +1,29 @@
package xsbt
import org.scalacheck.Prop._
/** Test helper: builds a ScalaCheck Prop asserting that running a task graph yields `expected`.
* Failures have their stack traces printed for diagnosis. */
object checkResult
{
	def apply[T](run: => Either[List[WorkFailure[Task[_]]],T], expected: T) =
	{
		("Expected: " + expected) |:
		(try
		{
			val actual = run
			("Actual: " + actual) |:
			(actual match
			{
				case Right(a) => a == expected
				case Left(failures) =>
					failures.foreach(f => f.exception.printStackTrace)
					false
			})
		}
		catch
		{
			// NOTE(review): a bare `case e =>` in a catch matches all Throwables, including fatal errors;
			// acceptable for a test helper, but consider NonFatal if this is ever reused elsewhere.
			case e =>
				e.printStackTrace
				"Error in framework" |: false
		})
	}
}

View File

@ -0,0 +1,43 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import java.io.File
import java.net.{URI, URL, URLClassLoader}
/** This is a starting point for defining a custom ClassLoader. Override 'doLoadClass' to define
* loading a class that has not yet been loaded.*/
abstract class LoaderBase(urls: Seq[URL], parent: ClassLoader) extends URLClassLoader(urls.toArray, parent) with NotNull
{
	// A null parent is legitimate in plain Java, but this hierarchy always requires one.
	require(parent != null)
	@throws(classOf[ClassNotFoundException])
	override final def loadClass(className: String, resolve: Boolean): Class[_] =
	{
		val alreadyLoaded = findLoadedClass(className)
		val result = if(alreadyLoaded ne null) alreadyLoaded else doLoadClass(className)
		if(resolve)
			resolveClass(result)
		result
	}
	/** Provides the implementation of finding a class that has not yet been loaded.*/
	protected def doLoadClass(className: String): Class[_]
	/** Provides access to the default implementation of 'loadClass'.*/
	protected final def defaultLoadClass(className: String): Class[_] = super.loadClass(className, false)
}
/** Searches self first before delegating to the parent.*/
class SelfFirstLoader(classpath: Seq[URL], parent: ClassLoader) extends LoaderBase(classpath, parent)
{
	@throws(classOf[ClassNotFoundException])
	override final def doLoadClass(className: String): Class[_] =
	{
		// Fall back to the standard parent-first lookup only when this loader cannot find the class itself.
		try findClass(className)
		catch { case _: ClassNotFoundException => defaultLoadClass(className) }
	}
}

View File

@ -0,0 +1,15 @@
package xsbt
import metascala.HLists.{HCons => metaHCons, HList => metaHList, HNil => metaHNil}
object HLists extends HLists
// add an extractor to metascala.HLists and define aliases to the HList classes in the xsbt namespace
trait HLists extends NotNull
{
	// `::` extractor enables pattern matching heterogeneous lists: case a :: b :: HNil => ...
	object :: { def unapply[H,T<:HList](list: HCons[H,T]) = Some((list.head,list.tail)) }
	final val HNil = metaHNil
	final type ::[H, T <: HList] = metaHCons[H, T]
	final type HNil = metaHNil
	final type HList = metaHList
	final type HCons[H, T <: HList] = metaHCons[H, T]
}

View File

@ -0,0 +1,22 @@
package xsbt
import scala.collection.{mutable,immutable}
// immutable.HashSet is not suitable for multi-threaded access, so this
// implementation uses an underlying immutable.TreeHashMap, which is suitable
object TreeHashSet
{
	def apply[T](contents: T*) = new TreeHashSet(immutable.TreeHashMap( andUnit(contents) : _*))
	/** Pairs each element with Unit so a map can be used as the backing store. */
	def andUnit[T](contents: Iterable[T]) = contents.map(c => (c,()) ).toSeq
}
/** Immutable set backed by immutable.TreeHashMap (keys mapped to Unit), chosen because
* immutable.HashSet in this Scala version is not safe for multi-threaded access. */
final class TreeHashSet[T](backing: immutable.TreeHashMap[T,Unit]) extends immutable.Set[T]
{
	import TreeHashSet.andUnit
	override def contains(t: T) = backing.contains(t)
	override def ++(s: Iterable[T]) = new TreeHashSet(backing ++ andUnit(s))
	override def +(s: T) = ++( Seq(s) )
	override def -(s: T) = new TreeHashSet(backing - s)
	override def elements = backing.keys
	// NOTE(review): `empty[A]` takes a fresh type parameter rather than reusing T — presumably to satisfy
	// the Set API of this Scala version; confirm against the collection trait it overrides.
	override def empty[A] = TreeHashSet[A]()
	override def size = backing.size
}

Binary file not shown.

View File

@ -0,0 +1,18 @@
package xsbt
/** Helpers that turn thrown exceptions into values or re-throw them with an improved message. */
object ErrorHandling
{
	/** Evaluates `f`, capturing any Exception in a Left; errors still propagate. */
	def convert[T](f: => T): Either[Exception, T] =
		try { Right(f) } catch { case e: Exception => Left(e) }
	/** Evaluates `f`, capturing any Throwable in a Left. */
	def wideConvert[T](f: => T): Either[Throwable, T] =
		try { Right(f) } catch { case e => Left(e) } // TODO: restrict type of e
	/** Evaluates `f`, rethrowing any failure as a TranslatedException prefixed with `msg`. */
	def translate[T](msg: => String)(f: => T) =
		try { f }
		catch { case e => throw new TranslatedException(msg + e.toString, e) }
}
/** Wraps a cause with a clearer message; toString shows only the message, hiding the exception class name. */
final class TranslatedException private[xsbt](msg: String, cause: Throwable) extends RuntimeException(msg, cause)
{
	override def toString = msg
}

76
util/io/Hash.scala Normal file
View File

@ -0,0 +1,76 @@
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package xsbt
import java.io.{ByteArrayInputStream, File, InputStream}
/** Computes SHA-1 hashes of strings, files, and streams, and converts between byte arrays and hex strings. */
object Hash
{
	// Size of the read buffer used when digesting a stream.
	private val BufferSize = 8192
	/** Renders `bytes` as a lowercase hexadecimal string, two characters per byte. */
	def toHex(bytes: Array[Byte]): String =
	{
		val out = new StringBuilder(bytes.length * 2)
		var i = 0
		while(i < bytes.length)
		{
			val unsigned = bytes(i) & 0xFF // interpret the byte as 0..255
			out append hexDigit(((unsigned >>> 4) & 0x0F).asInstanceOf[Byte])
			out append hexDigit((unsigned & 0x0F).asInstanceOf[Byte])
			i += 1
		}
		out.toString
	}
	/** Parses a hex string (upper or lower case) back into bytes; the length must be even. */
	def fromHex(hex: String): Array[Byte] =
	{
		require((hex.length & 1) == 0, "Hex string must have length 2n.")
		val bytes = new Array[Byte](hex.length >> 1)
		var i = 0
		while(i < bytes.length)
		{
			val hi = hexValue(hex.charAt(2 * i))
			val lo = hexValue(hex.charAt(2 * i + 1))
			bytes(i) = ((hi << 4) | lo).asInstanceOf[Byte]
			i += 1
		}
		bytes
	}
	/** Calculates the SHA-1 hash of the given String.*/
	def apply(s: String): Array[Byte] = apply(new ByteArrayInputStream(s.getBytes("UTF-8")))
	/** Calculates the SHA-1 hash of the given file.*/
	def apply(file: File): Array[Byte] = OpenResource.fileInputStream(file)(apply)
	/** Calculates the SHA-1 hash of the given stream, closing it when finished.*/
	def apply(stream: InputStream): Array[Byte] =
	{
		import java.security.{MessageDigest, DigestInputStream}
		val digest = MessageDigest.getInstance("SHA") // "SHA" is the JCE alias for SHA-1
		try
		{
			val digestStream = new DigestInputStream(stream, digest)
			val buffer = new Array[Byte](BufferSize)
			// Reading through the DigestInputStream feeds every byte into the digest.
			while(digestStream.read(buffer) >= 0) {}
			digestStream.close()
			digest.digest
		}
		finally { stream.close() }
	}
	/** Maps a nibble value (0-15) to its lowercase hex character. */
	private def hexDigit(b: Byte): Char =
	{
		require(b >= 0 && b <= 15, "Byte " + b + " was not between 0 and 15")
		if(b < 10)
			('0'.asInstanceOf[Int] + b).asInstanceOf[Char]
		else
			('a'.asInstanceOf[Int] + (b - 10)).asInstanceOf[Char]
	}
	/** Maps a hex character (either case) to its value 0-15, failing on anything else. */
	private def hexValue(c: Char): Int =
		if(c >= '0' && c <= '9')
			c - '0'
		else if(c >= 'a' && c <= 'f')
			(c - 'a') + 10
		else if(c >= 'A' && c <= 'F')
			(c - 'A') + 10
		else
			throw new RuntimeException("Invalid hex character: '" + c + "'.")
}

109
util/io/OpenResource.scala Normal file
View File

@@ -0,0 +1,109 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
import java.io.{Closeable, File, FileInputStream, FileOutputStream, InputStream, OutputStream}
import java.io.{ByteArrayOutputStream, InputStreamReader, OutputStreamWriter}
import java.io.{BufferedReader, BufferedWriter, FileReader, FileWriter, Reader, Writer}
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import java.net.{URL, URISyntaxException}
import java.nio.charset.{Charset, CharsetDecoder, CharsetEncoder}
import java.nio.channels.FileChannel
import java.util.jar.{Attributes, JarEntry, JarFile, JarInputStream, JarOutputStream, Manifest}
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}
import ErrorHandling.translate
import OpenResource._
/** Basic file system helpers. */
object FileUtilities
{
	/** Creates the directory `dir` and any necessary parent directories.  It is an
	 * error if a non-directory file already exists at the path.  Succeeds if the
	 * directory already exists or is created concurrently by another thread or
	 * process.  Failures are reported as a TranslatedException via `translate`. */
	def createDirectory(dir: File): Unit =
		translate("Could not create directory " + dir + ": ")
		{
			if(dir.exists)
			{
				if(!dir.isDirectory)
					error("file exists and is not a directory.")
			}
			// mkdirs returns false on failure, but also when another thread or
			// process created the directory first; re-check isDirectory so a
			// concurrent creation is not misreported as an error.
			else if(!dir.mkdirs() && !dir.isDirectory)
				error("<unknown error>")
		}
}
/** Abstraction over acquiring a resource of type `T` from a `Source`, running an
 * action against it, and guaranteeing the resource is released afterwards. */
abstract class OpenResource[Source, T] extends NotNull
{
	/** Acquires the resource from `src`. */
	protected def open(src: Source): T
	/** Opens the resource from `src`, applies `f` to it, and always closes the
	 * resource, even when `f` throws. */
	def apply[R](src: Source)(f: T => R): R =
	{
		val opened = open(src)
		try f(opened)
		finally close(opened)
	}
	/** Releases the resource. */
	protected def close(out: T): Unit
}
import scala.reflect.{Manifest => SManifest}
/** An OpenResource that adapts a `Source` into a wrapper `T`, reporting wrap
 * failures with a message built from the simple class names of both types. */
abstract class WrapOpenResource[Source, T](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends OpenResource[Source, T]
{
	/** Human-readable simple class name for the type described by `m`. */
	protected def label[S](m: SManifest[S]) = m.erasure.getSimpleName
	/** Performs the actual wrapping. */
	protected def openImpl(source: Source): T
	protected final def open(source: Source): T =
	{
		val context = "Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": "
		translate(context) { openImpl(source) }
	}
}
/** An OpenResource keyed on a File that ensures the file's parent directory
 * exists before opening. */
trait OpenFile[T] extends OpenResource[File, T]
{
	/** Performs the actual open on `file`. */
	protected def openImpl(file: File): T
	protected final def open(file: File): T =
	{
		val parentDir = file.getParentFile
		// a null parent means the path has no parent component: nothing to create
		if(parentDir ne null)
			FileUtilities.createDirectory(parentDir)
		translate("Error opening " + file + ": ") { openImpl(file) }
	}
}
/** Factory methods producing OpenResource instances for common stream, reader,
 * channel, and archive types. */
object OpenResource
{
	// `wrap` variants build resources whose open is reported via WrapOpenResource's
	// "Error wrapping ..." message; the single-argument overload closes via Closeable.
	def wrap[Source, T<: Closeable](openF: Source => T)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): OpenResource[Source,T] =
		wrap(openF, _.close)
	def wrap[Source, T](openF: Source => T, closeF: T => Unit)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): OpenResource[Source,T] =
		new WrapOpenResource[Source, T]
		{
			def openImpl(source: Source) = openF(source)
			def close(t: T) = closeF(t)
		}
	// `resource` variants build plain OpenResources with no extra error translation
	def resource[Source, T <: Closeable](openF: Source => T): OpenResource[Source,T] =
		resource(openF, _.close)
	def resource[Source, T <: Closeable](openF: Source => T, closeF: T => Unit): OpenResource[Source,T] =
		new OpenResource[Source,T]
		{
			def open(s: Source) = openF(s)
			def close(s: T) = closeF(s)
		}
	// `file` variants build OpenFiles, which create the parent directory before opening
	def file[T <: Closeable](openF: File => T): OpenFile[T] = file(openF, _.close())
	def file[T](openF: File => T, closeF: T => Unit): OpenFile[T] =
		new OpenFile[T]
		{
			def openImpl(file: File) = openF(file)
			def close(t: T) = closeF(t)
		}
	def fileOutputStream(append: Boolean) = file(f => new FileOutputStream(f, append))
	def fileInputStream = file(f => new FileInputStream(f))
	def urlInputStream = resource( (u: URL) => translate("Error opening " + u + ": ")(u.openStream))
	def fileOutputChannel = file(f => new FileOutputStream(f).getChannel)
	def fileInputChannel = file(f => new FileInputStream(f).getChannel)
	def fileWriter(charset: Charset, append: Boolean) =
		file(f => new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)) )
	def fileReader(charset: Charset) = file(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)) )
	def jarFile(verify: Boolean) = file(f => new JarFile(f, verify), (_: JarFile).close())
	def zipFile = file(f => new ZipFile(f), (_: ZipFile).close())
	// opens an InputStreamReader over a (stream, charset) pair
	def streamReader = wrap{ (_: (InputStream, Charset)) match { case (in, charset) => new InputStreamReader(in, charset) } }
	def gzipInputStream = wrap( (in: InputStream) => new GZIPInputStream(in) )
	def zipInputStream = wrap( (in: InputStream) => new ZipInputStream(in))
	// closes via finish() rather than close() — presumably to flush the gzip trailer
	// while leaving the underlying stream open; confirm against callers
	def gzipOutputStream = wrap((out: OutputStream) => new GZIPOutputStream(out), (_: GZIPOutputStream).finish())
	def jarOutputStream = wrap( (out: OutputStream) => new JarOutputStream(out))
	def jarInputStream = wrap( (in: InputStream) => new JarInputStream(in))
	// opens the stream for a single entry within an already-open ZipFile
	def zipEntry(zip: ZipFile) = resource( (entry: ZipEntry) =>
		translate("Error opening " + entry.getName + " in " + zip + ": ") { zip.getInputStream(entry) } )
}

71
util/log/Logger.scala Normal file
View File

@@ -0,0 +1,71 @@
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package xsbt
/** Base interface for xsbt loggers: level-filtered message logging, stack-trace
 * tracing, success messages, and control events. */
abstract class Logger extends NotNull
{
	def getLevel: Level.Value
	def setLevel(newLevel: Level.Value)
	def enableTrace(flag: Boolean)
	def traceEnabled: Boolean
	/** True when messages at `level` pass the current level filter. */
	def atLevel(level: Level.Value) = getLevel.id <= level.id
	def trace(t: => Throwable): Unit
	final def debug(message: => String): Unit = log(Level.Debug, message)
	final def info(message: => String): Unit = log(Level.Info, message)
	final def warn(message: => String): Unit = log(Level.Warn, message)
	final def error(message: => String): Unit = log(Level.Error, message)
	def success(message: => String): Unit
	def log(level: Level.Value, message: => String): Unit
	def control(event: ControlEvent.Value, message: => String): Unit
	def logAll(events: Seq[LogEvent]): Unit
	/** Defined in terms of other methods in Logger and should not be called from them. */
	final def log(event: LogEvent)
	{
		// dispatch the recorded event to the corresponding Logger method
		event match
		{
			case l: Log => log(l.level, l.msg)
			case s: Success => success(s.msg)
			case t: Trace => trace(t.exception)
			case levelEvent: SetLevel => setLevel(levelEvent.newLevel)
			case traceEvent: SetTrace => enableTrace(traceEvent.enabled)
			case c: ControlEvent => control(c.event, c.msg)
		}
	}
}
/** A recorded log request that can be replayed against a Logger via Logger.log(LogEvent). */
sealed trait LogEvent extends NotNull
/** A success message; dispatched to Logger.success. */
final class Success(val msg: String) extends LogEvent
/** A plain message at the given level; dispatched to Logger.log(level, msg). */
final class Log(val level: Level.Value, val msg: String) extends LogEvent
/** An exception to trace; dispatched to Logger.trace. */
final class Trace(val exception: Throwable) extends LogEvent
/** A request to change the logger's level; dispatched to Logger.setLevel. */
final class SetLevel(val newLevel: Level.Value) extends LogEvent
/** A request to enable/disable tracing; dispatched to Logger.enableTrace. */
final class SetTrace(val enabled: Boolean) extends LogEvent
/** A control event with an accompanying message; dispatched to Logger.control. */
final class ControlEvent(val event: ControlEvent.Value, val msg: String) extends LogEvent
/** Enumerates the kinds of control events a Logger can receive. */
object ControlEvent extends Enumeration
{
	val Start, Header, Finish = Value
}
/** An enumeration defining the levels available for logging. A level includes all of the levels
 * with id larger than its own id. For example, Warn (id=3) includes Error (id=4).*/
object Level extends Enumeration with NotNull
{
	val Debug = Value(1, "debug")
	val Info = Value(2, "info")
	val Warn = Value(3, "warn")
	val Error = Value(4, "error")
	/** Defines the label to use for success messages. A success message is logged at the info level but
	 * uses this label. Because the label for levels is defined in this module, the success
	 * label is also defined here. */
	val SuccessLabel = "success"
	// added because elements was renamed to iterator in 2.8.0 nightly
	def levels = List(Debug, Info, Warn, Error)
	/** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */
	def apply(s: String) = levels.find(level => level.toString == s)
	/** Same as apply, defined for use in pattern matching. */
	private[xsbt] def unapply(s: String) = apply(s)
}