Merge pull request #1793 from sbt/wip/aether-resolver

Use Aether for resolving maven dependencies
This commit is contained in:
eugene yokota 2015-01-14 15:17:50 -05:00
commit cc3cea27e5
60 changed files with 2504 additions and 121 deletions

View File

@ -16,6 +16,8 @@ env:
- SCRIPTED_TEST="scripted compiler-project/*""
- SCRIPTED_TEST="scripted dependency-management/*1of2"
- SCRIPTED_TEST="scripted dependency-management/*2of2"
- SCRIPTED_TEST="mavenResolverPluginTest:scripted dependency-management/*1of2 project/transitive-plugins"
- SCRIPTED_TEST="mavenResolverPluginTest:scripted dependency-management/*2of2"
- SCRIPTED_TEST="scripted java/*"
- SCRIPTED_TEST="scripted package/*"
- SCRIPTED_TEST="scripted project/*1of2"

View File

@ -412,15 +412,26 @@ lazy val sbtProj = (project in sbtPath).
normalizedName := "sbt"
)
def scriptedTask: Initialize[InputTask[Unit]] = InputTask(scriptedSource(dir => (s: State) => scriptedParser(dir))) { result =>
(proguard in Proguard, fullClasspath in scriptedSbtProj in Test, scalaInstance in scriptedSbtProj, publishAll, scriptedSource, result) map {
(launcher, scriptedSbtClasspath, scriptedSbtInstance, _, sourcePath, args) =>
doScripted(launcher, scriptedSbtClasspath, scriptedSbtInstance, sourcePath, args)
}
lazy val mavenResolverPluginProj = (project in file("sbt-maven-resolver")).
dependsOn(sbtProj, ivyProj % "test->test").
settings(baseSettings: _*).
settings(
name := "sbt-maven-resolver",
libraryDependencies ++= aetherLibs,
sbtPlugin := true
)
def scriptedTask: Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => scriptedParser(dir)).parsed
publishAll.value
doScripted((proguard in Proguard).value, (fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value, scriptedSource.value, result, scriptedPrescripted.value)
}
def scriptedUnpublishedTask: Initialize[InputTask[Unit]] = InputTask(scriptedSource(dir => (s: State) => scriptedParser(dir))) { result =>
(proguard in Proguard, fullClasspath in scriptedSbtProj in Test, scalaInstance in scriptedSbtProj, scriptedSource, result) map doScripted
def scriptedUnpublishedTask: Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => scriptedParser(dir)).parsed
doScripted((proguard in Proguard).value, (fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value, scriptedSource.value, result, scriptedPrescripted.value)
}
lazy val publishAll = TaskKey[Unit]("publish-all")
@ -435,7 +446,7 @@ def allProjects = Seq(launchInterfaceProj, launchProj, testSamples, interfacePro
compileInterfaceProj, compileIncrementalProj, compilePersistProj, compilerProj,
compilerIntegrationProj, compilerIvyProj,
scriptedBaseProj, scriptedSbtProj, scriptedPluginProj,
actionsProj, commandProj, mainSettingsProj, mainProj, sbtProj)
actionsProj, commandProj, mainSettingsProj, mainProj, sbtProj, mavenResolverPluginProj)
def projectsWithMyProvided = allProjects.map(p => p.copy(configurations = (p.configurations.filter(_ != Provided)) :+ myProvided))
lazy val nonRoots = projectsWithMyProvided.map(p => LocalProject(p.id))
@ -444,13 +455,25 @@ def rootSettings = releaseSettings ++ fullDocSettings ++ LaunchProguard.settings
Util.publishPomSettings ++ otherRootSettings ++ proguardedLauncherSettings ++ Formatting.sbtFilesSettings ++
Transform.conscriptSettings(launchProj)
def otherRootSettings = Seq(
Scripted.scriptedPrescripted := { _ => },
Scripted.scripted <<= scriptedTask,
Scripted.scriptedUnpublished <<= scriptedUnpublishedTask,
Scripted.scriptedSource <<= (sourceDirectory in sbtProj) / "sbt-test",
publishAll := {
(publishLocal).all(ScopeFilter(inAnyProject)).value
}
)
) ++ inConfig(Scripted.MavenResolverPluginTest)(Seq(
Scripted.scripted <<= scriptedTask,
Scripted.scriptedUnpublished <<= scriptedUnpublishedTask,
Scripted.scriptedPrescripted := { f =>
val inj = f / "project" / "maven.sbt"
if (!inj.exists) {
IO.write(inj, """libraryDependencies += Defaults.sbtPluginExtra("org.scala-sbt" % "sbt-maven-resolver" % sbtVersion.value,
|sbtBinaryVersion.value, scalaBinaryVersion.value)""".stripMargin)
// sLog.value.info(s"""Injected project/maven.sbt to $f""")
}
}
))
lazy val docProjects: ScopeFilter = ScopeFilter(
inAnyProject -- inProjects(root, sbtProj, scriptedBaseProj, scriptedSbtProj, scriptedPluginProj),
inConfigurations(Compile)

View File

@ -0,0 +1,25 @@
package sbt.mavenint;
/**
* Extra properties we dump from Aether into the properties list.
*/
public class SbtPomExtraProperties {
public static final String MAVEN_PACKAGING_KEY = "sbt.pom.packaging";
public static final String SCALA_VERSION_KEY = "sbt.pom.scalaversion";
public static final String SBT_VERSION_KEY = "sbt.pom.sbtversion";
public static final String POM_INFO_KEY_PREFIX = "info.";
public static final String POM_SCALA_VERSION = "scalaVersion";
public static final String POM_SBT_VERSION = "sbtVersion";
public static final String POM_API_KEY = "info.apiURL";
public static final String LICENSE_COUNT_KEY = "license.count";
public static String makeLicenseName(int i) {
return "license." + i + ".name";
}
public static String makeLicenseUrl(int i) {
return "license." + i + ".url";
}
}
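As a hedged sketch (not part of this change), the `info.`-prefixed keys above surface as extra attributes on a resolved `ModuleID`, which is how `ModuleID.extraDependencyAttributes` and `APIMappings` elsewhere in this change consume them; the helper name `infoAttributes` is hypothetical:
import sbt.ModuleID
import sbt.mavenint.SbtPomExtraProperties
// Hypothetical helper: keep only the informational ("info."-prefixed) extra
// attributes, i.e. the ones excluded from dependency resolution.
def infoAttributes(m: ModuleID): Map[String, String] =
  m.extraAttributes.filter { case (k, _) => k.startsWith(SbtPomExtraProperties.POM_INFO_KEY_PREFIX) }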

View File

@ -26,6 +26,11 @@ import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor;
*/
object ReplaceMavenConfigurationMappings {
def addMappings(dd: DefaultDependencyDescriptor, scope: String, isOptional: Boolean) = {
val mapping = ReplaceMavenConfigurationMappings.REPLACEMENT_MAVEN_MAPPINGS.get(scope)
mapping.addMappingConfs(dd, isOptional)
}
val REPLACEMENT_MAVEN_MAPPINGS = {
// Here we copy paste from Ivy
val REPLACEMENT_MAPPINGS = new java.util.HashMap[String, PomModuleDescriptorBuilder.ConfMapper]

View File

@ -19,6 +19,8 @@ import org.apache.ivy.util.{ FileUtil, ChecksumHelper }
import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact }
private[sbt] object ConvertResolver {
import UpdateOptions.ResolverConverter
/**
* This class contains all the reflective lookups used in the
* checksum-friendly URL publishing shim.
@ -94,15 +96,25 @@ private[sbt] object ConvertResolver {
}
}
/** Converts the given sbt resolver into an Ivy resolver..*/
def apply(r: Resolver, settings: IvySettings, log: Logger) =
{
/** Converts the given sbt resolver into an Ivy resolver. */
@deprecated("0.13.8", "Use the variant with updateOptions")
def apply(r: Resolver, settings: IvySettings, log: Logger): DependencyResolver =
apply(r, settings, UpdateOptions(), log)
/** Converts the given sbt resolver into an Ivy resolver. */
def apply(r: Resolver, settings: IvySettings, updateOptions: UpdateOptions, log: Logger): DependencyResolver =
(updateOptions.resolverConverter orElse defaultConvert)((r, settings, log))
/** The default implementation of converter. */
lazy val defaultConvert: ResolverConverter = {
case (r, settings, log) =>
r match {
case repo: MavenRepository =>
{
val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern))
final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired {
def setPatterns() { // done this way for access to protected methods.
def setPatterns() {
// done this way for access to protected methods.
setArtifactPatterns(pattern)
setIvyPatterns(pattern)
}
@ -163,7 +175,7 @@ private[sbt] object ConvertResolver {
case repo: ChainedResolver => IvySbt.resolverChain(repo.name, repo.resolvers, false, settings, log)
case repo: RawRepository => repo.resolver
}
}
}
private sealed trait DescriptorRequired extends BasicResolver {
override def getDependency(dd: DependencyDescriptor, data: ResolveData) =

View File

@ -12,7 +12,9 @@ import org.apache.ivy.util.extendable.ExtendableItem
import java.io.{ File, InputStream }
import java.net.URL
import java.util.regex.Pattern
import sbt.mavenint.{ PomExtraDependencyAttributes, SbtPomExtraProperties }
@deprecated("0.13.8", "We now use an Aether-based pom parser.")
final class CustomPomParser(delegate: ModuleDescriptorParser, transform: (ModuleDescriptorParser, ModuleDescriptor) => ModuleDescriptor) extends ModuleDescriptorParser {
override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, validate: Boolean) =
transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, validate))
@ -26,21 +28,23 @@ final class CustomPomParser(delegate: ModuleDescriptorParser, transform: (Module
override def getType() = delegate.getType()
override def getMetadataArtifact(mrid: ModuleRevisionId, res: Resource) = delegate.getMetadataArtifact(mrid, res)
}
@deprecated("0.13.8", "We now use an Aether-based pom parser.")
object CustomPomParser {
// Evil hackery to override the default maven pom mappings.
ReplaceMavenConfigurationMappings.init()
/** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution.*/
val InfoKeyPrefix = "info."
val ApiURLKey = "info.apiURL"
val InfoKeyPrefix = SbtPomExtraProperties.POM_INFO_KEY_PREFIX
val ApiURLKey = SbtPomExtraProperties.POM_API_KEY
val SbtVersionKey = "sbtVersion"
val ScalaVersionKey = "scalaVersion"
val ExtraAttributesKey = "extraDependencyAttributes"
val SbtVersionKey = PomExtraDependencyAttributes.SbtVersionKey
val ScalaVersionKey = PomExtraDependencyAttributes.ScalaVersionKey
val ExtraAttributesKey = PomExtraDependencyAttributes.ExtraAttributesKey
private[this] val unqualifiedKeys = Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey)
// packagings that should be jars, but that Ivy doesn't handle as jars
// TODO - move this elsewhere.
val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit", "scala-jar")
val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform)
@ -123,46 +127,24 @@ object CustomPomParser {
}
private[this] def getDependencyExtra(m: Map[String, String]): Map[ModuleRevisionId, Map[String, String]] =
(m get ExtraAttributesKey) match {
case None => Map.empty
case Some(str) =>
def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true))
readDependencyExtra(str).map(processDep).toMap
}
PomExtraDependencyAttributes.getDependencyExtra(m)
def qualifiedExtra(item: ExtendableItem): Map[String, String] =
{
import collection.JavaConverters._
item.getQualifiedExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap
}
def qualifiedExtra(item: ExtendableItem): Map[String, String] = PomExtraDependencyAttributes.qualifiedExtra(item)
def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String, String] =
(qualifiedExtra(item) filterKeys { k => qualifiedIsExtra(k) == include })
def writeDependencyExtra(s: Seq[DependencyDescriptor]): Seq[String] =
s.flatMap { dd =>
val revId = dd.getDependencyRevisionId
if (filterCustomExtra(revId, include = true).isEmpty)
Nil
else
revId.encodeToString :: Nil
}
PomExtraDependencyAttributes.writeDependencyExtra(s)
// parses the sequence of dependencies with extra attribute information, with one dependency per line
def readDependencyExtra(s: String): Seq[ModuleRevisionId] =
LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode)
def readDependencyExtra(s: String): Seq[ModuleRevisionId] = PomExtraDependencyAttributes.readDependencyExtra(s)
private[this] val LinesP = Pattern.compile("(?m)^")
def qualifiedIsExtra(k: String): Boolean = k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey)
def qualifiedIsExtra(k: String): Boolean = PomExtraDependencyAttributes.qualifiedIsExtra(k)
// Reduces the id to exclude custom extra attributes
// This makes the id suitable as a key to associate a dependency parsed from a <dependency> element
// with the extra attributes from the <properties> section
def simplify(id: ModuleRevisionId): ModuleRevisionId =
{
import collection.JavaConverters._
ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, filterCustomExtra(id, include = false).asJava)
}
def simplify(id: ModuleRevisionId): ModuleRevisionId = PomExtraDependencyAttributes.simplify(id)
private[this] def addExtra(dep: DependencyDescriptor, extra: Map[ModuleRevisionId, Map[String, String]]): DependencyDescriptor =
{

View File

@ -14,19 +14,20 @@ import java.util.{ Collection, Collections => CS, Date }
import CS.singleton
import org.apache.ivy.Ivy
import org.apache.ivy.core.report.ResolveReport
import org.apache.ivy.core.{ IvyPatternHelper, LogOptions, IvyContext }
import org.apache.ivy.core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager, ModuleDescriptorWriter }
import org.apache.ivy.core.cache.{ ResolutionCacheManager, CacheMetadataOptions, DefaultRepositoryCacheManager, ModuleDescriptorWriter }
import org.apache.ivy.core.event.EventManager
import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact, DefaultArtifact, DefaultDependencyArtifactDescriptor, MDArtifact }
import org.apache.ivy.core.module.descriptor.{ DefaultDependencyDescriptor, DefaultModuleDescriptor, DependencyDescriptor, ModuleDescriptor, License }
import org.apache.ivy.core.module.descriptor.{ OverrideDependencyDescriptorMediator }
import org.apache.ivy.core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId }
import org.apache.ivy.core.resolve.{ IvyNode, ResolveData, ResolvedModuleRevision, ResolveEngine }
import org.apache.ivy.core.resolve._
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.core.sort.SortEngine
import org.apache.ivy.plugins.latest.{ LatestStrategy, LatestRevisionStrategy, ArtifactInfo }
import org.apache.ivy.plugins.matcher.PatternMatcher
import org.apache.ivy.plugins.parser.m2.PomModuleDescriptorParser
import org.apache.ivy.plugins.parser.m2.{ PomModuleDescriptorParser }
import org.apache.ivy.plugins.resolver.{ ChainResolver, DependencyResolver, BasicResolver }
import org.apache.ivy.plugins.resolver.util.{ HasLatestStrategy, ResolvedResource }
import org.apache.ivy.plugins.version.ExactVersionMatcher
@ -68,9 +69,11 @@ final class IvySbt(val configuration: IvyConfiguration) {
private lazy val settings: IvySettings =
{
val is = new IvySettings
is.setBaseDir(baseDirectory)
is.setCircularDependencyStrategy(configuration.updateOptions.circularDependencyLevel.ivyStrategy)
CustomPomParser.registerDefault
configuration match {
case e: ExternalIvyConfiguration =>
IvySbt.addResolvers(e.extraResolvers, is, configuration.log)
@ -104,6 +107,7 @@ final class IvySbt(val configuration: IvyConfiguration) {
super.bind()
}
}
i.setSettings(settings)
i.bind()
i.getLoggerEngine.pushLogger(new IvyLoggerInterface(configuration.log))
@ -284,7 +288,7 @@ private[sbt] object IvySbt {
def resolverChain(name: String, resolvers: Seq[Resolver], localOnly: Boolean, settings: IvySettings, log: Logger): DependencyResolver =
resolverChain(name, resolvers, localOnly, settings, UpdateOptions(), log)
def resolverChain(name: String, resolvers: Seq[Resolver], localOnly: Boolean, settings: IvySettings, updateOptions: UpdateOptions, log: Logger): DependencyResolver = {
def mapResolvers(rs: Seq[Resolver]) = rs.map(r => ConvertResolver(r, settings, log))
def mapResolvers(rs: Seq[Resolver]) = rs.map(r => ConvertResolver(r, settings, updateOptions, log))
val (projectResolvers, rest) = resolvers.partition(_.name == "inter-project")
if (projectResolvers.isEmpty) new ivyint.SbtChainResolver(name, mapResolvers(rest), settings, updateOptions, log)
else {

View File

@ -8,6 +8,9 @@
package sbt
import java.io.File
import sbt.mavenint.PomExtraDependencyAttributes
// Node needs to be renamed to XNode because the task subproject contains a Node type that will shadow
// scala.xml.Node when generating aggregated API documentation
import scala.xml.{ Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute }
@ -17,6 +20,7 @@ import org.apache.ivy.Ivy
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.core.module.descriptor.{ DependencyArtifactDescriptor, DependencyDescriptor, License, ModuleDescriptor, ExcludeRule }
import org.apache.ivy.plugins.resolver.{ ChainResolver, DependencyResolver, IBiblioResolver }
import ivyint.CustomRemoteMavenResolver
class MakePom(val log: Logger) {
@deprecated("Use `write(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, XNode => XNode, MavenRepository => Boolean, Boolean, File)` instead", "0.11.2")
@ -119,12 +123,12 @@ class MakePom(val log: Logger) {
def makeProperties(module: ModuleDescriptor, dependencies: Seq[DependencyDescriptor]): NodeSeq =
{
val extra = IvySbt.getExtraAttributes(module)
val depExtra = CustomPomParser.writeDependencyExtra(dependencies).mkString("\n")
val allExtra = if (depExtra.isEmpty) extra else extra.updated(CustomPomParser.ExtraAttributesKey, depExtra)
val depExtra = PomExtraDependencyAttributes.writeDependencyExtra(dependencies).mkString("\n")
val allExtra = if (depExtra.isEmpty) extra else extra.updated(PomExtraDependencyAttributes.ExtraAttributesKey, depExtra)
if (allExtra.isEmpty) NodeSeq.Empty else makeProperties(allExtra)
}
def makeProperties(extra: Map[String, String]): NodeSeq = {
def _extraAttributes(k: String) = if (k == CustomPomParser.ExtraAttributesKey) xmlSpacePreserve else scala.xml.Null
def _extraAttributes(k: String) = if (k == PomExtraDependencyAttributes.ExtraAttributesKey) xmlSpacePreserve else scala.xml.Null
<properties> {
for ((key, value) <- extra) yield (<x>{ value }</x>).copy(label = key, attributes = _extraAttributes(key))
} </properties>
@ -330,6 +334,8 @@ class MakePom(val log: Logger) {
val repositories = if (includeAll) allResolvers(settings) else resolvers(settings.getDefaultResolver)
val mavenRepositories =
repositories.flatMap {
case m: CustomRemoteMavenResolver if m.repo.root != DefaultMavenRepository.root =>
MavenRepository(m.repo.name, m.repo.root) :: Nil
case m: IBiblioResolver if m.isM2compatible && m.getRoot != DefaultMavenRepository.root =>
MavenRepository(m.getName, m.getRoot) :: Nil
case _ => Nil

View File

@ -5,6 +5,8 @@ package sbt
import java.net.URL
import sbt.mavenint.SbtPomExtraProperties
final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String] = None, isChanging: Boolean = false, isTransitive: Boolean = true, isForce: Boolean = false, explicitArtifacts: Seq[Artifact] = Nil, exclusions: Seq[ExclusionRule] = Nil, extraAttributes: Map[String, String] = Map.empty, crossVersion: CrossVersion = CrossVersion.Disabled) {
override def toString: String =
organization + ":" + name + ":" + revision +
@ -15,7 +17,7 @@ final case class ModuleID(organization: String, name: String, revision: String,
def extraString: String = extraDependencyAttributes.map { case (k, v) => k + "=" + v } mkString ("(", ", ", ")")
/** Returns the extra attributes except for ones marked as information only (ones that typically would not be used for dependency resolution). */
def extraDependencyAttributes: Map[String, String] = extraAttributes.filterKeys(!_.startsWith(CustomPomParser.InfoKeyPrefix))
def extraDependencyAttributes: Map[String, String] = extraAttributes.filterKeys(!_.startsWith(SbtPomExtraProperties.POM_INFO_KEY_PREFIX))
@deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0")
def cross(v: Boolean): ModuleID = cross(if (v) CrossVersion.binary else CrossVersion.Disabled)

View File

@ -30,8 +30,23 @@ final class RawRepository(val resolver: DependencyResolver) extends Resolver {
}
}
sealed case class ChainedResolver(name: String, resolvers: Seq[Resolver]) extends Resolver
/** An instance of a remote maven repository. Note: This will use Aether/Maven to resolve artifacts. */
sealed case class MavenRepository(name: String, root: String) extends Resolver {
override def toString = name + ": " + root
def isCache: Boolean = false
}
/**
* An instance of a Maven cache directory. You cannot treat a cache directory the same as a remote repository because
* the metadata is different (see the Aether mailing-list discussion).
*/
final class MavenCache(name: String, val rootFile: File) extends MavenRepository(name, rootFile.toURI.toURL.toString) {
override val toString = "cache:" + name + ": " + rootFile.getAbsolutePath
override def isCache: Boolean = true
}
object MavenCache {
def apply(name: String, rootFile: File): MavenCache = new MavenCache(name, rootFile)
}
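A usage sketch, assuming a build definition on this sbt version; the resolver name "local-m2" is illustrative:
import java.io.File
// Illustrative: expose the local Maven repository as a cache-style resolver so its
// metadata is handled as a cache rather than as a remote repository.
resolvers += MavenCache("local-m2", new File(sys.props("user.home"), ".m2/repository"))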
final class Patterns(val ivyPatterns: Seq[String], val artifactPatterns: Seq[String], val isMavenCompatible: Boolean, val descriptorOptional: Boolean, val skipConsistencyCheck: Boolean) {
@ -334,8 +349,9 @@ object Resolver {
loadHomeFromSettings(() => new File(new File(System.getenv("M2_HOME")), "conf/settings.xml")) getOrElse
new File(Path.userHome, ".m2/repository")
}
def publishMavenLocal = Resolver.file("publish-m2-local", mavenLocalDir)
def mavenLocal = MavenRepository("Maven2 Local", mavenLocalDir.toURI.toString)
// TODO - should this just be the *exact* same as mavenLocal? probably...
def publishMavenLocal: MavenCache = new MavenCache("publish-m2-local", mavenLocalDir)
def mavenLocal: MavenRepository = new MavenCache("Maven2 Local", mavenLocalDir)
def defaultLocal = defaultUserFileRepository("local")
def defaultShared = defaultUserFileRepository("shared")
def defaultUserFileRepository(id: String) =

View File

@ -1,6 +1,8 @@
package sbt
import java.io.File
import org.apache.ivy.plugins.resolver.DependencyResolver
import org.apache.ivy.core.settings.IvySettings
/**
* Represents configurable options for update task.
@ -17,8 +19,9 @@ final class UpdateOptions private[sbt] (
/** If set to true, use consolidated resolution. */
val consolidatedResolution: Boolean,
/** If set to true, use cached resolution. */
val cachedResolution: Boolean) {
val cachedResolution: Boolean,
/** Extension point for an alternative resolver converter. */
val resolverConverter: UpdateOptions.ResolverConverter) {
def withCircularDependencyLevel(circularDependencyLevel: CircularDependencyLevel): UpdateOptions =
copy(circularDependencyLevel = circularDependencyLevel)
def withLatestSnapshots(latestSnapshots: Boolean): UpdateOptions =
@ -30,22 +33,28 @@ final class UpdateOptions private[sbt] (
def withCachedResolution(cachedResoluton: Boolean): UpdateOptions =
copy(cachedResolution = cachedResoluton,
consolidatedResolution = cachedResolution)
/** Extension point for an alternative resolver converter. */
def withResolverConverter(resolverConverter: UpdateOptions.ResolverConverter): UpdateOptions =
copy(resolverConverter = resolverConverter)
private[sbt] def copy(
circularDependencyLevel: CircularDependencyLevel = this.circularDependencyLevel,
latestSnapshots: Boolean = this.latestSnapshots,
consolidatedResolution: Boolean = this.consolidatedResolution,
cachedResolution: Boolean = this.cachedResolution): UpdateOptions =
cachedResolution: Boolean = this.cachedResolution,
resolverConverter: UpdateOptions.ResolverConverter = this.resolverConverter): UpdateOptions =
new UpdateOptions(circularDependencyLevel,
latestSnapshots,
consolidatedResolution,
cachedResolution)
cachedResolution,
resolverConverter)
override def equals(o: Any): Boolean = o match {
case o: UpdateOptions =>
this.circularDependencyLevel == o.circularDependencyLevel &&
this.latestSnapshots == o.latestSnapshots &&
this.cachedResolution == o.cachedResolution
this.cachedResolution == o.cachedResolution &&
this.resolverConverter == o.resolverConverter
case _ => false
}
@ -55,15 +64,19 @@ final class UpdateOptions private[sbt] (
hash = hash * 31 + this.circularDependencyLevel.##
hash = hash * 31 + this.latestSnapshots.##
hash = hash * 31 + this.cachedResolution.##
hash = hash * 31 + this.resolverConverter.##
hash
}
}
object UpdateOptions {
type ResolverConverter = PartialFunction[(Resolver, IvySettings, Logger), DependencyResolver]
def apply(): UpdateOptions =
new UpdateOptions(
circularDependencyLevel = CircularDependencyLevel.Warn,
latestSnapshots = false,
consolidatedResolution = false,
cachedResolution = false)
cachedResolution = false,
resolverConverter = PartialFunction.empty)
}
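A minimal sketch of the new extension point, assuming `sbt._` is in scope; `myConverter` and the Aether-backed resolver it would produce are hypothetical, and anything the partial function does not handle falls through to `ConvertResolver.defaultConvert`:
// Hypothetical converter: intercept remote Maven repositories and return a custom
// DependencyResolver; all other resolvers fall back to the default conversion.
val myConverter: UpdateOptions.ResolverConverter = {
  case (repo: MavenRepository, ivySettings, log) =>
    ??? // e.g. build an Aether-backed DependencyResolver for `repo` here
}
val options = UpdateOptions().withResolverConverter(myConverter)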

View File

@ -0,0 +1,11 @@
package sbt
package ivyint
import org.apache.ivy.plugins.resolver.DependencyResolver
// These are placeholder traits for sbt-aether-resolver
trait CustomMavenResolver extends DependencyResolver {
}
trait CustomRemoteMavenResolver extends CustomMavenResolver {
def repo: MavenRepository
}

View File

@ -12,7 +12,7 @@ import org.apache.ivy.core.resolve.{ ResolvedModuleRevision, ResolveData }
import org.apache.ivy.plugins.latest.LatestStrategy
import org.apache.ivy.plugins.repository.file.{ FileRepository => IFileRepository, FileResource }
import org.apache.ivy.plugins.repository.url.URLResource
import org.apache.ivy.plugins.resolver.{ ChainResolver, BasicResolver, DependencyResolver }
import org.apache.ivy.plugins.resolver._
import org.apache.ivy.plugins.resolver.util.{ HasLatestStrategy, ResolvedResource }
import org.apache.ivy.util.{ Message, MessageLogger, StringUtils => IvyStringUtils }
@ -155,34 +155,39 @@ private[sbt] case class SbtChainResolver(
val sorted =
if (useLatest) (foundRevisions.sortBy {
case (rmr, resolver) =>
Message.warn(s"Sorrting results from $rmr, using ${rmr.getPublicationDate} and ${rmr.getDescriptor.getPublicationDate}")
// Just issue warning about issues with publication date, and fake one on it for now.
rmr.getDescriptor.getPublicationDate match {
case null =>
Option(rmr.getPublicationDate) orElse Option(rmr.getDescriptor.getPublicationDate) match {
case None =>
(resolver.findIvyFileRef(dd, data), rmr.getDescriptor) match {
case (null, _) =>
// In this instance, the dependency is specified by a direct URL or some other sort of "non-ivy" file
if (dd.isChanging)
Message.warn(s"Resolving a changing dependency (${rmr.getId}) with no ivy/pom file!, resolution order is undefined!")
0L
case (ivf, dmd: DefaultModuleDescriptor) =>
val lmd = new java.util.Date(ivf.getLastModified)
Message.debug(s"Getting no publication date from resolver: ${resolver} for ${rmr.getId}, setting to: ${lmd}")
dmd.setPublicationDate(lmd)
ivf.getLastModified
case _ =>
Message.warn(s"Getting null publication date from resolver: ${resolver} for ${rmr.getId}, resolution order is undefined!")
0L
}
case _ => // All other cases ok
}
rmr.getDescriptor.getPublicationDate match {
case null => 0L
case d => d.getTime
case Some(date) => // All other cases ok
date.getTime
}
}).reverse.headOption map {
case (rmr, resolver) =>
Message.warn(s"Choosing ${resolver} for ${rmr.getId}")
// Now that we know the real latest revision, let's force Ivy to use it
val artifactOpt = findFirstArtifactRef(rmr.getDescriptor, dd, data, resolver)
artifactOpt match {
case None if resolver.getName == "inter-project" => // do nothing
case None => throw new RuntimeException(s"\t${resolver.getName}: no ivy file nor artifact found for $rmr")
case None if resolver.isInstanceOf[CustomMavenResolver] =>
// do nothing for now....
// We want to see if the maven caching is sufficient and we do not need to duplicate within the ivy cache...
case None => throw new RuntimeException(s"\t${resolver.getName}: no ivy file nor artifact found for $rmr")
case Some(artifactRef) =>
val systemMd = toSystem(rmr.getDescriptor)
getRepositoryCacheManager.cacheModuleDescriptor(resolver, artifactRef,
@ -210,6 +215,9 @@ private[sbt] case class SbtChainResolver(
}
// Ivy seems not to want to use the module descriptor found at the latest resolver
private[this] def reparseModuleDescriptor(dd: DependencyDescriptor, data: ResolveData, resolver: DependencyResolver, rmr: ResolvedModuleRevision): ResolvedModuleRevision =
// TODO - Redownloading/parsing the ivy file is not really the best way to make this correct.
// We should figure out a better alternative, or directly attack the resolvers Ivy uses to
// give them correct behavior around -SNAPSHOT.
Option(resolver.findIvyFileRef(dd, data)) flatMap { ivyFile =>
ivyFile.getResource match {
case r: FileResource =>
@ -222,7 +230,10 @@ private[sbt] case class SbtChainResolver(
}
case _ => None
}
} getOrElse rmr
} getOrElse {
Message.warn(s"Unable to reparse ${dd.getDependencyRevisionId} from $resolver, using ${rmr.getPublicationDate}")
rmr
}
/** Ported from BasicResolver#findFirstArtifactRef. */
private[this] def findFirstArtifactRef(md: ModuleDescriptor, dd: DependencyDescriptor, data: ResolveData, resolver: DependencyResolver): Option[ResolvedResource] =
{

View File

@ -0,0 +1,111 @@
package sbt.mavenint
import java.util.Properties
import java.util.regex.Pattern
import org.apache.ivy.core.module.descriptor.DependencyDescriptor
import org.apache.ivy.core.module.id.ModuleRevisionId
import org.apache.ivy.util.extendable.ExtendableItem
/**
* This class contains all the logic for dealing with the extra attributes in pom files relating to extra attributes
* on dependency declarations.
*
* Specifically, if we have a dependency on an sbt plugin, there are two properties that need to propagate:
* - `sbtVersion`
* - `scalaVersion`
*
* These need to exist on the *dependency declaration*. Maven/Aether has no way to inject these into
* the <dependency> section of pom files, so we use Ivy's Extra attribute hackery to inject a lookup table
* of extra attributes by dependency id into POM files and later we read these back.
*/
object PomExtraDependencyAttributes {
val ExtraAttributesKey = "extraDependencyAttributes"
val SbtVersionKey = "sbtVersion"
val ScalaVersionKey = "scalaVersion"
/**
* Reads the extra dependency attributes out of a maven property.
* @param props The properties from an Aether resolution.
* @return
* A map of module id to extra dependency attributes associated with dependencies on that module.
*/
def readFromAether(props: java.util.Map[String, AnyRef]): Map[ModuleRevisionId, Map[String, String]] = {
import scala.collection.JavaConverters._
(props.asScala get ExtraAttributesKey) match {
case None => Map.empty
case Some(str) =>
def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true))
(for {
(id, props) <- readDependencyExtra(str.toString).map(processDep)
} yield id -> props).toMap
}
}
/**
* Mutates the `to` collection with the extra dependency attributes from the incoming pom properties list.
*
* @param from The properties directly off a maven POM file
* @param to The Aether properties where we can write whatever we want.
*
* TODO - maybe we can just parse this directly here. Note the `readFromAether` method uses
* whatever we set here.
*/
def transferDependencyExtraAttributes(from: Properties, to: java.util.Map[String, AnyRef]): Unit = {
Option(from.getProperty(ExtraAttributesKey, null)) match {
case Some(str) => to.put(ExtraAttributesKey, str)
case None =>
}
}
/**
* Reads the extra dependency information out of Ivy's notion of POM properties and returns
* the map of ID -> Extra Properties.
*/
def getDependencyExtra(m: Map[String, String]): Map[ModuleRevisionId, Map[String, String]] =
(m get ExtraAttributesKey) match {
case None => Map.empty
case Some(str) =>
def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true))
readDependencyExtra(str).map(processDep).toMap
}
def qualifiedExtra(item: ExtendableItem): Map[String, String] = {
import scala.collection.JavaConverters._
item.getQualifiedExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap
}
def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String, String] =
(qualifiedExtra(item) filterKeys { k => qualifiedIsExtra(k) == include })
def qualifiedIsExtra(k: String): Boolean = k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey)
// Reduces the id to exclude custom extra attributes
// This makes the id suitable as a key to associate a dependency parsed from a <dependency> element
// with the extra attributes from the <properties> section
def simplify(id: ModuleRevisionId): ModuleRevisionId = {
import scala.collection.JavaConverters._
ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, filterCustomExtra(id, include = false).asJava)
}
/** parses the sequence of dependencies with extra attribute information, with one dependency per line */
def readDependencyExtra(s: String): Seq[ModuleRevisionId] =
LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode)
private[this] val LinesP = Pattern.compile("(?m)^")
/**
* Creates the "extra" property values for DependencyDescriptors that can be written into a maven pom
* so we don't lose the information.
* @param s
* @return
*/
def writeDependencyExtra(s: Seq[DependencyDescriptor]): Seq[String] =
s.flatMap { dd =>
val revId = dd.getDependencyRevisionId
if (filterCustomExtra(revId, include = true).isEmpty)
Nil
else
revId.encodeToString :: Nil
}
}
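A hedged sketch of the round trip these helpers implement, where `deps` stands for a hypothetical sequence of plugin dependency descriptors: the extra attributes are encoded one module id per line into a single POM property, then decoded back into a lookup table keyed by the simplified id:
import org.apache.ivy.core.module.descriptor.DependencyDescriptor
import org.apache.ivy.core.module.id.ModuleRevisionId
import sbt.mavenint.PomExtraDependencyAttributes._
// Hypothetical round trip over `deps: Seq[DependencyDescriptor]`.
def roundTrip(deps: Seq[DependencyDescriptor]): Map[ModuleRevisionId, Map[String, String]] = {
  val encoded = writeDependencyExtra(deps).mkString("\n") // value stored under ExtraAttributesKey
  readDependencyExtra(encoded)
    .map(id => simplify(id) -> filterCustomExtra(id, include = true))
    .toMap
}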

View File

@ -4,6 +4,8 @@ import Path._, Configurations._
import java.io.File
import org.specs2._
import cross.CrossVersionUtil
import sbt.PublishConfiguration
import sbt.ivyint.SbtChainResolver
trait BaseIvySpecification extends Specification {
def currentBase: File = new File(".")
@ -12,6 +14,8 @@ trait BaseIvySpecification extends Specification {
def currentDependency: File = currentBase / "target" / "dependency"
def defaultModuleId: ModuleID = ModuleID("com.example", "foo", "0.1.0", Some("compile"))
lazy val log = ConsoleLogger()
def configurations = Seq(Compile, Test, Runtime)
def module(moduleId: ModuleID, deps: Seq[ModuleID], scalaFullVersion: Option[String],
uo: UpdateOptions = UpdateOptions()): IvySbt#Module = {
val ivyScala = scalaFullVersion map { fv =>
@ -28,21 +32,24 @@ trait BaseIvySpecification extends Specification {
module = moduleId,
moduleInfo = ModuleInfo("foo"),
dependencies = deps,
configurations = Seq(Compile, Test, Runtime),
configurations = configurations,
ivyScala = ivyScala)
val ivySbt = new IvySbt(mkIvyConfiguration(uo))
new ivySbt.Module(moduleSetting)
}
def resolvers: Seq[Resolver] = Seq(DefaultMavenRepository)
def chainResolver = ChainedResolver("sbt-chain", resolvers)
def mkIvyConfiguration(uo: UpdateOptions): IvyConfiguration = {
val paths = new IvyPaths(currentBase, Some(currentTarget))
val rs = Seq(DefaultMavenRepository)
val other = Nil
val moduleConfs = Seq(ModuleConfiguration("*", DefaultMavenRepository))
val moduleConfs = Seq(ModuleConfiguration("*", chainResolver))
val off = false
val check = Nil
val resCacheDir = currentTarget / "resolution-cache"
new InlineIvyConfiguration(paths, rs, other, moduleConfs, off, None, check, Some(resCacheDir), uo, log)
new InlineIvyConfiguration(paths, resolvers, other, moduleConfs, off, None, check, Some(resCacheDir), uo, log)
}
def ivyUpdateEither(module: IvySbt#Module): Either[UnresolvedWarning, UpdateReport] = {
@ -58,4 +65,18 @@ trait BaseIvySpecification extends Specification {
case Left(w) =>
throw w.resolveException
}
def mkPublishConfiguration(resolver: Resolver, artifacts: Map[Artifact, File]): PublishConfiguration = {
new PublishConfiguration(
ivyFile = None,
resolverName = resolver.name,
artifacts = artifacts,
checksums = Seq(),
logging = UpdateLogging.Full,
overwrite = true)
}
def ivyPublish(module: IvySbt#Module, config: PublishConfiguration) = {
IvyActions.publish(module, config, log)
}
}

View File

@ -156,6 +156,7 @@ object CacheIvy {
implicit def moduleConfIC: InputCache[ModuleConfiguration] = wrapIn
object L3 {
implicit def mavenCacheToHL = (m: MavenCache) => m.name :+: m.rootFile.getAbsolutePath :+: HNil
implicit def mavenRToHL = (m: MavenRepository) => m.name :+: m.root :+: HNil
implicit def fileRToHL = (r: FileRepository) => r.name :+: r.configuration :+: r.patterns :+: HNil
implicit def urlRToHL = (u: URLRepository) => u.name :+: u.patterns :+: HNil
@ -169,7 +170,7 @@ object CacheIvy {
implicit lazy val chainedIC: InputCache[ChainedResolver] = InputCache.lzy(wrapIn)
implicit lazy val resolverIC: InputCache[Resolver] =
unionInputCache[Resolver, ChainedResolver :+: JavaNet1Repository :+: MavenRepository :+: FileRepository :+: URLRepository :+: SshRepository :+: SftpRepository :+: RawRepository :+: HNil]
unionInputCache[Resolver, ChainedResolver :+: JavaNet1Repository :+: MavenRepository :+: MavenCache :+: FileRepository :+: URLRepository :+: SshRepository :+: SftpRepository :+: RawRepository :+: HNil]
implicit def moduleIC: InputCache[ModuleID] = wrapIn
implicitly[InputCache[Seq[Configuration]]]

View File

@ -3,6 +3,8 @@ package sbt
import java.io.File
import java.net.{ MalformedURLException, URL }
import sbt.mavenint.SbtPomExtraProperties
private[sbt] object APIMappings {
def extract(cp: Seq[Attributed[File]], log: Logger): Seq[(File, URL)] =
cp.flatMap(entry => extractFromEntry(entry, log))
@ -15,7 +17,7 @@ private[sbt] object APIMappings {
private[this] def extractFromID(entry: File, mid: ModuleID, log: Logger): Option[(File, URL)] =
for {
urlString <- mid.extraAttributes.get(CustomPomParser.ApiURLKey)
urlString <- mid.extraAttributes.get(SbtPomExtraProperties.POM_API_KEY)
u <- parseURL(urlString, entry, log)
} yield (entry, u)

View File

@ -6,6 +6,7 @@ package sbt
import Attributed.data
import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
import sbt.Compiler.InputsWithPrevious
import sbt.mavenint.{ PomExtraDependencyAttributes, SbtPomExtraProperties }
import xsbt.api.Discovery
import xsbti.compile.CompileOrder
import Project.{ inConfig, inScope, inTask, richInitialize, richInitializeTask, richTaskSessionVar }
@ -842,7 +843,7 @@ object Defaults extends BuildCommon {
}
def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID =
m.extra(CustomPomParser.SbtVersionKey -> sbtV, CustomPomParser.ScalaVersionKey -> scalaV).copy(crossVersion = CrossVersion.Disabled)
m.extra(PomExtraDependencyAttributes.SbtVersionKey -> sbtV, PomExtraDependencyAttributes.ScalaVersionKey -> scalaV).copy(crossVersion = CrossVersion.Disabled)
@deprecated("Use PluginDiscovery.writeDescriptor.", "0.13.2")
def writePluginsDescriptor(plugins: Set[String], dir: File): Seq[File] =
@ -1215,7 +1216,7 @@ object Classpaths {
private[sbt] def defaultProjectID: Initialize[ModuleID] = Def.setting {
val base = ModuleID(organization.value, moduleName.value, version.value).cross(crossVersion in projectID value).artifacts(artifacts.value: _*)
apiURL.value match {
case Some(u) if autoAPIMappings.value => base.extra(CustomPomParser.ApiURLKey -> u.toExternalForm)
case Some(u) if autoAPIMappings.value => base.extra(SbtPomExtraProperties.POM_API_KEY -> u.toExternalForm)
case _ => base
}
}

View File

@ -0,0 +1,44 @@
[@jsuereth]: https://github.com/jsuereth
[1676]: https://github.com/sbt/sbt/issues/1676
[1322]: https://github.com/sbt/sbt/issues/1322
[679]: https://github.com/sbt/sbt/issues/679
[647]: https://github.com/sbt/sbt/issues/647
[1616]: https://github.com/sbt/sbt/issues/1616
### Fixes with compatibility implications
### Improvements
### Maven resolver plugin
sbt 0.13.8 adds an extension point in the dependency resolution to customize Maven resolvers.
This allows us to write the sbt-maven-resolver auto plugin, which internally uses Eclipse Aether
to resolve Maven dependencies instead of Apache Ivy.
To enable this plugin, add the following to `project/maven.sbt` (or `project/plugin.sbt`; the file name doesn't matter):
libraryDependencies += Defaults.sbtPluginExtra("org.scala-sbt" % "sbt-maven-resolver" % sbtVersion.value,
sbtBinaryVersion.value, scalaBinaryVersion.value)
This will create a new `~/.ivy2/maven-cache` directory, which contains the Aether cache of files.
You may notice that some files will be re-downloaded for the new cache layout.
Additionally, sbt will now be able to fully construct
`maven-metadata.xml` files when publishing to remote repositories or when publishing to the local `~/.m2/repository`.
This should help erase many of the deficiencies encountered when using Maven and sbt together.
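For example (an illustrative `build.sbt` line, not taken from these notes), the local `~/.m2/repository` is now exposed through the cache-aware `MavenCache`-backed resolver:
// Illustrative: resolve against the local ~/.m2 repository via the new MavenCache resolver.
resolvers += Resolver.mavenLocal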
**Notes and known limitations**:
- sbt-maven-resolver requires sbt 0.13.8 and above.
- The current implementation does not support Ivy-style dynamic revisions, such as "2.10.+" or "latest.snapshot". This
is fixable, but the version range query and the Ivy-to-Maven version range translation code have not been migrated.
### Bug fixes
- sbt doesn't honor Maven's uniqueVersions (use sbt-maven-resolver to fix). [#1322][1322] by [@jsuereth][@jsuereth]
- sbt doesn't see new SNAPSHOT dependency versions in local maven repos (use withLatestSnapshots + sbt-maven-resolver to fix) [#321][321] by [@jsuereth][@jsuereth]
- Property in pom's version field results in wrong dependency resolution (use sbt-maven-resolver to fix). [#647][647] by [@jsuereth][@jsuereth]
- Maven local resolver with parent POM (use sbt-maven-resolver). [#1616][1616] by [@jsuereth][@jsuereth]
Possibly fixed, need verification:
- SNAPSHOT dependency not updated? [#1676][1676]
- Incorrect Maven Snapshot file resolution? [#679][679]

View File

@ -27,4 +27,41 @@ object Dependencies {
}
lazy val scalaXml = scala211Module("scala-xml", "1.0.1")
lazy val scalaParsers = scala211Module("scala-parser-combinators", "1.0.1")
// Maven-related dependency craziness
//val mvnEmbedder = "org.apache.maven" % "maven-embedder" % mvnVersion
val mvnWagonVersion = "2.4"
val mvnVersion = "3.2.3"
val aetherVersion = "1.0.1.v20141111"
val mvnAether = "org.apache.maven" % "maven-aether-provider" % mvnVersion
val aether = "org.eclipse.aether" % "aether" % aetherVersion
val aetherImpl = "org.eclipse.aether" % "aether-impl" % aetherVersion
val aetherUtil = "org.eclipse.aether" % "aether-util" % aetherVersion
val aetherTransportFile = "org.eclipse.aether" % "aether-transport-file" % aetherVersion
val aetherTransportWagon = "org.eclipse.aether" % "aether-transport-wagon" % aetherVersion
val aetherTransportHttp = "org.eclipse.aether" % "aether-transport-http" % aetherVersion
val aetherConnectorBasic = "org.eclipse.aether" % "aether-connector-basic" % aetherVersion
val sisuPlexus = ("org.eclipse.sisu" % "org.eclipse.sisu.plexus" % "0.3.0.M1").exclude("javax.enterprise", "cdi-api").exclude("com.google.code.findbugs", "jsr305")
val guice = "com.google.inject" % "guice" % "3.0"
val guava = "com.google.guava" % "guava" % "18.0"
val javaxInject = "javax.inject" % "javax.inject" % "1"
//val sisuGuice = ("org.eclipse.sisu" % "sisu-guice" % "3.1.0").classifier("no_aop").exclude("javax.enterprise", "cdi-api", )
/*
val mvnWagon = "org.apache.maven.wagon" % "wagon-http" % mvnWagonVersion
val mvnWagonProviderApi = "org.apache.maven.wagon" % "wagon-provider-api" % mvnWagonVersion
val mvnWagonLwHttp = "org.apache.maven.wagon" % "wagon-http-lightweight" % mvnWagonVersion
val mvnWagonFile = "org.apache.maven.wagon" % "wagon-file" % mvnWagonVersion
*/
def aetherLibs =
Seq(
guava,
javaxInject,
sisuPlexus,
aetherImpl,
aetherConnectorBasic,
mvnAether)
}

View File

@ -8,6 +8,9 @@ object Scripted {
lazy val scripted = InputKey[Unit]("scripted")
lazy val scriptedUnpublished = InputKey[Unit]("scripted-unpublished", "Execute scripted without publishing SBT first. Saves you some time when only your test has changed.")
lazy val scriptedSource = SettingKey[File]("scripted-source")
lazy val scriptedPrescripted = TaskKey[File => Unit]("scripted-prescripted")
lazy val MavenResolverPluginTest = config("mavenResolverPluginTest") extend Compile
import sbt.complete._
import DefaultParsers._
@ -55,14 +58,14 @@ object Scripted {
(token(Space) ~> (PagedIds | testIdAsGroup)).* map (_.flatten)
}
def doScripted(launcher: File, scriptedSbtClasspath: Seq[Attributed[File]], scriptedSbtInstance: ScalaInstance, sourcePath: File, args: Seq[String]) {
def doScripted(launcher: File, scriptedSbtClasspath: Seq[Attributed[File]], scriptedSbtInstance: ScalaInstance, sourcePath: File, args: Seq[String], prescripted: File => Unit) {
System.err.println(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}")
val noJLine = new classpath.FilteredLoader(scriptedSbtInstance.loader, "jline." :: Nil)
val loader = classpath.ClasspathUtilities.toLoader(scriptedSbtClasspath.files, noJLine)
val m = ModuleUtilities.getObject("sbt.test.ScriptedTests", loader)
val r = m.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]], classOf[File], classOf[Array[String]])
val r = m.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]], classOf[File], classOf[Array[String]], classOf[File => Unit])
val launcherVmOptions = Array("-XX:MaxPermSize=256M") // increased after a failure in scripted source-dependencies/macro
try { r.invoke(m, sourcePath, true: java.lang.Boolean, args.toArray[String], launcher, launcherVmOptions) }
try { r.invoke(m, sourcePath, true: java.lang.Boolean, args.toArray[String], launcher, launcherVmOptions, prescripted) }
catch { case ite: java.lang.reflect.InvocationTargetException => throw ite.getCause }
}

View File

@ -135,6 +135,7 @@ object %s {
sourceGenerators <+= generateKeywords map (x => Seq(x))
))
}
object Licensed {
lazy val notice = SettingKey[File]("notice")
lazy val extractLicenses = TaskKey[Seq[File]]("extract-licenses")

View File

@ -0,0 +1,560 @@
package org.apache.maven.repository.internal;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.maven.model.DependencyManagement;
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.License;
import org.apache.maven.model.Model;
import org.apache.maven.model.Prerequisites;
import org.apache.maven.model.Relocation;
import org.apache.maven.model.Repository;
import org.apache.maven.model.building.DefaultModelBuilderFactory;
import org.apache.maven.model.building.DefaultModelBuildingRequest;
import org.apache.maven.model.building.FileModelSource;
import org.apache.maven.model.building.ModelBuilder;
import org.apache.maven.model.building.ModelBuildingException;
import org.apache.maven.model.building.ModelBuildingRequest;
import org.apache.maven.model.building.ModelProblem;
import org.apache.maven.model.resolution.UnresolvableModelException;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.eclipse.aether.RepositoryException;
import org.eclipse.aether.RepositoryEvent.EventType;
import org.eclipse.aether.RepositoryEvent;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.RequestTrace;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.ArtifactProperties;
import org.eclipse.aether.artifact.ArtifactType;
import org.eclipse.aether.artifact.ArtifactTypeRegistry;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.artifact.DefaultArtifactType;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.Exclusion;
import org.eclipse.aether.impl.*;
import org.eclipse.aether.repository.WorkspaceRepository;
import org.eclipse.aether.resolution.ArtifactDescriptorException;
import org.eclipse.aether.resolution.ArtifactDescriptorPolicy;
import org.eclipse.aether.resolution.ArtifactDescriptorPolicyRequest;
import org.eclipse.aether.resolution.ArtifactDescriptorRequest;
import org.eclipse.aether.resolution.ArtifactDescriptorResult;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.VersionRequest;
import org.eclipse.aether.resolution.VersionResolutionException;
import org.eclipse.aether.resolution.VersionResult;
import org.eclipse.aether.spi.locator.Service;
import org.eclipse.aether.spi.locator.ServiceLocator;
import org.eclipse.aether.spi.log.Logger;
import org.eclipse.aether.spi.log.LoggerFactory;
import org.eclipse.aether.spi.log.NullLoggerFactory;
import org.eclipse.aether.transfer.ArtifactNotFoundException;
import sbt.mavenint.PomExtraDependencyAttributes;
import sbt.mavenint.SbtPomExtraProperties;
/**
* A hacked version of Maven's default artifact descriptor reader, which we use in place of the standard Aether adapter.
*
* This adds the following to the parsing of maven files:
*
* Additional properties:
*
* - `sbt.pom.packaging` - The pom.packaging value.
*
*
* @author Benjamin Bentmann
* @author Josh Suereth - Adapted for sbt
*/
@Named
@Component( role = ArtifactDescriptorReader.class )
public class SbtArtifactDescriptorReader
implements ArtifactDescriptorReader, Service
{
@SuppressWarnings( "unused" )
@Requirement( role = LoggerFactory.class )
private Logger logger = NullLoggerFactory.LOGGER;
@Requirement
private RemoteRepositoryManager remoteRepositoryManager;
@Requirement
private VersionResolver versionResolver;
@Requirement
private VersionRangeResolver versionRangeResolver;
@Requirement
private ArtifactResolver artifactResolver;
@Requirement
private RepositoryEventDispatcher repositoryEventDispatcher;
@Requirement
private ModelBuilder modelBuilder;
public SbtArtifactDescriptorReader()
{
// enable no-arg constructor
}
@Inject
SbtArtifactDescriptorReader( RemoteRepositoryManager remoteRepositoryManager, VersionResolver versionResolver,
ArtifactResolver artifactResolver, ModelBuilder modelBuilder,
RepositoryEventDispatcher repositoryEventDispatcher, LoggerFactory loggerFactory )
{
setRemoteRepositoryManager( remoteRepositoryManager );
setVersionResolver( versionResolver );
setArtifactResolver( artifactResolver );
setModelBuilder( modelBuilder );
setLoggerFactory( loggerFactory );
setRepositoryEventDispatcher( repositoryEventDispatcher );
}
public void initService( ServiceLocator locator )
{
setLoggerFactory(locator.getService(LoggerFactory.class));
setRemoteRepositoryManager(locator.getService(RemoteRepositoryManager.class));
setVersionResolver(locator.getService(VersionResolver.class));
setArtifactResolver(locator.getService(ArtifactResolver.class));
setRepositoryEventDispatcher(locator.getService(RepositoryEventDispatcher.class));
setVersionRangeResolver(locator.getService(VersionRangeResolver.class));
modelBuilder = locator.getService( ModelBuilder.class );
if ( modelBuilder == null )
{
setModelBuilder( new DefaultModelBuilderFactory().newInstance() );
}
}
public SbtArtifactDescriptorReader setLoggerFactory( LoggerFactory loggerFactory )
{
this.logger = NullLoggerFactory.getSafeLogger( loggerFactory, getClass() );
return this;
}
void setLogger( LoggerFactory loggerFactory )
{
// plexus support
setLoggerFactory( loggerFactory );
}
public SbtArtifactDescriptorReader setRemoteRepositoryManager( RemoteRepositoryManager remoteRepositoryManager )
{
if ( remoteRepositoryManager == null )
{
throw new IllegalArgumentException( "remote repository manager has not been specified" );
}
this.remoteRepositoryManager = remoteRepositoryManager;
return this;
}
public SbtArtifactDescriptorReader setVersionResolver( VersionResolver versionResolver )
{
if ( versionResolver == null )
{
throw new IllegalArgumentException( "version resolver has not been specified" );
}
this.versionResolver = versionResolver;
return this;
}
public SbtArtifactDescriptorReader setArtifactResolver( ArtifactResolver artifactResolver )
{
if ( artifactResolver == null )
{
throw new IllegalArgumentException( "artifact resolver has not been specified" );
}
this.artifactResolver = artifactResolver;
return this;
}
public SbtArtifactDescriptorReader setRepositoryEventDispatcher( RepositoryEventDispatcher repositoryEventDispatcher )
{
if ( repositoryEventDispatcher == null )
{
throw new IllegalArgumentException( "repository event dispatcher has not been specified" );
}
this.repositoryEventDispatcher = repositoryEventDispatcher;
return this;
}
public SbtArtifactDescriptorReader setModelBuilder( ModelBuilder modelBuilder )
{
if ( modelBuilder == null )
{
throw new IllegalArgumentException( "model builder has not been specified" );
}
this.modelBuilder = modelBuilder;
return this;
}
public ArtifactDescriptorResult readArtifactDescriptor( RepositorySystemSession session,
ArtifactDescriptorRequest request )
throws ArtifactDescriptorException
{
ArtifactDescriptorResult result = new ArtifactDescriptorResult( request );
Model model = loadPom( session, request, result );
if ( model != null )
{
ArtifactTypeRegistry stereotypes = session.getArtifactTypeRegistry();
for ( Repository r : model.getRepositories() )
{
result.addRepository( ArtifactDescriptorUtils.toRemoteRepository( r ) );
}
for ( org.apache.maven.model.Dependency dependency : model.getDependencies() )
{
result.addDependency( convert( dependency, stereotypes ) );
}
DependencyManagement mngt = model.getDependencyManagement();
if ( mngt != null )
{
for ( org.apache.maven.model.Dependency dependency : mngt.getDependencies() )
{
result.addManagedDependency( convert( dependency, stereotypes ) );
}
}
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Prerequisites prerequisites = model.getPrerequisites();
if ( prerequisites != null )
{
properties.put( "prerequisites.maven", prerequisites.getMaven() );
}
List<License> licenses = model.getLicenses();
properties.put( SbtPomExtraProperties.LICENSE_COUNT_KEY, licenses.size() );
for ( int i = 0; i < licenses.size(); i++ )
{
License license = licenses.get( i );
properties.put( SbtPomExtraProperties.makeLicenseName(i), license.getName() );
properties.put( SbtPomExtraProperties.makeLicenseUrl(i), license.getUrl() );
properties.put( "license." + i + ".comments", license.getComments() );
properties.put( "license." + i + ".distribution", license.getDistribution() );
}
// SBT ADDED - Here we push in the pom packaging type for Ivy expectations.
final String packaging =
(model.getPackaging() == null) ? "jar" : model.getPackaging();
properties.put(SbtPomExtraProperties.MAVEN_PACKAGING_KEY, packaging);
// SBT ADDED - Here we inject the sbt/scala version we parse out of the pom.
final Properties mprops = model.getProperties();
if(mprops.containsKey(SbtPomExtraProperties.POM_SBT_VERSION)) {
final String sbtVersion = mprops.getProperty(SbtPomExtraProperties.POM_SBT_VERSION);
properties.put(SbtPomExtraProperties.SBT_VERSION_KEY, sbtVersion);
}
if(mprops.containsKey(SbtPomExtraProperties.POM_SCALA_VERSION)) {
properties.put(SbtPomExtraProperties.SCALA_VERSION_KEY, mprops.getProperty(SbtPomExtraProperties.POM_SCALA_VERSION));
}
// SBT-Added - Here we inject the additional dependency attributes (for transitive plugin resolution).
PomExtraDependencyAttributes.transferDependencyExtraAttributes(model.getProperties(), properties);
result.setProperties( properties);
setArtifactProperties( result, model );
}
return result;
}
// SBT FIX - We make sure that artifact properties are copied over here, so we can find sbt-plugin POM files.
public static Artifact toPomArtifact(Artifact artifact) {
Artifact pomArtifact = artifact;
if(artifact.getClassifier().length() > 0 || !"pom".equals(artifact.getExtension())) {
// TODO - only copy over sbt-important properties.
pomArtifact = new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "pom", artifact.getVersion()).setProperties(artifact.getProperties());
}
return pomArtifact;
}
private Model loadPom( RepositorySystemSession session, ArtifactDescriptorRequest request,
ArtifactDescriptorResult result )
throws ArtifactDescriptorException
{
RequestTrace trace = RequestTrace.newChild( request.getTrace(), request );
Set<String> visited = new LinkedHashSet<String>();
for ( Artifact artifact = request.getArtifact();; )
{
// SBT FIX - we need to use our own variant here to preserve extra attributes.
// Artifact pomArtifact = ArtifactDescriptorUtils.toPomArtifact( artifact );
Artifact pomArtifact = toPomArtifact(artifact);
try
{
VersionRequest versionRequest =
new VersionRequest( artifact, request.getRepositories(), request.getRequestContext() );
versionRequest.setTrace( trace );
VersionResult versionResult = versionResolver.resolveVersion( session, versionRequest );
artifact = artifact.setVersion( versionResult.getVersion() );
versionRequest =
new VersionRequest( pomArtifact, request.getRepositories(), request.getRequestContext() );
versionRequest.setTrace( trace );
versionResult = versionResolver.resolveVersion( session, versionRequest );
pomArtifact = pomArtifact.setVersion( versionResult.getVersion() );
}
catch ( VersionResolutionException e )
{
result.addException( e );
throw new ArtifactDescriptorException( result );
}
if ( !visited.add( artifact.getGroupId() + ':' + artifact.getArtifactId() + ':' + artifact.getBaseVersion() ) )
{
RepositoryException exception =
new RepositoryException( "Artifact relocations form a cycle: " + visited );
invalidDescriptor( session, trace, artifact, exception );
if ( ( getPolicy( session, artifact, request ) & ArtifactDescriptorPolicy.IGNORE_INVALID ) != 0 )
{
return null;
}
result.addException( exception );
throw new ArtifactDescriptorException( result );
}
ArtifactResult resolveResult;
try
{
ArtifactRequest resolveRequest =
new ArtifactRequest( pomArtifact, request.getRepositories(), request.getRequestContext() );
resolveRequest.setTrace( trace );
resolveResult = artifactResolver.resolveArtifact( session, resolveRequest );
pomArtifact = resolveResult.getArtifact();
result.setRepository( resolveResult.getRepository() );
}
catch ( ArtifactResolutionException e )
{
if ( e.getCause() instanceof ArtifactNotFoundException )
{
missingDescriptor( session, trace, artifact, (Exception) e.getCause() );
if ( ( getPolicy( session, artifact, request ) & ArtifactDescriptorPolicy.IGNORE_MISSING ) != 0 )
{
return null;
}
}
result.addException( e );
throw new ArtifactDescriptorException( result );
}
Model model;
try
{
ModelBuildingRequest modelRequest = new DefaultModelBuildingRequest();
modelRequest.setValidationLevel( ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL );
modelRequest.setProcessPlugins(false);
modelRequest.setTwoPhaseBuilding(false);
modelRequest.setSystemProperties(toProperties(session.getUserProperties(),
session.getSystemProperties()));
modelRequest.setModelCache(DefaultModelCache.newInstance(session));
modelRequest.setModelResolver(
new DefaultModelResolver(
session,
trace.newChild(modelRequest),
request.getRequestContext(),
artifactResolver,
versionRangeResolver,
remoteRepositoryManager,
request.getRepositories())
);
if ( resolveResult.getRepository() instanceof WorkspaceRepository )
{
modelRequest.setPomFile( pomArtifact.getFile() );
}
else
{
modelRequest.setModelSource( new FileModelSource( pomArtifact.getFile() ) );
}
model = modelBuilder.build( modelRequest ).getEffectiveModel();
}
catch ( ModelBuildingException e )
{
for ( ModelProblem problem : e.getProblems() )
{
if ( problem.getException() instanceof UnresolvableModelException )
{
result.addException( problem.getException() );
throw new ArtifactDescriptorException( result );
}
}
invalidDescriptor( session, trace, artifact, e );
if ( ( getPolicy( session, artifact, request ) & ArtifactDescriptorPolicy.IGNORE_INVALID ) != 0 )
{
return null;
}
result.addException( e );
throw new ArtifactDescriptorException( result );
}
Relocation relocation = getRelocation( model );
if ( relocation != null )
{
result.addRelocation( artifact );
artifact =
new RelocatedArtifact( artifact, relocation.getGroupId(), relocation.getArtifactId(),
relocation.getVersion() );
result.setArtifact( artifact );
}
else
{
return model;
}
}
}
private Properties toProperties( Map<String, String> dominant, Map<String, String> recessive )
{
Properties props = new Properties();
if ( recessive != null )
{
props.putAll( recessive );
}
if ( dominant != null )
{
props.putAll( dominant );
}
return props;
}
private Relocation getRelocation( Model model )
{
Relocation relocation = null;
DistributionManagement distMngt = model.getDistributionManagement();
if ( distMngt != null )
{
relocation = distMngt.getRelocation();
}
return relocation;
}
private void setArtifactProperties( ArtifactDescriptorResult result, Model model )
{
String downloadUrl = null;
DistributionManagement distMngt = model.getDistributionManagement();
if ( distMngt != null )
{
downloadUrl = distMngt.getDownloadUrl();
}
if ( downloadUrl != null && downloadUrl.length() > 0 )
{
Artifact artifact = result.getArtifact();
Map<String, String> props = new HashMap<String, String>( artifact.getProperties() );
props.put( ArtifactProperties.DOWNLOAD_URL, downloadUrl );
result.setArtifact( artifact.setProperties( props ) );
}
}
private Dependency convert( org.apache.maven.model.Dependency dependency, ArtifactTypeRegistry stereotypes )
{
ArtifactType stereotype = stereotypes.get( dependency.getType() );
if ( stereotype == null )
{
stereotype = new DefaultArtifactType( dependency.getType() );
}
boolean system = dependency.getSystemPath() != null && dependency.getSystemPath().length() > 0;
Map<String, String> props = null;
if ( system )
{
props = Collections.singletonMap( ArtifactProperties.LOCAL_PATH, dependency.getSystemPath() );
}
Artifact artifact =
new DefaultArtifact( dependency.getGroupId(), dependency.getArtifactId(), dependency.getClassifier(), null,
dependency.getVersion(), props, stereotype );
List<Exclusion> exclusions = new ArrayList<Exclusion>( dependency.getExclusions().size() );
for ( org.apache.maven.model.Exclusion exclusion : dependency.getExclusions() )
{
exclusions.add( convert( exclusion ) );
}
Dependency result = new Dependency( artifact, dependency.getScope(), dependency.isOptional(), exclusions );
return result;
}
private Exclusion convert( org.apache.maven.model.Exclusion exclusion )
{
return new Exclusion( exclusion.getGroupId(), exclusion.getArtifactId(), "*", "*" );
}
private void missingDescriptor( RepositorySystemSession session, RequestTrace trace, Artifact artifact,
Exception exception )
{
RepositoryEvent.Builder event = new RepositoryEvent.Builder( session, EventType.ARTIFACT_DESCRIPTOR_MISSING );
event.setTrace( trace );
event.setArtifact( artifact );
event.setException( exception );
repositoryEventDispatcher.dispatch( event.build() );
}
private void invalidDescriptor( RepositorySystemSession session, RequestTrace trace, Artifact artifact,
Exception exception )
{
RepositoryEvent.Builder event = new RepositoryEvent.Builder( session, EventType.ARTIFACT_DESCRIPTOR_INVALID );
event.setTrace( trace );
event.setArtifact( artifact );
event.setException( exception );
repositoryEventDispatcher.dispatch( event.build() );
}
private int getPolicy( RepositorySystemSession session, Artifact artifact, ArtifactDescriptorRequest request )
{
ArtifactDescriptorPolicy policy = session.getArtifactDescriptorPolicy();
if ( policy == null )
{
return ArtifactDescriptorPolicy.STRICT;
}
return policy.getPolicy( session, new ArtifactDescriptorPolicyRequest( artifact, request.getRequestContext() ) );
}
public void setVersionRangeResolver(final VersionRangeResolver versionRangeResolver) {
this.versionRangeResolver = versionRangeResolver;
}
}

View File

@ -0,0 +1,13 @@
package sbt
import UpdateOptions.ResolverConverter
import sbt.mavenint.{ MavenCacheRepositoryResolver, MavenRemoteRepositoryResolver }
object MavenResolverConverter {
val converter: ResolverConverter = {
case (cache: MavenCache, settings, log) =>
new MavenCacheRepositoryResolver(cache, settings)
case (repo: MavenRepository, settings, log) =>
new MavenRemoteRepositoryResolver(repo, settings)
}
}
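
The converter is a partial function from the (resolver, settings, log) triple to a Maven-backed Ivy resolver, and it only takes effect once installed on an UpdateOptions instance. A minimal sketch of that wiring, the same call the MavenResolverPlugin and the resolution spec below make:

    val mavenAwareOptions = UpdateOptions().withResolverConverter(MavenResolverConverter.converter)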

View File

@ -0,0 +1,48 @@
package sbt.mavenint
import org.apache.ivy.plugins.repository.Resource
import org.apache.ivy.plugins.repository.url.URLResource
import org.eclipse.aether.repository.RemoteRepository
import org.eclipse.aether.spi.connector.transport._
/**
* A bridge file transportation protocol which uses some Ivy/sbt mechanisms.
*/
class FileTransport(repository: RemoteRepository) extends AbstractTransporter {
class NotFoundException(msg: String) extends Exception(msg)
private def toURL(task: TransportTask): java.net.URL =
try new java.net.URL(s"${repository.getUrl}/${task.getLocation.toASCIIString}")
catch {
      case e: IllegalArgumentException => throw new IllegalArgumentException(s"URL (${task.getLocation}) is not absolute.", e)
}
private def toResource(task: TransportTask): Resource = new URLResource(toURL(task))
private def toFile(task: TransportTask): java.io.File =
new java.io.File(toURL(task).toURI)
override def implPeek(peek: PeekTask): Unit = {
if (!toFile(peek).exists()) throw new NotFoundException(s"Could not find ${peek.getLocation}")
}
override def implClose(): Unit = ()
override def implGet(out: GetTask): Unit = {
val from = toFile(out)
if (!from.exists()) throw new NotFoundException(s"Could not find ${out.getLocation}")
sbt.IO.copyFile(from, out.getDataFile, true)
}
override def implPut(put: PutTask): Unit = {
val to = toFile(put)
Option(put.getDataFile) match {
case Some(from) =>
sbt.IO.copyFile(from, to, true)
case None =>
        // Here it's most likely a SHA or something similar that we read from memory.
val in = put.newInputStream
try sbt.IO.transfer(in, to)
finally in.close()
}
}
override def classify(err: Throwable): Int =
err match {
// TODO - Have we caught enough exceptions here?
case _: NotFoundException => Transporter.ERROR_NOT_FOUND
case _ => Transporter.ERROR_OTHER
}
}
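
Both transports resolve a task's relative location against the repository root URL before moving any bytes. A rough sketch with hypothetical values (a file:///tmp/repo repository and an example pom path) of the URL a GetTask ends up reading:

    // hypothetical repository and artifact path, for illustration only
    val repo = new RemoteRepository.Builder("local-test", "default", "file:///tmp/repo").build()
    val url = new java.net.URL(s"${repo.getUrl}/com/example/foo/1.0/foo-1.0.pom")
    // => file:///tmp/repo/com/example/foo/1.0/foo-1.0.pom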

View File

@ -0,0 +1,47 @@
package sbt.mavenint
import org.apache.ivy.plugins.repository.Resource
import org.apache.ivy.plugins.repository.url.URLResource
import org.apache.ivy.util.url.URLHandlerRegistry
import org.eclipse.aether.repository.RemoteRepository
import org.eclipse.aether.spi.connector.transport._
/** Aether Http <-> Ivy Http adapter. Aether's is better, but Ivy's has configuration hooks in sbt. */
class HttpTransport(repository: RemoteRepository) extends AbstractTransporter {
class NotFoundException(msg: String) extends Exception(msg)
private def toURL(task: TransportTask): java.net.URL =
try new java.net.URL(s"${repository.getUrl}/${task.getLocation.toASCIIString}")
catch {
      case e: IllegalArgumentException => throw new IllegalArgumentException(s"URL (${task.getLocation}) is not absolute.", e)
}
private def toResource(task: TransportTask): Resource = new URLResource(toURL(task))
override def implPeek(peek: PeekTask): Unit = {
if (!toResource(peek).exists()) throw new NotFoundException(s"Could not find ${peek.getLocation}")
}
override def implClose(): Unit = ()
override def implGet(out: GetTask): Unit = {
if (!toResource(out).exists()) throw new NotFoundException(s"Could not find ${out.getLocation}")
URLHandlerRegistry.getDefault.download(toURL(out), out.getDataFile, null)
}
override def implPut(put: PutTask): Unit = {
val to = toURL(put)
Option(put.getDataFile) match {
case Some(file) => URLHandlerRegistry.getDefault.upload(file, to, null)
case None =>
// TODO - Ivy does not support uploading not from a file. This isn't very efficient in ANY way,
// so if we rewrite the URL handler for Ivy we should fix this as well.
sbt.IO.withTemporaryFile("tmp", "upload") { file =>
val in = put.newInputStream()
try sbt.IO.transfer(in, file)
finally in.close()
URLHandlerRegistry.getDefault.upload(file, to, null)
}
}
}
override def classify(err: Throwable): Int =
err match {
      // TODO - Have we caught all the important exceptions here?
case _: NotFoundException => Transporter.ERROR_NOT_FOUND
case _ => Transporter.ERROR_OTHER
}
}

View File

@ -0,0 +1,80 @@
package sbt
package mavenint
import org.apache.ivy.core.module.id.ModuleRevisionId
import org.apache.ivy.core.settings.IvySettings
import org.eclipse.aether.artifact.{ DefaultArtifact => AetherArtifact }
import org.eclipse.aether.installation.{ InstallRequest => AetherInstallRequest }
import org.eclipse.aether.metadata.{ DefaultMetadata, Metadata }
import org.eclipse.aether.resolution.{
ArtifactDescriptorRequest => AetherDescriptorRequest,
ArtifactRequest => AetherArtifactRequest,
MetadataRequest => AetherMetadataRequest
}
import sbt.ivyint.CustomMavenResolver
import scala.collection.JavaConverters._
/**
* A resolver instance which can resolve from a maven CACHE.
*
 * Note: This should never hit anything remote, as it just looks in the maven cache for things already resolved.
*/
class MavenCacheRepositoryResolver(val repo: MavenCache, settings: IvySettings)
extends MavenRepositoryResolver(settings) with CustomMavenResolver {
setName(repo.name)
protected val system = MavenRepositorySystemFactory.newRepositorySystemImpl
sbt.IO.createDirectory(repo.rootFile)
protected val session = MavenRepositorySystemFactory.newSessionImpl(system, repo.rootFile)
protected def setRepository(request: AetherMetadataRequest): AetherMetadataRequest = request
protected def addRepositories(request: AetherDescriptorRequest): AetherDescriptorRequest = request
protected def addRepositories(request: AetherArtifactRequest): AetherArtifactRequest = request
protected def publishArtifacts(artifacts: Seq[AetherArtifact]): Unit = {
val request = new AetherInstallRequest()
artifacts foreach request.addArtifact
system.install(session, request)
}
// TODO - Share this with non-local repository code, since it's MOSTLY the same.
protected def getPublicationTime(mrid: ModuleRevisionId): Option[Long] = {
val metadataRequest = new AetherMetadataRequest()
metadataRequest.setMetadata(
new DefaultMetadata(
mrid.getOrganisation,
mrid.getName,
mrid.getRevision,
MavenRepositoryResolver.MAVEN_METADATA_XML,
Metadata.Nature.RELEASE_OR_SNAPSHOT))
val metadataResultOpt =
try system.resolveMetadata(session, java.util.Arrays.asList(metadataRequest)).asScala.headOption
catch {
case e: org.eclipse.aether.resolution.ArtifactResolutionException => None
}
try metadataResultOpt match {
case Some(md) if md.isResolved =>
import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader
import org.codehaus.plexus.util.ReaderFactory
val readMetadata = {
val reader = ReaderFactory.newXmlReader(md.getMetadata.getFile)
try new MetadataXpp3Reader().read(reader, false)
finally reader.close()
}
val timestampOpt =
for {
v <- Option(readMetadata.getVersioning)
sp <- Option(v.getSnapshot)
ts <- Option(sp.getTimestamp)
t <- MavenRepositoryResolver.parseTimeString(ts)
} yield t
val lastUpdatedOpt =
for {
v <- Option(readMetadata.getVersioning)
lu <- Option(v.getLastUpdated)
d <- MavenRepositoryResolver.parseTimeString(lu)
} yield d
// TODO - Only look at timestamp *IF* the version is for a snapshot.
timestampOpt orElse lastUpdatedOpt
case _ => None
}
}
override def toString = s"${repo.name}: ${repo.root}"
}

View File

@ -0,0 +1,95 @@
package sbt
package mavenint
import org.apache.ivy.core.IvyContext
import org.apache.ivy.core.module.id.ModuleRevisionId
import org.apache.ivy.core.settings.IvySettings
import org.eclipse.aether.artifact.{ DefaultArtifact => AetherArtifact }
import org.eclipse.aether.deployment.{ DeployRequest => AetherDeployRequest }
import org.eclipse.aether.metadata.{ DefaultMetadata, Metadata }
import org.eclipse.aether.resolution.{
ArtifactDescriptorRequest => AetherDescriptorRequest,
ArtifactDescriptorResult => AetherDescriptorResult,
ArtifactRequest => AetherArtifactRequest,
MetadataRequest => AetherMetadataRequest
}
import sbt.ivyint.CustomRemoteMavenResolver
import scala.collection.JavaConverters._
/**
* A resolver instance which can resolve from a REMOTE maven repository.
*
 * Note: This uses a local metadata cache directory that is shared by all remote maven resolvers.
*
*/
class MavenRemoteRepositoryResolver(val repo: MavenRepository, settings: IvySettings)
extends MavenRepositoryResolver(settings) with CustomRemoteMavenResolver {
setName(repo.name)
override def toString = s"${repo.name}: ${repo.root}"
protected val system = MavenRepositorySystemFactory.newRepositorySystemImpl
// Note: All maven repository resolvers will use the SAME maven cache.
  // We're not sure whether it matters that the wrong resolver may then report finding an artifact.
// The key is not to duplicate files repeatedly across many caches.
private val localRepo = new java.io.File(settings.getDefaultIvyUserDir, s"maven-cache")
sbt.IO.createDirectory(localRepo)
protected val session = MavenRepositorySystemFactory.newSessionImpl(system, localRepo)
private val aetherRepository = {
new org.eclipse.aether.repository.RemoteRepository.Builder(repo.name, SbtRepositoryLayout.LAYOUT_NAME, repo.root).build()
}
// TODO - Check if isUseCacheOnly is used correctly.
private def isUseCacheOnly: Boolean =
Option(IvyContext.getContext).flatMap(x => Option(x.getResolveData)).flatMap(x => Option(x.getOptions)).map(_.isUseCacheOnly).getOrElse(false)
protected def addRepositories(request: AetherDescriptorRequest): AetherDescriptorRequest =
if (isUseCacheOnly) request else request.addRepository(aetherRepository)
protected def addRepositories(request: AetherArtifactRequest): AetherArtifactRequest =
if (isUseCacheOnly) request else request.addRepository(aetherRepository)
/** Actually publishes aether artifacts. */
protected def publishArtifacts(artifacts: Seq[AetherArtifact]): Unit = {
val request = new AetherDeployRequest()
request.setRepository(aetherRepository)
artifacts foreach request.addArtifact
system.deploy(session, request)
}
protected def getPublicationTime(mrid: ModuleRevisionId): Option[Long] = {
val metadataRequest = new AetherMetadataRequest()
metadataRequest.setMetadata(
new DefaultMetadata(
mrid.getOrganisation,
mrid.getName,
mrid.getRevision,
MavenRepositoryResolver.MAVEN_METADATA_XML,
Metadata.Nature.RELEASE_OR_SNAPSHOT))
if (!isUseCacheOnly) metadataRequest.setRepository(aetherRepository)
val metadataResultOpt =
try system.resolveMetadata(session, java.util.Arrays.asList(metadataRequest)).asScala.headOption
catch {
case e: org.eclipse.aether.resolution.ArtifactResolutionException => None
}
try metadataResultOpt match {
case Some(md) if md.isResolved =>
import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader
import org.codehaus.plexus.util.ReaderFactory
val readMetadata = {
val reader = ReaderFactory.newXmlReader(md.getMetadata.getFile)
try new MetadataXpp3Reader().read(reader, false)
finally reader.close()
}
val timestampOpt =
for {
v <- Option(readMetadata.getVersioning)
sp <- Option(v.getSnapshot)
ts <- Option(sp.getTimestamp)
t <- MavenRepositoryResolver.parseTimeString(ts)
} yield t
val lastUpdatedOpt =
for {
v <- Option(readMetadata.getVersioning)
lu <- Option(v.getLastUpdated)
d <- MavenRepositoryResolver.parseTimeString(lu)
} yield d
// TODO - Only look at timestamp *IF* the version is for a snapshot.
timestampOpt orElse lastUpdatedOpt
case _ => None
}
}
}
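
Once MavenResolverConverter is installed, any MavenRepository declared in a build is backed by this resolver. For example, the snapshots repository that the resolution spec below points at can be declared with the usual syntax:

    resolvers += "some-snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/"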

View File

@ -0,0 +1,519 @@
package sbt.mavenint
import java.io.File
import java.util.Date
import org.apache.ivy.core.IvyContext
import org.apache.ivy.core.cache.{ ArtifactOrigin, ModuleDescriptorWriter }
import org.apache.ivy.core.module.descriptor._
import org.apache.ivy.core.module.id.{ ModuleId, ModuleRevisionId }
import org.apache.ivy.core.report.{ ArtifactDownloadReport, DownloadReport, DownloadStatus, MetadataArtifactDownloadReport }
import org.apache.ivy.core.resolve.{ DownloadOptions, ResolveData, ResolvedModuleRevision }
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.matcher.ExactPatternMatcher
import org.apache.ivy.plugins.parser.m2.{ PomModuleDescriptorBuilder, ReplaceMavenConfigurationMappings }
import org.apache.ivy.plugins.parser.xml.XmlModuleDescriptorWriter
import org.apache.ivy.plugins.resolver.AbstractResolver
import org.apache.ivy.plugins.resolver.util.ResolvedResource
import org.apache.ivy.util.Message
import org.eclipse.aether.artifact.{ DefaultArtifact => AetherArtifact }
import org.eclipse.aether.deployment.{ DeployRequest => AetherDeployRequest }
import org.eclipse.aether.installation.{ InstallRequest => AetherInstallRequest }
import org.eclipse.aether.metadata.{ DefaultMetadata, Metadata }
import org.eclipse.aether.resolution.{ ArtifactDescriptorRequest => AetherDescriptorRequest, ArtifactDescriptorResult => AetherDescriptorResult, ArtifactRequest => AetherArtifactRequest, ArtifactResolutionException, MetadataRequest => AetherMetadataRequest }
import org.eclipse.aether.{ RepositorySystem, RepositorySystemSession }
import sbt.ivyint.{ CustomMavenResolver, CustomRemoteMavenResolver }
import sbt.mavenint.MavenRepositoryResolver.JarPackaging
import sbt.{ MavenCache, MavenRepository }
import scala.collection.JavaConverters._
object MavenRepositoryResolver {
val MAVEN_METADATA_XML = "maven-metadata.xml"
val CLASSIFIER_ATTRIBUTE = "e:classifier"
  // TODO - This may be duplicated in more than one location. We need to consolidate.
val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit", "scala-jar", "jar", "bundle")
object JarPackaging {
def unapply(in: String): Boolean = JarPackagings.contains(in)
}
// Example: 2014 12 18 09 33 56
val LAST_UPDATE_FORMAT = new java.text.SimpleDateFormat("yyyyMMddhhmmss")
def parseTimeString(in: String): Option[Long] =
try Some(LAST_UPDATE_FORMAT.parse(in).getTime)
catch {
case _: java.text.ParseException => None
}
val DEFAULT_ARTIFACT_CONFIGURATION = "master"
}
/**
* An abstract repository resolver which has the basic hooks for mapping from Maven (Aether) notions into Ivy notions.
*
 * This is used to implement local-cache resolution from ~/.m2 caches as well as resolution from remote repositories.
*/
abstract class MavenRepositoryResolver(settings: IvySettings) extends AbstractResolver {
/** Our instance of the aether repository system. */
protected val system: RepositorySystem
/**
* Our instance of the aether repository system session.
*
* TODO - We may want to tie this into an IvyContext.
*/
protected val session: RepositorySystemSession
/** Determine the publication time of a module. The mechanism may differ if the repository is remote vs. local. */
protected def getPublicationTime(mrid: ModuleRevisionId): Option[Long]
/** Inject necessary repositories into a descriptor request. */
protected def addRepositories(request: AetherDescriptorRequest): AetherDescriptorRequest
protected def addRepositories(request: AetherArtifactRequest): AetherArtifactRequest
/** Actually publishes aether artifacts. */
protected def publishArtifacts(artifacts: Seq[AetherArtifact]): Unit
  // TODO - deal with packaging here.
private def aetherArtifactIdFromMrid(mrid: ModuleRevisionId): String =
getSbtVersion(mrid) match {
case Some(sbt) => s"${mrid.getName}_sbt_$sbt"
case None => mrid.getName
}
private def aetherCoordsFromMrid(mrid: ModuleRevisionId): String =
s"${mrid.getOrganisation}:${aetherArtifactIdFromMrid(mrid)}:${mrid.getRevision}"
private def aetherCoordsFromMrid(mrid: ModuleRevisionId, packaging: String): String =
s"${mrid.getOrganisation}:${aetherArtifactIdFromMrid(mrid)}:$packaging:${mrid.getRevision}"
private def aetherCoordsFromMrid(mrid: ModuleRevisionId, packaging: String, extension: String): String =
s"${mrid.getOrganisation}:${aetherArtifactIdFromMrid(mrid)}:$extension:$packaging:${mrid.getRevision}"
  // Handles appending licenses to the module descriptor from the pom.
private def addLicenseInfo(md: DefaultModuleDescriptor, map: java.util.Map[String, AnyRef]) = {
val count = map.get(SbtPomExtraProperties.LICENSE_COUNT_KEY) match {
case null => 0
case x: java.lang.Integer => x.intValue
case x: String => x.toInt
case _ => 0
}
for {
i <- 0 until count
name <- Option(map.get(SbtPomExtraProperties.makeLicenseName(i))).map(_.toString)
url <- Option(map.get(SbtPomExtraProperties.makeLicenseUrl(i))).map(_.toString)
} md.addLicense(new License(name, url))
}
// This grabs the dependency for Ivy.
override def getDependency(dd: DependencyDescriptor, rd: ResolveData): ResolvedModuleRevision = {
val context = IvyContext.pushNewCopyContext
try {
// TODO - Check to see if we're asking for latest.* version, and if so, we should run a latest version query
// first and use that result to return the metadata/final module.
Message.debug(s"Requesting conf [${dd.getModuleConfigurations.mkString(",")}] from Aether module ${dd.getDependencyRevisionId} in resolver ${getName}")
val request = new AetherDescriptorRequest()
val coords = aetherCoordsFromMrid(dd.getDependencyRevisionId)
Message.debug(s"Aether about to resolve [$coords]...")
request.setArtifact(new AetherArtifact(coords, getArtifactProperties(dd.getDependencyRevisionId)))
addRepositories(request)
val result = system.readArtifactDescriptor(session, request)
val packaging = getPackagingFromPomProperties(result.getProperties)
Message.debug(s"Aether resolved ${dd.getDependencyId} w/ packaging ${packaging}")
// TODO - better pub date if we have no metadata.
val lastModifiedTime = getPublicationTime(dd.getDependencyRevisionId) getOrElse 0L
// Construct a new Ivy module descriptor
val desc: ModuleDescriptor = {
// TODO - Better detection of snapshot and handling latest.integration/latest.snapshot
val status =
if (dd.getDependencyRevisionId.getRevision.endsWith("-SNAPSHOT")) "integration"
else "release"
val md =
new DefaultModuleDescriptor(dd.getDependencyRevisionId, status, null /* pubDate */ , false)
//DefaultModuleDescriptor.newDefaultInstance(dd.getDependencyRevisionId)
// Here we add the standard configurations
for (config <- PomModuleDescriptorBuilder.MAVEN2_CONFIGURATIONS) {
md.addConfiguration(config)
}
// Here we look into the artifacts specified from the dependency descriptor *and* those that are defaulted,
// and append them to the appropriate configurations.
addArtifactsFromPom(dd, packaging, md, lastModifiedTime)
// Here we add dependencies.
addDependenciesFromAether(result, md)
// Here we use pom.xml Dependency management section to create Ivy dependency mediators.
addManagedDependenciesFromAether(result, md)
// TODO - Add excludes?
// Here we rip out license info.
addLicenseInfo(md, result.getProperties)
md.addExtraInfo(SbtPomExtraProperties.MAVEN_PACKAGING_KEY, packaging)
Message.debug(s"Setting publication date to ${new Date(lastModifiedTime)}")
// TODO - Figure out the differences between these items.
md.setPublicationDate(new Date(lastModifiedTime))
md.setLastModified(lastModifiedTime)
md.setResolvedPublicationDate(new Date(lastModifiedTime))
md.check()
// TODO - do we need the toSystem?
toSystem(md)
}
// Here we need to pretend we downloaded the pom.xml file
val pom = DefaultArtifact.newPomArtifact(dd.getDependencyRevisionId, new java.util.Date(lastModifiedTime))
val madr = new MetadataArtifactDownloadReport(pom)
madr.setSearched(true)
madr.setDownloadStatus(DownloadStatus.SUCCESSFUL) // TODO - Figure this things out for this report.
val rmr = new ResolvedModuleRevision(this, this, desc, madr, false /* Force */ )
// TODO - Here we cache the transformed pom.xml into an ivy.xml in the cache because ChainResolver will be looking at it.
// This doesn't appear to really work correctly.
// However, I think the chain resolver doesn't use this instance anyway. Ideally we don't put anything
// in the ivy cache, but this should be "ok".
getRepositoryCacheManager.originalToCachedModuleDescriptor(this,
null /* ivyRef. Just passed back to us. */ ,
pom,
rmr,
new ModuleDescriptorWriter() {
def write(originalMdResource: ResolvedResource, md: ModuleDescriptor, src: File, dest: File): Unit = {
// a basic ivy file is written containing default data
XmlModuleDescriptorWriter.write(md, dest);
}
}
)
rmr
} catch {
case e: org.eclipse.aether.resolution.ArtifactDescriptorException =>
Message.info(s"Failed to read descriptor ${dd} from ${getName}, ${e.getMessage}")
rd.getCurrentResolvedModuleRevision
case e: MavenResolutionException =>
Message.debug(s"Resolution Exception from ${getName}, ${e.getMessage}, returning: ${rd.getCurrentResolvedModuleRevision}")
rd.getCurrentResolvedModuleRevision
} finally IvyContext.popContext()
}
def getSbtVersion(dd: ModuleRevisionId): Option[String] =
Option(dd.getExtraAttribute(PomExtraDependencyAttributes.SbtVersionKey))
def getArtifactProperties(dd: ModuleRevisionId): java.util.Map[String, String] = {
val m = new java.util.HashMap[String, String]
Option(dd.getExtraAttribute(PomExtraDependencyAttributes.ScalaVersionKey)) foreach { sv =>
m.put(SbtPomExtraProperties.POM_SCALA_VERSION, sv)
}
getSbtVersion(dd) foreach { sv =>
m.put(SbtPomExtraProperties.POM_SBT_VERSION, sv)
}
m
}
final def checkJarArtifactExists(dd: DependencyDescriptor): Boolean = {
// TODO - We really want this to be as fast/efficient as possible!
val request = new AetherArtifactRequest()
val art = new AetherArtifact(
aetherCoordsFromMrid(dd.getDependencyRevisionId, "jar"),
getArtifactProperties(dd.getDependencyRevisionId))
request.setArtifact(art)
addRepositories(request)
try {
val result = system.resolveArtifact(session, request)
result.isResolved && !result.isMissing
} catch {
case e: ArtifactResolutionException =>
// Ignore, as we're just working around issues with pom.xml's with no jars or POM packaging
Message.debug(s"Could not find $art in ${getName}")
false
}
}
/** Determines which artifacts are associated with this maven module and appends them to the descriptor. */
def addArtifactsFromPom(dd: DependencyDescriptor, packaging: String, md: DefaultModuleDescriptor, lastModifiedTime: Long): Unit = {
Message.debug(s"Calculating artifacts for ${dd.getDependencyId} w/ packaging $packaging")
    // Here we add in additional artifact requests, which ALWAYS have to be explicit since
// Maven/Aether doesn't include all known artifacts in a pom.xml
// TODO - This does not appear to be working correctly.
if (dd.getAllDependencyArtifacts.isEmpty) {
val artifactId = s"${dd.getDependencyId.getName}-${dd.getDependencyRevisionId.getRevision}"
// Add the artifacts we know about the module
packaging match {
case "pom" =>
          // Here we have to attempt to download the JAR and see whether it exists; if not, we can punt.
// This is because sometimes pom-packaging attaches a JAR.
if (checkJarArtifactExists(dd)) {
val defaultArt =
new DefaultArtifact(md.getModuleRevisionId, new Date(lastModifiedTime), artifactId, packaging, "jar")
md.addArtifact(MavenRepositoryResolver.DEFAULT_ARTIFACT_CONFIGURATION, defaultArt)
}
case JarPackaging() =>
// Here we fail the resolution. This is an issue when pom.xml files exist with no JAR, which happens
// on maven central for some reason on old artifacts.
if (!checkJarArtifactExists(dd))
throw new MavenResolutionException(s"Failed to find JAR file associated with $dd")
// Assume for now everything else is a jar.
val defaultArt =
new DefaultArtifact(md.getModuleRevisionId, new Date(lastModifiedTime), artifactId, packaging, "jar")
// TODO - Unfortunately we have to try to download the JAR file HERE and then fail resolution if we cannot find it.
          // This is because sometimes a pom.xml exists with no JARs.
md.addArtifact(MavenRepositoryResolver.DEFAULT_ARTIFACT_CONFIGURATION, defaultArt)
case _ => // Ignore, we have no idea what this artifact is.
Message.warn(s"Not adding artifacts for resolution because we don't understand packaging: $packaging")
}
} else {
      // NOTE: this means that someone has requested specific artifacts from us. What we need to do is *only* download the
      // requested artifacts rather than the default "jar". What's odd is that this is almost ALWAYS the case,
      // but in some circumstances the above logic is used instead.
      // Additionally, we may want to somehow merge the "defined" artifacts from maven with the requested ones here, rather
      // than having completely separate logic. For now, this appears to work the same way it did before.
      // Since we aren't accurately guessing which maven files are meant to be included as artifacts ANYWAY, this
      // is probably the right way to go.
for (requestedArt <- dd.getAllDependencyArtifacts) {
getClassifier(requestedArt) match {
case None =>
// This is the default artifact. We do need to add this, and to the default configuration.
val defaultArt =
new DefaultArtifact(md.getModuleRevisionId, new Date(lastModifiedTime), requestedArt.getName, requestedArt.getType, requestedArt.getExt)
md.addArtifact(MavenRepositoryResolver.DEFAULT_ARTIFACT_CONFIGURATION, defaultArt)
case Some(scope) =>
Message.debug(s"Adding additional artifact in $scope, $requestedArt")
// TODO - more Extra attributes?
val mda =
new MDArtifact(
md,
requestedArt.getName,
requestedArt.getType,
requestedArt.getExt,
requestedArt.getUrl,
requestedArt.getExtraAttributes)
md.addArtifact(getConfiguration(scope), mda)
}
}
}
}
/** Adds the dependency mediators required based on the managed dependency instances from this pom. */
def addManagedDependenciesFromAether(result: AetherDescriptorResult, md: DefaultModuleDescriptor) {
for (d <- result.getManagedDependencies.asScala) {
md.addDependencyDescriptorMediator(
ModuleId.newInstance(d.getArtifact.getGroupId, d.getArtifact.getArtifactId),
ExactPatternMatcher.INSTANCE,
new OverrideDependencyDescriptorMediator(null, d.getArtifact.getVersion) {
override def mediate(dd: DependencyDescriptor): DependencyDescriptor = {
super.mediate(dd)
}
})
}
}
/** Adds the list of dependencies this artifact has on other artifacts. */
def addDependenciesFromAether(result: AetherDescriptorResult, md: DefaultModuleDescriptor) {
// First we construct a map of any extra attributes we must append to dependencies.
// This is necessary for transitive maven-based sbt plugin dependencies, where we need to
// attach the sbtVersion/scalaVersion to the dependency id otherwise we'll fail to resolve the
// dependency correctly.
val extraAttributes = PomExtraDependencyAttributes.readFromAether(result.getProperties)
for (d <- result.getDependencies.asScala) {
      // TODO - Is this correct for change detection? We should use the configured Ivy mechanism...
val isChanging = d.getArtifact.getVersion.endsWith("-SNAPSHOT")
val drid = {
val tmp = ModuleRevisionId.newInstance(d.getArtifact.getGroupId, d.getArtifact.getArtifactId, d.getArtifact.getVersion)
extraAttributes get tmp match {
case Some(props) =>
Message.debug(s"Found $tmp w/ extra attributes ${props.mkString(",")}")
ModuleRevisionId.newInstance(
d.getArtifact.getGroupId,
d.getArtifact.getArtifactId,
d.getArtifact.getVersion,
props.asJava
)
case _ => tmp
}
}
      // Note: The previous maven integration ALWAYS set force to true for dependencies. If we do not do this, for some
      // reason, Ivy will create dummy nodes when doing dependency mediation (e.g. dependencyManagement of one pom overrides the version of a dependency)
      // which was leading to "data not found" exceptions as Ivy would pick the correct IvyNode in the dependency tree but never load it with data....
val dd = new DefaultDependencyDescriptor(md, drid, /* force */ true, isChanging, true) {}
// TODO - Configuration mappings (are we grabbing scope correctly, or should the default not always be compile?)
val scope = Option(d.getScope).filterNot(_.isEmpty).getOrElse("compile")
val mapping = ReplaceMavenConfigurationMappings.addMappings(dd, scope, d.isOptional)
// TODO - include rules and exclude rules.
Message.debug(s"Adding maven transitive dependency ${md.getModuleRevisionId} -> ${dd}")
// TODO - Unify this borrowed Java code into something a bit friendlier.
// Now we add the artifact....
if ((d.getArtifact.getClassifier != null) || ((d.getArtifact.getExtension != null) && !("jar" == d.getArtifact.getExtension))) {
val tpe: String =
if (d.getArtifact.getExtension != null) d.getArtifact.getExtension
else "jar"
val ext: String = tpe match {
case "test-jar" => "jar"
case JarPackaging() => "jar"
case other => other
}
// Here we add the classifier, hopefully correctly...
val extraAtt = new java.util.HashMap[String, AnyRef]()
if (d.getArtifact.getClassifier != null) {
extraAtt.put("m:classifier", d.getArtifact.getClassifier)
}
val depArtifact: DefaultDependencyArtifactDescriptor =
new DefaultDependencyArtifactDescriptor(dd, dd.getDependencyId.getName, tpe, ext, null, extraAtt)
val optionalizedScope: String = if (d.isOptional) "optional" else scope
        // TODO - We may need to fix the configuration mappings here.
dd.addDependencyArtifact(optionalizedScope, depArtifact)
}
md.addDependency(dd)
}
}
// This method appears to be deprecated/unused in all of Ivy so we do not implement it.
override def findIvyFileRef(dd: DependencyDescriptor, rd: ResolveData): ResolvedResource = {
Message.error(s"Looking for ivy file ref, method not implemented! MavenRepositoryResolver($getName) will always return null.")
null
}
private def getPackagingFromPomProperties(props: java.util.Map[String, AnyRef]): String =
if (props.containsKey(SbtPomExtraProperties.MAVEN_PACKAGING_KEY))
props.get(SbtPomExtraProperties.MAVEN_PACKAGING_KEY).toString
else "jar"
override def download(artifacts: Array[Artifact], dopts: DownloadOptions): DownloadReport = {
// TODO - Status reports on download and possibly parallel downloads
val report = new DownloadReport
val requests =
for (a <- artifacts) yield {
val request = new AetherArtifactRequest
val aetherArt =
getClassifier(a) match {
case None | Some("") =>
new AetherArtifact(
aetherCoordsFromMrid(a.getModuleRevisionId),
getArtifactProperties(a.getModuleRevisionId))
case Some(other) => new AetherArtifact(
aetherCoordsFromMrid(a.getModuleRevisionId, other, a.getExt),
getArtifactProperties(a.getModuleRevisionId))
}
Message.debug(s"Requesting download of [$aetherArt]")
request.setArtifact(aetherArt)
addRepositories(request)
request
}
val (aetherResults, failed) =
try {
(system.resolveArtifacts(session, requests.toList.asJava).asScala, false)
} catch {
case e: org.eclipse.aether.resolution.ArtifactResolutionException =>
Message.error(s"Failed to resolve artifacts from ${getName}, ${e.getMessage}")
(e.getResults.asScala, true)
}
for ((result, art) <- aetherResults zip artifacts) {
Message.debug(s"Aether resolved artifact result: $result")
val adr = new ArtifactDownloadReport(art)
adr.setDownloadDetails(result.toString)
// TODO - Fill this out with a real estimate on time...
adr.setDownloadTimeMillis(0L)
      // TODO - what is artifact origin actually used for?
adr.setArtifactOrigin(new ArtifactOrigin(
art,
true,
getName))
if (result.isMissing) {
adr.setDownloadStatus(DownloadStatus.FAILED)
adr.setDownloadDetails(ArtifactDownloadReport.MISSING_ARTIFACT)
} else if (!result.isResolved) {
adr.setDownloadStatus(DownloadStatus.FAILED)
adr.setDownloadDetails(result.toString)
// TODO - we should set download status to NO in the event we don't care about an artifact...
} else {
val file = result.getArtifact.getFile
Message.debug(s"Succesffully downloaded: $file")
adr.setLocalFile(file)
adr.setSize(file.length)
adr.setDownloadStatus(DownloadStatus.SUCCESSFUL)
}
report.addArtifactReport(adr)
}
report
}
case class PublishTransaction(module: ModuleRevisionId, artifacts: Seq[(Artifact, File)])
private var currentTransaction: Option[PublishTransaction] = None
override def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean): Unit = {
currentTransaction match {
case Some(t) => throw new IllegalStateException(s"Publish Transaction already open for [$getName]")
case None => currentTransaction = Some(PublishTransaction(module, Nil))
}
}
override def abortPublishTransaction(): Unit = {
currentTransaction = None
}
def getClassifier(art: Artifact): Option[String] =
    // TODO - Do we need to look anywhere else?
Option(art.getExtraAttribute("classifier"))
def getClassifier(art: org.apache.ivy.core.module.descriptor.DependencyArtifactDescriptor): Option[String] =
art.getType match {
case "doc" | "javadoc" => Some("javadoc")
case "src" | "source" => Some("sources")
case "test-jar" | "test" => Some("tests")
case _ =>
// Look for extra attributes
art.getExtraAttribute(MavenRepositoryResolver.CLASSIFIER_ATTRIBUTE) match {
case null => None
case c => Some(c)
}
}
def getConfiguration(classifier: String): String =
classifier match {
// TODO - choice of configuration actually depends on whether or not the artifact is
// REQUESTED by the user, in which case it should be on master.
// Currently, we don't actually look for sources/javadoc/test artifacts at all,
// which means any artifact is in the master configuration, but we should
// fix this for better integration into the maven ecosystem from ivy.
//case "sources" => "sources"
//case "javadoc" => "javadoc"
case other => MavenRepositoryResolver.DEFAULT_ARTIFACT_CONFIGURATION
}
override def commitPublishTransaction(): Unit = {
// TODO - actually send all artifacts to aether
currentTransaction match {
case Some(t) =>
Message.debug(s"Publishing module ${t.module}, with artifact count = ${t.artifacts.size}")
val artifacts =
for ((art, file) <- t.artifacts) yield {
Message.debug(s" - Publishing $art (${art.getType})(${art.getExtraAttribute("classifier")}) in [${art.getConfigurations.mkString(",")}] from $file")
new AetherArtifact(
t.module.getOrganisation,
aetherArtifactIdFromMrid(t.module),
getClassifier(art).orNull,
art.getExt,
t.module.getRevision,
getArtifactProperties(t.module),
file
)
}
publishArtifacts(artifacts)
// TODO - Any kind of validity checking?
currentTransaction = None
case None => throw new IllegalStateException(s"Publish Transaction already open for [$getName]")
}
}
override def publish(art: Artifact, file: File, overwrite: Boolean): Unit = {
currentTransaction match {
case Some(t) =>
val allArts = t.artifacts ++ List(art -> file)
currentTransaction = Some(t.copy(artifacts = allArts))
case None =>
        throw new IllegalStateException("MavenRepositories require transactional publish")
}
}
override def equals(a: Any): Boolean =
a match {
case x: MavenRepositoryResolver => x.getName == getName
case _ => false
}
override def hashCode: Int = getName.hashCode
}
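
As a worked example of the coordinate mangling above: a dependency on the sbt-idea 1.6.0 plugin carrying sbtVersion 0.13 and scalaVersion 2.10 as extra attributes (the same module the resolution spec below uses) is requested from Aether as

    com.github.mpeltonen:sbt-idea_sbt_0.13:1.6.0

with the scala/sbt versions attached as artifact properties, which the sbt-plugin repository layout later maps back onto the _2.10_0.13 directory convention.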

View File

@ -0,0 +1,62 @@
package sbt.mavenint
import java.io.File
import org.apache.ivy.plugins.repository.Resource
import org.apache.ivy.plugins.repository.url.URLResource
import org.apache.ivy.util.Message
import org.apache.ivy.util.url.URLHandlerRegistry
import org.apache.maven.repository.internal.{ MavenRepositorySystemUtils, SbtArtifactDescriptorReader, SnapshotMetadataGeneratorFactory, VersionsMetadataGeneratorFactory }
import org.eclipse.aether.{ RepositorySystem, RepositorySystemSession }
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory
import org.eclipse.aether.impl.{ ArtifactDescriptorReader, DefaultServiceLocator, MetadataGeneratorFactory }
import org.eclipse.aether.repository.{ LocalRepository, RemoteRepository }
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory
import org.eclipse.aether.spi.connector.layout.RepositoryLayoutFactory
import org.eclipse.aether.spi.connector.transport.{ TransporterFactory, _ }
/** Helper methods for dealing with starting up Aether. */
object MavenRepositorySystemFactory {
def newRepositorySystemImpl: RepositorySystem = {
// For now we just log Aether instantiation issues. These should probably cause fatal errors.
val locator = MavenRepositorySystemUtils.newServiceLocator()
locator.setErrorHandler(new DefaultServiceLocator.ErrorHandler {
override def serviceCreationFailed(tpe: Class[_], impl: Class[_], exception: Throwable): Unit = {
Message.error(s"Failed to create $tpe, of class $impl")
}
})
// Here we register the Ivy <-> Aether transport bridge
locator.addService(classOf[TransporterFactory], classOf[MyTransportFactory])
    // This connects the download mechanism to our transports. Why is it needed? No clue.
locator.addService(classOf[RepositoryConnectorFactory], classOf[BasicRepositoryConnectorFactory])
// Plugins cause issues here, as their layout is super odd. Here we inject a new plugin layout
locator.addService(classOf[RepositoryLayoutFactory], classOf[SbtPluginLayoutFactory])
// Here we add the metadata services so aether will automatically add maven-metadata.xml files.
locator.addService(classOf[MetadataGeneratorFactory], classOf[SnapshotMetadataGeneratorFactory])
locator.addService(classOf[MetadataGeneratorFactory], classOf[VersionsMetadataGeneratorFactory])
// Add our hook for parsing pom.xml files.
locator.setService(classOf[ArtifactDescriptorReader], classOf[SbtArtifactDescriptorReader])
// Finally, use the DI to create our repository system.
locator.getService(classOf[RepositorySystem])
}
def newSessionImpl(system: RepositorySystem, localRepoDir: File): RepositorySystemSession = {
val session = MavenRepositorySystemUtils.newSession()
val localRepo = new LocalRepository(localRepoDir)
session setLocalRepositoryManager (system.newLocalRepositoryManager(session, localRepo))
// Here we set a descriptor policy that FORCES the pom.xml to exist, otherwise Ivy's resolution
// algorithm freaks out. What we could do is also do the ivy lame-thing of checking for a JAR
// instead of a pom.xml, but let's see if this is actually a problem in practice.
val descriptorPolicy = new org.eclipse.aether.util.repository.SimpleArtifactDescriptorPolicy(
/* ignoreMissing */ false, /* ignoreInvalid. */ true)
session.setArtifactDescriptorPolicy(descriptorPolicy)
session
}
def defaultLocalRepo: java.io.File = {
new java.io.File(s"${sys.props("user.home")}/.m2/repository")
}
}
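
Both resolver flavours above boot Aether through this factory. A standalone sketch against the conventional local repository location, mirroring the calls the cache and remote resolvers make:

    val system = MavenRepositorySystemFactory.newRepositorySystemImpl
    val session = MavenRepositorySystemFactory.newSessionImpl(system, MavenRepositorySystemFactory.defaultLocalRepo)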

View File

@ -0,0 +1,4 @@
package sbt.mavenint
/** An exception we can throw if we encounter issues. */
class MavenResolutionException(msg: String) extends RuntimeException(msg) {}

View File

@ -0,0 +1,16 @@
package sbt.mavenint
import org.eclipse.aether.RepositorySystemSession
import org.eclipse.aether.repository.RemoteRepository
import org.eclipse.aether.spi.connector.transport.{ Transporter, TransporterFactory }
/** Override aether's default transport with Ivy-ones. */
class MyTransportFactory extends TransporterFactory {
override def newInstance(session: RepositorySystemSession, repository: RemoteRepository): Transporter =
repository.getProtocol match {
case "http" | "https" => new HttpTransport(repository)
case "file" => new FileTransport(repository)
case other => throw new IllegalArgumentException(s"Unsupported transport protocol: $other")
}
override def getPriority: Float = 1.0f
}

View File

@ -0,0 +1,97 @@
package sbt.mavenint
import java.net.URI
import org.eclipse.aether.RepositorySystemSession
import org.eclipse.aether.artifact.Artifact
import org.eclipse.aether.metadata.Metadata
import org.eclipse.aether.repository.RemoteRepository
import org.eclipse.aether.spi.connector.layout.RepositoryLayout.Checksum
import org.eclipse.aether.spi.connector.layout.{ RepositoryLayout, RepositoryLayoutFactory }
import org.eclipse.aether.transfer.NoRepositoryLayoutException
import scala.util.matching.Regex
/** A factory which knows how to create repository layouts which can find sbt plugins. */
class SbtPluginLayoutFactory extends RepositoryLayoutFactory {
def newInstance(session: RepositorySystemSession, repository: RemoteRepository): RepositoryLayout = {
repository.getContentType match {
case SbtRepositoryLayout.LAYOUT_NAME =>
SbtRepositoryLayout
case _ => throw new NoRepositoryLayoutException(repository, "Not an sbt-plugin repository")
}
}
def getPriority: Float = 100.0f
}
object SbtRepositoryLayout extends RepositoryLayout {
val LAYOUT_NAME = "sbt-plugin"
  // getLocation is ALMOST the same for Metadata and Artifact... but the subtle differences are important.
def getLocation(artifact: Artifact, upload: Boolean): URI = {
val sbtVersion = Option(artifact.getProperties.get(SbtPomExtraProperties.POM_SBT_VERSION))
val scalaVersion = Option(artifact.getProperties.get(SbtPomExtraProperties.POM_SCALA_VERSION))
val path = new StringBuilder(128)
path.append(artifact.getGroupId.replace('.', '/')).append('/')
(sbtVersion zip scalaVersion).headOption match {
case Some((sbt, scala)) =>
if (artifact.getArtifactId contains "_sbt_") {
val SbtNameVersionSplit(name, sbt2) = artifact.getArtifactId
path.append(name).append('_').append(scala).append('_').append(sbt).append('/')
} else path.append(artifact.getArtifactId).append('_').append(scala).append('_').append(sbt).append('/')
case None =>
// TODO - Should we automatically append the _<scala-verison> here if it's not there? Probably not for now.
path.append(artifact.getArtifactId).append('/')
}
path.append(artifact.getBaseVersion).append('/')
sbtVersion match {
case Some(_) if artifact.getArtifactId contains "_sbt_" =>
val SbtNameVersionSplit(name, sbt2) = artifact.getArtifactId
path.append(name).append('-').append(artifact.getVersion)
      case _ => path.append(artifact.getArtifactId).append('-').append(artifact.getVersion) // also covers artifact ids without the _sbt_ marker
}
if (artifact.getClassifier != null && !artifact.getClassifier.trim.isEmpty) {
path.append("-").append(artifact.getClassifier)
}
if (artifact.getExtension.length > 0) {
path.append('.').append(artifact.getExtension)
}
URI.create(path.toString())
}
// Trickery for disambiguating sbt plugins in maven repositories.
val SbtNameVersionSplit = new Regex("(.*)_sbt_(.*)")
def getLocation(metadata: Metadata, upload: Boolean): URI = {
val sbtVersion = Option(metadata.getProperties.get(SbtPomExtraProperties.POM_SBT_VERSION))
val scalaVersion = Option(metadata.getProperties.get(SbtPomExtraProperties.POM_SCALA_VERSION))
val path = new StringBuilder(128)
path.append(metadata.getGroupId.replace('.', '/')).append('/')
(sbtVersion zip scalaVersion).headOption match {
case Some((sbt, scala)) =>
if (metadata.getArtifactId contains "_sbt_") {
val SbtNameVersionSplit(name, sbt2) = metadata.getArtifactId
path.append(name).append('_').append(scala).append('_').append(sbt).append('/')
} else path.append(metadata.getArtifactId).append('_').append(scala).append('_').append(sbt).append('/')
case None =>
        // TODO - Should we automatically append the _<scala-version> here? Probably not for now.
path.append(metadata.getArtifactId).append('/')
}
if (metadata.getVersion.length > 0)
path.append(metadata.getVersion).append('/')
path.append(metadata.getType)
URI.create(path.toString)
}
// TODO - This should be the same as configured from Ivy...
def getChecksums(artifact: Artifact, upload: Boolean, location: URI): java.util.List[Checksum] =
getChecksums(location)
def getChecksums(metadata: Metadata, upload: Boolean, location: URI): java.util.List[Checksum] =
getChecksums(location)
private def getChecksums(location: URI): java.util.List[Checksum] =
java.util.Arrays.asList(Checksum.forLocation(location, "SHA-1"), Checksum.forLocation(location, "MD5"))
}
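
Worked example of this layout, using values from the sbt-idea plugin exercised in the resolution spec below: an artifact with group com.github.mpeltonen, id sbt-idea_sbt_0.13, version 1.6.0 and a scala-version property of 2.10 is located at

    com/github/mpeltonen/sbt-idea_2.10_0.13/1.6.0/sbt-idea-1.6.0.jar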

View File

@ -0,0 +1,13 @@
package sbt
package plugins
import Keys._
object MavenResolverPlugin extends AutoPlugin {
override def requires = IvyPlugin
override def trigger = allRequirements
override lazy val projectSettings: Seq[Setting[_]] = Seq(
updateOptions := updateOptions.value.withResolverConverter(MavenResolverConverter.converter)
)
}
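
Since the plugin triggers on allRequirements, every project that has the IvyPlugin picks up the Maven resolver automatically. A build can still opt a project back out through the normal AutoPlugin mechanism; a hedged sketch with a hypothetical project name:

    lazy val legacyResolution = (project in file("legacy")).disablePlugins(plugins.MavenResolverPlugin)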

View File

@ -0,0 +1,291 @@
package sbt
import java.io.FileInputStream
import org.specs2._
import sbt.mavenint.PomExtraDependencyAttributes
class MavenResolutionSpec extends BaseIvySpecification {
def is = args(sequential = true) ^ s2""".stripMargin
This is a specification to check the maven resolution
Resolving a maven dependency should
handle sbt plugins $resolveSbtPlugins
use ivy for conflict resolution $resolveMajorConflicts
handle cross configuration deps $resolveCrossConfigurations
publish with maven-metadata $publishMavenMetadata
resolve transitive maven dependencies $resolveTransitiveMavenDependency
resolve intransitive maven dependencies $resolveIntransitiveMavenDependency
handle transitive configuration shifts $resolveTransitiveConfigurationMavenDependency
resolve source and doc $resolveSourceAndJavadoc
resolve nonstandard (jdk5) classifier $resolveNonstandardClassifier
Resolve pom artifact dependencies $resolvePomArtifactAndDependencies
Fail if JAR artifact is not found w/ POM $failIfMainArtifactMissing
Fail if POM.xml is not found $failIfPomMissing
resolve publication date for -SNAPSHOT $resolveSnapshotPubDate
""" // */
// TODO - test latest.integration and .+
def akkaActor = ModuleID("com.typesafe.akka", "akka-actor_2.11", "2.3.8", Some("compile"))
def akkaActorTestkit = ModuleID("com.typesafe.akka", "akka-testkit_2.11", "2.3.8", Some("test"))
def testngJdk5 = ModuleID("org.testng", "testng", "5.7", Some("compile")).classifier("jdk15")
def jmxri = ModuleID("com.sun.jmx", "jmxri", "1.2.1", Some("compile"))
def scalaLibraryAll = ModuleID("org.scala-lang", "scala-library-all", "2.11.4", Some("compile"))
def scalaCompiler = ModuleID("org.scala-lang", "scala-compiler", "2.8.1", Some("scala-tool->default(compile)"))
def scalaContinuationPlugin = ModuleID("org.scala-lang.plugins", "continuations", "2.8.1", Some("plugin->default(compile)"))
def sbtPlugin =
ModuleID("com.github.mpeltonen", "sbt-idea", "1.6.0", Some("compile")).
extra(PomExtraDependencyAttributes.SbtVersionKey -> "0.13", PomExtraDependencyAttributes.ScalaVersionKey -> "2.10").
copy(crossVersion = CrossVersion.Disabled)
def oldSbtPlugin =
ModuleID("com.github.mpeltonen", "sbt-idea", "1.6.0", Some("compile")).
extra(PomExtraDependencyAttributes.SbtVersionKey -> "0.12", PomExtraDependencyAttributes.ScalaVersionKey -> "2.9.2").
copy(crossVersion = CrossVersion.Disabled)
def majorConflictLib = ModuleID("com.joestelmach", "natty", "0.3", Some("compile"))
// TODO - This snapshot and resolver should be something we own/control so it doesn't disappear on us.
def testSnapshot = ModuleID("com.typesafe", "config", "0.4.9-SNAPSHOT", Some("compile"))
val SnapshotResolver = MavenRepository("some-snapshots", "https://oss.sonatype.org/content/repositories/snapshots/")
override def resolvers = Seq(DefaultMavenRepository, SnapshotResolver, Resolver.publishMavenLocal)
import Configurations.{ Compile, Test, Runtime, CompilerPlugin, ScalaTool }
override def configurations = Seq(Compile, Test, Runtime, CompilerPlugin, ScalaTool)
import ShowLines._
def defaultUpdateOptions = UpdateOptions().withResolverConverter(MavenResolverConverter.converter)
def resolveMajorConflicts = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")),
Seq(majorConflictLib), None, defaultUpdateOptions)
val report = ivyUpdate(m) // must not(throwAn[IllegalStateException])
val jars =
for {
conf <- report.configurations
if conf.configuration == Compile.name
m <- conf.modules
if (m.module.name contains "stringtemplate")
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
jars must haveSize(1)
}
def resolveCrossConfigurations = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")),
Seq(scalaCompiler, scalaContinuationPlugin), None, defaultUpdateOptions)
val report = ivyUpdate(m)
val jars =
for {
conf <- report.configurations
if conf.configuration == ScalaTool.name
m <- conf.modules
if (m.module.name contains "scala-compiler")
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
jars must haveSize(1)
}
def resolveSbtPlugins = {
def sha(f: java.io.File): String = sbt.Hash.toHex(sbt.Hash(f))
def findSbtIdeaJars(dep: ModuleID, name: String) = {
val m = module(ModuleID("com.example", name, "0.1.0", Some("compile")), Seq(dep), None, defaultUpdateOptions)
val report = ivyUpdate(m)
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if (m.module.name contains "sbt-idea")
(a, f) <- m.artifacts
if a.extension == "jar"
} yield (f, sha(f))
}
val oldJars = findSbtIdeaJars(oldSbtPlugin, "old")
System.err.println(s"${oldJars.mkString("\n")}")
val newJars = findSbtIdeaJars(sbtPlugin, "new")
System.err.println(s"${newJars.mkString("\n")}")
(newJars must haveSize(1)) and (oldJars must haveSize(1)) and (oldJars.map(_._2) must not(containTheSameElementsAs(newJars.map(_._2))))
}
def resolveSnapshotPubDate = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(testSnapshot), Some("2.10.2"), defaultUpdateOptions.withLatestSnapshots(true))
val report = ivyUpdate(m)
val pubTime =
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if m.module.revision endsWith "-SNAPSHOT"
date <- m.publicationDate
} yield date
(pubTime must haveSize(1))
}
def resolvePomArtifactAndDependencies = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(scalaLibraryAll), Some("2.10.2"), defaultUpdateOptions)
val report = ivyUpdate(m)
val jars =
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if (m.module.name == "scala-library") || (m.module.name contains "parser")
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
jars must haveSize(2)
}
def failIfPomMissing = {
// TODO - we need the jar to not exist too.
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(ModuleID("org.scala-sbt", "does-not-exist", "1.0", Some("compile"))), Some("2.10.2"), defaultUpdateOptions)
ivyUpdate(m) must throwAn[Exception]
}
def failIfMainArtifactMissing = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(jmxri), Some("2.10.2"), defaultUpdateOptions)
ivyUpdate(m) must throwAn[Exception]
}
def resolveNonstandardClassifier = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(testngJdk5), Some("2.10.2"), defaultUpdateOptions)
val report = ivyUpdate(m)
val jars =
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if m.module.name == "testng"
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
    (report.configurations must haveSize(configurations.size)) and
      (jars must haveSize(1)) and
      (jars.forall(_.exists) must beTrue)
}
def resolveTransitiveMavenDependency = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(akkaActor), Some("2.10.2"), defaultUpdateOptions)
val report = ivyUpdate(m)
val jars =
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if m.module.name == "scala-library"
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
(report.configurations must haveSize(configurations.size)) and
(jars must not(beEmpty)) and
(jars.forall(_.exists) must beTrue)
}
def resolveIntransitiveMavenDependency = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(akkaActorTestkit.intransitive()), Some("2.10.2"), defaultUpdateOptions)
val report = ivyUpdate(m)
val transitiveJars =
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if (m.module.name contains "akka-actor") && !(m.module.name contains "testkit")
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
val directJars =
for {
conf <- report.configurations
if conf.configuration == "compile"
m <- conf.modules
if (m.module.name contains "akka-actor") && (m.module.name contains "testkit")
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
(report.configurations must haveSize(configurations.size)) and
(transitiveJars must beEmpty) and (directJars must not(beEmpty)) and (directJars.forall(_.exists) must beTrue)
}
def resolveTransitiveConfigurationMavenDependency = {
val m = module(ModuleID("com.example", "foo", "0.1.0", Some("compile")), Seq(akkaActorTestkit), Some("2.10.2"), defaultUpdateOptions)
val report = ivyUpdate(m)
val jars =
for {
conf <- report.configurations
if conf.configuration == "test"
m <- conf.modules
if m.module.name contains "akka-actor"
(a, f) <- m.artifacts
if a.extension == "jar"
} yield f
(report.configurations must haveSize(configurations.size)) and
(jars must not(beEmpty)) and
(jars.forall(_.exists) must beTrue)
}
def resolveSourceAndJavadoc = {
val m = module(
ModuleID("com.example", "foo", "0.1.0", Some("sources")),
Seq(akkaActor.artifacts(Artifact(akkaActor.name, "javadoc"), Artifact(akkaActor.name, "sources"))),
Some("2.10.2"),
defaultUpdateOptions
)
val report = ivyUpdate(m)
val jars =
for {
conf <- report.configurations
// We actually injected javadoc/sources into the compile scope, due to how we made the request,
// so we check for them there.
if conf.configuration == "compile"
m <- conf.modules
(a, f) <- m.artifacts
if (f.getName contains "sources") || (f.getName contains "javadoc")
} yield f
(report.configurations must haveSize(configurations.size)) and
(jars must haveSize(2))
}
def publishMavenMetadata = {
val m = module(
ModuleID("com.example", "test-it", "1.0-SNAPSHOT", Some("compile")),
Seq(),
None,
defaultUpdateOptions.withLatestSnapshots(true)
)
sbt.IO.withTemporaryDirectory { dir =>
val pomFile = new java.io.File(dir, "pom.xml")
sbt.IO.write(pomFile,
"""
|<project>
| <groupId>com.example</groupId>
| <artifactId>test-it</artifactId>
| <version>1.0-SNAPSHOT</version>
|</project>
""".stripMargin)
val jarFile = new java.io.File(dir, "test-it-1.0-SNAPSHOT.jar")
sbt.IO.touch(jarFile)
System.err.println(s"DEBUGME - Publishing $m to ${Resolver.publishMavenLocal}")
ivyPublish(m, mkPublishConfiguration(
Resolver.publishMavenLocal,
Map(
Artifact("test-it-1.0-SNAPSHOT.jar") -> pomFile,
Artifact("test-it-1.0-SNAPSHOT.pom", "pom", "pom") -> jarFile
)))
}
val baseLocalMavenDir: java.io.File = Resolver.publishMavenLocal.rootFile
val allFiles: Seq[java.io.File] = sbt.PathFinder(new java.io.File(baseLocalMavenDir, "com/example/test-it")).***.get
val metadataFiles = allFiles.filter(_.getName contains "maven-metadata-local")
// TODO - maybe we should check INSIDE the metadata, or make sure we can get a publication date on resolve...
// We end up with 4 files: two maven-metadata files and two maven-metadata-local files.
metadataFiles must haveSize(2)
}
}
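For orientation, the resolveSourceAndJavadoc case above hand-crafts the javadoc and sources artifacts on the dependency; in an ordinary build the same artifacts would usually be requested with the classifier helpers, roughly as below (a sketch only; the version number is illustrative, not taken from these tests).
libraryDependencies += "com.typesafe.akka" %% "akka-actor" % "2.3.4" withSources() withJavadoc()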

View File

@ -6,4 +6,6 @@ version := "1.0.0-SNAPSHOT"
publishArtifact in (Test,packageBin) := true
publishTo := Some(Resolver.file("demo", (baseDirectory in ThisBuild).value / "demo-repo"))
publishTo := Some(MavenCache("demo", ((baseDirectory in ThisBuild).value / "demo-repo")))
//Resolver.file("demo", (baseDirectory in ThisBuild).value / "demo-repo"))

View File

@ -1,7 +1,7 @@
libraryDependencies += "org.example" %% "artifacta" % "1.0.0-SNAPSHOT" withSources() classifier("tests")
externalResolvers := Seq(
"demo" at ( (baseDirectory in ThisBuild).value / "demo-repo").toURI.toString,
MavenCache("demo", ((baseDirectory in ThisBuild).value / "demo-repo")),
DefaultMavenRepository
)

View File

@ -0,0 +1,2 @@
libraryDependencies += Defaults.sbtPluginExtra("org.scala-sbt" % "sbt-maven-resolver" % sbtVersion.value,
sbtBinaryVersion.value, scalaBinaryVersion.value)
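This project/maven.sbt appears to be how the scripted tests opt into the Aether-backed sbt-maven-resolver plugin; Defaults.sbtPluginExtra is used so the plugin version tracks the sbt build under test. In a normal build the equivalent would more likely be written with addSbtPlugin, roughly as below (the version shown is a placeholder, not taken from this change).
addSbtPlugin("org.scala-sbt" % "sbt-maven-resolver" % "0.13.8")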

View File

@ -1,7 +1,7 @@
// the default, but make it explicit
publishMavenStyle := true
publishTo <<= baseDirectory(bd => Some( Resolver.file("test-repo", bd / "repo") ) )
publishTo <<= baseDirectory(bd => Some( MavenRepository("test-repo", (bd / "repo").toURI.toASCIIString )) )
name := "test"

View File

@ -3,6 +3,8 @@ val repoFile = file("mvn-repo")
resolvers += "bad-mvn-repo" at repoFile.toURI.toURL.toString
resolvers += Resolver.typesafeIvyRepo("releases")
libraryDependencies += "bad" % "mvn" % "1.0"
TaskKey[Unit]("check") := {

View File

@ -1,28 +1,40 @@
organization in ThisBuild := "org.example"
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.10.4",
organization in ThisBuild := "org.example",
version in ThisBuild := "1.0-SNAPSHOT"
)
version in ThisBuild := "1.0-SNAPSHOT"
lazy val main = project.
settings(commonSettings: _*).
settings(
uniqueName,
libraryDependencies += (projectID in library).value,
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project"),
// TODO - should this not be needed?
updateOptions := updateOptions.value.withLatestSnapshots(true)
)
lazy val main = project.settings(
uniqueName,
libraryDependencies += (projectID in library).value
)
lazy val library = project.settings(uniqueName)
lazy val library = project.
settings(commonSettings: _*).
settings(
uniqueName
)
def uniqueName =
name := (name.value + "-" + randomSuffix( (baseDirectory in ThisBuild).value))
name := (name.value + "-" + randomSuffix( (baseDirectory in ThisBuild).value))
// better long-term approach to a clean cache/local
// would be to not use the actual ~/.m2/repository
def randomSuffix(base: File) = {
// need to persist it so that it doesn't change across reloads
val persist = base / "suffix"
if(persist.exists)
IO.read(persist)
else {
val s = Hash.halfHashString(System.currentTimeMillis.toString)
IO.write(persist, s)
s
}
// need to persist it so that it doesn't change across reloads
val persist = base / "suffix"
if(persist.exists) IO.read(persist)
else {
val s = Hash.halfHashString(System.currentTimeMillis.toString)
IO.write(persist, s)
s
}
}

View File

@ -0,0 +1,2 @@
libraryDependencies += Defaults.sbtPluginExtra("org.scala-sbt" % "sbt-maven-resolver" % sbtVersion.value,
sbtBinaryVersion.value, scalaBinaryVersion.value)

View File

@ -1,9 +1,12 @@
# this mimics any resolver that comes before m2.
> library/publishLocal
> library/publishM2
# should fail because local Maven repository not added yet
-> main/update
# should succeed because the local Ivy repository works
> main/update
# should succeed now that local Maven repository is added
# should succeed when local Maven repository is added
$ copy-file changes/mvnLocal.sbt main/build.sbt
> reload
> main/update
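The changes/mvnLocal.sbt file copied in above is not shown in this diff; presumably it adds the local Maven repository as a resolver, along the lines of the hypothetical one-liner below (Resolver.mavenLocal stands in for whatever the test actually uses, which may well be the new MavenCache).
resolvers += Resolver.mavenLocal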
@ -16,6 +19,7 @@ $ copy-file changes/mainB1.scala main/B.scala
-> main/compile
$ copy-file changes/libA.scala library/A.scala
> debug
> library/publishM2
# should succeed even without 'update' because Ivy should use the jar from the origin and not copy it to its cache
> main/compile
@ -24,8 +28,6 @@ $ copy-file changes/libA.scala library/A.scala
> main/update
> main/compile
# update B.scala to depend on a dependency that 'library' doesn't declare yet
$ delete main/B.scala
$ copy-file changes/mainB2.scala main/B.scala

View File

@ -18,10 +18,16 @@ object PomRepoTest extends Build
def pomIncludeRepository(base: File, prev: MavenRepository => Boolean) = (r: MavenRepository) =>
if(base / "repo.none" exists) false else if(base / "repo.all" exists) true else prev(r)
def addSlash(s: String): String =
s match {
case s if s endsWith "/" => s
case _ => s + "/"
}
def checkPomRepositories(file: File, args: Seq[String], s: TaskStreams)
{
val repositories = scala.xml.XML.loadFile(file) \\ "repository"
val extracted = repositories.map { repo => MavenRepository(repo \ "name" text, repo \ "url" text) }
val extracted = repositories.map { repo => MavenRepository(repo \ "name" text, addSlash(repo \ "url" text)) }
val expected = args.map(GlobFilter.apply)
s.log.info("Extracted: " + extracted.mkString("\n\t", "\n\t", "\n"))
s.log.info("Expected: " + args.mkString("\n\t", "\n\t", "\n"))

View File

@ -0,0 +1,22 @@
name := "test-parent-pom"
val localMavenRepo = file("local-repo")
val cleanExampleCache = taskKey[Unit]("Cleans the example cache.")
resolvers +=
MavenRepository("Maven2 Local Test", localMavenRepo.toURI.toString)
libraryDependencies +=
"com.example" % "example-child" % "1.0-SNAPSHOT"
version := "1.0-SNAPSHOT"
cleanExampleCache := {
ivySbt.value.withIvy(streams.value.log) { ivy =>
val cacheDir = ivy.getSettings.getDefaultRepositoryCacheBasedir
// TODO - Is this actually ok?
IO.delete(cacheDir / "com.example")
}
}

View File

@ -0,0 +1,4 @@
#NOTE: This is an internal implementation file, its format can be changed without prior notice.
#Tue Dec 16 09:06:35 EST 2014
example-child-1.0-SNAPSHOT.jar>=
example-child-1.0-SNAPSHOT.pom>=

View File

@ -0,0 +1,16 @@
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>example-parent</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<groupId>com.example</groupId>
<artifactId>example-child</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
</project>

View File

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata modelVersion="1.1.0">
<groupId>com.example</groupId>
<artifactId>example-child</artifactId>
<version>1.0-SNAPSHOT</version>
<versioning>
<snapshot>
<localCopy>true</localCopy>
</snapshot>
<lastUpdated>20141216140635</lastUpdated>
<snapshotVersions>
<snapshotVersion>
<extension>jar</extension>
<value>1.0-SNAPSHOT</value>
<updated>20141216140635</updated>
</snapshotVersion>
<snapshotVersion>
<extension>pom</extension>
<value>1.0-SNAPSHOT</value>
<updated>20141216140635</updated>
</snapshotVersion>
</snapshotVersions>
</versioning>
</metadata>

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>com.example</groupId>
<artifactId>example-child</artifactId>
<versioning>
<versions>
<version>1.0-SNAPSHOT</version>
</versions>
<lastUpdated>20141216140635</lastUpdated>
</versioning>
</metadata>

View File

@ -0,0 +1,3 @@
#NOTE: This is an internal implementation file, its format can be changed without prior notice.
#Tue Dec 16 09:01:47 EST 2014
example-parent-1.0-SNAPSHOT.pom>=

View File

@ -0,0 +1,10 @@
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>example-parent</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>pom</packaging>
</project>

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata modelVersion="1.1.0">
<groupId>com.example</groupId>
<artifactId>example-parent</artifactId>
<version>1.0-SNAPSHOT</version>
<versioning>
<snapshot>
<localCopy>true</localCopy>
</snapshot>
<lastUpdated>20141216140147</lastUpdated>
<snapshotVersions>
<snapshotVersion>
<extension>pom</extension>
<value>1.0-SNAPSHOT</value>
<updated>20141216140147</updated>
</snapshotVersion>
</snapshotVersions>
</versioning>
</metadata>

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>com.example</groupId>
<artifactId>example-parent</artifactId>
<versioning>
<versions>
<version>1.0-SNAPSHOT</version>
</versions>
<lastUpdated>20141216140147</lastUpdated>
</versioning>
</metadata>

View File

@ -0,0 +1,2 @@
> cleanExampleCache
> update

View File

@ -21,6 +21,7 @@ lazy val common = project.
case repo: PatternsBasedRepository => repo.patterns.isMavenCompatible
case _: RawRepository => false // TODO - look deeper
case _: MavenRepository => true
case _: MavenCache => true
case _ => false // TODO - Handle chain repository?
}
case _ => true

View File

@ -0,0 +1,35 @@
# Validate that a bad dependency fails the compile
$ copy-file changes/BadCommon.scala common/src/main/scala/Common.scala
> common/publish
# Force dep resolution to be successful, then compilation to fail
> dependent/update
-> dependent/compile
# Push new good change to a DIFFERENT repository.
$ copy-file changes/GoodCommon.scala common/src/main/scala/Common.scala
# Sleep to ensure timestamp change
$ sleep 1000
> common/publishLocal
# This should compile now, because Ivy should look at each repository for the most up-to-date file.
> dependent/update
> dependent/compile
# Now let's try this in the opposite order: publishLocal => publish
$ copy-file changes/BadCommon.scala common/src/main/scala/Common.scala
> common/publishLocal
# Force dep resolution to be successful, then compilation to fail
> dependent/update
-> dependent/compile
# Push new good change to a DIFFERENT repository.
$ copy-file changes/GoodCommon.scala common/src/main/scala/Common.scala
# Sleep to ensure timestamp change
$ sleep 1000
> common/publish
# This should compile again, because Ivy should look at each repository for the most up-to-date file.
> dependent/update
> dependent/compile
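Both passes above rely on the resolver consulting every configured repository for the newest snapshot rather than stopping at the first match; elsewhere in this change that behaviour is enabled per project with the setting below (a sketch of what this test's build presumably turns on).
updateOptions := updateOptions.value.withLatestSnapshots(true)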

View File

@ -16,7 +16,8 @@ TaskKey[Unit]("check-update") <<= update map { report =>
TaskKey[Unit]("check-classpath") <<= dependencyClasspath in Compile map { cp =>
val jars = cp.files.map(_.getName).toSet
val expected = Set("org.sat4j.pb-2.3.1.jar", "org.sat4j.core-2.3.1.jar")
// Note: pb depends on the tests artifact in core for no good reason. Previously this was not correctly added to the classpath.
val expected = Set("org.sat4j.pb-2.3.1.jar", "org.sat4j.core-2.3.1.jar", "org.sat4j.core-2.3.1-tests.jar")
if(jars != expected)
error("Expected jars " + expected + ", got: " + jars)
}

View File

@ -0,0 +1,2 @@
libraryDependencies += Defaults.sbtPluginExtra("org.scala-sbt" % "sbt-maven-resolver" % sbtVersion.value,
sbtBinaryVersion.value, scalaBinaryVersion.value)

View File

@ -13,6 +13,7 @@ import xsbt.test.{ CommentHandler, FileCommands, ScriptRunner, TestScriptParser
import IO.wrapNull
final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, launcher: File, launchOpts: Seq[String]) {
import ScriptedTests.emptyCallback
private val testResources = new Resources(resourceBaseDirectory)
val ScriptFilename = "test"
@ -20,7 +21,9 @@ final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, launc
def scriptedTest(group: String, name: String, log: xsbti.Logger): Seq[() => Option[String]] =
scriptedTest(group, name, Logger.xlog2Log(log))
def scriptedTest(group: String, name: String, log: Logger): Seq[() => Option[String]] = {
def scriptedTest(group: String, name: String, log: Logger): Seq[() => Option[String]] =
scriptedTest(group, name, emptyCallback, log)
def scriptedTest(group: String, name: String, prescripted: File => Unit, log: Logger): Seq[() => Option[String]] = {
import Path._
import GlobFilter._
var failed = false
@ -36,14 +39,14 @@ final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, launc
log.info("D " + str + " [DISABLED]")
None
} else {
try { scriptedTest(str, testDirectory, log); None }
try { scriptedTest(str, testDirectory, prescripted, log); None }
catch { case e: xsbt.test.TestException => Some(str) }
}
}
}
}
}
private def scriptedTest(label: String, testDirectory: File, log: Logger): Unit =
private def scriptedTest(label: String, testDirectory: File, prescripted: File => Unit, log: Logger): Unit =
{
val buffered = new BufferedLogger(new FullLogger(log))
if (bufferLog)
@ -74,6 +77,7 @@ final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, launc
}
try {
prescripted(testDirectory)
runTest()
buffered.info("+ " + label + pendingString)
} catch {
@ -91,6 +95,8 @@ final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, launc
}
}
object ScriptedTests {
val emptyCallback: File => Unit = { _ => () }
def main(args: Array[String]) {
val directory = new File(args(0))
val buffer = args(1).toBoolean
@ -100,16 +106,26 @@ object ScriptedTests {
val bootProperties = new File(args(5))
val tests = args.drop(6)
val logger = ConsoleLogger()
run(directory, buffer, tests, logger, bootProperties, Array())
run(directory, buffer, tests, logger, bootProperties, Array(), emptyCallback)
}
def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], bootProperties: File, launchOpts: Array[String]): Unit =
run(resourceBaseDirectory, bufferLog, tests, ConsoleLogger(), bootProperties, launchOpts) //new FullLogger(Logger.xlog2Log(log)))
def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], bootProperties: File,
launchOpts: Array[String]): Unit =
run(resourceBaseDirectory, bufferLog, tests, ConsoleLogger(), bootProperties, launchOpts, emptyCallback) //new FullLogger(Logger.xlog2Log(log)))
def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], logger: AbstractLogger, bootProperties: File, launchOpts: Array[String]) {
def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], bootProperties: File,
launchOpts: Array[String], prescripted: File => Unit): Unit =
run(resourceBaseDirectory, bufferLog, tests, ConsoleLogger(), bootProperties, launchOpts, prescripted) //new FullLogger(Logger.xlog2Log(log)))
def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], logger: AbstractLogger, bootProperties: File,
launchOpts: Array[String]): Unit =
run(resourceBaseDirectory, bufferLog, tests, logger, bootProperties, launchOpts, emptyCallback)
def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String], logger: AbstractLogger, bootProperties: File,
launchOpts: Array[String], prescripted: File => Unit) {
val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, bootProperties, launchOpts)
val allTests = get(tests, resourceBaseDirectory, logger) flatMap {
case ScriptedTest(group, name) =>
runner.scriptedTest(group, name, logger)
runner.scriptedTest(group, name, prescripted, logger)
}
runAll(allTests)
}
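A minimal sketch (not part of this change) of driving the new run overload with a pre-scripted callback; the paths and test pattern are placeholders, and the package import for ScriptedTests is omitted.
import java.io.File

// callback run inside each test directory before its script executes
val prepare: File => Unit = dir => println("preparing " + dir)

ScriptedTests.run(
  new File("src/sbt-test"),            // resourceBaseDirectory (placeholder)
  true,                                // bufferLog
  Array("dependency-management/*"),    // tests (placeholder pattern)
  new File("sbt.boot.properties"),     // bootProperties (placeholder)
  Array[String](),                     // launchOpts
  prepare)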