mirror of https://github.com/sbt/sbt.git
Merge pull request #420 from xuwei-k/update-scalafmt
commit 7ba1085b62

.scalafmt.conf

@@ -1,11 +1,12 @@
version = 2.0.0
version = 3.7.4
maxColumn = 100
project.git = true
project.excludeFilters = [ /sbt-test/, /input_sources/, /contraband-scala/ ]

# http://docs.scala-lang.org/style/scaladoc.html recommends the JavaDoc style.
# scala/scala is written that way too https://github.com/scala/scala/blob/v2.12.2/src/library/scala/Predef.scala
docstrings = JavaDoc
docstrings.style = Asterisk
docstrings.wrap = no

# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true

@@ -16,6 +17,8 @@ align.openParenCallSite = false
align.openParenDefnSite = false

# For better code clarity
danglingParentheses = true
danglingParentheses.preset = true

trailingCommas = preserve

runner.dialect = Scala212Source3
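
The two hunks above move `.scalafmt.conf` from scalafmt 2.0.0 to 3.7.4: the old single `docstrings = JavaDoc` setting becomes `docstrings.style = Asterisk` plus `docstrings.wrap = no`, and scalafmt 3 also wants an explicit parser dialect, here `Scala212Source3` (Scala 2.12 sources that may use `-Xsource:3` syntax). A minimal sketch of the doc-comment shape these options preserve; the object and method names are made up for illustration:

object DocstringExample {
  /**
   * With docstrings.style = Asterisk the comment keeps this JavaDoc-style shape,
   * and docstrings.wrap = no leaves the author's own line breaks alone instead of
   * re-flowing the text to maxColumn.
   */
  def frobnicate(input: String): String = input.trim
}
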

build.sbt

@@ -3,7 +3,7 @@ import Path._
import com.typesafe.tools.mima.core._, ProblemFilters._

val _ = {
//https://github.com/sbt/contraband/issues/122
// https://github.com/sbt/contraband/issues/122
sys.props += ("line.separator" -> "\n")
}
Global / semanticdbEnabled := !(Global / insideCI).value

@@ -90,11 +90,10 @@ val mimaSettings = Def settings (
"1.4.0",
"1.5.0",
"1.6.0",
) map (
version =>
organization.value %% moduleName.value % version
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
),
) map (version =>
organization.value %% moduleName.value % version
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
),
)

lazy val lmRoot = (project in file("."))
@@ -34,34 +34,33 @@ private[sbt] object JsonUtil {
UpdateReportLite(ur.configurations map { cr =>
ConfigurationReportLite(
cr.configuration.name,
cr.details map {
oar =>
OrganizationArtifactReport(
oar.organization,
oar.name,
oar.modules map { mr =>
ModuleReport(
mr.module,
mr.artifacts,
mr.missingArtifacts,
mr.status,
mr.publicationDate,
mr.resolver,
mr.artifactResolver,
mr.evicted,
mr.evictedData,
mr.evictedReason,
mr.problem,
mr.homepage,
mr.extraAttributes,
mr.isDefault,
mr.branch,
mr.configurations,
mr.licenses,
filterOutArtificialCallers(mr.callers)
)
}
)
cr.details map { oar =>
OrganizationArtifactReport(
oar.organization,
oar.name,
oar.modules map { mr =>
ModuleReport(
mr.module,
mr.artifacts,
mr.missingArtifacts,
mr.status,
mr.publicationDate,
mr.resolver,
mr.artifactResolver,
mr.evicted,
mr.evictedData,
mr.evictedReason,
mr.problem,
mr.homepage,
mr.extraAttributes,
mr.isDefault,
mr.branch,
mr.configurations,
mr.licenses,
filterOutArtificialCallers(mr.callers)
)
}
)
}
)
})
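
Most of the Scala changes in this commit follow the same mechanical pattern as the JsonUtil hunk above: where a block passed to `map`, `foreach`, `withModule`, and similar combinators contains a single `case` clause, scalafmt 3 folds that clause onto the line of the opening brace and dedents the body. A compilable before/after sketch on invented code, not from this repository:

object CaseLambdaExample {
  val xs = List("a" -> 1, "b" -> 2)

  // scalafmt 2.x put the lone `case` on its own line:
  // xs.map {
  //   case (key, value) =>
  //     s"$key=$value"
  // }

  // scalafmt 3.x folds it onto the opening brace and dedents the body:
  val rendered: List[String] = xs.map { case (key, value) =>
    s"$key=$value"
  }
}
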
@@ -58,8 +58,8 @@ private[librarymanagement] abstract class SemComparatorExtra {

protected def toStringImpl: String = {
val versionStr = Seq(major, minor, patch)
.collect {
case Some(v) => v.toString
.collect { case Some(v) =>
v.toString
}
.mkString(".")
val tagsStr = if (tags.nonEmpty) s"-${tags.mkString("-")}" else ""

@@ -177,8 +177,8 @@ private[librarymanagement] abstract class SemComparatorFunctions {
}
parse(
numbers
.collect {
case Some(v) => v.toString
.collect { case Some(v) =>
v.toString
}
.mkString(".")
)
@@ -71,7 +71,7 @@ object VersionRange {
case _: NumberFormatException =>
// TODO - if the version doesn't meet our expectations, maybe we just issue a hard
// error instead of softly ignoring the attempt to rewrite.
//sys.error(s"Could not fix version [$revision] into maven style version")
// sys.error(s"Could not fix version [$revision] into maven style version")
revision
}
}
@@ -36,7 +36,8 @@ private[sbt] object VersionSchemes {
case x => sys.error(s"unknown version scheme: $x")
}

/** info.versionScheme property will be included into POM after sbt 1.4.0.
/**
 * info.versionScheme property will be included into POM after sbt 1.4.0.
 */
def extractFromId(mid: ModuleID): Option[String] = extractFromExtraAttributes(mid.extraAttributes)
@@ -27,7 +27,8 @@ object CrossVersionUtil {
private[sbt] val BinCompatV = raw"""$basicVersion(-$tagPattern)?-bin(-.*)?""".r
private val CandidateV = raw"""$basicVersion(-RC\d+)""".r
private val MilestonV = raw"""$basicVersion(-M$tagPattern)""".r
private val NonReleaseV_n = raw"""$basicVersion((?:-$tagPattern)*)""".r // 0-n word suffixes, with leading dashes
private val NonReleaseV_n =
raw"""$basicVersion((?:-$tagPattern)*)""".r // 0-n word suffixes, with leading dashes
private val NonReleaseV_1 = raw"""$basicVersion(-$tagPattern)""".r // 1 word suffix, after a dash
private[sbt] val PartialVersion = raw"""($longPattern)\.($longPattern)(?:\..+)?""".r
@@ -21,11 +21,11 @@ final class Configuration private[sbt] (
override def equals(o: Any): Boolean = o match {
case x: Configuration =>
(this.id == x.id) &&
(this.name == x.name) &&
(this.description == x.description) &&
(this.isPublic == x.isPublic) &&
(this.extendsConfigs == x.extendsConfigs) &&
(this.transitive == x.transitive)
(this.name == x.name) &&
(this.description == x.description) &&
(this.isPublic == x.isPublic) &&
(this.extendsConfigs == x.extendsConfigs) &&
(this.transitive == x.transitive)
case _ => false
}
@@ -37,13 +37,13 @@ private[librarymanagement] abstract class CrossVersionFunctions {
 */
def fullWith(prefix: String, suffix: String): CrossVersion = Full(prefix, suffix)

/** Cross-versions a module with the binary version (typically the binary Scala version). */
/** Cross-versions a module with the binary version (typically the binary Scala version). */
def binary: CrossVersion = Binary()

/** Disables cross versioning for a module. */
def disabled: CrossVersion = sbt.librarymanagement.Disabled

/** Cross-versions a module with a constant string (typically the binary Scala version). */
/** Cross-versions a module with a constant string (typically the binary Scala version). */
def constant(value: String): CrossVersion = Constant(value)

/**

@@ -218,7 +218,7 @@ private[librarymanagement] abstract class CrossVersionFunctions {
 */
def scalaApiVersion(v: String): Option[(Long, Long)] = CrossVersionUtil.scalaApiVersion(v)

/** Regular expression that extracts the major and minor components of a version into matched groups 1 and 2.*/
/** Regular expression that extracts the major and minor components of a version into matched groups 1 and 2. */
val PartialVersion = CrossVersionUtil.PartialVersion

/** Extracts the major and minor components of a version string `s` or returns `None` if the version is improperly formatted. */
@@ -29,7 +29,9 @@ trait DependencyFilterExtra {
): ArtifactFilter =
new ArtifactFilter {
def apply(a: Artifact): Boolean =
name.accept(a.name) && `type`.accept(a.`type`) && extension.accept(a.extension) && classifier
name.accept(a.name) && `type`.accept(a.`type`) && extension.accept(
a.extension
) && classifier
.accept(a.classifier getOrElse "")
}
@@ -189,10 +189,9 @@ class DependencyResolution private[sbt] (lmEngine: DependencyResolutionInterface
((sourceArtifactTypes.toIterable map (_ -> Artifact.SourceClassifier))
:: (docArtifactTypes.toIterable map (_ -> Artifact.DocClassifier)) :: Nil).flatten.toMap
Right(r.substitute { (conf, mid, artFileSeq) =>
artFileSeq map {
case (art, f) =>
// Deduce the classifier from the type if no classifier is present already
art.withClassifier(art.classifier orElse typeClassifierMap.get(art.`type`)) -> f
artFileSeq map { case (art, f) =>
// Deduce the classifier from the type if no classifier is present already
art.withClassifier(art.classifier orElse typeClassifierMap.get(art.`type`)) -> f
}
})
case Left(w) => Left(w)

@@ -200,10 +199,9 @@ class DependencyResolution private[sbt] (lmEngine: DependencyResolutionInterface
}

protected def directDependenciesNames(module: ModuleDescriptor): String =
(module.directDependencies map {
case mID: ModuleID =>
import mID._
s"$organization % $name % $revision"
(module.directDependencies map { case mID: ModuleID =>
import mID._
s"$organization % $name % $revision"
}).mkString(", ")
}
@@ -157,30 +157,29 @@ final class EvictionError private[sbt] (
val out: mutable.ListBuffer[String] = mutable.ListBuffer()
out += "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:"
out += ""
evictions.foreach({
case (a, scheme) =>
val revs = a.evicteds map { _.module.revision }
val revsStr =
if (revs.size <= 1) revs.mkString else "{" + revs.distinct.mkString(", ") + "}"
val seen: mutable.Set[ModuleID] = mutable.Set()
val callers: List[String] = (a.evicteds.toList ::: a.winner.toList) flatMap { r =>
val rev = r.module.revision
r.callers.toList flatMap { caller =>
if (seen(caller.caller)) Nil
else {
seen += caller.caller
List(f"\t +- ${caller}%-50s (depends on $rev)")
}
evictions.foreach({ case (a, scheme) =>
val revs = a.evicteds map { _.module.revision }
val revsStr =
if (revs.size <= 1) revs.mkString else "{" + revs.distinct.mkString(", ") + "}"
val seen: mutable.Set[ModuleID] = mutable.Set()
val callers: List[String] = (a.evicteds.toList ::: a.winner.toList) flatMap { r =>
val rev = r.module.revision
r.callers.toList flatMap { caller =>
if (seen(caller.caller)) Nil
else {
seen += caller.caller
List(f"\t +- ${caller}%-50s (depends on $rev)")
}
}
val que = if (assumed) "?" else ""
val winnerRev = a.winner match {
case Some(r) => s":${r.module.revision} ($scheme$que) is selected over ${revsStr}"
case _ => " is evicted for all versions"
}
val title = s"\t* ${a.organization}:${a.name}$winnerRev"
val lines = title :: (if (a.showCallers) callers.reverse else Nil) ::: List("")
out ++= lines
}
val que = if (assumed) "?" else ""
val winnerRev = a.winner match {
case Some(r) => s":${r.module.revision} ($scheme$que) is selected over ${revsStr}"
case _ => " is evicted for all versions"
}
val title = s"\t* ${a.organization}:${a.name}$winnerRev"
val lines = title :: (if (a.showCallers) callers.reverse else Nil) ::: List("")
out ++= lines
})
out.toList
}
@@ -196,7 +196,7 @@ final class EvictionPair private[sbt] (
override def equals(o: Any): Boolean = o match {
case o: EvictionPair =>
(this.organization == o.organization) &&
(this.name == o.name)
(this.name == o.name)
case _ => false
}
override def hashCode: Int = {
@@ -279,10 +279,12 @@ object EvictionWarning {
}
confs flatMap { confReport =>
confReport.details map { detail =>
if ((detail.modules exists { _.evicted }) &&
!(buffer exists { x =>
(x.organization == detail.organization) && (x.name == detail.name)
})) {
if (
(detail.modules exists { _.evicted }) &&
!(buffer exists { x =>
(x.organization == detail.organization) && (x.name == detail.name)
})
) {
buffer += detail
}
}

@@ -298,7 +300,7 @@ object EvictionWarning {
module.scalaModuleInfo match {
case Some(s) =>
organization == s.scalaOrganization &&
(name == LibraryID) || (name == CompilerID)
(name == LibraryID) || (name == CompilerID)
case _ => false
}
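
Another recurring rewrite, visible in the EvictionWarning hunk above, comes from the dangling-parentheses setting in the updated config: a condition that no longer fits on one line is wrapped in its own parenthesized block, with the closing `)` dangling on its own line before `{`. A small compilable sketch with made-up names:

object DanglingParensExample {
  // Hypothetical values, only to show the wrapped-condition layout.
  val evictedModules: List[String] = List("old-lib")
  val alreadyReported: Set[String] = Set.empty

  def report(): Unit =
    if (
      evictedModules.nonEmpty &&
      !evictedModules.exists(alreadyReported.contains)
    ) {
      println(s"evicted: ${evictedModules.mkString(", ")}")
    }
}
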
@@ -37,10 +37,10 @@ private[librarymanagement] abstract class ModuleIDExtra {
protected def toStringImpl: String =
s"""$organization:$name:$revision""" +
(configurations match { case Some(s) => ":" + s; case None => "" }) + {
val attr = attributeString
if (attr == "") ""
else " " + attr
} +
val attr = attributeString
if (attr == "") ""
else " " + attr
} +
(if (extraAttributes.isEmpty) "" else " " + extraString)

protected def attributeString: String = {

@@ -95,10 +95,10 @@ private[librarymanagement] abstract class ModuleIDExtra {
})

// () required for chaining
/** Do not follow dependencies of this module. Synonym for `intransitive`.*/
/** Do not follow dependencies of this module. Synonym for `intransitive`. */
def notTransitive(): ModuleID = intransitive()

/** Do not follow dependencies of this module. Synonym for `notTransitive`.*/
/** Do not follow dependencies of this module. Synonym for `notTransitive`. */
def intransitive(): ModuleID = withIsTransitive(false)

/**
@@ -185,7 +185,7 @@ private[librarymanagement] abstract class ResolverFunctions {
"https://repository.apache.org/content/repositories/snapshots/"
)

/** Add the local and Maven Central repositories to the user repositories. */
/** Add the local and Maven Central repositories to the user repositories. */
def combineDefaultResolvers(userResolvers: Vector[Resolver]): Vector[Resolver] =
combineDefaultResolvers(userResolvers, mavenCentral = true)

@@ -232,7 +232,10 @@ private[librarymanagement] abstract class ResolverFunctions {
single(JCenterRepository, jcenter) ++
(xs.partition(_ == DefaultMavenRepository) match {
case (_, xs) =>
single(DefaultMavenRepository, mavenCentral) ++ xs // TODO - Do we need to filter out duplicates?
single(
DefaultMavenRepository,
mavenCentral
) ++ xs // TODO - Do we need to filter out duplicates?
})
})
}

@@ -240,7 +243,7 @@ private[librarymanagement] abstract class ResolverFunctions {
private def single[T](value: T, nonEmpty: Boolean): Vector[T] =
if (nonEmpty) Vector(value) else Vector.empty

/** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */
/** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */
sealed abstract class Define[RepositoryType <: SshBasedRepository] {

/** Subclasses should implement this method to */

@@ -269,8 +272,8 @@ private[librarymanagement] abstract class ResolverFunctions {
 * patterns will be resolved. `basePatterns` are the initial patterns to use.
 * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
 */
def apply(name: String, hostname: String, basePath: String)(
implicit basePatterns: Patterns
def apply(name: String, hostname: String, basePath: String)(implicit
basePatterns: Patterns
): RepositoryType =
apply(name, Some(hostname), None, Some(basePath))
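
The Resolver hunks above and below show how scalafmt 3 lays out implicit parameter lists: the `implicit` keyword now sits at the end of the line that opens the parameter group rather than at the start of the next line. A compilable sketch with hypothetical names (only the layout mirrors the diff):

object ImplicitParamLayoutExample {
  final case class Patterns(artifactPatterns: Vector[String])

  // scalafmt 2.x broke the clause as ")(" with "implicit basePatterns: Patterns" on the next line;
  // scalafmt 3.x keeps `implicit` on the line that opens the parameter group.
  def repository(name: String, hostname: String, basePath: String)(implicit
      basePatterns: Patterns
  ): String =
    s"$name at $hostname$basePath using ${basePatterns.artifactPatterns.mkString(",")}"
}
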
@@ -278,8 +281,8 @@ private[librarymanagement] abstract class ResolverFunctions {
 * Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use.
 * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
 */
def apply(name: String, hostname: String, port: Int)(
implicit basePatterns: Patterns
def apply(name: String, hostname: String, port: Int)(implicit
basePatterns: Patterns
): RepositoryType =
apply(name, Some(hostname), Some(port), None)

@@ -288,8 +291,8 @@ private[librarymanagement] abstract class ResolverFunctions {
 * patterns will be resolved. `basePatterns` are the initial patterns to use.
 * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
 */
def apply(name: String, hostname: String, port: Int, basePath: String)(
implicit basePatterns: Patterns
def apply(name: String, hostname: String, port: Int, basePath: String)(implicit
basePatterns: Patterns
): RepositoryType =
apply(name, Some(hostname), Some(port), Some(basePath))

@@ -304,13 +307,13 @@ private[librarymanagement] abstract class ResolverFunctions {
construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns))
}

/** A factory to construct an interface to an Ivy SSH resolver.*/
/** A factory to construct an interface to an Ivy SSH resolver. */
object ssh extends Define[SshRepository] {
protected def construct(name: String, connection: SshConnection, patterns: Patterns) =
SshRepository(name, connection, patterns, None)
}

/** A factory to construct an interface to an Ivy SFTP resolver.*/
/** A factory to construct an interface to an Ivy SFTP resolver. */
object sftp extends Define[SftpRepository] {
protected def construct(name: String, connection: SshConnection, patterns: Patterns) =
SftpRepository(name, connection, patterns)

@@ -348,8 +351,8 @@ private[librarymanagement] abstract class ResolverFunctions {
def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository =
baseRepository(baseURL.toURI.normalize.toString)(URLRepository(name, _))
}
private def baseRepository[T](base: String)(construct: Patterns => T)(
implicit basePatterns: Patterns
private def baseRepository[T](base: String)(construct: Patterns => T)(implicit
basePatterns: Patterns
): T =
construct(resolvePatterns(base, basePatterns))

@@ -381,7 +384,7 @@ private[librarymanagement] abstract class ResolverFunctions {
}
def defaultFileConfiguration = FileConfiguration(true, None)
def mavenStylePatterns = Patterns().withArtifactPatterns(Vector(mavenStyleBasePattern))
def ivyStylePatterns = defaultIvyPatterns //Patterns(Nil, Nil, false)
def ivyStylePatterns = defaultIvyPatterns // Patterns(Nil, Nil, false)

def defaultPatterns = mavenStylePatterns
def mavenStyleBasePattern =

@@ -397,10 +400,13 @@ private[librarymanagement] abstract class ResolverFunctions {
val findQuoted = "\\$\\{([^\\}]*)\\}".r
val env = "env\\.(.*)".r

findQuoted.replaceAllIn(str, _.group(1) match {
case env(variable) => sys.env.getOrElse(variable, "")
case property => sys.props.getOrElse(property, "")
})
findQuoted.replaceAllIn(
str,
_.group(1) match {
case env(variable) => sys.env.getOrElse(variable, "")
case property => sys.props.getOrElse(property, "")
}
)
}
private[this] def mavenLocalDir: File = {
def loadHomeFromSettings(f: () => File): Option[File] =

@@ -423,7 +429,9 @@ private[librarymanagement] abstract class ResolverFunctions {
}
sys.props.get("maven.repo.local").map(new File(_)) orElse
loadHomeFromSettings(() => new File(sbt.io.Path.userHome, ".m2/settings.xml")) orElse
loadHomeFromSettings(() => new File(new File(System.getenv("M2_HOME")), "conf/settings.xml")) getOrElse
loadHomeFromSettings(() =>
new File(new File(System.getenv("M2_HOME")), "conf/settings.xml")
) getOrElse
new File(sbt.io.Path.userHome, ".m2/repository")
}
// TODO - should this just be the *exact* same as mavenLocal? probably...
@@ -12,16 +12,15 @@ final class RichUpdateReport(report: UpdateReport) {
private[sbt] def recomputeStamps(): UpdateReport = {
val files = report.cachedDescriptor +: allFiles
val stamps = files
.map(
f =>
(
f,
// TODO: The list of files may also contain some odd files that do not actually exist like:
// "./target/ivyhome/resolution-cache/com.example/foo/0.4.0/resolved.xml.xml".
// IO.getModifiedTimeOrZero() will just return zero, but the list of files should not contain such
// files to begin with, in principle.
IO.getModifiedTimeOrZero(f)
)
.map(f =>
(
f,
// TODO: The list of files may also contain some odd files that do not actually exist like:
// "./target/ivyhome/resolution-cache/com.example/foo/0.4.0/resolved.xml.xml".
// IO.getModifiedTimeOrZero() will just return zero, but the list of files should not contain such
// files to begin with, in principle.
IO.getModifiedTimeOrZero(f)
)
)
.toMap
UpdateReport(report.cachedDescriptor, report.configurations, report.stats, stamps)

@@ -65,13 +64,13 @@ final class RichUpdateReport(report: UpdateReport) {
file
}

/** Constructs a new report that only contains files matching the specified filter.*/
/** Constructs a new report that only contains files matching the specified filter. */
def filter(f: DependencyFilter): UpdateReport =
moduleReportMap { (configuration, modReport) =>
modReport
.withArtifacts(
modReport.artifacts filter {
case (art, _) => f(configuration, modReport.module, art)
modReport.artifacts filter { case (art, _) =>
f(configuration, modReport.module, art)
}
)
.withMissingArtifacts(
@@ -10,9 +10,13 @@ final class ResolveException(
val failedPaths: Map[ModuleID, Seq[ModuleID]]
) extends RuntimeException(messages.mkString("\n")) {
def this(messages: Seq[String], failed: Seq[ModuleID]) =
this(messages, failed, Map(failed map { m =>
m -> Nil
}: _*))
this(
messages,
failed,
Map(failed map { m =>
m -> Nil
}: _*)
)
}

/**

@@ -30,13 +34,12 @@ object UnresolvedWarning {
config: UnresolvedWarningConfiguration
): UnresolvedWarning = {
def modulePosition(m0: ModuleID): Option[SourcePosition] =
config.modulePositions.find {
case (m, _) =>
(m.organization == m0.organization) &&
(m0.name startsWith m.name) &&
(m.revision == m0.revision)
} map {
case (_, p) => p
config.modulePositions.find { case (m, _) =>
(m.organization == m0.organization) &&
(m0.name startsWith m.name) &&
(m.revision == m0.revision)
} map { case (_, p) =>
p
}
val failedPaths = err.failed map { (x: ModuleID) =>
err.failedPaths(x).toList.reverse map { id =>

@@ -67,9 +70,8 @@ object UnresolvedWarning {
if (path.nonEmpty) {
val head = path.head
buffer += "\t\t" + head._1.toString + sourcePosStr(head._2)
path.tail foreach {
case (m, pos) =>
buffer += "\t\t +- " + m.toString + sourcePosStr(pos)
path.tail foreach { case (m, pos) =>
buffer += "\t\t +- " + m.toString + sourcePosStr(pos)
}
}
}
@@ -29,9 +29,13 @@ private[librarymanagement] abstract class ConfigurationReportExtra {
}

def retrieve(f: (ConfigRef, ModuleID, Artifact, File) => File): ConfigurationReport =
ConfigurationReport(configuration, modules map {
_.retrieve((mid, art, file) => f(configuration, mid, art, file))
}, details)
ConfigurationReport(
configuration,
modules map {
_.retrieve((mid, art, file) => f(configuration, mid, art, file))
},
details
)
}

private[librarymanagement] abstract class ModuleReportExtra {

@@ -124,24 +128,23 @@ private[librarymanagement] abstract class UpdateReportExtra {
/** All resolved modules in all configurations. */
def allModules: Vector[ModuleID] = {
val key = (m: ModuleID) => (m.organization, m.name, m.revision)
configurations.flatMap(_.allModules).groupBy(key).toVector map {
case (_, v) =>
v reduceLeft { (agg, x) =>
agg.withConfigurations(
(agg.configurations, x.configurations) match {
case (None, _) => x.configurations
case (Some(ac), None) => Some(ac)
case (Some(ac), Some(xc)) => Some(s"$ac;$xc")
}
)
}
configurations.flatMap(_.allModules).groupBy(key).toVector map { case (_, v) =>
v reduceLeft { (agg, x) =>
agg.withConfigurations(
(agg.configurations, x.configurations) match {
case (None, _) => x.configurations
case (Some(ac), None) => Some(ac)
case (Some(ac), Some(xc)) => Some(s"$ac;$xc")
}
)
}
}
}

def retrieve(f: (ConfigRef, ModuleID, Artifact, File) => File): UpdateReport =
UpdateReport(cachedDescriptor, configurations map { _ retrieve f }, stats, stamps)

/** Gets the report for the given configuration, or `None` if the configuration was not resolved.*/
/** Gets the report for the given configuration, or `None` if the configuration was not resolved. */
def configuration(s: ConfigRef) = configurations.find(_.configuration == s)

/** Gets the names of all resolved configurations. This `UpdateReport` contains one `ConfigurationReport` for each configuration in this list. */
@@ -162,7 +162,8 @@ object VersionNumber {
}
}

/** A variant of SemVar that seems to be common among the Scala libraries.
/**
 * A variant of SemVar that seems to be common among the Scala libraries.
 * The second segment (y in x.y.z) increments breaks the binary compatibility even when x > 0.
 * Also API compatibility is expected even when the first segment is zero.
 */

@@ -172,7 +173,8 @@ object VersionNumber {
PackVer.isCompatible(v1, v2)
}

/** A variant of SemVar that seems to be common among the Scala libraries.
/**
 * A variant of SemVar that seems to be common among the Scala libraries.
 * The second segment (y in x.y.z) increments breaks the binary compatibility even when x > 0.
 * Also API compatibility is expected even when the first segment is zero.
 */

@@ -193,7 +195,8 @@ object VersionNumber {
}
}

/** A variant of SemVar that enforces API compatibility when the first segment is zero.
/**
 * A variant of SemVar that enforces API compatibility when the first segment is zero.
 */
object EarlySemVer extends VersionNumberCompatibility {
import SemVer._
@@ -56,6 +56,6 @@ object ConfigMacroSpec extends Properties("ConfigMacroSpec") {
s"Actual isPublic: ${c.isPublic}" |:
(c.id == id) &&
(c.name == name) &&
(c.isPublic == isPublic)
(c.isPublic == isPublic)
}
}
@@ -148,13 +148,12 @@ class VersionNumberSpec extends AnyFreeSpec with Matchers with Inside {
ts: Seq[String],
es: Seq[String]
): Unit =
s"should parse to ($ns, $ts, $es)" in inside(v.value) {
case VersionNumber(ns1, ts1, es1) =>
(ns1 shouldBe ns)
(ts1 shouldBe ts)
(es1 shouldBe es)
(VersionNumber(ns, ts, es).toString shouldBe v.value)
(VersionNumber(ns, ts, es) shouldBe VersionNumber(ns, ts, es))
s"should parse to ($ns, $ts, $es)" in inside(v.value) { case VersionNumber(ns1, ts1, es1) =>
(ns1 shouldBe ns)
(ts1 shouldBe ts)
(es1 shouldBe es)
(VersionNumber(ns, ts, es).toString shouldBe v.value)
(VersionNumber(ns, ts, es) shouldBe VersionNumber(ns, ts, es))
}

private[this] def assertParsesToError(v: VersionString): Unit =
@@ -21,8 +21,6 @@ import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor;
 *
 * Also see: http://ant.apache.org/ivy/history/2.3.0/ivyfile/dependency.html
 * and: http://svn.apache.org/repos/asf/ant/ivy/core/tags/2.3.0/src/java/org/apache/ivy/plugins/parser/m2/PomModuleDescriptorBuilder.java
 *
 *
 */
object ReplaceMavenConfigurationMappings {
@@ -56,7 +56,7 @@ class ComponentManager(
/** This is used to lock the local cache in project/boot/. By checking the local cache first, we can avoid grabbing a global lock. */
private def lockLocalCache[T](action: => T): T = lock(provider.lockFile)(action)

/** This is used to ensure atomic access to components in the global Ivy cache.*/
/** This is used to ensure atomic access to components in the global Ivy cache. */
private def lockGlobalCache[T](action: => T): T = lock(ivyCache.lockFile)(action)
private def lock[T](file: File)(action: => T): T =
globalLock(file, new Callable[T] { def call = action })
@@ -157,109 +157,108 @@ private[sbt] object ConvertResolver {
(updateOptions.resolverConverter orElse defaultConvert)((r, settings, log))

/** The default implementation of converter. */
lazy val defaultConvert: ResolverConverter = {
case (r, settings, log) =>
val managedChecksums = Option(settings.getVariable(ManagedChecksums)) match {
case Some(x) => x.toBoolean
case _ => false
}
r match {
case repo: MavenRepository => {
val pattern = Collections.singletonList(
Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern)
)
final class PluginCapableResolver
extends IBiblioResolver
with ChecksumFriendlyURLResolver
with DescriptorRequired {
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
def setPatterns(): Unit = {
// done this way for access to protected methods.
setArtifactPatterns(pattern)
setIvyPatterns(pattern)
}
override protected def findResourceUsingPattern(
mrid: ModuleRevisionId,
pattern: String,
artifact: IArtifact,
rmdparser: ResourceMDParser,
date: Date
): ResolvedResource = {
val extraAttributes =
mrid.getExtraAttributes.asScala.toMap.asInstanceOf[Map[String, String]]
getSbtPluginCrossVersion(extraAttributes) match {
case Some(sbtCrossVersion) =>
// if the module is an sbt plugin
// we first try to resolve the artifact with the sbt cross version suffix
// and we fallback to the one without the suffix
val newArtifact = DefaultArtifact.cloneWithAnotherName(
artifact,
artifact.getName + sbtCrossVersion
)
val resolved =
super.findResourceUsingPattern(mrid, pattern, newArtifact, rmdparser, date)
if (resolved != null) resolved
else super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date)
case None =>
super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date)
}
lazy val defaultConvert: ResolverConverter = { case (r, settings, log) =>
val managedChecksums = Option(settings.getVariable(ManagedChecksums)) match {
case Some(x) => x.toBoolean
case _ => false
}
r match {
case repo: MavenRepository => {
val pattern = Collections.singletonList(
Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern)
)
final class PluginCapableResolver
extends IBiblioResolver
with ChecksumFriendlyURLResolver
with DescriptorRequired {
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
def setPatterns(): Unit = {
// done this way for access to protected methods.
setArtifactPatterns(pattern)
setIvyPatterns(pattern)
}
override protected def findResourceUsingPattern(
mrid: ModuleRevisionId,
pattern: String,
artifact: IArtifact,
rmdparser: ResourceMDParser,
date: Date
): ResolvedResource = {
val extraAttributes =
mrid.getExtraAttributes.asScala.toMap.asInstanceOf[Map[String, String]]
getSbtPluginCrossVersion(extraAttributes) match {
case Some(sbtCrossVersion) =>
// if the module is an sbt plugin
// we first try to resolve the artifact with the sbt cross version suffix
// and we fallback to the one without the suffix
val newArtifact = DefaultArtifact.cloneWithAnotherName(
artifact,
artifact.getName + sbtCrossVersion
)
val resolved =
super.findResourceUsingPattern(mrid, pattern, newArtifact, rmdparser, date)
if (resolved != null) resolved
else super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date)
case None =>
super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date)
}
}
val resolver = new PluginCapableResolver
if (repo.localIfFile) resolver.setRepository(new LocalIfFileRepo)
initializeMavenStyle(resolver, repo.name, repo.root)
resolver
.setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns
resolver
}
case repo: SshRepository => {
val resolver = new SshResolver with DescriptorRequired with ThreadSafeSshBasedResolver {
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
}
initializeSSHResolver(resolver, repo, settings)
repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm))
resolver
}
case repo: SftpRepository => {
val resolver = new SFTPResolver with ThreadSafeSshBasedResolver
initializeSSHResolver(resolver, repo, settings)
resolver
}
case repo: FileRepository => {
val resolver = new FileSystemResolver with DescriptorRequired {
// Workaround for #1156
// Temporarily in sbt 0.13.x we deprecate overwriting
// in local files for non-changing revisions.
// This will be fully enforced in sbt 1.0.
setRepository(new WarnOnOverwriteFileRepo())
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
}
resolver.setName(repo.name)
initializePatterns(resolver, repo.patterns, settings)
import repo.configuration.{ isLocal, isTransactional }
resolver.setLocal(isLocal)
isTransactional.foreach(value => resolver.setTransactional(value.toString))
resolver
}
case repo: URLRepository => {
val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired {
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
}
resolver.setName(repo.name)
initializePatterns(resolver, repo.patterns, settings)
resolver
}
case repo: ChainedResolver =>
IvySbt.resolverChain(repo.name, repo.resolvers, settings, log)
case repo: RawRepository =>
repo.resolver match {
case r: DependencyResolver => r
}
val resolver = new PluginCapableResolver
if (repo.localIfFile) resolver.setRepository(new LocalIfFileRepo)
initializeMavenStyle(resolver, repo.name, repo.root)
resolver
.setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns
resolver
}
case repo: SshRepository => {
val resolver = new SshResolver with DescriptorRequired with ThreadSafeSshBasedResolver {
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
}
initializeSSHResolver(resolver, repo, settings)
repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm))
resolver
}
case repo: SftpRepository => {
val resolver = new SFTPResolver with ThreadSafeSshBasedResolver
initializeSSHResolver(resolver, repo, settings)
resolver
}
case repo: FileRepository => {
val resolver = new FileSystemResolver with DescriptorRequired {
// Workaround for #1156
// Temporarily in sbt 0.13.x we deprecate overwriting
// in local files for non-changing revisions.
// This will be fully enforced in sbt 1.0.
setRepository(new WarnOnOverwriteFileRepo())
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
}
resolver.setName(repo.name)
initializePatterns(resolver, repo.patterns, settings)
import repo.configuration.{ isLocal, isTransactional }
resolver.setLocal(isLocal)
isTransactional.foreach(value => resolver.setTransactional(value.toString))
resolver
}
case repo: URLRepository => {
val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired {
override val managedChecksumsEnabled: Boolean = managedChecksums
override def getResource(resource: Resource, dest: File): Long = get(resource, dest)
}
resolver.setName(repo.name)
initializePatterns(resolver, repo.patterns, settings)
resolver
}
case repo: ChainedResolver =>
IvySbt.resolverChain(repo.name, repo.resolvers, settings, log)
case repo: RawRepository =>
repo.resolver match {
case r: DependencyResolver => r
}
}
}

private def getSbtPluginCrossVersion(extraAttributes: Map[String, String]): Option[String] = {
@@ -329,8 +328,9 @@ private[sbt] object ConvertResolver {
override def getDependency(dd: DependencyDescriptor, data: ResolveData) = {
val prev = descriptorString(isAllownomd)
setDescriptor(descriptorString(hasExplicitURL(dd)))
val t = try super.getDependency(dd, data)
finally setDescriptor(prev)
val t =
try super.getDependency(dd, data)
finally setDescriptor(prev)
t
}
def descriptorString(optional: Boolean) =
@@ -64,7 +64,7 @@ object CustomPomParser {
// Evil hackery to override the default maven pom mappings.
ReplaceMavenConfigurationMappings.init()

/** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution.*/
/** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution. */
val InfoKeyPrefix = SbtPomExtraProperties.POM_INFO_KEY_PREFIX
val ApiURLKey = SbtPomExtraProperties.POM_API_KEY
val VersionSchemeKey = SbtPomExtraProperties.VERSION_SCHEME_KEY

@@ -75,7 +75,8 @@ object CustomPomParser {
private[this] val unqualifiedKeys =
Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey, VersionSchemeKey)

/** In the new POM format of sbt plugins, the dependency to an sbt plugin
/**
 * In the new POM format of sbt plugins, the dependency to an sbt plugin
 * contains the sbt cross-version _2.12_1.0. The reason is we want Maven to be able
 * to resolve the dependency using the pattern:
 * <org>/<artifact-name>_2.12_1.0/<version>/<artifact-name>_2.12_1.0-<version>.pom

@@ -134,7 +135,9 @@ object CustomPomParser {
val MyHash = MakeTransformHash(md)
// sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both
Option(extraInfo).isDefined &&
((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match {
((Option(extraInfo get TransformedHashKey) orElse Option(
extraInfo get oldTransformedHashKey
)) match {
case Some(MyHash) => true
case _ => false
})

@@ -297,17 +300,23 @@ object CustomPomParser {
for (l <- md.getLicenses) dmd.addLicense(l)
for ((key, value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String, String]].asScala)
dmd.addExtraInfo(key, value)
dmd.addExtraInfo(TransformedHashKey, MakeTransformHash(md)) // mark as transformed by this version, so we don't need to do it again
for ((key, value) <- md.getExtraAttributesNamespaces
.asInstanceOf[java.util.Map[String, String]]
.asScala) dmd.addExtraAttributeNamespace(key, value)
dmd.addExtraInfo(
TransformedHashKey,
MakeTransformHash(md)
) // mark as transformed by this version, so we don't need to do it again
for (
(key, value) <- md.getExtraAttributesNamespaces
.asInstanceOf[java.util.Map[String, String]]
.asScala
) dmd.addExtraAttributeNamespace(key, value)
IvySbt.addExtraNamespace(dmd)

val withExtra = md.getDependencies map { dd =>
addExtra(dd, dependencyExtra)
}
val withVersionRangeMod: Seq[DependencyDescriptor] =
if (LMSysProp.modifyVersionRange) withExtra map { stripVersionRange } else withExtra
if (LMSysProp.modifyVersionRange) withExtra map { stripVersionRange }
else withExtra
val unique = IvySbt.mergeDuplicateDefinitions(withVersionRangeMod)
unique foreach dmd.addDependency
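
The CustomPomParser hunk above also shows how scalafmt 3 handles a `for` comprehension whose single generator grows too long: the enumerator is wrapped inside `for ( ... )` spread over several lines, with the body kept on the closing line. A small compilable sketch with invented names, only to illustrate the layout:

object ForEnumeratorLayoutExample {
  val namespaces: Map[String, String] = Map("e" -> "http://example.invalid/extra")

  def registerAll(register: (String, String) => Unit): Unit =
    for (
      (key, value) <- namespaces.toSeq
        .sortBy(_._1)
    ) register(key, value)
}
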
@@ -15,7 +15,7 @@ import org.apache.ivy.plugins.parser.xml.XmlModuleDescriptorParser
import org.apache.ivy.plugins.repository.Resource
import org.apache.ivy.plugins.repository.url.URLResource

/** Subclasses the default Ivy file parser in order to provide access to protected methods.*/
/** Subclasses the default Ivy file parser in order to provide access to protected methods. */
private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser {
import XmlModuleDescriptorParser.Parser
class CustomParser(settings: IvySettings, defaultConfig: Option[String])

@@ -26,7 +26,7 @@ private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser {
}
def setInput(bytes: Array[Byte]): Unit = setInput(new ByteArrayInputStream(bytes))

/** Overridden because the super implementation overwrites the module descriptor.*/
/** Overridden because the super implementation overwrites the module descriptor. */
override def setResource(res: Resource): Unit = ()
override def setMd(md: DefaultModuleDescriptor) = {
super.setMd(md)
@@ -178,8 +178,8 @@ private[sbt] class FakeResolver(private var name: String, cacheDir: File, module
val artifact =
for {
artifacts <- modules get ((moduleOrganisation, moduleName, moduleRevision))
artifact <- artifacts find (
a => a.name == art.getName && a.tpe == art.getType && a.ext == art.getExt
artifact <- artifacts find (a =>
a.name == art.getName && a.tpe == art.getType && a.ext == art.getExt
)
} yield new ArtifactOrigin(art, /* isLocal = */ true, artifact.file.toURI.toURL.toString)
@@ -135,7 +135,8 @@ final class IvySbt(
is
}

/** Defines a parallel [[CachedResolutionResolveEngine]].
/**
 * Defines a parallel [[CachedResolutionResolveEngine]].
 *
 * This is defined here because it needs access to [[mkIvy]].
 */

@@ -154,8 +155,10 @@ final class IvySbt(
}
}

/** Provides a default ivy implementation that decides which resolution
 * engine to use depending on the passed ivy configuration options. */
/**
 * Provides a default ivy implementation that decides which resolution
 * engine to use depending on the passed ivy configuration options.
 */
private class IvyImplementation extends Ivy {
private val loggerEngine = new SbtMessageLoggerEngine
override def getLoggerEngine: SbtMessageLoggerEngine = loggerEngine

@@ -195,7 +198,7 @@ final class IvySbt(

// ========== End Configuration/Setup ============

/** Uses the configured Ivy instance within a safe context.*/
/** Uses the configured Ivy instance within a safe context. */
def withIvy[T](log: Logger)(f: Ivy => T): T =
withIvy(new IvyLoggerInterface(log))(f)

@@ -333,7 +336,7 @@ final class IvySbt(
mod
}

/** Parses the Maven pom 'pomFile' from the given `PomConfiguration`.*/
/** Parses the Maven pom 'pomFile' from the given `PomConfiguration`. */
private def configurePom(pc: PomConfiguration) = {
val md = CustomPomParser.default.parseDescriptor(settings, toURL(pc.file), pc.validate)
val dmd = IvySbt.toDefaultModuleDescriptor(md)

@@ -347,7 +350,7 @@ final class IvySbt(
(dmd, defaultConf)
}

/** Parses the Ivy file 'ivyFile' from the given `IvyFileConfiguration`.*/
/** Parses the Ivy file 'ivyFile' from the given `IvyFileConfiguration`. */
private def configureIvyFile(ifc: IvyFileConfiguration) = {
val parser = new CustomXmlParser.CustomParser(settings, None)
parser.setValidate(ifc.validate)

@@ -703,7 +706,7 @@ private[sbt] object IvySbt {
moduleID.addConflictManager(mid, matcher, manager)
}

/** Converts the given sbt module id into an Ivy ModuleRevisionId.*/
/** Converts the given sbt module id into an Ivy ModuleRevisionId. */
def toID(m: ModuleID) = {
import m._
ModuleRevisionId.newInstance(

@@ -783,7 +786,8 @@ private[sbt] object IvySbt {
}
private[sbt] def javaMap(m: Map[String, String], unqualify: Boolean = false) = {
import scala.collection.JavaConverters._
val map = if (unqualify) m map { case (k, v) => (k.stripPrefix("e:"), v) } else m
val map = if (unqualify) m map { case (k, v) => (k.stripPrefix("e:"), v) }
else m
if (map.isEmpty) null else map.asJava
}

@@ -814,8 +818,8 @@ private[sbt] object IvySbt {
elem: scala.xml.Elem,
extra: Map[String, String]
): scala.xml.Elem =
extra.foldLeft(elem) {
case (e, (key, value)) => e % new scala.xml.UnprefixedAttribute(key, value, scala.xml.Null)
extra.foldLeft(elem) { case (e, (key, value)) =>
e % new scala.xml.UnprefixedAttribute(key, value, scala.xml.Null)
}
private def hasInfo(module: ModuleID, x: scala.xml.NodeSeq) = {
val info = <g>{x}</g> \ "info"

@@ -943,7 +947,7 @@ private[sbt] object IvySbt {
}
}

/** Transforms an sbt ModuleID into an Ivy DefaultDependencyDescriptor.*/
/** Transforms an sbt ModuleID into an Ivy DefaultDependencyDescriptor. */
def convertDependency(
moduleID: DefaultModuleDescriptor,
dependency: ModuleID,

@@ -961,7 +965,9 @@ private[sbt] object IvySbt {
dependency.configurations match {
case None => // The configuration for this dependency was not explicitly specified, so use the default
parser.parseDepsConfs(parser.getDefaultConf, dependencyDescriptor)
case Some(confs) => // The configuration mapping (looks like: test->default) was specified for this dependency
case Some(
confs
) => // The configuration mapping (looks like: test->default) was specified for this dependency
parser.parseDepsConfs(confs, dependencyDescriptor)
}
for (artifact <- dependency.explicitArtifacts) {
@@ -29,7 +29,7 @@ import sbt.internal.librarymanagement.IvyUtil.TransientNetworkException

object IvyActions {

/** Installs the dependencies of the given 'module' from the resolver named 'from' to the resolver named 'to'.*/
/** Installs the dependencies of the given 'module' from the resolver named 'from' to the resolver named 'to'. */
def install(module: IvySbt#Module, from: String, to: String, log: Logger): Unit = {
module.withModule(log) { (ivy, md, _) =>
for (dependency <- md.getDependencies) {

@@ -57,7 +57,7 @@ object IvyActions {
module.owner.cleanCachedResolutionCache()
}

/** Creates a Maven pom from the given Ivy configuration*/
/** Creates a Maven pom from the given Ivy configuration */
def makePomFile(module: IvySbt#Module, configuration: MakePomConfiguration, log: Logger): File = {
import configuration.{
allRepositories,

@@ -91,13 +91,12 @@ object IvyActions {
val deliverIvyPattern = configuration.deliverIvyPattern
.getOrElse(sys.error("deliverIvyPattern must be specified."))
val status = getDeliverStatus(configuration.status)
module.withModule(log) {
case (ivy, md, _) =>
val revID = md.getModuleRevisionId
val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status)
options.setConfs(getConfigurations(md, configuration.configurations))
ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options)
deliveredFile(ivy, deliverIvyPattern, md)
module.withModule(log) { case (ivy, md, _) =>
val revID = md.getModuleRevisionId
val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status)
options.setConfs(getConfigurations(md, configuration.configurations))
ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options)
deliveredFile(ivy, deliverIvyPattern, md)
}
}

@@ -130,18 +129,17 @@ object IvyActions {

val artifacts = Map(configuration.artifacts: _*)
val checksums = configuration.checksums
module.withModule(log) {
case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(resolverName)
if (resolver eq null) sys.error("Undefined resolver '" + resolverName + "'")
val ivyArtifact = ivyFile map { file =>
(MDArtifact.newIvyArtifact(md), file)
}
val cross = crossVersionMap(module.moduleSettings)
val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toList
withChecksums(resolver, checksums) {
publish(md, as, resolver, overwrite = configuration.overwrite)
}
module.withModule(log) { case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(resolverName)
if (resolver eq null) sys.error("Undefined resolver '" + resolverName + "'")
val ivyArtifact = ivyFile map { file =>
(MDArtifact.newIvyArtifact(md), file)
}
val cross = crossVersionMap(module.moduleSettings)
val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toList
withChecksums(resolver, checksums) {
publish(md, as, resolver, overwrite = configuration.overwrite)
}
}
}
private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Vector[String])(
@ -193,35 +191,36 @@ object IvyActions {
|
|||
uwconfig: UnresolvedWarningConfiguration,
|
||||
log: Logger
|
||||
): Either[UnresolvedWarning, UpdateReport] = {
|
||||
module.withModule(log) {
|
||||
case (ivy, moduleDescriptor, _) =>
|
||||
// Warn about duplicated and inconsistent dependencies
|
||||
val iw = IvySbt.inconsistentDuplicateWarning(moduleDescriptor)
|
||||
iw.foreach(log.warn(_))
|
||||
module.withModule(log) { case (ivy, moduleDescriptor, _) =>
|
||||
// Warn about duplicated and inconsistent dependencies
|
||||
val iw = IvySbt.inconsistentDuplicateWarning(moduleDescriptor)
|
||||
iw.foreach(log.warn(_))
|
||||
|
||||
val metadataDirectory = configuration.metadataDirectory
|
||||
val metadataDirectory = configuration.metadataDirectory
|
||||
|
||||
// Create inputs, resolve and retrieve the module descriptor
|
||||
val inputs = ResolutionInputs(ivy, moduleDescriptor, configuration, log)
|
||||
val resolutionResult: Either[ResolveException, UpdateReport] = {
|
||||
if (module.owner.configuration.updateOptions.cachedResolution && metadataDirectory.isDefined) {
|
||||
val cache =
|
||||
metadataDirectory.getOrElse(sys.error("Missing directory for cached resolution."))
|
||||
cachedResolveAndRetrieve(inputs, cache)
|
||||
} else resolveAndRetrieve(inputs)
|
||||
}
|
||||
// Create inputs, resolve and retrieve the module descriptor
|
||||
val inputs = ResolutionInputs(ivy, moduleDescriptor, configuration, log)
|
||||
val resolutionResult: Either[ResolveException, UpdateReport] = {
|
||||
if (
|
||||
module.owner.configuration.updateOptions.cachedResolution && metadataDirectory.isDefined
|
||||
) {
|
||||
val cache =
|
||||
metadataDirectory.getOrElse(sys.error("Missing directory for cached resolution."))
|
||||
cachedResolveAndRetrieve(inputs, cache)
|
||||
} else resolveAndRetrieve(inputs)
|
||||
}
|
||||
|
||||
// Convert to unresolved warning or retrieve update report
|
||||
resolutionResult.fold(
|
||||
exception => Left(UnresolvedWarning(exception, uwconfig)),
|
||||
ur0 => {
|
||||
val ur = configuration.retrieveManaged match {
|
||||
case Some(retrieveConf) => retrieve(log, ivy, ur0, retrieveConf)
|
||||
case _ => ur0
|
||||
}
|
||||
Right(ur)
|
||||
// Convert to unresolved warning or retrieve update report
|
||||
resolutionResult.fold(
|
||||
exception => Left(UnresolvedWarning(exception, uwconfig)),
|
||||
ur0 => {
|
||||
val ur = configuration.retrieveManaged match {
|
||||
case Some(retrieveConf) => retrieve(log, ivy, ur0, retrieveConf)
|
||||
case _ => ur0
|
||||
}
|
||||
)
|
||||
Right(ur)
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -252,11 +251,10 @@ object IvyActions {
|
|||
exclude.getOrElse(restrictedCopy(id, false), Set.empty[String])
|
||||
|
||||
def extractExcludes(report: UpdateReport): Map[ModuleID, Set[String]] =
|
||||
report.allMissing flatMap {
|
||||
case (_, mod, art) =>
|
||||
art.classifier.map { c =>
|
||||
(restrictedCopy(mod, false), c)
|
||||
}
|
||||
report.allMissing flatMap { case (_, mod, art) =>
|
||||
art.classifier.map { c =>
|
||||
(restrictedCopy(mod, false), c)
|
||||
}
|
||||
} groupBy (_._1) map { case (mod, pairs) => (mod, pairs.map(_._2).toSet) }
|
||||
|
||||
/**
|
||||
|
|
@ -275,8 +273,8 @@ object IvyActions {
|
|||
)
|
||||
|
||||
implicit def toIvyFilter(f: ArtifactTypeFilter): IvyFilter = new IvyFilter {
|
||||
override def accept(o: Object): Boolean = Option(o) exists {
|
||||
case a: IArtifact => applyFilter(a)
|
||||
override def accept(o: Object): Boolean = Option(o) exists { case a: IArtifact =>
|
||||
applyFilter(a)
|
||||
}
|
||||
|
||||
def applyFilter(a: IArtifact): Boolean =
|
||||
|
|
@ -498,13 +496,12 @@ object IvyActions {
|
|||
checkFilesPresent(artifacts)
|
||||
try {
|
||||
resolver.beginPublishTransaction(module.getModuleRevisionId(), overwrite);
|
||||
artifacts.foreach {
|
||||
case (artifact, file) =>
|
||||
IvyUtil.retryWithBackoff(
|
||||
resolver.publish(artifact, file, overwrite),
|
||||
TransientNetworkException.apply,
|
||||
maxAttempts = LMSysProp.maxPublishAttempts
|
||||
)
|
||||
artifacts.foreach { case (artifact, file) =>
|
||||
IvyUtil.retryWithBackoff(
|
||||
resolver.publish(artifact, file, overwrite),
|
||||
TransientNetworkException.apply,
|
||||
maxAttempts = LMSysProp.maxPublishAttempts
|
||||
)
|
||||
}
|
||||
resolver.commitPublishTransaction()
|
||||
} catch {
|
||||
|
|
|
|||
|
|
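Most hunks in this commit apply the same rewrite: a pattern-matching lambda with a single case is folded onto the opening brace instead of starting on its own line. A small editorial illustration (the data is invented); both layouts compile to the same thing:

object CaseLambdaLayoutSketch {
  val artifacts = Vector(("lib.jar", 1024L), ("lib-sources.jar", 512L))

  // Old layout: the case begins on its own line under the brace.
  artifacts.foreach {
    case (name, size) =>
      println(s"$name ($size bytes)")
  }

  // New layout: the case is folded onto the opening brace.
  artifacts.foreach { case (name, size) =>
    println(s"$name ($size bytes)")
  }
}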
@ -31,11 +31,11 @@ private object NotInCache {
|
|||
}
|
||||
}
|
||||
|
||||
/** Provides methods for working at the level of a single jar file with the default Ivy cache.*/
|
||||
/** Provides methods for working at the level of a single jar file with the default Ivy cache. */
|
||||
class IvyCache(val ivyHome: Option[File]) {
|
||||
def lockFile = new File(ivyHome getOrElse Path.userHome, ".sbt.cache.lock")
|
||||
|
||||
/** Caches the given 'file' with the given ID. It may be retrieved or cleared using this ID.*/
|
||||
/** Caches the given 'file' with the given ID. It may be retrieved or cleared using this ID. */
|
||||
def cacheJar(
|
||||
moduleID: ModuleID,
|
||||
file: File,
|
||||
|
|
@ -52,7 +52,7 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
}
|
||||
}
|
||||
|
||||
/** Clears the cache of the jar for the given ID.*/
|
||||
/** Clears the cache of the jar for the given ID. */
|
||||
def clearCachedJar(id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger): Unit = {
|
||||
try {
|
||||
withCachedJar(id, lock, log)(_.delete); ()
|
||||
|
|
@ -61,7 +61,7 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
}
|
||||
}
|
||||
|
||||
/** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown.*/
|
||||
/** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown. */
|
||||
def retrieveCachedJar(
|
||||
id: ModuleID,
|
||||
toDirectory: File,
|
||||
|
|
@ -74,7 +74,7 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
copyTo
|
||||
}
|
||||
|
||||
/** Get the location of the cached jar for the given ID in the Ivy cache. If the jar is not in the cache, NotInCache is thrown .*/
|
||||
/** Get the location of the cached jar for the given ID in the Ivy cache. If the jar is not in the cache, NotInCache is thrown . */
|
||||
def withCachedJar[T](id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)(
|
||||
f: File => T
|
||||
): T = {
|
||||
|
|
@ -89,7 +89,7 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
if (cachedFile.exists) f(cachedFile) else throw new NotInCache(id)
|
||||
}
|
||||
|
||||
/** Calls the given function with the default Ivy cache.*/
|
||||
/** Calls the given function with the default Ivy cache. */
|
||||
def withDefaultCache[T](lock: Option[xsbti.GlobalLock], log: Logger)(
|
||||
f: DefaultRepositoryCacheManager => T
|
||||
): T = {
|
||||
|
|
@ -103,7 +103,7 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
}
|
||||
private def unknownOrigin(artifact: IvyArtifact) = ArtifactOrigin.unkwnown(artifact)
|
||||
|
||||
/** A minimal Ivy setup with only a local resolver and the current directory as the base directory.*/
|
||||
/** A minimal Ivy setup with only a local resolver and the current directory as the base directory. */
|
||||
private def basicLocalIvy(lock: Option[xsbti.GlobalLock], log: Logger) = {
|
||||
val local = Resolver.defaultLocal
|
||||
val paths = IvyPaths(new File("."), ivyHome)
|
||||
|
|
@ -115,12 +115,12 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
(new IvySbt(conf), local)
|
||||
}
|
||||
|
||||
/** Creates a default jar artifact based on the given ID.*/
|
||||
/** Creates a default jar artifact based on the given ID. */
|
||||
private def defaultArtifact(moduleID: ModuleID): IvyArtifact =
|
||||
new DefaultArtifact(IvySbt.toID(moduleID), null, moduleID.name, "jar", "jar")
|
||||
}
|
||||
|
||||
/** Required by Ivy for copying to the cache.*/
|
||||
/** Required by Ivy for copying to the cache. */
|
||||
private class FileDownloader extends ResourceDownloader {
|
||||
def download(artifact: IvyArtifact, resource: Resource, dest: File): Unit = {
|
||||
if (dest.exists()) dest.delete()
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ private[sbt] final class IvyLoggerInterface(logger: Logger) extends MessageLogge
|
|||
case MSG_ERR => error(msg)
|
||||
}
|
||||
}
|
||||
//DEBUG level messages are very verbose and rarely useful to users.
|
||||
// DEBUG level messages are very verbose and rarely useful to users.
|
||||
// TODO: provide access to this information some other way
|
||||
def debug(msg: String): Unit = ()
|
||||
def verbose(msg: String): Unit = logger.verbose(msg)
|
||||
|
|
|
|||
|
|
@ -54,14 +54,18 @@ object IvyRetrieve {
|
|||
private[sbt] def organizationArtifactReports(
|
||||
confReport: ConfigurationResolveReport
|
||||
): Vector[OrganizationArtifactReport] = {
|
||||
val moduleIds = confReport.getModuleIds.toArray.toVector collect {
|
||||
case mId: IvyModuleId => mId
|
||||
val moduleIds = confReport.getModuleIds.toArray.toVector collect { case mId: IvyModuleId =>
|
||||
mId
|
||||
}
|
||||
def organizationArtifact(mid: IvyModuleId): OrganizationArtifactReport = {
|
||||
val deps = confReport.getNodes(mid).toArray.toVector collect { case node: IvyNode => node }
|
||||
OrganizationArtifactReport(mid.getOrganisation, mid.getName, deps map {
|
||||
moduleRevisionDetail(confReport, _)
|
||||
})
|
||||
OrganizationArtifactReport(
|
||||
mid.getOrganisation,
|
||||
mid.getName,
|
||||
deps map {
|
||||
moduleRevisionDetail(confReport, _)
|
||||
}
|
||||
)
|
||||
}
|
||||
moduleIds map { organizationArtifact }
|
||||
}
|
||||
|
|
@ -141,9 +145,13 @@ object IvyRetrieve {
|
|||
val edOpt = Option(dep.getEvictedData(confReport.getConfiguration))
|
||||
edOpt match {
|
||||
case Some(ed) =>
|
||||
(true, nonEmptyString(Option(ed.getConflictManager) map { _.toString } getOrElse {
|
||||
"transitive"
|
||||
}), nonEmptyString(ed.getDetail))
|
||||
(
|
||||
true,
|
||||
nonEmptyString(Option(ed.getConflictManager) map { _.toString } getOrElse {
|
||||
"transitive"
|
||||
}),
|
||||
nonEmptyString(ed.getDetail)
|
||||
)
|
||||
case None => (true, None, None)
|
||||
}
|
||||
case _ => (false, None, None)
|
||||
|
|
|
|||
|
|
@ -67,9 +67,11 @@ object IvyScalaUtil {
|
|||
new NamespaceTransformer {
|
||||
def transform(mrid: ModuleRevisionId): ModuleRevisionId = {
|
||||
if (mrid == null) mrid
|
||||
else if ((isScala2Artifact(mrid.getName) || isScala3Artifact(mrid.getName)) &&
|
||||
configQualifies &&
|
||||
dependeeQualifies) {
|
||||
else if (
|
||||
(isScala2Artifact(mrid.getName) || isScala3Artifact(mrid.getName)) &&
|
||||
configQualifies &&
|
||||
dependeeQualifies
|
||||
) {
|
||||
// do not override the binary incompatible Scala version because:
|
||||
// - the artifacts compiled with Scala 3 depends on the Scala 2.13 scala-library
|
||||
// - the Scala 2 TASTy reader can consume the Scala 3 artifacts
|
||||
|
|
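Another recurring rewrite wraps long if conditions into their own indented block between `if (` and `)` instead of continuing them after the keyword. A minimal editorial sketch with invented predicates:

object ConditionWrappingSketch {
  def isScalaArtifact(name: String): Boolean = name.startsWith("scala-")
  def configQualifies(config: String): Boolean = config != "provided"

  def shouldRewrite(name: String, config: String, dependeeQualifies: Boolean): Boolean = {
    if (
      isScalaArtifact(name) &&
      configQualifies(config) &&
      dependeeQualifies
    ) {
      true // the wrapped condition reads the same as a single-line one
    } else false
  }
}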
@ -152,7 +154,8 @@ object IvyScalaUtil {
|
|||
.forall(bv => bv.startsWith("3") || bv.startsWith("2.13"))
|
||||
|
||||
def matchesOneOfTheConfigs = dep.getModuleConfigurations exists { scalaVersionConfigs }
|
||||
val mismatched = isScalaLangOrg && isScalaArtifact && hasBinVerMismatch && matchesOneOfTheConfigs
|
||||
val mismatched =
|
||||
isScalaLangOrg && isScalaArtifact && hasBinVerMismatch && matchesOneOfTheConfigs
|
||||
if (mismatched)
|
||||
Some(
|
||||
"Binary version (" + depBinaryVersion + ") for dependency " + id +
|
||||
|
|
|
|||
|
|
@ -102,5 +102,6 @@ private[sbt] object ResolutionCache {
|
|||
private val Name = "sbt-resolution-cache"
|
||||
|
||||
// use sbt-specific extra attributes so that resolved xml files do not get overwritten when using different Scala/sbt versions
|
||||
private val ResolvedPattern = "[organisation]/[module]/" + Resolver.PluginPattern + "([branch]/)[revision]/[artifact].[ext]"
|
||||
private val ResolvedPattern =
|
||||
"[organisation]/[module]/" + Resolver.PluginPattern + "([branch]/)[revision]/[artifact].[ext]"
|
||||
}
|
||||
|
|
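The ResolvedPattern above is an Ivy token pattern: each [token] is substituted per module, and the parenthesized ([branch]/) segment is optional. A rough illustration using plain string replacement for a hypothetical module (not the Ivy API, and ignoring the Resolver.PluginPattern segment):

object ResolvedPatternSketch {
  val pattern = "[organisation]/[module]/([branch]/)[revision]/[artifact].[ext]"

  val example = pattern
    .replace("[organisation]", "org.example")
    .replace("[module]", "some-lib")
    .replace("([branch]/)", "") // optional segment: dropped when the module has no branch
    .replace("[revision]", "1.0.0")
    .replace("[artifact]", "resolved")
    .replace("[ext]", "xml")
  // example == "org.example/some-lib/1.0.0/resolved.xml"
}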
|
|||
|
|
@ -105,7 +105,7 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
s"""Include(${rule.getId},${rule.getConfigurations.mkString(",")},${rule.getMatcher})"""
|
||||
def artifactString(dad: DependencyArtifactDescriptor): String =
|
||||
s"""Artifact(${dad.getName},${dad.getType},${dad.getExt},${dad.getUrl},${dad.getConfigurations
|
||||
.mkString(",")},${dad.getExtraAttributes})"""
|
||||
.mkString(",")},${dad.getExtraAttributes})"""
|
||||
val mrid = dd.getDependencyRevisionId
|
||||
val confMap = (dd.getModuleConfigurations map { conf =>
|
||||
conf + "->(" + dd.getDependencyConfigurations(conf).mkString(",") + ")"
|
||||
|
|
@ -127,9 +127,13 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
val mesStr = (mes map excludeRuleString).mkString(",")
|
||||
val os = extractOverrides(parent)
|
||||
val moduleLevel = s"""dependencyOverrides=${os.mkString(",")};moduleExclusions=$mesStr"""
|
||||
val depsString = s"""$mrid;${confMap.mkString(",")};isForce=${dd.isForce};isChanging=${dd.isChanging};isTransitive=${dd.isTransitive};""" +
|
||||
s"""exclusions=${exclusions.mkString(",")};inclusions=${inclusions.mkString(",")};explicitArtifacts=${explicitArtifacts
|
||||
.mkString(",")};$moduleLevel;"""
|
||||
val depsString = s"""$mrid;${confMap.mkString(
|
||||
","
|
||||
)};isForce=${dd.isForce};isChanging=${dd.isChanging};isTransitive=${dd.isTransitive};""" +
|
||||
s"""exclusions=${exclusions.mkString(",")};inclusions=${inclusions.mkString(
|
||||
","
|
||||
)};explicitArtifacts=${explicitArtifacts
|
||||
.mkString(",")};$moduleLevel;"""
|
||||
val sha1 = Hash.toHex(
|
||||
Hash(s"""graphVersion=${CachedResolutionResolveCache.graphVersion};$depsString""")
|
||||
)
|
||||
|
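The cache key computed above is a SHA-1 over one canonical string describing the dependency and its settings. A self-contained editorial sketch of that idea, using the JDK digest API as a stand-in for sbt's Hash helper:

object CacheKeySketch {
  import java.security.MessageDigest

  def cacheKey(parts: String*): String = {
    val canonical = parts.mkString(";") // same idea as depsString: one deterministic string
    val sha1 = MessageDigest.getInstance("SHA-1").digest(canonical.getBytes("UTF-8"))
    sha1.map(b => f"$b%02x").mkString // hex-encode, like Hash.toHex
  }

  // e.g. cacheKey("graphVersion=...", "org#name;1.0", "isForce=false")
}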
|
@ -158,15 +162,14 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
md0.getAllDependencyDescriptorMediators.getAllRules.asScala.toSeq.toVector sortBy {
|
||||
case (k, _) =>
|
||||
k.toString
|
||||
} collect {
|
||||
case (k: MapMatcher, v: OverrideDependencyDescriptorMediator) =>
|
||||
val attr: Map[Any, Any] = k.getAttributes.asScala.toMap
|
||||
val module = IvyModuleId.newInstance(
|
||||
attr(IvyPatternHelper.ORGANISATION_KEY).toString,
|
||||
attr(IvyPatternHelper.MODULE_KEY).toString
|
||||
)
|
||||
val pm = k.getPatternMatcher
|
||||
IvyOverride(module, pm, v)
|
||||
} collect { case (k: MapMatcher, v: OverrideDependencyDescriptorMediator) =>
|
||||
val attr: Map[Any, Any] = k.getAttributes.asScala.toMap
|
||||
val module = IvyModuleId.newInstance(
|
||||
attr(IvyPatternHelper.ORGANISATION_KEY).toString,
|
||||
attr(IvyPatternHelper.MODULE_KEY).toString
|
||||
)
|
||||
val pm = k.getPatternMatcher
|
||||
IvyOverride(module, pm, v)
|
||||
}
|
||||
}
|
||||
def getOrElseUpdateMiniGraph(
|
||||
|
|
@ -200,8 +203,10 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
}
|
||||
val staticGraphDirectory = miniGraphPath / "static"
|
||||
val dynamicGraphDirectory = miniGraphPath / "dynamic"
|
||||
val staticGraphPath = staticGraphDirectory / pathScalaVersion / pathSbtVersion / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
|
||||
val dynamicGraphPath = dynamicGraphDirectory / todayStr / logicalClock.toString / pathScalaVersion / pathSbtVersion / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
|
||||
val staticGraphPath =
|
||||
staticGraphDirectory / pathScalaVersion / pathSbtVersion / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
|
||||
val dynamicGraphPath =
|
||||
dynamicGraphDirectory / todayStr / logicalClock.toString / pathScalaVersion / pathSbtVersion / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
|
||||
def cleanDynamicGraph(): Unit = {
|
||||
val list = IO.listFiles(dynamicGraphDirectory, DirectoryFilter).toList
|
||||
list filterNot { d =>
|
||||
|
|
@ -282,9 +287,12 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
val moduleIdMap = Map(conflicts map { x =>
|
||||
x.module -> x
|
||||
}: _*)
|
||||
(surviving map moduleIdMap, evicted map moduleIdMap map {
|
||||
_.withEvicted(true).withEvictedReason(Some(mgr.toString))
|
||||
})
|
||||
(
|
||||
surviving map moduleIdMap,
|
||||
evicted map moduleIdMap map {
|
||||
_.withEvicted(true).withEvictedReason(Some(mgr.toString))
|
||||
}
|
||||
)
|
||||
}
|
||||
(conflictCache get ((cf0, cf1))) match {
|
||||
case Some((surviving, evicted, mgr)) => reconstructReports(surviving, evicted, mgr)
|
||||
|
|
@ -410,59 +418,58 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
Left(new ResolveException(messages, failed, failedPaths))
|
||||
}
|
||||
}
|
||||
val (internal, external) = mds.partition {
|
||||
case (_, _, dd) => cache.internalDependency(dd, projectResolver).isDefined
|
||||
val (internal, external) = mds.partition { case (_, _, dd) =>
|
||||
cache.internalDependency(dd, projectResolver).isDefined
|
||||
}
|
||||
val internalResults = internal map {
|
||||
case (md, changing, dd) =>
|
||||
cache.getOrElseUpdateMiniGraph(
|
||||
md,
|
||||
changing,
|
||||
logicalClock,
|
||||
miniGraphPath,
|
||||
cachedDescriptor,
|
||||
log
|
||||
) {
|
||||
doWork(md, dd)
|
||||
}
|
||||
val internalResults = internal map { case (md, changing, dd) =>
|
||||
cache.getOrElseUpdateMiniGraph(
|
||||
md,
|
||||
changing,
|
||||
logicalClock,
|
||||
miniGraphPath,
|
||||
cachedDescriptor,
|
||||
log
|
||||
) {
|
||||
doWork(md, dd)
|
||||
}
|
||||
}
|
||||
val externalResults = external map {
|
||||
case (md0, changing, dd) =>
|
||||
val configurationsInInternal = internalResults flatMap {
|
||||
case Right(ur) =>
|
||||
ur.allModules.flatMap {
|
||||
case md =>
|
||||
val sameName = md.name == dd.getDependencyId.getName
|
||||
val sameOrg = md.organization == dd.getDependencyId.getOrganisation
|
||||
if (sameName && sameOrg) md.configurations
|
||||
else None
|
||||
val externalResults = external map { case (md0, changing, dd) =>
|
||||
val configurationsInInternal = internalResults flatMap {
|
||||
case Right(ur) =>
|
||||
ur.allModules.flatMap { case md =>
|
||||
val sameName = md.name == dd.getDependencyId.getName
|
||||
val sameOrg = md.organization == dd.getDependencyId.getOrganisation
|
||||
if (sameName && sameOrg) md.configurations
|
||||
else None
|
||||
}
|
||||
case _ => Nil
|
||||
}
|
||||
|
||||
dd match {
|
||||
case d: DefaultDependencyDescriptor =>
|
||||
configurationsInInternal foreach { c =>
|
||||
val configurations = c.split(";").map(_.split("->"))
|
||||
configurations foreach { conf =>
|
||||
try d.addDependencyConfiguration(conf(0), conf(1))
|
||||
catch {
|
||||
case _: Throwable => ()
|
||||
} // An exception will be thrown if `conf(0)` doesn't exist.
|
||||
}
|
||||
case _ => Nil
|
||||
}
|
||||
}
|
||||
|
||||
dd match {
|
||||
case d: DefaultDependencyDescriptor =>
|
||||
configurationsInInternal foreach { c =>
|
||||
val configurations = c.split(";").map(_.split("->"))
|
||||
configurations foreach { conf =>
|
||||
try d.addDependencyConfiguration(conf(0), conf(1))
|
||||
catch { case _: Throwable => () } // An exception will be thrown if `conf(0)` doesn't exist.
|
||||
}
|
||||
}
|
||||
case _ => ()
|
||||
}
|
||||
|
||||
case _ => ()
|
||||
}
|
||||
|
||||
cache.getOrElseUpdateMiniGraph(
|
||||
md0,
|
||||
changing,
|
||||
logicalClock,
|
||||
miniGraphPath,
|
||||
cachedDescriptor,
|
||||
log
|
||||
) {
|
||||
doWork(md0, dd)
|
||||
}
|
||||
cache.getOrElseUpdateMiniGraph(
|
||||
md0,
|
||||
changing,
|
||||
logicalClock,
|
||||
miniGraphPath,
|
||||
cachedDescriptor,
|
||||
log
|
||||
) {
|
||||
doWork(md0, dd)
|
||||
}
|
||||
}
|
||||
val results = internalResults ++ externalResults
|
||||
val uReport =
|
||||
|
|
@ -485,21 +492,20 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
log: Logger
|
||||
): Either[ResolveException, UpdateReport] =
|
||||
if (!missingOk && (results exists { _.isLeft }))
|
||||
Left(mergeErrors(md0, results collect { case Left(re) => re }))
|
||||
Left(mergeErrors(md0, results collect { case Left(re) => re }))
|
||||
else Right(mergeReports(md0, results collect { case Right(ur) => ur }, resolveTime, os, log))
|
||||
|
||||
def mergeErrors(md0: ModuleDescriptor, errors: Vector[ResolveException]): ResolveException = {
|
||||
val messages = errors flatMap { _.messages }
|
||||
val failed = errors flatMap { _.failed }
|
||||
val failedPaths = errors flatMap {
|
||||
_.failedPaths.toList map {
|
||||
case (failed, paths) =>
|
||||
if (paths.isEmpty) (failed, paths)
|
||||
else
|
||||
(
|
||||
failed,
|
||||
List(IvyRetrieve.toModuleID(md0.getResolvedModuleRevisionId)) ::: paths.toList.tail
|
||||
)
|
||||
_.failedPaths.toList map { case (failed, paths) =>
|
||||
if (paths.isEmpty) (failed, paths)
|
||||
else
|
||||
(
|
||||
failed,
|
||||
List(IvyRetrieve.toModuleID(md0.getResolvedModuleRevisionId)) ::: paths.toList.tail
|
||||
)
|
||||
}
|
||||
}
|
||||
new ResolveException(messages, failed, ListMap(failedPaths: _*))
|
||||
|
|
@ -579,12 +585,11 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
// this might take up some memory, but it's limited to a single
|
||||
val reports1 = reports0 flatMap { filterReports }
|
||||
val allModules0: Map[(String, String), Vector[OrganizationArtifactReport]] =
|
||||
Map(orgNamePairs map {
|
||||
case (organization, name) =>
|
||||
val xs = reports1 filter { oar =>
|
||||
oar.organization == organization && oar.name == name
|
||||
}
|
||||
((organization, name), xs)
|
||||
Map(orgNamePairs map { case (organization, name) =>
|
||||
val xs = reports1 filter { oar =>
|
||||
oar.organization == organization && oar.name == name
|
||||
}
|
||||
((organization, name), xs)
|
||||
}: _*)
|
||||
// this returns a List of Lists of (org, name). should be deterministic
|
||||
def detectLoops(
|
||||
|
|
@ -766,8 +771,8 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
val completelyEvicted = xs forall { _.evicted }
|
||||
val allCallers = xs flatMap { _.callers }
|
||||
// Caller info is often repeated across the subprojects. We only need ModuleID info for later, so xs.head is ok.
|
||||
val distinctByModuleId = allCallers.groupBy({ _.caller }).toVector map {
|
||||
case (_, xs) => xs.head
|
||||
val distinctByModuleId = allCallers.groupBy({ _.caller }).toVector map { case (_, xs) =>
|
||||
xs.head
|
||||
}
|
||||
val allArtifacts = (xs flatMap { _.artifacts }).distinct
|
||||
xs.head
|
||||
|
|
@ -777,10 +782,9 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
}
|
||||
val merged = (modules groupBy { m =>
|
||||
(m.module.organization, m.module.name, m.module.revision)
|
||||
}).toSeq.toVector flatMap {
|
||||
case (_, xs) =>
|
||||
if (xs.size < 2) xs
|
||||
else Vector(mergeModuleReports(xs))
|
||||
}).toSeq.toVector flatMap { case (_, xs) =>
|
||||
if (xs.size < 2) xs
|
||||
else Vector(mergeModuleReports(xs))
|
||||
}
|
||||
val conflicts = merged filter { m =>
|
||||
!m.evicted && m.problem.isEmpty
|
||||
|
|
@ -789,9 +793,12 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
else
|
||||
resolveConflict(rootModuleConf, conflicts, os, log) match {
|
||||
case (survivor, evicted) =>
|
||||
(survivor ++ (merged filter { m =>
|
||||
m.evicted || m.problem.isDefined
|
||||
}), evicted)
|
||||
(
|
||||
survivor ++ (merged filter { m =>
|
||||
m.evicted || m.problem.isDefined
|
||||
}),
|
||||
evicted
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -869,9 +876,13 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
}) match {
|
||||
case Some(m) =>
|
||||
log.debug(s"- directly forced dependency: $m ${m.callers}")
|
||||
(Vector(m), conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some("direct-force"))
|
||||
}, "direct-force")
|
||||
(
|
||||
Vector(m),
|
||||
conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some("direct-force"))
|
||||
},
|
||||
"direct-force"
|
||||
)
|
||||
case None =>
|
||||
(conflicts find { m =>
|
||||
m.callers.exists { _.isForceDependency }
|
||||
|
|
@ -879,18 +890,26 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
// Ivy translates pom.xml dependencies to forced="true", so transitive force is broken.
|
||||
case Some(m) if !ignoreTransitiveForce =>
|
||||
log.debug(s"- transitively forced dependency: $m ${m.callers}")
|
||||
(Vector(m), conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some("transitive-force"))
|
||||
}, "transitive-force")
|
||||
(
|
||||
Vector(m),
|
||||
conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some("transitive-force"))
|
||||
},
|
||||
"transitive-force"
|
||||
)
|
||||
case _ =>
|
||||
val strategy = lcm.getStrategy
|
||||
val infos = conflicts map { ModuleReportArtifactInfo(_) }
|
||||
log.debug(s"- Using $strategy with $infos")
|
||||
Option(strategy.findLatest(infos.toArray, None.orNull)) match {
|
||||
case Some(ModuleReportArtifactInfo(m)) =>
|
||||
(Vector(m), conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some(lcm.toString))
|
||||
}, lcm.toString)
|
||||
(
|
||||
Vector(m),
|
||||
conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some(lcm.toString))
|
||||
},
|
||||
lcm.toString
|
||||
)
|
||||
case _ => (conflicts, Vector(), lcm.toString)
|
||||
}
|
||||
}
|
||||
|
|
@ -905,9 +924,13 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
mr.module.revision == ovrVersion
|
||||
} match {
|
||||
case Some(m) =>
|
||||
(Vector(m), conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some("override"))
|
||||
}, "override")
|
||||
(
|
||||
Vector(m),
|
||||
conflicts filterNot { _ == m } map {
|
||||
_.withEvicted(true).withEvictedReason(Some("override"))
|
||||
},
|
||||
"override"
|
||||
)
|
||||
case None =>
|
||||
sys.error(
|
||||
s"override dependency specifies $ovrVersion but no candidates were found: " + (conflicts map {
|
||||
|
|
@ -925,7 +948,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
|
|||
}).mkString("(", ", ", ")"))
|
||||
)
|
||||
case lcm: LatestConflictManager => useLatest(lcm)
|
||||
case conflictManager => sys.error(s"Unsupported conflict manager $conflictManager")
|
||||
case conflictManager => sys.error(s"Unsupported conflict manager $conflictManager")
|
||||
}
|
||||
}
|
||||
if (conflicts.size == 2 && os.isEmpty) {
|
||||
|
|
|
|||
|
|
@ -64,10 +64,14 @@ object ErrorMessageAuthenticator {
|
|||
ivyOriginalField.set(ivy, newOriginal)
|
||||
}
|
||||
|
||||
try Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match {
|
||||
case Some(_: ErrorMessageAuthenticator) => // We're already installed, no need to do the work again.
|
||||
case originalOpt => installIntoIvyImpl(originalOpt)
|
||||
} catch {
|
||||
try
|
||||
Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match {
|
||||
case Some(
|
||||
_: ErrorMessageAuthenticator
|
||||
) => // We're already installed, no need to do the work again.
|
||||
case originalOpt => installIntoIvyImpl(originalOpt)
|
||||
}
|
||||
catch {
|
||||
case t: Throwable =>
|
||||
Message.debug(
|
||||
"Error occurred while trying to install debug messages into Ivy Authentication" + t.getMessage
|
||||
|
|
@ -135,16 +139,17 @@ private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticato
|
|||
// Grabs the authentication that would have been provided had we not been installed...
|
||||
def originalAuthentication: Option[PasswordAuthentication] = {
|
||||
Authenticator.setDefault(original.orNull)
|
||||
try Option(
|
||||
Authenticator.requestPasswordAuthentication(
|
||||
getRequestingHost,
|
||||
getRequestingSite,
|
||||
getRequestingPort,
|
||||
getRequestingProtocol,
|
||||
getRequestingPrompt,
|
||||
getRequestingScheme
|
||||
try
|
||||
Option(
|
||||
Authenticator.requestPasswordAuthentication(
|
||||
getRequestingHost,
|
||||
getRequestingSite,
|
||||
getRequestingPort,
|
||||
getRequestingProtocol,
|
||||
getRequestingPrompt,
|
||||
getRequestingScheme
|
||||
)
|
||||
)
|
||||
)
|
||||
finally Authenticator.setDefault(this)
|
||||
}
|
||||
originalAuthentication.orNull
|
||||
|
|
|
|||
|
|
@ -56,8 +56,8 @@ private[sbt] object IvyCredentialsLookup {
|
|||
* A mapping of host -> realms in the ivy credentials store.
|
||||
*/
|
||||
def realmsForHost: Map[String, Set[String]] =
|
||||
(keyringKeys collect {
|
||||
case x: Realm => x
|
||||
(keyringKeys collect { case x: Realm =>
|
||||
x
|
||||
} groupBy { realm =>
|
||||
realm.host
|
||||
} mapValues { realms =>
|
||||
|
|
|
|||
|
|
@ -13,14 +13,14 @@ private[sbt] object MergeDescriptors {
|
|||
a.isTransitive == b.isTransitive &&
|
||||
a.getParentRevisionId == b.getParentRevisionId &&
|
||||
a.getNamespace == b.getNamespace && {
|
||||
val amrid = a.getDependencyRevisionId
|
||||
val bmrid = b.getDependencyRevisionId
|
||||
amrid == bmrid
|
||||
} && {
|
||||
val adyn = a.getDynamicConstraintDependencyRevisionId
|
||||
val bdyn = b.getDynamicConstraintDependencyRevisionId
|
||||
adyn == bdyn
|
||||
}
|
||||
val amrid = a.getDependencyRevisionId
|
||||
val bmrid = b.getDependencyRevisionId
|
||||
amrid == bmrid
|
||||
} && {
|
||||
val adyn = a.getDynamicConstraintDependencyRevisionId
|
||||
val bdyn = b.getDynamicConstraintDependencyRevisionId
|
||||
adyn == bdyn
|
||||
}
|
||||
|
||||
def apply(a: DependencyDescriptor, b: DependencyDescriptor): DependencyDescriptor = {
|
||||
assert(mergeable(a, b))
|
||||
|
|
|
|||
|
|
@ -48,17 +48,18 @@ private[sbt] class ParallelResolveEngine(
|
|||
}
|
||||
// Farm out the dependencies for parallel download
|
||||
implicit val ec = ParallelResolveEngine.resolveExecutionContext
|
||||
val allDownloadsFuture = Future.traverse(report.getDependencies.asScala) {
|
||||
case dep: IvyNode =>
|
||||
Future {
|
||||
if (!(dep.isCompletelyEvicted || dep.hasProblem) &&
|
||||
dep.getModuleRevision != null) {
|
||||
Some(downloadNodeArtifacts(dep, artifactFilter, options))
|
||||
} else None
|
||||
}
|
||||
val allDownloadsFuture = Future.traverse(report.getDependencies.asScala) { case dep: IvyNode =>
|
||||
Future {
|
||||
if (
|
||||
!(dep.isCompletelyEvicted || dep.hasProblem) &&
|
||||
dep.getModuleRevision != null
|
||||
) {
|
||||
Some(downloadNodeArtifacts(dep, artifactFilter, options))
|
||||
} else None
|
||||
}
|
||||
}
|
||||
val allDownloads = Await.result(allDownloadsFuture, Duration.Inf)
|
||||
//compute total downloaded size
|
||||
// compute total downloaded size
|
||||
val totalSize = allDownloads.foldLeft(0L) {
|
||||
case (size, Some(download)) =>
|
||||
val dependency = download.dep
|
||||
|
|
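The reformatted block above keeps the same shape: downloads are farmed out with Future.traverse on a dedicated execution context and the engine then blocks for the combined result. A stripped-down editorial sketch of that shape (the download body is a placeholder, not the sbt code):

object ParallelDownloadSketch {
  import scala.concurrent.{ Await, ExecutionContext, Future }
  import scala.concurrent.duration.Duration

  def downloadAll(urls: Seq[String])(implicit ec: ExecutionContext): Seq[Int] = {
    // One Future per URL; here the "download" just pretends the size is the URL length.
    val all: Future[Seq[Int]] = Future.traverse(urls)(url => Future(url.length))
    Await.result(all, Duration.Inf) // block until every download has finished
  }
}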
@ -67,8 +68,10 @@ private[sbt] class ParallelResolveEngine(
|
|||
val configurationReport = report.getConfigurationReport(configuration)
|
||||
|
||||
// Take into account artifacts required by the given configuration
|
||||
if (dependency.isEvicted(configuration) ||
|
||||
dependency.isBlacklisted(configuration)) {
|
||||
if (
|
||||
dependency.isEvicted(configuration) ||
|
||||
dependency.isBlacklisted(configuration)
|
||||
) {
|
||||
configurationReport.addDependency(dependency)
|
||||
} else configurationReport.addDependency(dependency, download.report)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -37,9 +37,9 @@ private[sbt] case class SbtChainResolver(
|
|||
override def equals(o: Any): Boolean = o match {
|
||||
case o: SbtChainResolver =>
|
||||
this.name == o.name &&
|
||||
this.resolvers == o.resolvers &&
|
||||
this.settings == o.settings &&
|
||||
this.updateOptions == o.updateOptions
|
||||
this.resolvers == o.resolvers &&
|
||||
this.settings == o.settings &&
|
||||
this.updateOptions == o.updateOptions
|
||||
case _ => false
|
||||
}
|
||||
|
||||
|
|
@ -124,7 +124,8 @@ private[sbt] case class SbtChainResolver(
|
|||
/** If None, module was not found. Otherwise, hit. */
|
||||
type TriedResolution = Option[(ResolvedModuleRevision, DependencyResolver)]
|
||||
|
||||
/** Attempts to resolve the artifact from each of the resolvers in the chain.
|
||||
/**
|
||||
* Attempts to resolve the artifact from each of the resolvers in the chain.
|
||||
*
|
||||
* Contract:
|
||||
* 1. It doesn't resolve anything when there is a resolved module, `isReturnFirst` is
|
||||
|
|
@ -155,8 +156,8 @@ private[sbt] case class SbtChainResolver(
|
|||
currentlyResolved = Option(resolver.getDependency(descriptor, data))
|
||||
if (currentlyResolved eq previouslyResolved) None
|
||||
else if (useLatest) {
|
||||
currentlyResolved.map(
|
||||
x => (reparseModuleDescriptor(descriptor, data, resolver, x), resolver)
|
||||
currentlyResolved.map(x =>
|
||||
(reparseModuleDescriptor(descriptor, data, resolver, x), resolver)
|
||||
)
|
||||
} else currentlyResolved.map(x => (forcedRevision(x), resolver))
|
||||
}
|
||||
|
|
@ -174,7 +175,8 @@ private[sbt] case class SbtChainResolver(
|
|||
val oldLatest: Option[LatestStrategy] =
|
||||
setLatestIfRequired(resolver, Option(getLatestStrategy))
|
||||
try Right(performResolution(resolver))
|
||||
catch { case NonFatal(t) => reportError(t, resolver); Left(t) } finally {
|
||||
catch { case NonFatal(t) => reportError(t, resolver); Left(t) }
|
||||
finally {
|
||||
oldLatest.foreach(_ => doSetLatestStrategy(resolver, oldLatest))
|
||||
checkInterrupted()
|
||||
}
|
||||
|
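The change above only moves the finally clause onto its own line; the underlying pattern is "temporarily override a setting, run the body, restore the old value in finally". A generic editorial sketch of that pattern:

object RestoreInFinallySketch {
  def withTemporarySetting[A](get: () => String, set: String => Unit, temp: String)(body: => A): A = {
    val previous = get()
    set(temp)
    try body
    finally set(previous) // always restore, even when body throws
  }
}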
|
@ -189,34 +191,33 @@ private[sbt] case class SbtChainResolver(
|
|||
data: ResolveData
|
||||
): Option[ResolvedModuleRevision] = {
|
||||
|
||||
val sortedRevisions = foundRevisions.sortBy {
|
||||
case (rmr, resolver) =>
|
||||
val publicationDate = rmr.getPublicationDate
|
||||
val descriptorDate = rmr.getDescriptor.getPublicationDate
|
||||
Message.warn(s"Sorting results from $rmr, using $publicationDate and $descriptorDate.")
|
||||
// Just issue warning about issues with publication date, and fake one on it for now
|
||||
val chosenPublicationDate = Option(publicationDate).orElse(Option(descriptorDate))
|
||||
chosenPublicationDate match {
|
||||
case Some(date) => date.getTime
|
||||
case None =>
|
||||
val id = rmr.getId
|
||||
val resolvedResource = (resolver.findIvyFileRef(descriptor, data), rmr.getDescriptor)
|
||||
resolvedResource match {
|
||||
case (res: ResolvedResource, dmd: DefaultModuleDescriptor) =>
|
||||
val resolvedPublicationDate = new java.util.Date(res.getLastModified)
|
||||
Message.debug(s"No publication date from resolver $resolver for $id.")
|
||||
Message.debug(s"Setting publication date to: $resolvedPublicationDate.")
|
||||
dmd.setPublicationDate(resolvedPublicationDate)
|
||||
res.getLastModified
|
||||
case (ivf, dmd) =>
|
||||
// The dependency is specified by a direct URL or some sort of non-ivy file
|
||||
if (ivf == null && descriptor.isChanging)
|
||||
Message.warn(s"$prefix: changing dependency $id with no ivy/pom file!")
|
||||
if (dmd == null)
|
||||
Message.warn(s"$prefix: no publication date from resolver $resolver for $id")
|
||||
0L
|
||||
}
|
||||
}
|
||||
val sortedRevisions = foundRevisions.sortBy { case (rmr, resolver) =>
|
||||
val publicationDate = rmr.getPublicationDate
|
||||
val descriptorDate = rmr.getDescriptor.getPublicationDate
|
||||
Message.warn(s"Sorting results from $rmr, using $publicationDate and $descriptorDate.")
|
||||
// Just issue warning about issues with publication date, and fake one on it for now
|
||||
val chosenPublicationDate = Option(publicationDate).orElse(Option(descriptorDate))
|
||||
chosenPublicationDate match {
|
||||
case Some(date) => date.getTime
|
||||
case None =>
|
||||
val id = rmr.getId
|
||||
val resolvedResource = (resolver.findIvyFileRef(descriptor, data), rmr.getDescriptor)
|
||||
resolvedResource match {
|
||||
case (res: ResolvedResource, dmd: DefaultModuleDescriptor) =>
|
||||
val resolvedPublicationDate = new java.util.Date(res.getLastModified)
|
||||
Message.debug(s"No publication date from resolver $resolver for $id.")
|
||||
Message.debug(s"Setting publication date to: $resolvedPublicationDate.")
|
||||
dmd.setPublicationDate(resolvedPublicationDate)
|
||||
res.getLastModified
|
||||
case (ivf, dmd) =>
|
||||
// The dependency is specified by a direct URL or some sort of non-ivy file
|
||||
if (ivf == null && descriptor.isChanging)
|
||||
Message.warn(s"$prefix: changing dependency $id with no ivy/pom file!")
|
||||
if (dmd == null)
|
||||
Message.warn(s"$prefix: no publication date from resolver $resolver for $id")
|
||||
0L
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val firstHit = sortedRevisions.reverse.headOption
|
||||
|
|
@ -233,7 +234,7 @@ private[sbt] case class SbtChainResolver(
|
|||
val (module, resolver) = h
|
||||
Message.info(
|
||||
s"Out of ${sortedRevisions.size} candidates we found for ${module.getId} in ${resolvers
|
||||
.mkString(" and ")}, we are choosing ${resolver}."
|
||||
.mkString(" and ")}, we are choosing ${resolver}."
|
||||
)
|
||||
})
|
||||
} else {
|
||||
|
|
@ -277,12 +278,11 @@ private[sbt] case class SbtChainResolver(
|
|||
}
|
||||
|
||||
/** Cleans unnecessary module id information not provided by [[IvyRetrieve.toModuleID()]]. */
|
||||
private final val moduleResolvers = updateOptions.moduleResolvers.map {
|
||||
case (key, value) =>
|
||||
val cleanKey = ModuleID(key.organization, key.name, key.revision)
|
||||
.withExtraAttributes(key.extraAttributes)
|
||||
.withBranchName(key.branchName)
|
||||
cleanKey -> value
|
||||
private final val moduleResolvers = updateOptions.moduleResolvers.map { case (key, value) =>
|
||||
val cleanKey = ModuleID(key.organization, key.name, key.revision)
|
||||
.withExtraAttributes(key.extraAttributes)
|
||||
.withBranchName(key.branchName)
|
||||
cleanKey -> value
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -309,7 +309,8 @@ private[sbt] case class SbtChainResolver(
|
|||
def findInterProjectResolver(resolvers: Seq[DependencyResolver]): Option[DependencyResolver] =
|
||||
resolvers.find(_.getName == ProjectResolver.InterProject)
|
||||
|
||||
/** Gets the dependency for a given descriptor with the pertinent resolve data.
|
||||
/**
|
||||
* Gets the dependency for a given descriptor with the pertinent resolve data.
|
||||
*
|
||||
* This is a custom sbt chain operation that produces better error output and deals with
|
||||
* cases that the conventional ivy resolver does not. It accumulates the resolution of
|
||||
|
|
|
|||
|
|
@ -16,11 +16,11 @@ object Credentials {
|
|||
def apply(file: File): Credentials =
|
||||
new FileCredentials(file)
|
||||
|
||||
/** Add the provided credentials to Ivy's credentials cache.*/
|
||||
/** Add the provided credentials to Ivy's credentials cache. */
|
||||
def add(realm: String, host: String, userName: String, passwd: String): Unit =
|
||||
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
|
||||
|
||||
/** Load credentials from the given file into Ivy's credentials cache.*/
|
||||
/** Load credentials from the given file into Ivy's credentials cache. */
|
||||
def add(path: File, log: Logger): Unit =
|
||||
loadCredentials(path) match {
|
||||
case Left(err) => log.warn(err)
|
||||
|
|
|
|||
|
|
@ -79,12 +79,12 @@ final class UpdateOptions private[sbt] (
|
|||
override def equals(o: Any): Boolean = o match {
|
||||
case o: UpdateOptions =>
|
||||
this.circularDependencyLevel == o.circularDependencyLevel &&
|
||||
this.interProjectFirst == o.interProjectFirst &&
|
||||
this.latestSnapshots == o.latestSnapshots &&
|
||||
this.cachedResolution == o.cachedResolution &&
|
||||
this.gigahorse == o.gigahorse &&
|
||||
this.resolverConverter == o.resolverConverter &&
|
||||
this.moduleResolvers == o.moduleResolvers
|
||||
this.interProjectFirst == o.interProjectFirst &&
|
||||
this.latestSnapshots == o.latestSnapshots &&
|
||||
this.cachedResolution == o.cachedResolution &&
|
||||
this.gigahorse == o.gigahorse &&
|
||||
this.resolverConverter == o.resolverConverter &&
|
||||
this.moduleResolvers == o.moduleResolvers
|
||||
case _ => false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ object ComponentManagerTest extends BasicTestSuite {
|
|||
TestLogger { logger =>
|
||||
withTemporaryDirectory { temp =>
|
||||
// The actual classes we'll use at runtime.
|
||||
//val mgr = new ComponentManager(xsbt.boot.Locks, new xsbt.boot.ComponentProvider(temp, true), Some(ivyHome), logger)
|
||||
// val mgr = new ComponentManager(xsbt.boot.Locks, new xsbt.boot.ComponentProvider(temp, true), Some(ivyHome), logger)
|
||||
|
||||
// A stub component manager
|
||||
object provider extends ComponentProvider {
|
||||
|
|
|
|||
|
|
@ -28,12 +28,22 @@ class CredentialsSpec extends AnyFunSuite {
|
|||
|
||||
test("DirectCredentials.toString") {
|
||||
assert(
|
||||
Credentials(realm = null, host = "example.org", userName = "username", passwd = "password").toString ==
|
||||
Credentials(
|
||||
realm = null,
|
||||
host = "example.org",
|
||||
userName = "username",
|
||||
passwd = "password"
|
||||
).toString ==
|
||||
"""DirectCredentials(null, "example.org", "username", ****)"""
|
||||
)
|
||||
|
||||
assert(
|
||||
Credentials(realm = "realm", host = "example.org", userName = "username", passwd = "password").toString ==
|
||||
Credentials(
|
||||
realm = "realm",
|
||||
host = "example.org",
|
||||
userName = "username",
|
||||
passwd = "password"
|
||||
).toString ==
|
||||
"""DirectCredentials("realm", "example.org", "username", ****)"""
|
||||
)
|
||||
}
|
||||
|
|
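The reformatted test above also documents the behaviour it checks: DirectCredentials.toString prints the realm, host and user name but masks the password. A hedged usage sketch based only on the calls visible in this spec (the import path is an assumption):

object CredentialsToStringSketch {
  import sbt.librarymanagement.ivy.Credentials // assumed package for the Credentials used in this spec

  val credentials = Credentials(
    realm = "realm",
    host = "example.org",
    userName = "username",
    passwd = "password"
  )

  // Per the assertion above, this prints: DirectCredentials("realm", "example.org", "username", ****)
  def demo(): Unit = println(credentials.toString)
}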
|
|||
|
|
@ -118,11 +118,17 @@ object EvictionErrorSpec extends BaseIvySpecification {
|
|||
def oldAkkaPvp = List("com.typesafe.akka" % "*" % "pvp")
|
||||
|
||||
lazy val `akkaActor2.1.4` =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
lazy val `akkaActor2.3.0` =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
lazy val `akkaActor2.6.0` =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.6.0").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.6.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
lazy val `scala2.10.4` =
|
||||
ModuleID("org.scala-lang", "scala-library", "2.10.4").withConfigurations(Some("compile"))
|
||||
lazy val `scala2.12.17` =
|
||||
|
|
@ -130,9 +136,13 @@ object EvictionErrorSpec extends BaseIvySpecification {
|
|||
lazy val `scala2.13.3` =
|
||||
ModuleID("org.scala-lang", "scala-library", "2.13.3").withConfigurations(Some("compile"))
|
||||
lazy val `bananaSesame0.4` =
|
||||
ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations(Some("compile")) cross CrossVersion.binary // uses akka-actor 2.1.4
|
||||
ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary // uses akka-actor 2.1.4
|
||||
lazy val `akkaRemote2.3.4` =
|
||||
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations(Some("compile")) cross CrossVersion.binary // uses akka-actor 2.3.4
|
||||
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary // uses akka-actor 2.3.4
|
||||
lazy val `http4s0.21.11` =
|
||||
("org.http4s" %% "http4s-blaze-server" % "0.21.11").withConfigurations(Some("compile"))
|
||||
// https://repo1.maven.org/maven2/org/typelevel/cats-effect_2.13/3.0.0-M4/cats-effect_2.13-3.0.0-M4.pom
|
||||
|
|
|
|||
|
|
@ -20,7 +20,11 @@ object EvictionWarningSpec extends BaseIvySpecification {
|
|||
val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"), overrideScalaVersion = false)
|
||||
val report = ivyUpdate(m)
|
||||
assert(
|
||||
EvictionWarning(m, fullOptions.withWarnScalaVersionEviction(false), report).scalaEvictions.size == 0
|
||||
EvictionWarning(
|
||||
m,
|
||||
fullOptions.withWarnScalaVersionEviction(false),
|
||||
report
|
||||
).scalaEvictions.size == 0
|
||||
)
|
||||
}
|
||||
|
||||
|
|
@ -79,7 +83,11 @@ object EvictionWarningSpec extends BaseIvySpecification {
|
|||
val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"))
|
||||
val report = ivyUpdate(m)
|
||||
assert(
|
||||
EvictionWarning(m, fullOptions.withWarnScalaVersionEviction(false), report).scalaEvictions.size == 0
|
||||
EvictionWarning(
|
||||
m,
|
||||
fullOptions.withWarnScalaVersionEviction(false),
|
||||
report
|
||||
).scalaEvictions.size == 0
|
||||
)
|
||||
}
|
||||
|
||||
|
|
@ -302,11 +310,17 @@ object EvictionWarningSpec extends BaseIvySpecification {
|
|||
}
|
||||
|
||||
def akkaActor214 =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
def akkaActor230 =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
def akkaActor234 =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.4").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
def scala2102 =
|
||||
ModuleID("org.scala-lang", "scala-library", "2.10.2").withConfigurations(Some("compile"))
|
||||
def scala2103 =
|
||||
|
|
@ -317,13 +331,21 @@ object EvictionWarningSpec extends BaseIvySpecification {
|
|||
def commonsIo14 = ModuleID("commons-io", "commons-io", "1.4").withConfigurations(Some("compile"))
|
||||
def commonsIo24 = ModuleID("commons-io", "commons-io", "2.4").withConfigurations(Some("compile"))
|
||||
def bnfparser10 =
|
||||
ModuleID("ca.gobits.bnf", "bnfparser", "1.0").withConfigurations(Some("compile")) // uses commons-io 2.4
|
||||
ModuleID("ca.gobits.bnf", "bnfparser", "1.0").withConfigurations(
|
||||
Some("compile")
|
||||
) // uses commons-io 2.4
|
||||
def unfilteredUploads080 =
|
||||
ModuleID("net.databinder", "unfiltered-uploads", "0.8.0").withConfigurations(Some("compile")) cross CrossVersion.binary // uses commons-io 1.4
|
||||
ModuleID("net.databinder", "unfiltered-uploads", "0.8.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary // uses commons-io 1.4
|
||||
def bananaSesame04 =
|
||||
ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations(Some("compile")) cross CrossVersion.binary // uses akka-actor 2.1.4
|
||||
ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary // uses akka-actor 2.1.4
|
||||
def akkaRemote234 =
|
||||
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations(Some("compile")) cross CrossVersion.binary // uses akka-actor 2.3.4
|
||||
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary // uses akka-actor 2.3.4
|
||||
|
||||
def fullOptions = EvictionWarningOptions.full
|
||||
def javaLibDirectDeps = Vector(commonsIo14, commonsIo24)
|
||||
|
|
|
|||
|
|
@ -24,9 +24,15 @@ object InconsistentDuplicateSpec extends BasicTestSuite {
|
|||
}
|
||||
|
||||
def akkaActor214 =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
def akkaActor230 =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(Some("compile")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
def akkaActor230Test =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(Some("test")) cross CrossVersion.binary
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
|
||||
Some("test")
|
||||
) cross CrossVersion.binary
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ object IvyRepoSpec extends BaseIvySpecification {
|
|||
module(
|
||||
ourModuleID,
|
||||
Vector(dep),
|
||||
None //, UpdateOptions().withCachedResolution(true)
|
||||
None // , UpdateOptions().withCachedResolution(true)
|
||||
)
|
||||
}
|
||||
|
||||
|
|
@ -31,13 +31,11 @@ object IvyRepoSpec extends BaseIvySpecification {
|
|||
val report = ivyUpdate(m)
|
||||
|
||||
import Inside._
|
||||
inside(report.configuration(ConfigRef("compile")).map(_.modules)) {
|
||||
case Some(Seq(mr)) =>
|
||||
inside(mr.artifacts) {
|
||||
case Seq((ar, _)) =>
|
||||
assert(ar.`type` == "jar")
|
||||
assert(ar.extension == "jar")
|
||||
}
|
||||
inside(report.configuration(ConfigRef("compile")).map(_.modules)) { case Some(Seq(mr)) =>
|
||||
inside(mr.artifacts) { case Seq((ar, _)) =>
|
||||
assert(ar.`type` == "jar")
|
||||
assert(ar.extension == "jar")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -90,14 +88,12 @@ object IvyRepoSpec extends BaseIvySpecification {
|
|||
.get
|
||||
|
||||
import Inside._
|
||||
inside(report2.configuration(ConfigRef("compile")).map(_.modules)) {
|
||||
case Some(Seq(mr)) =>
|
||||
inside(mr.artifacts) {
|
||||
case Seq((ar, _)) =>
|
||||
assert(ar.name == "libmodule-source")
|
||||
assert(ar.`type` == "src")
|
||||
assert(ar.extension == "jar")
|
||||
}
|
||||
inside(report2.configuration(ConfigRef("compile")).map(_.modules)) { case Some(Seq(mr)) =>
|
||||
inside(mr.artifacts) { case Seq((ar, _)) =>
|
||||
assert(ar.name == "libmodule-source")
|
||||
assert(ar.`type` == "src")
|
||||
assert(ar.extension == "jar")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -14,20 +14,19 @@ object MergeDescriptorSpec extends BaseIvySpecification {
|
|||
None,
|
||||
UpdateOptions()
|
||||
)
|
||||
m.withModule(log) {
|
||||
case (_, md, _) =>
|
||||
val deps = md.getDependencies
|
||||
assert(deps.size == 1)
|
||||
deps.headOption.getOrElse(sys.error("Dependencies not found")) match {
|
||||
case dd @ MergedDescriptors(_, _) =>
|
||||
val arts = dd.getAllDependencyArtifacts
|
||||
val a0: DependencyArtifactDescriptor = arts.toList(0)
|
||||
val a1: DependencyArtifactDescriptor = arts.toList(1)
|
||||
val configs0 = a0.getConfigurations.toList
|
||||
val configs1 = a1.getConfigurations.toList
|
||||
assert(configs0 == List("compile"))
|
||||
assert(configs1 == List("test"))
|
||||
}
|
||||
m.withModule(log) { case (_, md, _) =>
|
||||
val deps = md.getDependencies
|
||||
assert(deps.size == 1)
|
||||
deps.headOption.getOrElse(sys.error("Dependencies not found")) match {
|
||||
case dd @ MergedDescriptors(_, _) =>
|
||||
val arts = dd.getAllDependencyArtifacts
|
||||
val a0: DependencyArtifactDescriptor = arts.toList(0)
|
||||
val a1: DependencyArtifactDescriptor = arts.toList(1)
|
||||
val configs0 = a0.getConfigurations.toList
|
||||
val configs1 = a1.getConfigurations.toList
|
||||
assert(configs0 == List("compile"))
|
||||
assert(configs1 == List("test"))
|
||||
}
|
||||
}
|
||||
}
|
||||
def guavaTest =
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ import java.nio.file.Paths
|
|||
object SftpRepoSpec extends BaseIvySpecification {
|
||||
val repo: Option[String] = None
|
||||
// val repo: Option[String] = Some("some repo")
|
||||
//a dependency which depends on another in the repo
|
||||
// a dependency which depends on another in the repo
|
||||
def org(repo: String) = s"com.${repo}"
|
||||
def module(org: String) = org % "some-lib" % "version"
|
||||
|
||||
|
|
|
|||
|
|
@ -31,8 +31,8 @@ object HouseRulesPlugin extends AutoPlugin {
|
|||
scalacOptions += "-Ywarn-numeric-widen",
|
||||
scalacOptions += "-Ywarn-value-discard",
|
||||
scalacOptions ++= "-Ywarn-unused-import".ifScala(v => 11 <= v && v <= 12).value.toList
|
||||
) ++ Seq(Compile, Test).flatMap(
|
||||
c => (c / console / scalacOptions) --= Seq("-Ywarn-unused-import", "-Xlint")
|
||||
) ++ Seq(Compile, Test).flatMap(c =>
|
||||
(c / console / scalacOptions) --= Seq("-Ywarn-unused-import", "-Xlint")
|
||||
)
|
||||
|
||||
private def scalaPartV = Def setting (CrossVersion partialVersion scalaVersion.value)
|
||||
|
|
|
|||
|
|
@ -30,7 +30,9 @@ object Util {
|
|||
val f = dir / "xsbt.version.properties"
|
||||
// TODO: replace lastModified() with sbt.io.IO.getModifiedTimeOrZero(), once the build
|
||||
// has been upgraded to a version of sbt that includes that call.
|
||||
if (!f.exists || f.lastModified < lastCompilationTime(analysis) || !containsVersion(f, version)) {
|
||||
if (
|
||||
!f.exists || f.lastModified < lastCompilationTime(analysis) || !containsVersion(f, version)
|
||||
) {
|
||||
s.log.info("Writing version information to " + f + " :\n" + content)
|
||||
IO.write(f, content)
|
||||
}
|
||||
|
|
|
|||