mirror of https://github.com/sbt/sbt.git
commit f43d8d59ad
@@ -5,6 +5,9 @@ os:
- osx
script:
- scripts/travis.sh
sudo: required
services:
- docker
# Uncomment once https://github.com/scoverage/sbt-scoverage/issues/111 is fixed
# after_success:
# - bash <(curl -s https://codecov.io/bash)

@@ -13,6 +16,9 @@ matrix:
- env: SCALA_VERSION=2.12.1 PUBLISH=1
os: linux
jdk: oraclejdk8
sudo: required
services:
- docker
- env: SCALA_VERSION=2.11.11 PUBLISH=1
os: linux
jdk: oraclejdk8
build.sbt
@@ -69,6 +69,19 @@ lazy val tests = crossProject
lazy val testsJvm = tests.jvm
lazy val testsJs = tests.js

lazy val `proxy-tests` = project
.dependsOn(testsJvm % "test->test")
.configs(Integration)
.settings(
shared,
dontPublish,
hasITs,
coursierPrefix,
libs += Deps.scalaAsync.value,
utest,
sharedTestResources
)

lazy val cache = project
.dependsOn(coreJvm)
.settings(

@@ -242,6 +255,7 @@ lazy val jvm = project
.aggregate(
coreJvm,
testsJvm,
`proxy-tests`,
cache,
bootstrap,
extra,

@@ -274,6 +288,21 @@ lazy val js = project
moduleName := "coursier-js"
)

// run sbt-plugins/publishLocal to publish all that necessary for plugins
lazy val `sbt-plugins` = project
.dummy
.aggregate(
coreJvm,
cache,
extra,
`sbt-coursier`,
`sbt-shading`
)
.settings(
shared,
dontPublish
)

lazy val coursier = project
.in(root)
.aggregate(

@@ -282,6 +311,7 @@ lazy val coursier = project
`fetch-js`,
testsJvm,
testsJs,
`proxy-tests`,
cache,
bootstrap,
extra,
@@ -19,6 +19,7 @@ import scalaz.Scalaz.ToEitherOps
import scalaz.concurrent.{ Task, Strategy }

import java.io.{ Serializable => _, _ }
import java.nio.charset.Charset

import scala.concurrent.duration.{ Duration, DurationInt }
import scala.util.Try

@@ -30,6 +31,9 @@ trait AuthenticatedURLConnection extends URLConnection {

object Cache {

// java.nio.charset.StandardCharsets.UTF_8 not available in Java 6
private val UTF_8 = Charset.forName("UTF-8")

// Check SHA-1 if available, else be fine with no checksum
val defaultChecksums = Seq(Some("SHA-1"), None)

@@ -337,7 +341,7 @@ object Cache {
).r

private def basicAuthenticationEncode(user: String, password: String): String =
(user + ":" + password).getBytes("UTF-8").toBase64
(user + ":" + password).getBytes(UTF_8).toBase64

/**
* Returns a `java.net.URL` for `s`, possibly using the custom protocol handlers found under the

@@ -676,7 +680,7 @@ object Cache {
Task {
if (referenceFileExists) {
if (!errFile0.exists())
FileUtil.write(errFile0, "".getBytes("UTF-8"))
FileUtil.write(errFile0, "".getBytes(UTF_8))
}

().right[FileError]

@@ -834,6 +838,18 @@ object Cache {
parseChecksumLine(lines) orElse parseChecksumAlternative(lines)
}

def parseRawChecksum(content: Array[Byte]): Option[BigInteger] =
if (content.length == 16 || content.length == 20)
Some(new BigInteger(content))
else {
val s = new String(content, UTF_8)
val lines = s
.lines
.toVector

parseChecksumLine(lines) orElse parseChecksumAlternative(lines)
}

// matches md5 or sha1
private val checksumPattern = Pattern.compile("^[0-9a-f]{32}([0-9a-f]{8})?")
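The @@ -834 hunk adds parseRawChecksum, which accepts either the raw digest bytes (16 for MD5, 20 for SHA-1) or the usual hex text that parseChecksum already handled; the binarySha1/binaryMd5 tests further down exercise it against the new empty.sha1/empty.md5 resources. A standalone sketch of the same idea, with illustrative helper names that are not coursier API:

    import java.math.BigInteger
    import java.security.MessageDigest

    // A checksum file may hold the raw digest bytes or a hex string (possibly followed by a file name).
    def checksumValue(content: Array[Byte]): Option[BigInteger] =
      if (content.length == 16 || content.length == 20)
        Some(new BigInteger(1, content)) // raw binary digest, read as an unsigned value
      else {
        val first = new String(content, "UTF-8").trim.split("\\s+").headOption.getOrElse("")
        if (first.matches("[0-9a-fA-F]{32}([0-9a-fA-F]{8})?")) Some(new BigInteger(first, 16))
        else None
      }

    // Example check against a freshly computed SHA-1
    def sha1Matches(data: Array[Byte], checksumFile: Array[Byte]): Boolean = {
      val computed = new BigInteger(1, MessageDigest.getInstance("SHA-1").digest(data))
      checksumValue(checksumFile).contains(computed)
    }

Note that the diff itself uses new BigInteger(content), a signed read; the sketch reads the bytes unsigned so its two helpers agree with each other.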
@@ -866,9 +882,7 @@ object Cache {
val sumFile = localFile(sumUrl, cache, artifact.authentication.map(_.user))

Task {
val sumOpt = parseChecksum(
new String(FileUtil.readAllBytes(sumFile), "UTF-8")
)
val sumOpt = parseRawChecksum(FileUtil.readAllBytes(sumFile))

sumOpt match {
case None =>

@@ -979,7 +993,7 @@ object Cache {
def notFound(f: File) = Left(s"${f.getCanonicalPath} not found")

def read(f: File) =
try Right(new String(FileUtil.readAllBytes(f), "UTF-8"))
try Right(new String(FileUtil.readAllBytes(f), UTF_8))
catch {
case NonFatal(e) =>
Left(s"Could not read (file:${f.getCanonicalPath}): ${e.getMessage}")
@@ -1,6 +1,7 @@
package coursier

import java.io._
import java.nio.charset.Charset

import scala.language.implicitConversions

@@ -23,6 +24,8 @@ object Platform {
buffer.toByteArray
}

private lazy val UTF_8 = Charset.forName("UTF-8")

def readFully(is: => InputStream) =
Task {
\/.fromTryCatchNonFatal {

@@ -31,7 +34,7 @@ object Platform {
try readFullySync(is0)
finally is0.close()

new String(b, "UTF-8")
new String(b, UTF_8)
} .leftMap{
case e: java.io.FileNotFoundException if e.getMessage != null =>
s"Not found: ${e.getMessage}"
@@ -675,16 +675,47 @@ class Helper(
val task = Task.gatherUnordered(tasks)

val results = task.unsafePerformSync
val errors = results.collect{case (artifact, -\/(err)) => artifact -> err }
val files0 = results.collect{case (artifact, \/-(f)) => f }

val (ignoredErrors, errors) = results
.collect {
case (artifact, -\/(err)) =>
artifact -> err
}
.partition {
case (a, err) =>
val notFound = err match {
case _: FileError.NotFound => true
case _ => false
}
a.isOptional && notFound
}

val files0 = results.collect {
case (artifact, \/-(f)) =>
f
}

logger.foreach(_.stop())

if (verbosityLevel >= 2)
errPrintln(
" Ignoring error(s):\n" +
ignoredErrors
.map {
case (artifact, error) =>
s"${artifact.url}: $error"
}
.mkString("\n")
)

exitIf(errors.nonEmpty) {
s" Error:\n" +
errors.map { case (artifact, error) =>
s"${artifact.url}: $error"
}.mkString("\n")
errors
.map {
case (artifact, error) =>
s"${artifact.url}: $error"
}
.mkString("\n")
}

files0
@@ -223,9 +223,7 @@ object Assembly {
throw new Exception(s"SHA-1 file not found for ${a.url}")
}

val sumOpt = Cache.parseChecksum(
new String(FileUtil.readAllBytes(f), "UTF-8")
)
val sumOpt = Cache.parseRawChecksum(FileUtil.readAllBytes(f))

sumOpt match {
case Some(sum) =>
@@ -198,9 +198,15 @@ final case class Artifact(
) {
def `type`: String = attributes.`type`
def classifier: String = attributes.classifier

// TODO make that a proper field after 1.0 (instead of the hack via extra)
def isOptional: Boolean = extra.contains(Artifact.optionalKey)
}

object Artifact {

private[coursier] val optionalKey = s"$$optional"

trait Source {
def artifacts(
dependency: Dependency,
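Artifact now carries an isOptional flag, encoded until 1.0 as a marker key in its extra map rather than a real field. A small sketch of how such a marker is set and read, assuming code inside the coursier package (optionalKey is private[coursier]); the markOptional helper and dummy value below are illustrative, mirroring MavenSource.makeOptional and dummyArtifact further down in this diff:

    // Hypothetical helper: flag an artifact as optional via the extra-map trick above.
    val dummy = Artifact("", Map(), Map(), Attributes("", ""), changing = false, None)

    def markOptional(a: Artifact): Artifact =
      a.copy(extra = a.extra + (Artifact.optionalKey -> dummy))

    // markOptional(someArtifact).isOptional  // true
    // someArtifact.isOptional                // false unless something added the marker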
@@ -19,6 +19,13 @@ final case class MavenSource(
overrideClassifiers: Option[Seq[String]]
): Seq[Artifact] = {

val packagingTpeMap = project.packagingOpt
.filter(_ != Pom.relocatedPackaging)
.map { packaging =>
(MavenSource.typeDefaultClassifier(packaging), MavenSource.typeExtension(packaging)) -> packaging
}
.toMap

def artifactOf(publication: Publication) = {

val versioning = project

@@ -60,38 +67,57 @@ final case class MavenSource(
)
}

lazy val defaultPublication = {

val type0 = if (dependency.attributes.`type`.isEmpty) "jar" else dependency.attributes.`type`

val ext = MavenSource.typeExtension(type0)

val classifier =
if (dependency.attributes.classifier.isEmpty)
MavenSource.typeDefaultClassifier(type0)
else
dependency.attributes.classifier

val tpe = packagingTpeMap.getOrElse(
(classifier, ext),
MavenSource.classifierExtensionDefaultTypeOpt(classifier, ext).getOrElse(ext)
)

Publication(
dependency.module.name,
tpe,
ext,
classifier
)
}

overrideClassifiers match {
case Some(classifiers) =>

classifiers.map { classifier =>
Publication(
dependency.module.name,
"jar",
"jar",
classifier
)
}.map(artifactWithExtra)
classifiers
.map { classifier =>
if (classifier == dependency.attributes.classifier)
defaultPublication
else {
val ext = "jar"
val tpe = packagingTpeMap.getOrElse(
(classifier, ext),
MavenSource.classifierExtensionDefaultTypeOpt(classifier, ext).getOrElse(ext)
)

Publication(
dependency.module.name,
tpe,
ext,
classifier
)
}
}
.map(artifactWithExtra)

case None =>

val type0 = if (dependency.attributes.`type`.isEmpty) "jar" else dependency.attributes.`type`

val extension = MavenSource.typeExtension(type0)

val classifier =
if (dependency.attributes.classifier.isEmpty)
MavenSource.typeDefaultClassifier(type0)
else
dependency.attributes.classifier

Seq(
Publication(
dependency.module.name,
type0,
extension,
classifier
)
).map(artifactWithExtra)
Seq(defaultPublication).map(artifactWithExtra)
}
}

@@ -107,12 +133,14 @@ final case class MavenSource(
publication: Publication,
extra: Map[String, EnrichedPublication]
) {
def artifact: Artifact = {
def artifact: Artifact =
artifact(publication.`type`)
def artifact(versioningType: String): Artifact = {

val versioning = project
.snapshotVersioning
.flatMap(versioning =>
mavenVersioning(versioning, publication.classifier, publication.`type`)
mavenVersioning(versioning, publication.classifier, versioningType)
)

val path = dependency.module.organization.split('.').toSeq ++ Seq(

@@ -123,7 +151,7 @@ final case class MavenSource(

val changing0 = changing.getOrElse(project.actualVersion.contains("-SNAPSHOT"))

val extra0 = extra.mapValues(_.artifact).iterator.toMap
val extra0 = extra.mapValues(_.artifact(versioningType)).iterator.toMap

Artifact(
root + path.mkString("/"),

@@ -211,9 +239,10 @@ final case class MavenSource(
else if (dependency.attributes.`type`.nonEmpty)
enrichedPublications.collect {
case p
if p.publication.`type` == dependency.attributes.`type` ||
(p.publication.ext == dependency.attributes.`type` && project.packagingOpt.toSeq.contains(p.publication.`type`)) // wow
=>
if p.publication.classifier.isEmpty && (
p.publication.`type` == dependency.attributes.`type` ||
(p.publication.ext == dependency.attributes.`type` && project.packagingOpt.toSeq.contains(p.publication.`type`)) // wow
) =>
p.artifact
}
else

@@ -226,6 +255,8 @@ final case class MavenSource(
res.map(withMetadataExtra)
}

private val dummyArtifact = Artifact("", Map(), Map(), Attributes("", ""), changing = false, None)

def artifacts(
dependency: Dependency,
project: Project,

@@ -233,10 +264,53 @@ final case class MavenSource(
): Seq[Artifact] =
if (project.packagingOpt.toSeq.contains(Pom.relocatedPackaging))
Nil
else if (project.publications.isEmpty)
artifactsUnknownPublications(dependency, project, overrideClassifiers)
else
artifactsKnownPublications(dependency, project, overrideClassifiers)
else {

def makeOptional(a: Artifact): Artifact =
a.copy(
extra = a.extra.mapValues(makeOptional).iterator.toMap + (Artifact.optionalKey -> dummyArtifact)
)

def merge(a: Artifact, other: Artifact): Artifact = {

assert(a.url == other.url, s"Merging artifacts with different URLs (${a.url}, ${other.url})")

val extra =
a.extra.map {
case (k, v) =>
k -> other.extra.get(k).fold(v)(merge(v, _))
} ++
other.extra
.filterKeys(k => !a.extra.contains(k))

a.copy(
checksumUrls = other.checksumUrls ++ a.checksumUrls,
extra = extra
)
}

val defaultPublications = artifactsUnknownPublications(dependency, project, overrideClassifiers)

if (project.publications.isEmpty)
defaultPublications
else {
val listedPublications = artifactsKnownPublications(dependency, project, overrideClassifiers)
val listedUrls = listedPublications.map(_.url).toSet
val defaultPublications0 = defaultPublications.map(makeOptional)
val defaultPublicationsMap = defaultPublications0
.map(a => a.url -> a)
.toMap
val listedPublications0 = listedPublications.map { a =>
defaultPublicationsMap
.get(a.url)
.fold(a)(merge(a, _))
}
val extraPublications = defaultPublications0
.filter(a => !listedUrls(a.url))

listedPublications0 ++ extraPublications
}
}
}

object MavenSource {
@@ -0,0 +1,9 @@
package coursier.test

object CentralNexus2ProxyTests extends CentralTests {
override def centralBase = "http://localhost:9081/nexus/content/repositories/central"
}

object CentralNexus3ProxyTests extends CentralTests {
override def centralBase = "http://localhost:9082/repository/maven-central"
}
@@ -28,6 +28,7 @@ object CoursierPlugin extends AutoPlugin {
val coursierFallbackDependencies = Keys.coursierFallbackDependencies
val coursierCache = Keys.coursierCache
val coursierProject = Keys.coursierProject
val coursierConfigGraphs = Keys.coursierConfigGraphs
val coursierInterProjectDependencies = Keys.coursierInterProjectDependencies
val coursierPublications = Keys.coursierPublications
val coursierSbtClassifiersModule = Keys.coursierSbtClassifiersModule

@@ -35,7 +36,11 @@ object CoursierPlugin extends AutoPlugin {
val coursierConfigurations = Keys.coursierConfigurations

val coursierParentProjectCache = Keys.coursierParentProjectCache
val coursierResolution = Keys.coursierResolution
val coursierResolutions = Keys.coursierResolutions

@deprecated("Use coursierResolutions instead", "1.0.0-RC4")
val coursierResolution = Keys.actualCoursierResolution

val coursierSbtClassifiersResolution = Keys.coursierSbtClassifiersResolution

val coursierDependencyTree = Keys.coursierDependencyTree

@@ -117,15 +122,30 @@ object CoursierPlugin extends AutoPlugin {
ignoreArtifactErrors = true
).value,
coursierProject := Tasks.coursierProjectTask.value,
coursierConfigGraphs := Tasks.ivyGraphsTask.value,
coursierInterProjectDependencies := Tasks.coursierInterProjectDependenciesTask.value,
coursierPublications := Tasks.coursierPublicationsTask(packageConfigs: _*).value,
coursierSbtClassifiersModule := classifiersModule.in(updateSbtClassifiers).value,
coursierConfigurations := Tasks.coursierConfigurationsTask(None).value,
coursierParentProjectCache := Tasks.parentProjectCacheTask.value,
coursierResolution := Tasks.resolutionTask().value,
coursierSbtClassifiersResolution := Tasks.resolutionTask(
coursierResolutions := Tasks.resolutionsTask().value,
Keys.actualCoursierResolution := {

val config = Compile.name

coursierResolutions
.value
.collectFirst {
case (configs, res) if (configs(config)) =>
res
}
.getOrElse {
sys.error(s"Resolution for configuration $config not found")
}
},
coursierSbtClassifiersResolution := Tasks.resolutionsTask(
sbtClassifiers = true
).value,
).value.head._2,
ivyConfigurations := {
val confs = ivyConfigurations.value
val names = confs.map(_.name).toSet
@@ -34,6 +34,7 @@ object Keys {
val coursierFallbackDependencies = TaskKey[Seq[(Module, String, URL, Boolean)]]("coursier-fallback-dependencies")

val coursierProject = TaskKey[Project]("coursier-project")
val coursierConfigGraphs = TaskKey[Seq[Set[String]]]("coursier-config-graphs")
val coursierInterProjectDependencies = TaskKey[Seq[Project]]("coursier-inter-project-dependencies", "Projects the current project depends on, possibly transitively")
val coursierPublications = TaskKey[Seq[(String, Publication)]]("coursier-publications")

@@ -43,7 +44,12 @@ object Keys {

val coursierParentProjectCache = TaskKey[Map[Seq[Resolver], Seq[ProjectCache]]]("coursier-parent-project-cache")
val coursierResolution = TaskKey[Resolution]("coursier-resolution")
val coursierResolutions = TaskKey[Map[Set[String], Resolution]]("coursier-resolutions")

private[coursier] val actualCoursierResolution = TaskKey[Resolution]("coursier-resolution")

@deprecated("Use coursierResolutions instead", "1.0.0-RC4")
val coursierResolution = actualCoursierResolution
val coursierSbtClassifiersResolution = TaskKey[Resolution]("coursier-sbt-classifiers-resolution")

val coursierDependencyTree = TaskKey[Unit](
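coursierResolution (a single Resolution) is deprecated here in favour of coursierResolutions, a Map keyed by the set of ivy configurations each resolution covers. A build that used to read coursierResolution directly can pick the compile-side resolution out of the new key, along the lines of the actualCoursierResolution wiring in CoursierPlugin above. A sketch for a build.sbt, not an official API:

    // Assumes sbt-coursier 1.0.0-RC4 or later; mirrors the plugin's own collectFirst pattern.
    val compileResolution = Def.task {
      coursierResolutions.value
        .collectFirst { case (configs, res) if configs(Compile.name) => res }
        .getOrElse(sys.error("No resolution covering the compile configuration"))
    }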
@@ -9,7 +9,7 @@ import coursier.extra.Typelevel
import coursier.ivy.{IvyRepository, PropertiesPattern}
import coursier.Keys._
import coursier.Structure._
import coursier.util.{Config, Print}
import coursier.util.Print
import sbt.{Classpaths, Def, Resolver, UpdateReport}
import sbt.Keys._

@@ -344,18 +344,18 @@ object Tasks {
project: Project,
repositories: Seq[Repository],
userEnabledProfiles: Set[String],
resolution: Resolution,
resolution: Map[Set[String], Resolution],
sbtClassifiers: Boolean
)

private final case class ReportCacheKey(
project: Project,
resolution: Resolution,
resolution: Map[Set[String], Resolution],
withClassifiers: Boolean,
sbtClassifiers: Boolean
)

private val resolutionsCache = new mutable.HashMap[ResolutionCacheKey, Resolution]
private val resolutionsCache = new mutable.HashMap[ResolutionCacheKey, Map[Set[String], Resolution]]
// these may actually not need to be cached any more, now that the resolutions
// are cached
private val reportsCache = new mutable.HashMap[ReportCacheKey, UpdateReport]

@@ -423,28 +423,76 @@ object Tasks {
val t =
for {
m <- coursierRecursiveResolvers.forAllProjects(state, projects)
n <- coursierResolution.forAllProjects(state, m.keys.toSeq)
n <- coursierResolutions.forAllProjects(state, m.keys.toSeq)
} yield
n.foldLeft(Map.empty[Seq[Resolver], Seq[ProjectCache]]) {
case (caches, (ref, resolution)) =>
m.get(ref).fold(caches) { resolvers =>
caches.updated(
resolvers,
resolution.projectCache +: caches.getOrElse(resolvers, Nil)
)
case (caches, (ref, resolutions)) =>
val mainResOpt = resolutions.collectFirst {
case (k, v) if k("compile") => v
}

val r = for {
resolvers <- m.get(ref)
resolution <- mainResOpt
} yield
caches.updated(resolvers, resolution.projectCache +: caches.getOrElse(resolvers, Seq.empty))

r.getOrElse(caches)
}

Def.task(t.value)
}

def ivyGraphsTask = Def.task {

// probably bad complexity, but that shouldn't matter given the size of the graphs involved...

val p = coursierProject.value

final class Wrapper(val set: mutable.HashSet[String]) {
def ++=(other: Wrapper): this.type = {
set ++= other.set
this
}
}

val sets =
new mutable.HashMap[String, Wrapper] ++= p.configurations.map {
case (k, l) =>
val s = new mutable.HashSet[String]()
s ++= l
s += k
k -> new Wrapper(s)
}

for (k <- p.configurations.keys) {
val s = sets(k)

var foundNew = true
while (foundNew) {
foundNew = false
for (other <- s.set.toVector) {
val otherS = sets(other)
if (!otherS.eq(s)) {
s ++= otherS
sets += other -> s
foundNew = true
}
}
}
}

sets.values.toVector.distinct.map(_.set.toSet)
}

private val noOptionalFilter: Option[Dependency => Boolean] = Some(dep => !dep.optional)
private val typelevelOrgSwap: Option[Dependency => Dependency] = Some(Typelevel.swap(_))

def resolutionTask(
def resolutionsTask(
sbtClassifiers: Boolean = false
): Def.Initialize[sbt.Task[coursier.Resolution]] = Def.task {
): Def.Initialize[sbt.Task[Map[Set[String], coursier.Resolution]]] = Def.task {

// let's update only one module at once, for a better output
// Downloads are already parallel, no need to parallelize further anyway

@@ -454,7 +502,7 @@ object Tasks {

lazy val projectName = thisProjectRef.value.project

val (currentProject, fallbackDependencies) =
val (currentProject, fallbackDependencies, configGraphs) =
if (sbtClassifiers) {
val sv = scalaVersion.value
val sbv = scalaBinaryVersion.value

@@ -473,12 +521,12 @@ object Tasks {
sbv
)

(proj, fallbackDeps)
(proj, fallbackDeps, Vector(cm.configurations.map(_.name).toSet))
} else {
val proj = coursierProject.value
val publications = coursierPublications.value
val fallbackDeps = coursierFallbackDependencies.value
(proj.copy(publications = publications), fallbackDeps)
(proj.copy(publications = publications), fallbackDeps, coursierConfigGraphs.value)
}

val interProjectDependencies = coursierInterProjectDependencies.value

@@ -520,23 +568,6 @@ object Tasks {

val typelevel = scalaOrganization.value == Typelevel.typelevelOrg

val startRes = Resolution(
currentProject.dependencies.map(_._2).toSet,
filter = noOptionalFilter,
userActivations =
if (userEnabledProfiles.isEmpty)
None
else
Some(userEnabledProfiles.iterator.map(_ -> true).toMap),
forceVersions =
// order matters here
userForceVersions ++
forcedScalaModules(so, sv) ++
interProjectDependencies.map(_.moduleVersion),
projectCache = parentProjectCache,
mapDependencies = if (typelevel) typelevelOrgSwap else None
)

if (verbosityLevel >= 2) {
log.info("InterProjectRepository")
for (p <- interProjectDependencies)

@@ -653,7 +684,31 @@ object Tasks {
}.map(withAuthenticationByHost(_, authenticationByHost)) ++
fallbackDependenciesRepositories

def resolution = {
def startRes(configs: Set[String]) = Resolution(
currentProject
.dependencies
.collect {
case (config, dep) if configs(config) =>
dep
}
.toSet,
filter = noOptionalFilter,
userActivations =
if (userEnabledProfiles.isEmpty)
None
else
Some(userEnabledProfiles.iterator.map(_ -> true).toMap),
forceVersions =
// order matters here
userForceVersions ++
(if (configs("compile") || configs("scala-tool")) forcedScalaModules(so, sv) else Map()) ++
interProjectDependencies.map(_.moduleVersion),
projectCache = parentProjectCache,
mapDependencies = if (typelevel && (configs("compile") || configs("scala-tool"))) typelevelOrgSwap else None
)

def resolution(startRes: Resolution) = {

var pool: ExecutorService = null
var resLogger: TermDisplay = null

@@ -763,15 +818,20 @@ object Tasks {
res
}

val allStartRes = configGraphs.map(configs => configs -> startRes(configs)).toMap

resolutionsCache.getOrElseUpdate(
ResolutionCacheKey(
currentProject,
repositories,
userEnabledProfiles,
startRes.copy(filter = None),
allStartRes,
sbtClassifiers
),
resolution
allStartRes.map {
case (config, startRes) =>
config -> resolution(startRes)
}
)
}
}

@@ -800,12 +860,11 @@ object Tasks {

val verbosityLevel = coursierVerbosity.value

val res = {
val res =
if (withClassifiers && sbtClassifiers)
coursierSbtClassifiersResolution
Seq(coursierSbtClassifiersResolution.value)
else
coursierResolution
}.value
coursierResolutions.value.values.toVector

val classifiers =
if (withClassifiers)

@@ -820,8 +879,8 @@ object Tasks {

val allArtifacts =
classifiers match {
case None => res.artifacts
case Some(cl) => res.classifiersArtifacts(cl)
case None => res.flatMap(_.artifacts)
case Some(cl) => res.flatMap(_.classifiersArtifacts(cl))
}

var pool: ExecutorService = null

@@ -833,7 +892,7 @@ object Tasks {
pool = Executors.newFixedThreadPool(parallelDownloads, Strategy.DefaultDaemonThreadFactory)
artifactsLogger = createLogger()

val artifactFileOrErrorTasks = allArtifacts.toVector.map { a =>
val artifactFileOrErrorTasks = allArtifacts.toVector.distinct.map { a =>
def f(p: CachePolicy) =
Cache.file(
a,

@@ -916,6 +975,48 @@ object Tasks {
res
}

// Move back to coursier.util (in core module) after 1.0?
private def allDependenciesByConfig(
res: Map[String, Resolution],
depsByConfig: Map[String, Set[Dependency]],
configs: Map[String, Set[String]]
): Map[String, Set[Dependency]] = {

val allDepsByConfig = depsByConfig.map {
case (config, deps) =>
config -> res(config).subset(deps).minDependencies
}

val filteredAllDepsByConfig = allDepsByConfig.map {
case (config, allDeps) =>
val allExtendedConfigs = configs.getOrElse(config, Set.empty) - config
val inherited = allExtendedConfigs
.flatMap(allDepsByConfig.getOrElse(_, Set.empty))

config -> (allDeps -- inherited)
}

filteredAllDepsByConfig
}

// Move back to coursier.util (in core module) after 1.0?
private def dependenciesWithConfig(
res: Map[String, Resolution],
depsByConfig: Map[String, Set[Dependency]],
configs: Map[String, Set[String]]
): Set[Dependency] =
allDependenciesByConfig(res, depsByConfig, configs)
.flatMap {
case (config, deps) =>
deps.map(dep => dep.copy(configuration = s"$config->${dep.configuration}"))
}
.groupBy(_.copy(configuration = ""))
.map {
case (dep, l) =>
dep.copy(configuration = l.map(_.configuration).mkString(";"))
}
.toSet

def updateTask(
shadedConfigOpt: Option[(String, String)],
withClassifiers: Boolean,
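The new dependenciesWithConfig helper flattens the per-configuration dependency sets into a single set, rewriting each dependency's configuration as "config->originalConfig" and joining duplicates with ";". An illustrative walk-through with hypothetical values (ignoring the transitive dependencies that subset/minDependencies would also pull in):

    // depsByConfig = Map(
    //   "compile" -> Set(catsCore),   // catsCore has configuration "default"
    //   "test"    -> Set(catsCore)
    // )
    // configs = Map("compile" -> Set("compile"), "test" -> Set("test", "compile"))
    //
    // allDependenciesByConfig drops catsCore from "test" because "test" extends "compile",
    // so dependenciesWithConfig returns one dependency with configuration "compile->default".
    // If "test" did not extend "compile", the two entries would instead be grouped and the
    // configuration would read "compile->default;test->default".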
@@ -969,12 +1070,17 @@ object Tasks {

val verbosityLevel = coursierVerbosity.value

val res = {
if (withClassifiers && sbtClassifiers)
coursierSbtClassifiersResolution
else
coursierResolution
}.value
val res =
if (withClassifiers && sbtClassifiers) {
val r = coursierSbtClassifiersResolution.value
Map(cm.configurations.map(c => c.name).toSet -> r)
} else
coursierResolutions.value

val configResolutions = res.flatMap {
case (configs, r) =>
configs.iterator.map((_, r))
}

def report = {

@@ -999,13 +1105,13 @@ object Tasks {
}

if (verbosityLevel >= 2) {
val finalDeps = Config.dependenciesWithConfig(
res,
val finalDeps = dependenciesWithConfig(
configResolutions,
depsByConfig.map { case (k, l) => k -> l.toSet },
configs
)

val projCache = res.projectCache.mapValues { case (_, p) => p }
val projCache = res.values.foldLeft(Map.empty[ModuleVersion, Project])(_ ++ _.projectCache.mapValues(_._2))
val repr = Print.dependenciesUnknownConfigs(finalDeps.toVector, projCache)
log.info(repr.split('\n').map(" " + _).mkString("\n"))
}

@@ -1036,13 +1142,23 @@ object Tasks {
artifact -> file
}

val artifactErrors = artifactFilesOrErrors0.toVector.collect {
case (_, -\/(err)) =>
err
}
val (ignoredArtifactErrors, artifactErrors) = artifactFilesOrErrors0
.toVector
.collect {
case (a, -\/(err)) =>
a -> err
}
.partition {
case (a, err) =>
val notFound = err match {
case _: FileError.NotFound => true
case _ => false
}
a.isOptional && notFound
}

if (artifactErrors.nonEmpty) {
val error = ResolutionError.DownloadErrors(artifactErrors)
val error = ResolutionError.DownloadErrors(artifactErrors.map(_._2))

if (ignoreArtifactErrors)
log.warn(error.description(verbosityLevel >= 1))

@@ -1050,7 +1166,7 @@ object Tasks {
error.throwException()
}

// can be non empty only if ignoreArtifactErrors is true
// can be non empty only if ignoreArtifactErrors is true or some optional artifacts are not found
val erroredArtifacts = artifactFilesOrErrors0.collect {
case (artifact, -\/(_)) =>
artifact

@@ -1058,7 +1174,7 @@ object Tasks {

ToSbt.updateReport(
depsByConfig,
res,
configResolutions,
configs,
classifiers,
artifactFileOpt(

@@ -1112,37 +1228,43 @@ object Tasks {
proj.copy(publications = publications)
}

val res = {
if (sbtClassifiers)
coursierSbtClassifiersResolution
else
coursierResolution
}.value
val resolutions =
if (sbtClassifiers) {
val r = coursierSbtClassifiersResolution.value
Map(currentProject.configurations.keySet -> r)
} else
coursierResolutions.value

val config = configuration.value.name
val configs = coursierConfigurations.value

val includedConfigs = configs.getOrElse(config, Set.empty) + config

val dependencies0 = currentProject.dependencies.collect {
case (cfg, dep) if includedConfigs(cfg) => dep
}.sortBy { dep =>
(dep.module.organization, dep.module.name, dep.version)
}
for {
(subGraphConfigs, res) <- resolutions
if subGraphConfigs.exists(includedConfigs)
} {

val subRes = res.subset(dependencies0.toSet)
val dependencies0 = currentProject.dependencies.collect {
case (cfg, dep) if includedConfigs(cfg) && subGraphConfigs(cfg) => dep
}.sortBy { dep =>
(dep.module.organization, dep.module.name, dep.version)
}

// use sbt logging?
println(
projectName + "\n" +
Print.dependencyTree(
dependencies0,
subRes,
printExclusions = true,
inverse,
colors = !sys.props.get("sbt.log.noformat").toSeq.contains("true")
val subRes = res.subset(dependencies0.toSet)

// use sbt logging?
println(
s"$projectName (configurations ${subGraphConfigs.toVector.sorted.mkString(", ")})" + "\n" +
Print.dependencyTree(
dependencies0,
subRes,
printExclusions = true,
inverse,
colors = !sys.props.get("sbt.log.noformat").toSeq.contains("true")
)
)
)
}
}

}
@@ -180,7 +180,7 @@ object ToSbt {

def updateReport(
configDependencies: Map[String, Seq[Dependency]],
resolution: Resolution,
resolutions: Map[String, Resolution],
configs: Map[String, Set[String]],
classifiersOpt: Option[Seq[String]],
artifactFileOpt: (Module, String, Artifact) => Option[File],

@@ -190,7 +190,7 @@ object ToSbt {
val configReports = configs.map {
case (config, extends0) =>
val configDeps = extends0.flatMap(configDependencies.getOrElse(_, Nil))
val subRes = resolution.subset(configDeps)
val subRes = resolutions(config).subset(configDeps)

val reports = ToSbt.moduleReports(subRes, classifiersOpt, artifactFileOpt, keepPomArtifact)
@@ -0,0 +1,2 @@
scalaVersion := "2.12.2"
enablePlugins(ScalafmtPlugin)
@@ -0,0 +1,13 @@
{
val pluginVersion = sys.props.getOrElse(
"plugin.version",
throw new RuntimeException(
"""|The system property 'plugin.version' is not defined.
|Specify this property using the scriptedLaunchOpts -D.""".stripMargin
)
)

addSbtPlugin("io.get-coursier" % "sbt-coursier" % pluginVersion)
}

addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "0.3")
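This project/plugins.sbt for the scripted test expects the plugin.version system property, so the harness running these tests has to pass it through. The usual wiring in the plugin's own build looks roughly like the following (an assumption about the standard sbt-scripted setup, not part of this diff):

    // In the build definition that runs the scripted tests
    scriptedLaunchOpts := scriptedLaunchOpts.value ++ Seq("-Dplugin.version=" + version.value)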
@@ -0,0 +1,6 @@
import java.io.File
import java.nio.file.Files

object Main extends App {
Files.write(new File("output").toPath, "OK".getBytes("UTF-8"))
}

@@ -0,0 +1 @@
> scalafmt
@@ -1 +1 @@
> coursierResolution
> coursierResolutions

@@ -1 +1 @@
> coursierResolution
> coursierResolutions

@@ -1 +1 @@
> coursierResolution
> coursierResolutions
@@ -102,7 +102,16 @@ object ShadingPlugin extends AutoPlugin {
toShadeJars := {
coursier.Shading.toShadeJars(
coursierProject.in(baseSbtConfiguration).value,
coursierResolution.in(baseSbtConfiguration).value,
coursierResolutions
.in(baseSbtConfiguration)
.value
.collectFirst {
case (configs, res) if configs(baseDependencyConfiguration) =>
res
}
.getOrElse {
sys.error(s"Resolution for configuration $baseDependencyConfiguration not found")
},
coursierConfigurations.in(baseSbtConfiguration).value,
Keys.coursierArtifacts.in(baseSbtConfiguration).value,
classpathTypes.value,
@@ -2,7 +2,10 @@
enablePlugins(coursier.ShadingPlugin)
shadingNamespace := "test.shaded"

libraryDependencies += "io.argonaut" %% "argonaut" % "6.2-RC2" % "shaded"
libraryDependencies ++= Seq(
"io.argonaut" %% "argonaut" % "6.2-RC2" % "shaded",
"org.scala-lang" % "scala-reflect" % scalaVersion.value // not shading that one
)

scalaVersion := "2.11.8"
organization := "io.get-coursier.test"
@@ -0,0 +1,24 @@
#!/usr/bin/env bash
set -e

MAX_WAIT=120

wait_for() {
TARGET="$1"
I=0
while ! curl "$TARGET"; do
if [ "$I" -gt "$MAX_WAIT" ]; then
echo "$TARGET not available after $MAX_WAIT seconds" 1>&2
exit 1
fi

I="$(( $I + 1 ))"
sleep 1
done
}

docker run -d -p 9081:8081 --name nexus sonatype/nexus:2.14.4
wait_for "http://localhost:9081/nexus/content/repositories/central/"

docker run -d -p 9082:8081 --name nexus3 sonatype/nexus3:3.3.1
wait_for "http://localhost:9082/repository/maven-central/"
@@ -24,6 +24,10 @@ launchTestRepo() {
./scripts/launch-test-repo.sh "$@"
}

launchProxyRepos() {
./scripts/launch-proxies.sh
}

integrationTestsRequirements() {
# Required for ~/.ivy2/local repo tests
sbt ++2.11.11 coreJVM/publishLocal

@@ -217,6 +221,9 @@ else
runSbtShadingTests
fi
else
# Required for the proxy tests (currently CentralNexus2ProxyTests and CentralNexus3ProxyTests)
launchProxyRepos

runJvmTests

testBootstrap
Binary file not shown.
@@ -0,0 +1 @@
(binary checksum file; content not shown)
@@ -22,7 +22,7 @@ object ChecksumTests extends TestSuite {
assert(Cache.parseChecksum(other) == expected)
}

* - {
'junk - {
// https://repo1.maven.org/maven2/org/apache/spark/spark-core_2.11/1.2.0/spark-core_2.11-1.2.0.pom.sha1
// as of 2016-03-02
val junkSha1 =

@@ -34,7 +34,7 @@ object ChecksumTests extends TestSuite {
sha1ParseTest(cleanSha1, junkSha1)
}

* - {
'singleLine - {
// https://repo1.maven.org/maven2/org/json/json/20080701/json-20080701.pom.sha1
// as of 2016-03-05
val dirtySha1 =

@@ -45,6 +45,18 @@ object ChecksumTests extends TestSuite {

sha1ParseTest(cleanSha1, dirtySha1)
}

'binarySha1 - {
val content = Platform.readFullySync(getClass.getResource("/empty.sha1").openStream())
val res = Cache.parseRawChecksum(content)
assert(res.nonEmpty)
}

'binaryMd5 - {
val content = Platform.readFullySync(getClass.getResource("/empty.md5").openStream())
val res = Cache.parseRawChecksum(content)
assert(res.nonEmpty)
}
}

'artifact - {
@@ -39,7 +39,7 @@ object MavenTests extends TestSuite {

* - CentralTests.withArtifacts(
dep = dep,
artifactType = "jar",
artifactType = "src",
extraRepo = Some(repo),
classifierOpt = Some("sources")
) {
@@ -9,10 +9,16 @@ import coursier.test.compatibility._

import scala.concurrent.Future

object CentralTests extends TestSuite {
object CentralTests extends CentralTests

abstract class CentralTests extends TestSuite {

def centralBase = "https://repo1.maven.org/maven2"

final def isActualCentral = centralBase == "https://repo1.maven.org/maven2"

val repositories = Seq[Repository](
MavenRepository("https://repo1.maven.org/maven2/")
MavenRepository(centralBase)
)

def resolve(

@@ -347,7 +353,7 @@ object CentralTests extends TestSuite {
'versionFromDependency - {
val mod = Module("org.apache.ws.commons", "XmlSchema")
val version = "1.1"
val expectedArtifactUrl = "https://repo1.maven.org/maven2/org/apache/ws/commons/XmlSchema/1.1/XmlSchema-1.1.jar"
val expectedArtifactUrl = s"$centralBase/org/apache/ws/commons/XmlSchema/1.1/XmlSchema-1.1.jar"

* - resolutionCheck(mod, version)

@@ -404,12 +410,27 @@ object CentralTests extends TestSuite {
'packaging - {
'aar - {
// random aar-based module found on Central
ensureHasArtifactWithExtension(
Module("com.yandex.android", "speechkit"),
"2.5.0",
"aar",
"aar"
val module = Module("com.yandex.android", "speechkit")
val version = "2.5.0"
val tpe = "aar"

* - ensureHasArtifactWithExtension(
module,
version,
tpe,
tpe,
attributes = Attributes(tpe)
)

* - {
if (isActualCentral)
ensureHasArtifactWithExtension(
module,
version,
tpe,
tpe
)
}
}

'bundle - {

@@ -570,26 +591,52 @@ object CentralTests extends TestSuite {

* - resolutionCheck(mod, version)

val mainTarGzUrl = s"$centralBase/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.tar.gz"
val expectedTarGzArtifactUrls = Set(
"https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.tar.gz",
"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3-bin.tar.gz"
mainTarGzUrl,
s"$centralBase/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3-bin.tar.gz"
)

val mainZipUrl = s"$centralBase/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.zip"
val expectedZipArtifactUrls = Set(
"https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.zip",
"https://repo1.maven.org/maven2/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3-bin.zip"
mainZipUrl,
s"$centralBase/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3-bin.zip"
)

* - withArtifacts(mod, version, "tar.gz", classifierOpt = Some("bin"), transitive = true) { artifacts =>
assert(artifacts.length == 2)
val urls = artifacts.map(_.url).toSet
assert(urls == expectedTarGzArtifactUrls)
'tarGz - {
* - {
if (isActualCentral)
withArtifacts(mod, version, "tar.gz", classifierOpt = Some("bin"), transitive = true) { artifacts =>
assert(artifacts.length == 2)
val urls = artifacts.map(_.url).toSet
assert(urls == expectedTarGzArtifactUrls)
}
}
* - {
withArtifacts(mod, version, "tar.gz", attributes = Attributes("tar.gz", "bin"), classifierOpt = Some("bin"), transitive = true) { artifacts =>
assert(artifacts.nonEmpty)
val urls = artifacts.map(_.url).toSet
assert(urls.contains(mainTarGzUrl))
}
}
}

* - withArtifacts(mod, version, "zip", classifierOpt = Some("bin"), transitive = true) { artifacts =>
assert(artifacts.length == 2)
val urls = artifacts.map(_.url).toSet
assert(urls == expectedZipArtifactUrls)
'zip - {
* - {
if (isActualCentral)
withArtifacts(mod, version, "zip", classifierOpt = Some("bin"), transitive = true) { artifacts =>
assert(artifacts.length == 2)
val urls = artifacts.map(_.url).toSet
assert(urls == expectedZipArtifactUrls)
}
}
* - {
withArtifacts(mod, version, "zip", attributes = Attributes("zip", "bin"), classifierOpt = Some("bin"), transitive = true) { artifacts =>
assert(artifacts.nonEmpty)
val urls = artifacts.map(_.url).toSet
assert(urls.contains(mainZipUrl))
}
}
}
}

@@ -610,7 +657,7 @@ object CentralTests extends TestSuite {
val mod = Module("org.apache.commons", "commons-io")
val ver = "1.3.2"

val expectedUrl = "https://repo1.maven.org/maven2/commons-io/commons-io/1.3.2/commons-io-1.3.2.jar"
val expectedUrl = s"$centralBase/commons-io/commons-io/1.3.2/commons-io-1.3.2.jar"

* - resolutionCheck(mod, ver)

@@ -640,8 +687,8 @@ object CentralTests extends TestSuite {
val mod = Module("org.yaml", "snakeyaml")
val ver = "1.17"

def hasSha1(a: Artifact) = a.extra.contains("SHA-1")
def hasMd5(a: Artifact) = a.extra.contains("MD5")
def hasSha1(a: Artifact) = a.checksumUrls.contains("SHA-1")
def hasMd5(a: Artifact) = a.checksumUrls.contains("MD5")
def hasSig(a: Artifact) = a.extra.contains("sig")
def sigHasSig(a: Artifact) = a.extra.get("sig").exists(hasSig)

@@ -649,24 +696,27 @@ object CentralTests extends TestSuite {

* - withArtifacts(mod, ver, "*") { artifacts =>

val jarOpt = artifacts.find(_.`type` == "bundle")
val jarOpt = artifacts.find(_.`type` == "bundle").orElse(artifacts.find(_.`type` == "jar"))
val pomOpt = artifacts.find(_.`type` == "pom")

if (artifacts.length != 2 || jarOpt.isEmpty || pomOpt.isEmpty)
artifacts.foreach(println)

assert(artifacts.length == 2)
assert(jarOpt.nonEmpty)
assert(pomOpt.nonEmpty)

assert(jarOpt.forall(hasSha1))
assert(pomOpt.forall(hasSha1))
assert(jarOpt.forall(hasMd5))
assert(pomOpt.forall(hasMd5))
assert(jarOpt.forall(hasSig))
assert(pomOpt.forall(hasSig))
assert(jarOpt.forall(sigHasSig))
assert(pomOpt.forall(sigHasSig))

if (isActualCentral) {
if (artifacts.length != 2 || jarOpt.isEmpty || pomOpt.isEmpty)
artifacts.foreach(println)

assert(jarOpt.forall(_.`type` == "bundle"))
assert(artifacts.length == 2)
assert(pomOpt.nonEmpty)
assert(pomOpt.forall(hasSha1))
assert(pomOpt.forall(hasMd5))
assert(pomOpt.forall(hasSig))
assert(jarOpt.forall(sigHasSig))
assert(pomOpt.forall(sigHasSig))
}
}
}
}