package coursier

import java.io.{ OutputStreamWriter, File }
import java.nio.file.Files
import java.util.concurrent.Executors

import coursier.core.Publication
import coursier.ivy.IvyRepository
import coursier.Keys._
import coursier.Structure._
import coursier.util.{ Config, Print }

import org.apache.ivy.core.module.id.ModuleRevisionId

import sbt.{ UpdateReport, Classpaths, Resolver, Def }
import sbt.Configurations.{ Compile, Test }
import sbt.Keys._

import scala.collection.mutable
import scala.collection.JavaConverters._

import scalaz.{ \/-, -\/ }
import scalaz.concurrent.{ Task, Strategy }

object Tasks {
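
  // Resolvers handed over to coursier: the project's external resolvers, plus
  // the sbt resolver and the sbt plugin releases repository when building an sbt plugin.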
  def coursierResolversTask: Def.Initialize[sbt.Task[Seq[Resolver]]] = Def.task {
    var resolvers = externalResolvers.value

    if (sbtPlugin.value)
      resolvers = Seq(
        sbtResolver.value,
        Classpaths.sbtPluginReleases
      ) ++ resolvers

    resolvers
  }
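
  // Builds the coursier Project of the current sbt project, from its module ID,
  // dependencies, ivy configurations, and Scala versions.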
  def coursierProjectTask: Def.Initialize[sbt.Task[Project]] =
    (
      sbt.Keys.state,
      sbt.Keys.thisProjectRef
    ).flatMap { (state, projectRef) =>

      // should projectID.configurations be used instead?
      val configurations = ivyConfigurations.in(projectRef).get(state)

      val allDependenciesTask = allDependencies.in(projectRef).get(state)

      for {
        allDependencies <- allDependenciesTask
      } yield {

        FromSbt.project(
          projectID.in(projectRef).get(state),
          allDependencies,
          configurations.map { cfg => cfg.name -> cfg.extendsConfigs.map(_.name) }.toMap,
          scalaVersion.in(projectRef).get(state),
          scalaBinaryVersion.in(projectRef).get(state)
        )
      }
    }
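
  // Gathers the coursier Project of every project in the build (used further down
  // for the inter-project repository and for forcing versions).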
  def coursierProjectsTask: Def.Initialize[sbt.Task[Seq[Project]]] =
    sbt.Keys.state.flatMap { state =>
      val projects = structure(state).allProjectRefs
      coursierProject.forAllProjects(state, projects).map(_.values.toVector)
    }
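
  // Publications (binary, source, and doc artifacts) of the current project, per
  // configuration, keeping only those whose publishArtifact setting is enabled.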
  def coursierPublicationsTask: Def.Initialize[sbt.Task[Seq[(String, Publication)]]] =
    (
      sbt.Keys.state,
      sbt.Keys.thisProjectRef,
      sbt.Keys.projectID,
      sbt.Keys.scalaVersion,
      sbt.Keys.scalaBinaryVersion
    ).map { (state, projectRef, projId, sv, sbv) =>

      val packageTasks = Seq(packageBin, packageSrc, packageDoc)
      val configs = Seq(Compile, Test)

      val sbtArtifacts =
        for {
          pkgTask <- packageTasks
          config <- configs
        } yield {
          val publish = publishArtifact.in(projectRef).in(pkgTask).in(config).getOrElse(state, false)
          if (publish)
            Option(artifact.in(projectRef).in(pkgTask).in(config).getOrElse(state, null))
              .map(config.name -> _)
          else
            None
        }

      sbtArtifacts.collect {
        case Some((config, artifact)) =>
          val name = FromSbt.sbtCrossVersionName(
            artifact.name,
            projId.crossVersion,
            sv,
            sbv
          )

          val publication = Publication(
            name,
            artifact.`type`,
            artifact.extension,
            artifact.classifier.getOrElse("")
          )

          config -> publication
      }
    }

  // FIXME More things should possibly be put here too (resolvers, etc.)
  private case class CacheKey(
    resolution: Resolution,
    withClassifiers: Boolean,
    sbtClassifiers: Boolean
  )
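
  // Memoizes the update reports computed below, so that the same resolution is only processed once.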
  private val resolutionsCache = new mutable.HashMap[CacheKey, UpdateReport]
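
  // Runs the coursier resolution and artifact fetching, then converts the result into
  // an sbt UpdateReport. With withClassifiers, classifier artifacts are fetched; with
  // sbtClassifiers, the sbt classifiers module is resolved instead of the current project.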
  def updateTask(withClassifiers: Boolean, sbtClassifiers: Boolean = false) = Def.task {

    // SBT logging should be better than that most of the time...
    def errPrintln(s: String): Unit = scala.Console.err.println(s)

    def grouped[K, V](map: Seq[(K, V)]): Map[K, Seq[V]] =
      map.groupBy { case (k, _) => k }.map {
        case (k, l) =>
          k -> l.map { case (_, v) => v }
      }

    // let's update only one module at once, for a better output
    // Downloads are already parallel, no need to parallelize further anyway
    synchronized {

      lazy val cm = coursierSbtClassifiersModule.value

      val currentProject =
        if (sbtClassifiers)
          FromSbt.project(
            cm.id,
            cm.modules,
            cm.configurations.map(cfg => cfg.name -> cfg.extendsConfigs.map(_.name)).toMap,
            scalaVersion.value,
            scalaBinaryVersion.value
          )
        else {
          val proj = coursierProject.value
          val publications = coursierPublications.value
          proj.copy(publications = publications)
        }

      val ivySbt0 = ivySbt.value
      val ivyCacheManager = ivySbt0.withIvy(streams.value.log)(ivy =>
        ivy.getResolutionCacheManager
      )

      val ivyModule = ModuleRevisionId.newInstance(
        currentProject.module.organization,
        currentProject.module.name,
        currentProject.version,
        currentProject.module.attributes.asJava
      )
      val cacheIvyFile = ivyCacheManager.getResolvedIvyFileInCache(ivyModule)
      val cacheIvyPropertiesFile = ivyCacheManager.getResolvedIvyPropertiesInCache(ivyModule)

      val projects = coursierProjects.value

      val parallelDownloads = coursierParallelDownloads.value
      val checksums = coursierChecksums.value
      val artifactsChecksums = coursierArtifactsChecksums.value
      val maxIterations = coursierMaxIterations.value
      val cachePolicy = coursierCachePolicy.value
      val cacheDir = coursierCache.value

      val resolvers =
        if (sbtClassifiers)
          coursierSbtResolvers.value
        else
          coursierResolvers.value

      val verbosity = coursierVerbosity.value
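
      // Initial resolution state: the direct dependencies of the current project, with
      // optional dependencies filtered out and the versions of in-build projects forced.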
      val startRes = Resolution(
        currentProject.dependencies.map { case (_, dep) => dep }.toSet,
        filter = Some(dep => !dep.optional),
        forceVersions = projects.map(_.moduleVersion).toMap
      )

      // required for publish to be fine, later on
      def writeIvyFiles() = {
        val printer = new scala.xml.PrettyPrinter(80, 2)

        val b = new StringBuilder
        b ++= """<?xml version="1.0" encoding="UTF-8"?>"""
        b += '\n'
        b ++= printer.format(MakeIvyXml(currentProject))
        cacheIvyFile.getParentFile.mkdirs()
        Files.write(cacheIvyFile.toPath, b.result().getBytes("UTF-8"))

        // Just writing an empty file here... Are these only used?
        cacheIvyPropertiesFile.getParentFile.mkdirs()
        Files.write(cacheIvyPropertiesFile.toPath, "".getBytes("UTF-8"))
      }

      def report = {
        if (verbosity >= 2) {
          println("InterProjectRepository")
          for (p <- projects)
            println(s" ${p.module}:${p.version}")
        }
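
        // Ivy repository pointing at the resolution cache of the global sbt plugins
        // (metadata only: no checksums, signatures, or artifacts).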
        val globalPluginsRepo = IvyRepository(
          new File(sys.props("user.home") + "/.sbt/0.13/plugins/target/resolution-cache/").toURI.toString +
            "[organization]/[module](/scala_[scalaVersion])(/sbt_[sbtVersion])/[revision]/resolved.xml.[ext]",
          withChecksums = false,
          withSignatures = false,
          withArtifacts = false
        )

        val interProjectRepo = InterProjectRepository(projects)

        val ivyProperties = Map(
          "ivy.home" -> (new File(sys.props("user.home")).toURI.getPath + ".ivy2")
        ) ++ sys.props

        val repositories = Seq(globalPluginsRepo, interProjectRepo) ++ resolvers.flatMap(FromSbt.repository(_, ivyProperties))

        val caches = Seq(
          "http://" -> new File(cacheDir, "http"),
          "https://" -> new File(cacheDir, "https")
        )

        val pool = Executors.newFixedThreadPool(parallelDownloads, Strategy.DefaultDaemonThreadFactory)

        def createLogger() = new TermDisplay(
          new OutputStreamWriter(System.err),
          fallbackMode = sys.env.get("COURSIER_NO_TERM").nonEmpty
        )

        val resLogger = createLogger()
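
        // Metadata fetching: try the local cache only first, then fall back to the configured cache policy.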
        val fetch = Fetch.from(
          repositories,
          Cache.fetch(caches, CachePolicy.LocalOnly, checksums = checksums, logger = Some(resLogger), pool = pool),
          Cache.fetch(caches, cachePolicy, checksums = checksums, logger = Some(resLogger), pool = pool)
        )

        def depsRepr(deps: Seq[(String, Dependency)]) =
          deps.map { case (config, dep) =>
            s"${dep.module}:${dep.version}:$config->${dep.configuration}"
          }.sorted.distinct

        def depsRepr0(deps: Seq[Dependency]) =
          deps.map { dep =>
            s"${dep.module}:${dep.version}:${dep.configuration}"
          }.sorted.distinct

        if (verbosity >= 1) {
          val repoReprs = repositories.map {
            case r: IvyRepository =>
              s"ivy:${r.pattern}"
            case r: InterProjectRepository =>
              "inter-project"
            case r: MavenRepository =>
              r.root
            case r =>
              // should not happen
              r.toString
          }

          errPrintln(s"Repositories:\n${repoReprs.map(" " + _).mkString("\n")}")
        }

        if (verbosity >= 0)
          errPrintln(s"Resolving ${currentProject.module.organization}:${currentProject.module.name}:${currentProject.version}")
        if (verbosity >= 1)
          for (depRepr <- depsRepr(currentProject.dependencies))
            errPrintln(s" $depRepr")

        resLogger.init()
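
        // Run the resolution itself, bounded by maxIterations.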
        val res = startRes
          .process
          .run(fetch, maxIterations)
          .attemptRun
          .leftMap(ex => throw new Exception(s"Exception during resolution", ex))
          .merge

        resLogger.stop()

        if (!res.isDone)
          throw new Exception(s"Maximum number of iterations of dependency resolution reached")

        if (res.conflicts.nonEmpty) {
          println(s"${res.conflicts.size} conflict(s):\n ${Print.dependenciesUnknownConfigs(res.conflicts.toVector)}")
          throw new Exception(s"Conflict(s) in dependency resolution")
        }

        if (res.errors.nonEmpty) {
          println(s"\n${res.errors.size} error(s):")
          for ((dep, errs) <- res.errors) {
            println(s" ${dep.module}:${dep.version}:\n${errs.map(" " + _.replace("\n", " \n")).mkString("\n")}")
          }
          throw new Exception(s"Encountered ${res.errors.length} error(s) in dependency resolution")
        }

        val depsByConfig = grouped(currentProject.dependencies)
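
        // Map each ivy configuration to the transitive closure of the configurations it extends.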
        val configs = {
          val configs0 = ivyConfigurations.value.map { config =>
            config.name -> config.extendsConfigs.map(_.name)
          }.toMap

          def allExtends(c: String) = {
            // possibly bad complexity
            def helper(current: Set[String]): Set[String] = {
              val newSet = current ++ current.flatMap(configs0.getOrElse(_, Nil))
              if ((newSet -- current).nonEmpty)
                helper(newSet)
              else
                newSet
            }

            helper(Set(c))
          }

          configs0.map {
            case (config, _) =>
              config -> allExtends(config)
          }
        }

        if (verbosity >= 0)
          errPrintln("Resolution done")
        if (verbosity >= 1) {
          val finalDeps = Config.dependenciesWithConfig(
            res,
            depsByConfig.map { case (k, l) => k -> l.toSet },
            configs
          )

          val repr = Print.dependenciesUnknownConfigs(finalDeps.toVector)
          println(repr.split('\n').map(" " + _).mkString("\n"))
        }

        val classifiers =
          if (withClassifiers)
            Some {
              if (sbtClassifiers)
                cm.classifiers
              else
                transitiveClassifiers.value
            }
          else
            None

        val allArtifacts =
          classifiers match {
            case None => res.artifacts
            case Some(cl) => res.classifiersArtifacts(cl)
          }

        val artifactsLogger = createLogger()
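
        // Download (or pick up from the cache) all the artifacts of the resolution,
        // keeping either the resulting file or an error, per artifact.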
        val artifactFileOrErrorTasks = allArtifacts.toVector.map { a =>
          Cache.file(a, caches, cachePolicy, checksums = artifactsChecksums, logger = Some(artifactsLogger), pool = pool).run.map((a, _))
        }

        if (verbosity >= 0)
          errPrintln(s"Fetching artifacts")

        artifactsLogger.init()

        val artifactFilesOrErrors = Task.gatherUnordered(artifactFileOrErrorTasks).attemptRun match {
          case -\/(ex) =>
            throw new Exception(s"Error while downloading / verifying artifacts", ex)
          case \/-(l) =>
            l.toMap
        }

        artifactsLogger.stop()

        if (verbosity >= 0)
          errPrintln(s"Fetching artifacts: done")

        def artifactFileOpt(artifact: Artifact) = {
          val fileOrError = artifactFilesOrErrors.getOrElse(artifact, -\/("Not downloaded"))

          fileOrError match {
            case \/-(file) =>
              if (file.toString.contains("file:/"))
                throw new Exception(s"Wrong path: $file")
              Some(file)
            case -\/(err) =>
              errPrintln(s"${artifact.url}: $err")
              None
          }
        }

        writeIvyFiles()

        ToSbt.updateReport(
          depsByConfig,
          res,
          configs,
          classifiers,
          artifactFileOpt
        )
      }
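
      // Only evaluate report (resolution + artifact fetching) on a cache miss,
      // i.e. if an equivalent resolution wasn't already processed.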
      resolutionsCache.getOrElseUpdate(
        CacheKey(startRes.copy(filter = None), withClassifiers, sbtClassifiers),
        report
      )
    }
  }

}