2015-12-30 01:34:34 +01:00
|
|
|
package coursier
|
|
|
|
|
|
2017-02-04 16:25:42 +01:00
|
|
|
import java.io.{ File, InputStream, OutputStreamWriter }
|
2016-04-01 00:39:31 +02:00
|
|
|
import java.net.URL
|
2016-08-04 01:35:38 +02:00
|
|
|
import java.util.concurrent.{ ExecutorService, Executors }
|
2015-12-30 01:34:39 +01:00
|
|
|
|
2016-05-22 21:43:06 +02:00
|
|
|
import coursier.core.{ Authentication, Publication }
|
2016-08-15 16:14:27 +02:00
|
|
|
import coursier.ivy.{ IvyRepository, PropertiesPattern }
|
2015-12-30 01:34:43 +01:00
|
|
|
import coursier.Keys._
|
|
|
|
|
import coursier.Structure._
|
2016-11-27 13:44:14 +01:00
|
|
|
import coursier.internal.FileUtil
|
2016-01-10 21:32:28 +01:00
|
|
|
import coursier.util.{ Config, Print }
|
2015-12-30 01:34:44 +01:00
|
|
|
import org.apache.ivy.core.module.id.ModuleRevisionId
|
2015-12-30 01:34:43 +01:00
|
|
|
|
|
|
|
|
import sbt.{ UpdateReport, Classpaths, Resolver, Def }
|
2015-12-30 01:34:34 +01:00
|
|
|
import sbt.Keys._
|
|
|
|
|
|
2015-12-30 01:34:43 +01:00
|
|
|
import scala.collection.mutable
|
2015-12-30 01:34:44 +01:00
|
|
|
import scala.collection.JavaConverters._
|
2016-08-15 16:49:20 +02:00
|
|
|
import scala.collection.mutable.ArrayBuffer
|
2016-05-22 21:43:06 +02:00
|
|
|
import scala.util.Try
|
2015-12-30 01:34:43 +01:00
|
|
|
|
|
|
|
|
import scalaz.{ \/-, -\/ }
|
2015-12-30 01:34:41 +01:00
|
|
|
import scalaz.concurrent.{ Task, Strategy }
|
2015-12-30 01:34:39 +01:00
|
|
|
|
2015-12-30 01:34:34 +01:00
|
|
|
object Tasks {
|
|
|
|
|
|
2016-08-17 22:19:28 +02:00
|
|
|
/**
 * All project references the given project transitively depends on,
 * computed from the sbt build structure (the given project itself excluded).
 */
def allRecursiveInterDependencies(state: sbt.State, projectRef: sbt.ProjectRef) = {

  // Transitive closure of project IDs reachable from `id` in the dependency graph.
  def transitiveDeps(graph: Map[String, Seq[String]], id: String): Set[String] = {

    @annotation.tailrec
    def loop(remaining: Map[String, Seq[String]], acc: Set[String]): Set[String] = {
      // split off the graph entries we have already reached
      val (reached, rest) = remaining.partition { case (k, _) => acc(k) }
      if (reached.isEmpty)
        acc
      else
        loop(rest, acc ++ reached.valuesIterator.flatten)
    }

    // seed with the direct dependencies of `id`; `id` itself is not included
    loop(graph - id, graph.getOrElse(id, Nil).toSet)
  }

  val depGraph = structure(state)
    .allProjects
    .map(p => p.id -> p.dependencies.map(_.project.project))
    .toMap

  val reachable = transitiveDeps(depGraph, projectRef.project)

  structure(state).allProjectRefs.filter(ref => reachable(ref.project))
}
|
|
|
|
|
|
2016-02-23 20:15:24 +01:00
|
|
|
/**
 * Resolvers to use for this project: the boot resolvers when
 * `overrideBuildResolvers` is set, else the external resolvers,
 * prefixed with the sbt plugin resolvers for plugin projects.
 */
def coursierResolversTask: Def.Initialize[sbt.Task[Seq[Resolver]]] =
  (
    externalResolvers,
    sbtPlugin,
    sbtResolver,
    bootResolvers,
    overrideBuildResolvers
  ).map { (extRes, isSbtPlugin, sbtRes, bootResOpt, overrideFlag) =>
    bootResOpt.filter(_ => overrideFlag).getOrElse {
      // plugin projects additionally look at the sbt resolver and the
      // community plugin releases repository, before the external ones
      val pluginResolvers =
        if (isSbtPlugin)
          Seq(sbtRes, Classpaths.sbtPluginReleases)
        else
          Seq.empty
      pluginResolvers ++ extRes
    }
  }
|
2015-12-30 01:34:34 +01:00
|
|
|
|
2016-08-17 22:19:30 +02:00
|
|
|
/**
 * Resolvers of this project and of all the projects it transitively
 * depends on, concatenated (duplicates are not removed here).
 */
def coursierRecursiveResolversTask: Def.Initialize[sbt.Task[Seq[Resolver]]] =
  (
    sbt.Keys.state,
    sbt.Keys.thisProjectRef
  ).flatMap { (state, projectRef) =>

    val depProjects = allRecursiveInterDependencies(state, projectRef)

    for {
      resolversByProject <- coursierResolvers.forAllProjects(state, projectRef +: depProjects)
    } yield resolversByProject.values.toVector.flatten
  }
|
|
|
|
|
|
2016-04-01 00:39:31 +02:00
|
|
|
/**
 * Fallback dependencies (direct-URL artifacts) gathered from this project
 * and all projects it transitively depends on.
 */
def coursierFallbackDependenciesTask: Def.Initialize[sbt.Task[Seq[(Module, String, URL, Boolean)]]] =
  (
    sbt.Keys.state,
    sbt.Keys.thisProjectRef
  ).flatMap { (state, projectRef) =>

    val depProjects = allRecursiveInterDependencies(state, projectRef)

    allDependencies
      .forAllProjects(state, projectRef +: depProjects)
      .map(_.values.toVector.flatten)
      .map { deps =>
        FromSbt.fallbackDependencies(
          deps,
          scalaVersion.in(projectRef).get(state),
          scalaBinaryVersion.in(projectRef).get(state)
        )
      }
  }
|
|
|
|
|
|
2015-12-30 01:34:48 +01:00
|
|
|
/**
 * Builds the coursier [[Project]] for the current sbt project, from its
 * ivy configurations, dependencies, projectID and scala versions.
 */
def coursierProjectTask: Def.Initialize[sbt.Task[Project]] =
  (
    sbt.Keys.state,
    sbt.Keys.thisProjectRef
  ).flatMap { (state, projectRef) =>

    // should projectID.configurations be used instead?
    val configurations = ivyConfigurations.in(projectRef).get(state)

    val allDependenciesTask = allDependencies.in(projectRef).get(state)

    // evaluated lazily, only once the dependency task has run
    lazy val projId = projectID.in(projectRef).get(state)
    lazy val sv = scalaVersion.in(projectRef).get(state)
    lazy val sbv = scalaBinaryVersion.in(projectRef).get(state)

    allDependenciesTask.map { deps =>
      // configuration name -> names of the configurations it extends
      val extendsByConfig = configurations
        .map(cfg => cfg.name -> cfg.extendsConfigs.map(_.name))
        .toMap

      FromSbt.project(
        projId,
        deps,
        extendsByConfig,
        sv,
        sbv
      )
    }
  }
|
|
|
|
|
|
2016-07-24 14:32:33 +02:00
|
|
|
/**
 * Coursier [[Project]]s of all projects the current one transitively
 * depends on (the current project itself excluded).
 */
def coursierInterProjectDependenciesTask: Def.Initialize[sbt.Task[Seq[Project]]] =
  (
    sbt.Keys.state,
    sbt.Keys.thisProjectRef
  ).flatMap { (state, projectRef) =>

    val depProjects = allRecursiveInterDependencies(state, projectRef)

    coursierProject
      .forAllProjects(state, depProjects)
      .map(_.values.toVector)
  }
|
|
|
|
|
|
2017-01-30 22:57:24 +01:00
|
|
|
/**
 * Computes the publications (artifacts to be published) of the current project,
 * as pairs of (configuration name, coursier Publication).
 *
 * Publications come from two sources:
 *   1. the standard packaging tasks (packageBin / packageSrc / packageDoc),
 *      mapped through `configsMap` (sbt configuration -> target config name),
 *      and filtered by the corresponding `publishArtifact` setting;
 *   2. the `artifacts` setting, for artifacts not covered by the packaging
 *      tasks (no per-artifact publish flag is available for these).
 */
def coursierPublicationsTask(configsMap: (sbt.Configuration, String)*): Def.Initialize[sbt.Task[Seq[(String, Publication)]]] =
  (
    sbt.Keys.state,
    sbt.Keys.thisProjectRef,
    sbt.Keys.projectID,
    sbt.Keys.scalaVersion,
    sbt.Keys.scalaBinaryVersion,
    sbt.Keys.ivyConfigurations
  ).map { (state, projectRef, projId, sv, sbv, ivyConfs) =>

    val packageTasks = Seq(packageBin, packageSrc, packageDoc)

    // For each (packaging task, configuration) pair, the artifact to publish
    // (if publishArtifact is set for that scope), tagged with the target config.
    // `getOrElse(state, null)` + `Option(...)` handles scopes where the
    // artifact setting is undefined.
    val sbtArtifacts =
      for {
        pkgTask <- packageTasks
        (config, targetConfig) <- configsMap
      } yield {
        val publish = publishArtifact.in(projectRef).in(pkgTask).in(config).getOrElse(state, false)
        if (publish)
          Option(artifact.in(projectRef).in(pkgTask).in(config).getOrElse(state, null))
            .map(targetConfig -> _)
        else
          None
      }

    // Converts an sbt Artifact to a coursier Publication, applying the
    // project's cross-version suffix to the artifact name.
    def artifactPublication(artifact: sbt.Artifact) = {

      val name = FromSbt.sbtCrossVersionName(
        artifact.name,
        projId.crossVersion,
        sv,
        sbv
      )

      Publication(
        name,
        artifact.`type`,
        artifact.extension,
        artifact.classifier.getOrElse("")
      )
    }

    val sbtArtifactsPublication = sbtArtifacts.collect {
      case Some((config, artifact)) =>
        config -> artifactPublication(artifact)
    }

    // Artifacts already accounted for above, to de-duplicate against source 2.
    val stdArtifactsSet = sbtArtifacts.flatMap(_.map { case (_, a) => a }.toSeq).toSet

    // Second-way of getting artifacts from SBT
    // No obvious way of getting the corresponding publishArtifact value for the ones
    // only here, it seems.
    val extraSbtArtifacts = Option(artifacts.in(projectRef).getOrElse(state, null))
      .toSeq
      .flatten
      .filterNot(stdArtifactsSet)

    // Seems that SBT does that - if an artifact has no configs,
    // it puts it in all of them. See for example what happens to
    // the standalone JAR artifact of the coursier cli module.
    def allConfigsIfEmpty(configs: Iterable[sbt.Configuration]): Iterable[sbt.Configuration] =
      if (configs.isEmpty) ivyConfs else configs

    val extraSbtArtifactsPublication = for {
      artifact <- extraSbtArtifacts
      config <- allConfigsIfEmpty(artifact.configurations) if config.isPublic
    } yield config.name -> artifactPublication(artifact)

    sbtArtifactsPublication ++ extraSbtArtifactsPublication
  }
|
|
|
|
|
|
2017-01-30 22:57:24 +01:00
|
|
|
/**
 * Maps each ivy configuration name to the full (transitive) set of
 * configuration names it extends, itself included. If `shadedConfig` is
 * `Some((base, shaded))`, the shaded config is grafted into the map: `base`
 * additionally extends `shaded`, and `shaded` gets an entry of its own.
 */
def coursierConfigurationsTask(shadedConfig: Option[(String, String)]) = Def.task {

  // configuration name -> names of the configurations it directly extends
  val directExtends = ivyConfigurations.value.map { config =>
    config.name -> config.extendsConfigs.map(_.name)
  }.toMap

  // Transitive closure of the extends relation for configuration `c`.
  def allExtends(c: String) = {
    // possibly bad complexity
    def helper(current: Set[String]): Set[String] = {
      val newSet = current ++ current.flatMap(directExtends.getOrElse(_, Nil))
      // newSet is always a superset of current, so comparing sizes is
      // equivalent to the previous `(newSet -- current).nonEmpty` check,
      // without building a throwaway diff set
      if (newSet.size > current.size)
        helper(newSet)
      else
        newSet
    }

    helper(Set(c))
  }

  val map = directExtends.map {
    case (config, _) =>
      config -> allExtends(config)
  }

  map ++ shadedConfig.toSeq.flatMap {
    // renamed from `shadedConfig`, which shadowed the method parameter
    case (baseConfig, shadedConfigName) =>
      Seq(
        baseConfig -> (map.getOrElse(baseConfig, Set(baseConfig)) + shadedConfigName),
        shadedConfigName -> map.getOrElse(shadedConfigName, Set(shadedConfigName))
      )
  }
}
|
|
|
|
|
|
2017-02-02 01:34:42 +01:00
|
|
|
// Cache key for `resolutionsCache`: two resolution runs sharing the same
// project, repositories, user-enabled Maven profiles, starting resolution
// (with its filter blanked out by the caller) and sbt-classifiers flag are
// assumed to produce the same Resolution.
private final case class ResolutionCacheKey(
  project: Project,
  repositories: Seq[Repository],
  userEnabledProfiles: Set[String],
  resolution: Resolution,
  sbtClassifiers: Boolean
)
|
|
|
|
|
|
2017-02-02 01:34:42 +01:00
|
|
|
// Cache key for `reportsCache`: identifies an UpdateReport by the project and
// resolution it was computed from, plus the classifier-related flags that
// affect which artifacts end up in the report.
private final case class ReportCacheKey(
  project: Project,
  resolution: Resolution,
  withClassifiers: Boolean,
  sbtClassifiers: Boolean
)
|
|
|
|
|
|
2016-05-06 13:53:49 +02:00
|
|
|
// Memoized resolutions, keyed by the inputs that determine them.
// NOTE(review): shared mutable state — callers appear to guard access via
// `synchronized` blocks in the tasks below; confirm before relying on it.
private val resolutionsCache = new mutable.HashMap[ResolutionCacheKey, Resolution]
// these may actually not need to be cached any more, now that the resolutions
// are cached
private val reportsCache = new mutable.HashMap[ReportCacheKey, UpdateReport]
|
2015-12-30 01:34:43 +01:00
|
|
|
|
2016-05-30 13:15:02 +02:00
|
|
|
/**
 * Versions to force for the core scala modules, so the resolution pins them
 * to the project's scala organization and version.
 */
private def forcedScalaModules(
  scalaOrganization: String,
  scalaVersion: String
): Map[Module, String] = {
  val coreModuleNames = Seq(
    "scala-library",
    "scala-compiler",
    "scala-reflect",
    "scalap"
  )
  coreModuleNames
    .map(name => Module(scalaOrganization, name) -> scalaVersion)
    .toMap
}
|
|
|
|
|
|
2016-08-04 01:35:38 +02:00
|
|
|
// Fresh terminal progress logger writing to stderr; one per download session.
private def createLogger() = new TermDisplay(new OutputStreamWriter(System.err))
|
|
|
|
|
|
2016-08-15 16:14:27 +02:00
|
|
|
/**
 * Ivy pattern pointing at the resolution cache of the global sbt plugins
 * (under ~/.sbt/0.13/plugins), with `sbt.global.base` / `user.home` system
 * properties substituted (also made available as `*.uri` variants).
 * Throws if the pattern cannot be parsed or a property is missing.
 */
private lazy val globalPluginPattern = {

  val props = sys.props.toMap

  // expose the file-path properties as URIs too, for use inside the pattern
  val uriProps =
    Seq("sbt.global.base", "user.home").flatMap { key =>
      props.get(key).map { path =>
        s"$key.uri" -> new File(path).toURI.toString
      }
    }

  // FIXME get the 0.13 automatically?
  val s = s"$${sbt.global.base.uri-$${user.home.uri}/.sbt/0.13}/plugins/target/resolution-cache/" +
    "[organization]/[module](/scala_[scalaVersion])(/sbt_[sbtVersion])/[revision]/resolved.xml.[ext]"

  val parsedPattern = PropertiesPattern.parse(s).fold(
    err => throw new Exception(s"Cannot parse pattern $s: $err"),
    identity
  )

  parsedPattern.substituteProperties(props ++ uriProps).fold(
    err => throw new Exception(err),
    identity
  )
}
|
|
|
|
|
|
2016-05-06 13:53:49 +02:00
|
|
|
/**
 * Runs (or reuses a cached) coursier resolution for the current project.
 *
 * When `sbtClassifiers` is true, resolves the sbt-classifiers module instead
 * of the project itself. The result is memoized in `resolutionsCache`, keyed
 * by project, repositories, enabled profiles and starting resolution.
 *
 * Throws (via ResolutionError.*.throwException()) on non-convergence,
 * version conflicts, or metadata download errors.
 */
def resolutionTask(
  sbtClassifiers: Boolean = false
) = Def.task {

  // let's update only one module at once, for a better output
  // Downloads are already parallel, no need to parallelize further anyway
  synchronized {

    // only forced when sbtClassifiers is true
    lazy val cm = coursierSbtClassifiersModule.value

    lazy val projectName = thisProjectRef.value.project

    // the project to resolve and its fallback (direct-URL) dependencies,
    // either derived from the classifiers module or from the project tasks
    val (currentProject, fallbackDependencies) =
      if (sbtClassifiers) {
        val sv = scalaVersion.value
        val sbv = scalaBinaryVersion.value

        val proj = FromSbt.project(
          cm.id,
          cm.modules,
          cm.configurations.map(cfg => cfg.name -> cfg.extendsConfigs.map(_.name)).toMap,
          sv,
          sbv
        )

        val fallbackDeps = FromSbt.fallbackDependencies(
          cm.modules,
          sv,
          sbv
        )

        (proj, fallbackDeps)
      } else {
        val proj = coursierProject.value
        val publications = coursierPublications.value
        val fallbackDeps = coursierFallbackDependencies.value
        (proj.copy(publications = publications), fallbackDeps)
      }

    val interProjectDependencies = coursierInterProjectDependencies.value

    val parallelDownloads = coursierParallelDownloads.value
    val checksums = coursierChecksums.value
    val maxIterations = coursierMaxIterations.value
    val cachePolicies = coursierCachePolicies.value
    val ttl = coursierTtl.value
    val cache = coursierCache.value

    val log = streams.value.log

    // are these always defined? (e.g. for Java only projects?)
    val so = scalaOrganization.value
    val sv = scalaVersion.value
    val sbv = scalaBinaryVersion.value

    // versions forced via sbt's dependencyOverrides
    val userForceVersions = dependencyOverrides.value.map(
      FromSbt.moduleVersion(_, sv, sbv)
    ).toMap

    // translate sbt exclusion rules; only (organization, name) exclusions are
    // supported — others are warned about and dropped
    var anyNonSupportedExclusionRule = false
    val exclusions = excludeDependencies.value.flatMap {
      rule =>
        if (
          rule.artifact != "*" ||
          rule.configurations.nonEmpty
        ) {
          log.warn(s"Unsupported exclusion rule $rule")
          anyNonSupportedExclusionRule = true
          Nil
        } else
          Seq((rule.organization,
            FromSbt.sbtCrossVersionName(rule.name, rule.crossVersion, sv, sbv)))
    }.toSet

    if (anyNonSupportedExclusionRule)
      log.warn("Only supported exclusion rule fields: organization, name")

    val resolvers =
      if (sbtClassifiers)
        coursierSbtResolvers.value
      else
        coursierRecursiveResolvers.value.distinct

    // TODO Warn about possible duplicated modules from source repositories?

    val verbosityLevel = coursierVerbosity.value

    val userEnabledProfiles = mavenProfiles.value

    // initial state of the resolution: project dependencies with the extra
    // exclusions applied, optional dependencies filtered out, forced versions
    // layered user < scala modules < inter-project
    val startRes = Resolution(
      currentProject.dependencies.map {
        case (_, dep) =>
          dep.copy(exclusions = dep.exclusions ++ exclusions)
      }.toSet,
      filter = Some(dep => !dep.optional),
      userActivations =
        if (userEnabledProfiles.isEmpty)
          None
        else
          Some(userEnabledProfiles.iterator.map(_ -> true).toMap),
      forceVersions =
        // order matters here
        userForceVersions ++
        forcedScalaModules(so, sv) ++
        interProjectDependencies.map(_.moduleVersion)
    )

    if (verbosityLevel >= 2) {
      log.info("InterProjectRepository")
      for (p <- interProjectDependencies)
        log.info(s" ${p.module}:${p.version}")
    }

    // metadata-only repository over the global sbt plugins resolution cache
    val globalPluginsRepo = IvyRepository.fromPattern(
      globalPluginPattern,
      withChecksums = false,
      withSignatures = false,
      withArtifacts = false
    )

    val interProjectRepo = InterProjectRepository(interProjectDependencies)

    val ivyHome = sys.props.getOrElse(
      "ivy.home",
      new File(sys.props("user.home")).toURI.getPath + ".ivy2"
    )

    val sbtIvyHome = sys.props.getOrElse(
      "sbt.ivy.home",
      ivyHome
    )

    // properties available for substitution in ivy-style resolver patterns
    val ivyProperties = Map(
      "ivy.home" -> ivyHome,
      "sbt.ivy.home" -> sbtIvyHome
    ) ++ sys.props

    val useSbtCredentials = coursierUseSbtCredentials.value

    // host -> credentials, from sbt's credentials setting (if enabled)
    val authenticationByHost =
      if (useSbtCredentials) {
        val cred = sbt.Keys.credentials.value.map(sbt.Credentials.toDirect)
        cred.map { c =>
          c.host -> Authentication(c.userName, c.passwd)
        }.toMap
      } else
        Map.empty[String, Authentication]

    val authenticationByRepositoryId = coursierCredentials.value.mapValues(_.authentication)

    val fallbackDependenciesRepositories =
      if (fallbackDependencies.isEmpty)
        Nil
      else {
        val map = fallbackDependencies.map {
          case (mod, ver, url, changing) =>
            (mod, ver) -> ((url, changing))
        }.toMap

        Seq(
          FallbackDependenciesRepository(map)
        )
      }

    // Attaches host-based credentials to Maven/Ivy repositories that do not
    // already carry authentication; other repository types pass through.
    def withAuthenticationByHost(repo: Repository, credentials: Map[String, Authentication]): Repository = {

      def httpHost(s: String) =
        if (s.startsWith("http://") || s.startsWith("https://"))
          Try(Cache.url(s).getHost).toOption
        else
          None

      repo match {
        case m: MavenRepository =>
          if (m.authentication.isEmpty)
            httpHost(m.root).flatMap(credentials.get).fold(m) { auth =>
              m.copy(authentication = Some(auth))
            }
          else
            m
        case i: IvyRepository =>
          if (i.authentication.isEmpty) {
            // leading constant part of the pattern ~ the repository base URL
            val base = i.pattern.chunks.takeWhile {
              case _: coursier.ivy.Pattern.Chunk.Const => true
              case _ => false
            }.map(_.string).mkString

            httpHost(base).flatMap(credentials.get).fold(i) { auth =>
              i.copy(authentication = Some(auth))
            }
          } else
            i
        case _ =>
          repo
      }
    }

    val internalRepositories = Seq(globalPluginsRepo, interProjectRepo)

    // internal repositories first, so they win for inter-project modules
    val repositories =
      internalRepositories ++
      resolvers.flatMap { resolver =>
        FromSbt.repository(
          resolver,
          ivyProperties,
          log,
          authenticationByRepositoryId.get(resolver.name)
        )
      }.map(withAuthenticationByHost(_, authenticationByHost)) ++
      fallbackDependenciesRepositories

    // actually runs the resolution (only called on cache miss below)
    def resolution = {
      var pool: ExecutorService = null
      var resLogger: TermDisplay = null

      val printOptionalMessage = verbosityLevel >= 0 && verbosityLevel <= 1

      val res = try {
        pool = Executors.newFixedThreadPool(parallelDownloads, Strategy.DefaultDaemonThreadFactory)
        resLogger = createLogger()

        // try cache policies in order, falling back from one to the next
        val fetch = Fetch.from(
          repositories,
          Cache.fetch(cache, cachePolicies.head, checksums = checksums, logger = Some(resLogger), pool = pool, ttl = ttl),
          cachePolicies.tail.map(p =>
            Cache.fetch(cache, p, checksums = checksums, logger = Some(resLogger), pool = pool, ttl = ttl)
          ): _*
        )

        def depsRepr(deps: Seq[(String, Dependency)]) =
          deps.map { case (config, dep) =>
            s"${dep.module}:${dep.version}:$config->${dep.configuration}"
          }.sorted.distinct

        if (verbosityLevel >= 2) {
          val repoReprs = repositories.map {
            case r: IvyRepository =>
              s"ivy:${r.pattern}"
            case r: InterProjectRepository =>
              "inter-project"
            case r: MavenRepository =>
              r.root
            case r =>
              // should not happen
              r.toString
          }

          log.info(
            "Repositories:\n" +
              repoReprs.map(" " + _).mkString("\n")
          )
        }

        val initialMessage =
          Seq(
            if (verbosityLevel >= 0)
              Seq(s"Updating $projectName" + (if (sbtClassifiers) " (sbt classifiers)" else ""))
            else
              Nil,
            if (verbosityLevel >= 2)
              depsRepr(currentProject.dependencies).map(depRepr =>
                s" $depRepr"
              )
            else
              Nil
          ).flatten.mkString("\n")

        if (verbosityLevel >= 2)
          log.info(initialMessage)

        // at verbosity 0-1 the message is printed through the term display
        resLogger.init(if (printOptionalMessage) log.info(initialMessage))

        startRes
          .process
          .run(fetch, maxIterations)
          .attemptRun
          .leftMap(ex =>
            ResolutionError.UnknownException(ex)
              .throwException()
          )
          .merge
      } finally {
        if (pool != null)
          pool.shutdown()
        if (resLogger != null)
          if ((resLogger.stopDidPrintSomething() && printOptionalMessage) || verbosityLevel >= 2)
            log.info(s"Resolved $projectName dependencies")
      }

      if (!res.isDone)
        ResolutionError.MaximumIterationsReached
          .throwException()

      if (res.conflicts.nonEmpty) {
        val projCache = res.projectCache.mapValues { case (_, p) => p }

        ResolutionError.Conflicts(
          "Conflict(s) in dependency resolution:\n " +
            Print.dependenciesUnknownConfigs(res.conflicts.toVector, projCache)
        ).throwException()
      }

      if (res.errors.nonEmpty) {
        val internalRepositoriesLen = internalRepositories.length
        val errors =
          if (repositories.length > internalRepositoriesLen)
            // drop internal repository errors
            res.errors.map {
              case (dep, errs) =>
                dep -> errs.drop(internalRepositoriesLen)
            }
          else
            res.errors

        ResolutionError.MetadataDownloadErrors(errors)
          .throwException()
      }

      res
    }

    resolutionsCache.getOrElseUpdate(
      ResolutionCacheKey(
        currentProject,
        repositories,
        userEnabledProfiles,
        startRes.copy(filter = None),
        sbtClassifiers
      ),
      resolution
    )
  }
}
|
2016-01-10 21:32:28 +01:00
|
|
|
|
2017-01-30 22:57:24 +01:00
|
|
|
/**
 * Downloads (or fetches from cache) the artifacts of the current resolution,
 * returning a map from Artifact to either its local File or a download error.
 *
 * When `withClassifiers` is set, only classifier artifacts are fetched
 * (the sbt-classifiers ones when `sbtClassifiers` is also set).
 *
 * NOTE(review): `ignoreArtifactErrors` is not read anywhere in this body —
 * presumably consumed by callers of the task; confirm whether it is dead here.
 */
def artifactFilesOrErrors(
  withClassifiers: Boolean,
  sbtClassifiers: Boolean = false,
  ignoreArtifactErrors: Boolean = false
) = Def.task {

  // let's update only one module at once, for a better output
  // Downloads are already parallel, no need to parallelize further anyway
  synchronized {

    // only forced when sbt classifiers are requested
    lazy val cm = coursierSbtClassifiersModule.value

    lazy val projectName = thisProjectRef.value.project

    val parallelDownloads = coursierParallelDownloads.value
    val artifactsChecksums = coursierArtifactsChecksums.value
    val cachePolicies = coursierCachePolicies.value
    val ttl = coursierTtl.value
    val cache = coursierCache.value

    val log = streams.value.log

    val verbosityLevel = coursierVerbosity.value

    // pick the resolution matching the requested artifact kind
    val res = {
      if (withClassifiers && sbtClassifiers)
        coursierSbtClassifiersResolution
      else
        coursierResolution
    }.value

    val classifiers =
      if (withClassifiers)
        Some {
          if (sbtClassifiers)
            cm.classifiers
          else
            transitiveClassifiers.value
        }
      else
        None

    val allArtifacts =
      classifiers match {
        case None => res.artifacts
        case Some(cl) => res.classifiersArtifacts(cl)
      }

    var pool: ExecutorService = null
    var artifactsLogger: TermDisplay = null

    val printOptionalMessage = verbosityLevel >= 0 && verbosityLevel <= 1

    val artifactFilesOrErrors = try {
      pool = Executors.newFixedThreadPool(parallelDownloads, Strategy.DefaultDaemonThreadFactory)
      artifactsLogger = createLogger()

      // one task per artifact, trying each cache policy in order
      val artifactFileOrErrorTasks = allArtifacts.toVector.map { a =>
        def f(p: CachePolicy) =
          Cache.file(
            a,
            cache,
            p,
            checksums = artifactsChecksums,
            logger = Some(artifactsLogger),
            pool = pool,
            ttl = ttl
          )

        cachePolicies.tail
          .foldLeft(f(cachePolicies.head))(_ orElse f(_))
          .run
          .map((a, _))
      }

      val artifactInitialMessage =
        if (verbosityLevel >= 0)
          s"Fetching artifacts of $projectName" +
            (if (sbtClassifiers) " (sbt classifiers)" else "")
        else
          ""

      if (verbosityLevel >= 2)
        log.info(artifactInitialMessage)

      // at verbosity 0-1 the message goes through the term display instead
      artifactsLogger.init(if (printOptionalMessage) log.info(artifactInitialMessage))

      Task.gatherUnordered(artifactFileOrErrorTasks).attemptRun match {
        case -\/(ex) =>
          ResolutionError.UnknownDownloadException(ex)
            .throwException()
        case \/-(l) =>
          l.toMap
      }
    } finally {
      // always release the download pool and close out the progress display
      if (pool != null)
        pool.shutdown()
      if (artifactsLogger != null)
        if ((artifactsLogger.stopDidPrintSomething() && printOptionalMessage) || verbosityLevel >= 2)
          log.info(
            s"Fetched artifacts of $projectName" +
              (if (sbtClassifiers) " (sbt classifiers)" else "")
          )
    }

    artifactFilesOrErrors
  }
}
|
|
|
|
|
|
|
|
|
|
/**
 * Resolves an artifact to a local file, preferring sbt's own boot JARs for
 * plain (no-classifier) JARs, then falling back to the downloaded files map.
 * Returns None (with an error logged, unless the artifact is known to have
 * errored) when no file is available.
 */
private def artifactFileOpt(
  sbtBootJarOverrides: Map[(Module, String), File],
  artifactFiles: Map[Artifact, File],
  erroredArtifacts: Set[Artifact],
  log: sbt.Logger,
  module: Module,
  version: String,
  artifact: Artifact
) = {

  // temporary hack :-( — look the artifact up with its attributes blanked out
  val strippedArtifact = artifact.copy(attributes = Attributes())

  // Under some conditions, SBT puts the scala JARs of its own classpath
  // in the application classpath. Ensuring we return SBT's jars rather than
  // JARs from the coursier cache, so that a same JAR doesn't land twice in the
  // application classpath (once via SBT jars, once via coursier cache).
  val bootJarOverride =
    if (artifact.classifier.isEmpty && artifact.`type` == "jar")
      sbtBootJarOverrides.get(module -> version)
    else
      None

  val fileOpt = bootJarOverride.orElse(artifactFiles.get(strippedArtifact))

  if (fileOpt.isEmpty && !erroredArtifacts(strippedArtifact))
    log.error(s"${artifact.url} not downloaded (should not happen)")

  fileOpt
}
|
|
|
|
|
|
2016-05-06 13:53:49 +02:00
|
|
|
/** sbt task building an `UpdateReport` from an already-computed coursier resolution.
  *
  * @param shadedConfigOpt      optional (baseConfig, shadedConfig) pair; dependencies and
  *                             configurations of the shaded config are folded into the base one
  * @param withClassifiers      also fetch classifier artifacts (sources, javadoc, ...)
  * @param sbtClassifiers       operate on the sbt-classifiers module instead of the project itself
  * @param ignoreArtifactErrors log artifact download errors as warnings instead of failing
  */
def updateTask(
  shadedConfigOpt: Option[(String, String)],
  withClassifiers: Boolean,
  sbtClassifiers: Boolean = false,
  ignoreArtifactErrors: Boolean = false
) = Def.task {

  // Groups pairs by a normalized key: Seq[(K, V)] -> Map[normalizedK, Seq[V]]
  def grouped[K, V](map: Seq[(K, V)])(mapKey: K => K): Map[K, Seq[V]] =
    map.groupBy { case (k, _) => mapKey(k) }.map {
      case (k, l) =>
        k -> l.map { case (_, v) => v }
    }

  // let's update only one module at once, for a better output
  // Downloads are already parallel, no need to parallelize further anyway
  synchronized {

    val so = scalaOrganization.value
    val internalSbtScalaProvider = appConfiguration.value.provider.scalaProvider
    val sbtBootJarOverrides = SbtBootJars(
      so, // this seems plain wrong - this assumes that the scala org of the project is the same
      // as the one that started SBT. This will scrap the scala org specific JARs by the ones
      // that booted SBT, even if the latter come from the standard org.scala-lang org.
      // But SBT itself does it this way, and not doing so may make two different versions
      // of the scala JARs land in the classpath...
      internalSbtScalaProvider.version(),
      internalSbtScalaProvider.jars()
    )

    // lazy: only forced on the sbtClassifiers path below
    lazy val cm = coursierSbtClassifiersModule.value

    // The coursier Project whose dependencies are being updated: either a synthetic
    // project derived from the sbt-classifiers module, or the actual sbt project
    // with its publications attached.
    val currentProject =
      if (sbtClassifiers) {
        val sv = scalaVersion.value
        val sbv = scalaBinaryVersion.value

        FromSbt.project(
          cm.id,
          cm.modules,
          cm.configurations.map(cfg => cfg.name -> cfg.extendsConfigs.map(_.name)).toMap,
          sv,
          sbv
        )
      } else {
        val proj = coursierProject.value
        val publications = coursierPublications.value
        proj.copy(publications = publications)
      }

    val log = streams.value.log

    val verbosityLevel = coursierVerbosity.value

    // Pick the resolution matching the requested artifact set.
    val res = {
      if (withClassifiers && sbtClassifiers)
        coursierSbtClassifiersResolution
      else
        coursierResolution
    }.value

    // Deferred so it only runs on a reports-cache miss (see getOrElseUpdate below).
    def report = {
      // Dependencies grouped by configuration; the backquoted `config` pattern
      // matches the current key, remapping the shaded config onto its base config.
      val depsByConfig = grouped(currentProject.dependencies)(
        config =>
          shadedConfigOpt match {
            case Some((baseConfig, `config`)) =>
              baseConfig
            case _ =>
              config
          }
      )

      // Configuration graph, with the shaded config removed and detached from its base.
      val configs = {
        val m = coursierConfigurations.value
        shadedConfigOpt.fold(m) {
          case (baseConfig, shadedConfig) =>
            (m - shadedConfig) + (
              baseConfig -> (m.getOrElse(baseConfig, Set()) - shadedConfig)
            )
        }
      }

      // Debug dump of the final dependency set at high verbosity.
      if (verbosityLevel >= 2) {
        val finalDeps = Config.dependenciesWithConfig(
          res,
          depsByConfig.map { case (k, l) => k -> l.toSet },
          configs
        )

        val projCache = res.projectCache.mapValues { case (_, p) => p }
        val repr = Print.dependenciesUnknownConfigs(finalDeps.toVector, projCache)
        log.info(repr.split('\n').map("  " + _).mkString("\n"))
      }

      val classifiers =
        if (withClassifiers)
          Some {
            if (sbtClassifiers)
              cm.classifiers
            else
              transitiveClassifiers.value
          }
        else
          None

      // Map of artifact -> (error \/ file), from the matching artifacts task.
      val artifactFilesOrErrors0 = (
        if (withClassifiers) {
          if (sbtClassifiers)
            Keys.coursierSbtClassifiersArtifacts
          else
            Keys.coursierClassifiersArtifacts
        } else
          Keys.coursierArtifacts
      ).value

      val artifactFiles = artifactFilesOrErrors0.collect {
        case (artifact, \/-(file)) =>
          artifact -> file
      }

      val artifactErrors = artifactFilesOrErrors0.toVector.collect {
        case (_, -\/(err)) =>
          err
      }

      if (artifactErrors.nonEmpty) {
        val error = ResolutionError.DownloadErrors(artifactErrors)

        if (ignoreArtifactErrors)
          log.warn(error.description(verbosityLevel >= 1))
        else
          error.throwException()
      }

      // can be non empty only if ignoreArtifactErrors is true
      val erroredArtifacts = artifactFilesOrErrors0.collect {
        case (artifact, -\/(_)) =>
          artifact
      }.toSet

      ToSbt.updateReport(
        depsByConfig,
        res,
        configs,
        classifiers,
        // partially applied: (module, version, artifact) => Option[File]
        artifactFileOpt(
          sbtBootJarOverrides,
          artifactFiles,
          erroredArtifacts,
          log,
          _,
          _,
          _
        )
      )
    }

    // Memoize the report per (project, resolution, flags); the resolution filter is
    // cleared so it doesn't needlessly split cache entries.
    reportsCache.getOrElseUpdate(
      ReportCacheKey(
        currentProject,
        res.copy(filter = None),
        withClassifiers,
        sbtClassifiers
      ),
      report
    )
  }
}
|
|
|
|
|
|
2016-05-06 13:53:49 +02:00
|
|
|
/** sbt task printing the dependency tree of the current project, per the current configuration.
  *
  * @param inverse              print the reverse tree (dependees) instead of dependencies
  * @param sbtClassifiers       operate on the sbt-classifiers module instead of the project itself
  * @param ignoreArtifactErrors unused here; kept for signature parity with the other tasks
  */
def coursierDependencyTreeTask(
  inverse: Boolean,
  sbtClassifiers: Boolean = false,
  ignoreArtifactErrors: Boolean = false
) = Def.task {

  lazy val projectName = thisProjectRef.value.project

  // Same project selection as in updateTask: synthetic sbt-classifiers project,
  // or the actual project with its publications attached.
  val currentProject =
    if (sbtClassifiers) {
      val cm = coursierSbtClassifiersModule.value
      val sv = scalaVersion.value
      val sbv = scalaBinaryVersion.value

      FromSbt.project(
        cm.id,
        cm.modules,
        cm.configurations.map(cfg => cfg.name -> cfg.extendsConfigs.map(_.name)).toMap,
        sv,
        sbv
      )
    } else {
      val proj = coursierProject.value
      val publications = coursierPublications.value
      proj.copy(publications = publications)
    }

  val res = {
    if (sbtClassifiers)
      coursierSbtClassifiersResolution
    else
      coursierResolution
  }.value

  val config = configuration.value.name
  val configs = coursierConfigurations.value

  // Current configuration plus everything it extends.
  val includedConfigs = configs.getOrElse(config, Set.empty) + config

  // Direct dependencies visible from the included configurations, sorted for
  // stable output.
  val dependencies0 = currentProject.dependencies.collect {
    case (cfg, dep) if includedConfigs(cfg) => dep
  }.sortBy { dep =>
    (dep.module.organization, dep.module.name, dep.version)
  }

  // Restrict the resolution to the transitive closure of those dependencies.
  val subRes = res.subset(dependencies0.toSet)

  // use sbt logging?
  println(
    projectName + "\n" +
      Print.dependencyTree(
        dependencies0,
        subRes,
        printExclusions = true,
        inverse,
        // honor sbt's no-color flag
        colors = !sys.props.get("sbt.log.noformat").toSeq.contains("true")
      )
  )
}
|
|
|
|
|
|
2015-12-30 01:34:34 +01:00
|
|
|
}
|