mirror of https://github.com/sbt/sbt.git
Add json report to fetch and local exclusion option (#692)
This patch introduces changes for the cli with json output (#659). The format is as follows:

```
{
  "conflict_resolution": {
    "org:name:version" (requested): "org:name:version" (reconciled)
  },
  "dependencies": [
    {
      "coord": "orgA:nameA:versionA",
      "files": [
        [ <classifier>, <path> ]
      ],
      "dependencies": [
        // coordinates of its transitive dependencies
        <orgX:nameX:versionX>,
        <orgY:nameY:versionY>
      ]
    },
    {
      "coord": "orgB:nameB:versionB",
      "files": [
        [ <classifier>, <path> ]
      ],
      "dependencies": [
        // coordinates of its transitive dependencies
        <orgX:nameX:versionX>,
        <orgZ:nameZ:versionZ>
      ]
    }
  ]
}
```

For example:

```
fetch -t org.apache.avro:trevni-avro:1.8.2 org.slf4j:slf4j-api:1.7.6 --json-output-file x.out

Result:
├─ org.apache.avro:trevni-avro:1.8.2
│  ├─ org.apache.avro:trevni-core:1.8.2
│  │  ├─ org.apache.commons:commons-compress:1.8.1
│  │  ├─ org.slf4j:slf4j-api:1.7.7
│  │  └─ org.xerial.snappy:snappy-java:1.1.1.3
│  └─ org.slf4j:slf4j-api:1.7.7
└─ org.slf4j:slf4j-api:1.7.6 -> 1.7.7
```

would produce the following json file:

```
$ jq < x.out
{
  "conflict_resolution": {
    "org.slf4j:slf4j-api:1.7.6": "org.slf4j:slf4j-api:1.7.7"
  },
  "dependencies": [
    {
      "coord": "org.apache.avro:trevni-core:1.8.2",
      "files": [
        [
          "",
          "/Users/yic/Library/Caches/Coursier/v1/https/repo1.maven.org/maven2/org/apache/avro/trevni-core/1.8.2/trevni-core-1.8.2.jar"
        ]
      ],
      "dependencies": [
        "org.slf4j:slf4j-api:1.7.7",
        "org.xerial.snappy:snappy-java:1.1.1.3",
        "org.apache.commons:commons-compress:1.8.1"
      ]
    },
    {
      "coord": "org.apache.avro:trevni-avro:1.8.2",
      "files": [
        [
          "",
          "/Users/yic/Library/Caches/Coursier/v1/https/repo1.maven.org/maven2/org/apache/avro/trevni-avro/1.8.2/trevni-avro-1.8.2.jar"
        ]
      ],
      "dependencies": [
        "org.apache.avro:trevni-core:1.8.2",
        "org.slf4j:slf4j-api:1.7.7",
        "org.xerial.snappy:snappy-java:1.1.1.3",
        "org.apache.commons:commons-compress:1.8.1"
      ]
    },
    {
      "coord": "org.slf4j:slf4j-api:1.7.7",
      "files": [
        [
          "",
          "/Users/yic/Library/Caches/Coursier/v1/https/repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.jar"
        ]
      ],
      "dependencies": []
    },
    {
      "coord": "org.apache.commons:commons-compress:1.8.1",
      "files": [
        [
          "",
          "/Users/yic/Library/Caches/Coursier/v1/https/repo1.maven.org/maven2/org/apache/commons/commons-compress/1.8.1/commons-compress-1.8.1.jar"
        ]
      ],
      "dependencies": []
    },
    {
      "coord": "org.xerial.snappy:snappy-java:1.1.1.3",
      "files": [
        [
          "",
          "/Users/yic/Library/Caches/Coursier/v1/https/repo1.maven.org/maven2/org/xerial/snappy/snappy-java/1.1.1.3/snappy-java-1.1.1.3.jar"
        ]
      ],
      "dependencies": []
    }
  ]
}
```
This commit is contained in:
parent b21042a42a
commit a4258f48ce
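The report written by `--json-output-file` can be read back with the decoders this patch adds; a minimal sketch, assuming argonaut on the classpath and the example output path `x.out` from above (the wrapper object and its name are illustrative only):

```scala
import java.io.File

import argonaut._, Argonaut._
import coursier.cli.util.ReportNode

object ReadReport {
  def main(args: Array[String]): Unit = {
    // x.out is the file produced by `fetch ... --json-output-file x.out`
    val source = scala.io.Source.fromFile(new File("x.out"))
    val raw = try source.mkString finally source.close()

    raw.decodeEither[ReportNode] match {
      case Right(report) =>
        // requested -> reconciled coordinates for the roots
        report.conflict_resolution.foreach { case (requested, reconciled) =>
          println(s"$requested -> $reconciled")
        }
        // each resolved module and the cached files it maps to
        report.dependencies.foreach { dep =>
          println(s"${dep.coord}: ${dep.files.map(_._2).mkString(", ")}")
        }
      case Left(error) =>
        sys.error(s"Error while decoding report: $error")
    }
  }
}
```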
@@ -72,3 +72,5 @@ cache:
- $HOME/.ivy2/cache
- $HOME/.sbt
- $HOME/.coursier
# Pants cache
- $HOME/.cache
@@ -110,17 +110,6 @@ jar_library(
],
)

jar_library(
name = "jackson-module-scala",
jars = [
scala_jar(
name = "jackson-module-scala",
org = "com.fasterxml.jackson.module",
rev = "2.8.4",
),
],
)

jar_library(
name = "caseapp",
jars = [
@@ -169,3 +158,32 @@ jar_library(
),
],
)

jar_library(
name = "utest",
jars = [
scala_jar(
name = "utest",
org = "com.lihaoyi",
rev = "0.5.4",
),
],
)

jar_library(
name = "async",
jars = [
scala_jar(
name = "scala-async",
org = "org.scala-lang.modules",
rev = "0.9.7",
),
],
)

jar_library(
name = "scalatest",
jars = [
scala_jar("org.scalatest", "scalatest", "3.0.0"),
],
)
@@ -153,7 +153,9 @@ lazy val cli = project
if (scalaBinaryVersion.value == "2.11")
Seq(
Deps.caseApp,
Deps.argonautShapeless
Deps.argonautShapeless,
Deps.junit % "test", // to be able to run tests with pants
Deps.scalatest % "test"
)
else
Seq()
@@ -3,8 +3,8 @@ scala_library(
dependencies = [
"3rdparty/jvm:argonaut-shapeless",
"3rdparty/jvm:caseapp",
"core:core",
"cache/src/main/scala:cache",
"core:core",
"extra/src/main/scala/coursier/extra:extra",
"extra/src/main/scala-2.11/coursier/extra:native",
],
@@ -7,7 +7,7 @@ import caseapp._

import scala.language.reflectiveCalls

final case class Fetch(
case class Fetch(
@Recurse
options: FetchOptions
) extends App {
@@ -1,23 +1,22 @@
package coursier
package cli

import java.io.{ OutputStreamWriter, File }
import java.net.{ URL, URLClassLoader }
import java.util.jar.{ Manifest => JManifest }
import java.io.{File, OutputStreamWriter, PrintWriter}
import java.net.{URL, URLClassLoader}
import java.util.concurrent.Executors
import java.util.jar.{Manifest => JManifest}

import coursier.cli.scaladex.Scaladex
import coursier.cli.util.{JsonElem, JsonPrintRequirement, JsonReport}
import coursier.extra.Typelevel
import coursier.ivy.IvyRepository
import coursier.util.{Print, Parse}
import coursier.util.{Parse, Print}

import scala.annotation.tailrec
import scala.concurrent.duration.Duration
import scala.util.Try
import scalaz.concurrent.{Strategy, Task}
import scalaz.{-\/, Failure, Nondeterminism, Success, \/-}

import scalaz.{Failure, Nondeterminism, Success, \/-, -\/}
import scalaz.concurrent.{ Task, Strategy }
import scalaz.std.list._

object Helper {
def fileRepr(f: File) = f.toString
@@ -86,7 +85,6 @@ class Helper(
) {
import common._
import Helper.errPrintln

import Util._

val ttl0 =
@@ -315,10 +313,32 @@ class Helper(
.mkString("\n")
}

val excludes = excludesNoAttr.map { mod =>
val excludes: Set[(String, String)] = excludesNoAttr.map { mod =>
(mod.organization, mod.name)
}.toSet

val localExcludeMap: Map[String, Set[(String, String)]] =
if (localExcludeFile.isEmpty) {
Map()
} else {
val source = scala.io.Source.fromFile(localExcludeFile)
val lines = try source.mkString.split("\n") finally source.close()

lines.map({ str =>
val parent_and_child = str.split("--")
if (parent_and_child.length != 2) {
throw SoftExcludeParsingException(s"Failed to parse $str")
}

val child_org_name = parent_and_child(1).split(":")
if (child_org_name.length != 2) {
throw SoftExcludeParsingException(s"Failed to parse $child_org_name")
}

(parent_and_child(0), (child_org_name(0), child_org_name(1)))
}).groupBy(_._1).mapValues(_.map(_._2).toSet).toMap
}

val baseDependencies = allModuleVersionConfigs.map {
case (module, version, configOpt) =>
Dependency(
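For reference, the exclusion-file handling added above boils down to the following; a minimal standalone sketch of the same `<org:name>--<org:name>` parsing, using the two example lines from the `--local-exclude-file` help text (REPL-style snippet, not part of the patch):

```scala
val lines = Seq(
  "com.twitter.penguin:korean-text--com.twitter:util-tunable-internal_2.11",
  "org.apache.commons:commons-math--com.twitter.search:core-query-nodes"
)

// Parent coordinate -> set of (organization, name) pairs to exclude under it.
// The real code throws SoftExcludeParsingException on malformed lines; here a
// malformed line simply fails the pattern match.
val localExcludeMap: Map[String, Set[(String, String)]] =
  lines
    .map { str =>
      val Array(parent, child) = str.split("--")
      val Array(childOrg, childName) = child.split(":")
      (parent, (childOrg, childName))
    }
    .groupBy(_._1)
    .mapValues(_.map(_._2).toSet)
    .toMap

// localExcludeMap == Map(
//   "com.twitter.penguin:korean-text" -> Set(("com.twitter", "util-tunable-internal_2.11")),
//   "org.apache.commons:commons-math" -> Set(("com.twitter.search", "core-query-nodes"))
// )
```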
@@ -326,7 +346,7 @@ class Helper(
version,
attributes = Attributes("", ""),
configuration = configOpt.getOrElse(defaultConfiguration),
exclusions = excludes
exclusions = localExcludeMap.getOrElse(module.orgName, Set()) | excludes
)
}
@@ -611,17 +631,9 @@ class Helper(

val res0 = Option(subset).fold(res)(res.subset)

val artifacts0 =
if (classifier0.nonEmpty || sources || javadoc) {
var classifiers = classifier0
if (sources)
classifiers = classifiers + "sources"
if (javadoc)
classifiers = classifiers + "javadoc"
val depArtTuples: Seq[(Dependency, Artifact)] = getDepArtifactsForClassifier(sources, javadoc, res0)

res0.dependencyClassifiersArtifacts(classifiers.toVector.sorted).map(_._2)
} else
res0.dependencyArtifacts(withOptional = true).map(_._2)
val artifacts0 = depArtTuples.map(_._2)

if (artifactTypes("*"))
artifacts0
@@ -631,6 +643,20 @@ class Helper(
}
}

private def getDepArtifactsForClassifier(sources: Boolean, javadoc: Boolean, res0: Resolution): Seq[(Dependency, Artifact)] = {
if (classifier0.nonEmpty || sources || javadoc) {
var classifiers = classifier0
if (sources)
classifiers = classifiers + "sources"
if (javadoc)
classifiers = classifiers + "javadoc"
//TODO: this function somehow gives duplicated things
res0.dependencyClassifiersArtifacts(classifiers.toVector.sorted)
} else {
res0.dependencyArtifacts(withOptional = true)
}
}

def fetch(
sources: Boolean,
javadoc: Boolean,
@@ -690,8 +716,10 @@ class Helper(
a.isOptional && notFound
}

val artifactToFile: collection.mutable.Map[String, File] = collection.mutable.Map()
val files0 = results.collect {
case (artifact, \/-(f)) =>
case (artifact: Artifact, \/-(f)) =>
artifactToFile.put(artifact.url, f)
f
}
@@ -718,6 +746,37 @@ class Helper(
.mkString("\n")
}

val depToArtifacts: Map[Dependency, Vector[Artifact]] =
getDepArtifactsForClassifier(sources, javadoc, res).groupBy(_._1).mapValues(_.map(_._2).toVector)


if (!jsonOutputFile.isEmpty) {
// TODO(wisechengyi): This is not exactly the root dependencies we are asking for on the command line, but it should be
// a strict super set.
val deps: Seq[Dependency] = Set(getDepArtifactsForClassifier(sources, javadoc, res).map(_._1): _*).toSeq

// A map from requested org:name:version to reconciled org:name:version
val conflictResolutionForRoots: Map[String, String] = dependencies.map({ dep =>
val reconciledVersion: String = res.reconciledVersions
.getOrElse(dep.module, dep.version)
if (reconciledVersion != dep.version) {
Option((s"${dep.module}:${dep.version}", s"${dep.module}:$reconciledVersion"))
}
else {
Option.empty
}
}).filter(_.isDefined).map(_.get).toMap

val artifacts: Seq[(Dependency, Artifact)] = res.dependencyArtifacts

val jsonReq = JsonPrintRequirement(artifactToFile, depToArtifacts, conflictResolutionForRoots)
val roots = deps.toVector.map(JsonElem(_, artifacts, Option(jsonReq), res, printExclusions = verbosityLevel >= 1, excluded = false, colors = false))
val jsonStr = JsonReport(roots, jsonReq.conflictResolutionForRoots)(_.children, _.reconciledVersionStr, _.requestedVersionStr, _.downloadedFiles)

val pw = new PrintWriter(new File(jsonOutputFile))
pw.write(jsonStr)
pw.close()
}
files0
}
@@ -849,3 +908,7 @@ class Helper(
mainClass
}
}

case class SoftExcludeParsingException(private val message: String = "",
private val cause: Throwable = None.orNull)
extends Exception(message, cause)
@@ -49,7 +49,16 @@ final case class CommonOptions(
@Help("Exclude module")
@Value("organization:name")
@Short("E")
@Help("Global level exclude")
exclude: List[String] = Nil,

@Short("x")
@Help("Path to the local exclusion file. " +
"Syntax: <org:name>--<org:name>. `--` means minus. Example file content:\n\t" +
"\tcom.twitter.penguin:korean-text--com.twitter:util-tunable-internal_2.11\n\t" +
"\torg.apache.commons:commons-math--com.twitter.search:core-query-nodes\n\t" +
"Behavior: If root module A excludes module X, but root module B requires X, module X will still be fetched.")
localExcludeFile: String = "",
@Help("Default scala version")
@Short("e")
scalaVersion: String = scala.util.Properties.versionNumberString,
@@ -83,6 +92,11 @@ final case class CommonOptions(
@Value("profile")
@Short("F")
profile: List[String] = Nil,

@Help("Specify path for json output")
@Short("j")
jsonOutputFile: String = "",

@Help("Swap the mainline Scala JARs by Typelevel ones")
typelevel: Boolean = false,
@Recurse
@@ -169,7 +183,7 @@ final case class IsolatedLoaderOptions(
}

object ArtifactOptions {
def defaultArtifactTypes = Set("jar", "bundle")
def defaultArtifactTypes = Set("jar", "bundle", "test-jar")
}

final case class ArtifactOptions(
@@ -0,0 +1,166 @@
package coursier.cli.util

import java.io.File

import coursier.Artifact
import coursier.core.{Attributes, Dependency, Resolution}
import coursier.util.Print

import scala.collection.mutable
import scala.collection.parallel.ParSeq

import argonaut._, Argonaut._

final case class JsonPrintRequirement(fileByArtifact: collection.mutable.Map[String, File], depToArtifacts: Map[Dependency, Vector[Artifact]], conflictResolutionForRoots: Map[String, String])

final case class DepNode(coord: String, files: Vector[(String, String)], dependencies: Set[String])

final case class ReportNode(conflict_resolution: Map[String, String], dependencies: Vector[DepNode])

object ReportNode {
import argonaut.ArgonautShapeless._
implicit val encodeJson = EncodeJson.of[ReportNode]
implicit val decodeJson = DecodeJson.of[ReportNode]
}


object JsonReport {

private val printer = PrettyParams.nospace.copy(preserveOrder = true)

def apply[T](roots: IndexedSeq[T], conflictResolutionForRoots: Map[String, String])
(children: T => Seq[T], reconciledVersionStr: T => String, requestedVersionStr: T => String, getFiles: T => Seq[(String, String)]): String = {

val rootDeps: ParSeq[DepNode] = roots.par.map(r => {

/**
* Same printing mechanism as [[coursier.util.Tree#recursivePrint]]
*/
def flattenDeps(elems: Seq[T], ancestors: Set[T], acc: mutable.Set[String]): Unit = {
val unseenElems: Seq[T] = elems.filterNot(ancestors.contains)
for (elem <- unseenElems) {
val depElems = children(elem)
acc ++= depElems.map(reconciledVersionStr(_))

if (depElems.nonEmpty) {
flattenDeps(children(elem), ancestors + elem, acc)
}
}
}

val acc = scala.collection.mutable.Set[String]()
flattenDeps(Seq(r), Set(), acc)
DepNode(reconciledVersionStr(r), getFiles(r).toVector, acc.toSet)

})
val report = ReportNode(conflictResolutionForRoots, rootDeps.toVector)
printer.pretty(report.asJson)
}

}


final case class JsonElem(dep: Dependency,
artifacts: Seq[(Dependency, Artifact)] = Seq(),
jsonPrintRequirement: Option[JsonPrintRequirement],
resolution: Resolution,
colors: Boolean,
printExclusions: Boolean,
excluded: Boolean) {

val (red, yellow, reset) =
if (colors)
(Console.RED, Console.YELLOW, Console.RESET)
else
("", "", "")

// This is used to printing json output
// Seq of (classifier, file path) tuple
lazy val downloadedFiles: Seq[(String, String)] = {
jsonPrintRequirement match {
case Some(req) =>
req.depToArtifacts.getOrElse(dep, Seq())
.map(x => (x.classifier, req.fileByArtifact.get(x.url)))
.filter(_._2.isDefined)
.map(x => (x._1, x._2.get.getPath))
case None => Seq()
}
}

lazy val reconciledVersion: String = resolution.reconciledVersions
.getOrElse(dep.module, dep.version)

// These are used to printing json output
val reconciledVersionStr = s"${dep.module}:$reconciledVersion"
val requestedVersionStr = s"${dep.module}:${dep.version}"

lazy val repr =
if (excluded)
resolution.reconciledVersions.get(dep.module) match {
case None =>
s"$yellow(excluded)$reset ${dep.module}:${dep.version}"
case Some(version) =>
val versionMsg =
if (version == dep.version)
"this version"
else
s"version $version"

s"${dep.module}:${dep.version} " +
s"$red(excluded, $versionMsg present anyway)$reset"
}
else {
val versionStr =
if (reconciledVersion == dep.version)
dep.version
else {
val assumeCompatibleVersions = Print.compatibleVersions(dep.version, reconciledVersion)

(if (assumeCompatibleVersions) yellow else red) +
s"${dep.version} -> $reconciledVersion" +
(if (assumeCompatibleVersions || colors) "" else " (possible incompatibility)") +
reset
}

s"${dep.module}:$versionStr"
}

lazy val children: Seq[JsonElem] =
if (excluded)
Nil
else {
val dep0 = dep.copy(version = reconciledVersion)

val dependencies = resolution.dependenciesOf(
dep0,
withReconciledVersions = false
).sortBy { trDep =>
(trDep.module.organization, trDep.module.name, trDep.version)
}

def excluded = resolution
.dependenciesOf(
dep0.copy(exclusions = Set.empty),
withReconciledVersions = false
)
.sortBy { trDep =>
(trDep.module.organization, trDep.module.name, trDep.version)
}
.map(_.moduleVersion)
.filterNot(dependencies.map(_.moduleVersion).toSet).map {
case (mod, ver) =>
JsonElem(
Dependency(mod, ver, "", Set.empty, Attributes("", ""), false, false),
artifacts,
jsonPrintRequirement,
resolution,
colors,
printExclusions,
excluded = true
)
}

dependencies.map(JsonElem(_, artifacts, jsonPrintRequirement, resolution, colors, printExclusions, excluded = false)) ++
(if (printExclusions) excluded else Nil)
}
}
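`JsonReport` above is generic in the node type; a small sketch of how its curried interface could be wired up, using a hypothetical `DemoNode` (the cli itself passes `JsonElem` values, as in `Helper` above):

```scala
import coursier.cli.util.JsonReport

// DemoNode is a made-up stand-in for JsonElem, only to illustrate the shape of
// the four functions JsonReport expects.
final case class DemoNode(
  coord: String,
  files: Seq[(String, String)],
  children: Seq[DemoNode]
)

val leaf = DemoNode("org:leaf:1.0", Seq(("", "/cache/leaf-1.0.jar")), Nil)
val root = DemoNode("org:root:1.0", Seq(("", "/cache/root-1.0.jar")), Seq(leaf))

// No version conflicts in this toy tree, hence the empty conflict_resolution map.
val json: String = JsonReport(Vector(root, leaf), Map.empty[String, String])(
  _.children, // children:             T => Seq[T]
  _.coord,    // reconciledVersionStr:  T => String
  _.coord,    // requestedVersionStr:   T => String
  _.files     // getFiles:              T => Seq[(String, String)]
)
```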
@@ -0,0 +1,8 @@
junit_tests(
name = "test",
dependencies = [
"3rdparty/jvm:scalatest",
"cli/src/main/scala-2.11:cli",
],
sources = globs("*.scala"),
)
@@ -0,0 +1,221 @@
package coursier.cli

import java.io.{File, FileWriter}

import coursier.cli.util.ReportNode
import argonaut._, Argonaut._
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class CliIntegrationTest extends FlatSpec {

def withFile(content: String = "")(testCode: (File, FileWriter) => Any) {
val file = File.createTempFile("hello", "world") // create the fixture
val writer = new FileWriter(file)
writer.write(content)
writer.flush()
try {
testCode(file, writer) // "loan" the fixture to the test
}
finally {
writer.close()
file.delete()
}
}

def getReportFromJson(f: File): ReportNode = {
// Parse back the output json file
val source = scala.io.Source.fromFile(f)
val str = try source.mkString finally source.close()

str.decodeEither[ReportNode] match {
case Left(error) =>
throw new Exception(s"Error while decoding report: $error")
case Right(report) => report
}
}

trait TestOnlyExtraArgsApp extends caseapp.core.DefaultArgsApp {
private var remainingArgs1 = Seq.empty[String]
private var extraArgs1 = Seq.empty[String]

override def setRemainingArgs(remainingArgs: Seq[String], extraArgs: Seq[String]): Unit = {
remainingArgs1 = remainingArgs
}

override def remainingArgs: Seq[String] = remainingArgs1

def extraArgs: Seq[String] =
extraArgs1
}

"Normal fetch" should "get all files" in {

val fetchOpt = FetchOptions(common = CommonOptions())
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
fetch.setRemainingArgs(Seq("junit:junit:4.12"), Seq())
fetch.apply()
assert(fetch.files0.map(_.getName).toSet.equals(Set("junit-4.12.jar", "hamcrest-core-1.3.jar")))

}

"Module level" should "exclude correctly" in withFile(
"junit:junit--org.hamcrest:hamcrest-core") { (file, _) =>
withFile() { (jsonFile, _) =>
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)

val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
fetch.setRemainingArgs(Seq("junit:junit:4.12"), Seq())
fetch.apply()
val filesFetched = fetch.files0.map(_.getName).toSet
val expected = Set("junit-4.12.jar")
assert(filesFetched.equals(expected), s"files fetched: $filesFetched not matching expected: $expected")

val node: ReportNode = getReportFromJson(jsonFile)

assert(node.dependencies.length == 1)
assert(node.dependencies.head.coord == "junit:junit:4.12")
}

}

/**
* Result without exclusion:
* |└─ org.apache.avro:avro:1.7.4
* |├─ com.thoughtworks.paranamer:paranamer:2.3
* |├─ org.apache.commons:commons-compress:1.4.1
* |│ └─ org.tukaani:xz:1.0 // this should be fetched
* |├─ org.codehaus.jackson:jackson-core-asl:1.8.8
* |├─ org.codehaus.jackson:jackson-mapper-asl:1.8.8
* |│ └─ org.codehaus.jackson:jackson-core-asl:1.8.8
* |├─ org.slf4j:slf4j-api:1.6.4
* |└─ org.xerial.snappy:snappy-java:1.0.4.1
*/
"avro exclude xz" should "not fetch xz" in withFile(
"org.apache.avro:avro--org.tukaani:xz") { (file, writer) =>
withFile() { (jsonFile, _) =>
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)

val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
fetch.setRemainingArgs(Seq("org.apache.avro:avro:1.7.4"), Seq())
fetch.apply()

val filesFetched = fetch.files0.map(_.getName).toSet
assert(!filesFetched.contains("xz-1.0.jar"))

val node: ReportNode = getReportFromJson(jsonFile)

// assert root level dependencies
assert(node.dependencies.map(_.coord).toSet == Set(
"org.apache.avro:avro:1.7.4",
"com.thoughtworks.paranamer:paranamer:2.3",
"org.apache.commons:commons-compress:1.4.1",
"org.codehaus.jackson:jackson-core-asl:1.8.8",
"org.codehaus.jackson:jackson-mapper-asl:1.8.8",
"org.slf4j:slf4j-api:1.6.4",
"org.xerial.snappy:snappy-java:1.0.4.1"
))

// org.apache.commons:commons-compress:1.4.1 should not contain deps underneath it.
val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.4.1")
assert(compressNode.isDefined)
assert(compressNode.get.dependencies.isEmpty)
}
}

/**
* Result without exclusion:
* |├─ org.apache.avro:avro:1.7.4
* |│ ├─ com.thoughtworks.paranamer:paranamer:2.3
* |│ ├─ org.apache.commons:commons-compress:1.4.1
* |│ │ └─ org.tukaani:xz:1.0
* |│ ├─ org.codehaus.jackson:jackson-core-asl:1.8.8
* |│ ├─ org.codehaus.jackson:jackson-mapper-asl:1.8.8
* |│ │ └─ org.codehaus.jackson:jackson-core-asl:1.8.8
* |│ ├─ org.slf4j:slf4j-api:1.6.4
* |│ └─ org.xerial.snappy:snappy-java:1.0.4.1
* |└─ org.apache.commons:commons-compress:1.4.1
* | └─ org.tukaani:xz:1.0
*/
"avro excluding xz + commons-compress" should "still fetch xz" in withFile(
"org.apache.avro:avro--org.tukaani:xz") {
(file, writer) =>

withFile() {
(jsonFile, _) => {
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)

val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
fetch.setRemainingArgs(Seq("org.apache.avro:avro:1.7.4", "org.apache.commons:commons-compress:1.4.1"), Seq())
fetch.apply()
val filesFetched = fetch.files0.map(_.getName).toSet
assert(filesFetched.contains("xz-1.0.jar"))

val node: ReportNode = getReportFromJson(jsonFile)

// Root level org.apache.commons:commons-compress:1.4.1 should have org.tukaani:xz:1.0 underneath it.
val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.4.1")
assert(compressNode.isDefined)
assert(compressNode.get.dependencies.contains("org.tukaani:xz:1.0"))

val innerCompressNode = node.dependencies.find(_.coord == "org.apache.avro:avro:1.7.4")
assert(innerCompressNode.isDefined)
assert(!innerCompressNode.get.dependencies.contains("org.tukaani:xz:1.0"))
}
}

}

/**
* Result:
* |├─ org.apache.commons:commons-compress:1.4.1
* |│ └─ org.tukaani:xz:1.0 -> 1.1
* |└─ org.tukaani:xz:1.1
*/
"requested xz:1.1" should "not have conflicts" in withFile() {
(excludeFile, writer) =>
withFile() {
(jsonFile, _) => {
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)

val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
fetch.setRemainingArgs(Seq("org.apache.commons:commons-compress:1.4.1", "org.tukaani:xz:1.1"), Seq())
fetch.apply()

val node: ReportNode = getReportFromJson(jsonFile)
assert(node.conflict_resolution.isEmpty)
}
}
}

/**
* Result:
* |├─ org.apache.commons:commons-compress:1.5
* |│ └─ org.tukaani:xz:1.2
* |└─ org.tukaani:xz:1.1 -> 1.2
*/
"org.apache.commons:commons-compress:1.5 org.tukaani:xz:1.1" should "have conflicts" in withFile() {
(excludeFile, _) =>
withFile() {
(jsonFile, _) => {
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)

val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
fetch.setRemainingArgs(Seq("org.apache.commons:commons-compress:1.5", "org.tukaani:xz:1.1"), Seq())
fetch.apply()

val node: ReportNode = getReportFromJson(jsonFile)
assert(node.conflict_resolution == Map("org.tukaani:xz:1.1" -> "org.tukaani:xz:1.2"))
}
}

}

}
@@ -0,0 +1,72 @@
package coursier.cli

import java.io.{File, FileWriter}

import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner


@RunWith(classOf[JUnitRunner])
class CliUnitTest extends FlatSpec {

def withFile(content: String)(testCode: (File, FileWriter) => Any) {
val file = File.createTempFile("hello", "world") // create the fixture
val writer = new FileWriter(file)
writer.write(content)
writer.flush()
try {
testCode(file, writer) // "loan" the fixture to the test
}
finally {
writer.close()
file.delete()
}
}

"Normal text" should "parse correctly" in withFile(
"org1:name1--org2:name2") { (file, writer) =>
val opt = CommonOptions(localExcludeFile = file.getAbsolutePath)
val helper = new Helper(opt, Seq())
assert(helper.localExcludeMap.equals(Map("org1:name1" -> Set(("org2", "name2")))))
}

"Multiple excludes" should "be combined" in withFile(
"org1:name1--org2:name2\n" +
"org1:name1--org3:name3\n" +
"org4:name4--org5:name5") { (file, writer) =>

val opt = CommonOptions(localExcludeFile = file.getAbsolutePath)
val helper = new Helper(opt, Seq())
assert(helper.localExcludeMap.equals(Map(
"org1:name1" -> Set(("org2", "name2"), ("org3", "name3")),
"org4:name4" -> Set(("org5", "name5")))))
}

"extra --" should "error" in withFile(
"org1:name1--org2:name2--xxx\n" +
"org1:name1--org3:name3\n" +
"org4:name4--org5:name5") { (file, writer) =>
assertThrows[SoftExcludeParsingException]({
val opt = CommonOptions(localExcludeFile = file.getAbsolutePath)
new Helper(opt, Seq())
})
}

"child has no name" should "error" in withFile(
"org1:name1--org2:") { (file, writer) =>
assertThrows[SoftExcludeParsingException]({
val opt = CommonOptions(localExcludeFile = file.getAbsolutePath)
new Helper(opt, Seq())
})
}

"child has nothing" should "error" in withFile(
"org1:name1--:") { (file, writer) =>
assertThrows[SoftExcludeParsingException]({
val opt = CommonOptions(localExcludeFile = file.getAbsolutePath)
new Helper(opt, Seq())
})
}

}
@@ -31,6 +31,9 @@ final case class Module(
override def toString: String =
s"$organization:$nameWithAttributes"

def orgName: String =
s"$organization:$name"

override final lazy val hashCode = Module.unapply(this).get.hashCode()
}
@@ -57,7 +57,7 @@ object Print {
deps1.map(dependency(_, printExclusions)).mkString("\n")
}

private def compatibleVersions(first: String, second: String): Boolean = {
def compatibleVersions(first: String, second: String): Boolean = {
// too loose for now
// e.g. RCs and milestones should not be considered compatible with subsequent non-RC or
// milestone versions - possibly not with each other either
@@ -2,7 +2,6 @@ scala_library(
name = "native",
dependencies = [
"3rdparty/jvm:scala-native",
"core:core",
],
sources = rglobs("*.scala"),
)
@@ -1,6 +1,13 @@
[DEFAULT]
local_artifact_cache: %(pants_bootstrapdir)s/artifact_cache

[GLOBAL]
pants_version: 1.2.1

[cache]
read_from: ["%(local_artifact_cache)s"]
write_to: ["%(local_artifact_cache)s"]

[jvm]
options: ['-Xmx4g', '-XX:MaxMetaspaceSize=256m']
@@ -19,6 +19,9 @@ object Deps {
def sbtLauncherInterface = "org.scala-sbt" % "launcher-interface" % "1.0.0"
def typesafeConfig = "com.typesafe" % "config" % "1.3.2"
def argonautShapeless = "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M6"
def jackson = "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.8.4"
def scalatest = "org.scalatest" %% "scalatest" % "3.0.0"
def junit = "junit" % "junit" % "4.12"

def sbtPgp = Def.setting {
val sbtv = CrossVersion.binarySbtVersion(sbtVersion.value)
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
set -ev
set -evx

SCALA_VERSION="${SCALA_VERSION:-${TRAVIS_SCALA_VERSION:-2.12.4}}"
PULL_REQUEST="${PULL_REQUEST:-${TRAVIS_PULL_REQUEST:-false}}"