Keep only sbt plugins

This commit is contained in:
Alexandre Archambault 2018-09-28 17:55:42 +02:00
parent d327418ed2
commit 3e05d920a4
288 changed files with 52 additions and 26571 deletions

6
.gitmodules vendored
View File

@ -1,6 +0,0 @@
[submodule "tests/metadata"]
path = tests/metadata
url = https://github.com/coursier/test-metadata.git
[submodule "directories"]
path = directories
url = https://github.com/soc/directories.git

View File

@ -1,58 +1,18 @@
language: java
os:
- osx
language: scala
scala: 2.12.7
os: linux
jdk: oraclejdk8
script:
- scripts/travis.sh
# Uncomment once https://github.com/scoverage/sbt-scoverage/issues/111 is fixed
# after_success:
# - bash <(curl -s https://codecov.io/bash)
addons:
apt:
update: true
matrix:
include:
- env: SCALA_VERSION=2.12 NATIVE=1
os: linux
jdk: oraclejdk8
sudo: required
before_install:
- curl https://raw.githubusercontent.com/scala-native/scala-native/master/scripts/travis_setup.sh | bash -x
services:
- docker
- env: SCALA_VERSION=2.11
os: linux
jdk: oraclejdk8
sudo: required
services:
- docker
- env: SCALA_VERSION=2.12 SBT_COURSIER=1
os: linux
jdk: oraclejdk8
- env: SCALA_VERSION=2.12 SBT_SHADING=1
os: linux
jdk: oraclejdk8
- env: SCALA_VERSION=2.12 SCALA_JS=1
os: linux
jdk: oraclejdk8
- env: SCALA_VERSION=2.11 SCALA_JS=1
os: linux
jdk: oraclejdk8
- os: linux
jdk: oraclejdk8
script:
# Sanity check for Pants build path.
- ./pants run cli/src/main/scala-2.12:coursier-cli -- fetch --help
env:
global:
- secure: miHFMwVRD/yjOLy794nOwc2lJTMyL5O0MXABT9ksg5ejQy1FrFVc2YH86Agp80W02/lGLGl0qWCiK1TBcs9q4Apt01nkD1a/0/iuTRm//bdhnu8BbRxFITf+2cyYJVytKPsF585aHldMv1rwZs3TDaTzEEecAEki5r50yyTVo7ycG0lVj9aVWXerKRMIT54Wb8M6nqbyRB1jGWT0ETNU13vOvQznPTUXQG5hsiKnGYRf8T3umOMdOHpV0rvdwYqAIMsikaAFcYCS5P/pLXMtmRHICH9KUG8TV/ST07p1BXtbBg9y1Q+lpnXotXh4ZNoWOp8B6v7fxJ/WlLYTDROWCiHJ4s2V4Di00db/nW4OWrEEBlrh7vJ/npZqyt9V9YeNv6alxi+DCESwusgvD4Cx5c3zh+2X6RB6BYwWHlFnd80rmsLe4R4fFUcc8E/ZR9vUFjP1CsQKqfJ5yfKR6V+n8jK8FjLpoaU9PHPo2H4V3FZM/fCLcxhE37vfaYI7/O7MqE/cdGpZIuz7g3c4toWCgNZJDn8iJCPmrgcbW5zbfDxvWU2K816ycgnUwSQ5dufrJpAbLNrjR1O8EPRkMDDp9bB7/4RVQvfDfP9GGoiHPHHgxGzY0Lf5bm+Bj1mRfB5/SXHd3IjhUCD9q7eD1/ANifEYALC5BJ4TB8RhQUPU8uM=
- secure: 2/SSqa7A+aIzTJrMuqfK53QoHqes8HZPpIXUC9BH+bP2V2n7LqlFCnLZ9OSFfiJYfgeYMQDILpt8GTXHYc7JgM/N9xXpywrpYNDCYo7GMhqRyUPQOuK9044IRnZmme289Ut6ozHHptZUeZp/9DEUNZcPOxTN+KbzbHrUL+9l5BxnAxJ3e0HihxhmaINrla3T36EetdfINigarB9muyvuCRdRhZjwxsSF1fo5P+ZgWvAIDhPgNJH8eyjxHVbTabk7efPtWNWu0HjyOqJaIVk+TNjuQhvQPHKpYel0gVlCAfUjq7ZP8hZurfC6NjCFcnfTZ3d4R8GDcWJ47pgBWND8saIQOigNd7KHBPntD4fEJqgBSq3ZWakNBYzOtm8CxMGmiJHDCVqAEGzUG+lowN+SnPS2UluL3QtZ7oL/7MeJqCscH7sPwHtmZY+o0Muqo0ZJ2T2TzekQNYOAE7jeSzG1xOa/NNghny5fT+w6asPxfeolkMgyzuRFp1SLaLUf/XRV4fux0meGY9NIXso47xMSfAYVAAXT1FA2OOwmM1O4yvm3Ur95oEGDNw6z7MnWOSKS663WFwuw2cCaheCfAwvoa5jZUMWMbyUM/cBTgCaQdmETpvCzZzUr5Ls/nBXjyiTdJaQLZATr7HSGZHgYVmEAhVwBvuhTar/6VUZUMKGc2P4=
- secure: NmXh4uxqvvqxYvOBOiXE131HajCYhJyd9+7kc1YjllRZVYG11YLah9Np7qnRUyugNOdcBnWVQGlfDHOFe8GHQsZKt5PvsIzxszTor0GeDQOePX3L4YXPkZRJatmoJJ0COxdI6weCAWkI6Zr934RsOndT0mO55gk9c6eeXCcLdNjAJ3izGQHy5Wb2KTzwMhBfwjdTQ0s65c1rzz5dZ/JODilWfTHiHsz+4sKwWVmAvXDTjePd0X3svX775ot23QesJgtaC/p0AKSLcHg3zEjKkJJvvLooQyNn/zU/bio/UatDZWXnNMsTBfEr3qUedjoOY65g3EX/vYlbNRkF3Itk0dpuPooTFmezJASI4ZpewBS9OvPZheMmU/dy5Bx//622x7p4MHyao9IvYmSX0C92VWEd3gwkSzKCJtBEz4Csd5BaGhzeL41di6NSVx4IEiehC9191G1wk4Yj7S2t69N6OdAJEq+znQlYISF5ogCqip7PuesBMYTW4FaIgpnfW/OYP6VpWW87ohw/dz/CcTzP9MzuoM249EHNZKTfnJrmPJBRYSn+W4y9sTgGElPhY1U/NVQ+C/9Fov1kHFD25WeTDPdZe6yCczaUrcvfTDitfo6qnWf8ZW5dJMXN744idaZ25AT/SGoCzkPXMe+us5XLTAOtrbBMP8NXLMv5OtU999E=
- env: SBT_COURSIER=1
- env: SBT_SHADING=1
branches:
only:
- master
cache:
directories:
- $HOME/.m2
- $HOME/.ivy2/cache
- $HOME/.sbt
- $HOME/.cache

194
3rdparty/jvm/BUILD vendored
View File

@ -1,194 +0,0 @@
# This file should match the 3rdparty jars in project/Deps.scala
jar_library(
name = "scala-xml",
jars = [
scala_jar(
org = "org.scala-lang.modules",
name = "scala-xml",
rev = "1.1.0",
),
],
)
jar_library(
name = "quasiQuotes",
jars = [
scala_jar(
org = "org.scalamacros",
name = "quasiquotes",
rev = "2.1.0",
),
],
)
jar_library(
name = "fastParse",
jars = [
scala_jar(
org = "com.lihaoyi",
name = "fastparse",
rev = "1.0.0",
),
],
)
jar_library(
name = "scalaz-concurrent",
jars = [
scala_jar(
org = "org.scalaz",
name = "scalaz-concurrent",
rev = "7.2.24",
),
],
)
jar_library(
name = "scalaz-core",
jars = [
scala_jar(
org = "org.scalaz",
name = "scalaz-core",
rev = "7.2.24",
),
],
)
jar_library(
name = "jsoup",
jars = [jar(
org = "org.jsoup",
name = "jsoup",
rev = "1.10.3",
)],
)
SCALAJS_REV = "0.9.3"
jar_library(
name = "cli",
jars = [
scala_jar(
org = "org.scala-js",
name = "scalajs-cli",
rev = SCALAJS_REV,
),
],
)
jar_library(
name = "compiler",
jars = [
jar(
org = "org.scala-js",
name = "scalajs-compiler_2.11.12",
rev = SCALAJS_REV,
),
],
)
jar_library(
name = "library",
jars = [
scala_jar(
org = "org.scala-js",
name = "scalajs-library",
rev = SCALAJS_REV,
),
],
)
jar_library(
name = "dom",
jars = [
scala_jar(
org = "org.scala-js",
name = "scalajs-dom_sjs0.6",
rev = "0.9.1",
),
],
)
jar_library(
name = "caseapp",
jars = [
scala_jar(
org = "com.github.alexarchambault",
name = "case-app",
rev = "2.0.0-M3",
),
],
)
jar_library(
name = "argonaut-shapeless",
jars = [
scala_jar(
org = "com.github.alexarchambault",
name = "argonaut-shapeless_6.2",
rev = "1.2.0-M8",
),
],
)
jar_library(
name = "soc",
jars = [
jar(
org = "io.github.soc",
name = "directories",
rev = "10",
),
],
)
jar_library(
name = "scala-native",
jars = [
scala_jar(
org = "org.scala-native",
name = "nir",
rev = "0.3.7",
),
scala_jar(
org = "org.scala-native",
name = "tools",
rev = "0.3.7",
),
scala_jar(
org = "org.scala-native",
name = "util",
rev = "0.3.7",
),
],
)
jar_library(
name = "utest",
jars = [
scala_jar(
org = "com.lihaoyi",
name = "utest",
rev = "0.6.4",
),
],
)
jar_library(
name = "async",
jars = [
scala_jar(
org = "org.scala-lang.modules",
name = "scala-async",
rev = "0.9.7",
),
],
)
jar_library(
name = "scalatest",
jars = [
scala_jar("org.scalatest", "scalatest", "3.0.5"),
],
)

View File

@ -1,22 +0,0 @@
jar_library(
name = "scala-library",
jars = [
jar(
org = "org.scala-lang",
name = "scala-library",
rev = "2.12.6",
),
],
scope = 'runtime'
)
jar_library(
name = "scalac",
jars = [
jar(
org = "org.scala-lang",
name = "scala-compiler",
rev = "2.12.6",
),
],
)

View File

@ -1,202 +0,0 @@
# Cookbook of stuff to do while developing on coursier
General note: always explicitly set the scala version at the sbt prompt, like
```
> ++2.12.4
> ++2.11.11
> ++2.10.6
```
Some modules of coursier are only built in specific scala versions (sbt plugins in 2.10 and 2.12, cli and web modules in 2.11, …). coursier doesn't use sbt-doge
to handle that for now (but any help to make it work would be welcome).
The sources of coursier rely on some git submodules. Clone the sources of coursier via
```
$ git clone --recursive https://github.com/coursier/coursier.git
```
or run
```
$ git submodule update --init --recursive
```
from the coursier sources to initialize them.
The latter command also needs to be run whenever these submodules are updated.
## Compile and run the CLI
```
$ sbt ++2.11.11 "project cli" pack
$ cli/target/pack/bin/coursier --help
```
Note: `sbt ++2.11.11 cli/pack` used to work fine, but doesn't anymore, see
https://github.com/coursier/coursier/commit/3636c58d07532ab2dc176f2d2caa2b4d51050f12.
## Automatically re-compile the CLI
Doesn't work anymore :/ `sbt ++2.11.11 ~cli/pack` used to work, but doesn't
anymore for now (see above). `sbt ++2.11.11 "project cli" ~pack` only watches
the sources of the cli module, not those of the modules it depends on (core,
cache, …).
## Run a scripted test of sbt-coursier or sbt-shading
```
$ sbt
> ++2.12.4
> sbt-plugins/publishLocal
> sbt-coursier/scripted sbt-coursier/simple
> sbt-shading/scripted sbt-shading/shading
```
`++2.12.4` sets the scala version, which automatically builds the plugins for sbt 1.0. For sbt 0.13, do `++2.10.6`.
`sbt-plugins/publishLocal` publishes locally the plugins *and their dependencies*, which scripted seems not to do automatically.
## Run all the scripted tests of sbt-coursier or sbt-shading
```
$ sbt
> ++2.12.4
> sbt-plugins/publishLocal
> sbt-coursier/scripted
> sbt-shading/scripted
```
Use `++2.10.6` for sbt 0.13. See discussion above too.
## Run unit tests (JVM)
```
$ sbt
> ++2.12.4
> testsJVM/testOnly coursier.util.TreeTests
> testsJVM/test
```
`testOnly` runs the tests that match the expression it is passed.
`test` runs all the tests.
To run the tests each time the sources change, prefix the test commands with
`~`, like
```
$ sbt
> ++2.12.4
> ~testsJVM/testOnly coursier.util.TreeTests
> ~testsJVM/test
```
## Run unit tests (JS)
The JS tests require node to be installed. They automatically run `npm install` from the root of the coursier sources if needed.
JS tests can be run like JVM tests, like
```
$ sbt
> ++2.12.4
> testsJS/testOnly coursier.util.TreeTests
> testsJS/test
```
Like for the JVM tests, prefix test commands with `~` to watch sources (see above).
## Run integration tests
### Main tests
Run the small web repositories with:
```
$ scripts/launch-test-repo.sh --port 8080 --list-pages
$ scripts/launch-test-repo.sh --port 8081
```
Both of these commands spawn a web server in the background.
Run the main ITs with
```
$ sbt ++2.12.4 testsJVM/it:test
```
### Nexus proxy tests
Start the test Nexus servers with
```
$ scripts/launch-proxies.sh
```
This spawns two docker-based Nexus servers in the background (a Nexus 2 and a Nexus 3).
Then run the proxy ITs with
```
$ sbt ++2.12.4 proxy-tests/it:test
```
### Build with Pants
The [Pants](https://github.com/pantsbuild/pants) build tool can also be used as an experimental way to build the software.
Currently only the CLI command can be built via Pants with Scala 2.12.4.
To iterate on code changes:
```
./pants run cli/src/main/scala-2.12:coursier-cli -- fetch --help
```
To build a distributable binary
```
./pants binary cli/src/main/scala-2.12:coursier-cli
# Artifact will be placed under dist/
java -jar dist/coursier-cli.jar fetch --help
```
## Build the web demo
coursier is cross-compiled to scala-js, and can run in the browser. It has a [demo web site](https://coursier.github.io/coursier/#demo), that runs resolutions straight from your web browser.
Its sources are in the `web` module.
To build and test this demo site locally, you can do
```
$ sbt web/fastOptJS
$ open web/target/scala-2.12/classes/index.html
```
(on Linux, use `xdg-open` instead of `open`)
# Merging PRs on GitHub
Use either "Create merge commit" or "Squash and merge".
Use "Create merge commit" if the commit list is clean enough (each commit has a clear message, and doesn't break simple compilation and test tasks).
Use "Squash and merge" in the other cases.
# General Versioning Guideline
* Major Version 1.x.x : Increment this field when there is a major change.
* Minor Version x.1.x : Increment this field when there is a minor change that breaks backward compatibility for a method.
* Patch version x.x.1 : Increment this field for a minor format change that just adds information that an application can safely ignore.
# Deprecation Strategy
When deprecating a method/field, we want to know
1. Since which version this field/method is being deprecated
2. Migration path, i.e. what to use instead
3. At which point the deprecation will be removed
Because Scala's built-in deprecation works like
```
class deprecated(message: String = {}, since: String = {})
```
we need to put 2) and 3) into `message`:
```
@deprecated(message = "<migration path>. <version to be removed>", since = "deprecation start version")
```
Typically there needs to be at least 2 minor versions between since-version and to-be-removed-version to help migration.
For example, if since version is 1.1.0, then deprecation can be removed in 1.3.0

View File

@ -1,84 +0,0 @@
# Internals / architecture
## The model
Mainly in [Definitions.scala](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/Definitions.scala), [Resolution.scala](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/Resolution.scala), and [ResolutionProcess.scala](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/ResolutionProcess.scala).
### Module
[definition](https://github.com/coursier/coursier/blob/462c16d6db98d35e180a25b0f87aa47083ad98aa/core/shared/src/main/scala/coursier/core/Definitions.scala#L12-L16)
Uniquely designates a... module. Typically, just an organisation (`org.scala-lang`) and a name (`scala-library`). At times, can also contain so-called attributes: (unordered) key-value pairs. E.g. the SBT plugins usually have some, like `scalaVersion` with value `2.10` and `sbtVersion` with value `0.13`.
Two modules having different organisation / name / attributes are simply considered different.
During resolution, all dependencies with the same module have their versions reconciled if possible. If not possible, we have a version conflict (conflicting versions of a module are needed), which makes the resolution fail.
### Dependency
[definition](https://github.com/coursier/coursier/blob/462c16d6db98d35e180a25b0f87aa47083ad98aa/core/shared/src/main/scala/coursier/core/Definitions.scala#L43-L54)
A dependency towards a given module, with a given version (can be a version interval too, like `[2.2,2.3)` - TODO add support for `last.revision`).
TODO Add a word about the various fields of `Dependency`
### Project
[definition](https://github.com/coursier/coursier/blob/462c16d6db98d35e180a25b0f87aa47083ad98aa/core/shared/src/main/scala/coursier/core/Definitions.scala#L69-L96)
Metadata about a given version of a module.
Usually comes from a POM file (Maven) or an `ivy.xml` file (Ivy). In the former case, the infos in `Project` may originate from several POMs, if the main one has a parent POM, or some import dependencies. Raw `Project`s are obtained from the various individual POMs, and merged back into one (done during resolution by `Resolution` - TODO give more details).
TODO Describe the various fields of `Project`
### Resolution
[definition](https://github.com/coursier/coursier/blob/462c16d6db98d35e180a25b0f87aa47083ad98aa/core/shared/src/main/scala/coursier/core/Resolution.scala#L477-L487)
State of the resolution.
At any given point during resolution, we have:
- ... (TODO describe the various fields)
Properties of Resolution: TODO describe the most important methods of `Resolution`
### ResolutionProcess
[definition](https://github.com/coursier/coursier/blob/462c16d6db98d35e180a25b0f87aa47083ad98aa/core/shared/src/main/scala/coursier/core/ResolutionProcess.scala)
Proceeds with the resolution per se, starting from an initial state (a `Resolution` instance), until the final one.
Goes from one state to another by determining whether the current state:
- needs some extra metadata about some versions of some modules (some IO is needed, with a cache and/or repositories), or
- needs to take into account previously fed metadata (just calculations, no IO, giving the next state).
So each step is either IO or calculations. Several IO steps can occur in a row, if some parent POMs or dependency imports are needed to get the full picture about a given module. A calculation step is either the last step, or followed by some IO. So the steps are like: IO, IO, IO, calculations, IO, IO, calculations, IO, calculations -> done. The last step is necessarily some calculations (IO can't end it - the newly fetched metadata of an IO step needs to be taken into account by a calculation step).
TODO Describe ResolutionProcess a bit more...
### Artifacts
Once the resolution is done, we get the final `Resolution`. This `Resolution` can provide:
- final dependency list,
- artifact list,
- dependency graph,
- ...
TODO Describe all of these a bit more
## Cache / downloading
### Fetching metadata
What we need during resolution: fetching metadata, possibly several ones at once.
TODO Describe the function to supply to `ResolutionProcess` to fetch metadata
### Resolution result: artifact list
TODO How we can fetch artifacts per se...
### Cache structure on disk
TODO More about the cache (structure on disk, error tracking, TTL tracking, locks)

798
README.md
View File

@ -1,795 +1,11 @@
# Coursier
# sbt-coursier
*Pure Scala Artifact Fetching*
A Scala library to fetch dependencies from Maven / Ivy repositories
[![Build Status](https://travis-ci.org/coursier/sbt-coursier.svg?branch=master)](https://travis-ci.org/coursier/sbt-coursier)
[![Build status (Windows)](https://ci.appveyor.com/api/projects/status/zzzz?svg=true)](https://ci.appveyor.com/project/alexarchambault/sbt-coursier)
[![Maven Central](https://img.shields.io/maven-central/v/io.get-coursier/sbt-coursier_2.12_1.0.svg)](https://maven-badges.herokuapp.com/maven-central/io.get-coursier/sbt-coursier_2.12_1.0)
[![Build Status](https://travis-ci.org/coursier/coursier.svg?branch=master)](https://travis-ci.org/coursier/coursier)
[![Build status (Windows)](https://ci.appveyor.com/api/projects/status/yy3svc6ukqpykw5s?svg=true)](https://ci.appveyor.com/project/alexarchambault/coursier-a7n6k)
[![Join the chat at https://gitter.im/coursier/coursier](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/coursier/coursier?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Maven Central](https://img.shields.io/maven-central/v/io.get-coursier/coursier_2.11.svg)](https://maven-badges.herokuapp.com/maven-central/io.get-coursier/coursier_2.11)
[![Scaladoc](http://javadoc-badge.appspot.com/io.get-coursier/coursier_2.11.svg?label=scaladoc)](http://javadoc-badge.appspot.com/io.get-coursier/coursier_2.11)
sbt plugins of [coursier](https://github.com/coursier/coursier)
![Demo (courtesy of @paulp)](http://i.imgur.com/lCJ9oql.gif)
*coursier* is a dependency resolver / fetcher *à la* Maven / Ivy, entirely
rewritten from scratch in Scala. It aims at being fast and easy to embed
in other contexts. Its very core (`core` module) aims at being
extremely pure, and only requires to be fed external data (Ivy / Maven metadata) via a monad.
The `cache` module handles caching of the metadata and artifacts themselves,
and is less so pure than the `core` module, in the sense that it happily
does IO as a side-effect (always wrapped in `Task`, and naturally favoring immutability for all
that's kept in memory).
It handles fancy Maven features like
* [POM inheritance](http://books.sonatype.com/mvnref-book/reference/pom-relationships-sect-project-relationships.html#pom-relationships-sect-project-inheritance),
* [dependency management](http://books.sonatype.com/mvnex-book/reference/optimizing-sect-dependencies.html),
* [import scope](https://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html#Importing_Dependencies),
* [properties](http://books.sonatype.com/mvnref-book/reference/resource-filtering-sect-properties.html),
* etc.
and is able to fetch metadata and artifacts from both Maven and Ivy repositories.
Compared to the default dependency resolution of SBT, it adds:
* downloading of artifacts in parallel,
* better offline mode - one can safely work with snapshot dependencies if these are in cache (SBT tends to try and fail if it cannot check for updates),
* non obfuscated cache (cache structure just mimics the URL it caches),
* no global lock (no "Waiting for ~/.ivy2/.sbt.ivy.lock to be available").
From the command-line, it also has:
* a [launcher](#launch), able to launch apps distributed via Maven / Ivy repositories,
* a [bootstrap](#bootstrap) generator, able to generate stripped launchers of these apps.
Lastly, it can be used programmatically via its [API](#api) and has a Scala JS [demo](#scala-js-demo).
## Table of content
1. [Quick start](#quick-start)
1. [SBT plugin](#sbt-plugin)
2. [Command-line](#command-line)
3. [API](#api)
2. [Why](#why)
3. [Usage](#usage)
1. [SBT plugin](#sbt-plugin-1)
2. [Command-line](#command-line-1)
1. [launch](#launch)
2. [fetch](#fetch)
3. [bootstrap](#bootstrap)
4. [native bootstrap](#native-bootstrap)
3. [API](#api-1)
4. [Scala JS demo](#scala-js-demo)
4. [Extra features](#extra-features)
1. [Printing trees](#printing-trees)
2. [Generating bootstrap launchers](#generating-bootstrap-launchers)
3. [Credentials](#credentials)
4. [Extra protocols](#extra-protocols)
5. [Limitations](#limitations)
6. [FAQ](#faq)
7. [Contributors](#contributors)
8. [Projects using coursier](#projects-using-coursier)
## Quick start
### SBT plugin
Enable the SBT plugin by adding
```scala
addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3")
```
to `~/.sbt/0.13/plugins/build.sbt` (enables it globally), or to the `project/plugins.sbt` file
of an SBT project. Tested with SBT 0.13.8 / 0.13.9 / 0.13.11 / 0.13.12 / 0.13.13 / 0.13.15 / 0.13.16-M1 / 1.0.1-M5.
### Command-line
Download and run its launcher with
```
$ curl -L -o coursier https://git.io/vgvpD && chmod +x coursier && ./coursier --help
```
Alternatively on OS X, install it via homebrew,
```
$ brew install --HEAD coursier/formulas/coursier
```
Or on Archlinux, install it from [AUR](https://aur.archlinux.org/packages/coursier/),
```
$ pacaur -S coursier
```
Run an application distributed via artifacts with
```
$ ./coursier launch com.lihaoyi:ammonite_2.11.8:0.7.0
```
Download and list the classpath of one or several dependencies with
```
$ ./coursier fetch org.apache.spark:spark-sql_2.11:1.6.1 com.twitter:algebird-spark_2.11:0.12.0
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/com/twitter/algebird-spark_2.11/0.12.0/algebird-spark_2.11-0.12.0.jar
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/com/twitter/algebird-core_2.11/0.12.0/algebird-core_2.11-0.12.0.jar
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/org/apache/hadoop/hadoop-annotations/2.2.0/hadoop-annotations-2.2.0.jar
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/org/tukaani/xz/1.0/xz-1.0.jar
...
```
If you use ZSH, simple tab-completions are available by copying the
`scripts/_coursier` file into your completions directory, if you have one. If
you do not, then you can install the completions with,
```
mkdir -p ~/.zsh/completion
cp scripts/_coursier ~/.zsh/completion/
echo 'fpath=(~/.zsh/completion $fpath)' >> ~/.zshrc
echo 'autoload -Uz compinit ; compinit' >> ~/.zshrc
```
### API
Add to your `build.sbt`
```scala
libraryDependencies ++= Seq(
"io.get-coursier" %% "coursier" % "1.0.3",
"io.get-coursier" %% "coursier-cache" % "1.0.3"
)
```
Note that the examples below are validated against the current sources of coursier. You may want to read the [documentation of the latest release](https://github.com/coursier/coursier/blob/v1.1.0-M7/README.md#api) of coursier instead.
Add an import for coursier,
```scala
import coursier._
```
To resolve dependencies, first create a `Resolution` case class with your dependencies in it,
```scala
val start = Resolution(
Set(
Dependency(
Module("org.scalaz", "scalaz-core_2.11"), "7.2.3"
),
Dependency(
Module("org.typelevel", "cats-core_2.11"), "0.6.0"
)
)
)
```
Create a fetch function able to get things from a few repositories via a local cache,
```scala
import coursier.util.Task
val repositories = Seq(
Cache.ivy2Local,
MavenRepository("https://repo1.maven.org/maven2")
)
val fetch = Fetch.from(repositories, Cache.fetch[Task]())
```
Then run the resolution per-se,
```scala
import scala.concurrent.ExecutionContext.Implicits.global
val resolution = start.process.run(fetch).unsafeRun()
```
That will fetch and use metadata.
Check for errors in
```scala
val errors: Seq[((Module, String), Seq[String])] = resolution.errors
```
These would mean that the resolution wasn't able to get metadata about some dependencies.
Then fetch and get local copies of the artifacts themselves (the JARs) with
```scala
import java.io.File
import coursier.util.Gather
val localArtifacts: Seq[Either[FileError, File]] = Gather[Task].gather(
resolution.artifacts.map(Cache.file[Task](_).run)
).unsafeRun()
```
The default global cache used by coursier is `~/.coursier/cache/v1`. E.g. the artifact at
`https://repo1.maven.org/maven2/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar`
will land in `~/.coursier/cache/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar`.
From the SBT plugin, the default repositories are the ones provided by SBT (typically Central or JFrog, and `~/.ivy2/local`).
From the CLI tools, these are Central (`https://repo1.maven.org/maven2`) and `~/.ivy2/local`.
From the API, these are specified manually - you are encouraged to use those too.
## Why
The current state of dependency management in Scala suffers from several flaws that prevent applications from fully
profiting from and relying on dependency management. Coursier aims at addressing these by making it easy to:
- resolve / download dependencies programmatically,
- launch applications distributed via Maven / Ivy artifacts from the command-line,
- work offline with artifacts,
- sandbox dependency management between projects.
As its [API](#api) illustrates, getting artifacts of dependencies is just a matter of specifying these along
with a few repositories. You can then straightforwardly get the corresponding artifacts, easily getting
precise feedback about what goes on during the resolution.
Launching an application distributed via Maven artifacts is just a command away with the [launcher](#command-line) of coursier.
In most cases, just specifying the corresponding main dependency is enough to launch the corresponding application.
If all your dependencies are in cache, chances are coursier will not even try to connect to remote repositories. This
also applies to snapshot dependencies of course - these are only updated on demand, not getting constantly in your way
like is currently the case by default with SBT.
When using coursier from the command-line or via its SBT plugin, sandboxing is just one command away. Just do
`export COURSIER_CACHE="$(pwd)/.coursier-cache"`, and the cache will become `.coursier-cache` from the current
directory instead of the default global `~/.coursier/cache/v1`. This allows for example to quickly inspect the content
of the cache used by a particular project, in case you have any doubt about what's in it.
## Usage
### SBT plugin
Enable the SBT plugin globally by adding
```scala
addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3")
```
to `~/.sbt/0.13/plugins/build.sbt`
To enable it on a per-project basis, add it only to the `project/plugins.sbt` of an SBT project.
The SBT plugin has been tested only with SBT 0.13.8 / 0.13.9 / 0.13.11 / 0.13.12 / 0.13.13. It doesn't currently work with the SBT 1.0 milestones.
Once enabled, the `update`, `updateClassifiers`, and `updateSbtClassifiers` commands are taken care of by coursier. These
provide more output about what's going on than their default implementations do.
### Command-line
Download and run its launcher with
```
$ curl -L -o coursier https://git.io/vgvpD && chmod +x coursier && ./coursier --help
```
The launcher itself weighs only 30 kB and can be easily embedded as is in other projects.
It downloads the artifacts required to launch coursier on the first run.
Alternatively on OS X, install it via homebrew, that puts the `coursier` launcher directly in your PATH,
```
$ brew install --HEAD coursier/formulas/coursier
```
```
$ ./coursier --help
```
lists the available coursier commands. The most notable ones are `launch`, and `fetch`. Type
```
$ ./coursier command --help
```
to get a description of the various options the command `command` (replace with one
of the above command) accepts.
Both commands below can be given repositories with the `-r` or `--repository` option, like
```
-r central
-r https://oss.sonatype.org/content/repositories/snapshots
-r "ivy:https://repo.typesafe.com/typesafe/ivy-releases/[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"
```
`central` and `ivy2local` correspond to Maven Central and `~/.ivy2/local`. These are used by default
unless the `--no-default` option is specified.
Repositories starting with `ivy:` are assumed to be Ivy repositories, specified with an Ivy pattern, like `ivy:https://repo.typesafe.com/typesafe/ivy-releases/[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]`.
Else, a Maven repository is assumed.
To set credentials for a repository, pass a user and password in its URL, like
```
-r https://user:pass@nexus.corp.com/content/repositories/releases
```
#### launch
The `launch` command fetches a set of Maven coordinates it is given, along
with their transitive dependencies, then launches the "main `main` class" from
it if it can find one (typically from the manifest of the first coordinates).
The main class to launch can also be manually specified with the `-M` option.
For example, it can launch:
* [Ammonite](https://github.com/lihaoyi/Ammonite) (enhanced Scala REPL),
```
$ ./coursier launch com.lihaoyi:ammonite_2.11.8:0.7.0
```
along with the REPLs of various JVM languages like
* Frege,
```
$ ./coursier launch -r central -r https://oss.sonatype.org/content/groups/public \
org.frege-lang:frege-repl-core:1.3 -M frege.repl.FregeRepl
```
* clojure,
```
$ ./coursier launch org.clojure:clojure:1.7.0 -M clojure.main
```
* jruby,
```
$ wget https://raw.githubusercontent.com/jruby/jruby/master/bin/jirb && \
./coursier launch org.jruby:jruby:9.0.4.0 -M org.jruby.Main -- -- jirb
```
* jython,
```
$ ./coursier launch org.python:jython-standalone:2.7.0 -M org.python.util.jython
```
* Groovy,
```
$ ./coursier launch org.codehaus.groovy:groovy-groovysh:2.4.5 -M org.codehaus.groovy.tools.shell.Main \
commons-cli:commons-cli:1.3.1
```
etc.
and various programs, like
* ProGuard and its utility Retrace,
```
$ ./coursier launch net.sf.proguard:proguard-base:5.2.1 -M proguard.ProGuard
$ ./coursier launch net.sf.proguard:proguard-retrace:5.2.1 -M proguard.retrace.ReTrace
```
* Wiremock,
```
./coursier launch com.github.tomakehurst:wiremock:1.57 -- \
--proxy-all="http://search.twitter.com" --record-mappings --verbose
```
* SQLLine,
```
$ ./coursier launch \
sqlline:sqlline:1.3.0 \
org.postgresql:postgresql:42.1.4 \
-M sqlline.SqlLine -- \
-d org.postgresql.Driver \
-n USERNAME \
-p PASSWORD \
-u jdbc:postgresql://HOST:PORT/DATABASE
```
If you wish to pass additional arguments to the artifact being launched, separate them from coursier's parameter list with "--", just like in the Wiremock example above.
#### fetch
The `fetch` command simply fetches a set of dependencies, along with their
transitive dependencies, then prints the local paths of all their artifacts.
Example
```
$ ./coursier fetch org.apache.spark:spark-sql_2.11:1.6.1
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/org/apache/hadoop/hadoop-annotations/2.2.0/hadoop-annotations-2.2.0.jar
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/org/tukaani/xz/1.0/xz-1.0.jar
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/org/tachyonproject/tachyon-underfs-s3/0.8.2/tachyon-underfs-s3-0.8.2.jar
/path/to/.coursier/cache/v1/https/repo1.maven.org/maven2/org/glassfish/grizzly/grizzly-http/2.1.2/grizzly-http-2.1.2.jar
...
```
By adding the `-p` option, these paths can be handed over directly to
`java -cp`, like
```
$ java -cp "$(./coursier fetch -p com.lihaoyi:ammonite_2.11.8:0.7.0)" ammonite.Main
Loading...
Welcome to the Ammonite Repl 0.7.0
(Scala 2.11.8 Java 1.8.0_60)
@
```
Fetch with module level attributes, as opposed to e.g. `--classifier` is applied globally.
```
$ ./coursier fetch org.apache.avro:avro:1.7.4,classifier=tests --artifact-type test-jar,jar
```
Fetch and generate a machine readable json report. [Json Report Documentation](/doc/cli.md)
```
$ ./coursier fetch org.apache.avro:avro:1.7.4 --json-output-file report.json
```
#### bootstrap
The `bootstrap` command generates tiny bootstrap launchers, able to pull their dependencies from
repositories on first launch. For example, the launcher of coursier is [generated](https://github.com/coursier/coursier/blob/master/scripts/generate-launcher.sh) with a command like
```
$ ./coursier bootstrap \
io.get-coursier:coursier-cli_2.11:1.0.3 \
-f -o coursier
```
See `./coursier bootstrap --help` for a list of the available options.
#### native bootstrap
The `bootstrap` command can also generate [scala-native](http://scala-native.org) executables. This requires the corresponding scala-native app to publish its JARs, on Maven Central for example, and your environment to be [set up for scala-native](http://www.scala-native.org/en/latest/user/setup.html). One can then generate executables with a command like
```
$ ./coursier bootstrap \
--native \
io.get-coursier:echo_native0.3_2.11:1.0.2 \
-o echo-native
[info] Linking (2354 ms)
[info] Discovered 1291 classes and 9538 methods
$ ./echo-native hey
hey
```
### API
Add to your `build.sbt`
```scala
libraryDependencies ++= Seq(
"io.get-coursier" %% "coursier" % "1.0.3",
"io.get-coursier" %% "coursier-cache" % "1.0.3"
)
```
Note that the examples below are validated against the current sources of coursier. You may want to read the [documentation of the latest release](https://github.com/coursier/coursier/blob/v1.1.0-M7/README.md#api-1) of coursier instead.
The first module, `"io.get-coursier" %% "coursier" % "1.0.3"`, mainly depends on
`scalaz-core` (and only it, *not* `scalaz-concurrent` for example). It contains among others,
definitions,
mainly in [`Definitions.scala`](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/Definitions.scala),
[`Resolution`](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/Resolution.scala), representing a particular state of the resolution,
and [`ResolutionProcess`](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/ResolutionProcess.scala),
that expects to be given metadata, wrapped in any `Monad`, then feeds these to `Resolution`, and at the end gives
you the final `Resolution`, wrapped in the same `Monad` it was given as input. This final `Resolution` has all the dependencies,
including the transitive ones.
The second module, `"io.get-coursier" %% "coursier-cache" % "1.0.3"`, is precisely in charge of fetching
these input metadata. It uses `scalaz.concurrent.Task` as a `Monad` to wrap them. It also fetches artifacts (JARs, etc.).
It caches all of these (metadata and artifacts) on disk, and validates checksums too.
In the code below, we'll assume some imports are around,
```scala
import coursier._
```
Resolving dependencies involves creating an initial resolution state, with all the initial dependencies in it, like
```scala
val start = Resolution(
Set(
Dependency(
Module("org.typelevel", "cats-core_2.11"), "0.6.0"
),
Dependency(
Module("org.scalaz", "scalaz-core_2.11"), "7.2.3"
)
)
)
```
It goes without saying that a `Resolution` is immutable, as are all the classes defined in the core module.
The resolution process will go on by giving successive `Resolution`s, until the final one.
`start` above is only the initial state - it is far from over, as the `isDone` method on it tells,
```scala
scala> start.isDone
res0: Boolean = false
```
In order for the resolution to go on, we'll need things from a few repositories,
```scala
scala> val repositories = Seq(
| Cache.ivy2Local,
| MavenRepository("https://repo1.maven.org/maven2")
| )
repositories: Seq[coursier.core.Repository] = List(IvyRepository(Pattern(List(Const(file://), Var(user.home), Const(/local/), Var(organisation), Const(/), Var(module), Const(/), Opt(WrappedArray(Const(scala_), Var(scalaVersion), Const(/))), Opt(WrappedArray(Const(sbt_), Var(sbtVersion), Const(/))), Var(revision), Const(/), Var(type), Const(s/), Var(artifact), Opt(WrappedArray(Const(-), Var(classifier))), Const(.), Var(ext))),None,None,true,true,true,true,None), MavenRepository(https://repo1.maven.org/maven2,None,true,None))
```
The first one, `Cache.ivy2Local`, is defined in `coursier.Cache`, itself from the `coursier-cache` module that
we added above. As we can see, it is an `IvyRepository`, picking things under `~/.ivy2/local`. An `IvyRepository`
is related to the [Ivy](http://ant.apache.org/ivy/) build tool. This kind of repository involves a so-called [pattern](http://ant.apache.org/ivy/history/2.4.0/concept.html#patterns), with
various properties. These are not of very common use in Scala, although SBT uses them a bit.
The second repository is a `MavenRepository`. These are simpler than the Ivy repositories. They're the ones
we're the most used to in Scala. Common ones like [Central](https://repo1.maven.org/maven2) like here, or the repositories
from [Sonatype](https://oss.sonatype.org/content/repositories/), are Maven repositories. These originate
from the [Maven](https://maven.apache.org/) build tool. Unlike the Ivy repositories which involve customisable patterns to point
to the underlying metadata and artifacts, the paths of these for Maven repositories all look alike,
like for any particular version of the standard library, under paths like
[this one](http://repo1.maven.org/maven2/org/scala-lang/scala-library/2.11.7/).
Both `IvyRepository` and `MavenRepository` are case classes, so that it's straightforward to specify one's own
repositories.
To set credentials for a `MavenRepository` or `IvyRepository`, set their `authentication` field, like
```scala
scala> import coursier.core.Authentication
import coursier.core.Authentication
scala> MavenRepository(
| "https://nexus.corp.com/content/repositories/releases",
| authentication = Some(Authentication("user", "pass"))
| )
res2: coursier.maven.MavenRepository = MavenRepository(https://nexus.corp.com/content/repositories/releases,None,true,Some(Authentication(user, *******)))
```
Now that we have repositories, we're going to mix these with things from the `coursier-cache` module,
for resolution to happen via the cache. We'll create a function
of type `Seq[(Module, String)] => F[Seq[((Module, String), Either[Seq[String], (Artifact.Source, Project)])]]`.
Given a sequence of dependencies, designated by their `Module` (organisation and name in most cases)
and version (just a `String`), it gives either errors (`Seq[String]`) or metadata (`(Artifact.Source, Project)`),
wrapping the whole in a monad `F`.
```scala
val fetch = Fetch.from(repositories, Cache.fetch[Task]())
```
The monad used by `Fetch.from` is `scalaz.concurrent.Task`, but the resolution process is not tied to a particular
monad - any stack-safe monad would do.
With this `fetch` method, we can now go on with the resolution. Calling `process` on `start` above gives a
[`ResolutionProcess`](https://github.com/coursier/coursier/blob/master/core/shared/src/main/scala/coursier/core/ResolutionProcess.scala),
that drives the resolution. It is loosely inspired by the `Process` of scalaz-stream.
It is an immutable structure, that represents the various states the resolution process can be in.
Its method `current` gives the current `Resolution`. Calling `isDone` on the latter says whether the
resolution is done or not.
The `next` method, that expects a `fetch` method like the one above, gives
the "next" state of the resolution process, wrapped in the monad of the `fetch` method. It allows to do
one resolution step.
Lastly, the `run` method runs the whole resolution until its end. It expects a `fetch` method too,
and will make at most `maxIterations` steps (50 by default), and return the "final" resolution state,
wrapped in the monad of `fetch`. One should check that the `Resolution` it returns is done (`isDone`) -
the contrary means that `maxIterations` were reached, likely signaling an issue, unless the underlying
resolution is particularly complex, in which case `maxIterations` could be increased.
Let's run the whole resolution,
```scala
import scala.concurrent.ExecutionContext.Implicits.global
val resolution = start.process.run(fetch).unsafeRun()
```
To get additional feedback during the resolution, we can give the `Cache.default` method above
a [`Cache.Logger`](https://github.com/coursier/coursier/blob/cf269c6895e19f2d590f08811406724304332950/cache/src/main/scala/coursier/Cache.scala#L484-L490).
By default, downloads happen in a global fixed thread pool (with 6 threads, allowing for 6 parallel downloads), but
you can supply your own thread pool to `Cache.default`.
Now that the resolution is done, we can check for errors in
```scala
val errors: Seq[((Module, String), Seq[String])] = resolution.metadataErrors
```
These would mean that the resolution wasn't able to get metadata about some dependencies.
We can also check for version conflicts, in
```scala
val conflicts: Set[Dependency] = resolution.conflicts
```
which are dependencies whose versions could not be unified.
Then, if all went well, we can fetch and get local copies of the artifacts themselves (the JARs) with
```scala
import java.io.File
import coursier.util.Gather
val localArtifacts: Seq[Either[FileError, File]] = Gather[Task].gather(
resolution.artifacts.map(Cache.file[Task](_).run)
).unsafeRun()
```
We're using the `Cache.file` method, that can also be given a `Logger` (for more feedback) and a custom thread pool.
### Scala JS demo
*coursier* is also compiled to Scala JS, and can be tested in the browser via its
[demo](http://coursier.github.io/coursier/#demo).
## Extra features
### TTL
Changing things in cache are given a time-to-live (TTL) of **24 hours** by default. Changing things are artifacts for versions ending with `-SNAPSHOT`, Maven metadata files listing available versions, etc.
The most straightforward way of changing that consists in setting `COURSIER_TTL` in the environment. It's parsed with `scala.concurrent.duration.Duration`, so that things like `24 hours`, `5 min`, `10s`, or `0s`, are fine, and it accepts infinity (`Inf`) as a duration.
### Printing trees
E.g. to print the dependency tree of `io.circe:circe-core:0.4.1`,
```
$ coursier resolve -t io.circe:circe-core_2.11:0.4.1
Result:
└─ io.circe:circe-core_2.11:0.4.1
├─ io.circe:circe-numbers_2.11:0.4.1
| └─ org.scala-lang:scala-library:2.11.8
├─ org.scala-lang:scala-library:2.11.8
└─ org.typelevel:cats-core_2.11:0.4.1
├─ com.github.mpilquist:simulacrum_2.11:0.7.0
| ├─ org.scala-lang:scala-library:2.11.7 -> 2.11.8
| └─ org.typelevel:macro-compat_2.11:1.1.0
| └─ org.scala-lang:scala-library:2.11.7 -> 2.11.8
...
```
From SBT, with sbt-coursier enabled, the command `coursierDependencyTree` prints the dependency tree of the various sub-projects,
```
> coursierDependencyTree
io.get-coursier:coursier_2.11:1.0.1-SNAPSHOT
├─ com.lihaoyi:fastparse_2.11:0.3.7
| ├─ com.lihaoyi:fastparse-utils_2.11:0.3.7
| | ├─ com.lihaoyi:sourcecode_2.11:0.1.1
| | | └─ org.scala-lang:scala-library:2.11.7 -> 2.11.8
| | └─ org.scala-lang:scala-library:2.11.7 -> 2.11.8
| ├─ com.lihaoyi:sourcecode_2.11:0.1.1
| | └─ org.scala-lang:scala-library:2.11.7 -> 2.11.8
| └─ org.scala-lang:scala-library:2.11.7 -> 2.11.8
├─ org.jsoup:jsoup:1.9.2
...
```
Note that this command can be scoped to sub-projects, like `proj/coursierDependencyTree`.
The printed trees highlight version bumps, that only change the patch number, in yellow. The `2.11.7 -> 2.11.8` above means that the parent dependency wanted version `2.11.7`, but version `2.11.8` landed in the classpath, pulled in this version by other dependencies.
They highlight in red version bumps that may not be binary compatible, changing major or minor version number.
### Generating bootstrap launchers
The `coursier bootstrap` command generates tiny bootstrap launchers (~30 kB). These are able to download their dependencies upon first launch, then launch the corresponding application. E.g. to generate a launcher for scalafmt,
```
$ coursier bootstrap com.geirsson:scalafmt-cli_2.11:0.2.3 -o scalafmt
```
This generates a `scalafmt` file, which is a tiny JAR, corresponding to the `bootstrap` sub-project of coursier. It contains resource files, with the URLs of the various dependencies of scalafmt. On first launch, these are downloaded under `~/.coursier/bootstrap/com.geirsson/scalafmt-cli_2.11` (following the organization and name of the first dependency - note that this directory can be changed with the `-D` option). Nothing needs to be downloaded once all the dependencies are there, and the application is then launched straightaway.
### Credentials
To use artifacts from repositories requiring credentials, pass the user and password via the repository URL, like
```
$ coursier fetch -r https://user:pass@company.com/repo com.company:lib:0.1.0
```
From SBT, add the setting `coursierUseSbtCredentials := true` for sbt-coursier to use the credentials set via the `credentials` key. This manual step was added in order for the `credentials` setting not to be checked if not needed, as it seems to acquire some (good ol') global lock when checked, which sbt-coursier aims at avoiding.
### Extra protocols
By default, coursier and sbt-coursier handle the `http://`, `https://`, and `file://` protocols. It should also be fine
with other protocols supported by `java.net.URL` (not thoroughly tested). Support for other protocols can be added via plugins. [coursier-s3](https://github.com/rtfpessoa/coursier-s3), a plugin for S3, is under development, and illustrates how to write such plugins.
## Limitations
#### Ivy support is poorly tested
The minimum was made for SBT plugins to be resolved fine (including dependencies
between plugins, the possibility that some of them come from Maven repositories,
with their peculiarities; classifiers - sources, javadoc - should be fine too).
So it is likely that projects relying more heavily
on Ivy features could run into the limitations of the current implementation.
Any issue report related to that, illustrated with public Ivy repositories
if possible, would be greatly appreciated.
#### *Important*: SBT plugin might mess with published artifacts
SBT seems to require the `update` command to generate a few metadata files
later used by `publish`. If ever there's an issue with these, this might
add discrepancies in the artifacts published with `publish` or `publishLocal`.
Should you want to use the coursier SBT plugin while publishing artifacts at the
same time, I'd recommend an extreme caution at first, like manually inspecting
the metadata files and compare with previous ones, to ensure everything's fine.
coursier publishes its artifacts with its own plugin enabled since version
`1.0.0-M2` though, without any apparent problem.
#### No wait on locked file
If ever resolution or artifact downloading stumbles upon a locked metadata or
artifact in the cache, it will just fail, instead of waiting for the lock to be freed.
#### Also
Plus the inherent amount of bugs arising in a young project :-)
## FAQ
#### Even though the coursier SBT plugin is enabled and some `coursier*` keys can be found from the SBT prompt, dependency resolution seems still to be handled by SBT itself. Why?
Check that the default SBT settings (`sbt.Defaults.defaultSettings`) are not manually added to your project.
These define commands that the coursier SBT plugin overrides. Adding them again erases these overrides,
effectively disabling coursier.
#### With spark >= 1.5, I get some `NoVerifyError` exceptions related to jboss/netty. Why?
This error originates from the `org.jboss.netty:netty:3.2.2.Final` dependency to be put in the classpath.
Exclude it from your spark dependencies with the exclusion `org.jboss.netty:netty`.
Coursier tries to follow the Maven documentation to build the full dependency set, in particular
some [points about dependency exclusion](https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html#Dependency_Exclusions).
Inspecting the `org.apache.spark:spark-core_2.11:1.5.2` dependency graph shows that spark-core
depends on `org.jboss.netty:netty:3.2.2.Final` via the following path: `org.apache.spark:spark-core_2.11:1.5.2` ->
`org.tachyonproject:tachyon-client:0.7.1` -> `org.apache.curator:curator-framework:2.4.0` ->
`org.apache.zookeeper:zookeeper:3.4.5` -> `org.jboss.netty:netty:3.2.2.Final`. Even though
spark-core tries to exclude `org.jboss.netty:netty` to land in its classpath via some other dependencies
(e.g. it excludes it via its dependencies towards `org.apache.hadoop:hadoop-client` and `org.apache.curator:curator-recipes`),
it does not via the former path. So it depends on it according to the
[Maven documentation](https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html#Dependency_Exclusions).
This is likely unintended, as it leads to exceptions like
```
java.lang.VerifyError: (class: org/jboss/netty/channel/socket/nio/NioWorkerPool, method: createWorker signature: (Ljava/util/concurrent/Executor;)Lorg/jboss/netty/channel/socket/nio/AbstractNioWorker;) Wrong return type in function
```
Excluding `org.jboss.netty:netty` from the spark dependencies fixes it.
#### On first launch, the coursier launcher downloads a 1.5+ MB JAR. Is it possible to have a standalone launcher, that would not need to download things on first launch?
Run `scripts/generate-launcher.sh -s` from the root of the coursier sources. That will generate a new (bigger) `coursier` launcher, that doesn't need to download anything on first launch.
#### How can the launcher be run on Windows, or manually with the `java` program?
Download it from the same link as the command above. Then run from a console, in the directory where the `coursier` launcher is,
```
> java -noverify -jar coursier
```
The `-noverify` option seems to be required after the proguarding step of the main JAR of coursier.
#### How to enable sandboxing?
Set the `COURSIER_CACHE` prior to running `coursier` or SBT, like
```
$ COURSIER_CACHE=$(pwd)/.coursier-cache coursier
```
or
```
$ COURSIER_CACHE=$(pwd)/.coursier-cache sbt
```
## Contributors
See the [up-to-date list of contributors on GitHub](https://github.com/coursier/coursier/graphs/contributors).
Don't hesitate to pick an issue to contribute, and / or ask for help for how to proceed
on the [Gitter channel](https://gitter.im/coursier/coursier).
## Projects using coursier
- [Lars Hupel](https://github.com/larsrh/)'s [libisabelle](https://github.com/larsrh/libisabelle) fetches
some of its requirements via coursier,
- [jupyter-scala](https://github.com/alexarchambault/jupyter-scala) is launched
and allows to add dependencies in its sessions with coursier (initial motivation
for writing coursier),
- [Apache Toree](https://github.com/apache/incubator-toree) - formerly known as [spark-kernel](https://github.com/ibm-et/spark-kernel), is now using coursier to
add dependencies on-the-fly ([#4](https://github.com/apache/incubator-toree/pull/4)),
- [Quill](https://github.com/getquill/quill) is using coursier for faster dependency resolution ([#591](https://github.com/getquill/quill/pull/591)),
- [vscode-scala](https://github.com/dragos/dragos-vscode-scala) uses coursier to fetch and launch its ensime-based server,
- [Ammonite](https://github.com/lihaoyi/Ammonite) uses coursier to fetch user-added dependencies since version `0.9.0`,
- [ensime-sbt](https://github.com/ensime/ensime-sbt) uses coursier to get the ensime server classpath,
- [scalafmt](https://github.com/scalameta/scalafmt) relies on coursier for its CLI installation,
- [scalafiddle](https://scalafiddle.io) uses coursier to fetch user-added dependencies,
- Your project here :-)
## Acknowledgements
<a href="https://www.ej-technologies.com/products/jprofiler/overview.html">
<img src="https://www.ej-technologies.com/images/product_banners/jprofiler_medium.png">
</a>
Thanks to [JProfiler](https://www.ej-technologies.com/products/jprofiler/overview.html) for having kindly granted an [Open Source license](https://www.ej-technologies.com/buy/jprofiler/openSource) to help for the development of coursier.
Released under the Apache license, v2.
See the [coursier documentation](https://github.com/coursier/coursier)
for more details.

View File

@ -14,7 +14,6 @@ install:
- cmd: SET JAVA_HOME=C:\Program Files\Java\jdk1.8.0
- cmd: SET PATH=C:\sbt\sbt\bin;%JAVA_HOME%\bin;%PATH%
- cmd: SET SBT_OPTS=-Xmx4g -Xss2m
- git submodule update --init --recursive
environment:
TEST_REPOSITORY_HOST: localhost
TEST_REPOSITORY_PORT: 8080
@ -23,13 +22,7 @@ environment:
TEST_REPOSITORY: http://localhost:8080
build_script:
- ps: Start-Job -filepath .\scripts\start-it-auth-server.ps1 -ArgumentList $pwd, $env:TEST_REPOSITORY_HOST, $env:TEST_REPOSITORY_PORT, $env:TEST_REPOSITORY_USER, $env:TEST_REPOSITORY_PASSWORD
- sbt scala212 coreJVM/publishLocal cacheJVM/publishLocal extra/publishLocal scalazJVM/publishLocal cli/publishLocal
- sbt scala211 compile coreJVM/publishLocal
test_script:
- sbt scala212 testsJVM/test
- sbt scala211 testsJVM/test
- sbt scala212 testsJVM/it:test
- sbt scala211 testsJVM/it:test
- sbt scala212 sbt-coursier/scripted sbt-shading/scripted
branches:
only:

View File

@ -1,443 +0,0 @@
package coursier;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.*;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.security.CodeSource;
import java.security.ProtectionDomain;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Bootstrap {
/**
 * Reports a fatal error on stderr and terminates the JVM with status 255.
 *
 * @param message the error message to print before exiting
 */
static void exit(String message) {
    System.err.println(message);
    System.exit(255);
}
/**
 * Reads the remaining content of a stream into a byte array.
 * The stream is consumed until end-of-stream but not closed.
 *
 * @param is stream to drain
 * @return all remaining bytes of the stream
 * @throws IOException if reading fails
 */
static byte[] readFullySync(InputStream is) throws IOException {
    ByteArrayOutputStream collected = new ByteArrayOutputStream();
    byte[] chunk = new byte[16384];
    for (int count = is.read(chunk, 0, chunk.length); count != -1; count = is.read(chunk, 0, chunk.length)) {
        collected.write(chunk, 0, count);
    }
    collected.flush();
    return collected.toByteArray();
}
// Classpath resource listing the URLs of the JARs to download on first launch (one per line).
final static String defaultURLResource = "bootstrap-jar-urls";
// Classpath resource listing names of JARs shipped inside this launcher as resources (one per line).
final static String defaultJarResource = "bootstrap-jar-resources";
// Classpath resource listing the isolation context IDs (one per line).
final static String isolationIDsResource = "bootstrap-isolation-ids";
/**
 * Loads a newline-separated list of strings from a classpath resource.
 *
 * @param resource name of the resource to read (UTF-8, one entry per line)
 * @return the lines of the resource; empty when the resource is missing or empty
 * @throws IOException if reading the resource fails
 */
static String[] readStringSequence(String resource) throws IOException {
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    InputStream stream = loader.getResourceAsStream(resource);
    if (stream == null)
        return new String[] {};
    String content = new String(readFullySync(stream), "UTF-8");
    if (content.isEmpty())
        return new String[] {};
    return content.split("\n");
}
/**
 * Builds, for each isolation context, the array of local URLs its class
 * loader should use, downloading any missing artifact along the way.
 *
 * @param cacheDir cache directory for downloads; can be null if nothing should be downloaded
 * @param isolationIDs identifiers of the isolation contexts to set up
 * @param bootstrapProtocol custom URL protocol under which embedded resources are served
 * @param loader class loader used to locate embedded resources
 * @return map from isolation ID to the local URLs of that context, preserving input order
 * @throws IOException if reading a resource list fails
 */
static Map<String, URL[]> readIsolationContexts(File cacheDir, String[] isolationIDs, String bootstrapProtocol, ClassLoader loader) throws IOException {
    final Map<String, URL[]> contexts = new LinkedHashMap<String, URL[]>();

    for (String isolationID : isolationIDs) {
        String[] rawUrls = readStringSequence("bootstrap-isolation-" + isolationID + "-jar-urls");
        String[] resources = readStringSequence("bootstrap-isolation-" + isolationID + "-jar-resources");
        List<URL> remote = getURLs(rawUrls, resources, bootstrapProtocol, loader);
        List<URL> local = getLocalURLs(remote, cacheDir, bootstrapProtocol);
        contexts.put(isolationID, local.toArray(new URL[local.size()]));
    }

    return contexts;
}
// Size of the fixed thread pool used for downloads, i.e. max parallel downloads.
final static int concurrentDownloadCount = 6;
// http://stackoverflow.com/questions/872272/how-to-reference-another-property-in-java-util-properties/27724276#27724276
/**
 * Loads a properties stream into an insertion-ordered map, substituting
 * ${key} placeholders with previously defined properties, falling back to
 * system properties for unknown keys. Placeholders that resolve to neither
 * are left verbatim.
 *
 * Fixes the previous implementation, which kept matching against the
 * original string while reassigning {@code value}: with several
 * placeholders whose replacements differ in length from the placeholder,
 * the stale indices corrupted the result (or threw
 * StringIndexOutOfBoundsException), and unresolved keys were replaced by
 * the literal string "null".
 *
 * @param s stream in java.util.Properties format; not closed by this method
 * @return resolved key/value pairs, in the order they appear in the stream
 * @throws IOException if reading the stream fails
 */
public static Map<String,String> loadPropertiesMap(InputStream s) throws IOException {
    final Map<String, String> ordered = new LinkedHashMap<String, String>();
    // Hack: use Properties for parsing, but record insertion order ourselves.
    Properties bp = new Properties() {
        @Override
        public synchronized Object put(Object key, Object value) {
            ordered.put((String) key, (String) value);
            return super.put(key, value);
        }
    };
    bp.load(s);
    final Pattern propertyRegex = Pattern.compile(Pattern.quote("${") + "[^" + Pattern.quote("{[()]}") + "]*" + Pattern.quote("}"));

    final Map<String, String> resolved = new LinkedHashMap<String, String>(ordered.size());

    for (String k : ordered.keySet()) {
        String value = ordered.get(k);

        // Rebuild the value with appendReplacement/appendTail so indices
        // always refer to the string actually being scanned, regardless of
        // the replacement lengths. Note: cycles between properties are not
        // detected; a single substitution pass is performed per value.
        Matcher matcher = propertyRegex.matcher(value);
        StringBuffer result = new StringBuffer();
        while (matcher.find()) {
            // strip the "${" prefix and "}" suffix to get the key
            String subKey = value.substring(matcher.start() + 2, matcher.end() - 1);
            String subValue = resolved.get(subKey);
            if (subValue == null)
                subValue = System.getProperty(subKey);
            // keep the placeholder as-is when it cannot be resolved
            String replacement = (subValue == null) ? matcher.group() : subValue;
            matcher.appendReplacement(result, Matcher.quoteReplacement(replacement));
        }
        matcher.appendTail(result);

        resolved.put(k, result.toString());
    }
    return resolved;
}
/**
 * Best-effort path of the JAR this class was loaded from.
 *
 * @return the file-system path of the launcher JAR, or an empty string when
 *         it cannot be determined (no protection domain, no code source, or
 *         a non-file location)
 */
static String mainJarPath() {
    ProtectionDomain protectionDomain = Bootstrap.class.getProtectionDomain();
    if (protectionDomain == null)
        return "";
    CodeSource source = protectionDomain.getCodeSource();
    if (source == null)
        return "";
    URL location = source.getLocation();
    if (location == null || !location.getProtocol().equals("file"))
        return "";
    return location.getPath();
}
/**
 * Writes the given bytes to a file, replacing any previous content.
 *
 * Replaces a hand-rolled FileOutputStream/BufferedOutputStream copy
 * (originally adapted from java2s.com) that silently swallowed exceptions
 * raised while flushing/closing the stream; {@link Files#write} both closes
 * the stream reliably and propagates such errors.
 *
 * @param file destination file (created or truncated)
 * @param bytes content to write
 * @throws IOException if the file cannot be written
 */
static void writeBytesToFile(File file, byte[] bytes) throws IOException {
    Files.write(file.toPath(), bytes);
}
/**
 * Ensures every given URL has a local copy in the cache, downloading missing
 * ones in parallel, and returns the corresponding local URLs.
 *
 * file:// URLs and URLs under the launcher's own bootstrap protocol are kept
 * as-is; anything else is mapped to its cache location (via CachePath) and
 * downloaded there if absent. Download progress is printed on stderr using
 * ANSI escape sequences. Any download error makes the JVM exit with
 * status 255.
 *
 * @param urls URLs of the JARs needed on the classpath
 * @param cacheDir root of the artifact cache; must be usable when at least one URL needs downloading
 * @param bootstrapProtocol custom protocol under which embedded resources are served
 * @return local URLs, one per input URL (completion order, not input order, for downloaded ones)
 * @throws MalformedURLException if a cached file cannot be converted back to a URL
 */
static List<URL> getLocalURLs(List<URL> urls, final File cacheDir, String bootstrapProtocol) throws MalformedURLException {

    ThreadFactory threadFactory = new ThreadFactory() {
        // from scalaz Strategy.DefaultDaemonThreadFactory
        ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory();
        public Thread newThread(Runnable r) {
            Thread t = defaultThreadFactory.newThread(r);
            // daemon threads, so pending downloads never keep the JVM alive
            t.setDaemon(true);
            return t;
        }
    };

    ExecutorService pool = Executors.newFixedThreadPool(concurrentDownloadCount, threadFactory);
    CompletionService<URL> completionService =
        new ExecutorCompletionService<URL>(pool);

    List<URL> localURLs = new ArrayList<URL>();
    List<URL> missingURLs = new ArrayList<URL>();

    // Partition the input: URLs already usable locally vs. URLs to download.
    for (URL url : urls) {
        String protocol = url.getProtocol();

        if (protocol.equals("file") || protocol.equals(bootstrapProtocol)) {
            localURLs.add(url);
        } else {
            // fourth argument is false because we don't want to store local files when bootstrapping
            File dest = CachePath.localFile(url.toString(), cacheDir, null, false);

            if (dest.exists()) {
                localURLs.add(dest.toURI().toURL());
            } else {
                missingURLs.add(url);
            }
        }
    }

    // Submit one download task per missing URL. Each task downloads to a
    // temporary file under a file lock, then atomically moves it in place,
    // so concurrent launchers don't corrupt the cache.
    for (final URL url : missingURLs) {
        completionService.submit(new Callable<URL>() {
            @Override
            public URL call() throws Exception {
                // fourth argument is false because we don't want to store local files when bootstrapping
                final File dest = CachePath.localFile(url.toString(), cacheDir, null, false);

                // re-check: another task/process may have completed it meanwhile
                if (!dest.exists()) {
                    FileOutputStream out = null;
                    FileLock lock = null;

                    final File tmpDest = CachePath.temporaryFile(dest);
                    final File lockFile = CachePath.lockFile(tmpDest);

                    try {
                        // create parent directories and open the lock file under
                        // the cache's structure lock
                        out = CachePath.withStructureLock(cacheDir, new Callable<FileOutputStream>() {
                            @Override
                            public FileOutputStream call() throws FileNotFoundException {
                                tmpDest.getParentFile().mkdirs();
                                lockFile.getParentFile().mkdirs();
                                dest.getParentFile().mkdirs();
                                return new FileOutputStream(lockFile);
                            }
                        });
                        try {
                            lock = out.getChannel().tryLock();
                            if (lock == null)
                                throw new RuntimeException("Ongoing concurrent download for " + url);
                            else
                                try {
                                    URLConnection conn = url.openConnection();
                                    long lastModified = conn.getLastModified();
                                    InputStream s = conn.getInputStream();
                                    byte[] b = readFullySync(s);
                                    // written to a temp file first, then moved atomically below
                                    tmpDest.deleteOnExit();
                                    writeBytesToFile(tmpDest, b);
                                    // preserve the server-side timestamp on the cached copy
                                    tmpDest.setLastModified(lastModified);
                                    Files.move(tmpDest.toPath(), dest.toPath(), StandardCopyOption.ATOMIC_MOVE);
                                }
                                finally {
                                    lock.release();
                                    lock = null;
                                    out.close();
                                    out = null;
                                    lockFile.delete();
                                }
                        }
                        catch (OverlappingFileLockException e) {
                            // lock already held by this JVM
                            throw new RuntimeException("Ongoing concurrent download for " + url);
                        }
                        finally {
                            if (lock != null) lock.release();
                        }
                    } catch (Exception e) {
                        System.err.println("Error while downloading " + url + ": " + e.getMessage() + ", ignoring it");
                        throw e;
                    }
                    finally {
                        if (out != null) out.close();
                    }
                }

                return dest.toURI().toURL();
            }
        });
    }

    // ANSI "erase current line" escape, used for the progress display below
    String clearLine = "\033[2K";

    try {
        // collect completed downloads until every input URL has a local copy
        while (localURLs.size() < urls.size()) {
            Future<URL> future = completionService.take();
            try {
                URL url = future.get();
                localURLs.add(url);
                int nowMissing = urls.size() - localURLs.size();
                // ANSI "cursor up one line": keeps the progress on one line
                String up = "\033[1A";
                System.err.print(clearLine + "Downloaded " + (missingURLs.size() - nowMissing) + " missing file(s) / " + missingURLs.size() + "\n" + up);
            } catch (ExecutionException ex) {
                // Error message already printed from the Callable above
                System.exit(255);
            }
        }
    } catch (InterruptedException ex) {
        exit("Interrupted");
    }

    System.err.print(clearLine);

    return localURLs;
}
/**
 * Exposes the launcher JAR path and the raw program arguments as system
 * properties: coursier.mainJar, coursier.main.arg-0, coursier.main.arg-1, ...
 *
 * @param mainJarPath path of the launcher JAR (may be empty)
 * @param args program arguments, published one property per argument
 */
static void setMainProperties(String mainJarPath, String[] args) {
    System.setProperty("coursier.mainJar", mainJarPath);
    int index = 0;
    for (String arg : args) {
        System.setProperty("coursier.main.arg-" + index, arg);
        index++;
    }
}
/**
 * Loads the given classpath resource as properties (with ${...} placeholder
 * substitution) and copies every entry into the system properties.
 *
 * NOTE(review): if the resource is absent, getResourceAsStream returns null
 * and this throws a NullPointerException — confirm callers only pass
 * resources embedded in the launcher.
 *
 * @param resource name of the properties resource to load
 * @throws IOException if the resource cannot be read
 */
static void setExtraProperties(String resource) throws IOException {
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    Map<String, String> extra = loadPropertiesMap(loader.getResourceAsStream(resource));
    for (Map.Entry<String, String> entry : extra.entrySet())
        System.setProperty(entry.getKey(), entry.getValue());
}
/**
 * Converts raw URL strings and embedded resource names into a single URL list.
 *
 * Resource names are checked for existence against the given loader, then
 * addressed under the custom bootstrap protocol. If any URL string is
 * malformed or any resource is missing, all problems are reported at once
 * and the JVM exits.
 *
 * @param rawURLs URL strings of remote JARs
 * @param resources names of JARs embedded in the launcher
 * @param bootstrapProtocol protocol under which embedded resources are served
 * @param loader class loader used to check resource existence
 * @return parsed URLs followed by bootstrap-protocol URLs, in input order
 * @throws MalformedURLException if a bootstrap-protocol URL cannot be built
 */
static List<URL> getURLs(String[] rawURLs, String[] resources, String bootstrapProtocol, ClassLoader loader) throws MalformedURLException {
    List<String> problems = new ArrayList<String>();
    List<URL> result = new ArrayList<URL>();

    for (String raw : rawURLs) {
        try {
            result.add(URI.create(raw).toURL());
        } catch (Exception ex) {
            problems.add(raw + ": " + ex.getMessage());
        }
    }

    for (String resource : resources) {
        if (loader.getResource(resource) == null)
            problems.add("Resource " + resource + " not found");
        else
            result.add(new URL(bootstrapProtocol, null, resource));
    }

    if (!problems.isEmpty()) {
        StringBuilder message = new StringBuilder("Error:");
        for (String problem : problems) {
            message.append("\n ");
            message.append(problem);
        }
        exit(message.toString());
    }

    return result;
}
// JARs from JARs can't be used directly, see:
// http://stackoverflow.com/questions/183292/classpath-including-jar-within-a-jar/2326775#2326775
// Loading them via a custom protocol, inspired by:
// http://stackoverflow.com/questions/26363573/registering-and-using-a-custom-java-net-url-protocol/26409796#26409796
/**
 * Registers a URL stream handler for the given custom protocol, serving URL
 * paths as classpath resources of the given loader.
 *
 * NOTE(review): URL.setURLStreamHandlerFactory may only be called once per
 * JVM, so this method must not be invoked twice.
 */
static void registerBootstrapUnder(final String bootstrapProtocol, final ClassLoader loader) {
    URL.setURLStreamHandlerFactory(new URLStreamHandlerFactory() {
        public URLStreamHandler createURLStreamHandler(String protocol) {
            if (!bootstrapProtocol.equals(protocol))
                return null;
            return new URLStreamHandler() {
                protected URLConnection openConnection(URL url) throws IOException {
                    String path = url.getPath();
                    URL resURL = loader.getResource(path);
                    if (resURL == null)
                        throw new FileNotFoundException("Resource " + path);
                    return resURL.openConnection();
                }
            };
        }
    });
}
// Bootstrap entry point: reads the embedded URL/resource lists, builds the
// classloader hierarchy (optionally with isolation contexts), then reflectively
// invokes the real application's main method.
public static void main(String[] args) throws Throwable {
setMainProperties(mainJarPath(), args);
setExtraProperties("bootstrap.properties");
// Target application class, written into the properties at bootstrap-JAR creation time.
String mainClass0 = System.getProperty("bootstrap.mainClass");
File cacheDir = CachePath.defaultCacheDirectory();
// Random suffix so several bootstraps in one JVM don't collide on the protocol name.
Random rng = new Random();
String protocol = "bootstrap" + rng.nextLong();
ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
registerBootstrapUnder(protocol, contextLoader);
// Embedded lists of remote URLs and in-JAR resources making up the classpath.
String[] strUrls = readStringSequence(defaultURLResource);
String[] resources = readStringSequence(defaultJarResource);
List<URL> urls = getURLs(strUrls, resources, protocol, contextLoader);
List<URL> localURLs = getLocalURLs(urls, cacheDir, protocol);
String[] isolationIDs = readStringSequence(isolationIDsResource);
Map<String, URL[]> perIsolationContextURLs = readIsolationContexts(cacheDir, isolationIDs, protocol, contextLoader);
Thread thread = Thread.currentThread();
ClassLoader parentClassLoader = thread.getContextClassLoader();
// Chain one IsolatedClassLoader per isolation context on top of the current loader.
for (String isolationID: isolationIDs) {
URL[] contextURLs = perIsolationContextURLs.get(isolationID);
parentClassLoader = new IsolatedClassLoader(contextURLs, parentClassLoader, new String[]{ isolationID });
}
// Main application loader, seeing the downloaded/local JARs.
ClassLoader classLoader = new URLClassLoader(localURLs.toArray(new URL[localURLs.size()]), parentClassLoader);
Class<?> mainClass = null;
Method mainMethod = null;
try {
mainClass = classLoader.loadClass(mainClass0);
} catch (ClassNotFoundException ex) {
exit("Error: class " + mainClass0 + " not found");
}
try {
Class params[] = { String[].class };
mainMethod = mainClass.getMethod("main", params);
}
catch (NoSuchMethodException ex) {
exit("Error: main method not found in class " + mainClass0);
}
List<String> userArgs0 = new ArrayList<String>();
for (int i = 0; i < args.length; i++)
userArgs0.add(args[i]);
// Run the application with its own loader as the thread context loader,
// restoring the previous one afterwards.
thread.setContextClassLoader(classLoader);
try {
Object mainArgs[] = { userArgs0.toArray(new String[userArgs0.size()]) };
mainMethod.invoke(null, mainArgs);
}
catch (IllegalAccessException ex) {
exit(ex.getMessage());
}
catch (InvocationTargetException ex) {
// Unwrap so the application's own exception (and stack trace) surfaces.
throw ex.getCause();
}
finally {
thread.setContextClassLoader(parentClassLoader);
}
}
}

View File

@@ -1,29 +0,0 @@
package coursier;
import java.net.URL;
import java.net.URLClassLoader;
/**
 * A {@link URLClassLoader} tagged with the names of the isolation contexts it
 * provides. Applications discover it reflectively (see getIsolationTargets),
 * so the exact class and method names are part of its contract.
 */
public class IsolatedClassLoader extends URLClassLoader {
// Names of the isolation contexts this loader provides.
private String[] isolationTargets;
public IsolatedClassLoader(
URL[] urls,
ClassLoader parent,
String[] isolationTargets
) {
super(urls, parent);
this.isolationTargets = isolationTargets;
}
/**
 * Applications wanting to access an isolated `ClassLoader` should inspect the hierarchy of
 * loaders, and look into each of them for this method, by reflection. Then they should
 * call it (still by reflection), and look for an agreed in advance target in it. If it is found,
 * then the corresponding `ClassLoader` is the one with isolated dependencies.
 */
public String[] getIsolationTargets() {
return isolationTargets;
}
}

444
build.sbt
View File

@@ -3,246 +3,15 @@ import Aliases._
import Settings._
import Publish._
import sbtcrossproject.CrossPlugin.autoImport.crossProject
lazy val core = crossProject(JSPlatform, JVMPlatform)
.jvmConfigure(_.enablePlugins(ShadingPlugin))
.jvmSettings(
shading,
libs ++= Seq(
Deps.fastParse % "shaded",
Deps.jsoup % "shaded",
Deps.scalaXml
),
shadeNamespaces ++= Set(
"org.jsoup",
"fastparse",
"sourcecode"
),
generatePropertyFile
)
.jsSettings(
libs ++= Seq(
CrossDeps.fastParse.value,
CrossDeps.scalaJsDom.value
)
)
.settings(
shared,
name := "coursier",
Mima.previousArtifacts,
Mima.coreFilters
)
lazy val coreJvm = core.jvm
lazy val coreJs = core.js
lazy val tests = crossProject(JSPlatform, JVMPlatform)
.dependsOn(core, cache % Test, scalaz)
.jsSettings(
scalaJSStage.in(Global) := FastOptStage,
testOptions := testOptions.dependsOn(runNpmInstallIfNeeded).value
)
.configs(Integration)
.settings(
shared,
dontPublish,
hasITs,
coursierPrefix,
libs += Deps.scalaAsync,
utest,
sharedTestResources
)
lazy val testsJvm = tests.jvm
lazy val testsJs = tests.js
lazy val `proxy-tests` = project
.dependsOn(testsJvm % "test->test")
.configs(Integration)
.settings(
shared,
dontPublish,
hasITs,
coursierPrefix,
libs ++= Seq(
Deps.dockerClient,
Deps.scalaAsync,
Deps.slf4JNop
),
utest,
sharedTestResources
)
lazy val paths = project
.settings(
pureJava,
dontPublish,
addDirectoriesSources
)
lazy val cache = crossProject(JSPlatform, JVMPlatform)
.dependsOn(core)
.jvmSettings(
addPathsSources
)
.jsSettings(
name := "fetch-js"
)
.settings(
shared,
Mima.previousArtifacts,
coursierPrefix,
Mima.cacheFilters
)
lazy val cacheJvm = cache.jvm
lazy val cacheJs = cache.js
lazy val scalaz = crossProject(JSPlatform, JVMPlatform)
.dependsOn(cache)
.jvmSettings(
libs += Deps.scalazConcurrent
)
.jsSettings(
libs += CrossDeps.scalazCore.value
)
.settings(
name := "scalaz-interop",
shared,
Mima.previousArtifacts,
coursierPrefix
)
lazy val scalazJvm = scalaz.jvm
lazy val scalazJs = scalaz.js
lazy val bootstrap = project
.settings(
pureJava,
dontPublish,
addPathsSources,
// seems not to be automatically found with sbt 0.13.16-M1 :-/
mainClass := Some("coursier.Bootstrap"),
renameMainJar("bootstrap.jar")
)
lazy val extra = project
.enablePlugins(ShadingPlugin)
.dependsOn(coreJvm, cacheJvm)
.settings(
shared,
coursierPrefix,
shading,
libs ++= {
if (scalaBinaryVersion.value == "2.12")
Seq(
Deps.scalaNativeTools % "shaded",
// Still applies?
// brought by only tools, so should be automatically shaded,
// but issues in ShadingPlugin (with things published locally?)
// seem to require explicit shading...
Deps.scalaNativeNir % "shaded",
Deps.scalaNativeUtil % "shaded",
Deps.fastParse % "shaded"
)
else
Nil
},
shadeNamespaces ++=
Set(
"fastparse",
"sourcecode"
) ++
// not blindly shading the whole scala.scalanative here, for some constant strings starting with
// "scala.scalanative.native." in scalanative not to get prefixed with "coursier.shaded."
Seq("codegen", "io", "linker", "nir", "optimizer", "tools", "util")
.map("scala.scalanative." + _)
)
lazy val cli = project
.dependsOn(coreJvm, cacheJvm, extra, scalazJvm)
.enablePlugins(PackPlugin, SbtProguard)
.settings(
shared,
dontPublishIn("2.10", "2.11"),
coursierPrefix,
unmanagedResources.in(Test) += packageBin.in(bootstrap).in(Compile).value,
libs ++= {
if (scalaBinaryVersion.value == "2.12")
Seq(
Deps.caseApp,
Deps.argonautShapeless,
Deps.junit % Test, // to be able to run tests with pants
Deps.scalatest % Test
)
else
Seq()
},
mainClass.in(Compile) := {
if (scalaBinaryVersion.value == "2.12")
Some("coursier.cli.Coursier")
else
None
},
addBootstrapJarAsResource,
proguardedCli
)
lazy val web = project
.enablePlugins(ScalaJSPlugin, ScalaJSBundlerPlugin)
.dependsOn(coreJs, cacheJs)
.settings(
shared,
dontPublish,
libs ++= {
if (scalaBinaryVersion.value == "2.12")
Seq(
CrossDeps.scalaJsJquery.value,
CrossDeps.scalaJsReact.value
)
else
Seq()
},
sourceDirectory := {
val dir = sourceDirectory.value
if (scalaBinaryVersion.value == "2.12")
dir
else
dir / "target" / "dummy"
},
noTests,
webjarBintrayRepository,
scalaJSUseMainModuleInitializer := true,
webpackConfigFile := Some(resourceDirectory.in(Compile).value / "webpack.config.js"),
npmDependencies.in(Compile) ++= Seq(
"bootstrap" -> "3.3.4",
"bootstrap-treeview" -> "1.2.0",
"graphdracula" -> "1.2.1",
"webpack-raphael" -> "2.1.4",
"react" -> "15.6.1",
"react-dom" -> "15.6.1",
"requirejs" -> "2.3.6"
)
)
lazy val readme = project
.in(file("doc/readme"))
.dependsOn(coreJvm, cacheJvm, scalazJvm)
.enablePlugins(TutPlugin)
.settings(
shared,
dontPublish,
tutSourceDirectory := baseDirectory.value,
tutTargetDirectory := baseDirectory.in(LocalRootProject).value
)
val coursierVersion = "1.1.0-M7"
lazy val `sbt-shared` = project
.dependsOn(coreJvm, cacheJvm)
.settings(
plugin,
utest,
libraryDependencies ++= Seq(
"io.get-coursier" %% "coursier" % coursierVersion,
"io.get-coursier" %% "coursier-cache" % coursierVersion
),
// because we don't publish for 2.11 the following declaration
// is more wordy than usual
// once support for sbt 0.13 is removed, this dependency can go away
@ -259,20 +28,22 @@ lazy val `sbt-shared` = project
)
lazy val `sbt-coursier` = project
.dependsOn(coreJvm, cacheJvm, extra, `sbt-shared`, scalazJvm)
.dependsOn(`sbt-shared`)
.settings(
plugin,
utest,
libraryDependencies ++= Seq(
"io.get-coursier" %% "coursier" % coursierVersion,
"io.get-coursier" %% "coursier-cache" % coursierVersion,
"io.get-coursier" %% "coursier-extra" % coursierVersion,
"io.get-coursier" %% "coursier-scalaz-interop" % coursierVersion
),
scriptedDependencies := {
scriptedDependencies.value
// TODO Get dependency projects automatically
// (but shouldn't scripted itself handle that…?)
publishLocal.in(coreJvm).value
publishLocal.in(cacheJvm).value
publishLocal.in(extra).value
publishLocal.in(`sbt-shared`).value
publishLocal.in(scalazJvm).value
}
)
@ -280,13 +51,7 @@ lazy val `sbt-pgp-coursier` = project
.dependsOn(`sbt-coursier`)
.settings(
plugin,
libs ++= {
scalaBinaryVersion.value match {
case "2.10" | "2.12" =>
Seq(Deps.sbtPgp.value)
case _ => Nil
}
},
libs += Deps.sbtPgp.value,
scriptedDependencies := {
scriptedDependencies.value
// TODO Get dependency projects automatically
@ -300,7 +65,6 @@ lazy val `sbt-shading` = project
.settings(
plugin,
shading,
localM2Repository, // for a possibly locally published jarjar
libs += Deps.jarjar % "shaded",
// dependencies of jarjar-core - directly depending on these so that they don't get shaded
libs ++= Deps.jarjarTransitiveDeps,
@ -311,196 +75,18 @@ lazy val `sbt-shading` = project
}
)
lazy val okhttp = project
.dependsOn(cacheJvm)
.settings(
shared,
coursierPrefix,
libs += Deps.okhttpUrlConnection
)
lazy val jvm = project
.dummy
.aggregate(
coreJvm,
testsJvm,
`proxy-tests`,
paths,
cacheJvm,
scalazJvm,
bootstrap,
extra,
cli,
`sbt-shared`,
`sbt-coursier`,
`sbt-pgp-coursier`,
`sbt-shading`,
readme,
okhttp
)
.settings(
shared,
dontPublish,
moduleName := "coursier-jvm"
)
lazy val js = project
.dummy
.aggregate(
coreJs,
cacheJs,
testsJs,
web
)
.settings(
shared,
dontPublish,
moduleName := "coursier-js"
)
lazy val coursier = project
.in(root)
.aggregate(
coreJvm,
coreJs,
testsJvm,
testsJs,
`proxy-tests`,
paths,
cacheJvm,
cacheJs,
bootstrap,
extra,
cli,
`sbt-shared`,
`sbt-coursier`,
`sbt-pgp-coursier`,
`sbt-shading`,
scalazJvm,
scalazJs,
web,
readme,
okhttp
`sbt-shading`
)
.settings(
shared,
dontPublish,
moduleName := "coursier-root"
moduleName := "sbt-coursier-root"
)
lazy val addBootstrapJarAsResource = {
import java.nio.file.Files
packageBin.in(Compile) := {
val bootstrapJar = packageBin.in(bootstrap).in(Compile).value
val source = packageBin.in(Compile).value
val dest = source.getParentFile / (source.getName.stripSuffix(".jar") + "-with-bootstrap.jar")
ZipUtil.addToZip(source, dest, Seq(
"bootstrap.jar" -> Files.readAllBytes(bootstrapJar.toPath)
))
dest
}
}
lazy val addBootstrapInProguardedJar = {
import java.nio.charset.StandardCharsets
import java.nio.file.Files
proguard.in(Proguard) := {
val bootstrapJar = packageBin.in(bootstrap).in(Compile).value
val source = proguardedJar.value
val dest = source.getParentFile / (source.getName.stripSuffix(".jar") + "-with-bootstrap.jar")
val dest0 = source.getParentFile / (source.getName.stripSuffix(".jar") + "-with-bootstrap-and-prelude.jar")
// TODO Get from cli original JAR
val manifest =
s"""Manifest-Version: 1.0
|Implementation-Title: ${name.value}
|Implementation-Version: ${version.value}
|Specification-Vendor: ${organization.value}
|Specification-Title: ${name.value}
|Implementation-Vendor-Id: ${organization.value}
|Specification-Version: ${version.value}
|Implementation-URL: ${homepage.value.getOrElse("")}
|Implementation-Vendor: ${organization.value}
|Main-Class: ${mainClass.in(Compile).value.getOrElse(sys.error("Main class not found"))}
|""".stripMargin
ZipUtil.addToZip(source, dest, Seq(
"bootstrap.jar" -> Files.readAllBytes(bootstrapJar.toPath),
"META-INF/MANIFEST.MF" -> manifest.getBytes(StandardCharsets.UTF_8)
))
ZipUtil.addPrelude(dest, dest0)
Seq(dest0)
}
}
lazy val proguardedCli = Seq(
proguardVersion.in(Proguard) := SharedVersions.proguard,
proguardOptions.in(Proguard) ++= Seq(
"-dontwarn",
"-dontoptimize", // required since the switch to scala 2.12
"-keep class coursier.cli.Coursier {\n public static void main(java.lang.String[]);\n}",
"-keep class coursier.cli.IsolatedClassLoader {\n public java.lang.String[] getIsolationTargets();\n}",
"-adaptresourcefilenames **.properties",
"""-keep class scala.Symbol { *; }"""
),
javaOptions.in(Proguard, proguard) := Seq("-Xmx3172M"),
artifactPath.in(Proguard) := proguardDirectory.in(Proguard).value / "coursier-standalone.jar",
artifacts ++= {
if (scalaBinaryVersion.value == "2.12")
Seq(proguardedArtifact.value)
else
Nil
},
addBootstrapInProguardedJar,
addProguardedJar
)
lazy val sharedTestResources = {
unmanagedResourceDirectories.in(Test) += {
val baseDir = baseDirectory.in(LocalRootProject).value
val testsMetadataDir = baseDir / "tests" / "metadata" / "https"
if (!testsMetadataDir.exists())
gitLock.synchronized {
if (!testsMetadataDir.exists()) {
val cmd = Seq("git", "submodule", "update", "--init", "--recursive", "--", "tests/metadata")
runCommand(cmd, baseDir)
}
}
baseDir / "tests" / "shared" / "src" / "test" / "resources"
}
}
// Using directly the sources of directories, rather than depending on it.
// This is required to use it from the bootstrap module, whose jar is launched as is (so shouldn't require dependencies).
// This is done for the other use of it too, from the cache module, not to have to manage two ways of depending on it.
lazy val addDirectoriesSources = {
unmanagedSourceDirectories.in(Compile) += {
val baseDir = baseDirectory.in(LocalRootProject).value
val directoriesDir = baseDir / "directories" / "src" / "main" / "java"
if (!directoriesDir.exists())
gitLock.synchronized {
if (!directoriesDir.exists()) {
val cmd = Seq("git", "submodule", "update", "--init", "--recursive", "--", "directories")
runCommand(cmd, baseDir)
}
}
directoriesDir
}
}
lazy val addPathsSources = Seq(
addDirectoriesSources,
unmanagedSourceDirectories.in(Compile) ++= unmanagedSourceDirectories.in(Compile).in(paths).value
)

8
cache/BUILD vendored
View File

@ -1,8 +0,0 @@
scala_library(
name = "cache",
dependencies = [
"core:core",
"paths/src/main/java:paths",
],
sources = rglobs("jvm/*.scala", "shared/*.scala"),
)

View File

@@ -1,113 +0,0 @@
package coursier
import coursier.util.{EitherT, Task}
import org.scalajs.dom.raw.{Event, XMLHttpRequest}
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.scalajs.js
import js.Dynamic.{global => g}
import scala.scalajs.js.timers._
// Scala.js platform layer: fetches artifact contents over HTTP, either via a
// JSONP proxy (browser) or an XMLHttpRequest shim (node + xhr2 package).
object Platform {
def encodeURIComponent(s: String): String =
g.encodeURIComponent(s).asInstanceOf[String]
// True when a global `jsonp` function exists — assumed to mean "browser". TODO confirm.
lazy val jsonpAvailable = !js.isUndefined(g.jsonp)
// Request timeout in milliseconds, environment-dependent.
val timeout =
if (jsonpAvailable)
10000 // Browser - better to have it > 5000 for complex resolutions
else
4000 // Node - tests crash if not < 5000
/** Available if we're running on node, and package xhr2 is installed */
lazy val xhr = g.require("xhr2")
def xhrReq() =
js.Dynamic.newInstance(xhr)().asInstanceOf[XMLHttpRequest]
// Fails the promise after `timeout` ms unless it completed first.
def fetchTimeout(target: String, p: Promise[_]) =
setTimeout(timeout) {
if (!p.isCompleted) {
p.failure(new Exception(s"Timeout when fetching $target"))
}
}
// FIXME Take into account HTTP error codes from YQL response
// Fetches `url` through the Yahoo YQL JSONP proxy (works around browser CORS).
def proxiedJsonp(url: String)(implicit executionContext: ExecutionContext): Future[String] = {
val url0 =
"https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20xml%20where%20url%3D%22" +
encodeURIComponent(url) +
"%22&format=jsonp&diagnostics=true"
val p = Promise[String]()
g.jsonp(url0, (res: js.Dynamic) => if (!p.isCompleted) {
val success = !js.isUndefined(res) && !js.isUndefined(res.results)
if (success)
p.success(res.results.asInstanceOf[js.Array[String]].mkString("\n"))
else
p.failure(new Exception(s"Fetching $url ($url0)"))
})
fetchTimeout(s"$url ($url0)", p)
p.future
}
// GETs `url` as a string, picking the transport for the current environment.
def get(url: String)(implicit executionContext: ExecutionContext): Future[String] =
if (jsonpAvailable)
proxiedJsonp(url)
else {
val p = Promise[String]()
val xhrReq0 = xhrReq()
val f = { _: Event =>
p.success(xhrReq0.responseText)
}
xhrReq0.onload = f
xhrReq0.open("GET", url)
xhrReq0.send()
fetchTimeout(url, p)
p.future
}
// Fetch.Content implementation: Right(content) on success, Left(message) on error.
val artifact: Fetch.Content[Task] = { artifact =>
EitherT(
Task { implicit ec =>
get(artifact.url)
.map(Right(_))
.recover { case e: Exception =>
Left(e.toString + Option(e.getMessage).fold("")(" (" + _ + ")"))
}
}
)
}
def fetch(
repositories: Seq[core.Repository]
): Fetch.Metadata[Task] =
Fetch.from(repositories, Platform.artifact)
// Callback interface for observing fetch progress.
trait Logger {
def fetching(url: String): Unit
def fetched(url: String): Unit
def other(url: String, msg: String): Unit
}
// Like `artifact`, but notifies the logger before/after each fetch and on errors.
def artifactWithLogger(logger: Logger): Fetch.Content[Task] = { artifact =>
EitherT(
Task { implicit ec =>
Future(logger.fetching(artifact.url))
.flatMap(_ => get(artifact.url))
.map { s => logger.fetched(artifact.url); Right(s) }
.recover { case e: Exception =>
val msg = e.toString + Option(e.getMessage).fold("")(" (" + _ + ")")
logger.other(artifact.url, msg)
Left(msg)
}
}
)
}
}

View File

@@ -1,8 +0,0 @@
package coursier.util
// Platform-side mixin providing the implicit Gather instance for Task.
abstract class PlatformTask {
implicit val gather: Gather[Task] =
new TaskGather {}
}

View File

@@ -1,9 +0,0 @@
package coursier
import java.net.URLConnection
import coursier.core.Authentication
// URLConnection that can have credentials applied before the request is made.
trait AuthenticatedURLConnection extends URLConnection {
def authenticate(authentication: Authentication): Unit
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,112 +0,0 @@
package coursier
import java.net.MalformedURLException
import coursier.core.Authentication
import coursier.ivy.IvyRepository
import coursier.util.{Parse, ValidationNel}
import coursier.util.Traverse.TraverseOps
// Parsers for user-supplied repository strings and cache-policy strings.
object CacheParse {
// Parses a repository string: named shortcuts ("ivy2local"/"ivy2cache"), else a
// generic repository whose URL may embed `user:password@` credentials.
def repository(s: String): Either[String, Repository] =
if (s == "ivy2local" || s == "ivy2Local")
Right(Cache.ivy2Local)
else if (s == "ivy2cache" || s == "ivy2Cache")
Right(Cache.ivy2Cache)
else {
val repo = Parse.repository(s)
// Extract the base URL to validate: Maven root, or the leading constant
// chunks of an Ivy pattern.
val url = repo.right.map {
case m: MavenRepository =>
m.root
case i: IvyRepository =>
// FIXME We're not handling metadataPattern here
i.pattern.chunks.takeWhile {
case _: coursier.ivy.Pattern.Chunk.Const => true
case _ => false
}.map(_.string).mkString
case r =>
sys.error(s"Unrecognized repository: $r")
}
val validatedUrl = try {
url.right.map(Cache.url)
} catch {
case e: MalformedURLException =>
Left("Error parsing URL " + url + Option(e.getMessage).fold("")(" (" + _ + ")"))
}
validatedUrl.right.flatMap { url =>
// If the URL carries user info, strip it from the URL and move the
// credentials into the repository's Authentication.
Option(url.getUserInfo) match {
case None =>
repo
case Some(userInfo) =>
userInfo.split(":", 2) match {
case Array(user, password) =>
// Rebuild the URL without the user info part.
val baseUrl = new java.net.URL(
url.getProtocol,
url.getHost,
url.getPort,
url.getFile
).toString
repo.right.map {
case m: MavenRepository =>
m.copy(
root = baseUrl,
authentication = Some(Authentication(user, password))
)
case i: IvyRepository =>
i.copy(
pattern = coursier.ivy.Pattern(
coursier.ivy.Pattern.Chunk.Const(baseUrl) +: i.pattern.chunks.dropWhile {
case _: coursier.ivy.Pattern.Chunk.Const => true
case _ => false
}
),
authentication = Some(Authentication(user, password))
)
case r =>
sys.error(s"Unrecognized repository: $r")
}
case _ =>
Left(s"No password found in user info of URL $url")
}
}
}
}
// Parses several repository strings, accumulating all errors.
def repositories(l: Seq[String]): ValidationNel[String, Seq[Repository]] =
l.toVector.validationNelTraverse { s =>
ValidationNel.fromEither(repository(s))
}
// Parses a comma-separated cache-policy string, accumulating unknown modes as errors.
def cachePolicies(s: String): ValidationNel[String, Seq[CachePolicy]] =
s
.split(',')
.toVector
.validationNelTraverse[String, Seq[CachePolicy]] {
case "offline" =>
ValidationNel.success(Seq(CachePolicy.LocalOnly))
case "update-local-changing" =>
ValidationNel.success(Seq(CachePolicy.LocalUpdateChanging))
case "update-local" =>
ValidationNel.success(Seq(CachePolicy.LocalUpdate))
case "update-changing" =>
ValidationNel.success(Seq(CachePolicy.UpdateChanging))
case "update" =>
ValidationNel.success(Seq(CachePolicy.Update))
case "missing" =>
ValidationNel.success(Seq(CachePolicy.FetchMissing))
case "force" =>
ValidationNel.success(Seq(CachePolicy.ForceDownload))
case "default" =>
ValidationNel.success(Seq(CachePolicy.LocalOnly, CachePolicy.FetchMissing))
case other =>
ValidationNel.failure(s"Unrecognized mode: $other")
}
.map(_.flatten)
}

View File

@@ -1,113 +0,0 @@
package coursier
// Enumeration of cache behaviors: which files to pick locally, when to check
// for updates, and when to download.
sealed abstract class CachePolicy extends Product with Serializable
object CachePolicy {
/** Only pick local files, possibly from the cache. Don't try to download anything. */
case object LocalOnly extends CachePolicy
/**
* Only pick local files. If one of these local files corresponds to a changing artifact, check
* for updates, and download these if needed.
*
* If no local file is found, *don't* try download it. Updates are only checked for files already
* in cache.
*
* Follows the TTL parameter (assumes no update is needed if the last one is recent enough).
*/
case object LocalUpdateChanging extends CachePolicy
/**
* Only pick local files, check if any update is available for them, and download these if needed.
*
* If no local file is found, *don't* try download it. Updates are only checked for files already
* in cache.
*
* Follows the TTL parameter (assumes no update is needed if the last one is recent enough).
*
* Unlike `LocalUpdateChanging`, all found local files are checked for updates, not just the
* changing ones.
*/
case object LocalUpdate extends CachePolicy
/**
* Pick local files, and download the missing ones.
*
* For changing ones, check for updates, and download those if any.
*
* Follows the TTL parameter (assumes no update is needed if the last one is recent enough).
*/
case object UpdateChanging extends CachePolicy
/**
* Pick local files, download the missing ones, check for updates and download those if any.
*
* Follows the TTL parameter (assumes no update is needed if the last one is recent enough).
*
* Unlike `UpdateChanging`, all found local files are checked for updates, not just the changing
* ones.
*/
case object Update extends CachePolicy
/**
* Pick local files, download the missing ones.
*
* No updates are checked for files already downloaded.
*/
case object FetchMissing extends CachePolicy
/**
* (Re-)download all files.
*
* Erases files already in cache.
*/
case object ForceDownload extends CachePolicy
// Fallback policy chain used when neither the env var nor the property applies.
private val baseDefault = Seq(
// first, try to update changing artifacts that were previously downloaded (follows TTL)
CachePolicy.LocalUpdateChanging,
// then, use what's available locally
CachePolicy.LocalOnly,
// lastly, try to download what's missing
CachePolicy.FetchMissing
)
// Resolves the default policies: COURSIER_MODE env var, then the coursier.mode
// Java property, then `baseDefault`. Unparseable values are warned about and ignored.
def default: Seq[CachePolicy] = {
def fromOption(value: Option[String], description: String): Option[Seq[CachePolicy]] =
value.filter(_.nonEmpty).flatMap {
str =>
CacheParse.cachePolicies(str).either match {
case Right(Seq()) =>
Console.err.println(
s"Warning: no mode found in $description, ignoring it."
)
None
case Right(policies) =>
Some(policies)
case Left(_) =>
Console.err.println(
s"Warning: unrecognized mode in $description, ignoring it."
)
None
}
}
val fromEnv = fromOption(
sys.env.get("COURSIER_MODE"),
"COURSIER_MODE environment variable"
)
def fromProps = fromOption(
sys.props.get("coursier.mode"),
"Java property coursier.mode"
)
fromEnv
.orElse(fromProps)
.getOrElse(baseDefault)
}
}

View File

@@ -1,80 +0,0 @@
package coursier
import java.io.File
// Error ADT for cache/download failures; `type` is a short machine-ish tag,
// `message` the human-readable detail.
sealed abstract class FileError(
val `type`: String,
val message: String
) extends Product with Serializable {
// One-line rendering: "<type>: <message>".
def describe: String = s"${`type`}: $message"
// True only for NotFound errors.
final def notFound: Boolean = this match {
case _: FileError.NotFound => true
case _ => false
}
}
object FileError {
final case class DownloadError(reason: String) extends FileError(
"download error",
reason
)
// `permanent`: whether the absence is known to be permanent (None when unknown).
final case class NotFound(
file: String,
permanent: Option[Boolean] = None
) extends FileError(
"not found",
file
)
// `realm`: authentication realm reported by the server, if any.
final case class Unauthorized(
file: String,
realm: Option[String]
) extends FileError(
"unauthorized",
file + realm.fold("")(" (" + _ + ")")
)
final case class ChecksumNotFound(
sumType: String,
file: String
) extends FileError(
"checksum not found",
file
)
final case class ChecksumFormatError(
sumType: String,
file: String
) extends FileError(
"checksum format error",
file
)
final case class WrongChecksum(
sumType: String,
got: String,
expected: String,
file: String,
sumFile: String
) extends FileError(
"wrong checksum",
file
)
// Errors that may succeed if the operation is retried later.
sealed abstract class Recoverable(
`type`: String,
message: String
) extends FileError(`type`, message)
final case class Locked(file: File) extends Recoverable(
"locked",
file.toString
)
final case class ConcurrentDownload(url: String) extends Recoverable(
"concurrent download",
url
)
}

View File

@@ -1,505 +0,0 @@
package coursier
import java.io.{ File, Writer }
import java.sql.Timestamp
import java.util.concurrent._
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable.ArrayBuffer
object TermDisplay {
def defaultFallbackMode: Boolean = {
val env0 = sys.env.get("COURSIER_PROGRESS").map(_.toLowerCase).collect {
case "true" | "enable" | "1" => true
case "false" | "disable" | "0" => false
}
def compatibilityEnv = sys.env.get("COURSIER_NO_TERM").nonEmpty
def nonInteractive = System.console() == null
def insideEmacs = sys.env.contains("INSIDE_EMACS")
def ci = sys.env.contains("CI")
val env = env0.fold(compatibilityEnv)(!_)
env || nonInteractive || insideEmacs || ci
}
private sealed abstract class Info extends Product with Serializable {
def fraction: Option[Double]
def display(isDone: Boolean): String
def watching: Boolean
}
private final case class DownloadInfo(
downloaded: Long,
previouslyDownloaded: Long,
length: Option[Long],
startTime: Long,
updateCheck: Boolean,
watching: Boolean
) extends Info {
/** 0.0 to 1.0 */
def fraction: Option[Double] = length.map(downloaded.toDouble / _)
/** Byte / s */
def rate(): Option[Double] = {
val currentTime = System.currentTimeMillis()
if (currentTime > startTime)
Some((downloaded - previouslyDownloaded).toDouble / (System.currentTimeMillis() - startTime) * 1000.0)
else
None
}
// Scala version of http://stackoverflow.com/questions/3758606/how-to-convert-byte-size-into-human-readable-format-in-java/3758880#3758880
private def byteCount(bytes: Long, si: Boolean = false) = {
val unit = if (si) 1000 else 1024
if (bytes < unit)
bytes + " B"
else {
val prefixes = if (si) "kMGTPE" else "KMGTPE"
val exp = (math.log(bytes) / math.log(unit)).toInt min prefixes.length
val pre = prefixes.charAt(exp - 1) + (if (si) "" else "i")
f"${bytes / math.pow(unit, exp)}%.1f ${pre}B"
}
}
def display(isDone: Boolean): String = {
val actualFraction = fraction
.orElse(if (isDone) Some(1.0) else None)
.orElse(if (downloaded == 0L) Some(0.0) else None)
val start =
actualFraction match {
case None =>
" [ ] "
case Some(frac) =>
val elem = if (watching) "." else "#"
val decile = (10.0 * frac).toInt
assert(decile >= 0)
assert(decile <= 10)
f"${100.0 * frac}%5.1f%%" +
" [" + (elem * decile) + (" " * (10 - decile)) + "] "
}
start +
byteCount(downloaded) +
rate().fold("")(r => s" (${byteCount(r.toLong)} / s)")
}
}
private val format =
new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
private def formatTimestamp(ts: Long): String =
format.format(new Timestamp(ts))
private final case class CheckUpdateInfo(
currentTimeOpt: Option[Long],
remoteTimeOpt: Option[Long],
isDone: Boolean
) extends Info {
def watching = false
def fraction = None
def display(isDone: Boolean): String = {
if (isDone)
(currentTimeOpt, remoteTimeOpt) match {
case (Some(current), Some(remote)) =>
if (current < remote)
s"Updated since ${formatTimestamp(current)} (${formatTimestamp(remote)})"
else if (current == remote)
s"No new update since ${formatTimestamp(current)}"
else
s"Warning: local copy newer than remote one (${formatTimestamp(current)} > ${formatTimestamp(remote)})"
case (Some(_), None) =>
// FIXME Likely a 404 Not found, that should be taken into account by the cache
"No modified time in response"
case (None, Some(remote)) =>
s"Last update: ${formatTimestamp(remote)}"
case (None, None) =>
"" // ???
}
else
currentTimeOpt match {
case Some(current) =>
s"Checking for updates since ${formatTimestamp(current)}"
case None =>
"" // ???
}
}
}
private class UpdateDisplayRunnable(
beforeOutput: => Unit,
out: Writer,
width: Int,
fallbackMode: Boolean
) extends Runnable {
import Terminal.Ansi
private var currentHeight = 0
private var printedAnything0 = false
private var stopped = false
def printedAnything() = printedAnything0
private val needsUpdate = new AtomicBoolean(false)
def update(): Unit =
needsUpdate.set(true)
private val downloads = new ArrayBuffer[String]
private val doneQueue = new ArrayBuffer[(String, Info)]
val infos = new ConcurrentHashMap[String, Info]
def newEntry(
url: String,
info: Info,
fallbackMessage: => String
): Unit = {
assert(!infos.containsKey(url), s"Attempts to download $url twice in parallel")
val prev = infos.putIfAbsent(url, info)
assert(prev == null, s"Attempts to download $url twice in parallel (second check)")
if (fallbackMode) {
// FIXME What about concurrent accesses to out from the thread above?
out.write(fallbackMessage)
out.flush()
}
downloads.synchronized {
downloads.append(url)
}
update()
}
def removeEntry(
url: String,
success: Boolean,
fallbackMessage: => String
)(
update0: Info => Info
): Unit = {
val inf = downloads.synchronized {
downloads -= url
val info = infos.remove(url)
assert(info != null, s"$url was not being downloaded")
if (success)
doneQueue += (url -> update0(info))
info
}
if (fallbackMode && success) {
// FIXME What about concurrent accesses to out from the thread above?
out.write((if (inf.watching) "(watching) " else "") + fallbackMessage)
out.flush()
}
update()
}
private def reflowed(url: String, info: Info) = {
val extra = info match {
case downloadInfo: DownloadInfo =>
val pctOpt = downloadInfo.fraction.map(100.0 * _)
if (downloadInfo.length.isEmpty && downloadInfo.downloaded == 0L)
""
else
s"(${pctOpt.map(pct => f"$pct%.2f %%, ").mkString}${downloadInfo.downloaded}${downloadInfo.length.map(" / " + _).mkString})"
case _: CheckUpdateInfo =>
"Checking for updates"
}
val baseExtraWidth = width / 5
val total = url.length + 1 + extra.length
val (url0, extra0) =
if (total >= width) { // or > ? If equal, does it go down 2 lines?
val overflow = total - width + 1
val extra0 =
if (extra.length > baseExtraWidth)
extra.take((baseExtraWidth max (extra.length - overflow)) - 1) + "…"
else
extra
val total0 = url.length + 1 + extra0.length
val overflow0 = total0 - width + 1
val url0 =
if (total0 >= width)
url.take(((width - baseExtraWidth - 1) max (url.length - overflow0)) - 1) + "…"
else
url
(url0, extra0)
} else
(url, extra)
(url0, extra0)
}
private def truncatedPrintln(s: String): Unit = {
out.clearLine(2)
if (s.length <= width)
out.write(s + "\n")
else
out.write(s.take(width - 1) + "…\n")
}
private def updateDisplay(): Unit =
if (!stopped && needsUpdate.getAndSet(false)) {
val (done0, downloads0) = downloads.synchronized {
val q = doneQueue
.toVector
.filter {
case (url, _) =>
!url.endsWith(".sha1") && !url.endsWith(".sha256") && !url.endsWith(".md5") && !url.endsWith("/")
}
.sortBy { case (url, _) => url }
doneQueue.clear()
val dw = downloads
.toVector
.map { url => url -> infos.get(url) }
.sortBy { case (_, info) => - info.fraction.sum }
(q, dw)
}
for (((url, info), isDone) <- done0.iterator.map((_, true)) ++ downloads0.iterator.map((_, false))) {
assert(info != null, s"Incoherent state ($url)")
if (!printedAnything0) {
beforeOutput
printedAnything0 = true
}
truncatedPrintln(url)
out.clearLine(2)
out.write(s" ${info.display(isDone)}\n")
}
val displayedCount = (done0 ++ downloads0).length
if (displayedCount < currentHeight) {
for (_ <- 1 to 2; _ <- displayedCount until currentHeight) {
out.clearLine(2)
out.down(1)
}
for (_ <- displayedCount until currentHeight)
out.up(2)
}
for (_ <- downloads0.indices)
out.up(2)
out.left(10000)
out.flush()
currentHeight = downloads0.length
}
// Clears the whole progress area (2 lines per entry), restores the cursor,
// and marks the runnable stopped so no further frames are drawn.
def stop(): Unit = {
for (_ <- 1 to 2; _ <- 0 until currentHeight) {
out.clearLine(2)
out.down(1)
}
for (_ <- 0 until currentHeight)
out.up(2)
out.flush()
stopped = true
}
// URLs already seen in an earlier fallback refresh; only those get printed,
// so each download's progress appears starting from its second refresh.
private var previous = Set.empty[String]
// Append-only rendering used when ANSI cursor control is unavailable:
// one plain line per previously-seen download, then a blank separator.
private def fallbackDisplay(): Unit = {
val downloads0 = downloads.synchronized {
downloads
.toVector
.map { url => url -> infos.get(url) }
.sortBy { case (_, info) => - info.fraction.sum }
}
var displayedSomething = false
for ((url, info) <- downloads0 if previous(url)) {
assert(info != null, s"Incoherent state ($url)")
val (url0, extra0) = reflowed(url, info)
displayedSomething = true
out.write(s"$url0 $extra0\n")
}
if (displayedSomething)
out.write("\n")
out.flush()
previous = previous ++ downloads0.map { case (url, _) => url }
}
// One-time setup: clear the current line, unless output is append-only.
def init(): Unit =
if (!fallbackMode)
out.clearLine(2)
// Periodic task body: dispatch to the rendering strategy for the mode.
def run(): Unit =
if (fallbackMode)
fallbackDisplay()
else
updateDisplay()
}
}
// Cache logger that renders per-download progress in the terminal,
// refreshed by a background daemon thread. Falls back to plain append-only
// output when the terminal width cannot be determined.
class TermDisplay(
out: Writer,
val fallbackMode: Boolean = TermDisplay.defaultFallbackMode
) extends Cache.Logger {
import TermDisplay._
private var updateRunnableOpt = Option.empty[UpdateDisplayRunnable]
// Single-threaded scheduler driving the periodic refresh; its thread is a
// daemon so it never keeps the JVM alive.
private val scheduler = Executors.newSingleThreadScheduledExecutor(
new ThreadFactory {
val defaultThreadFactory = Executors.defaultThreadFactory()
def newThread(r: Runnable) = {
val t = defaultThreadFactory.newThread(r)
t.setDaemon(true)
t.setName("progress-bar")
t
}
}
)
// Fails fast if a logger callback arrives before init() was called.
private def updateRunnable = updateRunnableOpt.getOrElse {
throw new Exception("Uninitialized TermDisplay")
}
val defaultWidth = 80
// Probe the terminal width once; when it cannot be determined, force
// fallback mode regardless of the constructor argument.
lazy val (width, fallbackMode0) = Terminal.consoleDim("cols") match {
case Some(w) =>
(w, fallbackMode)
case None =>
(defaultWidth, true)
}
// Refresh once per second in fallback mode, ~60 fps otherwise.
lazy val refreshInterval =
if (fallbackMode0)
1000L
else
1000L / 60
override def init(beforeOutput: => Unit): Unit = {
updateRunnableOpt = Some(new UpdateDisplayRunnable(beforeOutput, out, width, fallbackMode0))
updateRunnable.init()
scheduler.scheduleAtFixedRate(updateRunnable, 0L, refreshInterval, TimeUnit.MILLISECONDS)
}
def init(): Unit =
init(())
// Stops the refresh loop; returns whether anything was ever printed.
override def stopDidPrintSomething(): Boolean = {
scheduler.shutdown()
scheduler.awaitTermination(2 * refreshInterval, TimeUnit.MILLISECONDS)
updateRunnable.stop()
updateRunnable.printedAnything()
}
def stop(): Unit =
stopDidPrintSomething()
override def downloadingArtifact(url: String, file: File): Unit =
updateRunnable.newEntry(
url,
DownloadInfo(0L, 0L, None, System.currentTimeMillis(), updateCheck = false, watching = false),
s"Downloading $url\n"
)
// Records the content length once known; the entry must already exist.
override def downloadLength(url: String, totalLength: Long, alreadyDownloaded: Long, watching: Boolean): Unit = {
val info = updateRunnable.infos.get(url)
assert(info != null, s"Incoherent state ($url)")
val newInfo = info match {
case info0: DownloadInfo =>
info0.copy(
length = Some(totalLength),
previouslyDownloaded = alreadyDownloaded,
watching = watching
)
case _ =>
throw new Exception(s"Incoherent display state for $url")
}
updateRunnable.infos.put(url, newInfo)
updateRunnable.update()
}
override def downloadProgress(url: String, downloaded: Long): Unit = {
val info = updateRunnable.infos.get(url)
assert(info != null, s"Incoherent state ($url)")
val newInfo = info match {
case info0: DownloadInfo =>
info0.copy(downloaded = downloaded)
case _ =>
throw new Exception(s"Incoherent display state for $url")
}
updateRunnable.infos.put(url, newInfo)
updateRunnable.update()
}
override def downloadedArtifact(url: String, success: Boolean): Unit =
updateRunnable.removeEntry(url, success, s"Downloaded $url\n")(x => x)
override def checkingUpdates(url: String, currentTimeOpt: Option[Long]): Unit =
updateRunnable.newEntry(
url,
CheckUpdateInfo(currentTimeOpt, None, isDone = false),
s"Checking $url\n"
)
override def checkingUpdatesResult(
url: String,
currentTimeOpt: Option[Long],
remoteTimeOpt: Option[Long]
): Unit = {
// Not keeping a message on-screen if a download should happen next
// so that the corresponding URL doesn't appear twice
val newUpdate = remoteTimeOpt.exists { remoteTime =>
currentTimeOpt.forall { currentTime =>
currentTime < remoteTime
}
}
updateRunnable.removeEntry(url, !newUpdate, s"Checked $url\n") {
case info: CheckUpdateInfo =>
info.copy(remoteTimeOpt = remoteTimeOpt, isDone = true)
case _ =>
throw new Exception(s"Incoherent display state for $url")
}
}
// TODO(wisechengyi,alexarchambault): implement this
override def removedCorruptFile(url: String, file: File, reason: Option[FileError]): Unit = {}
}

View File

@ -1,52 +0,0 @@
package coursier
import java.io.{File, Writer}
import scala.util.Try
// Minimal ANSI-terminal helpers: a `tput`-based probe for terminal
// dimensions plus cursor-movement / line-clearing operations on a Writer.
object Terminal {
// Cut-n-pasted and adapted from
// https://github.com/lihaoyi/Ammonite/blob/10854e3b8b454a74198058ba258734a17af32023/terminal/src/main/scala/ammonite/terminal/Utils.scala
private lazy val pathedTput = if (new File("/usr/bin/tput").exists()) "/usr/bin/tput" else "tput"
// Queries `tput` for dimension `s` (e.g. "cols"); returns None when there
// is no controlling terminal or the probe fails for any reason.
def consoleDim(s: String): Option[Int] =
if (new File("/dev/tty").exists()) {
import sys.process._
// discard the subprocess's logged output; only the captured stdout matters
val nullLog = new ProcessLogger {
def out(s: => String): Unit = {}
def err(s: => String): Unit = {}
def buffer[T](f: => T): T = f
}
Try(Process(Seq("bash", "-c", s"$pathedTput $s 2> /dev/tty")).!!(nullLog).trim.toInt).toOption
} else
None
// Adds ANSI cursor-control operations to any `Writer`.
implicit class Ansi(val output: Writer) extends AnyVal {
// Emits the CSI escape sequence ESC [ <n> <c>.
private def control(n: Int, c: Char) = output.write(s"\033[" + n + c)
/**
* Move up `n` squares
*/
def up(n: Int): Unit = if (n > 0) control(n, 'A')
/**
* Move down `n` squares
*/
def down(n: Int): Unit = if (n > 0) control(n, 'B')
/**
* Move left `n` squares
*/
def left(n: Int): Unit = if (n > 0) control(n, 'D')
/**
* Clear the current line
*
* n=0: clear from cursor to end of line
* n=1: clear from cursor to start of line
* n=2: clear entire line
*/
def clearLine(n: Int): Unit = control(n, 'K')
}
}

View File

@ -1,25 +0,0 @@
package coursier.internal
import java.io.{ByteArrayOutputStream, InputStream}
object FileUtil {

  // Won't be necessary anymore with Java 9
  // (https://docs.oracle.com/javase/9/docs/api/java/io/InputStream.html#readAllBytes--,
  // via https://stackoverflow.com/questions/1264709/convert-inputstream-to-byte-array-in-java/37681322#37681322)

  /** Drains `is` completely and returns its content as a byte array. */
  def readFully(is: InputStream): Array[Byte] = {
    val out = new ByteArrayOutputStream
    val chunk = new Array[Byte](16384)
    var count = is.read(chunk, 0, chunk.length)
    while (count != -1) {
      out.write(chunk, 0, count)
      count = is.read(chunk, 0, chunk.length)
    }
    out.flush()
    out.toByteArray
  }
}

View File

@ -1,33 +0,0 @@
package coursier.util
import java.util.concurrent.ExecutorService
import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutorService, Future}
import scala.concurrent.duration.Duration
// JVM-specific operations mixed into `Task`'s companion: scheduling work on
// a `java.util.concurrent.ExecutorService` and a blocking `unsafeRun`.
abstract class PlatformTask { self =>
// Runs `f` on `pool` each time the resulting task is evaluated.
def schedule[A](pool: ExecutorService)(f: => A): Task[A] = {
val ec0 = pool match {
case eces: ExecutionContextExecutorService => eces
case _ => ExecutionContext.fromExecutorService(pool) // FIXME Is this instantiation costly? Cache it?
}
Task(_ => Future(f)(ec0))
}
// `Schedulable` instance combining the `Gather` operations with `schedule`.
implicit val schedulable: Schedulable[Task] =
new TaskGather with Schedulable[Task] {
def schedule[A](pool: ExecutorService)(f: => A) = self.schedule(pool)(f)
}
def gather: Gather[Task] =
schedulable
implicit class PlatformTaskOps[T](private val task: Task[T]) {
// Blocks the current thread until the task completes (no timeout).
def unsafeRun()(implicit ec: ExecutionContext): T =
Await.result(task.future(), Duration.Inf)
}
}

View File

@ -1,28 +0,0 @@
package coursier.util
import java.util.concurrent.{ExecutorService, Executors, ThreadFactory}
// Type class extending `Gather` with the ability to run a computation on a
// `java.util.concurrent.ExecutorService`.
trait Schedulable[F[_]] extends Gather[F] {
def schedule[A](pool: ExecutorService)(f: => A): F[A]
}
object Schedulable {

  /** Shared default pool: fixed size, at least 4 threads, daemon threads. */
  lazy val defaultThreadPool =
    fixedThreadPool(4 max Runtime.getRuntime.availableProcessors())

  /**
   * Fixed-size `ExecutorService` whose threads are daemon threads, so the
   * pool never prevents JVM shutdown.
   */
  def fixedThreadPool(size: Int): ExecutorService = {
    // same approach as scalaz.concurrent.Strategy.DefaultDaemonThreadFactory
    val daemonFactory: ThreadFactory = new ThreadFactory {
      private val delegate = Executors.defaultThreadFactory()
      def newThread(r: Runnable) = {
        val thread = delegate.newThread(r)
        thread.setDaemon(true)
        thread
      }
    }
    Executors.newFixedThreadPool(size, daemonFactory)
  }
}

View File

@ -1,46 +0,0 @@
package coursier.util
import scala.concurrent.{ExecutionContext, Future, Promise}
/**
 * Lightweight IO-ish monad: a deferred computation producing a `Future[T]`
 * once given an `ExecutionContext`. Value class, so wrapping is free.
 */
final case class Task[T](value: ExecutionContext => Future[T]) extends AnyVal {

  /** Transforms the result with `f` once the underlying future completes. */
  def map[U](f: T => U): Task[U] =
    Task(ec => value(ec).map(f)(ec))

  /** Sequences this task with the one produced from its result. */
  def flatMap[U](f: T => Task[U]): Task[U] =
    Task(ec => value(ec).flatMap(t => f(t).value(ec))(ec))

  /** Recovers from failures matched by the partial function `f`. */
  def handle[U >: T](f: PartialFunction[Throwable, U]): Task[U] =
    Task(ec => value(ec).recover(f)(ec))

  /** Starts the computation on `ec` and exposes it as a `Future`. */
  def future()(implicit ec: ExecutionContext): Future[T] =
    value(ec)
}
// Constructors for `Task`; JVM-specific helpers come from `PlatformTask`.
object Task extends PlatformTask {
// Lifts an already-computed value; the future is created eagerly, once,
// and shared across evaluations.
def point[A](a: A): Task[A] = {
val future = Future.successful(a)
Task(_ => future)
}
// Defers evaluation of `a` until the task is run.
def delay[A](a: => A): Task[A] =
Task(ec => Future(a)(ec))
// A task that never completes.
def never[A]: Task[A] =
Task(_ => Promise[A].future)
// Monadic recursion: re-applies `fn` until it yields a `Right`.
def tailRecM[A, B](a: A)(fn: A => Task[Either[A, B]]): Task[B] =
Task[B] { implicit ec =>
def loop(a: A): Future[B] =
fn(a).future().flatMap {
case Right(b) =>
Future.successful(b)
case Left(a) =>
// this is safe because recursive
// flatMap is safe on Future
loop(a)
}
loop(a)
}
}

View File

@ -1,12 +0,0 @@
package coursier.util
import scala.concurrent.Future
// `Gather` instance for `Task`, delegating to `Task`'s own combinators.
trait TaskGather extends Gather[Task] {
def point[A](a: A) = Task.point(a)
def bind[A, B](elem: Task[A])(f: A => Task[B]) =
elem.flatMap(f)
// Runs all tasks and collects their results via `Future.sequence`.
def gather[A](elems: Seq[Task[A]]) =
Task(implicit ec => Future.sequence(elems.map(_.value(ec))))
}

View File

@ -1,6 +0,0 @@
package coursier.cli
// dummy app to keep proguard quiet in 2.11
object Coursier {
  /** No-op entry point; exists only so the artifact exposes a main class. */
  def main(args: Array[String]): Unit = ()
}

View File

@ -1,39 +0,0 @@
# Main CLI library: command implementations, option parsing, scaladex and
# spark support.
scala_library(
name = "cli",
dependencies = [
"3rdparty/jvm:argonaut-shapeless",
"3rdparty/jvm:caseapp",
"cache:cache",
"core:core",
"extra/src/main/scala/coursier:fallback-deps-repo",
"extra/src/main/scala/coursier/extra:extra",
"extra/src/main/scala-2.12/coursier/extra:native",
"scalaz:scalaz-interop",
":util",
],
sources = globs(
"coursier/cli/options/*.scala",
"coursier/cli/scaladex/*.scala",
"coursier/cli/spark/*.scala",
"coursier/cli/*.scala",
),
)
# Shared CLI utilities, split out so `:cli` can depend on them separately.
scala_library(
name = "util",
dependencies = [
"3rdparty/jvm:argonaut-shapeless",
"cache:cache",
"core:core",
],
sources = globs("coursier/cli/util/*.scala"),
)
# Deployable binary wrapping the CLI library.
jvm_binary(
name = "coursier-cli",
basename = "coursier-cli",
dependencies = [
":cli",
],
main = "coursier.cli.Coursier",
)

View File

@ -1,379 +0,0 @@
package coursier
package cli
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, FileInputStream, FileOutputStream, IOException}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermission
import java.util.Properties
import java.util.jar.{JarFile, Attributes => JarAttributes}
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
import caseapp._
import coursier.cli.options.BootstrapOptions
import coursier.cli.util.{Assembly, Zip}
import coursier.internal.FileUtil
import scala.collection.JavaConverters._
// `bootstrap` command: generates a launcher for the resolved dependencies —
// a native executable, a one-JAR-like bootstrap JAR, or an assembly JAR,
// optionally prefixed with an executable shell preamble.
object Bootstrap extends CaseApp[BootstrapOptions] {
// Builds a native executable from the fetched artifacts.
private def createNativeBootstrap(
options: BootstrapOptions,
helper: Helper,
mainClass: String
): Unit = {
val files = helper.fetch(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
)
val log: String => Unit =
if (options.options.common.verbosityLevel >= 0)
s => Console.err.println(s)
else
_ => ()
val tmpDir = new File(options.options.target)
try {
coursier.extra.Native.create(
mainClass,
files,
new File(options.options.output),
tmpDir,
log,
verbosity = options.options.common.verbosityLevel
)
} finally {
// best-effort cleanup of the build directory unless the user keeps it
if (!options.options.keepTarget)
coursier.extra.Native.deleteRecursive(tmpDir)
}
}
// Writes the launcher bytes, with or without a shell preamble.
private def createJarBootstrap(javaOpts: Seq[String], output: File, content: Array[Byte], withPreamble: Boolean): Unit =
if (withPreamble)
createJarBootstrapWithPreamble(javaOpts, output, content)
else
createSimpleJarBootstrap(output, content)
private def createSimpleJarBootstrap(output: File, content: Array[Byte]): Unit =
try Files.write(output.toPath, content)
catch { case e: IOException =>
Console.err.println(s"Error while writing $output${Option(e.getMessage).fold("")(" (" + _ + ")")}")
sys.exit(1)
}
// Prepends a POSIX-sh preamble that moves -J* arguments before `-jar $0`,
// writes preamble + JAR bytes, then makes the result executable.
private def createJarBootstrapWithPreamble(javaOpts: Seq[String], output: File, content: Array[Byte]): Unit = {
val argsPartitioner =
"""|nargs=$#
|
|i=1; while [ "$i" -le $nargs ]; do
| eval arg=\${$i}
| case $arg in
| -J-*) set -- "$@" "${arg#-J}" ;;
| esac
| i=$((i + 1))
| done
|
|set -- "$@" -jar "$0"
|
|i=1; while [ "$i" -le $nargs ]; do
| eval arg=\${$i}
| case $arg in
| -J-*) ;;
| *) set -- "$@" "$arg" ;;
| esac
| i=$((i + 1))
| done
|
|shift "$nargs"
|""".stripMargin
val javaCmd = Seq("java") ++
javaOpts
// escaping possibly a bit loose :-|
.map(s => "'" + s.replace("'", "\\'") + "'") ++
Seq("\"$@\"")
val shellPreamble = Seq(
"#!/usr/bin/env sh",
argsPartitioner,
"exec " + javaCmd.mkString(" ")
).mkString("", "\n", "\n")
try Files.write(output.toPath, shellPreamble.getBytes(UTF_8) ++ content)
catch { case e: IOException =>
Console.err.println(s"Error while writing $output${Option(e.getMessage).fold("")(" (" + _ + ")")}")
sys.exit(1)
}
// Mirror each read permission with the matching execute permission.
try {
val perms = Files.getPosixFilePermissions(output.toPath).asScala.toSet
var newPerms = perms
if (perms(PosixFilePermission.OWNER_READ))
newPerms += PosixFilePermission.OWNER_EXECUTE
if (perms(PosixFilePermission.GROUP_READ))
newPerms += PosixFilePermission.GROUP_EXECUTE
if (perms(PosixFilePermission.OTHERS_READ))
newPerms += PosixFilePermission.OTHERS_EXECUTE
if (newPerms != perms)
Files.setPosixFilePermissions(
output.toPath,
newPerms.asJava
)
} catch {
case e: UnsupportedOperationException =>
// Ignored
case e: IOException =>
Console.err.println(
s"Error while making $output executable" +
Option(e.getMessage).fold("")(" (" + _ + ")")
)
sys.exit(1)
}
}
// Builds a one-JAR-like launcher: copies the embedded bootstrap JAR, then
// adds URL lists / resource JARs / a bootstrap.properties with the main class.
private def createOneJarLikeJarBootstrap(
options: BootstrapOptions,
helper: Helper,
mainClass: String,
javaOpts: Seq[String],
urls: Seq[String],
files: Seq[File],
output: File
): Unit = {
val bootstrapJar =
Option(Thread.currentThread().getContextClassLoader.getResourceAsStream("bootstrap.jar")) match {
case Some(is) => FileUtil.readFully(is)
case None =>
Console.err.println(s"Error: bootstrap JAR not found")
sys.exit(1)
}
val isolatedDeps = options.options.isolated.isolatedDeps(options.options.common.scalaVersion)
val (_, isolatedArtifactFiles) =
options.options.isolated.targets.foldLeft((Vector.empty[String], Map.empty[String, (Seq[String], Seq[File])])) {
case ((done, acc), target) =>
// TODO Add non regression test checking that optional artifacts indeed land in the isolated loader URLs
val m = helper.fetchMap(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false),
subset = isolatedDeps.getOrElse(target, Seq.empty).toSet
)
val (done0, subUrls, subFiles) =
if (options.options.standalone) {
val subFiles0 = m.values.toSeq
(done, Nil, subFiles0)
} else {
val filteredSubArtifacts = m.keys.toSeq.diff(done)
(done ++ filteredSubArtifacts, filteredSubArtifacts, Nil)
}
val updatedAcc = acc + (target -> (subUrls, subFiles))
(done0, updatedAcc)
}
val isolatedUrls = isolatedArtifactFiles.map { case (k, (v, _)) => k -> v }
val isolatedFiles = isolatedArtifactFiles.map { case (k, (_, v)) => k -> v }
val buffer = new ByteArrayOutputStream
val bootstrapZip = new ZipInputStream(new ByteArrayInputStream(bootstrapJar))
val outputZip = new ZipOutputStream(buffer)
// Copy the bootstrap JAR's entries into the output JAR unchanged.
for ((ent, data) <- Zip.zipEntries(bootstrapZip)) {
outputZip.putNextEntry(ent)
outputZip.write(data)
outputZip.closeEntry()
}
val time = System.currentTimeMillis()
def putStringEntry(name: String, content: String): Unit = {
val entry = new ZipEntry(name)
entry.setTime(time)
outputZip.putNextEntry(entry)
outputZip.write(content.getBytes(UTF_8))
outputZip.closeEntry()
}
def putEntryFromFile(name: String, f: File): Unit = {
val entry = new ZipEntry(name)
entry.setTime(f.lastModified())
outputZip.putNextEntry(entry)
outputZip.write(FileUtil.readFully(new FileInputStream(f)))
outputZip.closeEntry()
}
putStringEntry("bootstrap-jar-urls", urls.mkString("\n"))
if (options.options.isolated.anyIsolatedDep) {
putStringEntry("bootstrap-isolation-ids", options.options.isolated.targets.mkString("\n"))
for (target <- options.options.isolated.targets) {
val urls = isolatedUrls.getOrElse(target, Nil)
val files = isolatedFiles.getOrElse(target, Nil)
putStringEntry(s"bootstrap-isolation-$target-jar-urls", urls.mkString("\n"))
putStringEntry(s"bootstrap-isolation-$target-jar-resources", files.map(pathFor).mkString("\n"))
}
}
def pathFor(f: File) = s"jars/${f.getName}"
for (f <- files)
putEntryFromFile(pathFor(f), f)
putStringEntry("bootstrap-jar-resources", files.map(pathFor).mkString("\n"))
val propsEntry = new ZipEntry("bootstrap.properties")
propsEntry.setTime(time)
val properties = new Properties
properties.setProperty("bootstrap.mainClass", mainClass)
outputZip.putNextEntry(propsEntry)
properties.store(outputZip, "")
outputZip.closeEntry()
outputZip.close()
createJarBootstrap(
javaOpts,
output,
buffer.toByteArray,
options.options.preamble
)
}
// Default assembly merge rules: concatenate reference.conf / service files,
// drop manifests, log4j.properties, and JAR signature files.
private def defaultRules = Seq(
Assembly.Rule.Append("reference.conf"),
Assembly.Rule.AppendPattern("META-INF/services/.*"),
Assembly.Rule.Exclude("log4j.properties"),
Assembly.Rule.Exclude(JarFile.MANIFEST_NAME),
Assembly.Rule.ExcludePattern("META-INF/.*\\.[sS][fF]"),
Assembly.Rule.ExcludePattern("META-INF/.*\\.[dD][sS][aA]"),
Assembly.Rule.ExcludePattern("META-INF/.*\\.[rR][sS][aA]")
)
// Builds a merged ("fat") assembly JAR from the fetched files.
private def createAssemblyJar(
options: BootstrapOptions,
files: Seq[File],
javaOpts: Seq[String],
mainClass: String,
output: File
): Unit = {
// rules are given on the command line as "kind:value" pairs
val parsedRules = options.options.rule.map { s =>
s.split(":", 2) match {
case Array("append", v) => Assembly.Rule.Append(v)
case Array("append-pattern", v) => Assembly.Rule.AppendPattern(v)
case Array("exclude", v) => Assembly.Rule.Exclude(v)
case Array("exclude-pattern", v) => Assembly.Rule.ExcludePattern(v)
case _ =>
sys.error(s"Malformed assembly rule: $s")
}
}
val rules =
(if (options.options.defaultRules) defaultRules else Nil) ++ parsedRules
val attrs = Seq(
JarAttributes.Name.MAIN_CLASS -> mainClass
)
val baos = new ByteArrayOutputStream
Assembly.make(files, baos, attrs, rules)
createJarBootstrap(
javaOpts,
output,
baos.toByteArray,
options.options.preamble
)
}
// Command entry point: resolves dependencies, then dispatches to the
// requested launcher flavor.
def run(options: BootstrapOptions, args: RemainingArgs): Unit = {
val helper = new Helper(
options.options.common,
args.all,
isolated = options.options.isolated,
warnBaseLoaderNotFound = false
)
val output0 = new File(options.options.output)
if (!options.options.force && output0.exists()) {
Console.err.println(s"Error: ${options.options.output} already exists, use -f option to force erasing it.")
sys.exit(1)
}
val mainClass =
if (options.options.mainClass.isEmpty)
helper.retainedMainClass
else
options.options.mainClass
if (options.options.native)
createNativeBootstrap(options, helper, mainClass)
else {
val (validProperties, wrongProperties) = options.options.property.partition(_.contains("="))
if (wrongProperties.nonEmpty) {
Console.err.println(s"Wrong -P / --property option(s):\n${wrongProperties.mkString("\n")}")
sys.exit(255)
}
val properties0 = validProperties.map { s =>
s.split("=", 2) match {
case Array(k, v) => k -> v
case _ => sys.error("Cannot possibly happen")
}
}
val javaOpts = options.options.javaOpt ++
properties0.map { case (k, v) => s"-D$k=$v" }
// Partition artifacts into remote URLs vs files embedded in the launcher.
val (urls, files) =
helper.fetchMap(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
).toList.foldLeft((List.empty[String], List.empty[File])){
case ((urls, files), (url, file)) =>
if (options.options.assembly || options.options.standalone) (urls, file :: files)
else if (options.options.embedFiles && url.startsWith("file:/")) (urls, file :: files)
else (url :: urls, files)
}
if (options.options.assembly)
createAssemblyJar(options, files, javaOpts, mainClass, output0)
else
createOneJarLikeJarBootstrap(
options,
helper,
mainClass,
javaOpts,
urls,
files,
output0
)
}
}
}

View File

@ -1,29 +0,0 @@
package coursier
package cli
import caseapp.core.app.CommandAppA
import shapeless._
// Main entry point of the coursier CLI: dispatches the parsed command
// options (a shapeless coproduct) to the matching command object.
object Coursier extends CommandAppA(CoursierCommand.parser, CoursierCommand.help) {
override val appName = "Coursier"
override val progName = "coursier"
override val appVersion = coursier.util.Properties.version
// Case order mirrors the order commands are added in CoursierCommand.parser.
def runA =
args => {
case Inl(bootstrapOptions) =>
Bootstrap.run(bootstrapOptions, args)
case Inr(Inl(fetchOptions)) =>
Fetch.run(fetchOptions, args)
case Inr(Inr(Inl(launchOptions))) =>
Launch.run(launchOptions, args)
case Inr(Inr(Inr(Inl(resolveOptions)))) =>
Resolve.run(resolveOptions, args)
case Inr(Inr(Inr(Inr(Inl(sparkSubmitOptions))))) =>
SparkSubmit.run(sparkSubmitOptions, args)
case Inr(Inr(Inr(Inr(Inr(cnil))))) =>
cnil.impossible
}
}

View File

@ -1,26 +0,0 @@
package coursier.cli
import caseapp.CommandParser
import caseapp.core.help.CommandsHelp
// Declares the CLI sub-commands; `parser` and `help` must list them in the
// same order so the coproduct match in `Coursier.runA` stays aligned.
object CoursierCommand {
val parser =
CommandParser.nil
.add(Bootstrap)
.add(Fetch)
.add(Launch)
.add(Resolve)
.add(SparkSubmit)
.reverse
val help =
CommandsHelp.nil
.add(Bootstrap)
.add(Fetch)
.add(Launch)
.add(Resolve)
.add(SparkSubmit)
.reverse
}

View File

@ -1,51 +0,0 @@
package coursier
package cli
import java.io.File
import caseapp._
import coursier.cli.options.FetchOptions
// Resolves the requested dependencies and fetches their artifacts;
// `files0` holds the resulting files.
final class Fetch(options: FetchOptions, args: RemainingArgs) {
val helper = new Helper(options.common, args.all, ignoreErrors = options.artifactOptions.force)
val files0 = helper.fetch(
sources = options.sources,
javadoc = options.javadoc,
artifactTypes = options.artifactOptions.artifactTypes(
options.sources || options.common.classifier0("sources"),
options.javadoc || options.common.classifier0("javadoc")
)
)
}
// `fetch` command: prints the fetched artifact paths, either one per line
// or joined as a single classpath string.
object Fetch extends CaseApp[FetchOptions] {
def apply(options: FetchOptions, args: RemainingArgs): Fetch =
new Fetch(options, args)
def run(options: FetchOptions, args: RemainingArgs): Unit = {
val fetch = Fetch(options, args)
// Some progress lines seem to be scraped without this.
Console.out.flush()
val out =
if (options.classpath)
fetch
.files0
.map(_.toString)
.mkString(File.pathSeparator)
else
fetch
.files0
.map(_.toString)
.mkString("\n")
println(out)
}
}

View File

@ -1,897 +0,0 @@
package coursier
package cli
import java.io.{File, OutputStreamWriter, PrintWriter}
import java.net.{URL, URLClassLoader, URLDecoder}
import java.util.concurrent.Executors
import java.util.jar.{Manifest => JManifest}
import coursier.cli.options.{CommonOptions, IsolatedLoaderOptions}
import coursier.cli.scaladex.Scaladex
import coursier.cli.util.{JsonElem, JsonPrintRequirement, JsonReport}
import coursier.extra.Typelevel
import coursier.interop.scalaz._
import coursier.ivy.IvyRepository
import coursier.util.Parse.ModuleRequirements
import coursier.util.{Gather, Parse, Print}
import scala.annotation.tailrec
import scala.concurrent.duration.Duration
import scalaz.concurrent.{Strategy, Task}
object Helper {
def fileRepr(f: File) = f.toString
def errPrintln(s: String) = Console.err.println(s)
private val manifestPath = "META-INF/MANIFEST.MF"
// Scans the manifests visible from `cl` — excluding those visible from its
// parent loader — and maps (Implementation-Vendor-Id, Specification-Title)
// to the declared Main-Class, when one is present.
def mainClasses(cl: ClassLoader): Map[(String, String), String] = {
import scala.collection.JavaConverters._
val parentMetaInfs = Option(cl.getParent).fold(Set.empty[URL]) { parent =>
parent.getResources(manifestPath).asScala.toSet
}
val allMetaInfs = cl.getResources(manifestPath).asScala.toVector
val metaInfs = allMetaInfs.filterNot(parentMetaInfs)
val mainClasses = metaInfs.flatMap { url =>
val attributes = new JManifest(url.openStream()).getMainAttributes
def attributeOpt(name: String) =
Option(attributes.getValue(name))
// missing vendor/title attributes default to "" so lookup keys stay total
val vendor = attributeOpt("Implementation-Vendor-Id").getOrElse("")
val title = attributeOpt("Specification-Title").getOrElse("")
val mainClass = attributeOpt("Main-Class")
mainClass.map((vendor, title) -> _)
}
mainClasses.toMap
}
}
class Helper(
common: CommonOptions,
rawDependencies: Seq[String],
extraJars: Seq[File] = Nil,
printResultStdout: Boolean = false,
ignoreErrors: Boolean = false,
isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
warnBaseLoaderNotFound: Boolean = true
) {
import Helper.errPrintln
import Util._
import common._
val ttl0 =
if (ttl.isEmpty)
Cache.defaultTtl
else
try Some(Duration(ttl))
catch {
case e: Exception =>
prematureExit(s"Unrecognized TTL duration: $ttl")
}
val cachePolicies =
if (common.mode.isEmpty)
CachePolicy.default
else
CacheParse.cachePolicies(common.mode).either match {
case Right(cp) => cp
case Left(errors) =>
prematureExit(
s"Error parsing modes:\n${errors.map(" "+_).mkString("\n")}"
)
}
val cache = new File(cacheOptions.cache)
val pool = Executors.newFixedThreadPool(parallel, Strategy.DefaultDaemonThreadFactory)
val defaultRepositories = Seq(
Cache.ivy2Local,
MavenRepository("https://repo1.maven.org/maven2")
)
val repositoriesValidation = CacheParse.repositories(common.repository).map { repos0 =>
var repos = (if (common.noDefault) Nil else defaultRepositories) ++ repos0
repos = repos.map {
case m: MavenRepository => m.copy(sbtAttrStub = common.sbtPluginHack)
case other => other
}
if (common.dropInfoAttr)
repos = repos.map {
case m: IvyRepository => m.copy(dropInfoAttributes = true)
case other => other
}
repos
}
val standardRepositories = repositoriesValidation.either match {
case Right(repos) =>
repos
case Left(errors) =>
prematureExit(
s"Error with repositories:\n${errors.map(" "+_).mkString("\n")}"
)
}
val loggerFallbackMode =
!progress && TermDisplay.defaultFallbackMode
val (scaladexRawDependencies, otherRawDependencies) =
rawDependencies.partition(s => s.contains("/") || !s.contains(":"))
val scaladexDepsWithExtraParams: List[(Dependency, Map[String, String])] =
if (scaladexRawDependencies.isEmpty)
Nil
else {
val logger =
if (verbosityLevel >= 0)
Some(new TermDisplay(
new OutputStreamWriter(System.err),
fallbackMode = loggerFallbackMode
))
else
None
val fetchs = cachePolicies.map(p =>
Cache.fetch[Task](cache, p, checksums = Nil, logger = logger, pool = pool, ttl = ttl0)
)
logger.foreach(_.init())
val scaladex = Scaladex.cached(fetchs: _*)
val res = Gather[Task].gather(scaladexRawDependencies.map { s =>
val deps = scaladex.dependencies(
s,
scalaVersion,
if (verbosityLevel >= 2) Console.err.println(_) else _ => ()
)
deps.map { modVers =>
val m = modVers.groupBy(_._2)
if (m.size > 1) {
val (keptVer, modVers0) = m.map {
case (v, l) =>
val ver = coursier.core.Parse.version(v)
.getOrElse(???) // FIXME
ver -> l
}
.maxBy(_._1)
if (verbosityLevel >= 1)
Console.err.println(s"Keeping version ${keptVer.repr}")
modVers0
} else
modVers
}.run
}).unsafePerformSync
logger.foreach(_.stop())
val errors = res.collect { case Left(err) => err }
prematureExitIf(errors.nonEmpty) {
s"Error getting scaladex infos:\n" + errors.map(" " + _).mkString("\n")
}
res
.collect { case Right(l) => l }
.flatten
.map { case (mod, ver) => (Dependency(mod, ver), Map[String, String]()) }
.toList
}
val (forceVersionErrors, forceVersions0) = Parse.moduleVersions(forceVersion, scalaVersion)
prematureExitIf(forceVersionErrors.nonEmpty) {
s"Cannot parse forced versions:\n" + forceVersionErrors.map(" "+_).mkString("\n")
}
val forceVersions = {
val grouped = forceVersions0
.groupBy { case (mod, _) => mod }
.map { case (mod, l) => mod -> l.map { case (_, version) => version } }
for ((mod, forcedVersions) <- grouped if forcedVersions.distinct.lengthCompare(1) > 0)
errPrintln(s"Warning: version of $mod forced several times, using only the last one (${forcedVersions.last})")
grouped.map { case (mod, versions) => mod -> versions.last }
}
val (excludeErrors, excludes0) = Parse.modules(exclude, scalaVersion)
prematureExitIf(excludeErrors.nonEmpty) {
s"Cannot parse excluded modules:\n" +
excludeErrors
.map(" " + _)
.mkString("\n")
}
val (excludesNoAttr, excludesWithAttr) = excludes0.partition(_.attributes.isEmpty)
prematureExitIf(excludesWithAttr.nonEmpty) {
s"Excluded modules with attributes not supported:\n" +
excludesWithAttr
.map(" " + _)
.mkString("\n")
}
val globalExcludes: Set[(String, String)] = excludesNoAttr.map { mod =>
(mod.organization, mod.name)
}.toSet
val localExcludeMap: Map[String, Set[(String, String)]] =
if (localExcludeFile.isEmpty) {
Map()
} else {
val source = scala.io.Source.fromFile(localExcludeFile)
val lines = try source.mkString.split("\n") finally source.close()
lines.map({ str =>
val parent_and_child = str.split("--")
if (parent_and_child.length != 2) {
throw new SoftExcludeParsingException(s"Failed to parse $str")
}
val child_org_name = parent_and_child(1).split(":")
if (child_org_name.length != 2) {
throw new SoftExcludeParsingException(s"Failed to parse $child_org_name")
}
(parent_and_child(0), (child_org_name(0), child_org_name(1)))
}).groupBy(_._1).mapValues(_.map(_._2).toSet).toMap
}
val moduleReq = ModuleRequirements(globalExcludes, localExcludeMap, defaultConfiguration)
val (modVerCfgErrors: Seq[String], normalDepsWithExtraParams: Seq[(Dependency, Map[String, String])]) =
Parse.moduleVersionConfigs(otherRawDependencies, moduleReq, transitive=true, scalaVersion)
val (intransitiveModVerCfgErrors: Seq[String], intransitiveDepsWithExtraParams: Seq[(Dependency, Map[String, String])]) =
Parse.moduleVersionConfigs(intransitive, moduleReq, transitive=false, scalaVersion)
prematureExitIf(modVerCfgErrors.nonEmpty) {
s"Cannot parse dependencies:\n" + modVerCfgErrors.map(" "+_).mkString("\n")
}
prematureExitIf(intransitiveModVerCfgErrors.nonEmpty) {
s"Cannot parse intransitive dependencies:\n" +
intransitiveModVerCfgErrors.map(" "+_).mkString("\n")
}
val transitiveDepsWithExtraParams: Seq[(Dependency, Map[String, String])] =
// FIXME Order of the dependencies is not respected here (scaladex ones go first)
scaladexDepsWithExtraParams ++ normalDepsWithExtraParams
val transitiveDeps: Seq[Dependency] = transitiveDepsWithExtraParams.map(dep => dep._1)
val allDependenciesWithExtraParams: Seq[(Dependency, Map[String, String])] =
transitiveDepsWithExtraParams ++ intransitiveDepsWithExtraParams
val allDependencies: Seq[Dependency] = allDependenciesWithExtraParams.map(dep => dep._1)
// Any dependencies with URIs should not be resolved with a pom so this is a
// hack to add all the deps with URIs to the FallbackDependenciesRepository
// which will be used during the resolve
val depsWithUrls: Map[(Module, String), (URL, Boolean)] = allDependenciesWithExtraParams
.flatMap {
case (dep, extraParams) =>
extraParams.get("url").map { url =>
dep.moduleVersion -> (new URL(URLDecoder.decode(url, "UTF-8")), true)
}
}.toMap
val depsWithUrlRepo: FallbackDependenciesRepository = FallbackDependenciesRepository(depsWithUrls, cacheFileArtifacts)
// Prepend FallbackDependenciesRepository to the repository list
// so that dependencies with URIs are resolved against this repo
val repositories: Seq[Repository] = Seq(depsWithUrlRepo) ++ standardRepositories
for (((mod, version), _) <- depsWithUrls if forceVersions.get(mod).exists(_ != version))
throw new Exception(s"Cannot force a version that is different from the one specified " +
s"for the module ${mod}:${version} with url")
val checksums = {
val splitChecksumArgs = checksum.flatMap(_.split(',')).filter(_.nonEmpty)
if (splitChecksumArgs.isEmpty)
Cache.defaultChecksums
else
splitChecksumArgs.map {
case none if none.toLowerCase == "none" => None
case sumType => Some(sumType)
}
}
val userEnabledProfiles = profile.toSet
val forcedProperties = forceProperty
.map { s =>
s.split("=", 2) match {
case Array(k, v) => k -> v
case _ =>
sys.error(s"Malformed forced property argument: $s")
}
}
.toMap
val startRes = Resolution(
allDependencies.toSet,
forceVersions = forceVersions,
filter = Some(dep => keepOptional || !dep.optional),
userActivations =
if (userEnabledProfiles.isEmpty) None
else Some(userEnabledProfiles.iterator.map(p => if (p.startsWith("!")) p.drop(1) -> false else p -> true).toMap),
mapDependencies = if (typelevel) Some(Typelevel.swap(_)) else None,
forceProperties = forcedProperties
)
val logger =
if (verbosityLevel >= 0)
Some(new TermDisplay(
new OutputStreamWriter(System.err),
fallbackMode = loggerFallbackMode
))
else
None
val fetchs = cachePolicies.map(p =>
Cache.fetch(cache, p, checksums = checksums, logger = logger, pool = pool, ttl = ttl0)
)
val fetchQuiet = coursier.Fetch.from(
repositories,
fetchs.head,
fetchs.tail: _*
)
val fetch0 =
if (verbosityLevel >= 2) {
modVers: Seq[(Module, String)] =>
val print = Task {
errPrintln(s"Getting ${modVers.length} project definition(s)")
}
print.flatMap(_ => fetchQuiet(modVers))
} else
fetchQuiet
if (verbosityLevel >= 1) {
errPrintln(
s" Dependencies:\n" +
Print.dependenciesUnknownConfigs(
allDependencies,
Map.empty,
printExclusions = verbosityLevel >= 2
)
)
if (forceVersions.nonEmpty) {
errPrintln(" Force versions:")
for ((mod, ver) <- forceVersions.toVector.sortBy { case (mod, _) => mod.toString })
errPrintln(s"$mod:$ver")
}
}
logger.foreach(_.init())
// Run the resolution. Three modes, keyed on --benchmark N:
//   N > 0  - N warm-up runs, then one benchmarked run with per-iteration timings;
//   N < 0  - |N| warm-up runs, then one benchmarked run timed as a whole (less overhead);
//   N == 0 - plain resolution, no timing.
val res =
if (benchmark > 0) {
// Accumulates total per-iteration time so overhead can be reported separately.
class Counter(var value: Int = 0) {
def add(value: Int): Unit = {
this.value += value
}
}
// Wraps a task to print and accumulate its wall-clock duration.
def timed[T](name: String, counter: Counter, f: Task[T]): Task[T] =
Task(System.currentTimeMillis()).flatMap { start =>
f.map { t =>
val end = System.currentTimeMillis()
Console.err.println(s"$name: ${end - start} ms")
counter.add((end - start).toInt)
t
}
}
// Drives the resolution step by step, timing each iteration, until done
// or maxIterations is reached.
def helper(proc: ResolutionProcess, counter: Counter, iteration: Int): Task[Resolution] =
if (iteration >= maxIterations)
Task.now(proc.current)
else
proc match {
case _: core.Done =>
Task.now(proc.current)
case _ =>
// Missing => remote metadata I/O; Continue => pure computation.
val iterationType = proc match {
case _: core.Missing => "IO"
case _: core.Continue => "calculations"
case _ => ???
}
timed(
s"Iteration ${iteration + 1} ($iterationType)",
counter,
proc.next(fetch0, fastForward = false)).flatMap(helper(_, counter, iteration + 1)
)
}
// One full timed resolution; prints the overhead (total minus per-iteration sum).
def res = {
val iterationCounter = new Counter
val resolutionCounter = new Counter
val res0 = timed(
"Resolution",
resolutionCounter,
helper(
startRes.process,
iterationCounter,
0
)
).unsafePerformSync
Console.err.println(s"Overhead: ${resolutionCounter.value - iterationCounter.value} ms")
res0
}
// Warm-up runs discard their result; the final run is the one reported.
@tailrec
def result(warmUp: Int): Resolution =
if (warmUp >= benchmark) {
Console.err.println("Benchmark resolution")
res
} else {
Console.err.println(s"Warm-up ${warmUp + 1} / $benchmark")
res
result(warmUp + 1)
}
result(0)
} else if (benchmark < 0) {
// Whole-resolution timing only (no per-iteration instrumentation).
def res(index: Int) = {
val start = System.currentTimeMillis()
val res0 = startRes
.process
.run(fetch0, maxIterations)
.unsafePerformSync
val end = System.currentTimeMillis()
Console.err.println(s"Resolution ${index + 1} / ${-benchmark}: ${end - start} ms")
res0
}
@tailrec
def result(warmUp: Int): Resolution =
if (warmUp >= -benchmark) {
Console.err.println("Benchmark resolution")
res(warmUp)
} else {
Console.err.println(s"Warm-up ${warmUp + 1} / ${-benchmark}")
res(warmUp)
result(warmUp + 1)
}
result(0)
} else
// Default path: single resolution, no timing.
startRes
.process
.run(fetch0, maxIterations)
.unsafePerformSync
logger.foreach(_.stop())
// Minimal dependency set of the finished resolution, used for plain (non-tree) printing.
val trDeps = res.minDependencies.toVector
lazy val projCache = res.projectCache.mapValues { case (_, p) => p }
// Print the resolution result, either as a (reverse) tree or as a flat list,
// to stdout when requested (-p behaviour) or stderr otherwise.
if (printResultStdout || verbosityLevel >= 1 || tree || reverseTree) {
if ((printResultStdout && verbosityLevel >= 1) || verbosityLevel >= 2 || tree || reverseTree)
errPrintln(s" Result:")
val depsStr =
if (reverseTree || tree)
Print.dependencyTree(
allDependencies,
res,
printExclusions = verbosityLevel >= 1,
reverse = reverseTree
)
else
Print.dependenciesUnknownConfigs(
trDeps,
projCache,
printExclusions = verbosityLevel >= 1
)
if (printResultStdout)
println(depsStr)
else
errPrintln(depsStr)
}
// Accumulate the three failure conditions; exit(1) at the end unless --ignore-errors.
var anyError = false
if (!res.isDone) {
anyError = true
errPrintln("\nMaximum number of iterations reached!")
}
if (res.errors.nonEmpty) {
anyError = true
errPrintln(
"\nError:\n" +
res.errors.map {
case ((module, version), errors) =>
s" $module:$version\n${errors.map(" " + _.replace("\n", " \n")).mkString("\n")}"
}.mkString("\n")
)
}
if (res.conflicts.nonEmpty) {
anyError = true
errPrintln(
s"\nConflict:\n" +
Print.dependenciesUnknownConfigs(
res.conflicts.toVector,
projCache,
printExclusions = verbosityLevel >= 1
)
)
}
if (anyError) {
if (ignoreErrors)
errPrintln("Ignoring errors")
else
sys.exit(1)
}
/**
 * Artifacts of the resolution, filtered down to the requested artifact types.
 *
 * @param sources fetch "sources" classifier artifacts
 * @param javadoc fetch "javadoc" classifier artifacts
 * @param artifactTypes types to retain; the special entry "*" keeps everything
 * @param subset if non-null, restrict the resolution to these dependencies first
 */
def artifacts(
  sources: Boolean,
  javadoc: Boolean,
  artifactTypes: Set[String],
  subset: Set[Dependency] = null
): Seq[Artifact] = {

  // Only announce the fetch for the full artifact set, and only at verbosity >= 1.
  if (subset == null && verbosityLevel >= 1) {
    def isLocal(p: CachePolicy) = p match {
      case CachePolicy.LocalOnly | CachePolicy.LocalUpdate | CachePolicy.LocalUpdateChanging =>
        true
      case _ =>
        false
    }
    val msg =
      if (cachePolicies.forall(isLocal)) " Checking artifacts"
      else " Fetching artifacts"
    errPrintln(msg)
  }

  val resolution =
    if (subset == null) res
    else res.subset(subset)

  val allArtifacts =
    getDepArtifactsForClassifier(sources, javadoc, resolution)
      .map { case (_, artifact) => artifact }

  // "*" disables type filtering entirely.
  if (artifactTypes("*")) allArtifacts
  else allArtifacts.filter(a => artifactTypes(a.`type`))
}
/**
 * Pairs each dependency with its artifacts, honouring any classifier overrides
 * (--classifier / --sources / --javadoc). The returned dependency carries the
 * artifact's classifier in its attributes.
 */
private def getDepArtifactsForClassifier(sources: Boolean, javadoc: Boolean, res0: Resolution): Seq[(Dependency, Artifact)] = {
  val depArtifacts: Seq[(Dependency, Artifact)] =
    if (hasOverrideClassifiers(sources, javadoc))
      // TODO: this function somehow gives duplicated things
      res0.dependencyClassifiersArtifacts(overrideClassifiers(sources, javadoc).toVector.sorted)
    else
      res0.dependencyArtifacts(withOptional = true)

  for ((dep, artifact) <- depArtifacts)
    yield dep.copy(attributes = dep.attributes.copy(classifier = artifact.classifier)) -> artifact
}
/** Classifiers explicitly requested via --classifier, plus "sources" / "javadoc" when asked for. */
private def overrideClassifiers(sources: Boolean, javadoc: Boolean): Set[String] = {
  val extra =
    (if (sources) Set("sources") else Set.empty[String]) ++
      (if (javadoc) Set("javadoc") else Set.empty[String])
  classifier0 ++ extra
}
/** True when any classifier override applies (explicit --classifier, --sources or --javadoc). */
private def hasOverrideClassifiers(sources: Boolean, javadoc: Boolean): Boolean =
  sources || javadoc || classifier0.nonEmpty
/**
 * Downloads (or locates in cache) the artifacts of the resolution and returns
 * a map from artifact URL to the local file. Also writes the JSON report when
 * --json-output-file was given. Exits the process on non-ignorable download errors.
 */
def fetchMap(
sources: Boolean,
javadoc: Boolean,
artifactTypes: Set[String],
subset: Set[Dependency] = null
): Map[String, File] = {
// Attributes are blanked before dedup so the same URL is only downloaded once.
val artifacts0 = artifacts(sources, javadoc, artifactTypes, subset).map { artifact =>
artifact.copy(attributes = Attributes())
}.distinct
val logger =
if (verbosityLevel >= 0)
Some(new TermDisplay(
new OutputStreamWriter(System.err),
fallbackMode = loggerFallbackMode
))
else
None
if (verbosityLevel >= 1 && artifacts0.nonEmpty)
println(s" Found ${artifacts0.length} artifacts")
// One download task per artifact; cache policies are tried in order via orElse.
val tasks = artifacts0.map { artifact =>
def file(policy: CachePolicy) = Cache.file(
artifact,
cache,
policy,
checksums = checksums,
logger = logger,
pool = pool,
ttl = ttl0,
retry = common.retryCount,
cacheFileArtifacts
)
(file(cachePolicies.head) /: cachePolicies.tail)(_ orElse file(_))
.run
.map(artifact.->)
}
logger.foreach(_.init())
val task = Task.gatherUnordered(tasks)
val results = task.unsafePerformSync
// Missing *optional* artifacts are ignored; everything else is fatal below.
val (ignoredErrors, errors) = results
.collect {
case (artifact, Left(err)) =>
artifact -> err
}
.partition {
case (a, err) =>
val notFound = err match {
case _: FileError.NotFound => true
case _ => false
}
a.isOptional && notFound
}
val artifactToFile = results.collect {
case (artifact: Artifact, Right(f)) =>
(artifact.url, f)
}.toMap
logger.foreach(_.stop())
if (verbosityLevel >= 2)
errPrintln(
" Ignoring error(s):\n" +
ignoredErrors
.map {
case (artifact, error) =>
s"${artifact.url}: $error"
}
.mkString("\n")
)
// Hard failure on any non-ignorable download error.
exitIf(errors.nonEmpty) {
s" Error:\n" +
errors
.map {
case (artifact, error) =>
s"${artifact.url}: $error"
}
.mkString("\n")
}
val depToArtifacts: Map[Dependency, Vector[Artifact]] =
getDepArtifactsForClassifier(sources, javadoc, res).groupBy(_._1).mapValues(_.map(_._2).toVector)
// Optional machine-readable report of the resolution + downloaded files.
if (!jsonOutputFile.isEmpty) {
// TODO(wisechengyi): This is not exactly the root dependencies we are asking for on the command line, but it should be
// a strict super set.
val deps: Seq[Dependency] = Set(getDepArtifactsForClassifier(sources, javadoc, res).map(_._1): _*).toSeq
// A map from requested org:name:version to reconciled org:name:version
val conflictResolutionForRoots: Map[String, String] = allDependencies.map({ dep =>
val reconciledVersion: String = res.reconciledVersions
.getOrElse(dep.module, dep.version)
if (reconciledVersion != dep.version) {
Option((s"${dep.module}:${dep.version}", s"${dep.module}:$reconciledVersion"))
}
else {
Option.empty
}
}).filter(_.isDefined).map(_.get).toMap
val artifacts: Seq[(Dependency, Artifact)] = res.dependencyArtifacts
val jsonReq = JsonPrintRequirement(artifactToFile, depToArtifacts)
val roots = deps.toVector.map(JsonElem(_, artifacts, Option(jsonReq), res, printExclusions = verbosityLevel >= 1, excluded = false, colors = false, overrideClassifiers = overrideClassifiers(sources, javadoc)))
val jsonStr = JsonReport(
roots,
conflictResolutionForRoots,
overrideClassifiers(sources, javadoc)
)(
_.children,
_.reconciledVersionStr,
_.requestedVersionStr,
_.downloadedFile
)
val pw = new PrintWriter(new File(jsonOutputFile))
pw.write(jsonStr)
pw.close()
}
artifactToFile
}
def fetch(
sources: Boolean,
javadoc: Boolean,
artifactTypes: Set[String],
subset: Set[Dependency] = null
): Seq[File] = {
fetchMap(sources,javadoc,artifactTypes,subset).values.toSeq
}
// Current thread's context class loader (set by Launch before invoking main).
def contextLoader = Thread.currentThread().getContextClassLoader
/** Topmost (bootstrap-side) class loader, found by walking up from the system loader. */
def baseLoader = {
  @tailrec
  def climb(cl: ClassLoader): ClassLoader =
    cl.getParent match {
      case null => cl
      case parent => climb(parent)
    }
  climb(ClassLoader.getSystemClassLoader)
}
// Builds the class-loader hierarchy for isolated dependency targets (-I/-i options),
// returning the loader the launched app's loader should parent on, plus the JARs
// that remain outside any isolated loader.
lazy val (parentLoader, filteredFiles) = {
// FIXME That shouldn't be hard-coded this way...
// This whole class ought to be rewritten more cleanly.
val artifactTypes = Set("jar", "bundle")
val files0 = fetch(
sources = false,
javadoc = false,
artifactTypes = artifactTypes
)
if (isolated.isolated.isEmpty)
(baseLoader, files0)
else {
val isolatedDeps = isolated.isolatedDeps(common.scalaVersion)
// Each target gets its own IsolatedClassLoader, chained parent-to-child;
// its files are removed from the remaining (non-isolated) file list.
val (isolatedLoader, filteredFiles0) = isolated.targets.foldLeft((baseLoader, files0)) {
case ((parent, files0), target) =>
// FIXME These were already fetched above
val isolatedFiles = fetch(
sources = false,
javadoc = false,
artifactTypes = artifactTypes,
subset = isolatedDeps.getOrElse(target, Seq.empty).toSet
)
if (common.verbosityLevel >= 2) {
Console.err.println(s"Isolated loader files:")
for (f <- isolatedFiles.map(_.toString).sorted)
Console.err.println(s" $f")
}
val isolatedLoader = new IsolatedClassLoader(
isolatedFiles.map(_.toURI.toURL).toArray,
parent,
Array(target)
)
val filteredFiles0 = files0.filterNot(isolatedFiles.toSet)
(isolatedLoader, filteredFiles0)
}
if (common.verbosityLevel >= 2) {
Console.err.println(s"Remaining files:")
for (f <- filteredFiles0.map(_.toString).sorted)
Console.err.println(s" $f")
}
(isolatedLoader, filteredFiles0)
}
}
// Class loader for the launched application: non-isolated JARs plus any --extra-jars,
// parented on the isolation chain (or the base loader when no isolation is used).
lazy val loader = new URLClassLoader(
(filteredFiles ++ extraJars).map(_.toURI.toURL).toArray,
parentLoader
)
// Main class inferred from the fetched JARs' manifests when -M/--main is not given.
// Heuristics, in order: unique main class; main class of the first dependency's own
// artifact (matching org + name, cross-version suffix ignored); unique main class
// within the first dependency's organization. Exits 255 when nothing matches.
lazy val retainedMainClass = {
val mainClasses = Helper.mainClasses(loader)
if (common.verbosityLevel >= 2) {
Console.err.println("Found main classes:")
for (((vendor, title), mainClass) <- mainClasses)
Console.err.println(s" $mainClass (vendor: $vendor, title: $title)")
Console.err.println("")
}
val mainClass =
if (mainClasses.size == 1) {
val (_, mainClass) = mainClasses.head
mainClass
} else {
// TODO Move main class detection code to the coursier-extra module to come, add non regression tests for it
// In particular, check the main class for scalafmt, scalafix, ammonite, ...
// Trying to get the main class of the first artifact
val mainClassOpt = for {
dep: Dependency <- transitiveDeps.headOption
module = dep.module
mainClass <- mainClasses.collectFirst {
case ((org, name), mainClass)
if org == module.organization && (
module.name == name ||
module.name.startsWith(name + "_") // Ignore cross version suffix
) =>
mainClass
}
} yield mainClass
// Fallback: a single main class anywhere within the first dependency's organization.
def sameOrgOnlyMainClassOpt = for {
dep: Dependency <- transitiveDeps.headOption
module = dep.module
orgMainClasses = mainClasses.collect {
case ((org, name), mainClass)
if org == module.organization =>
mainClass
}.toSet
if orgMainClasses.size == 1
} yield orgMainClasses.head
mainClassOpt.orElse(sameOrgOnlyMainClassOpt).getOrElse {
Helper.errPrintln(s"Cannot find default main class. Specify one with -M or --main.")
sys.exit(255)
}
}
mainClass
}
}

View File

@ -1,20 +0,0 @@
package coursier.cli
import java.net.{URL, URLClassLoader}
// URLClassLoader tagged with the isolation target names it was created for,
// discoverable by applications via reflection (see getIsolationTargets).
class IsolatedClassLoader(
urls: Array[URL],
parent: ClassLoader,
isolationTargets: Array[String]
) extends URLClassLoader(urls, parent) {
/**
* Applications wanting to access an isolated `ClassLoader` should inspect the hierarchy of
* loaders, and look into each of them for this method, by reflection. Then they should
* call it (still by reflection), and look for an agreed in advance target in it. If it is found,
* then the corresponding `ClassLoader` is the one with isolated dependencies.
*/
def getIsolationTargets: Array[String] = isolationTargets
}

View File

@ -1,74 +0,0 @@
package coursier
package cli
import java.io.File
import caseapp._
import coursier.cli.options.LaunchOptions
// "launch" command: resolves/fetches dependencies, then runs a main class from them.
object Launch extends CaseApp[LaunchOptions] {
/**
* Loads `mainClass` from `loader` and invokes its static main(Array[String]).
* Exits 255 when the class or its main method cannot be found.
*
* @param beforeMain evaluated just before invoking main (by-name; used for setup hooks)
*/
def apply(
loader: ClassLoader,
mainClass: String,
args: Seq[String],
verbosity: Int,
beforeMain: => Unit = ()
): Unit = {
val cls =
try loader.loadClass(mainClass)
catch { case e: ClassNotFoundException =>
Helper.errPrintln(s"Error: class $mainClass not found")
sys.exit(255)
}
val method =
try cls.getMethod("main", classOf[Array[String]])
catch { case e: NoSuchMethodException =>
Helper.errPrintln(s"Error: method main not found in $mainClass")
sys.exit(255)
}
// main may live in a non-public class/object wrapper.
method.setAccessible(true)
if (verbosity >= 2)
Helper.errPrintln(s"Launching $mainClass ${args.mkString(" ")}")
else if (verbosity == 1)
Helper.errPrintln(s"Launching")
beforeMain
// Make the app's own loader the context loader before handing over control.
Thread.currentThread().setContextClassLoader(loader)
try method.invoke(null, args.toArray)
catch {
case e: java.lang.reflect.InvocationTargetException =>
// Unwrap so the user sees the app's own exception, not the reflection wrapper.
throw Option(e.getCause).getOrElse(e)
}
}
def run(options: LaunchOptions, args: RemainingArgs): Unit = {
// Arguments after "--" are passed through to the launched application.
val userArgs = args.unparsed
val helper = new Helper(
options.common,
args.remaining ++ options.isolated.rawIsolated.map { case (_, dep) => dep },
extraJars = options.extraJars.map(new File(_)),
isolated = options.isolated
)
val mainClass =
if (options.mainClass.isEmpty)
helper.retainedMainClass
else
options.mainClass
Launch(
helper.loader,
mainClass,
userArgs,
options.common.verbosityLevel
)
}
}

View File

@ -1,13 +0,0 @@
package coursier
package cli
import caseapp._
import coursier.cli.options.ResolveOptions
// "resolve" command: Helper's constructor performs the resolution and prints
// the result to stdout as a side effect; the instance itself is discarded.
object Resolve extends CaseApp[ResolveOptions] {
def run(options: ResolveOptions, args: RemainingArgs): Unit = {
new Helper(options.common, args.all, printResultStdout = true)
}
}

View File

@ -1,6 +0,0 @@
package coursier.cli

/** Raised when a local exclusion file (--local-exclude-file) cannot be parsed. */
final class SoftExcludeParsingException(
  private val message: String = "",
  private val cause: Throwable = null
) extends Exception(message, cause)

View File

@ -1,129 +0,0 @@
package coursier.cli
import java.io.{BufferedReader, File, InputStream, InputStreamReader, PipedInputStream, PipedOutputStream, PrintStream}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.Files
import scala.util.control.NonFatal
// Intercepts spark-submit's stdout/stderr line by line, forwarding each line to the
// original stream while also running registered handlers on it (YARN app-id capture,
// idle-timeout watchdog).
object SparkOutputHelper {
/**
* Daemon thread that copies `from` to `to` line by line, calling every handler
* on each line as it passes through.
*/
def outputInspectThread(
name: String,
from: InputStream,
to: PrintStream,
handlers: Seq[String => Unit]
) = {
val t = new Thread {
override def run() = {
val in = new BufferedReader(new InputStreamReader(from))
var line: String = null
while ({
line = in.readLine()
line != null
}) {
to.println(line)
handlers.foreach(_(line))
}
}
}
t.setName(name)
t.setDaemon(true)
t
}
/**
* Installs the output interception machinery. When `yarnAppFileOpt` is set, the first
* YARN application id seen in the output is written to that file. When `maxIdleTimeOpt`
* is set (> 0, seconds), the process exits if no output is produced for that long.
*/
def handleOutput(yarnAppFileOpt: Option[File], maxIdleTimeOpt: Option[Int]): Unit = {
var handlers = Seq.empty[String => Unit]
var threads = Seq.empty[Thread]
for (yarnAppFile <- yarnAppFileOpt) {
val Pattern = ".*Application report for ([^ ]+) .*".r
// Double-checked under `lock` so the id file is written at most once.
@volatile var written = false
val lock = new AnyRef
def handleMessage(s: String): Unit =
if (!written)
s match {
case Pattern(id) =>
lock.synchronized {
if (!written) {
println(s"Detected YARN app ID $id")
Option(yarnAppFile.getParentFile).foreach(_.mkdirs())
Files.write(yarnAppFile.toPath, id.getBytes(UTF_8))
written = true
}
}
case _ =>
}
val f = { line: String =>
// Best-effort: a handler failure must not break output forwarding.
try handleMessage(line)
catch {
case NonFatal(_) =>
}
}
handlers = handlers :+ f
}
for (maxIdleTime <- maxIdleTimeOpt if maxIdleTime > 0) {
// Watchdog: handler bumps the timestamp on every line; the checker resets it,
// sleeps, and exits the whole process if no line arrived in the interval.
@volatile var lastMessageTs = -1L
def updateLastMessageTs() = {
lastMessageTs = System.currentTimeMillis()
}
val checkThread = new Thread {
override def run() =
try {
while (true) {
lastMessageTs = -1L
Thread.sleep(maxIdleTime * 1000L)
if (lastMessageTs < 0) {
Console.err.println(s"No output from spark-submit for more than $maxIdleTime s, exiting")
sys.exit(1)
}
}
} catch {
case t: Throwable =>
Console.err.println(s"Caught $t in check spark-submit output thread!")
throw t
}
}
checkThread.setName("check-spark-submit-output")
checkThread.setDaemon(true)
threads = threads :+ checkThread
val f = { line: String =>
updateLastMessageTs()
}
handlers = handlers :+ f
}
// Replaces a standard stream with a pipe whose reader thread forwards + inspects lines.
def createThread(name: String, replaces: PrintStream, install: PrintStream => Unit): Thread = {
val in = new PipedInputStream
val out = new PipedOutputStream(in)
install(new PrintStream(out))
outputInspectThread(name, in, replaces, handlers)
}
// Only reroute stdout/stderr when there is at least one handler to feed.
if (handlers.nonEmpty) {
threads = threads ++ Seq(
createThread("inspect-out", System.out, System.setOut),
createThread("inspect-err", System.err, System.setErr)
)
threads.foreach(_.start())
}
}
}

View File

@ -1,222 +0,0 @@
package coursier.cli
import java.io.File
import java.net.URLClassLoader
import caseapp._
import coursier.Dependency
import coursier.cli.options.SparkSubmitOptions
import coursier.cli.spark.{SparkAssembly, Submit}
/**
* Submits spark applications.
*
* Can be run with no spark distributions around.
*
* @author Alexandre Archambault
* @author Han Ju
*/
object SparkSubmit extends CaseApp[SparkSubmitOptions] {
/**
* Infers (scalaVersion, sparkVersion) from a spark-core_2.1x dependency among `dependencies`.
* Left(message) when no spark-core module, or several conflicting ones, are found.
*/
def scalaSparkVersions(dependencies: Iterable[Dependency]): Either[String, (String, String)] = {
val sparkCoreMods = dependencies.collect {
case dep if dep.module.organization == "org.apache.spark" &&
(dep.module.name == "spark-core_2.10" || dep.module.name == "spark-core_2.11") =>
(dep.module, dep.version)
}
if (sparkCoreMods.isEmpty)
Left("Cannot find spark among dependencies")
else if (sparkCoreMods.size == 1) {
val scalaVersion = sparkCoreMods.head._1.name match {
case "spark-core_2.10" => "2.10"
case "spark-core_2.11" => "2.11"
case _ => throw new Exception("Cannot happen")
}
val sparkVersion = sparkCoreMods.head._2
Right((scalaVersion, sparkVersion))
} else
Left(s"Found several spark code modules among dependencies (${sparkCoreMods.mkString(", ")})")
}
def run(options: SparkSubmitOptions, args: RemainingArgs): Unit = {
// Job JARs must be plain files; directories would not survive the spark-submit handoff.
val rawExtraJars = options.extraJars.map(new File(_))
val extraDirs = rawExtraJars.filter(_.isDirectory)
if (extraDirs.nonEmpty) {
Console.err.println(s"Error: directories not allowed in extra job JARs.")
Console.err.println(extraDirs.map(" " + _).mkString("\n"))
sys.exit(1)
}
val helper: Helper = new Helper(
options.common,
args.remaining,
extraJars = rawExtraJars
)
val jars =
helper.fetch(
sources = false,
javadoc = false,
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
) ++ options.extraJars.map(new File(_))
// Spark/Scala versions: explicit options win; otherwise inferred from the job's deps.
val (scalaVersion, sparkVersion) =
if (options.sparkVersion.isEmpty)
SparkSubmit.scalaSparkVersions(helper.res.dependencies) match {
case Left(err) =>
Console.err.println(
s"Cannot get spark / scala versions from dependencies: $err\n" +
"Set them via --scala-version or --spark-version"
)
sys.exit(1)
case Right(versions) => versions
}
else
(options.common.scalaVersion, options.sparkVersion)
// Spark 2.x (or --auto-assembly=false): plain spark JARs, passed via spark.yarn.jars.
// Spark 1.x with auto-assembly: build a fat assembly, passed via spark.yarn.jar.
val (sparkYarnExtraConf, sparkBaseJars) =
if (!options.autoAssembly || sparkVersion.startsWith("2.")) {
val assemblyJars = SparkAssembly.sparkJars(
scalaVersion,
sparkVersion,
options.yarnVersion,
options.defaultAssemblyDependencies.getOrElse(options.autoAssembly),
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
options.common,
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
)
val extraConf =
if (options.autoAssembly && sparkVersion.startsWith("2."))
Seq(
"spark.yarn.jars" -> assemblyJars.map(_.getAbsolutePath).mkString(",")
)
else
Nil
(extraConf, assemblyJars)
} else {
val assemblyAndJarsOrError = SparkAssembly.spark(
scalaVersion,
sparkVersion,
options.yarnVersion,
options.defaultAssemblyDependencies.getOrElse(true),
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
options.common,
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
)
val (assembly, assemblyJars) = assemblyAndJarsOrError match {
case Left(err) =>
Console.err.println(s"Cannot get spark assembly: $err")
sys.exit(1)
case Right(res) => res
}
val extraConf = Seq(
"spark.yarn.jar" -> assembly.getAbsolutePath
)
(extraConf, assemblyJars)
}
// Everything before "--" goes to spark-submit, everything after to the job itself.
val idx = {
val idx0 = args.unparsed.indexOf("--")
if (idx0 < 0)
args.unparsed.length
else
idx0
}
assert(idx >= 0)
val sparkOpts = args.unparsed.take(idx)
val jobArgs = args.unparsed.drop(idx + 1)
val mainClass =
if (options.mainClass.isEmpty)
helper.retainedMainClass
else
options.mainClass
// Locate the JAR that actually contains the main class; it is passed
// as spark-submit's primary application JAR, the rest via --jars.
val mainJar = helper
.loader
.loadClass(mainClass) // FIXME Check for errors, provide a nicer error message in that case
.getProtectionDomain
.getCodeSource
.getLocation
.getPath // TODO Safety check: protocol must be file
val (check, extraJars0) = jars.partition(_.getAbsolutePath == mainJar)
val extraJars = extraJars0.filterNot(sparkBaseJars.toSet)
if (check.isEmpty)
Console.err.println(
s"Warning: cannot find back $mainJar among the dependencies JARs (likely a coursier bug)"
)
val extraSparkOpts = sparkYarnExtraConf.flatMap {
case (k, v) => Seq(
"--conf", s"$k=$v"
)
}
val extraJarsOptions =
if (extraJars.isEmpty)
Nil
else
Seq("--jars", extraJars.mkString(","))
val mainClassOptions = Seq("--class", mainClass)
val sparkSubmitOptions = sparkOpts ++ extraSparkOpts ++ extraJarsOptions ++ mainClassOptions ++
Seq(mainJar) ++ jobArgs
// spark-submit itself runs from its own classpath, in a sibling loader.
val submitCp = Submit.cp(
scalaVersion,
sparkVersion,
options.noDefaultSubmitDependencies,
options.submitDependencies.flatMap(_.split(",")).filter(_.nonEmpty),
options.artifactOptions.artifactTypes(sources = false, javadoc = false),
options.common
)
val submitLoader = new URLClassLoader(
submitCp.map(_.toURI.toURL).toArray,
helper.baseLoader
)
Launch(
submitLoader,
Submit.mainClassName,
sparkSubmitOptions,
options.common.verbosityLevel,
{
if (options.common.verbosityLevel >= 1)
Console.err.println(
s"Launching spark-submit with arguments:\n" +
sparkSubmitOptions.map(" " + _).mkString("\n")
)
// Install output inspection (YARN id capture, idle watchdog) just before launch.
SparkOutputHelper.handleOutput(
Some(options.yarnIdFile).filter(_.nonEmpty).map(new File(_)),
Some(options.maxIdleTime).filter(_ > 0)
)
}
)
}
}

View File

@ -1,23 +0,0 @@
package coursier.cli

/** Small exit helpers shared by the CLI commands. */
object Util {

  /** Prints `msg` to stderr and exits with status 255 (usage / argument error). */
  def prematureExit(msg: String): Nothing = {
    Console.err.println(msg)
    sys.exit(255)
  }

  /** Exits with status 255 printing `msg` when `cond` holds; `msg` is only forced then. */
  def prematureExitIf(cond: Boolean)(msg: => String): Unit =
    if (cond) prematureExit(msg)

  /** Prints `msg` to stderr and exits with status 1 (runtime failure). */
  def exit(msg: String): Nothing = {
    Console.err.println(msg)
    sys.exit(1)
  }

  /** Exits with status 1 printing `msg` when `cond` holds; `msg` is only forced then. */
  def exitIf(cond: Boolean)(msg: => String): Unit =
    if (cond) exit(msg)
}

View File

@ -1,36 +0,0 @@
package coursier.cli.options
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }

object ArtifactOptions {
  /** Artifact types kept when no --artifact-type is given. */
  def defaultArtifactTypes = Set("jar", "bundle", "test-jar")
  implicit val parser = Parser[ArtifactOptions]
  implicit val help = caseapp.core.help.Help[ArtifactOptions]
}

/** Options controlling which artifact types are fetched. */
final case class ArtifactOptions(
  // Fixed: the help text used to claim "defaults to jar,bundle", but
  // defaultArtifactTypes above also includes test-jar.
  @Help("Artifact types that should be retained (e.g. jar, src, doc, etc.) - defaults to jar,bundle,test-jar")
  @Value("type1,type2,...")
  @Short("A")
  artifactType: List[String] = Nil,
  @Help("Fetch artifacts even if the resolution is errored")
  force: Boolean = false
) {
  /**
   * Effective set of artifact types to retain.
   *
   * With no explicit types: "src"/"doc" when sources/javadoc are requested,
   * otherwise the defaults. An explicit "*" keeps every type.
   */
  def artifactTypes(sources: Boolean, javadoc: Boolean) = {
    val types0 = artifactType
      .flatMap(_.split(','))
      .filter(_.nonEmpty)
      .toSet
    if (types0.isEmpty) {
      if (sources || javadoc)
        Some("src").filter(_ => sources).toSet ++ Some("doc").filter(_ => javadoc)
      else
        ArtifactOptions.defaultArtifactTypes
    } else if (types0("*"))
      Set("*")
    else
      types0
  }
}

View File

@ -1,15 +0,0 @@
package coursier.cli.options
import caseapp.{Parser, Recurse}
// Options of the "bootstrap" command: artifact filtering plus bootstrap-specific flags.
final case class BootstrapOptions(
@Recurse
artifactOptions: ArtifactOptions,
@Recurse
options: BootstrapSpecificOptions
)
object BootstrapOptions {
implicit val parser = Parser[BootstrapOptions]
implicit val help = caseapp.core.help.Help[BootstrapOptions]
}

View File

@ -1,55 +0,0 @@
package coursier.cli.options
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
// Flags specific to the "bootstrap" command (launcher generation).
final case class BootstrapSpecificOptions(
@Short("M")
@Short("main")
mainClass: String = "",
@Short("o")
output: String = "bootstrap",
@Short("f")
force: Boolean = false,
@Help("Generate a standalone launcher, with all JARs included, instead of one downloading its dependencies on startup.")
@Short("s")
standalone: Boolean = false,
@Help("Include files in generated launcher even in non-standalone mode.")
// NOTE(review): @Short("s") duplicates the short alias of 'standalone' above —
// confirm which flag "-s" is meant to map to.
@Short("s")
embedFiles: Boolean = true,
@Help("Set Java properties in the generated launcher.")
@Value("key=value")
@Short("D")
property: List[String] = Nil,
@Help("Set Java command-line options in the generated launcher.")
@Value("option")
@Short("J")
javaOpt: List[String] = Nil,
@Help("Generate native launcher")
@Short("S")
native: Boolean = false,
@Help("Native compilation target directory")
@Short("d")
target: String = "native-target",
@Help("Don't wipe native compilation target directory (for debug purposes)")
keepTarget: Boolean = false,
@Help("Generate an assembly rather than a bootstrap jar")
@Short("a")
assembly: Boolean = false,
@Help("Add assembly rule")
@Value("append:$path|append-pattern:$pattern|exclude:$path|exclude-pattern:$pattern")
@Short("R")
rule: List[String] = Nil,
@Help("Add default rules to assembly rule list")
defaultRules: Boolean = true,
@Help("Add preamble")
preamble: Boolean = true,
@Recurse
isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
@Recurse
common: CommonOptions = CommonOptions()
)
object BootstrapSpecificOptions {
implicit val parser = Parser[BootstrapSpecificOptions]
implicit val help = caseapp.core.help.Help[BootstrapSpecificOptions]
}

View File

@ -1,14 +0,0 @@
package coursier.cli.options
import caseapp.{ExtraName => Short, HelpMessage => Help, _}
import coursier.Cache
// Cache location option, shared by all commands through CommonOptions.
final case class CacheOptions(
@Help("Cache directory (defaults to environment variable COURSIER_CACHE, or ~/.cache/coursier/v1 on Linux and ~/Library/Caches/Coursier/v1 on Mac)")
cache: String = Cache.default.toString
)
object CacheOptions {
implicit val parser = Parser[CacheOptions]
implicit val help = caseapp.core.help.Help[CacheOptions]
}

View File

@ -1,118 +0,0 @@
package coursier.cli.options
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
import coursier.core.ResolutionProcess
// Resolution / output options shared by every CLI command.
final case class CommonOptions(
@Help("Keep optional dependencies (Maven)")
keepOptional: Boolean = false,
@Help("Download mode (default: missing, that is fetch things missing from cache)")
@Value("offline|update-changing|update|missing|force")
@Short("m")
mode: String = "",
@Help("TTL duration (e.g. \"24 hours\")")
@Value("duration")
@Short("l")
ttl: String = "",
@Help("Quiet output")
@Short("q")
quiet: Boolean = false,
@Help("Increase verbosity (specify several times to increase more)")
@Short("v")
verbose: Int @@ Counter = Tag.of(0),
@Help("Force display of progress bars")
@Short("P")
progress: Boolean = false,
@Help("Maximum number of resolution iterations (specify a negative value for unlimited, default: 100)")
@Short("N")
maxIterations: Int = ResolutionProcess.defaultMaxIterations,
@Help("Repository - for multiple repositories, separate with comma and/or add this option multiple times (e.g. -r central,ivy2local -r sonatype-snapshots, or equivalently -r central,ivy2local,sonatype-snapshots)")
@Value("maven|sonatype:$repo|ivy2local|bintray:$org/$repo|bintray-ivy:$org/$repo|typesafe:ivy-$repo|typesafe:$repo|sbt-plugin:$repo|ivy:$pattern")
@Short("r")
repository: List[String] = Nil,
@Help("Do not add default repositories (~/.ivy2/local, and Central)")
noDefault: Boolean = false,
@Help("Modify names in Maven repository paths for SBT plugins")
sbtPluginHack: Boolean = true,
@Help("Drop module attributes starting with 'info.' - these are sometimes used by projects built with SBT")
dropInfoAttr: Boolean = false,
@Help("Force module version")
@Value("organization:name:forcedVersion")
@Short("V")
forceVersion: List[String] = Nil,
@Help("Force property in POM files")
@Value("name=value")
forceProperty: List[String] = Nil,
// NOTE(review): 'exclude' carries two @Help annotations (here and below) —
// confirm which one caseapp actually renders.
@Help("Exclude module")
@Value("organization:name")
@Short("E")
@Help("Global level exclude")
exclude: List[String] = Nil,
@Short("x")
@Help("Path to the local exclusion file. " +
"Syntax: <org:name>--<org:name>. `--` means minus. Example file content:\n\t" +
"\tcom.twitter.penguin:korean-text--com.twitter:util-tunable-internal_2.11\n\t" +
"\torg.apache.commons:commons-math--com.twitter.search:core-query-nodes\n\t" +
"Behavior: If root module A excludes module X, but root module B requires X, module X will still be fetched.")
localExcludeFile: String = "",
@Help("Default scala version")
@Short("e")
scalaVersion: String = scala.util.Properties.versionNumberString,
@Help("Add intransitive dependencies")
intransitive: List[String] = Nil,
@Help("Classifiers that should be fetched")
@Value("classifier1,classifier2,...")
@Short("C")
classifier: List[String] = Nil,
@Help("Default configuration (default(compile) by default)")
@Value("configuration")
@Short("c")
defaultConfiguration: String = "default(compile)",
@Help("Maximum number of parallel downloads (default: 6)")
@Short("n")
parallel: Int = 6,
@Help("Checksums")
@Value("checksum1,checksum2,... - end with none to allow for no checksum validation if none are available")
checksum: List[String] = Nil,
@Help("Print the duration of each iteration of the resolution")
@Short("B")
@Value("Number of warm-up resolutions - if negative, doesn't print per iteration benchmark (less overhead)")
benchmark: Int = 0,
@Help("Print dependencies as a tree")
@Short("t")
tree: Boolean = false,
@Help("Print dependencies as an inversed tree (dependees as children)")
@Short("T")
reverseTree: Boolean = false,
@Help("Enable profile")
@Value("profile")
@Short("F")
profile: List[String] = Nil,
@Help("Specify path for json output")
@Short("j")
jsonOutputFile: String = "",
@Help("Swap the mainline Scala JARs by Typelevel ones")
typelevel: Boolean = false,
@Recurse
cacheOptions: CacheOptions = CacheOptions(),
@Help("Retry limit for Checksum error when fetching a file")
retryCount: Int = 1,
@Help("Flag that specifies if a local artifact should be cached.")
@Short("cfa")
cacheFileArtifacts: Boolean = false
) {
// -v increments, -q decrements; net level drives all logging decisions.
val verbosityLevel = Tag.unwrap(verbose) - (if (quiet) 1 else 0)
// Comma-separated --classifier values flattened into a set.
lazy val classifier0 = classifier.flatMap(_.split(',')).filter(_.nonEmpty).toSet
}
object CommonOptions {
implicit val parser = Parser[CommonOptions]
implicit val help = caseapp.core.help.Help[CommonOptions]
}

View File

@ -1,24 +0,0 @@
package coursier.cli.options
import caseapp.{ HelpMessage => Help, ExtraName => Short, _ }
// Options of the "fetch" command.
final case class FetchOptions(
@Help("Fetch source artifacts")
@Short("S")
sources: Boolean = false,
@Help("Fetch javadoc artifacts")
@Short("D")
javadoc: Boolean = false,
@Help("Print java -cp compatible output")
@Short("p")
classpath: Boolean = false,
@Recurse
artifactOptions: ArtifactOptions = ArtifactOptions(),
@Recurse
common: CommonOptions = CommonOptions()
)
object FetchOptions {
implicit val parser = Parser[FetchOptions]
implicit val help = caseapp.core.help.Help[FetchOptions]
}

View File

@ -1,81 +0,0 @@
package coursier.cli.options
import caseapp.{ExtraName => Short, HelpMessage => Help, ValueDescription => Value, _}
import coursier.{Attributes, Dependency}
import coursier.util.Parse
// Options describing dependencies to load in isolated class loaders
// ("target:dependency" entries plus the list of target names).
final case class IsolatedLoaderOptions(
@Value("target:dependency")
@Short("I")
isolated: List[String] = Nil,
@Help("Comma-separated isolation targets")
@Short("i")
isolateTarget: List[String] = Nil
) {
def anyIsolatedDep = isolateTarget.nonEmpty || isolated.nonEmpty
// Validated target names; falls back to the single target "default".
// Exits 255 on names containing ':' (reserved as the target/dep separator).
lazy val targets = {
val l = isolateTarget.flatMap(_.split(',')).filter(_.nonEmpty)
val (invalid, valid) = l.partition(_.contains(":"))
if (invalid.nonEmpty) {
Console.err.println(s"Invalid target IDs:")
for (t <- invalid)
Console.err.println(s" $t")
sys.exit(255)
}
if (valid.isEmpty)
Array("default")
else
valid.toArray
}
// -I entries split into those whose prefix matches a known target, and the rest.
lazy val (validIsolated, unrecognizedIsolated) = isolated.partition(s => targets.exists(t => s.startsWith(t + ":")))
// Exits 255 when any -I entry references an unknown target.
def check() = {
if (unrecognizedIsolated.nonEmpty) {
Console.err.println(s"Unrecognized isolation targets in:")
for (i <- unrecognizedIsolated)
Console.err.println(s" $i")
sys.exit(255)
}
}
// (target, raw dependency string) pairs.
lazy val rawIsolated = validIsolated.map { s =>
val Array(target, dep) = s.split(":", 2)
target -> dep
}
// Parsed (module, version) pairs per target; exits 255 on parse errors.
def isolatedModuleVersions(defaultScalaVersion: String) = rawIsolated.groupBy { case (t, _) => t }.map {
case (t, l) =>
val (errors, modVers) = Parse.moduleVersions(l.map { case (_, d) => d }, defaultScalaVersion)
if (errors.nonEmpty) {
errors.foreach(Console.err.println)
sys.exit(255)
}
t -> modVers
}
// Full Dependency instances per target (runtime configuration, blank attributes).
def isolatedDeps(defaultScalaVersion: String) =
isolatedModuleVersions(defaultScalaVersion).map {
case (t, l) =>
t -> l.map {
case (mod, ver) =>
Dependency(
mod,
ver,
configuration = "runtime",
attributes = Attributes("", "")
)
}
}
}
object IsolatedLoaderOptions {
implicit val parser = Parser[IsolatedLoaderOptions]
implicit val help = caseapp.core.help.Help[IsolatedLoaderOptions]
}

View File

@ -1,21 +0,0 @@
package coursier.cli.options
import caseapp.{ HelpMessage => Help, ExtraName => Short, _ }
final case class LaunchOptions(
@Short("M")
@Short("main")
mainClass: String = "",
@Short("J")
@Help("Extra JARs to be added to the classpath of the launched application. Directories accepted too.")
extraJars: List[String] = Nil,
@Recurse
isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
@Recurse
common: CommonOptions = CommonOptions()
)
object LaunchOptions {
implicit val parser = Parser[LaunchOptions]
implicit val help = caseapp.core.help.Help[LaunchOptions]
}

View File

@ -1,8 +0,0 @@
package coursier.cli.options
import caseapp._
final case class ResolveOptions(
@Recurse
common: CommonOptions = CommonOptions()
)

View File

@ -1,40 +0,0 @@
package coursier.cli.options
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
final case class SparkSubmitOptions(
@Short("M")
@Short("main")
@Help("Main class to be launched (optional if in manifest)")
mainClass: String = "",
@Short("J")
@Help("Extra JARs to be added in the classpath of the job")
extraJars: List[String] = Nil,
@Help("If master is yarn-cluster, write YARN app ID to a file. (The ID is deduced from the spark-submit output.)")
@Value("file")
yarnIdFile: String = "",
@Help("Generate Spark Yarn assembly (Spark 1.x) or fetch Spark Yarn jars (Spark 2.x), and supply those to Spark via conf. (Default: true)")
autoAssembly: Boolean = true,
@Help("Include default dependencies in Spark Yarn assembly or jars (see --auto-assembly). If --auto-assembly is false, the corresponding dependencies will still be shunted from the job classpath if this option is true. (Default: same as --auto-assembly)")
defaultAssemblyDependencies: Option[Boolean] = None,
assemblyDependencies: List[String] = Nil,
sparkAssemblyDependencies: List[String] = Nil,
noDefaultSubmitDependencies: Boolean = false,
submitDependencies: List[String] = Nil,
@Help("Spark version - if empty, deduced from the job classpath. (Default: empty)")
sparkVersion: String = "",
@Help("YARN version - only used with Spark 2. (Default: 2.7.3)")
yarnVersion: String = "2.7.3",
@Help("Maximum idle time of spark-submit (time with no output). Exit early if no output from spark-submit for more than this duration. Set to 0 for unlimited. (Default: 0)")
@Value("seconds")
maxIdleTime: Int = 0,
@Recurse
artifactOptions: ArtifactOptions = ArtifactOptions(),
@Recurse
common: CommonOptions = CommonOptions()
)
object SparkSubmitOptions {
implicit val parser = Parser[SparkSubmitOptions]
implicit val help = caseapp.core.help.Help[SparkSubmitOptions]
}

View File

@ -1,162 +0,0 @@
package coursier.cli.scaladex
import java.net.HttpURLConnection
import java.nio.charset.StandardCharsets
import java.util.concurrent.ExecutorService
import argonaut._, Argonaut._, ArgonautShapeless._
import coursier.core.{Artifact, Attributes}
import coursier.interop.scalaz._
import coursier.util.{EitherT, Gather}
import coursier.{Fetch, Module}
import scalaz.concurrent.Task
object Scaladex {
case class SearchResult(
/** GitHub organization */
organization: String,
/** GitHub repository */
repository: String,
/** Scaladex artifact names */
artifacts: List[String] = Nil
)
case class ArtifactInfos(
/** Dependency group ID (aka organization) */
groupId: String,
/** Dependency artifact ID (aka name or module name) */
artifactId: String,
/** Dependency version */
version: String
)
def apply(pool: ExecutorService): Scaladex[Task] =
Scaladex({ url =>
EitherT(Task[Either[String, String]]({
var conn: HttpURLConnection = null
val b = try {
conn = new java.net.URL(url).openConnection().asInstanceOf[HttpURLConnection]
coursier.internal.FileUtil.readFully(conn.getInputStream)
} finally {
if (conn != null)
coursier.Cache.closeConn(conn)
}
Right(new String(b, StandardCharsets.UTF_8))
})(pool))
}, Gather[Task])
def cached(fetch: Fetch.Content[Task]*): Scaladex[Task] =
Scaladex({
url =>
def get(fetch: Fetch.Content[Task]) =
fetch(
Artifact(url, Map(), Map(), Attributes("", ""), changing = true, None)
)
(get(fetch.head) /: fetch.tail)(_ orElse get(_))
}, Gather[Task])
}
// TODO Add F[_] type param, change `fetch` type to `String => EitherT[F, String, String]`, adjust method signatures accordingly, ...
case class Scaladex[F[_]](fetch: String => EitherT[F, String, String], G: Gather[F]) {
private implicit val G0 = G
// quick & dirty API for querying scaladex
def search(name: String, target: String, scalaVersion: String): EitherT[F, String, Seq[Scaladex.SearchResult]] = {
val s = fetch(
// FIXME Escaping
s"https://index.scala-lang.org/api/search?q=$name&target=$target&scalaVersion=$scalaVersion"
)
s.flatMap(s => EitherT.fromEither(s.decodeEither[List[Scaladex.SearchResult]]))
}
/**
*
* @param organization: GitHub organization
* @param repository: GitHub repository name
* @param artifactName: Scaladex artifact name
* @return
*/
def artifactInfos(organization: String, repository: String, artifactName: String): EitherT[F, String, Scaladex.ArtifactInfos] = {
val s = fetch(
// FIXME Escaping
s"https://index.scala-lang.org/api/project?organization=$organization&repository=$repository&artifact=$artifactName"
)
s.flatMap(s => EitherT.fromEither(s.decodeEither[Scaladex.ArtifactInfos]))
}
/**
*
* @param organization: GitHub organization
* @param repository: GitHub repository name
* @return
*/
def artifactNames(organization: String, repository: String): EitherT[F, String, Seq[String]] = {
val s = fetch(
// FIXME Escaping
s"https://index.scala-lang.org/api/project?organization=$organization&repository=$repository"
)
case class Result(artifacts: List[String])
s.flatMap(s => EitherT.fromEither(s.decodeEither[Result].map(_.artifacts)))
}
/**
* Modules / versions known to the Scaladex
*
* Latest version only.
*/
def dependencies(name: String, scalaVersion: String, logger: String => Unit): EitherT[F, String, Seq[(Module, String)]] = {
val idx = name.indexOf('/')
val orgNameOrError =
if (idx >= 0) {
val org = name.take(idx)
val repo = name.drop(idx + 1)
artifactNames(org, repo).map((org, repo, _)): EitherT[F, String, (String, String, Seq[String])]
} else
search(name, "JVM", scalaVersion) // FIXME Don't hardcode
.flatMap {
case Seq(first, _*) =>
logger(s"Using ${first.organization}/${first.repository} for $name")
EitherT.fromEither[F](Right((first.organization, first.repository, first.artifacts)): Either[String, (String, String, Seq[String])])
case Seq() =>
EitherT.fromEither[F](Left(s"No project found for $name"): Either[String, (String, String, Seq[String])])
}
orgNameOrError.flatMap {
case (ghOrg, ghRepo, artifactNames) =>
val moduleVersions = G.map(G.gather(artifactNames.map { artifactName =>
G.map(artifactInfos(ghOrg, ghRepo, artifactName).run) {
case Left(err) =>
logger(s"Cannot get infos about artifact $artifactName from $ghOrg/$ghRepo: $err, ignoring it")
Nil
case Right(infos) =>
logger(s"Found module ${infos.groupId}:${infos.artifactId}:${infos.version}")
Seq(Module(infos.groupId, infos.artifactId) -> infos.version)
}
}))(_.flatten)
EitherT(G.map(moduleVersions) { l =>
if (l.isEmpty)
Left(s"No module found for $ghOrg/$ghRepo")
else
Right(l)
})
}
}
}

View File

@ -1,172 +0,0 @@
package coursier.cli.spark
import java.io.{File, FileOutputStream}
import java.math.BigInteger
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, StandardCopyOption}
import java.security.MessageDigest
import java.util.jar.JarFile
import coursier.Cache
import coursier.cli.Helper
import coursier.cli.options.CommonOptions
import coursier.cli.util.Assembly
object SparkAssembly {
val assemblyRules = Seq[Assembly.Rule](
Assembly.Rule.Append("META-INF/services/org.apache.hadoop.fs.FileSystem"),
Assembly.Rule.Append("reference.conf"),
Assembly.Rule.AppendPattern("META-INF/services/.*"),
Assembly.Rule.Exclude("log4j.properties"),
Assembly.Rule.Exclude(JarFile.MANIFEST_NAME),
Assembly.Rule.ExcludePattern("META-INF/.*\\.[sS][fF]"),
Assembly.Rule.ExcludePattern("META-INF/.*\\.[dD][sS][aA]"),
Assembly.Rule.ExcludePattern("META-INF/.*\\.[rR][sS][aA]")
)
def sparkBaseDependencies(
scalaVersion: String,
sparkVersion: String,
yarnVersion: String
) =
if (sparkVersion.startsWith("2."))
Seq(
s"org.apache.spark::spark-hive-thriftserver:$sparkVersion",
s"org.apache.spark::spark-repl:$sparkVersion",
s"org.apache.spark::spark-hive:$sparkVersion",
s"org.apache.spark::spark-graphx:$sparkVersion",
s"org.apache.spark::spark-mllib:$sparkVersion",
s"org.apache.spark::spark-streaming:$sparkVersion",
s"org.apache.spark::spark-yarn:$sparkVersion",
s"org.apache.spark::spark-sql:$sparkVersion",
s"org.apache.hadoop:hadoop-client:$yarnVersion",
s"org.apache.hadoop:hadoop-yarn-server-web-proxy:$yarnVersion",
s"org.apache.hadoop:hadoop-yarn-server-nodemanager:$yarnVersion"
)
else
Seq(
s"org.apache.spark:spark-core_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-bagel_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-mllib_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-streaming_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-graphx_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-sql_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-repl_$scalaVersion:$sparkVersion",
s"org.apache.spark:spark-yarn_$scalaVersion:$sparkVersion"
)
def sparkJarsHelper(
scalaVersion: String,
sparkVersion: String,
yarnVersion: String,
default: Boolean,
extraDependencies: Seq[String],
options: CommonOptions
): Helper = {
val base = if (default) sparkBaseDependencies(scalaVersion, sparkVersion, yarnVersion) else Seq()
new Helper(options, extraDependencies ++ base)
}
def sparkJars(
scalaVersion: String,
sparkVersion: String,
yarnVersion: String,
default: Boolean,
extraDependencies: Seq[String],
options: CommonOptions,
artifactTypes: Set[String]
): Seq[File] = {
val helper = sparkJarsHelper(scalaVersion, sparkVersion, yarnVersion, default, extraDependencies, options)
helper.fetch(sources = false, javadoc = false, artifactTypes = artifactTypes)
}
def spark(
scalaVersion: String,
sparkVersion: String,
yarnVersion: String,
default: Boolean,
extraDependencies: Seq[String],
options: CommonOptions,
artifactTypes: Set[String],
checksumSeed: Array[Byte] = "v1".getBytes(UTF_8),
localArtifactsShouldBeCached: Boolean = false
): Either[String, (File, Seq[File])] = {
val helper = sparkJarsHelper(scalaVersion, sparkVersion, yarnVersion, default, extraDependencies, options)
val artifacts = helper.artifacts(sources = false, javadoc = false, artifactTypes = artifactTypes)
val jars = helper.fetch(sources = false, javadoc = false, artifactTypes = artifactTypes)
val checksums = artifacts.map { a =>
val f = a.checksumUrls.get("SHA-1") match {
case Some(url) =>
Cache.localFile(url, helper.cache, a.authentication.map(_.user), localArtifactsShouldBeCached)
case None =>
throw new Exception(s"SHA-1 file not found for ${a.url}")
}
val sumOpt = Cache.parseRawChecksum(Files.readAllBytes(f.toPath))
sumOpt match {
case Some(sum) =>
val s = sum.toString(16)
"0" * (40 - s.length) + s
case None =>
throw new Exception(s"Cannot read SHA-1 sum from $f")
}
}
val md = MessageDigest.getInstance("SHA-1")
md.update(checksumSeed)
for (c <- checksums.sorted) {
val b = c.getBytes(UTF_8)
md.update(b, 0, b.length)
}
val digest = md.digest()
val calculatedSum = new BigInteger(1, digest)
val s = calculatedSum.toString(16)
val sum = "0" * (40 - s.length) + s
val destPath = Seq(
sys.props("user.home"),
".coursier",
"spark-assemblies",
s"scala_${scalaVersion}_spark_$sparkVersion",
sum,
"spark-assembly.jar"
).mkString("/")
val dest = new File(destPath)
def success = Right((dest, jars))
if (dest.exists())
success
else
Cache.withLockFor(helper.cache, dest) {
dest.getParentFile.mkdirs()
val tmpDest = new File(dest.getParentFile, s".${dest.getName}.part")
// FIXME Acquire lock on tmpDest
var fos: FileOutputStream = null
try {
fos = new FileOutputStream(tmpDest)
Assembly.make(jars, fos, Nil, assemblyRules)
} finally {
if (fos != null)
fos.close()
}
Files.move(tmpDest.toPath, dest.toPath, StandardCopyOption.ATOMIC_MOVE)
Right((dest, jars))
}.left.map(_.describe)
}
}

View File

@ -1,57 +0,0 @@
package coursier.cli.spark
import java.io.File
import coursier.cli.Helper
import coursier.cli.options.CommonOptions
object Submit {
def cp(
scalaVersion: String,
sparkVersion: String,
noDefault: Boolean,
extraDependencies: Seq[String],
artifactTypes: Set[String],
common: CommonOptions
): Seq[File] = {
var extraCp = Seq.empty[File]
for (yarnConf <- sys.env.get("YARN_CONF_DIR") if yarnConf.nonEmpty) {
val f = new File(yarnConf)
if (!f.isDirectory) {
Console.err.println(s"Error: YARN conf path ($yarnConf) is not a directory or doesn't exist.")
sys.exit(1)
}
extraCp = extraCp :+ f
}
def defaultDependencies = Seq(
// FIXME We whould be able to pass these as (parsed) Dependency instances to Helper
s"org.apache.spark::spark-core:$sparkVersion",
s"org.apache.spark::spark-yarn:$sparkVersion"
)
val helper = new Helper(
common.copy(
intransitive = Nil,
classifier = Nil,
scalaVersion = scalaVersion
),
// FIXME We whould be able to pass these as (parsed) Dependency instances to Helper
(if (noDefault) Nil else defaultDependencies) ++ extraDependencies
)
helper.fetch(
sources = false,
javadoc = false,
artifactTypes = artifactTypes
) ++ extraCp
}
def mainClassName = "org.apache.spark.deploy.SparkSubmit"
}

View File

@ -1,122 +0,0 @@
package coursier.cli.util
import java.io.{File, FileInputStream, OutputStream}
import java.util.jar.{Attributes, JarOutputStream, Manifest}
import java.util.regex.Pattern
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
import scala.collection.mutable
object Assembly {
sealed abstract class Rule extends Product with Serializable
object Rule {
sealed abstract class PathRule extends Rule {
def path: String
}
final case class Exclude(path: String) extends PathRule
final case class ExcludePattern(path: Pattern) extends Rule
object ExcludePattern {
def apply(s: String): ExcludePattern =
ExcludePattern(Pattern.compile(s))
}
// TODO Accept a separator: Array[Byte] argument in these
// (to separate content with a line return in particular)
final case class Append(path: String) extends PathRule
final case class AppendPattern(path: Pattern) extends Rule
object AppendPattern {
def apply(s: String): AppendPattern =
AppendPattern(Pattern.compile(s))
}
}
def make(jars: Seq[File], output: OutputStream, attributes: Seq[(Attributes.Name, String)], rules: Seq[Rule]): Unit = {
val rulesMap = rules.collect { case r: Rule.PathRule => r.path -> r }.toMap
val excludePatterns = rules.collect { case Rule.ExcludePattern(p) => p }
val appendPatterns = rules.collect { case Rule.AppendPattern(p) => p }
val manifest = new Manifest
manifest.getMainAttributes.put(Attributes.Name.MANIFEST_VERSION, "1.0")
for ((k, v) <- attributes)
manifest.getMainAttributes.put(k, v)
var zos: ZipOutputStream = null
try {
zos = new JarOutputStream(output, manifest)
val concatenedEntries = new mutable.HashMap[String, ::[(ZipEntry, Array[Byte])]]
var ignore = Set.empty[String]
for (jar <- jars) {
var fis: FileInputStream = null
var zis: ZipInputStream = null
try {
fis = new FileInputStream(jar)
zis = new ZipInputStream(fis)
for ((ent, content) <- Zip.zipEntries(zis)) {
def append() =
concatenedEntries += ent.getName -> ::((ent, content), concatenedEntries.getOrElse(ent.getName, Nil))
rulesMap.get(ent.getName) match {
case Some(Rule.Exclude(_)) =>
// ignored
case Some(Rule.Append(_)) =>
append()
case None =>
if (!excludePatterns.exists(_.matcher(ent.getName).matches())) {
if (appendPatterns.exists(_.matcher(ent.getName).matches()))
append()
else if (!ignore(ent.getName)) {
ent.setCompressedSize(-1L)
zos.putNextEntry(ent)
zos.write(content)
zos.closeEntry()
ignore += ent.getName
}
}
}
}
} finally {
if (zis != null)
zis.close()
if (fis != null)
fis.close()
}
}
for ((_, entries) <- concatenedEntries) {
val (ent, _) = entries.head
ent.setCompressedSize(-1L)
if (entries.tail.nonEmpty)
ent.setSize(entries.map(_._2.length).sum)
zos.putNextEntry(ent)
// for ((_, b) <- entries.reverse)
// zos.write(b)
zos.write(entries.reverse.toArray.flatMap(_._2))
zos.closeEntry()
}
} finally {
if (zos != null)
zos.close()
}
}
}

View File

@ -1,206 +0,0 @@
package coursier.cli.util
import java.io.File
import java.util.Objects
import coursier.Artifact
import coursier.core.{Attributes, Dependency, Resolution}
import coursier.util.Print
import scala.collection.mutable
import scala.collection.parallel.ParSeq
import argonaut._
import Argonaut._
/**
* Lookup table for files and artifacts to print in the JsonReport.
*/
final case class JsonPrintRequirement(fileByArtifact: Map[String, File], depToArtifacts: Map[Dependency, Vector[Artifact]])
/**
* Represents a resolved dependency's artifact in the JsonReport.
* @param coord String representation of the artifact's maven coordinate.
* @param file The path to the file for the artifact.
* @param dependencies The dependencies of the artifact.
*/
final case class DepNode(coord: String, file: Option[String], dependencies: Set[String])
final case class ReportNode(conflict_resolution: Map[String, String], dependencies: Vector[DepNode], version: String)
/**
* FORMAT_VERSION_NUMBER: Version number for identifying the export file format output. This
* version number should change when there is a change to the output format.
*
* Major Version 1.x.x : Increment this field when there is a major format change
* Minor Version x.1.x : Increment this field when there is a minor change that breaks backward
* compatibility for an existing field or a field is removed.
* Patch version x.x.1 : Increment this field when a minor format change that just adds information
* that an application can safely ignore.
*
* Note format changes in cli/README.md and update the Changelog section.
*/
object ReportNode {
import argonaut.ArgonautShapeless._
implicit val encodeJson = EncodeJson.of[ReportNode]
implicit val decodeJson = DecodeJson.of[ReportNode]
val version = "0.1.0"
}
object JsonReport {
private val printer = PrettyParams.nospace.copy(preserveOrder = true)
def apply[T](roots: IndexedSeq[T], conflictResolutionForRoots: Map[String, String], overrideClassifiers: Set[String])
(children: T => Seq[T], reconciledVersionStr: T => String, requestedVersionStr: T => String, getFile: T => Option[String]): String = {
val rootDeps: ParSeq[DepNode] = roots.par.map(r => {
/**
* Same printing mechanism as [[coursier.util.Tree#recursivePrint]]
*/
def flattenDeps(elems: Seq[T], ancestors: Set[T], acc: mutable.Set[String]): Unit = {
val unseenElems: Seq[T] = elems.filterNot(ancestors.contains)
for (elem <- unseenElems) {
val depElems = children(elem)
acc ++= depElems.map(reconciledVersionStr(_))
if (depElems.nonEmpty) {
flattenDeps(children(elem), ancestors + elem, acc)
}
}
}
val acc = scala.collection.mutable.Set[String]()
flattenDeps(Seq(r), Set(), acc)
DepNode(reconciledVersionStr(r), getFile(r), acc.toSet)
})
val report = ReportNode(conflictResolutionForRoots, rootDeps.toVector.sortBy(_.coord), ReportNode.version)
printer.pretty(report.asJson)
}
}
final case class JsonElem(dep: Dependency,
artifacts: Seq[(Dependency, Artifact)] = Seq(),
jsonPrintRequirement: Option[JsonPrintRequirement],
resolution: Resolution,
colors: Boolean,
printExclusions: Boolean,
excluded: Boolean,
overrideClassifiers: Set[String]
) {
val (red, yellow, reset) =
if (colors)
(Console.RED, Console.YELLOW, Console.RESET)
else
("", "", "")
// This is used to printing json output
// Option of the file path
lazy val downloadedFile: Option[String] = {
jsonPrintRequirement.flatMap(req =>
req.depToArtifacts.getOrElse(dep, Seq())
.filter(_.classifier == dep.attributes.classifier)
.map(x => req.fileByArtifact.get(x.url))
.filter(_.isDefined)
.filter(_.nonEmpty)
.map(_.get.getPath)
.headOption
)
}
lazy val reconciledVersion: String = resolution.reconciledVersions
.getOrElse(dep.module, dep.version)
// These are used to printing json output
val reconciledVersionStr = s"${dep.mavenPrefix}:$reconciledVersion"
val requestedVersionStr = s"${dep.module}:${dep.version}"
lazy val repr =
if (excluded)
resolution.reconciledVersions.get(dep.module) match {
case None =>
s"$yellow(excluded)$reset ${dep.module}:${dep.version}"
case Some(version) =>
val versionMsg =
if (version == dep.version)
"this version"
else
s"version $version"
s"${dep.module}:${dep.version} " +
s"$red(excluded, $versionMsg present anyway)$reset"
}
else {
val versionStr =
if (reconciledVersion == dep.version)
dep.version
else {
val assumeCompatibleVersions = Print.compatibleVersions(dep.version, reconciledVersion)
(if (assumeCompatibleVersions) yellow else red) +
s"${dep.version} -> $reconciledVersion" +
(if (assumeCompatibleVersions || colors) "" else " (possible incompatibility)") +
reset
}
s"${dep.module}:$versionStr"
}
lazy val children: Seq[JsonElem] =
if (excluded)
Nil
else {
val dep0 = dep.copy(version = reconciledVersion)
val dependencies = resolution.dependenciesOf(
dep0,
withReconciledVersions = false
).sortBy { trDep =>
(trDep.module.organization, trDep.module.name, trDep.version)
}.map { d =>
if (overrideClassifiers.contains(dep0.attributes.classifier)) {
d.copy(attributes = d.attributes.copy(classifier = dep0.attributes.classifier))
} else {
d
}
}
def excluded = resolution
.dependenciesOf(
dep0.copy(exclusions = Set.empty),
withReconciledVersions = false
)
.sortBy { trDep =>
(trDep.module.organization, trDep.module.name, trDep.version)
}
.map(_.moduleVersion)
.filterNot(dependencies.map(_.moduleVersion).toSet).map {
case (mod, ver) =>
JsonElem(
Dependency(mod, ver, "", Set.empty, Attributes("", ""), optional = false, transitive = false),
artifacts,
jsonPrintRequirement,
resolution,
colors,
printExclusions,
excluded = true,
overrideClassifiers = overrideClassifiers
)
}
dependencies.map(JsonElem(_, artifacts, jsonPrintRequirement, resolution, colors, printExclusions, excluded = false, overrideClassifiers = overrideClassifiers)) ++
(if (printExclusions) excluded else Nil)
}
/**
* Override the hashcode to explicitly exclude `children`, because children will result in recursive hash on
* children's children, causing performance issue. Hash collision should be rare, but when that happens, the
* default equality check should take of the recursive aspect of `children`.
*/
override def hashCode(): Int = Objects.hash(dep, requestedVersionStr, reconciledVersion, downloadedFile)
}

View File

@ -1,26 +0,0 @@
package coursier.cli.util
import java.util.zip.{ZipEntry, ZipInputStream}
object Zip {
def zipEntries(zipStream: ZipInputStream): Iterator[(ZipEntry, Array[Byte])] =
new Iterator[(ZipEntry, Array[Byte])] {
var nextEntry = Option.empty[ZipEntry]
def update() =
nextEntry = Option(zipStream.getNextEntry)
update()
def hasNext = nextEntry.nonEmpty
def next() = {
val ent = nextEntry.get
val data = coursier.internal.FileUtil.readFully(zipStream)
update()
(ent, data)
}
}
}

View File

@ -1,17 +0,0 @@
junit_tests(
name = "test",
dependencies = [
":lib",
"3rdparty/jvm:scalatest",
"cli/src/main/scala-2.12:cli",
],
sources = rglobs("*.scala", exclude = ["CliTestLib.scala", "CliBootstrapIntegrationTest.scala"]),
)
scala_library(
name='lib',
dependencies = [
"cache:cache"
],
sources = ["CliTestLib.scala"],
)

View File

@ -1,87 +0,0 @@
package coursier.cli
import java.io._
import java.nio.charset.StandardCharsets.UTF_8
import java.util.zip.ZipInputStream
import caseapp.core.RemainingArgs
import coursier.cli.options._
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
/**
* Bootstrap test is not covered by Pants because it does not prebuild a bootstrap.jar
*/
@RunWith(classOf[JUnitRunner])
class CliBootstrapIntegrationTest extends FlatSpec with CliTestLib {
"bootstrap" should "not add POMs to the classpath" in withFile() {
def zipEntryContent(zis: ZipInputStream, path: String): Array[Byte] = {
val e = zis.getNextEntry
if (e == null)
throw new NoSuchElementException(s"Entry $path in zip file")
else if (e.getName == path)
coursier.internal.FileUtil.readFully(zis)
else
zipEntryContent(zis, path)
}
(bootstrapFile, _) =>
val artifactOptions = ArtifactOptions()
val common = CommonOptions(
repository = List("bintray:scalameta/maven")
)
val isolatedLoaderOptions = IsolatedLoaderOptions(
isolateTarget = List("foo"),
isolated = List("foo:org.scalameta:trees_2.12:1.7.0")
)
val bootstrapSpecificOptions = BootstrapSpecificOptions(
output = bootstrapFile.getPath,
isolated = isolatedLoaderOptions,
force = true,
common = common
)
val bootstrapOptions = BootstrapOptions(artifactOptions, bootstrapSpecificOptions)
Bootstrap.run(
bootstrapOptions,
RemainingArgs(Seq("com.geirsson:scalafmt-cli_2.12:1.4.0"), Seq())
)
var fis: InputStream = null
val content = try {
fis = new FileInputStream(bootstrapFile)
coursier.internal.FileUtil.readFully(fis)
} finally {
if (fis != null) fis.close()
}
val actualContent = {
val header = Seq[Byte](0x50, 0x4b, 0x03, 0x04)
val idx = content.indexOfSlice(header)
if (idx < 0)
throw new Exception(s"ZIP header not found in ${bootstrapFile.getPath}")
else
content.drop(idx)
}
val zis = new ZipInputStream(new ByteArrayInputStream(actualContent))
val lines = new String(zipEntryContent(zis, "bootstrap-isolation-foo-jar-urls"), UTF_8).lines.toVector
val extensions = lines
.map { l =>
val idx = l.lastIndexOf('.')
if (idx < 0)
l
else
l.drop(idx + 1)
}
.toSet
assert(extensions == Set("jar"))
}
}

View File

@ -1,957 +0,0 @@
package coursier.cli
import java.io._
import java.net.URLEncoder.encode
import argonaut.Argonaut._
import caseapp.core.RemainingArgs
import coursier.cli.options._
import coursier.cli.util.{DepNode, ReportNode}
import java.io._
import java.net.URLEncoder.encode
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Paths}
import org.junit.runner.RunWith
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.junit.JUnitRunner
import scala.io.Source
@RunWith(classOf[JUnitRunner])
class CliFetchIntegrationTest extends FlatSpec with CliTestLib with Matchers {
def getReportFromJson(f: File): ReportNode = {
// Parse back the output json file
val source = scala.io.Source.fromFile(f)
val str = try source.mkString finally source.close()
str.decodeEither[ReportNode] match {
case Left(error) =>
throw new Exception(s"Error while decoding report: $error")
case Right(report) => report
}
}
private val fileNameLength: DepNode => Int = _.file.getOrElse("").length
"Normal fetch" should "get all files" in {
val fetchOpt = FetchOptions(common = CommonOptions())
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.12"), Seq()))
assert(fetch.files0.map(_.getName).toSet.equals(Set("junit-4.12.jar", "hamcrest-core-1.3.jar")))
}
"scalafmt-cli fetch" should "discover all main classes" in {
val fetchOpt = FetchOptions(common = CommonOptions())
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("com.geirsson:scalafmt-cli_2.12:1.4.0"), Seq()))
Helper.mainClasses(fetch.helper.loader) should contain theSameElementsAs Map (
("", "") -> "com.martiansoftware.nailgun.NGServer",
("com.geirsson", "cli") -> "org.scalafmt.cli.Cli"
)
}
"scalafix-cli fetch" should "discover all main classes" in {
val fetchOpt = FetchOptions(common = CommonOptions())
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("ch.epfl.scala:scalafix-cli_2.12.4:0.5.10"), Seq()))
Helper.mainClasses(fetch.helper.loader) should contain theSameElementsAs Map(
("", "") -> "com.martiansoftware.nailgun.NGServer",
("ch.epfl.scala", "cli") -> "scalafix.cli.Cli"
)
}
"ammonite fetch" should "discover all main classes" in {
val fetchOpt = FetchOptions(common = CommonOptions())
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("com.lihaoyi:ammonite_2.12.4:1.1.0"), Seq()))
Helper.mainClasses(fetch.helper.loader) should contain theSameElementsAs Map(
("", "Javassist") -> "javassist.CtClass",
("" ,"Java Native Access (JNA)") -> "com.sun.jna.Native",
("com.lihaoyi", "ammonite") -> "ammonite.Main"
)
}
"sssio fetch" should "discover all main classes" in {
val fetchOpt = FetchOptions(common = CommonOptions())
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("lt.dvim.sssio:sssio_2.12:0.0.1"), Seq()))
Helper.mainClasses(fetch.helper.loader) should contain theSameElementsAs Map(
("", "") -> "com.kenai.jffi.Main",
("lt.dvim.sssio", "sssio") -> "lt.dvim.sssio.Sssio"
)
}
"Module level" should "exclude correctly" in withFile(
"junit:junit--org.hamcrest:hamcrest-core") { (file, _) =>
withFile() { (jsonFile, _) =>
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.12"), Seq()))
val filesFetched = fetch.files0.map(_.getName).toSet
val expected = Set("junit-4.12.jar")
assert(filesFetched.equals(expected), s"files fetched: $filesFetched not matching expected: $expected")
val node: ReportNode = getReportFromJson(jsonFile)
assert(node.dependencies.length == 1)
assert(node.dependencies.head.coord == "junit:junit:4.12")
}
}
/**
* Result without exclusion:
* |└─ org.apache.avro:avro:1.7.4
* |├─ com.thoughtworks.paranamer:paranamer:2.3
* |├─ org.apache.commons:commons-compress:1.4.1
* |│ └─ org.tukaani:xz:1.0 // this should be fetched
* |├─ org.codehaus.jackson:jackson-core-asl:1.8.8
* |├─ org.codehaus.jackson:jackson-mapper-asl:1.8.8
* |│ └─ org.codehaus.jackson:jackson-core-asl:1.8.8
* |├─ org.slf4j:slf4j-api:1.6.4
* |└─ org.xerial.snappy:snappy-java:1.0.4.1
*/
"avro exclude xz" should "not fetch xz" in withFile(
"org.apache.avro:avro--org.tukaani:xz") { (file, writer) =>
withFile() { (jsonFile, _) =>
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("org.apache.avro:avro:1.7.4"), Seq()))
val filesFetched = fetch.files0.map(_.getName).toSet
assert(!filesFetched.contains("xz-1.0.jar"))
val node: ReportNode = getReportFromJson(jsonFile)
// assert root level dependencies
assert(node.dependencies.map(_.coord).toSet == Set(
"org.apache.avro:avro:1.7.4",
"com.thoughtworks.paranamer:paranamer:2.3",
"org.apache.commons:commons-compress:1.4.1",
"org.codehaus.jackson:jackson-core-asl:1.8.8",
"org.codehaus.jackson:jackson-mapper-asl:1.8.8",
"org.slf4j:slf4j-api:1.6.4",
"org.xerial.snappy:snappy-java:1.0.4.1"
))
// org.apache.commons:commons-compress:1.4.1 should not contain deps underneath it.
val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.4.1")
assert(node.dependencies.exists(_.coord == "org.apache.commons:commons-compress:1.4.1"))
assert(compressNode.get.dependencies.isEmpty)
}
}
  /**
   * Result without exclusion:
   * |├─ org.apache.avro:avro:1.7.4
   * |│  ├─ com.thoughtworks.paranamer:paranamer:2.3
   * |│  ├─ org.apache.commons:commons-compress:1.4.1
   * |│  │  └─ org.tukaani:xz:1.0
   * |│  ├─ org.codehaus.jackson:jackson-core-asl:1.8.8
   * |│  ├─ org.codehaus.jackson:jackson-mapper-asl:1.8.8
   * |│  │  └─ org.codehaus.jackson:jackson-core-asl:1.8.8
   * |│  ├─ org.slf4j:slf4j-api:1.6.4
   * |│  └─ org.xerial.snappy:snappy-java:1.0.4.1
   * |└─ org.apache.commons:commons-compress:1.4.1
   * |   └─ org.tukaani:xz:1.0
   */
  "avro excluding xz + commons-compress" should "still fetch xz" in withFile(
    "org.apache.avro:avro--org.tukaani:xz") {
    (file, writer) =>
      withFile() {
        (jsonFile, _) => {
          // The local exclude file drops org.tukaani:xz from under avro only;
          // commons-compress is also a root dependency, so xz must still be fetched through it.
          val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
          val fetchOpt = FetchOptions(common = commonOpt)
          val fetch = Fetch(fetchOpt, RemainingArgs(Seq("org.apache.avro:avro:1.7.4", "org.apache.commons:commons-compress:1.4.1"), Seq()))
          val filesFetched = fetch.files0.map(_.getName).toSet
          assert(filesFetched.contains("xz-1.0.jar"))
          val node: ReportNode = getReportFromJson(jsonFile)
          // Root level org.apache.commons:commons-compress:1.4.1 should have org.tukaani:xz:1.0 underneath it.
          val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.4.1")
          assert(compressNode.isDefined)
          assert(compressNode.get.dependencies.contains("org.tukaani:xz:1.0"))
          // NOTE(review): despite the name, this looks up the avro node — the exclusion
          // applies to avro's subtree, so xz must NOT appear under it.
          val innerCompressNode = node.dependencies.find(_.coord == "org.apache.avro:avro:1.7.4")
          assert(innerCompressNode.isDefined)
          assert(!innerCompressNode.get.dependencies.contains("org.tukaani:xz:1.0"))
        }
      }
  }

  /**
   * Result:
   * |├─ org.apache.commons:commons-compress:1.4.1
   * |│  └─ org.tukaani:xz:1.0 -> 1.1
   * |└─ org.tukaani:xz:1.1
   */
  "requested xz:1.1" should "not have conflicts" in withFile() {
    (excludeFile, writer) =>
      withFile() {
        (jsonFile, _) => {
          // xz:1.1 is explicitly requested and wins over the transitive 1.0;
          // a plain version bump is not reported as a conflict.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(fetchOpt, RemainingArgs(Seq("org.apache.commons:commons-compress:1.4.1", "org.tukaani:xz:1.1"), Seq()))
          val node: ReportNode = getReportFromJson(jsonFile)
          assert(node.conflict_resolution.isEmpty)
        }
      }
  }

  /**
   * Result:
   * |├─ org.apache.commons:commons-compress:1.5
   * |│  └─ org.tukaani:xz:1.2
   * |└─ org.tukaani:xz:1.1 -> 1.2
   */
  "org.apache.commons:commons-compress:1.5 org.tukaani:xz:1.1" should "have conflicts" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // The explicitly requested xz:1.1 is overridden by the transitive 1.2,
          // so the report records the resolution of that conflict.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(fetchOpt, RemainingArgs(Seq("org.apache.commons:commons-compress:1.5", "org.tukaani:xz:1.1"), Seq()))
          val node: ReportNode = getReportFromJson(jsonFile)
          assert(node.conflict_resolution == Map("org.tukaani:xz:1.1" -> "org.tukaani:xz:1.2"))
        }
      }
  }
  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5
   * |   └─ org.tukaani:xz:1.2
   */
  "classifier tests" should "have tests.jar" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // A "classifier=tests" suffix on the coordinate should resolve to the
          // tests artifact while keeping the regular transitive deps.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(
            fetchOpt,
            RemainingArgs(Seq("org.apache.commons:commons-compress:1.5,classifier=tests"), Seq())
          )
          val node: ReportNode = getReportFromJson(jsonFile)
          // Classified deps are reported with the long coordinate form org:name:packaging:classifier:version.
          val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:jar:tests:1.5")
          assert(compressNode.isDefined)
          compressNode.get.file.map(f => assert(f.contains("commons-compress-1.5-tests.jar"))).orElse(fail("Not Defined"))
          assert(compressNode.get.dependencies.contains("org.tukaani:xz:1.2"))
        }
      }
  }

  /**
   * Result:
   * |├─ org.apache.commons:commons-compress:1.5
   * |│  └─ org.tukaani:xz:1.2
   * |└─ org.apache.commons:commons-compress:1.5
   * |   └─ org.tukaani:xz:1.2
   */
  "mixed vanilla and classifier " should "have tests.jar and .jar" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // Requesting both the plain and the tests-classified artifact of the same
          // module must yield two distinct report entries, one per artifact.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(
            fetchOpt,
            RemainingArgs(
              Seq(
                "org.apache.commons:commons-compress:1.5,classifier=tests",
                "org.apache.commons:commons-compress:1.5"
              ),
              Seq()
            )
          )
          val node: ReportNode = getReportFromJson(jsonFile)
          val compressNodes: Seq[DepNode] = node.dependencies
            .filter(_.coord.startsWith("org.apache.commons:commons-compress"))
            .sortBy(_.coord.length) // sort by coord length: plain coord first, classified (longer) last
          assert(compressNodes.length == 2)
          assert(compressNodes.head.coord == "org.apache.commons:commons-compress:1.5")
          compressNodes.head.file.map( f => assert(f.contains("commons-compress-1.5.jar"))).orElse(fail("Not Defined"))
          assert(compressNodes.last.coord == "org.apache.commons:commons-compress:jar:tests:1.5")
          compressNodes.last.file.map( f => assert(f.contains("commons-compress-1.5-tests.jar"))).orElse(fail("Not Defined"))
        }
      }
  }
  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5
   * |   └─ org.tukaani:xz:1.2 // should not be fetched
   */
  "intransitive" should "only fetch a single jar" in withFile() {
    (_, _) =>
      withFile() {
        (jsonFile, _) => {
          // "intransitive" fetches the listed module only, without its dependencies.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, intransitive = List("org.apache.commons:commons-compress:1.5"))
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(fetchOpt, RemainingArgs(Nil, Nil))
          val node: ReportNode = getReportFromJson(jsonFile)
          val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.5")
          assert(compressNode.isDefined)
          compressNode.get.file.map( f => assert(f.contains("commons-compress-1.5.jar"))).orElse(fail("Not Defined"))
          // No transitive deps: the xz child must be absent.
          assert(compressNode.get.dependencies.isEmpty)
        }
      }
  }

  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5
   * |   └─ org.tukaani:xz:1.2 // should not be fetched
   */
  "intransitive classifier" should "only fetch a single tests jar" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // Same as above, but for the tests-classified artifact.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, intransitive = List("org.apache.commons:commons-compress:1.5,classifier=tests"))
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(fetchOpt, RemainingArgs(Seq(), Seq()))
          val node: ReportNode = getReportFromJson(jsonFile)
          val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:jar:tests:1.5")
          assert(compressNode.isDefined)
          compressNode.get.file.map( f => assert(f.contains("commons-compress-1.5-tests.jar"))).orElse(fail("Not Defined"))
          assert(compressNode.get.dependencies.isEmpty)
        }
      }
  }

  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5 -> 1.4.1
   * |   └─ org.tukaani:xz:1.0
   */
  "classifier with forced version" should "fetch tests jar" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // Forcing 1.4.1 must downgrade the requested 1.5 tests artifact,
          // and its transitive deps must follow the forced version (xz:1.0).
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, forceVersion = List("org.apache.commons:commons-compress:1.4.1"))
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch(
            fetchOpt,
            RemainingArgs(Seq("org.apache.commons:commons-compress:1.5,classifier=tests"), Seq())
          )
          val node: ReportNode = getReportFromJson(jsonFile)
          assert(!node.dependencies.exists(_.coord == "org.apache.commons:commons-compress:1.5"))
          val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:jar:tests:1.4.1")
          assert(compressNode.isDefined)
          compressNode.get.file.map( f => assert(f.contains("commons-compress-1.4.1-tests.jar"))).orElse(fail("Not Defined"))
          assert(compressNode.get.dependencies.size == 1)
          assert(compressNode.get.dependencies.head == "org.tukaani:xz:1.0")
        }
      }
  }

  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5 -> 1.4.1
   * |   └─ org.tukaani:xz:1.0 // should not be there
   */
  "intransitive, classifier, forced version" should "fetch a single tests jar" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // Combination of the three options: forced version applies, classifier
          // applies, and no transitive deps are brought in.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath,
            intransitive = List("org.apache.commons:commons-compress:1.5,classifier=tests"),
            forceVersion = List("org.apache.commons:commons-compress:1.4.1"))
          val fetchOpt = FetchOptions(common = commonOpt)
          Fetch.run(fetchOpt, RemainingArgs(Seq(), Seq()))
          val node: ReportNode = getReportFromJson(jsonFile)
          assert(!node.dependencies.exists(_.coord == "org.apache.commons:commons-compress:1.5"))
          val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:jar:tests:1.4.1")
          assert(compressNode.isDefined)
          compressNode.get.file.map( f => assert(f.contains("commons-compress-1.4.1-tests.jar"))).orElse(fail("Not Defined"))
          assert(compressNode.get.dependencies.isEmpty)
        }
      }
  }
  "profiles" should "be manually (de)activated" in withFile() {
    (jsonFile, _) =>
      // "!name" deactivates a profile; here scala-2.10 is forced on and
      // scala-2.11 forced off, so only 2.10 scala-library versions may appear.
      val commonOpt = CommonOptions(
        jsonOutputFile = jsonFile.getPath,
        profile = List("scala-2.10", "!scala-2.11")
      )
      val fetchOpt = FetchOptions(common = commonOpt)
      Fetch(
        fetchOpt,
        RemainingArgs(Seq("org.apache.spark:spark-core_2.10:2.2.1"), Seq())
      )
      val node = getReportFromJson(jsonFile)
      assert(node.dependencies.exists(_.coord.startsWith("org.scala-lang:scala-library:2.10.")))
      assert(!node.dependencies.exists(_.coord.startsWith("org.scala-lang:scala-library:2.11.")))
  }

  "com.spotify:helios-testing:0.9.193" should "have dependencies with classifiers" in withFile() {
    (excludeFile, _) =>
      withFile() {
        (jsonFile, _) => {
          // helios-testing depends on the shaded classifier of docker-client;
          // the classifier must survive both in the parent's dep list and as its
          // own top-level report entry.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
          val fetchOpt = FetchOptions(common = commonOpt)
          val heliosCoord = "com.spotify:helios-testing:0.9.193"
          Fetch(
            fetchOpt,
            RemainingArgs(Seq(heliosCoord), Seq())
          )
          val node: ReportNode = getReportFromJson(jsonFile)
          val testEntry: DepNode = node.dependencies.find(_.coord == heliosCoord).get
          assert(
            testEntry.dependencies.exists(_.startsWith("com.spotify:docker-client:jar:shaded:")))
          assert(
            node.dependencies.exists(_.coord.startsWith("com.spotify:docker-client:jar:shaded:")))
        }
      }
  }
  /**
   * Result:
   * |└─ a:b:c
   */
  "local file dep url" should "have coursier-fetch-test.jar and cached for second run" in withFile() {
    (jsonFile, _) => {
      withFile("tada", "coursier-fetch-test", ".jar") {
        (testFile, _) => {
          val path = testFile.getAbsolutePath
          val encodedUrl = encode("file://" + path, "UTF-8")
          // cacheFileArtifacts makes coursier copy file:// artifacts into its cache,
          // so the second run below works after the source file is deleted.
          val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, cacheFileArtifacts = true)
          val fetchOpt = FetchOptions(common = commonOpt)
          // fetch with encoded url set to temp jar
          Fetch.run(
            fetchOpt,
            RemainingArgs(
              Seq(
                "a:b:c,url=" + encodedUrl
              ),
              Seq()
            )
          )
          // First run: the report must point at the original local file.
          val node1: ReportNode = getReportFromJson(jsonFile)
          val depNodes1: Seq[DepNode] = node1.dependencies
            .filter(_.coord == "a:b:c")
            .sortBy(fileNameLength)
          assert(depNodes1.length == 1)
          val urlInJsonFile1 = depNodes1.head.file.get
          assert(urlInJsonFile1.contains(path))
          // open jar and inspect contents
          val fileContents1 = Source.fromFile(urlInJsonFile1).getLines.mkString
          assert(fileContents1 == "tada")
          testFile.delete()
          // Second run after deleting the source file: must be served from the cache.
          Fetch.run(
            fetchOpt,
            RemainingArgs(
              Seq(
                "a:b:c,url=" + encodedUrl
              ),
              Seq()
            )
          )
          val node2: ReportNode = getReportFromJson(jsonFile)
          val depNodes2: Seq[DepNode] = node2.dependencies
            .filter(_.coord == "a:b:c")
            .sortBy(fileNameLength)
          assert(depNodes2.length == 1)
          val urlInJsonFile2 = depNodes2.head.file.get
          val inCoursierCache =
            urlInJsonFile2.contains("/.coursier/") || // Former cache path
              urlInJsonFile2.contains("/coursier/") || // New cache path, Linux
              urlInJsonFile2.contains("/Coursier/") // New cache path, OS X
          assert(inCoursierCache && urlInJsonFile2.contains(testFile.toString))
        }
      }
    }
  }
  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5
   */
  "external dep url" should "fetch junit-4.12.jar" in withFile() {
    (jsonFile, _) => {
      // A ",url=" attribute overrides the artifact location: the coordinate
      // resolves as commons-compress but the downloaded file is the junit jar.
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      val fetchOpt = FetchOptions(common = commonOpt)
      // encode path to different jar than requested
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      // fetch with different artifact url
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.5,url=" + externalUrl
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val depNodes: Seq[DepNode] = node.dependencies
        .filter(_.coord == "org.apache.commons:commons-compress:1.5")
        .sortBy(fileNameLength)
      assert(depNodes.length == 1)
      depNodes.head.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
    }
  }

  /**
   * Result:
   * |└─ h:i:j
   */
  "external dep url with arbitrary coords" should "fetch junit-4.12.jar" in withFile() {
    (jsonFile, _) => {
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      val fetchOpt = FetchOptions(common = commonOpt)
      // encode path to different jar than requested
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      // arbitrary coords fail to fetch because... coords need to exist in a repo somewhere to work. fix this.
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "h:i:j,url=" + externalUrl
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      // Even made-up coordinates get a report entry pointing at the overridden url.
      val depNodes: Seq[DepNode] = node.dependencies
        .filter(_.coord == "h:i:j")
        .sortBy(fileNameLength)
      assert(depNodes.length == 1)
      depNodes.head.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
    }
  }
/**
* Result:
* |└─ org.apache.commons:commons-compress:1.5
*/
"external dep url with classifier" should "fetch junit-4.12.jar and classifier gets thrown away" in withFile() {
(jsonFile, _) => {
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
val fetchOpt = FetchOptions(common = commonOpt)
// encode path to different jar than requested
val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
Fetch.run(
fetchOpt,
RemainingArgs(
Seq(
"org.apache.commons:commons-compress:1.5,url=" + externalUrl + ",classifier=tests"
),
Seq()
)
)
val node: ReportNode = getReportFromJson(jsonFile)
val depNodes: Seq[DepNode] = node.dependencies
.filter(_.coord.startsWith("org.apache.commons:commons-compress:"))
.sortBy(fileNameLength)
val coords: Seq[String] = node.dependencies
.map(_.coord)
.sorted
assert(depNodes.length == 1)
// classifier doesn't matter when we have a url so it is not listed
depNodes.head.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
}
}
  /**
   * Result:
   * |├─ org.apache.commons:commons-compress:1.5
   * |│  └─ org.tukaani:xz:1.2
   * |└─ org.tukaani:xz:1.2 // with the file from the URL
   */
  "external dep url with classifier that is a transitive dep" should "fetch junit-4.12.jar and classifier gets thrown away" in withFile() {
    (jsonFile, _) => {
      // xz:1.2 is both a transitive dep of commons-compress and an explicit root
      // with a url override; the two must collapse into a single xz entry whose
      // file comes from the override.
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      val fetchOpt = FetchOptions(common = commonOpt)
      // encode path to different jar than requested
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.5",
            "org.tukaani:xz:1.2,classifier=tests,url="+externalUrl
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val depNodes: Seq[DepNode] = node.dependencies
        .filter(_.coord.startsWith("org.tukaani:xz:"))
        .sortBy(fileNameLength)
      val coords: Seq[String] = node.dependencies.map(_.coord).sorted
      assert(coords == Seq("org.apache.commons:commons-compress:1.5", "org.tukaani:xz:1.2"))
      assert(depNodes.length == 1)
      assert(depNodes.last.file.isDefined)
      depNodes.last.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
    }
  }

  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5,classifier=sources
   *    └─ org.tukaani:xz:1.2,classifier=sources
   */
  "classifier sources" should "fetch sources jar" in withFile() {
    (jsonFile, _) => {
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      // sources=true switches the whole resolution to sources artifacts,
      // so transitive deps are reported with the sources classifier too.
      val fetchOpt = FetchOptions(common = commonOpt, sources=true)
      // encode path to different jar than requested
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.5,classifier=sources"
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val coords: Seq[String] = node.dependencies.map(_.coord).sorted
      val depNodes: Seq[DepNode] = node.dependencies
        .filter(_.coord.startsWith("org.apache.commons"))
        .sortBy(fileNameLength)
      assert(depNodes.length == 1)
      assert(depNodes.head.file.isDefined)
      depNodes.head.file.map(f => assert(f.contains("1.5-sources.jar"))).orElse(fail("Not Defined"))
      depNodes.head.dependencies.foreach(d => {
        assert(d.contains(":sources:"))
      })
      assert(coords == Seq(
        "org.apache.commons:commons-compress:jar:sources:1.5",
        "org.tukaani:xz:jar:sources:1.2")
      )
    }
  }
  /**
   * Result:
   * |├─ org.apache.commons:commons-compress:1.5
   * |└─ org.codehaus.jackson:jackson-mapper-asl:1.8.8
   * |   └─ org.codehaus.jackson:jackson-core-asl:1.8.8
   */
  "external dep url with another dep" should "fetch junit-4.12.jar and jars for jackson-mapper" in withFile() {
    (jsonFile, _) => {
      // A url-overridden dep must not disturb the resolution of a sibling dep:
      // jackson-mapper keeps its normal artifact and its transitive jackson-core.
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      val fetchOpt = FetchOptions(common = commonOpt)
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.5,url=" + externalUrl,
            "org.codehaus.jackson:jackson-mapper-asl:1.8.8"
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val depNodes: Seq[DepNode] = node.dependencies
      assert(depNodes.length == 3)
      // commons-compress entry carries the overridden (junit) file.
      val compressNodes = depNodes
        .filter(_.coord == "org.apache.commons:commons-compress:1.5")
        .sortBy(fileNameLength)
      assert(compressNodes.length == 1)
      compressNodes.head.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
      val jacksonMapperNodes = depNodes
        .filter(_.coord == "org.codehaus.jackson:jackson-mapper-asl:1.8.8")
        .sortBy(fileNameLength)
      assert(jacksonMapperNodes.length == 1)
      jacksonMapperNodes.head.file.map( f => assert(f.contains("org/codehaus/jackson/jackson-mapper-asl/1.8.8/jackson-mapper-asl-1.8.8.jar"))).orElse(fail("Not Defined"))
      assert(jacksonMapperNodes.head.dependencies.size == 1)
      assert(jacksonMapperNodes.head.dependencies.head == "org.codehaus.jackson:jackson-core-asl:1.8.8")
      val jacksonCoreNodes = depNodes
        .filter(_.coord == "org.codehaus.jackson:jackson-core-asl:1.8.8")
        .sortBy(fileNameLength)
      assert(jacksonCoreNodes.length == 1)
      jacksonCoreNodes.head.file.map( f => assert(f.contains("org/codehaus/jackson/jackson-core-asl/1.8.8/jackson-core-asl-1.8.8.jar"))).orElse(fail("Not Defined"))
    }
  }

  /**
   * Result:
   * Error
   */
  "external dep url with forced version" should "throw an error" in withFile() {
    (jsonFile, _) => {
      // Forcing a version different from the url-overridden coordinate is
      // rejected: the override targets 1.5 while 1.4.1 is forced.
      val commonOpt = CommonOptions(
        jsonOutputFile = jsonFile.getPath,
        forceVersion = List("org.apache.commons:commons-compress:1.4.1"))
      val fetchOpt = FetchOptions(common = commonOpt)
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      assertThrows[Exception]({
        Fetch.run(
          fetchOpt,
          RemainingArgs(
            Seq(
              "org.apache.commons:commons-compress:1.5,url=" + externalUrl
            ),
            Seq()
          )
        )
      })
    }
  }
  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.5
   */
  "external dep url with the same forced version" should "fetch junit-4.12.jar" in withFile() {
    (jsonFile, _) => {
      // Forcing the SAME version as the url-overridden coordinate is fine,
      // unlike the mismatched case above.
      val commonOpt = CommonOptions(
        jsonOutputFile = jsonFile.getPath,
        forceVersion = List("org.apache.commons:commons-compress:1.5"))
      val fetchOpt = FetchOptions(common = commonOpt)
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.5,url=" + externalUrl
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val depNodes: Seq[DepNode] = node.dependencies
      assert(depNodes.length == 1)
      depNodes.head.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
    }
  }

  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.4.1 -> 1.5
   */
  "external dep url on higher version" should "fetch junit-4.12.jar" in withFile() {
    (jsonFile, _) => {
      // The url override sits on the higher version (1.5), which wins the
      // resolution, so the winning entry carries the overridden file.
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      val fetchOpt = FetchOptions(common = commonOpt)
      // encode path to different jar than requested
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.4.1",
            "org.apache.commons:commons-compress:1.5,url=" + externalUrl
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val depNodes: Seq[DepNode] = node.dependencies
        .filter(_.coord == "org.apache.commons:commons-compress:1.5")
        .sortBy(fileNameLength)
      assert(depNodes.length == 1)
      depNodes.head.file.map( f => assert(f.contains("junit/junit/4.12/junit-4.12.jar"))).orElse(fail("Not Defined"))
    }
  }

  /**
   * Result:
   * |└─ org.apache.commons:commons-compress:1.4.1 -> 1.5
   * |   └─ org.tukaani:xz:1.2
   */
  "external dep url on lower version" should "fetch higher version" in withFile() {
    (jsonFile, _) => {
      // The url override sits on the LOSING (lower) version, so it is dropped:
      // the winning 1.5 keeps its regular artifact and transitive deps.
      val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
      val fetchOpt = FetchOptions(common = commonOpt)
      // encode path to different jar than requested
      val externalUrl = encode("http://central.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "UTF-8")
      Fetch.run(
        fetchOpt,
        RemainingArgs(
          Seq(
            "org.apache.commons:commons-compress:1.4.1,url=" + externalUrl,
            "org.apache.commons:commons-compress:1.5"
          ),
          Seq()
        )
      )
      val node: ReportNode = getReportFromJson(jsonFile)
      val depNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.5")
      assert(depNode.isDefined)
      depNode.get.file.map( f => assert(f.contains("commons-compress-1.5.jar"))).orElse(fail("Not Defined"))
      assert(depNode.get.dependencies.size == 1)
      assert(depNode.get.dependencies.head.contains("org.tukaani:xz:1.2"))
    }
  }
  "Bad pom resolve" should "succeed with retry" in withTempDir("tmp_dir") {
    dir => {
      // Fetches junit into a throwaway cache and returns the cached pom path,
      // checking that both the pom and its sha1 landed on disk.
      def runFetchJunit() = {
        val fetchOpt = FetchOptions(common = CommonOptions(cacheOptions = CacheOptions(cache = dir.getAbsolutePath)))
        val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.12"), Seq()))
        assert(fetch.files0.map(_.getName).toSet
          .equals(Set("junit-4.12.jar", "hamcrest-core-1.3.jar")))
        val junitJarPath = fetch.files0.map(_.getAbsolutePath()).filter(_.contains("junit-4.12.jar"))
          .head
        val junitPomFile = Paths.get(junitJarPath.replace(".jar", ".pom"))
        val junitPomShaFile = Paths.get(junitJarPath.replace(".jar", ".pom.sha1"))
        assert(Files.isRegularFile(junitPomFile))
        assert(Files.isRegularFile(junitPomShaFile))
        junitPomFile
      }
      val junitPomFile = runFetchJunit()
      val originalPomContent = Files.readAllBytes(junitPomFile)
      // Corrupt the pom content
      Files.write(junitPomFile, "bad pom".getBytes(UTF_8))
      // Run fetch again and it should pass because of retrying on the bad pom.
      val pom = runFetchJunit()
      // The corrupted pom must have been re-downloaded to its original content.
      assert(Files.readAllBytes(pom).sameElements(originalPomContent))
    }
  }

  "Bad jar resolve" should "succeed with retry" in withTempDir("tmp_dir") {
    dir => {
      // Same scenario as above but corrupting the cached jar instead of the pom.
      def runFetchJunit() = {
        val fetchOpt = FetchOptions(common = CommonOptions(cacheOptions = CacheOptions(cache = dir.getAbsolutePath)))
        val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.12"), Seq()))
        assert(fetch.files0.map(_.getName).toSet
          .equals(Set("junit-4.12.jar", "hamcrest-core-1.3.jar")))
        val junitJarPath = fetch.files0.map(_.getAbsolutePath()).filter(_.contains("junit-4.12.jar"))
          .head
        Paths.get(junitJarPath)
      }
      val originalJunitJar = runFetchJunit()
      val originalJunitJarContent = Files.readAllBytes(originalJunitJar)
      // Corrupt the jar content
      Files.write(originalJunitJar, "bad jar".getBytes(UTF_8))
      // Run fetch again and it should pass because of retrying on the bad jar.
      val jar = runFetchJunit()
      assert(Files.readAllBytes(jar).sameElements(originalJunitJarContent))
    }
  }

  "Wrong range partial artifact resolve" should "succeed with retry" in withTempDir("tmp_dir") {
    dir => {
      // Leaves a stale ".part" download next to the jar; mode = "force" must
      // re-download it rather than trust the leftover partial file.
      def runFetchJunit() = {
        val fetchOpt = FetchOptions(common = CommonOptions(mode = "force", cacheOptions = CacheOptions(cache = dir.getAbsolutePath)))
        val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.6"), Seq()))
        assert(fetch.files0.map(_.getName).toSet
          .equals(Set("junit-4.6.jar")))
        val junitJarPath = fetch.files0.map(_.getAbsolutePath()).filter(_.contains("junit-4.6.jar"))
          .head
        Paths.get(junitJarPath)
      }
      val originalJunitJar = runFetchJunit()
      val originalJunitJarContent = Files.readAllBytes(originalJunitJar)
      // Move the jar to partial (but complete) download
      val newJunitJar = originalJunitJar.getParent.resolve(originalJunitJar.getFileName.toString + ".part")
      Files.move(originalJunitJar, newJunitJar)
      // Run fetch again and it should pass because of retrying on the partial jar.
      val jar = runFetchJunit()
      assert(Files.readAllBytes(jar).sameElements(originalJunitJarContent))
    }
  }
}

View File

@ -1,50 +0,0 @@
package coursier.cli
import java.io.{File, FileWriter}
import java.nio.file.Files
trait CliTestLib {

  /**
   * Loans a temporary file (plus an open `FileWriter` on it) to `testCode`.
   *
   * The file is created via `File.createTempFile(fileName, suffix)`, pre-filled
   * with `content`, and the writer is flushed before the body runs. The writer
   * is closed and the file deleted whatever the outcome of `testCode`.
   */
  def withFile(content: String = "",
               fileName: String = "hello",
               suffix: String = "world")(testCode: (File, FileWriter) => Any): Unit = {
    val tmpFile = File.createTempFile(fileName, suffix) // create the fixture
    val out = new FileWriter(tmpFile)
    out.write(content)
    out.flush()
    try
      testCode(tmpFile, out) // "loan" the fixture to the test
    finally {
      out.close()
      tmpFile.delete()
    }
  }

  /**
   * Loans a fresh temporary directory to `testCode`, recursively removing it
   * once the body completes.
   */
  def withTempDir(
    prefix: String
  )(testCode: File => Any): Unit = {
    val tmpDir = Files.createTempDirectory(prefix).toFile // create the fixture
    try
      testCode(tmpDir) // "loan" the fixture to the test
    finally
      cleanDir(tmpDir)
  }

  /**
   * Best-effort recursive deletion of `tmpDir`; prints a warning on stderr if
   * any entry could not be removed.
   */
  def cleanDir(tmpDir: File): Unit = {
    def remove(target: File): Boolean = {
      val childrenRemoved =
        if (target.isDirectory)
          Option(target.listFiles()).toSeq.flatten.map(remove).forall(identity)
        else
          true
      // Delete the entry itself after (any) children, as rmdir requires.
      val selfRemoved = target.delete()
      childrenRemoved && selfRemoved
    }
    if (!remove(tmpDir))
      Console.err.println(
        s"Warning: unable to remove temporary directory $tmpDir")
  }
}

View File

@ -1,73 +0,0 @@
package coursier.cli
import java.io.{File, FileWriter}
import coursier.cli.options.CommonOptions
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class CliUnitTest extends FlatSpec {

  /**
   * Loans a temporary file pre-filled with `content` (plus its open writer) to
   * `testCode`, closing the writer and deleting the file afterwards.
   */
  def withFile(content: String)(testCode: (File, FileWriter) => Any): Unit = {
    val tmpFile = File.createTempFile("hello", "world") // create the fixture
    val out = new FileWriter(tmpFile)
    out.write(content)
    out.flush()
    try
      testCode(tmpFile, out) // "loan" the fixture to the test
    finally {
      out.close()
      tmpFile.delete()
    }
  }

  "Normal text" should "parse correctly" in withFile(
    "org1:name1--org2:name2") { (excludesFile, _) =>
    // One "parent--child" line maps the parent to a single excluded (org, name) pair.
    val options = CommonOptions(localExcludeFile = excludesFile.getAbsolutePath)
    val helper = new Helper(options, Seq())
    assert(helper.localExcludeMap == Map("org1:name1" -> Set(("org2", "name2"))))
  }

  "Multiple excludes" should "be combined" in withFile(
    "org1:name1--org2:name2\n" +
      "org1:name1--org3:name3\n" +
      "org4:name4--org5:name5") { (excludesFile, _) =>
    // Lines sharing the same parent are merged into one map entry.
    val options = CommonOptions(localExcludeFile = excludesFile.getAbsolutePath)
    val helper = new Helper(options, Seq())
    assert(helper.localExcludeMap == Map(
      "org1:name1" -> Set(("org2", "name2"), ("org3", "name3")),
      "org4:name4" -> Set(("org5", "name5"))))
  }

  "extra --" should "error" in withFile(
    "org1:name1--org2:name2--xxx\n" +
      "org1:name1--org3:name3\n" +
      "org4:name4--org5:name5") { (excludesFile, _) =>
    // More than one "--" separator on a line is malformed input.
    assertThrows[SoftExcludeParsingException] {
      new Helper(CommonOptions(localExcludeFile = excludesFile.getAbsolutePath), Seq())
    }
  }

  "child has no name" should "error" in withFile(
    "org1:name1--org2:") { (excludesFile, _) =>
    // A child with an org but no name is rejected.
    assertThrows[SoftExcludeParsingException] {
      new Helper(CommonOptions(localExcludeFile = excludesFile.getAbsolutePath), Seq())
    }
  }

  "child has nothing" should "error" in withFile(
    "org1:name1--:") { (excludesFile, _) =>
    // A completely empty child coordinate is rejected.
    assertThrows[SoftExcludeParsingException] {
      new Helper(CommonOptions(localExcludeFile = excludesFile.getAbsolutePath), Seq())
    }
  }
}

View File

@ -1,60 +0,0 @@
package coursier.cli.util
import coursier.cli.CliTestLib
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class JsonReportTest extends FlatSpec with CliTestLib {
  "empty JsonReport" should "be empty" in {
    // With no roots and no conflicts, only the fixed report envelope is emitted.
    val report: String = JsonReport[String](IndexedSeq(), Map(), Set())(
      children = _ => Seq(),
      reconciledVersionStr = _ => "",
      requestedVersionStr = _ => "",
      getFile = _ => Option("")
    )
    assert(
      report == "{\"conflict_resolution\":{},\"dependencies\":[],\"version\":\"0.1.0\"}")
  }
  "JsonReport containing two deps" should "not be empty" in {
    // "a" depends on "b"; both are roots, and coords are rendered with the
    // reconciledVersionStr callback.
    val children = Map("a" -> Seq("b"), "b" -> Seq())
    val report: String = JsonReport[String](
      roots = IndexedSeq("a", "b"),
      conflictResolutionForRoots = Map(),
      overrideClassifiers = Set()
    )(
      children = children(_),
      reconciledVersionStr = s => s"$s:reconciled",
      requestedVersionStr = s => s"$s:requested",
      getFile = _ => Option("")
    )
    assert(
      report == "{\"conflict_resolution\":{},\"dependencies\":[" +
        "{\"coord\":\"a:reconciled\",\"file\":\"\",\"dependencies\":[\"b:reconciled\"]}," +
        "{\"coord\":\"b:reconciled\",\"file\":\"\",\"dependencies\":[]}]," +
        "\"version\":\"0.1.0\"}")
  }
  "JsonReport containing two deps" should "be sorted alphabetically regardless of input order" in {
    // Same fixture as above but with roots supplied in reverse order: the
    // emitted JSON must be identical (dependencies sorted by coord).
    val children = Map("a" -> Seq("b"), "b" -> Seq())
    val report: String = JsonReport[String](
      roots = IndexedSeq( "b", "a"),
      conflictResolutionForRoots = Map(),
      overrideClassifiers = Set()
    )(
      children = children(_),
      reconciledVersionStr = s => s"$s:reconciled",
      requestedVersionStr = s => s"$s:requested",
      getFile = _ => Option("")
    )
    assert(
      report == "{\"conflict_resolution\":{},\"dependencies\":[" +
        "{\"coord\":\"a:reconciled\",\"file\":\"\",\"dependencies\":[\"b:reconciled\"]}," +
        "{\"coord\":\"b:reconciled\",\"file\":\"\",\"dependencies\":[]}]," +
        "\"version\":\"0.1.0\"}")
  }
}

View File

@ -1,11 +0,0 @@
# Pants target for coursier's core library, built from both the JVM-specific
# and the shared (JVM/JS) Scala sources.
scala_library(
  name = "core",
  dependencies = [
    "3rdparty/jvm:fastParse",
    "3rdparty/jvm:jsoup",
    # TODO(wisechengyi) for some reason there is no compile error
    # and this is needed at runtime.
    "3rdparty/jvm:scala-xml",
  ],
  sources = rglobs("jvm/*.scala", "shared/*.scala"),
)

View File

@ -1,131 +0,0 @@
package coursier.core
import scala.scalajs.js
import js.Dynamic.{ global => g }
import org.scalajs.dom.raw.NodeList
import coursier.util.Xml
import scala.collection.mutable.ListBuffer
package object compatibility {

  /** Wraps a possibly-`undefined` JS value as an `Option[A]` (unchecked cast). */
  def option[A](a: js.Dynamic): Option[A] =
    if (js.isUndefined(a)) None
    else Some(a.asInstanceOf[A])

  /** Like `option`, but keeps the value as `js.Dynamic`. */
  def dynOption(a: js.Dynamic): Option[js.Dynamic] =
    if (js.isUndefined(a)) None
    else Some(a)

  // Inclusive range check backing the ASCII-only RichChar helpers below.
  private def between(c: Char, lower: Char, upper: Char) = lower <= c && c <= upper

  /** ASCII-only replacements for `Character.isLetter` / `isLetterOrDigit`. */
  implicit class RichChar(val c: Char) extends AnyVal {
    def letterOrDigit: Boolean = {
      between(c, '0', '9') || letter
    }
    def letter: Boolean = between(c, 'a', 'z') || between(c, 'A', 'Z')
  }

  /**
   * Instantiates constructor `name` from the global scope when it exists
   * (browsers), falling back to the "xmldom" node module otherwise (node.js).
   */
  def newFromXmlDomOrGlobal(name: String) = {
    var defn = g.selectDynamic(name)
    if (js.isUndefined(defn))
      defn = g.require("xmldom").selectDynamic(name)
    js.Dynamic.newInstance(defn)()
  }

  lazy val DOMParser = newFromXmlDomOrGlobal("DOMParser")
  lazy val XMLSerializer = newFromXmlDomOrGlobal("XMLSerializer")

  // Can't find these from node
  val ELEMENT_NODE = 1 // org.scalajs.dom.raw.Node.ELEMENT_NODE
  val TEXT_NODE = 3 // org.scalajs.dom.raw.Node.TEXT_NODE

  /** Adapts a DOM node to coursier's minimal `Xml.Node` interface. */
  def fromNode(node: org.scalajs.dom.raw.Node): Xml.Node = {
    val node0 = node.asInstanceOf[js.Dynamic]
    new Xml.Node {
      def label =
        option[String](node0.nodeName)
          .getOrElse("")
      def children =
        option[NodeList](node0.childNodes)
          .map(l => List.tabulate(l.length)(l.item).map(fromNode))
          .getOrElse(Nil)
      // NOTE(review): attribute access is not implemented on the JS backend.
      def attributes = ???
      // `exists` instead of `contains`, for scala 2.10
      def isText =
        option[Int](node0.nodeType)
          .exists(_ == TEXT_NODE)
      def textContent =
        option(node0.textContent)
          .getOrElse("")
      def isElement =
        option[Int](node0.nodeType)
          .exists(_ == ELEMENT_NODE)
      override def toString =
        XMLSerializer.serializeToString(node).asInstanceOf[String]
    }
  }

  /**
   * Parses `s` as XML and returns the first element node, skipping any leading
   * comment nodes. Empty input (or no element node) yields `Xml.Node.empty`.
   * NOTE(review): parse failures are folded into the empty node — the result
   * is always a `Right` as currently written.
   */
  def xmlParse(s: String): Either[String, Xml.Node] = {
    val doc = {
      if (s.isEmpty) None
      else {
        for {
          xmlDoc <- dynOption(DOMParser.parseFromString(s, "text/xml"))
          rootNodes <- dynOption(xmlDoc.childNodes)
          // From node, rootNodes.head is sometimes just a comment instead of the main root node
          // (tested with org.ow2.asm:asm-commons in CentralTests)
          rootNode <- rootNodes.asInstanceOf[js.Array[js.Dynamic]]
            .flatMap(option[org.scalajs.dom.raw.Node])
            .dropWhile(_.nodeType != ELEMENT_NODE)
            .headOption
        } yield rootNode
      }
    }
    Right(doc.fold(Xml.Node.empty)(fromNode))
  }

  /** Thin wrapper over the JS global `encodeURIComponent`. */
  def encodeURIComponent(s: String): String =
    g.encodeURIComponent(s).asInstanceOf[String]

  // FIXME Won't work in the browser
  lazy val cheerio = g.require("cheerio")

  // True when a jQuery-like `$` is available in the global scope (browser case).
  lazy val jqueryAvailable = !js.isUndefined(g.$)

  /**
   * Extracts the raw `href` attribute of every `<a>` element in `page`,
   * using jQuery when available and the cheerio node module otherwise.
   */
  def listWebPageRawElements(page: String): Seq[String] = {
    val links = new ListBuffer[String]
    // getting weird "maybe a wrong Dynamic method signature" errors when trying to factor that more
    if (jqueryAvailable)
      g.$("<div></div>").html(page).find("a").each({ self: js.Dynamic =>
        val href = g.$(self).attr("href")
        if (!js.isUndefined(href))
          links += href.asInstanceOf[String]
        ()
      }: js.ThisFunction0[js.Dynamic, Unit])
    else {
      val jquery = cheerio.load(page)
      jquery("a").each({ self: js.Dynamic =>
        val href = jquery(self).attr("href")
        if (!js.isUndefined(href))
          links += href.asInstanceOf[String]
        ()
      }: js.ThisFunction0[js.Dynamic, Unit])
    }
    links.result()
  }

  // NOTE(review): presumably ":" stands in for a lookbehind marker unsupported
  // by JS regexes — confirm against the JVM counterpart of this package object.
  def regexLookbehind: String = ":"
}

View File

@ -1,99 +0,0 @@
package coursier.core.compatibility
/**
 * HTML named character entities mapped to the numeric character references
 * that plain XML parsers accept. Used to pre-process POMs that (incorrectly)
 * use HTML entities.
 */
object Entities {
  // Generated via https://gist.github.com/alexarchambault/79388ff31ec8cbddf6607b55ab2f6527
  // Completed with the ISO-8859-1 entities the generated list was missing
  // (&sup2; &sup3; &sup1; &frac14; &frac12; &frac34;), per the HTML 4.01
  // character entity reference list.
  val entities = Vector(
    ("&nbsp;", "&#160;"),
    ("&iexcl;", "&#161;"),
    ("&cent;", "&#162;"),
    ("&pound;", "&#163;"),
    ("&curren;", "&#164;"),
    ("&yen;", "&#165;"),
    ("&brvbar;", "&#166;"),
    ("&sect;", "&#167;"),
    ("&uml;", "&#168;"),
    ("&copy;", "&#169;"),
    ("&ordf;", "&#170;"),
    ("&laquo;", "&#171;"),
    ("&not;", "&#172;"),
    ("&shy;", "&#173;"),
    ("&reg;", "&#174;"),
    ("&macr;", "&#175;"),
    ("&deg;", "&#176;"),
    ("&plusmn;", "&#177;"),
    ("&sup2;", "&#178;"),
    ("&sup3;", "&#179;"),
    ("&acute;", "&#180;"),
    ("&micro;", "&#181;"),
    ("&para;", "&#182;"),
    ("&middot;", "&#183;"),
    ("&cedil;", "&#184;"),
    ("&sup1;", "&#185;"),
    ("&ordm;", "&#186;"),
    ("&raquo;", "&#187;"),
    ("&frac14;", "&#188;"),
    ("&frac12;", "&#189;"),
    ("&frac34;", "&#190;"),
    ("&iquest;", "&#191;"),
    ("&Agrave;", "&#192;"),
    ("&Aacute;", "&#193;"),
    ("&Acirc;", "&#194;"),
    ("&Atilde;", "&#195;"),
    ("&Auml;", "&#196;"),
    ("&Aring;", "&#197;"),
    ("&AElig;", "&#198;"),
    ("&Ccedil;", "&#199;"),
    ("&Egrave;", "&#200;"),
    ("&Eacute;", "&#201;"),
    ("&Ecirc;", "&#202;"),
    ("&Euml;", "&#203;"),
    ("&Igrave;", "&#204;"),
    ("&Iacute;", "&#205;"),
    ("&Icirc;", "&#206;"),
    ("&Iuml;", "&#207;"),
    ("&ETH;", "&#208;"),
    ("&Ntilde;", "&#209;"),
    ("&Ograve;", "&#210;"),
    ("&Oacute;", "&#211;"),
    ("&Ocirc;", "&#212;"),
    ("&Otilde;", "&#213;"),
    ("&Ouml;", "&#214;"),
    ("&times;", "&#215;"),
    ("&Oslash;", "&#216;"),
    ("&Ugrave;", "&#217;"),
    ("&Uacute;", "&#218;"),
    ("&Ucirc;", "&#219;"),
    ("&Uuml;", "&#220;"),
    ("&Yacute;", "&#221;"),
    ("&THORN;", "&#222;"),
    ("&szlig;", "&#223;"),
    ("&agrave;", "&#224;"),
    ("&aacute;", "&#225;"),
    ("&acirc;", "&#226;"),
    ("&atilde;", "&#227;"),
    ("&auml;", "&#228;"),
    ("&aring;", "&#229;"),
    ("&aelig;", "&#230;"),
    ("&ccedil;", "&#231;"),
    ("&egrave;", "&#232;"),
    ("&eacute;", "&#233;"),
    ("&ecirc;", "&#234;"),
    ("&euml;", "&#235;"),
    ("&igrave;", "&#236;"),
    ("&iacute;", "&#237;"),
    ("&icirc;", "&#238;"),
    ("&iuml;", "&#239;"),
    ("&eth;", "&#240;"),
    ("&ntilde;", "&#241;"),
    ("&ograve;", "&#242;"),
    ("&oacute;", "&#243;"),
    ("&ocirc;", "&#244;"),
    ("&otilde;", "&#245;"),
    ("&ouml;", "&#246;"),
    ("&divide;", "&#247;"),
    ("&oslash;", "&#248;"),
    ("&ugrave;", "&#249;"),
    ("&uacute;", "&#250;"),
    ("&ucirc;", "&#251;"),
    ("&uuml;", "&#252;"),
    ("&yacute;", "&#253;"),
    ("&thorn;", "&#254;"),
    ("&yuml;", "&#255;")
  )
}

View File

@ -1,84 +0,0 @@
package coursier.core
import coursier.util.Xml
import java.util.regex.Pattern.quote
import scala.collection.JavaConverters._
import scala.xml.{ Attribute, MetaData, Null }
import org.jsoup.Jsoup
package object compatibility {
  // Char helpers shared with the Scala.js sources (where Character methods differ)
  implicit class RichChar(val c: Char) extends AnyVal {
    def letterOrDigit = c.isLetterOrDigit
    def letter = c.isLetter
  }
  // matches HTML-style named entities, e.g. "&eacute;"
  private val entityPattern = (quote("&") + "[a-zA-Z]+" + quote(";")).r
  // UTF-8 byte order mark, sometimes found at the start of POMs
  private val utf8Bom = "\ufeff"
  /**
   * Parses `s` as XML, returning an error message on the left on failure.
   *
   * HTML named entities are first replaced with numeric references (XML
   * parsers only know the predefined five), and a leading UTF-8 BOM is
   * stripped.
   */
  def xmlParse(s: String): Either[String, Xml.Node] = {
    val content =
      if (entityPattern.findFirstIn(s).isEmpty)
        s
      else
        Entities.entities.foldLeft(s) {
          case (s0, (target, replacement)) =>
            s0.replace(target, replacement)
        }
    def parse =
      try Right(scala.xml.XML.loadString(content.stripPrefix(utf8Bom)))
      catch { case e: Exception => Left(e.toString + Option(e.getMessage).fold("")(" (" + _ + ")")) }
    // adapts a scala-xml node to coursier's backend-agnostic Xml.Node
    def fromNode(node: scala.xml.Node): Xml.Node =
      new Xml.Node {
        lazy val attributes = {
          // walk the MetaData linked list, yielding (namespace, key, value)
          def helper(m: MetaData): Stream[(String, String, String)] =
            m match {
              case Null => Stream.empty
              case attr =>
                val pre = attr match {
                  case a: Attribute => Option(node.getNamespace(a.pre)).getOrElse("")
                  case _ => ""
                }
                val value = attr.value.collect {
                  case scala.xml.Text(t) => t
                }.mkString("")
                (pre, attr.key, value) #:: helper(m.next)
            }
          helper(node.attributes).toVector
        }
        def label = node.label
        def children = node.child.map(fromNode)
        def isText = node match { case _: scala.xml.Text => true; case _ => false }
        def textContent = node.text
        def isElement = node match { case _: scala.xml.Elem => true; case _ => false }
        override def toString = node.toString
      }
    parse.right
      .map(fromNode)
  }
  /** Percent-encodes `s` so it can be used as a URI path segment. */
  def encodeURIComponent(s: String): String =
    new java.net.URI(null, null, null, -1, s, null, null) .toASCIIString
  /** Raw `href` attributes of all `<a>` elements in `page`, via jsoup. */
  def listWebPageRawElements(page: String): Seq[String] =
    Jsoup.parse(page)
      .select("a")
      .asScala
      .toVector
      .map(_.attr("href"))
  // regex lookbehind marker; the Scala.js counterpart returns ":" instead
  def regexLookbehind: String = "<="
}

View File

@ -1,77 +0,0 @@
package coursier.maven
import coursier.core.{ Dependency, Project }
/** Renders project metadata as a Maven POM, using scala-xml literals. */
object WritePom {
  // NOTE(review): the bare `// parent`, `// licenses`, `// SCM`, etc. lines
  // inside the XML literal below are XML *text nodes*, not Scala comments —
  // they end up verbatim in the generated document. Confirm whether that is
  // intended (they read like placeholders for unimplemented sections).
  def project(proj: Project, packaging: Option[String]) = {
    // <dependency> element for `dep` in scope `config`;
    // empty version / scope elements are omitted
    def dependencyNode(config: String, dep: Dependency) = {
      <dependency>
        <groupId>{dep.module.organization}</groupId>
        <artifactId>{dep.module.name}</artifactId>
        {
          if (dep.version.isEmpty)
            Nil
          else
            Seq(<version>{dep.version}</version>)
        }
        {
          if (config.isEmpty)
            Nil
          else
            Seq(<scope>{config}</scope>)
        }
      </dependency>
    }
    <project>
      // parent
      <groupId>{proj.module.organization}</groupId>
      <artifactId>{proj.module.name}</artifactId>
      {
        packaging
          .map(p => <packaging>{p}</packaging>)
          .toSeq
      }
      <description>{proj.info.description}</description>
      <url>{proj.info.homePage}</url>
      <version>{proj.version}</version>
      // licenses
      <name>{proj.module.name}</name>
      <organization>
        <name>{proj.module.name}</name>
        <url>{proj.info.homePage}</url>
      </organization>
      // SCM
      // developers
      {
        if (proj.dependencies.isEmpty)
          Nil
        else
          <dependencies>{
            proj.dependencies.map {
              case (config, dep) =>
                dependencyNode(config, dep)
            }
          }</dependencies>
      }
      {
        if (proj.dependencyManagement.isEmpty)
          Nil
        else
          <dependencyManagement>
            <dependencies>{
              proj.dependencyManagement.map {
                case (config, dep) =>
                  dependencyNode(config, dep)
              }
            }</dependencies>
          </dependencyManagement>
      }
      // properties
      // repositories
    </project>
  }
}

View File

@ -1,25 +0,0 @@
package coursier.util
import java.util.{ Properties => JProperties }
/** Build-time properties of coursier itself, read from its properties resource. */
object Properties {
  // Best-effort load: if the resource is missing from the classpath, `props`
  // stays empty and the fields below are null (same observable behavior as
  // before, which swallowed the NullPointerException thrown by load(null)).
  private lazy val props = {
    val p = new JProperties()
    val streamOpt = Option(
      getClass
        .getClassLoader
        .getResourceAsStream("coursier/coursier.properties")
    )
    streamOpt.foreach { s =>
      try p.load(s)
      finally s.close() // the stream was previously never closed (leak)
    }
    p
  }
  /** coursier version, or null if the properties resource could not be read. */
  lazy val version = props.getProperty("version")
  /** Commit hash coursier was built from, or null if unavailable. */
  lazy val commitHash = props.getProperty("commit-hash")
}

View File

@ -1,74 +0,0 @@
package coursier
import coursier.util.{EitherT, Gather, Monad}
object Fetch {
  /** Fetches the content at an artifact's URL, or an error message. */
  type Content[F[_]] = Artifact => EitherT[F, String, String]
  // metadata lookup results: per (module, version), either errors or the project
  type MD = Seq[(
    (Module, String),
    Either[Seq[String], (Artifact.Source, Project)]
  )]
  /** Fetches project metadata for a batch of (module, version) pairs. */
  type Metadata[F[_]] = Seq[(Module, String)] => F[MD]
  /**
   * Try to find `module` among `repositories`.
   *
   * Look at `repositories` from the left, one-by-one, and stop at first success.
   * Else, return all errors, in the same order.
   *
   * The `version` field of the returned `Project` in case of success may not be
   * equal to the provided one, in case the latter is not a specific
   * version (e.g. version interval). Which version get chosen depends on
   * the repository implementation.
   */
  def find[F[_]](
    repositories: Seq[Repository],
    module: Module,
    version: String,
    fetch: Content[F]
  )(implicit
    F: Monad[F]
  ): EitherT[F, Seq[String], (Artifact.Source, Project)] = {
    val lookups = repositories
      .map(repo => repo -> repo.find(module, version, fetch).run)
    // try repositories left to right, short-circuiting on the first success;
    // errors are prepended as we go, so they accumulate in reverse order
    val task0 = lookups.foldLeft[F[Either[Seq[String], (Artifact.Source, Project)]]](F.point(Left(Nil))) {
      case (acc, (_, eitherProjTask)) =>
        F.bind(acc) {
          case Left(errors) =>
            F.map(eitherProjTask)(_.left.map(error => error +: errors))
          case res @ Right(_) =>
            F.point(res)
        }
    }
    // restore repository order in the error list
    val task = F.map(task0)(e => e.left.map(_.reverse): Either[Seq[String], (Artifact.Source, Project)])
    EitherT(task)
  }
  /**
   * Builds a `Metadata` function from `repositories`, looking modules up with
   * `fetch` first, then with each of `extra` in turn.
   */
  def from[F[_]](
    repositories: Seq[core.Repository],
    fetch: Content[F],
    extra: Content[F]*
  )(implicit
    F: Gather[F]
  ): Metadata[F] = {
    modVers =>
      F.map(
        F.gather {
          modVers.map {
            case (module, version) =>
              def get(fetch: Content[F]) = find(repositories, module, version, fetch)
              // explicit foldLeft rather than the `/:` operator, deprecated in newer Scala versions
              F.map(extra.foldLeft(get(fetch))(_ orElse get(_)).run)(d => (module, version) -> d)
          }
        }
      )(_.toSeq)
  }
}

View File

@ -1,135 +0,0 @@
package coursier.core
// Maven-specific
/** Conditions under which a Maven profile gets activated. */
final case class Activation(
  properties: Seq[(String, Option[String])],
  os: Activation.Os,
  jdk: Option[Either[VersionInterval, Seq[Version]]]
) {
  def isEmpty: Boolean = properties.isEmpty && os.isEmpty && jdk.isEmpty
  /**
   * Whether this activation matches the given properties, OS, and JDK version.
   * An empty activation never matches.
   */
  def isActive(
    currentProperties: Map[String, String],
    osInfo: Activation.Os,
    jdkVersion: Option[Version]
  ): Boolean = {
    // "!name" requires the property to be absent;
    // a "!value" requirement means any value but that one
    def fromProperties = properties.forall {
      case (name, valueOpt) =>
        if (name.startsWith("!"))
          currentProperties.get(name.drop(1)).isEmpty
        else
          currentProperties.get(name).exists { v =>
            valueOpt.forall { reqValue =>
              if (reqValue.startsWith("!"))
                v != reqValue.drop(1)
              else
                v == reqValue
            }
          }
    }
    def fromOs = os.isActive(osInfo)
    // JDK condition: either a version interval, or an explicit list of versions
    def fromJdk = jdk.forall {
      case Left(itv) =>
        jdkVersion.exists(itv.contains)
      case Right(versions) =>
        jdkVersion.exists(versions.contains)
    }
    !isEmpty && fromProperties && fromOs && fromJdk
  }
}
object Activation {
  /** OS-based activation conditions: architecture, families, name, version. */
  final case class Os(
    arch: Option[String],
    families: Set[String],
    name: Option[String],
    version: Option[String] // FIXME Could this be an interval?
  ) {
    def isEmpty: Boolean =
      arch.isEmpty && families.isEmpty && name.isEmpty && version.isEmpty
    def archMatch(current: Option[String]): Boolean =
      arch.forall(current.toSeq.contains) || {
        // seems required by org.nd4j:nd4j-native:0.5.0
        arch.toSeq.contains("x86-64") && current.toSeq.contains("x86_64")
      }
    /** Whether the OS described by `osInfo` satisfies all these conditions. */
    def isActive(osInfo: Os): Boolean =
      archMatch(osInfo.arch) &&
      families.forall { f =>
        if (Os.knownFamilies(f))
          osInfo.families.contains(f)
        else
          // unknown family name: fall back to a substring check on the OS name
          osInfo.name.exists(_.contains(f))
      } &&
      name.forall(osInfo.name.toSeq.contains) &&
      version.forall(osInfo.version.toSeq.contains)
  }
  object Os {
    val empty = Os(None, Set(), None, None)
    // below logic adapted from https://github.com/sonatype/plexus-utils/blob/f2beca21c75084986b49b3ab7b5f0f988021dcea/src/main/java/org/codehaus/plexus/util/Os.java
    // brought in https://github.com/coursier/coursier/issues/341 by @eboto
    private val standardFamilies = Set(
      "windows",
      "os/2",
      "netware",
      "mac",
      "os/400",
      "openvms"
    )
    private[Os] val knownFamilies = standardFamilies ++ Seq(
      "dos",
      "tandem",
      "unix",
      "win9x",
      "z/os"
    )
    /** Derives the set of OS families from an OS name and path separator. */
    def families(name: String, pathSep: String): Set[String] = {
      var families = standardFamilies.filter(f => name.indexOf(f) >= 0)
      if (pathSep == ";" && name.indexOf("netware") < 0)
        families += "dos"
      if (name.indexOf("nonstop_kernel") >= 0)
        families += "tandem"
      if (pathSep == ":" && name.indexOf("openvms") < 0 && (name.indexOf("mac") < 0 || name.endsWith("x")))
        families += "unix"
      if (name.indexOf("windows") >= 0 && (name.indexOf("95") >= 0 || name.indexOf("98") >= 0 || name.indexOf("me") >= 0 || name.indexOf("ce") >= 0))
        families += "win9x"
      if (name.indexOf("z/os") >= 0 || name.indexOf("os/390") >= 0)
        families += "z/os"
      families
    }
    /** Builds an Os from system properties ("os.name", "os.arch", ...), lower-cased. */
    def fromProperties(properties: Map[String, String]): Os = {
      val name = properties.get("os.name").map(_.toLowerCase)
      Os(
        properties.get("os.arch").map(_.toLowerCase),
        (for (n <- name; sep <- properties.get("path.separator"))
          yield families(n, sep)).getOrElse(Set()),
        name,
        properties.get("os.version").map(_.toLowerCase)
      )
    }
  }
  val empty = Activation(Nil, Os.empty, None)
}

View File

@ -1,263 +0,0 @@
package coursier.core
/**
* Identifies a "module".
*
* During resolution, all dependencies having the same module
* will be given the same version, if there are no version conflicts
* between them.
*
* Using the same terminology as Ivy.
*/
final case class Module(
  organization: String,
  name: String,
  attributes: Map[String, String]
) {

  /** Copy of this module with organization and name stripped of surrounding whitespace. */
  def trim: Module =
    copy(organization = organization.trim, name = name.trim)

  // attributes rendered as "k1=v1;k2=v2", keys sorted for a stable output
  private def attributesStr =
    attributes
      .toSeq
      .sortBy(_._1)
      .map { case (key, value) => s"$key=$value" }
      .mkString(";")

  /** Name, with ";k=v" attribute pairs appended when attributes are present. */
  def nameWithAttributes: String =
    if (attributes.isEmpty) name
    else s"$name;$attributesStr"

  override def toString: String =
    s"$organization:$nameWithAttributes"

  /** "organization:name", without attributes. */
  def orgName: String =
    s"$organization:$name"

  // cached hash code
  override final lazy val hashCode = Module.unapply(this).get.hashCode()
}
/**
* Dependencies with the same @module will typically see their @version-s merged.
*
* The remaining fields are left untouched, some being transitively
* propagated (exclusions, optional, in particular).
*/
final case class Dependency(
  module: Module,
  version: String,
  configuration: String,
  exclusions: Set[(String, String)],
  // Maven-specific
  attributes: Attributes,
  optional: Boolean,
  transitive: Boolean
) {
  // (module, version) pair, cached
  lazy val moduleVersion = (module, version)
  // cached hash code
  override lazy val hashCode = Dependency.unapply(this).get.hashCode()
  /** "org:name", with packaging / classifier appended when attributes are non-empty. */
  def mavenPrefix: String = {
    if (attributes.isEmpty)
      module.orgName
    else {
      s"${module.orgName}:${attributes.packagingAndClassifier}"
    }
  }
}
// Maven-specific
// Maven-specific
final case class Attributes(
  `type`: String,
  classifier: String
) {

  /** `type`, defaulting to "jar" when none was specified. */
  def packaging: String =
    if (`type`.nonEmpty) `type`
    else "jar"

  /** "packaging:classifier", omitting empty parts ("" when both are empty). */
  def packagingAndClassifier: String =
    if (isEmpty) ""
    else if (classifier.isEmpty) packaging
    else s"$packaging:$classifier"

  /** Publication with the given name / extension and this type / classifier. */
  def publication(name: String, ext: String): Publication =
    Publication(name, `type`, ext, classifier)

  def isEmpty: Boolean =
    `type`.isEmpty && classifier.isEmpty
}
/** Metadata of a project, i.e. a module at a given version. */
final case class Project(
  module: Module,
  version: String,
  // First String is configuration (scope for Maven)
  dependencies: Seq[(String, Dependency)],
  // For Maven, this is the standard scopes as an Ivy configuration
  configurations: Map[String, Seq[String]],
  // Maven-specific
  parent: Option[(Module, String)],
  dependencyManagement: Seq[(String, Dependency)],
  properties: Seq[(String, String)],
  profiles: Seq[Profile],
  versions: Option[Versions],
  snapshotVersioning: Option[SnapshotVersioning],
  packagingOpt: Option[String],
  /**
   * Optional exact version used to get this project metadata.
   * May not match `version` for projects having a wrong version in their metadata.
   */
  actualVersionOpt: Option[String],
  // First String is configuration
  publications: Seq[(String, Publication)],
  // Extra infos, not used during resolution
  info: Info
) {
  // (module, version) pair, cached
  lazy val moduleVersion = (module, version)
  /** All configurations that each configuration extends, including the ones it extends transitively */
  lazy val allConfigurations: Map[String, Set[String]] =
    Orders.allConfigurations(configurations)
  /**
   * Version used to get this project metadata if available, else the version from metadata.
   * May not match `version` for projects having a wrong version in their metadata, if the actual version was kept
   * around.
   */
  def actualVersion: String = actualVersionOpt.getOrElse(version)
}
/** Extra project info, not used during resolution */
final case class Info(
  description: String,
  homePage: String,
  // license name and optional URL — presumably; confirm against the POM parser
  licenses: Seq[(String, Option[String])],
  developers: Seq[Info.Developer],
  publication: Option[Versions.DateTime]
)
object Info {
  /** Developer entry, as found in project metadata. */
  final case class Developer(
    id: String,
    name: String,
    url: String
  )
  val empty = Info("", "", Nil, Nil, None)
}
// Maven-specific
/** Maven profile: extra dependencies / dependency management / properties, enabled via `activation`. */
final case class Profile(
  id: String,
  // whether the profile is active when no other activation applies
  activeByDefault: Option[Boolean],
  activation: Activation,
  dependencies: Seq[(String, Dependency)],
  dependencyManagement: Seq[(String, Dependency)],
  properties: Map[String, String]
)
// Maven-specific
/** Versions listing from Maven metadata. */
final case class Versions(
  latest: String,
  release: String,
  available: List[String],
  lastUpdated: Option[Versions.DateTime]
)
object Versions {
  /** Plain calendar date-time, with no time zone information. */
  final case class DateTime(
    year: Int,
    month: Int,
    day: Int,
    hour: Int,
    minute: Int,
    second: Int
  )
}
// Maven-specific
/** One entry of a Maven snapshot-versioning section. */
final case class SnapshotVersion(
  classifier: String,
  extension: String,
  value: String,
  updated: Option[Versions.DateTime]
)
// Maven-specific
/** Snapshot-versioning metadata of a module, from Maven metadata. */
final case class SnapshotVersioning(
  module: Module,
  version: String,
  latest: String,
  release: String,
  timestamp: String,
  buildNumber: Option[Int],
  localCopy: Option[Boolean],
  lastUpdated: Option[Versions.DateTime],
  snapshotVersions: Seq[SnapshotVersion]
)
// Ivy-specific
/** An Ivy publication: an artifact a module declares. */
final case class Publication(
  name: String,
  `type`: String,
  ext: String,
  classifier: String
) {
  def attributes: Attributes = Attributes(`type`, classifier)
}
/** A downloadable file, together with its checksum / extra-file URLs and metadata. */
final case class Artifact(
  url: String,
  checksumUrls: Map[String, String],
  extra: Map[String, Artifact],
  attributes: Attributes,
  changing: Boolean,
  authentication: Option[Authentication]
) {
  def `type`: String = attributes.`type`
  def classifier: String = attributes.classifier
  // TODO make that a proper field after 1.0 (instead of the hack via extra)
  def isOptional: Boolean = extra.contains(Artifact.optionalKey)
}
object Artifact {
  // marker key in `extra` flagging an artifact as optional
  private[coursier] val optionalKey = s"$$optional"
  /** Computes the artifacts of a dependency, given its project metadata. */
  trait Source {
    def artifacts(
      dependency: Dependency,
      project: Project,
      overrideClassifiers: Option[Seq[String]]
    ): Seq[Artifact]
  }
  object Source {
    /** Source yielding no artifacts. */
    val empty: Source = new Source {
      def artifacts(
        dependency: Dependency,
        project: Project,
        overrideClassifiers: Option[Seq[String]]
      ): Seq[Artifact] = Nil
    }
  }
}
/** User / password credentials; `toString` masks the password. */
final case class Authentication(
  user: String,
  password: String
) {
  override def toString: String =
    s"Authentication($user, *******)"
}

View File

@ -1,87 +0,0 @@
package coursier.core
/** Operations on sets of (organization, name) exclusions, where "*" is a wildcard. */
object Exclusions {

  /**
   * Splits `exclusions` into:
   *  - whether everything is excluded (("*", "*") is present),
   *  - organizations excluded wholesale (from ("org", "*") pairs),
   *  - names excluded wholesale (from ("*", "name") pairs),
   *  - the remaining specific (organization, name) pairs.
   */
  def partition(exclusions: Set[(String, String)]): (Boolean, Set[String], Set[String], Set[(String, String)]) = {
    val (wildcard, specific) = exclusions.partition {
      case (org, name) => org == "*" || name == "*"
    }
    val excludesAll = wildcard(one.head)
    val orgWildcards = wildcard.collect { case (org, "*") if org != "*" => org }
    val nameWildcards = wildcard.collect { case ("*", name) if name != "*" => name }
    (excludesAll, orgWildcards, nameWildcards, specific)
  }

  /** Predicate telling whether an (organization, name) pair survives `exclusions`. */
  def apply(exclusions: Set[(String, String)]): (String, String) => Boolean = {
    val (excludesAll, orgWildcards, nameWildcards, specific) = partition(exclusions)
    if (excludesAll)
      (_, _) => false
    else
      (org, name) =>
        !orgWildcards(org) && !nameWildcards(name) && !specific((org, name))
  }

  /** Drops the exclusions already implied by a wildcard one. */
  def minimize(exclusions: Set[(String, String)]): Set[(String, String)] = {
    val (excludesAll, orgWildcards, nameWildcards, specific) = partition(exclusions)
    if (excludesAll) one
    else {
      val keptSpecific = specific.filterNot {
        case (org, name) => orgWildcards(org) || nameWildcards(name)
      }
      orgWildcards.map((_, "*")) ++ nameWildcards.map(("*", _)) ++ keptSpecific
    }
  }

  /** Excludes nothing. */
  val zero = Set.empty[(String, String)]
  /** Excludes everything. */
  val one = Set(("*", "*"))

  /** Union: excludes what either side excludes, minimized. */
  def join(x: Set[(String, String)], y: Set[(String, String)]): Set[(String, String)] =
    minimize(x ++ y)

  /** Intersection: excludes only what both sides exclude. */
  def meet(x: Set[(String, String)], y: Set[(String, String)]): Set[(String, String)] = {
    val (xAll, xOrgs, xNames, xSpecific) = partition(x)
    val (yAll, yOrgs, yNames, ySpecific) = partition(y)

    if (xAll && yAll) one
    else {
      // a wildcard survives only if the other side excludes everything,
      // else only if both sides share it
      val orgWildcards =
        if (xAll) yOrgs
        else if (yAll) xOrgs
        else xOrgs intersect yOrgs
      val nameWildcards =
        if (xAll) yNames
        else if (yAll) xNames
        else xNames intersect yNames
      // a specific pair survives if the other side also excludes it,
      // via a wildcard or the very same pair
      val specific =
        xSpecific.filter { case (org, name) => yAll || yOrgs(org) || yNames(name) } ++
          ySpecific.filter { case (org, name) => xAll || xOrgs(org) || xNames(name) } ++
          (xSpecific intersect ySpecific)
      orgWildcards.map((_, "*")) ++ nameWildcards.map(("*", _)) ++ specific
    }
  }
}

View File

@ -1,188 +0,0 @@
package coursier.core
object Orders {
  /** A scala.math.PartialOrdering whose `lteq` is derived from `tryCompare`. */
  trait PartialOrdering[T] extends scala.math.PartialOrdering[T] {
    def lteq(x: T, y: T): Boolean =
      tryCompare(x, y)
        .exists(_ <= 0)
  }
  /** All configurations that each configuration extends, including the ones it extends transitively */
  def allConfigurations(configurations: Map[String, Seq[String]]): Map[String, Set[String]] = {
    def allParents(config: String): Set[String] = {
      // walks the extension graph; configurations absent from the map are
      // accumulated as-is (no parents to follow)
      def helper(configs: Set[String], acc: Set[String]): Set[String] =
        if (configs.isEmpty)
          acc
        else if (configs.exists(acc))
          helper(configs -- acc, acc)
        else if (configs.exists(!configurations.contains(_))) {
          val (remaining, notFound) = configs.partition(configurations.contains)
          helper(remaining, acc ++ notFound)
        } else {
          val extraConfigs = configs.flatMap(configurations)
          helper(extraConfigs, acc ++ configs)
        }
      helper(Set(config), Set.empty)
    }
    configurations
      .keys
      .toList
      .map(config => config -> (allParents(config) - config))
      .toMap
  }
  /**
   * Configurations partial order based on configuration mapping `configurations`.
   *
   * @param configurations: for each configuration, the configurations it directly extends.
   */
  def configurationPartialOrder(configurations: Map[String, Seq[String]]): PartialOrdering[String] =
    new PartialOrdering[String] {
      val allParentsMap = allConfigurations(configurations)
      // x < y when y (transitively) extends x
      def tryCompare(x: String, y: String) =
        if (x == y)
          Some(0)
        else if (allParentsMap.get(x).exists(_(y)))
          Some(-1)
        else if (allParentsMap.get(y).exists(_(x)))
          Some(1)
        else
          None
    }
  /** Non-optional < optional */
  val optionalPartialOrder: PartialOrdering[Boolean] =
    new PartialOrdering[Boolean] {
      def tryCompare(x: Boolean, y: Boolean) =
        Some(
          if (x == y) 0
          else if (x) 1
          else -1
        )
    }
  /**
   * Exclusions partial order.
   *
   * x <= y iff all that x excludes is also excluded by y.
   * x and y not related iff x excludes some elements not excluded by y AND
   * y excludes some elements not excluded by x.
   *
   * In particular, no exclusions <= anything <= Set(("*", "*"))
   */
  val exclusionsPartialOrder: PartialOrdering[Set[(String, String)]] =
    new PartialOrdering[Set[(String, String)]] {
      // None when the booleans are equal and false (undecided at this level)
      def boolCmp(a: Boolean, b: Boolean) = (a, b) match {
        case (true, true) => Some(0)
        case (true, false) => Some(1)
        case (false, true) => Some(-1)
        case (false, false) => None
      }
      def tryCompare(x: Set[(String, String)], y: Set[(String, String)]) = {
        val (xAll, xExcludeByOrg1, xExcludeByName1, xRemaining0) = Exclusions.partition(x)
        val (yAll, yExcludeByOrg1, yExcludeByName1, yRemaining0) = Exclusions.partition(y)
        boolCmp(xAll, yAll).orElse {
          // drop specific pairs already covered by either side's wildcards
          def filtered(e: Set[(String, String)]) =
            e.filter{case (org, name) =>
              !xExcludeByOrg1(org) && !yExcludeByOrg1(org) &&
              !xExcludeByName1(name) && !yExcludeByName1(name)
            }
          def removeIntersection[T](a: Set[T], b: Set[T]) =
            (a -- b, b -- a)
          def allEmpty(set: Set[_]*) = set.forall(_.isEmpty)
          val (xRemaining1, yRemaining1) =
            (filtered(xRemaining0), filtered(yRemaining0))
          val (xProperRemaining, yProperRemaining) =
            removeIntersection(xRemaining1, yRemaining1)
          val (onlyXExcludeByOrg, onlyYExcludeByOrg) =
            removeIntersection(xExcludeByOrg1, yExcludeByOrg1)
          val (onlyXExcludeByName, onlyYExcludeByName) =
            removeIntersection(xExcludeByName1, yExcludeByName1)
          // a side with nothing "proper" (nothing the other side lacks) is <= the other
          val (noXProper, noYProper) = (
            allEmpty(xProperRemaining, onlyXExcludeByOrg, onlyXExcludeByName),
            allEmpty(yProperRemaining, onlyYExcludeByOrg, onlyYExcludeByName)
          )
          boolCmp(noYProper, noXProper) // order matters
        }
      }
    }
  // replaces a "main(fallback)" configuration by whichever of main / fallback
  // is actually available among `configs`
  private def fallbackConfigIfNecessary(dep: Dependency, configs: Set[String]): Dependency =
    Parse.withFallbackConfig(dep.configuration) match {
      case Some((main, fallback)) =>
        val config0 =
          if (configs(main))
            main
          else if (configs(fallback))
            fallback
          else
            dep.configuration
        dep.copy(configuration = config0)
      case _ =>
        dep
    }
  /**
   * Assume all dependencies have same `module`, `version`, and `artifact`; see `minDependencies`
   * if they don't.
   */
  def minDependenciesUnsafe(
    dependencies: Set[Dependency],
    configs: Map[String, Seq[String]]
  ): Set[Dependency] = {
    val availableConfigs = configs.keySet
    // per (optional, configuration), keep one dependency whose exclusions are
    // the meet (intersection) of the group's exclusions
    val groupedDependencies = dependencies
      .map(fallbackConfigIfNecessary(_, availableConfigs))
      .groupBy(dep => (dep.optional, dep.configuration))
      .mapValues(deps => deps.head.copy(exclusions = deps.foldLeft(Exclusions.one)((acc, dep) => Exclusions.meet(acc, dep.exclusions))))
      .toList
    // dependencies dominated by another one — all three partial orders
    // (optional, configuration, exclusions) must agree on the direction
    val remove =
      for {
        List(((xOpt, xScope), xDep), ((yOpt, yScope), yDep)) <- groupedDependencies.combinations(2)
        optCmp <- optionalPartialOrder.tryCompare(xOpt, yOpt).iterator
        scopeCmp <- configurationPartialOrder(configs).tryCompare(xScope, yScope).iterator
        if optCmp*scopeCmp >= 0
        exclCmp <- exclusionsPartialOrder.tryCompare(xDep.exclusions, yDep.exclusions).iterator
        if optCmp*exclCmp >= 0
        if scopeCmp*exclCmp >= 0
        xIsMin = optCmp < 0 || scopeCmp < 0 || exclCmp < 0
        yIsMin = optCmp > 0 || scopeCmp > 0 || exclCmp > 0
        if xIsMin || yIsMin // should be always true, unless xDep == yDep, which shouldn't happen
      } yield if (xIsMin) yDep else xDep
    groupedDependencies.map(_._2).toSet -- remove
  }
  /**
   * Minified representation of `dependencies`.
   *
   * The returned set brings exactly the same things as `dependencies`, with no redundancy.
   */
  def minDependencies(
    dependencies: Set[Dependency],
    configs: ((Module, String)) => Map[String, Seq[String]]
  ): Set[Dependency] = {
    dependencies
      .groupBy(_.copy(configuration = "", exclusions = Set.empty, optional = false))
      .mapValues(deps => minDependenciesUnsafe(deps, configs(deps.head.moduleVersion)))
      .valuesIterator
      .fold(Set.empty)(_ ++ _)
  }
}

View File

@ -1,107 +0,0 @@
package coursier.core
import java.util.regex.Pattern.quote
import coursier.core.compatibility._
object Parse {
  /** Parses `s` as a version, rejecting empty or clearly malformed input. */
  def version(s: String): Option[Version] = {
    val trimmed = s.trim
    if (trimmed.isEmpty || trimmed.exists(c => c != '.' && c != '-' && c != '_' && !c.letterOrDigit)) None
    else Some(Version(trimmed))
  }
  // matches revisions with a '+' appended, e.g. "1.2.+", "1.2+" or "1.2.3-+"
  private val latestSubRevision = "(.*[^.-])[.-]?[+]".r
  /** Interprets an Ivy "latest sub-revision" pattern (e.g. "1.2.+") as a version interval. */
  def ivyLatestSubRevisionInterval(s: String): Option[VersionInterval] =
    s match {
      case latestSubRevision(prefix) =>
        for {
          from <- version(prefix)
          if from.rawItems.nonEmpty
          last <- Some(from.rawItems.last).collect { case n: Version.Numeric => n }
          // a bit loose, but should do the job
          if from.repr.endsWith(last.repr)
          // appending -a1 to the next version, so has not to include things like
          // nextVersion-RC1 in the interval - nothing like nextVersion* should be included
          to <- version(from.repr.stripSuffix(last.repr) + last.next.repr + "-a1")
          // the contrary would mean something went wrong in the loose substitution above
          if from.rawItems.init == to.rawItems.dropRight(2).init
          if to.rawItems.takeRight(2) == Seq(Version.Literal("a"), Version.Number(1))
        } yield VersionInterval(Some(from), Some(to), fromIncluded = true, toIncluded = false)
      case _ =>
        None
    }
  /** Parses a Maven-style version interval, e.g. "[1.0,2.0)", "(,1.0]", or "[1.0]". */
  def versionInterval(s: String): Option[VersionInterval] = {
    // `s` here is the interval content, brackets already stripped
    def parseBounds(fromIncluded: Boolean, toIncluded: Boolean, s: String) = {
      val commaIdx = s.indexOf(',')
      if (commaIdx >= 0) {
        val strFrom = s.take(commaIdx)
        val strTo = s.drop(commaIdx + 1)
        for {
          from <- if (strFrom.isEmpty) Some(None) else version(strFrom).map(Some(_))
          to <- if (strTo.isEmpty) Some(None) else version(strTo).map(Some(_))
        } yield VersionInterval(from.filterNot(_.isEmpty), to.filterNot(_.isEmpty), fromIncluded, toIncluded)
      } else if (s.nonEmpty && fromIncluded && toIncluded)
        // no comma: "[1.0]" means exactly that version
        for (v <- version(s) if !v.isEmpty)
          yield VersionInterval(Some(v), Some(v), fromIncluded, toIncluded)
      else
        None
    }
    for {
      fromIncluded <- if (s.startsWith("[")) Some(true) else if (s.startsWith("(")) Some(false) else None
      toIncluded <- if (s.endsWith("]")) Some(true) else if (s.endsWith(")")) Some(false) else None
      s0 = s.drop(1).dropRight(1)
      itv <- parseBounds(fromIncluded, toIncluded, s0)
    } yield itv
  }
  // splits on the commas between intervals, i.e. those sitting between a
  // closing "]" / ")" and an opening "[" / "("
  private val multiVersionIntervalSplit = ("(?" + regexLookbehind + "[" + quote("])") + "]),(?=[" + quote("([") + "])").r
  /** Parses a union of version intervals, keeping only the last interval. */
  def multiVersionInterval(s: String): Option[VersionInterval] = {
    // TODO Use a full-fledged (fastparsed-based) parser for this and versionInterval above
    val openCount = s.count(c => c == '[' || c == '(')
    val closeCount = s.count(c => c == ']' || c == ')')
    if (openCount == closeCount && openCount >= 1)
      versionInterval(multiVersionIntervalSplit.split(s).last)
    else
      None
  }
  /** Parses `s` as a version constraint: empty means anything, else an interval or a preferred version. */
  def versionConstraint(s: String): Option[VersionConstraint] = {
    def noConstraint = if (s.isEmpty) Some(VersionConstraint.all) else None
    noConstraint
      .orElse(ivyLatestSubRevisionInterval(s).map(VersionConstraint.interval))
      .orElse(version(s).map(VersionConstraint.preferred))
      .orElse(versionInterval(s).orElse(multiVersionInterval(s)).map(VersionConstraint.interval))
  }
  // matches "main(fallback)" configurations, capturing both parts
  val fallbackConfigRegex = {
    val noPar = "([^" + quote("()") + "]*)"
    "^" + noPar + quote("(") + noPar + quote(")") + "$"
  }.r
  /** Splits a "main(fallback)" configuration into main and fallback parts, if it has that shape. */
  def withFallbackConfig(config: String): Option[(String, String)] =
    Parse.fallbackConfigRegex.findAllMatchIn(config).toSeq match {
      case Seq(m) =>
        assert(m.groupCount == 2)
        val main = config.substring(m.start(1), m.end(1))
        val fallback = config.substring(m.start(2), m.end(2))
        Some((main, fallback))
      case _ =>
        None
    }
}

View File

@ -1,51 +0,0 @@
package coursier.core
import coursier.Fetch
import coursier.core.compatibility.encodeURIComponent
import coursier.maven.MavenSource
import coursier.util.{EitherT, Monad}
/** Somewhere project metadata can be looked up. */
trait Repository extends Product with Serializable {
  /** Looks up `module` at `version` in this repository, fetching content with `fetch`. */
  def find[F[_]](
    module: Module,
    version: String,
    fetch: Fetch.Content[F]
  )(implicit
    F: Monad[F]
  ): EitherT[F, String, (Artifact.Source, Project)]
}
object Repository {
  implicit class ArtifactExtensions(val underlying: Artifact) extends AnyVal {
    /** Adds the standard MD5 / SHA-1 / SHA-256 checksum URLs to the artifact. */
    def withDefaultChecksums: Artifact =
      underlying.copy(checksumUrls = underlying.checksumUrls ++ Seq(
        "MD5" -> (underlying.url + ".md5"),
        "SHA-1" -> (underlying.url + ".sha1"),
        "SHA-256" -> (underlying.url + ".sha256")
      ))
    /** Adds a signature (.asc) extra artifact, itself carrying default checksum URLs. */
    def withDefaultSignature: Artifact = {
      val underlyingExt =
        if (underlying.attributes.`type`.isEmpty)
          "jar"
        else
          // TODO move MavenSource.typeExtension elsewhere
          MavenSource.typeExtension(underlying.attributes.`type`)
      underlying.copy(extra = underlying.extra ++ Seq(
        "sig" ->
          Artifact(
            underlying.url + ".asc",
            Map.empty,
            Map.empty,
            Attributes(s"$underlyingExt.asc", ""),
            changing = underlying.changing,
            authentication = underlying.authentication
          )
          .withDefaultChecksums
      ))
    }
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,205 +0,0 @@
package coursier
package core
import coursier.util.Monad
import scala.annotation.tailrec
/**
 * State machine driving a resolution: repeatedly fetch missing metadata and
 * feed it back into the resolution, until done (or the iteration bound).
 */
sealed abstract class ResolutionProcess {
  /**
   * Runs the resolution to completion.
   *
   * @param maxIterations safety bound on the number of steps; a negative value means no bound
   */
  def run[F[_]](
    fetch: Fetch.Metadata[F],
    maxIterations: Int = ResolutionProcess.defaultMaxIterations
  )(implicit
    F: Monad[F]
  ): F[Resolution] =
    if (maxIterations == 0) F.point(current)
    else {
      val maxIterations0 =
        if (maxIterations > 0) maxIterations - 1 else maxIterations
      this match {
        case Done(res) =>
          F.point(res)
        case missing0 @ Missing(missing, _, _) =>
          F.bind(ResolutionProcess.fetchAll(missing, fetch))(result =>
            missing0.next(result).run(fetch, maxIterations0)
          )
        case cont @ Continue(_, _) =>
          cont
            .nextNoCont
            .run(fetch, maxIterations0)
      }
    }
  /**
   * Performs one step of the resolution, fetching metadata when needed.
   *
   * @param fastForward collapse consecutive Continue states into a single step
   */
  @tailrec
  final def next[F[_]](
    fetch: Fetch.Metadata[F],
    fastForward: Boolean = true
  )(implicit
    F: Monad[F]
  ): F[ResolutionProcess] =
    this match {
      case Done(_) =>
        F.point(this)
      case missing0 @ Missing(missing, _, _) =>
        F.map(ResolutionProcess.fetchAll(missing, fetch))(result => missing0.next(result))
      case cont @ Continue(_, _) =>
        if (fastForward)
          cont.nextNoCont.next(fetch, fastForward = fastForward)
        else
          F.point(cont.next)
    }
  /** Current resolution state. */
  def current: Resolution
}
/** Resolution step: metadata for the modules in `missing` still has to be fetched. */
final case class Missing(
  missing: Seq[(Module, String)],
  current: Resolution,
  cont: Resolution => ResolutionProcess
) extends ResolutionProcess {
  /** Feeds the fetch `results` for `missing` back into the resolution. */
  def next(results: Fetch.MD): ResolutionProcess = {
    val errors = results.collect {
      case (modVer, Left(errs)) =>
        modVer -> errs
    }
    val successes = results.collect {
      case (modVer, Right(repoProj)) =>
        modVer -> repoProj
    }
    def cont0(res: Resolution): ResolutionProcess = {
      // only consider fetched projects not already in the project cache
      val remainingSuccesses = successes.filter {
        case (modVer, _) =>
          !res.projectCache.contains(modVer)
      }
      // modules each project still needs for its dependency management
      // (presumably parent POMs / imports — see Resolution.dependencyManagementMissing)
      val depMgmtMissing0 = remainingSuccesses.map {
        case elem @ (_, (_, proj)) =>
          elem -> res.dependencyManagementMissing(proj)
      }
      val depMgmtMissing = depMgmtMissing0.map(_._2).fold(Set.empty)(_ ++ _) -- results.map(_._1)
      if (depMgmtMissing.isEmpty) {
        type Elem = ((Module, String), (Artifact.Source, Project))
        val modVer = depMgmtMissing0.map(_._1._1).toSet
        // orders projects so that the ones others depend on (for dependency
        // management) enter the cache first
        @tailrec
        def order(map: Map[Elem, Set[(Module, String)]], acc: List[Elem]): List[Elem] =
          if (map.isEmpty)
            acc.reverse
          else {
            val min = map.map(_._2.size).min // should be 0
            val (toAdd, remaining) = map.partition {
              case (_, v) => v.size == min
            }
            val acc0 = toAdd.keys.foldLeft(acc)(_.::(_))
            val remainingKeys = remaining.keySet.map(_._1)
            val map0 = remaining.map {
              case (k, v) =>
                k -> v.intersect(remainingKeys)
            }
            order(map0, acc0)
          }
        val orderedSuccesses = order(depMgmtMissing0.map { case (k, v) => k -> v.intersect(modVer) }.toMap, Nil)
        val res0 = orderedSuccesses.foldLeft(res) {
          case (acc, (modVer0, (source, proj))) =>
            acc.addToProjectCache(
              modVer0 -> (source, proj)
            )
        }
        Continue(res0, cont)
      } else
        // more metadata to fetch before the projects can be cached
        Missing(depMgmtMissing.toSeq, res, cont0)
    }
    // record the errors first, then process the successes
    val current0 = current.copyWithCache(
      errorCache = current.errorCache ++ errors
    )
    cont0(current0)
  }
}
/**
 * Resolution step with no pending fetches: just a continuation to run
 * against the current resolution state.
 */
final case class Continue(
  current: Resolution,
  cont: Resolution => ResolutionProcess
) extends ResolutionProcess {

  /** Runs the continuation once. */
  def next: ResolutionProcess = cont(current)

  /**
   * Runs continuations repeatedly until the process is no longer a
   * [[Continue]] (i.e. until a [[Missing]] or [[Done]] shows up).
   */
  def nextNoCont: ResolutionProcess = {
    var proc: ResolutionProcess = this
    var stepping = true
    while (stepping)
      proc match {
        case c: Continue => proc = c.next
        case _ => stepping = false
      }
    proc
  }
}
/** Terminal resolution step: nothing left to fetch or continue. */
final case class Done(resolution: Resolution) extends ResolutionProcess {
  def current: Resolution = resolution
}
object ResolutionProcess {

  // Cap on resolution iterations, guarding against endless fetch loops.
  def defaultMaxIterations: Int = 100

  /**
   * Wraps a [[Resolution]] into a process: [[Done]] if nothing is missing,
   * else a [[Missing]] step whose continuation re-applies this method.
   */
  def apply(resolution: Resolution): ResolutionProcess = {
    val resolution0 = resolution.nextIfNoMissing

    if (resolution0.isDone)
      Done(resolution0)
    else
      Missing(resolution0.missingFromCache.toSeq, resolution0, apply)
  }

  /**
   * Fetches the metadata of all of `modVers`, batching so that each batch
   * contains at most one version per module (the highest one first), and
   * concatenating the per-batch results in order.
   */
  private[coursier] def fetchAll[F[_]](
    modVers: Seq[(Module, String)],
    fetch: Fetch.Metadata[F]
  )(implicit F: Monad[F]): F[Vector[((Module, String), Either[Seq[String], (Artifact.Source, Project)])]] = {

    // Lazily yields batches where every module appears at most once; the
    // leftover (lower) versions of duplicated modules go to later batches.
    def uniqueModules(modVers: Seq[(Module, String)]): Stream[Seq[(Module, String)]] = {

      val res = modVers.groupBy(_._1).toSeq.map(_._2).map {
        case Seq(v) => (v, Nil)
        case Seq() => sys.error("Cannot happen")
        case v =>
          // there might be version intervals in there, but that shouldn't matter...
          val res = v.maxBy { case (_, v0) => Version(v0) }
          (res, v.filter(_ != res))
      }

      val other = res.flatMap(_._2)

      if (other.isEmpty)
        Stream(modVers)
      else {
        val missing0 = res.map(_._1)
        missing0 #:: uniqueModules(other)
      }
    }

    // Sequence the batches through F, accumulating the results.
    uniqueModules(modVers)
      .toVector
      .foldLeft(F.point(Vector.empty[((Module, String), Either[Seq[String], (Artifact.Source, Project)])])) {
        (acc, l) =>
          F.bind(acc) { v =>
            F.map(fetch(l)) { e =>
              v ++ e
            }
          }
      }
  }
}

View File

@ -1,249 +0,0 @@
package coursier.core
import scala.annotation.tailrec
import coursier.core.compatibility._
/**
 * Used internally by Resolver.
 *
 * Same kind of ordering as aether-util/src/main/java/org/eclipse/aether/util/version/GenericVersion.java
 */
final case class Version(repr: String) extends Ordered[Version] {
  // Parsed and post-processed items, used for comparisons.
  lazy val items = Version.items(repr)
  // Raw token sequence, without the post-processing applied to `items`.
  lazy val rawItems: Seq[Version.Item] = {
    val (first, tokens) = Version.Tokenizer(repr)
    first +: tokens.toVector.map { case (_, item) => item }
  }
  def compare(other: Version) = Version.listCompare(items, other.items)
  def isEmpty = items.forall(_.isEmpty)
}
object Version {

  /**
   * A single component of a parsed version.
   *
   * Items of different kinds are first compared via `compareToEmpty`
   * (relation to the empty/zero item), then via `order`.
   */
  sealed abstract class Item extends Ordered[Item] {
    def compare(other: Item): Int =
      (this, other) match {
        case (Number(a), Number(b)) => a.compare(b)
        case (BigNumber(a), BigNumber(b)) => a.compare(b)
        case (Number(a), BigNumber(b)) => -b.compare(a)
        case (BigNumber(a), Number(b)) => a.compare(b)
        case (Qualifier(_, a), Qualifier(_, b)) => a.compare(b)
        case (Literal(a), Literal(b)) => a.compareToIgnoreCase(b)
        case (BuildMetadata(_), BuildMetadata(_)) =>
          // Semver § 10: two versions that differ only in the build metadata, have the same precedence.
          // Might introduce some non-determinism though :-/
          0
        case _ =>
          val rel0 = compareToEmpty
          val rel1 = other.compareToEmpty
          if (rel0 == rel1) order.compare(other.order)
          else rel0.compare(rel1)
      }

    // Tie-break rank between item kinds.
    def order: Int
    def isEmpty: Boolean = compareToEmpty == 0
    // Sign of this item compared to the empty item (defaults to "greater").
    def compareToEmpty: Int = 1
  }

  /** Numeric item: plain Int or BigInt-backed. */
  sealed abstract class Numeric extends Item {
    def repr: String
    def next: Numeric
  }
  final case class Number(value: Int) extends Numeric {
    val order = 0
    def next: Number = Number(value + 1)
    def repr: String = value.toString
    override def compareToEmpty = value.compare(0)
  }
  final case class BigNumber(value: BigInt) extends Numeric {
    val order = 0
    def next: BigNumber = BigNumber(value + 1)
    def repr: String = value.toString
    override def compareToEmpty = value.compare(0)
  }
  // Known qualifier (alpha, beta, ...); `level` positions it around releases.
  final case class Qualifier(value: String, level: Int) extends Item {
    val order = -2
    override def compareToEmpty = level.compare(0)
  }
  // Unrecognized alphanumeric token.
  final case class Literal(value: String) extends Item {
    val order = -1
    override def compareToEmpty = if (value.isEmpty) 0 else 1
  }
  // Semver build metadata (everything after '+').
  final case class BuildMetadata(value: String) extends Item {
    val order = 1
    override def compareToEmpty = if (value.isEmpty) 0 else 1
  }

  // Sentinels sorting below / above everything else.
  case object Min extends Item {
    val order = -8
    override def compareToEmpty = -1
  }
  case object Max extends Item {
    val order = 8
  }

  val empty = Number(0)

  val qualifiers = Seq[Qualifier](
    Qualifier("alpha", -5),
    Qualifier("beta", -4),
    Qualifier("milestone", -3),
    Qualifier("cr", -2),
    Qualifier("rc", -2),
    Qualifier("snapshot", -1),
    Qualifier("ga", 0),
    Qualifier("final", 0),
    Qualifier("sp", 1)
  )

  val qualifiersMap = qualifiers.map(q => q.value -> q).toMap

  /** Splits a version string into items and the separators between them. */
  object Tokenizer {
    sealed abstract class Separator
    case object Dot extends Separator
    case object Hyphen extends Separator
    case object Underscore extends Separator
    case object Plus extends Separator
    case object None extends Separator

    /** Returns the first item and a lazy stream of (separator, item) pairs. */
    def apply(s: String): (Item, Stream[(Separator, Item)]) = {
      // Reads one item (digits, letters, or other chars) off the stream.
      def parseItem(s: Stream[Char]): (Item, Stream[Char]) = {
        if (s.isEmpty) (empty, s)
        else if (s.head.isDigit) {
          def digits(b: StringBuilder, s: Stream[Char]): (String, Stream[Char]) =
            if (s.isEmpty || !s.head.isDigit) (b.result(), s)
            else digits(b + s.head, s.tail)

          val (digits0, rem) = digits(new StringBuilder, s)
          val item =
            // 10+ digits may overflow Int, fall back to BigInt
            if (digits0.length >= 10) BigNumber(BigInt(digits0))
            else Number(digits0.toInt)
          (item, rem)
        } else if (s.head.letter) {
          def letters(b: StringBuilder, s: Stream[Char]): (String, Stream[Char]) =
            if (s.isEmpty || !s.head.letter)
              (b.result().toLowerCase, s) // not specifying a Locale (error with scala js)
            else
              letters(b + s.head, s.tail)

          val (letters0, rem) = letters(new StringBuilder, s)
          val item =
            qualifiersMap.getOrElse(letters0, Literal(letters0))
          (item, rem)
        } else {
          val (sep, _) = parseSeparator(s)
          if (sep == None) {
            def other(b: StringBuilder, s: Stream[Char]): (String, Stream[Char]) =
              if (s.isEmpty || s.head.isLetterOrDigit || parseSeparator(s)._1 != None)
                (b.result().toLowerCase, s) // not specifying a Locale (error with scala js)
              else
                other(b + s.head, s.tail)

            val (item, rem0) = other(new StringBuilder, s)
            (Literal(item), rem0)
          } else
            (empty, s)
        }
      }

      // Reads one separator char, or None if the head is not a separator.
      def parseSeparator(s: Stream[Char]): (Separator, Stream[Char]) = {
        assert(s.nonEmpty)
        s.head match {
          case '.' => (Dot, s.tail)
          case '-' => (Hyphen, s.tail)
          case '_' => (Underscore, s.tail)
          case '+' => (Plus, s.tail)
          case _ => (None, s)
        }
      }

      // Lazily tokenizes the rest; '+' swallows everything after it as
      // build metadata.
      def helper(s: Stream[Char]): Stream[(Separator, Item)] = {
        if (s.isEmpty) Stream()
        else {
          val (sep, rem0) = parseSeparator(s)
          sep match {
            case Plus =>
              Stream((sep, BuildMetadata(rem0.mkString)))
            case _ =>
              val (item, rem) = parseItem(rem0)
              (sep, item) #:: helper(rem)
          }
        }
      }

      val (first, rem) = parseItem(s.toStream)
      (first, helper(rem))
    }
  }

  /**
   * Normalizes a token stream: drops trailing ".0" runs before qualifiers,
   * maps min/max literals to the [[Min]]/[[Max]] sentinels, and expands the
   * short a/b/m qualifiers when directly followed by a number.
   */
  def postProcess(prevIsNumeric: Option[Boolean], item: Item, tokens0: Stream[(Tokenizer.Separator, Item)]): Stream[Item] = {
    val tokens = {
      var _tokens = tokens0
      if (isNumeric(item)) {
        val nextNonDotZero = _tokens.dropWhile{case (Tokenizer.Dot, n: Numeric) => n.isEmpty; case _ => false }
        if (nextNonDotZero.forall(t => t._1 == Tokenizer.Hyphen || ((t._1 == Tokenizer.Dot || t._1 == Tokenizer.None) && !isNumeric(t._2)))) { // Dot && isNumeric(t._2)
          _tokens = nextNonDotZero
        }
      }
      _tokens
    }

    def ifFollowedByNumberElse(ifFollowedByNumber: Item, default: Item) = {
      val followedByNumber = tokens.headOption
        .exists{ case (Tokenizer.None, num: Numeric) if !num.isEmpty => true; case _ => false }
      if (followedByNumber) ifFollowedByNumber
      else default
    }

    def next =
      if (tokens.isEmpty) Stream()
      else postProcess(Some(isNumeric(item)), tokens.head._2, tokens.tail)

    item match {
      case Literal("min") => Min #:: next
      case Literal("max") => Max #:: next
      case Literal("a") =>
        ifFollowedByNumberElse(qualifiersMap("alpha"), item) #:: next
      case Literal("b") =>
        ifFollowedByNumberElse(qualifiersMap("beta"), item) #:: next
      case Literal("m") =>
        ifFollowedByNumberElse(qualifiersMap("milestone"), item) #:: next
      case _ =>
        item #:: next
    }
  }

  def isNumeric(item: Item) = item match { case _: Numeric => true; case _ => false }

  /** Tokenizes and post-processes `repr` into comparable items. */
  def items(repr: String): List[Item] = {
    val (first, tokens) = Tokenizer(repr)
    postProcess(None, first, tokens).toList
  }

  /**
   * Lexicographic comparison; a shorter list is padded conceptually with
   * empty items, so trailing empty items never affect the result.
   */
  @tailrec
  def listCompare(first: List[Item], second: List[Item]): Int = {
    if (first.isEmpty && second.isEmpty) 0
    else if (first.isEmpty) {
      assert(second.nonEmpty)
      -second.dropWhile(_.isEmpty).headOption.fold(0)(_.compareToEmpty)
    } else if (second.isEmpty) {
      assert(first.nonEmpty)
      first.dropWhile(_.isEmpty).headOption.fold(0)(_.compareToEmpty)
    } else {
      val rel = first.head.compare(second.head)
      if (rel == 0) listCompare(first.tail, second.tail)
      else rel
    }
  }
}

View File

@ -1,137 +0,0 @@
package coursier.core
/**
 * A version interval, with optionally open/absent bounds.
 *
 * `from`/`to` of `None` mean unbounded on that side; `fromIncluded` /
 * `toIncluded` tell whether each bound belongs to the interval.
 */
final case class VersionInterval(
  from: Option[Version],
  to: Option[Version],
  fromIncluded: Boolean,
  toIncluded: Boolean
) {

  /**
   * Whether the interval is non-empty and well-formed: from <= to (equality
   * only when both bounds are included), and an absent bound must be
   * exclusive.
   */
  def isValid: Boolean = {
    val fromToOrder =
      for {
        f <- from
        t <- to
        cmd = f.compare(t)
      } yield cmd < 0 || (cmd == 0 && fromIncluded && toIncluded)

    fromToOrder.forall(x => x) && (from.nonEmpty || !fromIncluded) && (to.nonEmpty || !toIncluded)
  }

  /** Whether `version` lies within the interval bounds. */
  def contains(version: Version): Boolean = {
    val fromCond =
      from.forall { from0 =>
        val cmp = from0.compare(version)
        cmp < 0 || cmp == 0 && fromIncluded
      }
    // lazy: skip the upper-bound check if the lower one already fails
    lazy val toCond =
      to.forall { to0 =>
        val cmp = version.compare(to0)
        cmp < 0 || cmp == 0 && toIncluded
      }

    fromCond && toCond
  }

  /**
   * Intersects this interval with `other`, keeping the tighter bound on each
   * side; returns `None` if the intersection is empty/invalid.
   */
  def merge(other: VersionInterval): Option[VersionInterval] = {
    val (newFrom, newFromIncluded) =
      (from, other.from) match {
        case (Some(a), Some(b)) =>
          val cmp = a.compare(b)

          if (cmp < 0) (Some(b), other.fromIncluded)
          else if (cmp > 0) (Some(a), fromIncluded)
          else (Some(a), fromIncluded && other.fromIncluded)

        case (Some(a), None) => (Some(a), fromIncluded)
        case (None, Some(b)) => (Some(b), other.fromIncluded)
        case (None, None) => (None, false)
      }

    val (newTo, newToIncluded) =
      (to, other.to) match {
        case (Some(a), Some(b)) =>
          val cmp = a.compare(b)

          if (cmp < 0) (Some(a), toIncluded)
          else if (cmp > 0) (Some(b), other.toIncluded)
          else (Some(a), toIncluded && other.toIncluded)

        case (Some(a), None) => (Some(a), toIncluded)
        case (None, Some(b)) => (Some(b), other.toIncluded)
        case (None, None) => (None, false)
      }

    Some(VersionInterval(newFrom, newTo, newFromIncluded, newToIncluded))
      .filter(_.isValid)
  }

  /**
   * Lifts this interval into a [[VersionConstraint]]; the special shape
   * "[v,)" is treated as a preferred-version constraint.
   */
  def constraint: VersionConstraint =
    this match {
      case VersionInterval.zero => VersionConstraint.all
      case VersionInterval(Some(version), None, true, false) => VersionConstraint.preferred(version)
      case itv => VersionConstraint.interval(itv)
    }

  /** Maven-style textual form, e.g. "[1.0,2.0)". */
  def repr: String = Seq(
    if (fromIncluded) "[" else "(",
    from.map(_.repr).mkString,
    ",",
    to.map(_.repr).mkString,
    if (toIncluded) "]" else ")"
  ).mkString
}
object VersionInterval {
  // Fully open interval: matches any version.
  val zero = VersionInterval(None, None, fromIncluded = false, toIncluded = false)
}
/**
 * A version constraint: an interval plus optional preferred versions.
 */
final case class VersionConstraint(
  interval: VersionInterval,
  preferred: Seq[Version]
) {
  /**
   * Reduces the constraint to either an interval or a single version:
   * the highest preferred version inside the interval if any, else the
   * interval itself; `None` if the interval is invalid.
   */
  def blend: Option[Either[VersionInterval, Version]] =
    if (interval.isValid) {
      val preferredInInterval = preferred.filter(interval.contains)

      if (preferredInInterval.isEmpty)
        Some(Left(interval))
      else
        Some(Right(preferredInInterval.max))
    } else
      None

  /** Textual form of the blended constraint ("" for the open interval). */
  def repr: Option[String] =
    blend.map {
      case Left(itv) =>
        if (itv == VersionInterval.zero)
          ""
        else
          itv.repr
      case Right(v) => v.repr
    }
}
object VersionConstraint {

  /** Constraint pinning a single preferred version, with no interval bound. */
  def preferred(version: Version): VersionConstraint =
    VersionConstraint(VersionInterval.zero, Seq(version))

  /** Constraint made of an interval only, with no preferred version. */
  def interval(interval: VersionInterval): VersionConstraint =
    VersionConstraint(interval, Nil)

  /** Matches any version. */
  val all = VersionConstraint(VersionInterval.zero, Nil)

  /**
   * Merges several constraints into one, if possible.
   *
   * Intervals are intersected pairwise, starting from the open interval;
   * `None` is returned as soon as an intersection becomes invalid.
   * Preferred versions are concatenated and de-duplicated.
   */
  def merge(constraints: VersionConstraint*): Option[VersionConstraint] = {

    val intervals = constraints.map(_.interval)

    // foldLeft rather than the deprecated `/:` symbolic operator
    // (removed in Scala 2.13)
    val intervalOpt =
      intervals.foldLeft(Option(VersionInterval.zero)) {
        case (acc, itv) =>
          acc.flatMap(_.merge(itv))
      }

    for (interval <- intervalOpt) yield {
      val preferreds = constraints.flatMap(_.preferred).distinct
      VersionConstraint(interval, preferreds)
    }
  }
}

View File

@ -1,343 +0,0 @@
package coursier.ivy
import coursier.Fetch
import coursier.core._
import coursier.util.{EitherT, Monad, WebPage}
/**
 * Repository whose layout is described by Ivy-style patterns.
 *
 * `pattern` locates artifacts, `metadataPatternOpt` (when set) locates
 * ivy.xml metadata separately; `changing`, when set, forces the changing
 * flag of generated artifacts instead of inferring it from "-SNAPSHOT".
 */
final case class IvyRepository(
  pattern: Pattern,
  metadataPatternOpt: Option[Pattern],
  changing: Option[Boolean],
  withChecksums: Boolean,
  withSignatures: Boolean,
  withArtifacts: Boolean,
  // hack for SBT putting infos in properties
  dropInfoAttributes: Boolean,
  authentication: Option[Authentication]
) extends Repository {

  def metadataPattern: Pattern = metadataPatternOpt.getOrElse(pattern)

  // Prefix of the metadata pattern up to (excluding) the [revision]
  // variable - used to list available revisions, when possible.
  lazy val revisionListingPatternOpt: Option[Pattern] = {
    val idx = metadataPattern.chunks.indexWhere { chunk =>
      chunk == Pattern.Chunk.Var("revision")
    }

    if (idx < 0)
      None
    else
      Some(Pattern(metadataPattern.chunks.take(idx)))
  }

  import Repository._

  // See http://ant.apache.org/ivy/history/latest-milestone/concept.html for a
  // list of variables that should be supported.
  // Some are missing (branch, conf, originalName).
  private def variables(
    module: Module,
    versionOpt: Option[String],
    `type`: String,
    artifact: String,
    ext: String,
    classifierOpt: Option[String]
  ) =
    Map(
      "organization" -> module.organization,
      "organisation" -> module.organization,
      "orgPath" -> module.organization.replace('.', '/'),
      "module" -> module.name,
      "type" -> `type`,
      "artifact" -> artifact,
      "ext" -> ext
    ) ++
    module.attributes ++
    classifierOpt.map("classifier" -> _).toSeq ++
    versionOpt.map("revision" -> _).toSeq

  // Derives artifacts from a project's publications, filtered by the
  // dependency's classifier / type / configuration.
  val source: Artifact.Source =
    if (withArtifacts)
      new Artifact.Source {
        def artifacts(
          dependency: Dependency,
          project: Project,
          overrideClassifiers: Option[Seq[String]]
        ) = {

          val retained =
            overrideClassifiers match {
              case None =>
                // FIXME Some duplication with what's done in MavenSource
                if (dependency.attributes.classifier.nonEmpty)
                  // FIXME We're ignoring dependency.attributes.`type` in this case
                  project.publications.collect {
                    case (_, p) if p.classifier == dependency.attributes.classifier =>
                      p
                  }
                else if (dependency.attributes.`type`.nonEmpty)
                  project.publications.collect {
                    case (conf, p)
                      if (conf == "*" ||
                          conf == dependency.configuration ||
                          project.allConfigurations.getOrElse(dependency.configuration, Set.empty).contains(conf)) &&
                        (
                          p.`type` == dependency.attributes.`type` ||
                          (p.ext == dependency.attributes.`type` && project.packagingOpt.toSeq.contains(p.`type`)) // wow
                        ) =>
                      p
                  }
                else
                  project.publications.collect {
                    case (conf, p)
                      if conf == "*" ||
                         conf == dependency.configuration ||
                         project.allConfigurations.getOrElse(dependency.configuration, Set.empty).contains(conf) =>
                      p
                  }
              case Some(classifiers) =>
                val classifiersSet = classifiers.toSet
                project.publications.collect {
                  case (_, p) if classifiersSet(p.classifier) =>
                    p
                }
            }

          // Substitute each publication into the artifact pattern to get
          // its URL.
          val retainedWithUrl = retained.distinct.flatMap { p =>
            pattern.substituteVariables(variables(
              dependency.module,
              Some(project.actualVersion),
              p.`type`,
              p.name,
              p.ext,
              Some(p.classifier).filter(_.nonEmpty)
            )).right.toSeq.toList.map(p -> _) // FIXME Validation errors are ignored
          }

          retainedWithUrl.map { case (p, url) =>
            var artifact = Artifact(
              url,
              Map.empty,
              Map.empty,
              p.attributes,
              changing = changing.getOrElse(project.version.contains("-SNAPSHOT")), // could be more reliable
              authentication = authentication
            )

            if (withChecksums)
              artifact = artifact.withDefaultChecksums
            if (withSignatures)
              artifact = artifact.withDefaultSignature

            artifact
          }
        }
      }
    else
      Artifact.Source.empty

  /**
   * Finds the metadata of `module` at `version`.
   *
   * When `version` is an interval and revisions can be listed, the listing
   * is fetched and the highest matching revision is resolved; otherwise the
   * version is looked up directly.
   */
  def find[F[_]](
    module: Module,
    version: String,
    fetch: Fetch.Content[F]
  )(implicit
    F: Monad[F]
  ): EitherT[F, String, (Artifact.Source, Project)] = {

    revisionListingPatternOpt match {
      case None =>
        findNoInverval(module, version, fetch)
      case Some(revisionListingPattern) =>
        Parse.versionInterval(version)
          .orElse(Parse.multiVersionInterval(version))
          .orElse(Parse.ivyLatestSubRevisionInterval(version))
          .filter(_.isValid) match {
          case None =>
            findNoInverval(module, version, fetch)
          case Some(itv) =>
            // URL of the directory holding one sub-directory per revision.
            val listingUrl = revisionListingPattern
              .substituteVariables(variables(module, None, "ivy", "ivy", "xml", None))
              .right
              .flatMap { s =>
                if (s.endsWith("/"))
                  Right(s)
                else
                  Left(s"Don't know how to list revisions of ${metadataPattern.string}")
              }

            // Picks the highest listed revision within the interval.
            def fromWebPage(url: String, s: String) = {
              val subDirs = WebPage.listDirectories(url, s)
              val versions = subDirs.map(Parse.version).collect { case Some(v) => v }
              val versionsInItv = versions.filter(itv.contains)

              if (versionsInItv.isEmpty)
                EitherT(
                  F.point[Either[String, (Artifact.Source, Project)]](Left(s"No version found for $version"))
                )
              else {
                val version0 = versionsInItv.max
                findNoInverval(module, version0.repr, fetch)
              }
            }

            def artifactFor(url: String) =
              Artifact(
                url,
                Map.empty,
                Map.empty,
                Attributes("", ""),
                changing = changing.getOrElse(version.contains("-SNAPSHOT")),
                authentication
              )

            for {
              url <- EitherT(F.point(listingUrl))
              s <- fetch(artifactFor(url))
              res <- fromWebPage(url, s)
            } yield res
        }
    }
  }

  /**
   * Fetches and parses the ivy.xml of `module` at the exact `version`
   * (no interval handling), optionally stripping "info."-prefixed module
   * attributes injected by sbt.
   */
  def findNoInverval[F[_]](
    module: Module,
    version: String,
    fetch: Fetch.Content[F]
  )(implicit
    F: Monad[F]
  ): EitherT[F, String, (Artifact.Source, Project)] = {

    val eitherArtifact: Either[String, Artifact] =
      for {
        url <- metadataPattern.substituteVariables(
          variables(module, Some(version), "ivy", "ivy", "xml", None)
        ).right
      } yield {
        var artifact = Artifact(
          url,
          Map.empty,
          Map.empty,
          Attributes("ivy", ""),
          changing = changing.getOrElse(version.contains("-SNAPSHOT")),
          authentication = authentication
        )

        if (withChecksums)
          artifact = artifact.withDefaultChecksums
        if (withSignatures)
          artifact = artifact.withDefaultSignature

        artifact
      }

    for {
      artifact <- EitherT(F.point(eitherArtifact))
      ivy <- fetch(artifact)
      proj0 <- EitherT(
        F.point {
          for {
            xml <- compatibility.xmlParse(ivy).right
            _ <- (if (xml.label == "ivy-module") Right(()) else Left("Module definition not found")).right
            proj <- IvyXml.project(xml).right
          } yield proj
        }
      )
    } yield {
      val proj =
        if (dropInfoAttributes)
          proj0.copy(
            module = proj0.module.copy(
              attributes = proj0.module.attributes.filter {
                case (k, _) => !k.startsWith("info.")
              }
            ),
            dependencies = proj0.dependencies.map {
              case (config, dep0) =>
                val dep = dep0.copy(
                  module = dep0.module.copy(
                    attributes = dep0.module.attributes.filter {
                      case (k, _) => !k.startsWith("info.")
                    }
                  )
                )

                config -> dep
            }
          )
        else
          proj0

      source -> proj.copy(
        actualVersionOpt = Some(version)
      )
    }
  }
}
object IvyRepository {

  /**
   * Builds an [[IvyRepository]] from textual patterns, substituting
   * `properties` into them first; fails if a pattern cannot be parsed or a
   * property is missing.
   */
  def parse(
    pattern: String,
    metadataPatternOpt: Option[String] = None,
    changing: Option[Boolean] = None,
    properties: Map[String, String] = Map.empty,
    withChecksums: Boolean = true,
    withSignatures: Boolean = true,
    withArtifacts: Boolean = true,
    // hack for SBT putting infos in properties
    dropInfoAttributes: Boolean = false,
    authentication: Option[Authentication] = None
  ): Either[String, IvyRepository] =
    for {
      propertiesPattern <- PropertiesPattern.parse(pattern).right
      metadataPropertiesPatternOpt <- metadataPatternOpt
        .fold[Either[String, Option[PropertiesPattern]]](Right(None))(PropertiesPattern.parse(_).right.map(Some(_)))
        .right
      pattern <- propertiesPattern.substituteProperties(properties).right
      metadataPatternOpt <- metadataPropertiesPatternOpt
        .fold[Either[String, Option[Pattern]]](Right(None))(_.substituteProperties(properties).right.map(Some(_)))
        .right
    } yield
      IvyRepository(
        pattern,
        metadataPatternOpt,
        changing,
        withChecksums,
        withSignatures,
        withArtifacts,
        dropInfoAttributes,
        authentication
      )

  // because of the compatibility apply method below, we can't give default values
  // to the default constructor of IvyPattern
  // this method accepts the same arguments as this constructor, with default values when possible
  def fromPattern(
    pattern: Pattern,
    metadataPatternOpt: Option[Pattern] = None,
    changing: Option[Boolean] = None,
    withChecksums: Boolean = true,
    withSignatures: Boolean = true,
    withArtifacts: Boolean = true,
    // hack for SBT putting infos in properties
    dropInfoAttributes: Boolean = false,
    authentication: Option[Authentication] = None
  ): IvyRepository =
    IvyRepository(
      pattern,
      metadataPatternOpt,
      changing,
      withChecksums,
      withSignatures,
      withArtifacts,
      dropInfoAttributes,
      authentication
    )
}

View File

@ -1,187 +0,0 @@
package coursier.ivy
import coursier.core._
import coursier.util.Xml._
/** Parsing of ivy.xml module descriptors into [[Project]] values. */
object IvyXml {

  // Namespace sbt uses for extra module attributes (scalaVersion, ...).
  val attributesNamespace = "http://ant.apache.org/ivy/extra"

  // Reads module coordinates from the <info> node.
  private def info(node: Node): Either[String, (Module, String)] =
    for {
      org <- node.attribute("organisation").right
      name <- node.attribute("module").right
      version <- node.attribute("revision").right
    } yield {
      val attr = node.attributesFromNamespace(attributesNamespace)
      (Module(org, name, attr.toMap), version)
    }

  // FIXME Errors are ignored here
  private def configurations(node: Node): Seq[(String, Seq[String])] =
    node.children
      .filter(_.label == "conf")
      .flatMap { node =>
        node.attribute("name").right.toOption.toSeq.map(_ -> node)
      }
      .map { case (name, node) =>
        name -> node.attribute("extends").right.toSeq.flatMap(_.split(','))
      }

  // FIXME "default(compile)" likely not to be always the default
  def mappings(mapping: String): Seq[(String, String)] =
    mapping.split(';').flatMap { m =>
      val (froms, tos) = m.split("->", 2) match {
        case Array(from) => (from, "default(compile)")
        case Array(from, to) => (from, to)
      }

      for {
        from <- froms.split(',')
        to <- tos.split(',')
      } yield (from.trim, to.trim)
    }

  // FIXME Errors ignored as above - warnings should be reported at least for anything suspicious
  private def dependencies(node: Node): Seq[(String, Dependency)] =
    node.children
      .filter(_.label == "dependency")
      .flatMap { node =>
        // artifact and include sub-nodes are ignored here

        // Per-configuration exclusions declared under this dependency.
        val excludes = node.children
          .filter(_.label == "exclude")
          .flatMap { node0 =>
            val org = node0.attribute("org").right.getOrElse("*")
            val name = node0.attribute("module").right.toOption
              .orElse(node0.attribute("name").right.toOption)
              .getOrElse("*")
            val confs = node0.attribute("conf").right.toOption.filter(_.nonEmpty).fold(Seq("*"))(_.split(','))
            confs.map(_ -> (org, name))
          }
          .groupBy { case (conf, _) => conf }
          .map { case (conf, l) => conf -> l.map { case (_, e) => e }.toSet }

        val allConfsExcludes = excludes.getOrElse("*", Set.empty)

        for {
          org <- node.attribute("org").right.toOption.toSeq
          name <- node.attribute("name").right.toOption.toSeq
          version <- node.attribute("rev").right.toOption.toSeq
          rawConf <- node.attribute("conf").right.toOption.toSeq
          (fromConf, toConf) <- mappings(rawConf)
        } yield {
          val attr = node.attributesFromNamespace(attributesNamespace)

          val transitive = node.attribute("transitive") match {
            case Right("false") => false
            case _ => true
          }

          fromConf -> Dependency(
            Module(org, name, attr.toMap),
            version,
            toConf,
            allConfsExcludes ++ excludes.getOrElse(fromConf, Set.empty),
            Attributes("", ""), // should come from possible artifact nodes
            optional = false,
            transitive = transitive
          )
        }
      }

  // Reads <artifact> nodes, grouped by the configurations they apply to.
  private def publications(node: Node): Map[String, Seq[Publication]] =
    node.children
      .filter(_.label == "artifact")
      .flatMap { node =>
        val name = node.attribute("name").right.getOrElse("")
        val type0 = node.attribute("type").right.getOrElse("jar")
        val ext = node.attribute("ext").right.getOrElse(type0)
        val confs = node.attribute("conf").fold(_ => Seq("*"), _.split(',').toSeq)
        val classifier = node.attribute("classifier").right.getOrElse("")
        confs.map(_ -> Publication(name, type0, ext, classifier))
      }
      .groupBy { case (conf, _) => conf }
      .map { case (conf, l) => conf -> l.map { case (_, p) => p } }

  /** Parses a whole ivy-module XML node into a [[Project]]. */
  def project(node: Node): Either[String, Project] =
    for {
      infoNode <- node.children
        .find(_.label == "info")
        .toRight("Info not found")
        .right

      modVer <- info(infoNode).right
    } yield {
      val (module, version) = modVer

      val dependenciesNodeOpt = node.children
        .find(_.label == "dependencies")

      val dependencies0 = dependenciesNodeOpt.map(dependencies).getOrElse(Nil)

      val configurationsNodeOpt = node.children
        .find(_.label == "configurations")

      val configurationsOpt = configurationsNodeOpt.map(configurations)

      val configurations0 = configurationsOpt.getOrElse(Seq("default" -> Seq.empty[String]))

      val publicationsNodeOpt = node.children
        .find(_.label == "publications")

      val publicationsOpt = publicationsNodeOpt.map(publications)

      val description = infoNode.children
        .find(_.label == "description")
        .map(_.textContent.trim)
        .getOrElse("")

      val licenses = infoNode.children
        .filter(_.label == "license")
        .flatMap { n =>
          n.attribute("name").right.toSeq.map { name =>
            (name, n.attribute("url").right.toOption)
          }
        }

      val publicationDate = infoNode.attribute("publication")
        .right
        .toOption
        .flatMap(parseDateTime)

      Project(
        module,
        version,
        dependencies0,
        configurations0.toMap,
        None,
        Nil,
        Nil,
        Nil,
        None,
        None,
        None,
        None,
        if (publicationsOpt.isEmpty)
          // no publications node -> default JAR artifact
          Seq("*" -> Publication(module.name, "jar", "jar", ""))
        else {
          // publications node is there -> only its content (if it is empty, no artifacts,
          // as per the Ivy manual)
          val inAllConfs = publicationsOpt.flatMap(_.get("*")).getOrElse(Nil)
          configurations0.flatMap { case (conf, _) =>
            (publicationsOpt.flatMap(_.get(conf)).getOrElse(Nil) ++ inAllConfs).map(conf -> _)
          }
        },
        Info(
          description,
          "",
          licenses,
          Nil,
          publicationDate
        )
      )
    }
}

View File

@ -1,194 +0,0 @@
package coursier.ivy
import coursier.util.Traverse.TraverseOps
import coursier.util.ValidationNel
import fastparse.all._
/**
 * An Ivy pattern that may still contain `${...}` property references,
 * to be substituted before it becomes a usable [[Pattern]].
 */
final case class PropertiesPattern(chunks: Seq[PropertiesPattern.ChunkOrProperty]) {

  def string: String = chunks.map(_.string).mkString

  import PropertiesPattern.ChunkOrProperty

  /**
   * Substitutes `properties` into the pattern, recursing into alternatives
   * and optional parts; fails listing every property that could not be
   * resolved.
   */
  def substituteProperties(properties: Map[String, String]): Either[String, Pattern] = {
    val validation = chunks.validationNelTraverse[String, Seq[Pattern.Chunk]] {
      case ChunkOrProperty.Prop(name, alternativesOpt) =>
        properties.get(name) match {
          case Some(value) =>
            ValidationNel.success(Seq(Pattern.Chunk.Const(value)))
          case None =>
            // Fall back to the "${prop-alternative}" part, if any.
            alternativesOpt match {
              case Some(alt) =>
                ValidationNel.fromEither(
                  PropertiesPattern(alt)
                    .substituteProperties(properties)
                    .right
                    .map(_.chunks.toVector)
                )
              case None =>
                ValidationNel.failure(name)
            }
        }

      case ChunkOrProperty.Opt(l @ _*) =>
        ValidationNel.fromEither(
          PropertiesPattern(l)
            .substituteProperties(properties)
            .right
            .map(l => Seq(Pattern.Chunk.Opt(l.chunks: _*)))
        )

      case ChunkOrProperty.Var(name) =>
        ValidationNel.success(Seq(Pattern.Chunk.Var(name)))

      case ChunkOrProperty.Const(value) =>
        ValidationNel.success(Seq(Pattern.Chunk.Const(value)))

    }.map(c => Pattern(c.flatten))

    validation.either.left.map { notFoundProps =>
      s"Property(ies) not found: ${notFoundProps.mkString(", ")}"
    }
  }
}
/**
 * A fully property-substituted Ivy pattern, made of constants, `[var]`
 * variables, and `(...)` optional parts.
 */
final case class Pattern(chunks: Seq[Pattern.Chunk]) {

  def +:(chunk: Pattern.Chunk): Pattern =
    Pattern(chunk +: chunks)

  import Pattern.Chunk

  def string: String = chunks.map(_.string).mkString

  /**
   * Substitutes `variables` into the pattern; optional parts whose variables
   * are missing are silently dropped, while missing mandatory variables make
   * the whole substitution fail.
   */
  def substituteVariables(variables: Map[String, String]): Either[String, String] = {

    def helper(chunks: Seq[Chunk]): ValidationNel[String, Seq[Chunk.Const]] =
      chunks.validationNelTraverse[String, Seq[Chunk.Const]] {
        case Chunk.Var(name) =>
          variables.get(name) match {
            case Some(value) =>
              ValidationNel.success(Seq(Chunk.Const(value)))
            case None =>
              ValidationNel.failure(name)
          }

        case Chunk.Opt(l @ _*) =>
          // Optional group: keep it if it fully resolves, else drop it.
          val res = helper(l)
          if (res.isSuccess)
            res
          else
            ValidationNel.success(Seq())

        case c: Chunk.Const =>
          ValidationNel.success(Seq(c))

      }.map(_.flatten)

    val validation = helper(chunks)

    validation.either match {
      case Left(notFoundVariables) =>
        Left(s"Variables not found: ${notFoundVariables.mkString(", ")}")
      case Right(constants) =>
        val b = new StringBuilder
        constants.foreach(b ++= _.value)
        Right(b.result())
    }
  }
}
object PropertiesPattern {

  /** Pattern element: constant, `[var]`, `(...)` optional, or `${prop}`. */
  sealed abstract class ChunkOrProperty extends Product with Serializable {
    def string: String
  }

  object ChunkOrProperty {
    // `${name}` or `${name-alternative}` property reference.
    final case class Prop(name: String, alternative: Option[Seq[ChunkOrProperty]]) extends ChunkOrProperty {
      def string: String =
        s"$${" + name + alternative.fold("")(alt => "-" + alt.map(_.string).mkString) + "}"
    }
    final case class Var(name: String) extends ChunkOrProperty {
      def string: String = "[" + name + "]"
    }
    final case class Opt(content: ChunkOrProperty*) extends ChunkOrProperty {
      def string: String = "(" + content.map(_.string).mkString + ")"
    }
    final case class Const(value: String) extends ChunkOrProperty {
      def string: String = value
    }

    implicit def fromString(s: String): ChunkOrProperty = Const(s)
  }

  // fastparse grammar for Ivy patterns with property references.
  private def parser: Parser[Seq[ChunkOrProperty]] = {

    val notIn = s"[]{}()$$".toSet
    val chars = P(CharsWhile(c => !notIn(c)).!)
    val noHyphenChars = P(CharsWhile(c => !notIn(c) && c != '-').!)

    val constant = P(chars).map(ChunkOrProperty.Const)

    lazy val property: Parser[ChunkOrProperty.Prop] =
      P(s"$${" ~ noHyphenChars ~ ("-" ~ chunks).? ~ "}")
        .map { case (name, altOpt) => ChunkOrProperty.Prop(name, altOpt) }

    lazy val variable: Parser[ChunkOrProperty.Var] = P("[" ~ chars ~ "]").map(ChunkOrProperty.Var)

    lazy val optional: Parser[ChunkOrProperty.Opt] = P("(" ~ chunks ~ ")")
      .map(l => ChunkOrProperty.Opt(l: _*))

    lazy val chunks: Parser[Seq[ChunkOrProperty]] = P((constant | property | variable | optional).rep)
      .map(_.toVector) // "Vector" is more readable than "ArrayBuffer"

    chunks
  }

  /** Parses a raw Ivy pattern string, returning the parse error message on failure. */
  def parse(pattern: String): Either[String, PropertiesPattern] =
    parser.parse(pattern) match {
      case f: Parsed.Failure =>
        Left(f.msg)
      case Parsed.Success(v, _) =>
        Right(PropertiesPattern(v))
    }
}
object Pattern {

  /** Pattern element: constant, `[var]`, or `(...)` optional group. */
  sealed abstract class Chunk extends Product with Serializable {
    def string: String
  }

  object Chunk {
    final case class Var(name: String) extends Chunk {
      def string: String = "[" + name + "]"
    }
    final case class Opt(content: Chunk*) extends Chunk {
      def string: String = "(" + content.map(_.string).mkString + ")"
    }
    final case class Const(value: String) extends Chunk {
      def string: String = value
    }

    implicit def fromString(s: String): Chunk = Const(s)
  }

  import Chunk.{ Var, Opt }

  // Corresponds to
  //   [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
  val default = Pattern(
    Seq(
      Var("organisation"), "/",
      Var("module"), "/",
      Opt("scala_", Var("scalaVersion"), "/"),
      Opt("sbt_", Var("sbtVersion"), "/"),
      Var("revision"), "/",
      Var("type"), "s/",
      Var("artifact"), Opt("-", Var("classifier")), ".", Var("ext")
    )
  )
}

View File

@ -1,384 +0,0 @@
package coursier.maven
import coursier.Fetch
import coursier.core._
import coursier.core.compatibility.encodeURIComponent
import coursier.util.{EitherT, Monad, WebPage}
object MavenRepository {

  // Matches timestamped snapshot versions, e.g. "1.0-20180101.123456-1".
  val SnapshotTimestamp = "(.*-)?[0-9]{8}\\.[0-9]{6}-[0-9]+".r

  /** Whether `version` is a snapshot, plain or timestamped. */
  def isSnapshot(version: String): Boolean =
    version.endsWith("SNAPSHOT") || SnapshotTimestamp.pattern.matcher(version).matches()

  // Maps a timestamped snapshot version back to its "-SNAPSHOT" base
  // version; the first regex group is null when the version has no prefix.
  def toBaseVersion(version: String): String = version match {
    case SnapshotTimestamp(null) => "SNAPSHOT"
    case SnapshotTimestamp(base) => base + "SNAPSHOT"
    case _ => version
  }

  /** Ivy-style path segments inside a Maven-layout repository. */
  def ivyLikePath(
    org: String,
    dirName: String,
    name: String,
    version: String,
    subDir: String,
    baseSuffix: String,
    ext: String
  ) =
    Seq(
      org,
      dirName,
      version,
      subDir,
      s"$name$baseSuffix.$ext"
    )

  /**
   * Looks up, in maven-metadata.xml snapshot versioning, the timestamped
   * value for the given classifier / extension ("*" acts as a wildcard).
   */
  def mavenVersioning(
    snapshotVersioning: SnapshotVersioning,
    classifier: String,
    extension: String
  ): Option[String] =
    snapshotVersioning
      .snapshotVersions
      .find(v =>
        (v.classifier == classifier || v.classifier == "*") &&
          (v.extension == extension || v.extension == "*")
      )
      .map(_.value)
      .filter(_.nonEmpty)

  // Standard Maven configuration hierarchy.
  val defaultConfigurations = Map(
    "compile" -> Seq.empty,
    "runtime" -> Seq("compile"),
    "default" -> Seq("runtime"),
    "test" -> Seq("runtime")
  )

  /**
   * Directory name of a module; with `sbtAttrStub`, the sbt scalaVersion /
   * sbtVersion attributes are appended to the name (sbt plugin layout).
   */
  def dirModuleName(module: Module, sbtAttrStub: Boolean): String =
    if (sbtAttrStub) {
      var name = module.name
      for (scalaVersion <- module.attributes.get("scalaVersion"))
        name = name + "_" + scalaVersion
      for (sbtVersion <- module.attributes.get("sbtVersion"))
        name = name + "_" + sbtVersion
      name
    } else
      module.name
}
final case class MavenRepository(
root: String,
changing: Option[Boolean] = None,
/** Hackish hack for sbt plugins mainly - what this does really sucks */
sbtAttrStub: Boolean = true,
authentication: Option[Authentication] = None
) extends Repository {
import Repository._
import MavenRepository._
// Root URL normalized to always end with a slash.
val root0 = if (root.endsWith("/")) root else root + "/"
// Artifact source sharing this repository's settings.
val source = MavenSource(root0, changing, sbtAttrStub, authentication)
// Path segments of a module's directory (org split on dots, then the
// possibly sbt-mangled module directory name).
private def modulePath(module: Module): Seq[String] =
  module.organization.split('.').toSeq :+ dirModuleName(module, sbtAttrStub)
// Path segments of a module's version directory (timestamped snapshot
// versions are mapped back to their base "-SNAPSHOT" form).
private def moduleVersionPath(module: Module, version: String): Seq[String] =
  modulePath(module) :+ toBaseVersion(version)
// Joins path segments under the repository root, URI-encoding each one.
private def urlFor(path: Seq[String]): String =
  root0 + path.map(encodeURIComponent).mkString("/")
/**
 * Artifact pointing at the POM of `module` at `version`; the file name uses
 * `versioningValue` (timestamped snapshot value) when provided.
 */
def projectArtifact(
  module: Module,
  version: String,
  versioningValue: Option[String]
): Artifact = {

  val pomName = s"${module.name}-${versioningValue.getOrElse(version)}.pom"
  val path = moduleVersionPath(module, version) :+ pomName

  val pom = Artifact(
    urlFor(path),
    Map.empty,
    Map.empty,
    Attributes("pom", ""),
    changing = changing.getOrElse(isSnapshot(version)),
    authentication = authentication
  )

  pom.withDefaultChecksums.withDefaultSignature
}
def versionsArtifact(module: Module): Option[Artifact] = {
val path = module.organization.split('.').toSeq ++ Seq(
dirModuleName(module, sbtAttrStub),
"maven-metadata.xml"
)
val artifact =
Artifact(
urlFor(path),
Map.empty,
Map.empty,
Attributes("pom", ""),
changing = true,
authentication = authentication
)
.withDefaultChecksums
.withDefaultSignature
Some(artifact)
}
def snapshotVersioningArtifact(
module: Module,
version: String
): Option[Artifact] = {
val path = moduleVersionPath(module, version) :+ "maven-metadata.xml"
val artifact =
Artifact(
urlFor(path),
Map.empty,
Map.empty,
Attributes("pom", ""),
changing = true,
authentication = authentication
)
.withDefaultChecksums
.withDefaultSignature
Some(artifact)
}
private def versionsFromListing[F[_]](
module: Module,
fetch: Fetch.Content[F]
)(implicit
F: Monad[F]
): EitherT[F, String, Versions] = {
val listingUrl = urlFor(modulePath(module)) + "/"
// version listing -> changing (changes as new versions are released)
val listingArtifact = artifactFor(listingUrl, changing = true)
fetch(listingArtifact).flatMap { listing =>
val files = WebPage.listFiles(listingUrl, listing)
val rawVersions = WebPage.listDirectories(listingUrl, listing)
val res =
if (files.contains("maven-metadata.xml"))
Left("maven-metadata.xml found, not listing version from directory listing")
else if (rawVersions.isEmpty)
Left(s"No versions found at $listingUrl")
else {
val parsedVersions = rawVersions.map(Version(_))
val nonPreVersions = parsedVersions.filter(_.items.forall {
case q: Version.Qualifier => q.level >= 0
case _ => true
})
if (nonPreVersions.isEmpty)
Left(s"Found only pre-versions at $listingUrl")
else {
val latest = nonPreVersions.max
Right(Versions(
latest.repr,
latest.repr,
nonPreVersions.map(_.repr).toList,
None
))
}
}
EitherT(F.point(res))
}
}
def versions[F[_]](
module: Module,
fetch: Fetch.Content[F]
)(implicit
F: Monad[F]
): EitherT[F, String, Versions] =
EitherT(
versionsArtifact(module) match {
case None => F.point(Left("Not supported"))
case Some(artifact) =>
F.map(fetch(artifact).run) { eitherStr =>
for {
str <- eitherStr.right
xml <- compatibility.xmlParse(str).right
_ <- (if (xml.label == "metadata") Right(()) else Left("Metadata not found")).right
versions <- Pom.versions(xml).right
} yield versions
}
}
)
def snapshotVersioning[F[_]](
module: Module,
version: String,
fetch: Fetch.Content[F]
)(implicit
F: Monad[F]
): EitherT[F, String, SnapshotVersioning] = {
EitherT(
snapshotVersioningArtifact(module, version) match {
case None => F.point(Left("Not supported"))
case Some(artifact) =>
F.map(fetch(artifact).run) { eitherStr =>
for {
str <- eitherStr.right
xml <- compatibility.xmlParse(str).right
_ <- (if (xml.label == "metadata") Right(()) else Left("Metadata not found")).right
snapshotVersioning <- Pom.snapshotVersioning(xml).right
} yield snapshotVersioning
}
}
)
}
def findNoInterval[F[_]](
module: Module,
version: String,
fetch: Fetch.Content[F]
)(implicit
F: Monad[F]
): EitherT[F, String, Project] =
EitherT {
def withSnapshotVersioning =
snapshotVersioning(module, version, fetch).flatMap { snapshotVersioning =>
val versioningOption =
mavenVersioning(snapshotVersioning, "", "jar")
.orElse(mavenVersioning(snapshotVersioning, "", "pom"))
.orElse(mavenVersioning(snapshotVersioning, "", ""))
versioningOption match {
case None =>
EitherT[F, String, Project](
F.point(Left("No snapshot versioning value found"))
)
case versioning @ Some(_) =>
findVersioning(module, version, versioning, fetch)
.map(_.copy(snapshotVersioning = Some(snapshotVersioning)))
}
}
val res = F.bind(findVersioning(module, version, None, fetch).run) { eitherProj =>
if (eitherProj.isLeft && isSnapshot(version))
F.map(withSnapshotVersioning.run)(eitherProj0 =>
if (eitherProj0.isLeft)
eitherProj
else
eitherProj0
)
else
F.point(eitherProj)
}
// keep exact version used to get metadata, in case the one inside the metadata is wrong
F.map(res)(_.right.map(proj => proj.copy(actualVersionOpt = Some(version))))
}
private def artifactFor(url: String, changing: Boolean) =
Artifact(
url,
Map.empty,
Map.empty,
Attributes("", ""),
changing = changing,
authentication
)
def findVersioning[F[_]](
module: Module,
version: String,
versioningValue: Option[String],
fetch: Fetch.Content[F]
)(implicit
F: Monad[F]
): EitherT[F, String, Project] = {
def parseRawPom(str: String) =
for {
xml <- compatibility.xmlParse(str).right
_ <- (if (xml.label == "project") Right(()) else Left("Project definition not found")).right
proj <- Pom.project(xml, relocationAsDependency = true).right
} yield proj
val projectArtifact0 = projectArtifact(module, version, versioningValue)
for {
str <- fetch(projectArtifact0)
proj0 <- EitherT(F.point[Either[String, Project]](parseRawPom(str)))
} yield
Pom.addOptionalDependenciesInConfig(
proj0.copy(
actualVersionOpt = Some(version),
configurations = defaultConfigurations
),
Set("", "default"),
"optional"
)
}
def find[F[_]](
module: Module,
version: String,
fetch: Fetch.Content[F]
)(implicit
F: Monad[F]
): EitherT[F, String, (Artifact.Source, Project)] = {
Parse.versionInterval(version)
.orElse(Parse.multiVersionInterval(version))
.orElse(Parse.ivyLatestSubRevisionInterval(version))
.filter(_.isValid) match {
case None =>
findNoInterval(module, version, fetch).map((source, _))
case Some(itv) =>
def v = versions(module, fetch)
val v0 =
if (changing.forall(!_) && module.attributes.contains("scalaVersion") && module.attributes.contains("sbtVersion"))
versionsFromListing(module, fetch).orElse(v)
else
v
v0.flatMap { versions0 =>
val eitherVersion = {
val release = Version(versions0.release)
if (itv.contains(release)) Right(versions0.release)
else {
val inInterval = versions0.available
.map(Version(_))
.filter(itv.contains)
if (inInterval.isEmpty) Left(s"No version found for $version")
else Right(inInterval.max.repr)
}
}
eitherVersion match {
case Left(reason) => EitherT[F, String, (Artifact.Source, Project)](F.point(Left(reason)))
case Right(version0) =>
findNoInterval(module, version0, fetch)
.map(_.copy(versions = Some(versions0)))
.map((source, _))
}
}
}
}
}

View File

@ -1,201 +0,0 @@
package coursier.maven
import coursier.core._
/** Computes the artifacts (jars, sources, javadoc, …) of a resolved Maven project.
  *
  * @param root repository root URL (assumed slash-terminated — built by MavenRepository)
  * @param changing when set, forces the "changing" flag; otherwise derived from
  *                 whether the project's version is a snapshot
  * @param sbtAttrStub see doc on MavenRepository
  * @param authentication optional credentials attached to artifacts
  */
final case class MavenSource(
  root: String,
  changing: Option[Boolean] = None,
  /** See doc on MavenRepository */
  sbtAttrStub: Boolean,
  authentication: Option[Authentication]
) extends Artifact.Source {

  import Repository._
  import MavenRepository._

  // Computes the artifact list ignoring the "optional" flag (added by the
  // public `artifacts` below via makeOptional).
  private def artifactsUnknownPublications(
    dependency: Dependency,
    project: Project,
    overrideClassifiers: Option[Seq[String]]
  ): Seq[Artifact] = {

    // relocated projects get a synthetic packaging that must not drive artifacts
    val packagingOpt = project.packagingOpt.filter(_ != Pom.relocatedPackaging)

    // (default classifier, extension) of the POM packaging -> packaging,
    // so a publication matching the packaging gets typed after it
    val packagingTpeMap = packagingOpt
      .map { packaging =>
        (MavenSource.typeDefaultClassifier(packaging), MavenSource.typeExtension(packaging)) -> packaging
      }
      .toMap

    // Builds the Artifact for a publication, resolving the snapshot versioning
    // value for its classifier/extension when the project has one.
    def artifactOf(publication: Publication) = {

      val versioning = project
        .snapshotVersioning
        .flatMap(versioning =>
          mavenVersioning(
            versioning,
            publication.classifier,
            MavenSource.typeExtension(publication.`type`)
          )
        )

      val path = dependency.module.organization.split('.').toSeq ++ Seq(
        MavenRepository.dirModuleName(dependency.module, sbtAttrStub),
        toBaseVersion(project.actualVersion),
        s"${dependency.module.name}-${versioning getOrElse project.actualVersion}${Some(publication.classifier).filter(_.nonEmpty).map("-" + _).mkString}.${publication.ext}"
      )

      val changing0 = changing.getOrElse(isSnapshot(project.actualVersion))

      Artifact(
        root + path.mkString("/"),
        Map.empty,
        Map.empty,
        publication.attributes,
        changing = changing0,
        authentication = authentication
      )
        .withDefaultChecksums
        .withDefaultSignature
    }

    // POM artifact, attached to every other artifact under the "metadata" key.
    val metadataArtifact = artifactOf(Publication(dependency.module.name, "pom", "pom", ""))

    def artifactWithExtra(publication: Publication) = {
      val a = artifactOf(publication)
      a.copy(
        extra = a.extra + ("metadata" -> metadataArtifact)
      )
    }

    // Publications derived from the dependency's type/classifier attributes and
    // the project packaging, when no classifiers are explicitly requested.
    lazy val defaultPublications = {

      // publication coming from the POM packaging — only when the dependency
      // carries no explicit attributes of its own
      val packagingPublicationOpt = packagingOpt
        .filter(_ => dependency.attributes.isEmpty)
        .map { packaging =>
          Publication(
            dependency.module.name,
            packaging,
            MavenSource.typeExtension(packaging),
            MavenSource.typeDefaultClassifier(packaging)
          )
        }

      val type0 = if (dependency.attributes.`type`.isEmpty) "jar" else dependency.attributes.`type`
      val ext = MavenSource.typeExtension(type0)
      val classifier =
        if (dependency.attributes.classifier.isEmpty)
          MavenSource.typeDefaultClassifier(type0)
        else
          dependency.attributes.classifier

      val tpe = packagingTpeMap.getOrElse(
        (classifier, ext),
        MavenSource.classifierExtensionDefaultTypeOpt(classifier, ext).getOrElse(ext)
      )

      val pubs = packagingPublicationOpt.toSeq :+
        Publication(
          dependency.module.name,
          tpe,
          ext,
          classifier
        )

      pubs.distinct
    }

    overrideClassifiers
      .fold(defaultPublications) { classifiers =>
        classifiers.flatMap { classifier =>
          if (classifier == dependency.attributes.classifier)
            defaultPublications
          else {
            // requested classifiers other than the dependency's own are
            // assumed to be jars (sources / javadoc / tests, …)
            val ext = "jar"
            val tpe = packagingTpeMap.getOrElse(
              (classifier, ext),
              MavenSource.classifierExtensionDefaultTypeOpt(classifier, ext).getOrElse(ext)
            )
            Seq(
              Publication(
                dependency.module.name,
                tpe,
                ext,
                classifier
              )
            )
          }
        }
      }
      .map(artifactWithExtra)
  }

  // Placeholder artifact used as a marker value under Artifact.optionalKey.
  private val dummyArtifact = Artifact("", Map(), Map(), Attributes("", ""), changing = false, None)

  /** Artifacts of `dependency` within `project`.
    *
    * Relocated projects yield no artifacts. All returned artifacts (and their
    * extra artifacts, recursively) are flagged optional via `Artifact.optionalKey`.
    *
    * @param overrideClassifiers when set, only these classifiers are returned
    */
  def artifacts(
    dependency: Dependency,
    project: Project,
    overrideClassifiers: Option[Seq[String]]
  ): Seq[Artifact] =
    if (project.packagingOpt.toSeq.contains(Pom.relocatedPackaging))
      Nil
    else {
      def makeOptional(a: Artifact): Artifact =
        a.copy(
          extra = a.extra.mapValues(makeOptional).iterator.toMap + (Artifact.optionalKey -> dummyArtifact)
        )

      artifactsUnknownPublications(dependency, project, overrideClassifiers)
        .map(makeOptional)
    }
}
object MavenSource {

  /** Packaging/type -> file extension. Types not listed here map to themselves. */
  val typeExtensions: Map[String, String] = {
    // all of these packaging types resolve to plain jar files
    val jarBased = Seq(
      "eclipse-plugin",
      "maven-plugin",
      "hk2-jar",
      "orbit",
      "scala-jar",
      "jar",
      "bundle",
      "doc",
      "src",
      "test-jar",
      "ejb-client"
    )
    jarBased.map(_ -> "jar").toMap
  }

  /** Extension of `type`, falling back to the type itself when unknown. */
  def typeExtension(`type`: String): String =
    typeExtensions.getOrElse(`type`, `type`)

  // see https://github.com/apache/maven/blob/c023e58104b71e27def0caa034d39ab0fa0373b6/maven-core/src/main/resources/META-INF/plexus/artifact-handlers.xml
  // discussed in https://github.com/coursier/coursier/issues/298
  val typeDefaultClassifiers: Map[String, String] = Map(
    "test-jar"    -> "tests",
    "javadoc"     -> "javadoc",
    "java-source" -> "sources",
    "ejb-client"  -> "client"
  )

  /** Default classifier of `type`, if it has one. */
  def typeDefaultClassifierOpt(`type`: String): Option[String] =
    typeDefaultClassifiers.get(`type`)

  /** Default classifier of `type`, or `""` when it has none. */
  def typeDefaultClassifier(`type`: String): String =
    typeDefaultClassifierOpt(`type`).getOrElse("")

  /** (classifier, extension) -> default type, reverse of the mappings above. */
  val classifierExtensionDefaultTypes: Map[(String, String), String] = Map(
    ("tests", "jar")   -> "test-jar",
    ("javadoc", "jar") -> "doc",
    ("sources", "jar") -> "src"
    // don't know much about "client" classifier, not including it here
  )

  /** Default type of a (classifier, extension) pair, if any. */
  def classifierExtensionDefaultTypeOpt(classifier: String, ext: String): Option[String] =
    classifierExtensionDefaultTypes.get((classifier, ext))
}

View File

@ -1,592 +0,0 @@
package coursier.maven
import coursier.core._
import coursier.util.Traverse.TraverseOps
/** Parsers for Maven POMs and `maven-metadata.xml` documents, working on the
  * `coursier.util.Xml.Node` abstraction, plus helpers for coursier's
  * sbt-style "extra dependency attributes" encoding.
  */
object Pom {
  import coursier.util.Xml._

  // Shortcut lifting a pure value into the `.right`-projected Either used in
  // the for-comprehensions below (Scala 2.11/2.12-compatible Either style).
  private def point[T](t: T) =
    Right(t).right

  /**
   * Returns either a property's key-value pair or an error if the elem is not an element.
   *
   * This method trims all spaces, whereas Maven has an option to preserve them.
   *
   * @param elem a property element
   * @return the key and the value of the property
   * @see [[https://issues.apache.org/jira/browse/MNG-5380]]
   */
  def property(elem: Node): Either[String, (String, String)] = {
    // Not matching with Text, which fails on scala-js if the property value has xml comments
    if (elem.isElement) Right(elem.label -> elem.textContent.trim)
    else Left(s"Can't parse property $elem")
  }

  // Parses the groupId / artifactId of a node into a Module, with optional
  // fallbacks when either element is missing.
  // TODO Allow no version in some contexts
  private def module(
    node: Node,
    defaultGroupId: Option[String] = None,
    defaultArtifactId: Option[String] = None
  ): Either[String, Module] = {
    for {
      organization <- {
        val e = text(node, "groupId", "Organization")
        defaultGroupId.fold(e)(g => Right(e.right.getOrElse(g))).right
      }
      name <- {
        val n = text(node, "artifactId", "Name")
        defaultArtifactId.fold(n)(n0 => Right(n.right.getOrElse(n0))).right
      }
    } yield Module(organization, name, Map.empty).trim
  }

  // Version of a node, or "" when absent; trimmed.
  private def readVersion(node: Node) =
    text(node, "version", "Version").right.getOrElse("").trim

  /** Parses a `<dependency>` element into (scope, Dependency).
    * The scope is `""` when absent; exclusions with no artifactId default to `"*"`.
    */
  def dependency(node: Node): Either[String, (String, Dependency)] =
    module(node).right.flatMap { mod =>

      val version0 = readVersion(node)
      val scopeOpt = text(node, "scope", "").right.toOption
      val typeOpt = text(node, "type", "").right.toOption
      val classifierOpt = text(node, "classifier", "").right.toOption
      val xmlExclusions = node.children
        .find(_.label == "exclusions")
        .map(_.children.filter(_.label == "exclusion"))
        .getOrElse(Seq.empty)

      xmlExclusions
        .eitherTraverse(module(_, defaultArtifactId = Some("*")))
        .right
        .map { exclusions =>

          val optional = text(node, "optional", "").right.toSeq.contains("true")

          scopeOpt.getOrElse("") -> Dependency(
            mod,
            version0,
            "",
            exclusions.map(mod => (mod.organization, mod.name)).toSet,
            Attributes(typeOpt.getOrElse(""), classifierOpt.getOrElse("")),
            optional,
            transitive = true
          )
        }
    }

  // Parses a profile's <activation> element: the activeByDefault flag, and
  // the property / os / jdk activation conditions.
  private def profileActivation(node: Node): (Option[Boolean], Activation) = {
    val byDefault =
      text(node, "activeByDefault", "").right.toOption.flatMap {
        case "true" => Some(true)
        case "false" => Some(false)
        case _ => None
      }

    val properties = node.children
      .filter(_.label == "property")
      .flatMap { p =>
        for {
          name <- text(p, "name", "").right.toOption
          valueOpt = text(p, "value", "").right.toOption
        } yield (name, valueOpt)
      }

    val osNodeOpt = node.children.collectFirst { case n if n.label == "os" => n }

    val os = Activation.Os(
      osNodeOpt.flatMap(n => text(n, "arch", "").right.toOption),
      osNodeOpt.flatMap(n => text(n, "family", "").right.toOption).toSet,
      osNodeOpt.flatMap(n => text(n, "name", "").right.toOption),
      osNodeOpt.flatMap(n => text(n, "version", "").right.toOption)
    )

    // jdk condition: either a version interval (Left) or an exact version (Right)
    val jdk = text(node, "jdk", "").right.toOption.flatMap { s =>
      Parse.versionInterval(s)
        .orElse(Parse.multiVersionInterval(s))
        .map(Left(_))
        .orElse(Parse.version(s).map(v => Right(Seq(v))))
    }

    val activation = Activation(properties, os, jdk)

    (byDefault, activation)
  }

  /** Parses a `<profile>` element: id, activation, dependencies, dependency
    * management, and properties.
    */
  def profile(node: Node): Either[String, Profile] = {

    val id = text(node, "id", "Profile ID").right.getOrElse("")

    val xmlActivationOpt = node.children
      .find(_.label == "activation")
    val (activeByDefault, activation) = xmlActivationOpt.fold((Option.empty[Boolean], Activation.empty))(profileActivation)

    val xmlDeps = node.children
      .find(_.label == "dependencies")
      .map(_.children.filter(_.label == "dependency"))
      .getOrElse(Seq.empty)

    for {
      deps <- xmlDeps
        .eitherTraverse(dependency)
        .right

      depMgmts <- node
        .children
        .find(_.label == "dependencyManagement")
        .flatMap(_.children.find(_.label == "dependencies"))
        .map(_.children.filter(_.label == "dependency"))
        .getOrElse(Seq.empty)
        .eitherTraverse(dependency)
        .right

      properties <- node
        .children
        .find(_.label == "properties")
        .map(_.children.collect { case elem if elem.isElement => elem })
        .getOrElse(Seq.empty)
        .eitherTraverse(property)
        .right

    } yield Profile(id, activeByDefault, activation, deps, depMgmts, properties.toMap)
  }

  /** The `<packaging>` of a POM, if present. */
  def packagingOpt(pom: Node): Option[String] =
    text(pom, "packaging", "").right.toOption

  /** Parses a POM into a Project, treating relocation as metadata only. */
  def project(pom: Node): Either[String, Project] =
    project(pom, relocationAsDependency = false)

  /** Parses a POM into a Project.
    *
    * @param relocationAsDependency when true, a `<distributionManagement>/<relocation>`
    *        element is turned into a dependency on the relocation target, and the
    *        project's packaging is replaced with [[relocatedPackaging]]
    */
  def project(
    pom: Node,
    relocationAsDependency: Boolean
  ): Either[String, Project] = {
    for {
      projModule <- module(pom, defaultGroupId = Some("")).right

      parentOpt <- point(pom.children.find(_.label == "parent"))
      parentModuleOpt <- parentOpt
        .map(module(_).right.map(Some(_)))
        .getOrElse(Right(None))
        .right
      parentVersionOpt <- point(parentOpt.map(readVersion))

      xmlDeps <- point(
        pom.children
          .find(_.label == "dependencies")
          .map(_.children.filter(_.label == "dependency"))
          .getOrElse(Seq.empty)
      )
      deps <- xmlDeps.eitherTraverse(dependency).right

      xmlDepMgmts <- point(
        pom.children
          .find(_.label == "dependencyManagement")
          .flatMap(_.children.find(_.label == "dependencies"))
          .map(_.children.filter(_.label == "dependency"))
          .getOrElse(Seq.empty)
      )
      depMgmts <- xmlDepMgmts.eitherTraverse(dependency).right

      // organization / version fall back to the parent's when absent
      groupId <- Some(projModule.organization).filter(_.nonEmpty)
        .orElse(parentModuleOpt.map(_.organization).filter(_.nonEmpty))
        .toRight("No organization found")
        .right
      version <- Some(readVersion(pom)).filter(_.nonEmpty)
        .orElse(parentVersionOpt.filter(_.nonEmpty))
        .toRight("No version found")
        .right

      // a <parent> element, when present, must carry both version and organization
      _ <- parentVersionOpt
        .map(v => if (v.isEmpty) Left("Parent version missing") else Right(()))
        .getOrElse(Right(()))
        .right
      _ <- parentModuleOpt
        .map(mod => if (mod.organization.isEmpty) Left("Parent organization missing") else Right(()))
        .getOrElse(Right(()))
        .right

      xmlProperties <- point(
        pom.children
          .find(_.label == "properties")
          .map(_.children.collect { case elem if elem.isElement => elem })
          .getOrElse(Seq.empty)
      )
      properties <- xmlProperties.eitherTraverse(property).right

      xmlProfiles <- point(
        pom
          .children
          .find(_.label == "profiles")
          .map(_.children.filter(_.label == "profile"))
          .getOrElse(Seq.empty)
      )
      profiles <- xmlProfiles.eitherTraverse(profile).right

      // sbt-encoded extra dependency attributes, carried in a POM property
      extraAttrs <- properties
        .collectFirst { case ("extraDependencyAttributes", s) => extraAttributes(s) }
        .getOrElse(Right(Map.empty))
        .right

    } yield {
      // (module without attributes, version) -> attributes, used to re-attach
      // the sbt attributes onto the matching parsed dependencies
      val extraAttrsMap = extraAttrs
        .map {
          case (mod, ver) =>
            (mod.copy(attributes = Map.empty), ver) -> mod.attributes
        }
        .toMap

      val description = pom.children
        .find(_.label == "description")
        .map(_.textContent)
        .getOrElse("")

      val homePage = pom.children
        .find(_.label == "url")
        .map(_.textContent)
        .getOrElse("")

      val licenses = pom.children
        .find(_.label == "licenses")
        .toSeq
        .flatMap(_.children)
        .filter(_.label == "license")
        .flatMap { n =>
          text(n, "name", "License name").right.toOption.map { name =>
            (name, text(n, "url", "License URL").right.toOption)
          }.toSeq
        }

      val developers = pom.children
        .find(_.label == "developers")
        .toSeq
        .flatMap(_.children)
        .filter(_.label == "developer")
        .map { n =>
          for {
            id <- text(n, "id", "Developer ID").right
            name <- text(n, "name", "Developer name").right
            url <- text(n, "url", "Developer URL").right
          } yield Info.Developer(id, name, url)
        }
        .collect {
          case Right(d) => d
        }

      val finalProjModule = projModule.copy(organization = groupId)

      val relocationDependencyOpt =
        if (relocationAsDependency)
          pom.children
            .find(_.label == "distributionManagement")
            .flatMap(_.children.find(_.label == "relocation"))
            .map { n =>
              // see https://maven.apache.org/guides/mini/guide-relocation.html
              val relocatedGroupId = text(n, "groupId", "").right.getOrElse(finalProjModule.organization)
              val relocatedArtifactId = text(n, "artifactId", "").right.getOrElse(finalProjModule.name)
              val relocatedVersion = text(n, "version", "").right.getOrElse(version)

              "" -> Dependency(
                finalProjModule.copy(
                  organization = relocatedGroupId,
                  name = relocatedArtifactId
                ),
                relocatedVersion,
                "",
                Set(),
                Attributes("", ""),
                optional = false,
                transitive = true
              )
            }
        else
          None

      Project(
        finalProjModule,
        version,
        (relocationDependencyOpt.toSeq ++ deps).map {
          case (config, dep0) =>
            val dep = extraAttrsMap.get(dep0.moduleVersion).fold(dep0)(attrs =>
              dep0.copy(module = dep0.module.copy(attributes = attrs))
            )
            config -> dep
        },
        Map.empty,
        parentModuleOpt.map((_, parentVersionOpt.getOrElse(""))),
        depMgmts,
        properties,
        profiles,
        None,
        None,
        relocationDependencyOpt.fold(packagingOpt(pom))(_ => Some(relocatedPackaging)),
        None,
        Nil,
        Info(
          description,
          homePage,
          licenses,
          developers,
          None
        )
      )
    }
  }

  /** Parses a module-level `maven-metadata.xml` document into a Versions. */
  def versions(node: Node): Either[String, Versions] = {
    for {
      organization <- text(node, "groupId", "Organization").right // Ignored
      name <- text(node, "artifactId", "Name").right // Ignored
      xmlVersioning <- node.children
        .find(_.label == "versioning")
        .toRight("Versioning info not found in metadata")
        .right
    } yield {

      val latest = text(xmlVersioning, "latest", "Latest version")
        .right
        .getOrElse("")
      val release = text(xmlVersioning, "release", "Release version")
        .right
        .getOrElse("")

      val versionsOpt = xmlVersioning.children
        .find(_.label == "versions")
        .map(_.children.filter(_.label == "version").flatMap(_.children.collectFirst { case Text(t) => t }))

      val lastUpdatedOpt = text(xmlVersioning, "lastUpdated", "Last update date and time")
        .right
        .toOption
        .flatMap(parseDateTime)

      Versions(latest, release, versionsOpt.map(_.toList).getOrElse(Nil), lastUpdatedOpt)
    }
  }

  /** Parses a `<snapshotVersion>` element; missing fields default to `""` / None. */
  def snapshotVersion(node: Node): Either[String, SnapshotVersion] = {
    def textOrEmpty(name: String, desc: String): String =
      text(node, name, desc)
        .right
        .getOrElse("")

    val classifier = textOrEmpty("classifier", "Classifier")
    val ext = textOrEmpty("extension", "Extensions")
    val value = textOrEmpty("value", "Value")

    val updatedOpt = text(node, "updated", "Updated")
      .right
      .toOption
      .flatMap(parseDateTime)

    Right(SnapshotVersion(
      classifier,
      ext,
      value,
      updatedOpt
    ))
  }

  /** If `snapshotVersion` is missing, guess it based on
    * `version`, `timestamp` and `buildNumber`, as is done in:
    * https://github.com/sbt/ivy/blob/2.3.x-sbt/src/java/org/apache/ivy/plugins/resolver/IBiblioResolver.java
    */
  def guessedSnapshotVersion(
    version: String,
    timestamp: String,
    buildNumber: Int
  ): SnapshotVersion = {
    // e.g. "1.0-SNAPSHOT" -> "1.0-20180928.120000-3" (trailing '-' kept by dropRight)
    val value = s"${version.dropRight("SNAPSHOT".length)}$timestamp-$buildNumber"
    SnapshotVersion("*", "*", value, None)
  }

  /** Parses a version-level `maven-metadata.xml` into a SnapshotVersioning. */
  def snapshotVersioning(node: Node): Either[String, SnapshotVersioning] =
    // FIXME Quite similar to Versions above
    for {
      organization <- text(node, "groupId", "Organization").right
      name <- text(node, "artifactId", "Name").right
      xmlVersioning <- node
        .children
        .find(_.label == "versioning")
        .toRight("Versioning info not found in metadata")
        .right
      snapshotVersions <- {
        val xmlSnapshotVersions = xmlVersioning
          .children
          .find(_.label == "snapshotVersions")
          .map(_.children.filter(_.label == "snapshotVersion"))
          .getOrElse(Seq.empty)
        xmlSnapshotVersions
          .eitherTraverse(snapshotVersion)
          .right
      }
    } yield {
      val version = readVersion(node)

      val latest = text(xmlVersioning, "latest", "Latest version")
        .right
        .getOrElse("")
      val release = text(xmlVersioning, "release", "Release version")
        .right
        .getOrElse("")

      val lastUpdatedOpt = text(xmlVersioning, "lastUpdated", "Last update date and time")
        .right
        .toOption
        .flatMap(parseDateTime)

      val xmlSnapshotOpt = xmlVersioning
        .children
        .find(_.label == "snapshot")

      val timestamp = xmlSnapshotOpt
        .flatMap(
          text(_, "timestamp", "Snapshot timestamp")
            .right
            .toOption
        )
        .getOrElse("")
      val buildNumber = xmlSnapshotOpt
        .flatMap(
          text(_, "buildNumber", "Snapshot build number")
            .right
            .toOption
        )
        .filter(s => s.nonEmpty && s.forall(_.isDigit))
        .map(_.toInt)
      val localCopy = xmlSnapshotOpt
        .flatMap(
          text(_, "localCopy", "Snapshot local copy")
            .right
            .toOption
        )
        .collect {
          case "true" => true
          case "false" => false
        }

      SnapshotVersioning(
        Module(organization, name, Map.empty),
        version,
        latest,
        release,
        timestamp,
        buildNumber,
        localCopy,
        lastUpdatedOpt,
        // fall back to a guessed snapshot version when none are listed
        if (!snapshotVersions.isEmpty)
          snapshotVersions
        else
          buildNumber.map(bn => guessedSnapshotVersion(version, timestamp, bn)).toList
      )
    }

  // Synthetic packaging marking a relocated project (see `project` above).
  val relocatedPackaging = s"$$relocated"

  // Constants of the sbt "extraDependencyAttributes" property encoding.
  val extraAttributeSeparator = ":#@#:"
  val extraAttributePrefix = "+"

  val extraAttributeOrg = "organisation"
  val extraAttributeName = "module"
  val extraAttributeVersion = "revision"

  // Attribute names that identify the module itself rather than extra metadata.
  val extraAttributeBase = Set(
    extraAttributeOrg,
    extraAttributeName,
    extraAttributeVersion,
    "branch"
  )

  val extraAttributeDropPrefix = "e:"

  /** Parses one line of the sbt extra-attributes encoding into (Module, version). */
  def extraAttribute(s: String): Either[String, (Module, String)] = {
    // vaguely does the same as:
    // https://github.com/apache/ant-ivy/blob/2.2.0/src/java/org/apache/ivy/core/module/id/ModuleRevisionId.java#L291

    // dropping the attributes with a value of NULL here...

    val rawParts = s.split(extraAttributeSeparator).toSeq

    val partsOrError =
      if (rawParts.length % 2 == 0) {
        val malformed = rawParts.filter(!_.startsWith(extraAttributePrefix))
        if (malformed.isEmpty)
          Right(rawParts.map(_.drop(extraAttributePrefix.length)))
        else
          Left(s"Malformed attributes ${malformed.map("'"+_+"'").mkString(", ")} in extra attributes '$s'")
      } else
        Left(s"Malformed extra attributes '$s'")

    def attrFrom(attrs: Map[String, String], name: String): Either[String, String] =
      attrs
        .get(name)
        .toRight(s"$name not found in extra attributes '$s'")

    for {
      parts <- partsOrError.right
      attrs <- point(
        parts
          .grouped(2)
          .collect {
            case Seq(k, v) if v != "NULL" =>
              k.stripPrefix(extraAttributeDropPrefix) -> v
          }
          .toMap
      )
      org <- attrFrom(attrs, extraAttributeOrg).right
      name <- attrFrom(attrs, extraAttributeName).right
      version <- attrFrom(attrs, extraAttributeVersion).right
    } yield {
      // everything that is not org/name/version/branch stays as a module attribute
      val remainingAttrs = attrs.filterKeys(!extraAttributeBase(_))
      (Module(org, name, remainingAttrs.toVector.toMap), version)
    }
  }

  /** Parses the multi-line sbt extra-attributes property; fails on the first bad line. */
  def extraAttributes(s: String): Either[String, Seq[(Module, String)]] = {
    val lines = s.split('\n').toSeq.map(_.trim).filter(_.nonEmpty)

    lines.foldLeft[Either[String, Seq[(Module, String)]]](Right(Vector.empty)) {
      case (acc, line) =>
        for {
          modVers <- acc.right
          modVer <- extraAttribute(line).right
        } yield modVers :+ modVer
    }
  }

  /** Copies optional dependencies of `fromConfigs` into `optionalConfig`
    * (non-optional there), and makes `optionalConfig` extend `fromConfigs`.
    */
  def addOptionalDependenciesInConfig(
    proj: Project,
    fromConfigs: Set[String],
    optionalConfig: String
  ): Project = {

    val optionalDeps = proj.dependencies.collect {
      case (conf, dep) if dep.optional && fromConfigs(conf) =>
        optionalConfig -> dep.copy(optional = false)
    }

    val configurations = proj.configurations +
      (optionalConfig -> (proj.configurations.getOrElse(optionalConfig, Nil) ++ fromConfigs.filter(_.nonEmpty)).distinct)

    proj.copy(
      configurations = configurations,
      dependencies = proj.dependencies ++ optionalDeps
    )
  }
}

View File

@ -1,113 +0,0 @@
import coursier.core.{Activation, Parse, Version}
/**
 * Mainly pulls definitions from coursier.core, sometimes with default arguments.
 */
package object coursier {

  // `extends Serializable` added here-or-there for bin compat while switching from 2.12.1 to 2.12.4

  type Dependency = core.Dependency
  // Companion-like forwarder adding default argument values to core.Dependency.
  object Dependency extends Serializable {
    def apply(
      module: Module,
      version: String,
      // Substituted by Resolver with its own default configuration (compile)
      configuration: String = "",
      attributes: Attributes = Attributes(),
      exclusions: Set[(String, String)] = Set.empty,
      optional: Boolean = false,
      transitive: Boolean = true
    ): Dependency =
      core.Dependency(
        module,
        version,
        configuration,
        exclusions,
        attributes,
        optional,
        transitive
      )
  }

  type Attributes = core.Attributes
  // Forwarder with empty-string defaults for type and classifier.
  object Attributes extends Serializable {
    def apply(
      `type`: String = "",
      classifier: String = ""
    ): Attributes =
      core.Attributes(`type`, classifier)
  }

  type Project = core.Project
  val Project = core.Project

  type Info = core.Info
  val Info = core.Info

  type Profile = core.Profile
  val Profile = core.Profile

  type Module = core.Module
  // Forwarder defaulting the attributes to an empty map.
  object Module extends Serializable {
    def apply(organization: String, name: String, attributes: Map[String, String] = Map.empty): Module =
      core.Module(organization, name, attributes)
  }

  type ModuleVersion = (core.Module, String)
  type ProjectCache = Map[ModuleVersion, (Artifact.Source, Project)]

  type Repository = core.Repository
  val Repository = core.Repository

  type MavenRepository = maven.MavenRepository
  val MavenRepository = maven.MavenRepository

  type Resolution = core.Resolution
  // Forwarder with defaults for every field of core.Resolution.
  object Resolution extends Serializable {
    // Empty resolution: all defaults below.
    val empty = apply()
    def apply(
      rootDependencies: Set[Dependency] = Set.empty,
      dependencies: Set[Dependency] = Set.empty,
      forceVersions: Map[Module, String] = Map.empty,
      conflicts: Set[Dependency] = Set.empty,
      projectCache: ProjectCache = Map.empty,
      errorCache: Map[ModuleVersion, Seq[String]] = Map.empty,
      finalDependencies: Map[Dependency, Seq[Dependency]] = Map.empty,
      filter: Option[Dependency => Boolean] = None,
      osInfo: Activation.Os = Activation.Os.fromProperties(sys.props.toMap),
      jdkVersion: Option[Version] = sys.props.get("java.version").flatMap(Parse.version),
      userActivations: Option[Map[String, Boolean]] = None,
      mapDependencies: Option[Dependency => Dependency] = None,
      forceProperties: Map[String, String] = Map.empty
    ): Resolution =
      core.Resolution(
        rootDependencies,
        dependencies,
        forceVersions,
        conflicts,
        projectCache,
        errorCache,
        finalDependencies,
        filter,
        osInfo,
        jdkVersion,
        userActivations,
        mapDependencies,
        forceProperties
      )
  }

  type Artifact = core.Artifact
  val Artifact = core.Artifact

  type ResolutionProcess = core.ResolutionProcess
  val ResolutionProcess = core.ResolutionProcess

  // Adds a `.process` shortcut on Resolution.
  implicit class ResolutionExtensions(val underlying: Resolution) extends AnyVal {

    def process: ResolutionProcess = ResolutionProcess(underlying)
  }
}

View File

@ -1,49 +0,0 @@
package coursier.util
import coursier.core.{ Dependency, Resolution }
/** Helpers to minimize / merge per-configuration dependency sets. */
object Config {

  // loose attempt at minimizing a set of dependencies from various configs
  // `configs` is assumed to be fully unfold

  /** Minimal dependency set of each configuration: each config's dependencies are
    * minimized via `res.subset(...).minDependencies`, then the ones already brought
    * in by the configurations it extends (per `configs`) are removed.
    */
  def allDependenciesByConfig(
    res: Resolution,
    depsByConfig: Map[String, Set[Dependency]],
    configs: Map[String, Set[String]]
  ): Map[String, Set[Dependency]] = {

    val allDepsByConfig = depsByConfig.map {
      case (config, deps) =>
        config -> res.subset(deps).minDependencies
    }

    val filteredAllDepsByConfig = allDepsByConfig.map {
      case (config, allDeps) =>
        val allExtendedConfigs = configs.getOrElse(config, Set.empty) - config
        val inherited = allExtendedConfigs
          .flatMap(allDepsByConfig.getOrElse(_, Set.empty))

        config -> (allDeps -- inherited)
    }

    filteredAllDepsByConfig
  }

  /** Flattens the per-config result of [[allDependenciesByConfig]] into a single set,
    * encoding the configs in each dependency's `configuration` field as
    * `config->origConfig`, merged with `;` when a dependency appears in several configs.
    */
  def dependenciesWithConfig(
    res: Resolution,
    depsByConfig: Map[String, Set[Dependency]],
    configs: Map[String, Set[String]]
  ): Set[Dependency] =
    allDependenciesByConfig(res, depsByConfig, configs)
      .flatMap {
        case (config, deps) =>
          deps.map(dep => dep.copy(configuration = s"$config->${dep.configuration}"))
      }
      .groupBy(_.copy(configuration = ""))
      .map {
        case (dep, l) =>
          dep.copy(configuration = l.map(_.configuration).mkString(";"))
      }
      .toSet
}

View File

@ -1,60 +0,0 @@
package coursier.util
/** Minimal `EitherT`-like wrapper around an `F[Either[L, R]]`.
  *
  * Provides the usual right-biased combinators (`map` / `flatMap`), their
  * left-side counterparts (`leftMap` / `leftFlatMap`), and `orElse` as a
  * recovery combinator. All of them only require a `Monad[F]`.
  */
final case class EitherT[F[_], L, R](run: F[Either[L, R]]) {

  /** Transforms the right value; a left value passes through unchanged. */
  def map[S](f: R => S)(implicit M: Monad[F]): EitherT[F, L, S] =
    EitherT(M.map(run)(_.right.map(f)))

  /** Sequences a second computation after a successful (right) result. */
  def flatMap[S](f: R => EitherT[F, L, S])(implicit M: Monad[F]): EitherT[F, L, S] =
    EitherT(
      M.bind(run) { either =>
        either.fold(
          l => M.point[Either[L, S]](Left(l)),
          r => f(r).run
        )
      }
    )

  /** Transforms the left value; a right value passes through unchanged. */
  def leftMap[M](f: L => M)(implicit M: Monad[F]): EitherT[F, M, R] =
    EitherT(M.map(run)(_.left.map(f)))

  /** Sequences a recovery computation after a failed (left) result. */
  def leftFlatMap[S](f: L => EitherT[F, S, R])(implicit M: Monad[F]): EitherT[F, S, R] =
    EitherT(
      M.bind(run) { either =>
        either.fold(
          l => f(l).run,
          r => M.point[Either[S, R]](Right(r))
        )
      }
    )

  /** Falls back to `other` when this computation resulted in a left value.
    * `other` is only evaluated in that case.
    */
  def orElse(other: => EitherT[F, L, R])(implicit M: Monad[F]): EitherT[F, L, R] =
    leftFlatMap(_ => other)
}
object EitherT {

  /** Lifts a pure value into a right-valued EitherT. */
  def point[F[_], L, R](r: R)(implicit M: Monad[F]): EitherT[F, L, R] =
    EitherT(M.point(r))

  /** Partial-application helper: `EitherT.fromEither[F](either)` lifts a plain
    * `Either` into an EitherT, letting `L` and `R` be inferred from `either`.
    */
  def fromEither[F[_]]: FromEither[F] =
    new FromEither[F]

  // Carrier class for the fromEither partial application above.
  final class FromEither[F[_]] {
    def apply[L, R](either: Either[L, R])(implicit M: Monad[F]): EitherT[F, L, R] =
      EitherT(M.point(either))
  }
}

View File

@ -1,9 +0,0 @@
package coursier.util
/** A `Monad[F]` that can additionally run several `F[A]` computations and
  * collect all their results into one `F[Seq[A]]`.
  */
trait Gather[F[_]] extends Monad[F] {
  def gather[A](elems: Seq[F[A]]): F[Seq[A]]
}
object Gather {
  // Summoner: `Gather[F]` returns the implicit instance in scope.
  def apply[F[_]](implicit G: Gather[F]): Gather[F] = G
}

View File

@ -1,9 +0,0 @@
package coursier.util
/** Minimal monad type class (kept dependency-free). */
trait Monad[F[_]] {
  // Lifts a pure value into F.
  def point[A](a: A): F[A]
  // Sequences `elem`, feeding its result into `f`.
  def bind[A, B](elem: F[A])(f: A => F[B]): F[B]
  // Default `map`, derived from `bind` and `point`.
  def map[A, B](elem: F[A])(f: A => B): F[B] =
    bind(elem)(a => point(f(a)))
}

View File

@ -1,328 +0,0 @@
package coursier.util
import coursier.{Attributes, Dependency}
import coursier.core.{Module, Repository}
import coursier.ivy.IvyRepository
import coursier.maven.MavenRepository
import scala.collection.mutable.ArrayBuffer
object Parse {
// Scala version of the running scala-library (e.g. "2.12.7").
// NOTE(review): presumably the fallback for the `defaultScalaVersion` parameters
// of the parse methods below — confirm against the rest of this file.
private def defaultScalaVersion = scala.util.Properties.versionNumberString
/**
 * Parses a module like
 *   org:name
 * possibly with attributes, like
 *   org:name;attr1=val1;attr2=val2
 *
 * Two semi-columns after the org part is interpreted as a scala module. E.g. if
 * `defaultScalaVersion` is `"2.11.x"`, org::name:ver is equivalent to org:name_2.11:ver.
 */
def module(s: String, defaultScalaVersion: String): Either[String, Module] = {

  // Splits the raw name on ';' into name + key=value attributes, appends
  // `suffix` to the name, and builds the Module.
  def parsed(org: String, rawName: String, suffix: String): Either[String, Module] = {
    val pieces = rawName.split(';')
    if (pieces.tail.exists(p => !p.contains("=")))
      Left(s"malformed attribute(s) in $s")
    else {
      val attributes = pieces.tail
        .map(_.split("=", 2))
        .collect { case Array(key, value) => key -> value }
        .toMap
      Right(Module(org, pieces.head + suffix, attributes))
    }
  }

  s.split(":", 3) match {
    case Array(org, rawName) =>
      parsed(org, rawName, "")
    case Array(org, "", rawName) =>
      // "::" form — append the binary scala version suffix, e.g. "_2.12"
      val scalaSuffix = "_" + defaultScalaVersion.split('.').take(2).mkString(".")
      parsed(org, rawName, scalaSuffix)
    case _ =>
      Left(s"malformed module: $s")
  }
}
private def valuesAndErrors[L, R](f: String => Either[L, R], l: Seq[String]): (Seq[L], Seq[R]) = {
val errors = new ArrayBuffer[L]
val values = new ArrayBuffer[R]
for (elem <- l)
f(elem) match {
case Left(err) => errors += err
case Right(modVer) => values += modVer
}
(errors, values)
}
/**
* Parses a sequence of coordinates.
*
* @return Sequence of errors, and sequence of modules/versions
*/
def modules(l: Seq[String], defaultScalaVersion: String): (Seq[String], Seq[Module]) =
valuesAndErrors(module(_, defaultScalaVersion), l)
/**
* Parses coordinates like
* org:name:version
* possibly with attributes, like
* org:name;attr1=val1;attr2=val2:version
*/
def moduleVersion(s: String, defaultScalaVersion: String): Either[String, (Module, String)] = {
val parts = s.split(":", 4)
parts match {
case Array(org, rawName, version) =>
module(s"$org:$rawName", defaultScalaVersion)
.right
.map((_, version))
case Array(org, "", rawName, version) =>
module(s"$org::$rawName", defaultScalaVersion)
.right
.map((_, version))
case _ =>
Left(s"Malformed dependency: $s")
}
}
class ModuleParseError(private val message: String = "",
private val cause: Throwable = None.orNull)
extends Exception(message, cause)
/**
* Parses coordinates like
* org:name:version
* with attributes, like
* org:name:version,attr1=val1,attr2=val2
* and a configuration, like
* org:name:version:config
* or
* org:name:version:config,attr1=val1,attr2=val2
*
* Currently only the "classifier" and "url attributes are
* used, and others throw errors.
*/
def moduleVersionConfig(s: String,
req: ModuleRequirements,
transitive: Boolean,
defaultScalaVersion: String): Either[String, (Dependency, Map[String, String])] = {
// Assume org:name:version,attr1=val1,attr2=val2
// That is ',' has to go after ':'.
// E.g. "org:name,attr1=val1,attr2=val2:version:config" is illegal.
val attrSeparator = ","
val argSeparator = ":"
val Array(coords, rawAttrs @ _*) = s.split(attrSeparator)
val attrsOrErrors = rawAttrs
.map { x =>
if (x.contains(argSeparator))
Left(s"'$argSeparator' is not allowed in attribute '$x' in '$s'. Please follow the format " +
s"'org${argSeparator}name[${argSeparator}version][${argSeparator}config]${attrSeparator}attr1=val1${attrSeparator}attr2=val2'")
else
x.split("=") match {
case Array(k, v) =>
Right(k -> v)
case _ =>
Left(s"Failed to parse attribute '$x' in '$s'. Keyword argument expected such as 'classifier=tests'")
}
}
attrsOrErrors
.collectFirst {
case Left(err) => Left(err)
}
.getOrElse {
val attrs = attrsOrErrors
.collect {
case Right(attr) => attr
}
.toMap
val parts = coords.split(":", 5)
// Only "classifier" and "url" attributes are allowed
val validAttrsKeys = Set("classifier", "url")
validateAttributes(attrs, s, validAttrsKeys) match {
case Some(err) => Left(err)
case None =>
val attributes = attrs.get("classifier") match {
case Some(c) => Attributes("", c)
case None => Attributes("", "")
}
val extraDependencyParams: Map[String, String] = attrs.get("url") match {
case Some(url) => Map("url" -> url)
case None => Map()
}
val localExcludes = req.localExcludes
val globalExcludes = req.globalExcludes
val defaultConfig = req.defaultConfiguration
val depOrError = parts match {
case Array(org, "", rawName, version, config) =>
module(s"$org::$rawName", defaultScalaVersion)
.right
.map(mod => {
Dependency(
mod,
version,
config,
attributes,
transitive = transitive,
exclusions = localExcludes.getOrElse(mod.orgName, Set()) | globalExcludes)
})
case Array(org, "", rawName, version) =>
module(s"$org::$rawName", defaultScalaVersion)
.right
.map(mod => {
Dependency(
mod,
version,
configuration = defaultConfig,
attributes = attributes,
transitive = transitive,
exclusions = localExcludes.getOrElse(mod.orgName, Set()) | globalExcludes)
})
case Array(org, rawName, version, config) =>
module(s"$org:$rawName", defaultScalaVersion)
.right
.map(mod => {
Dependency(
mod,
version,
config,
attributes,
transitive = transitive,
exclusions = localExcludes.getOrElse(mod.orgName, Set()) | globalExcludes)
})
case Array(org, rawName, version) =>
module(s"$org:$rawName", defaultScalaVersion)
.right
.map(mod => {
Dependency(
mod,
version,
configuration = defaultConfig,
attributes = attributes,
transitive = transitive,
exclusions = localExcludes.getOrElse(mod.orgName, Set()) | globalExcludes)
})
case _ =>
Left(s"Malformed dependency: $s")
}
depOrError.right.map(dep => (dep, extraDependencyParams))
}
}
}
/**
* Validates the parsed attributes
*
* Currently only "classifier" and "url" are allowed. If more are
* added, they should be passed in via the second parameter
*
* @param attrs Attributes parsed
* @param dep String representing the dep being parsed
* @param validAttrsKeys Valid attribute keys
* @return A string if there is an error, otherwise None
*/
private def validateAttributes(attrs: Map[String, String],
dep: String,
validAttrsKeys: Set[String]): Option[String] = {
val extraAttributes = attrs.keys.toSet.diff(validAttrsKeys)
if (attrs.size > validAttrsKeys.size || extraAttributes.nonEmpty)
Some(s"The only attributes allowed are: ${validAttrsKeys.mkString(", ")}. ${
if (extraAttributes.nonEmpty) s"The following are invalid: " +
s"${extraAttributes.map(_ + s" in "+ dep).mkString(", ")}"
}")
else None
}
/**
* Parses a sequence of coordinates.
*
* @return Sequence of errors, and sequence of modules / versions
*/
def moduleVersions(l: Seq[String], defaultScalaVersion: String): (Seq[String], Seq[(Module, String)]) =
valuesAndErrors(moduleVersion(_, defaultScalaVersion), l)
/**
* Data holder for additional info that needs to be considered when parsing the module.
*
* @param globalExcludes global excludes that need to be applied to all modules
* @param localExcludes excludes to be applied to specific modules
* @param defaultConfiguration default configuration
*/
case class ModuleRequirements(globalExcludes: Set[(String, String)] = Set(),
localExcludes: Map[String, Set[(String, String)]] = Map(),
defaultConfiguration: String = "default(compile)")
/**
* Parses a sequence of coordinates having an optional configuration.
*
* @return Sequence of errors, and sequence of modules / versions / optional configurations
*/
def moduleVersionConfigs(l: Seq[String],
req: ModuleRequirements,
transitive: Boolean,
defaultScalaVersion: String): (Seq[String], Seq[(Dependency, Map[String, String])]) =
valuesAndErrors(moduleVersionConfig(_, req, transitive, defaultScalaVersion), l)
def repository(s: String): Either[String, Repository] =
if (s == "central")
Right(MavenRepository("https://repo1.maven.org/maven2"))
else if (s.startsWith("sonatype:"))
Right(MavenRepository(s"https://oss.sonatype.org/content/repositories/${s.stripPrefix("sonatype:")}"))
else if (s.startsWith("bintray:"))
Right(MavenRepository(s"https://dl.bintray.com/${s.stripPrefix("bintray:")}"))
else if (s.startsWith("bintray-ivy:"))
Right(IvyRepository.fromPattern(
s"https://dl.bintray.com/${s.stripPrefix("bintray-ivy:").stripSuffix("/")}/" +:
coursier.ivy.Pattern.default
))
else if (s.startsWith("typesafe:ivy-"))
Right(IvyRepository.fromPattern(
s"https://repo.typesafe.com/typesafe/ivy-${s.stripPrefix("typesafe:ivy-")}/" +:
coursier.ivy.Pattern.default
))
else if (s.startsWith("typesafe:"))
Right(MavenRepository(s"https://repo.typesafe.com/typesafe/${s.stripPrefix("typesafe:")}"))
else if (s.startsWith("sbt-plugin:"))
Right(IvyRepository.fromPattern(
s"https://repo.scala-sbt.org/scalasbt/sbt-plugin-${s.stripPrefix("sbt-plugin:")}/" +:
coursier.ivy.Pattern.default
))
else if (s.startsWith("ivy:"))
IvyRepository.parse(s.stripPrefix("ivy:"))
else if (s == "jitpack")
Right(MavenRepository("https://jitpack.io"))
else
Right(MavenRepository(s))
}

View File

@ -1,263 +0,0 @@
package coursier.util
import coursier.core.{ Attributes, Dependency, Module, Orders, Project, Resolution }
/** Helpers to render dependencies and resolutions as human-readable text,
  * optionally with ANSI colors and as ASCII trees. */
object Print {

  object Colors {
    // pre-built palettes: colored (ANSI escapes) and plain
    private val `with`: Colors = Colors(Console.RED, Console.YELLOW, Console.RESET)
    private val `without`: Colors = Colors("", "", "")
    /** Selects the colored or plain palette. */
    def get(colors: Boolean): Colors = if (colors) `with` else `without`
  }

  /** ANSI escape triple used when rendering (empty strings disable coloring). */
  case class Colors private(red: String, yellow: String, reset: String)

  /** Anything that can render itself given a color palette. */
  trait Renderable {
    def repr(colors: Colors): String
  }

  /** A node of the forward (root → dependencies) tree. */
  trait Elem extends Renderable {
    def dep: Dependency
    def excluded: Boolean
    def reconciledVersion: String
    def children: Seq[Elem]
  }

  /** A node of the reverse (dependency → dependees) tree. */
  trait Parent extends Renderable {
    def module: Module
    def version: String
    def dependsOn: Module
    def wantVersion: String
    def reconciledVersion: String
    def excluding: Boolean
  }

  /** Renders a dependency as `org:name:version:config`, without exclusions. */
  def dependency(dep: Dependency): String =
    dependency(dep, printExclusions = false)

  /** Renders a dependency, optionally appending one `exclude(org, name)` line per exclusion. */
  def dependency(dep: Dependency, printExclusions: Boolean): String = {
    def exclusionsStr = dep
      .exclusions
      .toVector
      .sorted
      .map {
        case (org, name) =>
          s"\n exclude($org, $name)"
      }
      .mkString
    s"${dep.module}:${dep.version}:${dep.configuration}" + (if (printExclusions) exclusionsStr else "")
  }

  def dependenciesUnknownConfigs(deps: Seq[Dependency], projects: Map[(Module, String), Project]): String =
    dependenciesUnknownConfigs(deps, projects, printExclusions = false)

  /** Renders a minimized, de-duplicated dependency list: versions are replaced by
    * the resolved project version when known, duplicates are merged with their
    * configurations joined by ';', and the result is sorted for stable output. */
  def dependenciesUnknownConfigs(
    deps: Seq[Dependency],
    projects: Map[(Module, String), Project],
    printExclusions: Boolean
  ): String = {
    // substitute each dependency's version with the resolved project version, if any
    val deps0 = deps.map { dep =>
      dep.copy(
        version = projects
          .get(dep.moduleVersion)
          .fold(dep.version)(_.version)
      )
    }
    val minDeps = Orders.minDependencies(
      deps0.toSet,
      _ => Map.empty
    )
    // merge entries differing only by configuration/attributes
    val deps1 = minDeps
      .groupBy(_.copy(configuration = "", attributes = Attributes("", "")))
      .toVector
      .map { case (k, l) =>
        k.copy(configuration = l.toVector.map(_.configuration).sorted.distinct.mkString(";"))
      }
      .sortBy { dep =>
        (dep.module.organization, dep.module.name, dep.module.toString, dep.version)
      }
    deps1.map(dependency(_, printExclusions)).mkString("\n")
  }

  /** Heuristic version compatibility: same first two dot-separated segments. */
  def compatibleVersions(first: String, second: String): Boolean = {
    // too loose for now
    // e.g. RCs and milestones should not be considered compatible with subsequent non-RC or
    // milestone versions - possibly not with each other either
    first.split('.').take(2).toSeq == second.split('.').take(2).toSeq
  }

  def dependencyTree(
    roots: Seq[Dependency],
    resolution: Resolution,
    printExclusions: Boolean,
    reverse: Boolean
  ): String =
    dependencyTree(roots, resolution, printExclusions, reverse, colors = true)

  /** Renders the resolution as an ASCII tree, forward or reversed, colored or not. */
  def dependencyTree(
    roots: Seq[Dependency],
    resolution: Resolution,
    printExclusions: Boolean,
    reverse: Boolean,
    colors: Boolean
  ): String = {
    val colorsCase = Colors.get(colors)
    if (reverse) {
      reverseTree(resolution.dependencies.toSeq, resolution, printExclusions).render(_.repr(colorsCase))
    } else {
      normalTree(roots, resolution, printExclusions).render(_.repr(colorsCase))
    }
  }

  /** Builds the `Dependency => Elem` factory shared by both tree renderings;
    * each `Elem` knows its reconciled version and lazily computes its children
    * (optionally including excluded dependencies as annotated leaves). */
  private def getElemFactory(resolution: Resolution, withExclusions: Boolean): Dependency => Elem = {
    final case class ElemImpl(dep: Dependency, excluded: Boolean) extends Elem {
      // version chosen by conflict resolution (falls back to the requested one)
      val reconciledVersion: String = resolution.reconciledVersions
        .getOrElse(dep.module, dep.version)
      def repr(colors: Colors): String =
        if (excluded)
          resolution.reconciledVersions.get(dep.module) match {
            case None =>
              s"${colors.yellow}(excluded)${colors.reset} ${dep.module}:${dep.version}"
            case Some(version) =>
              val versionMsg =
                if (version == dep.version)
                  "this version"
                else
                  s"version $version"
              s"${dep.module}:${dep.version} " +
                s"${colors.red}(excluded, $versionMsg present anyway)${colors.reset}"
          }
        else {
          // highlight version bumps: yellow if heuristically compatible, red otherwise
          val versionStr =
            if (reconciledVersion == dep.version)
              dep.version
            else {
              val assumeCompatibleVersions = compatibleVersions(dep.version, reconciledVersion)
              (if (assumeCompatibleVersions) colors.yellow else colors.red) +
                s"${dep.version} -> $reconciledVersion" +
                (if (assumeCompatibleVersions) "" else " (possible incompatibility)") +
                colors.reset
            }
          s"${dep.module}:$versionStr"
        }
      val children: Seq[Elem] =
        if (excluded)
          Nil
        else {
          val dep0 = dep.copy(version = reconciledVersion)
          val dependencies = resolution.dependenciesOf(
            dep0,
            withReconciledVersions = false
          ).sortBy { trDep =>
            (trDep.module.organization, trDep.module.name, trDep.version)
          }
          // dependencies that would exist without this dep's exclusions: the
          // difference with `dependencies` is exactly what got excluded
          def excluded = resolution
            .dependenciesOf(
              dep0.copy(exclusions = Set.empty),
              withReconciledVersions = false
            )
            .sortBy { trDep =>
              (trDep.module.organization, trDep.module.name, trDep.version)
            }
            .map(_.moduleVersion)
            .filterNot(dependencies.map(_.moduleVersion).toSet).map {
              case (mod, ver) =>
                ElemImpl(
                  Dependency(mod, ver, "", Set.empty, Attributes("", ""), false, false),
                  excluded = true
                )
            }
          dependencies.map(ElemImpl(_, excluded = false)) ++
            (if (withExclusions) excluded else Nil)
        }
    }
    a => ElemImpl(a, excluded = false)
  }

  /** Forward tree: roots at the top, each node listing what it depends on. */
  def normalTree(roots: Seq[Dependency], resolution: Resolution, withExclusions: Boolean): Tree[Elem] = {
    val elemFactory = getElemFactory(resolution, withExclusions)
    Tree[Elem](roots.toVector.map(elemFactory), (elem: Elem) => elem.children)
  }

  /** Reverse tree: each node listing the dependencies that pull it in. */
  def reverseTree(roots: Seq[Dependency], resolution: Resolution, withExclusions: Boolean): Tree[Parent] = {
    val elemFactory = getElemFactory(resolution, withExclusions)
    final case class ParentImpl(
      module: Module,
      version: String,
      dependsOn: Module,
      wantVersion: String,
      reconciledVersion: String,
      excluding: Boolean
    ) extends Parent {
      def repr(colors: Colors): String =
        if (excluding)
          s"${colors.yellow}(excluded by)${colors.reset} $module:$version"
        else if (wantVersion == reconciledVersion)
          s"$module:$version"
        else {
          val assumeCompatibleVersions = compatibleVersions(wantVersion, reconciledVersion)
          s"$module:$version " +
            (if (assumeCompatibleVersions) colors.yellow else colors.red) +
            s"$dependsOn:$wantVersion -> $reconciledVersion" +
            colors.reset
        }
    }
    // invert the dependency edges: for each module, who depends on it
    val parents: Map[Module, Seq[Parent]] = {
      val links = for {
        dep <- resolution.dependencies.toVector
        elem <- elemFactory(dep).children
      }
        yield elem.dep.module -> ParentImpl(
          dep.module,
          dep.version,
          elem.dep.module,
          elem.dep.version,
          elem.reconciledVersion,
          elem.excluded
        )
      links
        .groupBy(_._1)
        .mapValues(_.map(_._2).distinct.sortBy(par => (par.module.organization, par.module.name)))
        .iterator
        .toMap
    }
    def children(par: Parent) =
      if (par.excluding)
        Nil
      else
        parents.getOrElse(par.module, Nil)
    Tree[Parent](roots
      .toVector
      .sortBy(dep => (dep.module.organization, dep.module.name, dep.version))
      .map(dep => {
        ParentImpl(dep.module, dep.version, dep.module, dep.version, dep.version, excluding = false)
      }), (par: Parent) => children(par))
  }
}

View File

@ -1,53 +0,0 @@
package coursier.util
import scala.collection.mutable.ListBuffer
object Traverse {
  /** Adds traversal helpers (`eitherTraverse`, `validationNelTraverse`) to any `Seq`. */
  implicit class TraverseOps[T](val seq: Seq[T]) {
    /** Applies `f` to each element; returns the first `Left` encountered, or all
      * the `Right` values in order if every element succeeds. */
    def eitherTraverse[L, R](f: T => Either[L, R]): Either[L, Seq[R]] =
      // Warning: iterates on the whole sequence no matter what, even if the first element is a Left
      seq
        .foldLeft[Either[L, ListBuffer[R]]](Right(new ListBuffer)) {
          case (l @ Left(_), _) => l
          case (Right(b), elem) =>
            f(elem) match {
              case Left(l) => Left(l)
              case Right(r) => Right(b += r)
            }
        }
        .right
        .map(_.result())
    /** Applies `f` to each element, accumulating ALL errors (as a non-empty list)
      * instead of stopping at the first one; succeeds only if every element does. */
    def validationNelTraverse[L, R](f: T => ValidationNel[L, R]): ValidationNel[L, Seq[R]] = {
      val e = seq
        .foldLeft[Either[ListBuffer[L], ListBuffer[R]]](Right(new ListBuffer)) {
          // already failed: keep collecting any further errors
          case (l @ Left(b), elem) =>
            f(elem).either match {
              case Left(l0) => Left(b ++= l0)
              case Right(_) => l
            }
          // still succeeding: a failure switches the accumulator to the error side
          case (Right(b), elem) =>
            f(elem).either match {
              case Left(l) => Left(new ListBuffer[L] ++= l)
              case Right(r) => Right(b += r)
            }
        }
        .left
        .map { b =>
          // the Left side is only ever created non-empty, so head/tail is safe
          b.result() match {
            case Nil => sys.error("Can't happen")
            case h :: t => ::(h, t)
          }
        }
        .right
        .map(_.result())
      ValidationNel(e)
    }
  }
}

View File

@ -1,43 +0,0 @@
package coursier.util
import scala.collection.mutable.ArrayBuffer
object Tree {
  /** Convenience: builds a tree from `roots` and `children`, rendering it immediately with `show`. */
  def apply[A](roots: IndexedSeq[A])(children: A => Seq[A], show: A => String): String = {
    Tree(roots, children).render(show)
  }
}
/** A lazily-expanded tree: `roots` are the top-level nodes, and `children`
  * computes each node's direct descendants on demand. */
case class Tree[A](roots: IndexedSeq[A], children: A => Seq[A]) {
  /** Renders the tree as box-drawing ASCII art, one node per line, using `show`
    * for each node's label; already-seen ancestors are skipped to break cycles. */
  def render(show: A => String): String = {
    /**
      * Recursively go down the resolution for the elems to construct the tree for print out.
      *
      * @param elems Seq of Elems that have been resolved
      * @param ancestors a set of Elems to keep track for cycle detection
      * @param prefix prefix for the print out
      * @param acc accumulation method on a string
      */
    def recursivePrint(elems: Seq[A], ancestors: Set[A], prefix: String, acc: String => Unit): Unit = {
      val unseenElems: Seq[A] = elems.filterNot(ancestors.contains)
      val unseenElemsLen = unseenElems.length
      for ((elem, idx) <- unseenElems.iterator.zipWithIndex) {
        val isLast = idx == unseenElemsLen - 1
        val tee = if (isLast) "└─ " else "├─ "
        acc(prefix + tee + show(elem))
        // continuation prefix: a vertical bar while siblings remain below
        val extraPrefix = if (isLast) " " else "│ "
        recursivePrint(children(elem), ancestors + elem, prefix + extraPrefix, acc)
      }
    }
    val b = new ArrayBuffer[String]
    recursivePrint(roots, Set(), "", b += _)
    b.mkString("\n")
  }
}

View File

@ -1,27 +0,0 @@
package coursier.util
// not covariant because scala.:: isn't (and is there a point in being covariant in R but not L?)
/** Result of a validation: either a non-empty list of errors, or a value.
  * (Not covariant because `scala.::` isn't.) */
final case class ValidationNel[L, R](either: Either[::[L], R]) {
  /** True when this validation succeeded (holds a `Right`). */
  def isSuccess: Boolean =
    either.fold(_ => false, _ => true)
  /** Transforms the success value; errors are left untouched. */
  def map[S](f: R => S): ValidationNel[L, S] =
    ValidationNel(
      either match {
        case Left(errors) => Left(errors)
        case Right(value) => Right(f(value))
      }
    )
}
object ValidationNel {
  /** Wraps a plain `Either`, promoting a single error into a one-element non-empty list. */
  def fromEither[L, R](either: Either[L, R]): ValidationNel[L, R] =
    ValidationNel(
      either match {
        case Left(l)  => Left(::(l, Nil))
        case Right(r) => Right(r)
      }
    )
  /** Builder fixing the error type first, e.g. `ValidationNel.success[String](3)`. */
  def success[L]: SuccessBuilder[L] =
    new SuccessBuilder
  /** Builder fixing the result type first, e.g. `ValidationNel.failure[Int]("nope")`. */
  def failure[R]: FailureBuilder[R] =
    new FailureBuilder
  final class SuccessBuilder[L] {
    def apply[R](r: R): ValidationNel[L, R] =
      ValidationNel(Right(r))
  }
  final class FailureBuilder[R] {
    def apply[L](l: L): ValidationNel[L, R] =
      ValidationNel(Left(::(l, Nil)))
  }
}

View File

@ -1,22 +0,0 @@
package coursier.util
/** Extracts directory/file names linked from a raw repository listing page. */
object WebPage {
  /** Lists either directories (`directories = true`) or files found on `page`,
    * stripping the base `url` and trailing slashes, and dropping `.`/`..` and
    * anything still containing a slash. */
  def listElements(url: String, page: String, directories: Boolean): Seq[String] = {
    val rawElements = coursier.core.compatibility.listWebPageRawElements(page)
    val cleaned = rawElements.collect {
      case elem if elem.nonEmpty && elem.endsWith("/") == directories =>
        elem
          .stripSuffix("/")
          .stripPrefix(url)
          .stripPrefix(":") // bintray typically prepends these
    }
    cleaned.filter { name =>
      !name.contains("/") && name != "." && name != ".."
    }
  }
  /** Lists the sub-directories linked from `page`. */
  def listDirectories(url: String, page: String): Seq[String] =
    listElements(url, page, directories = true)
  /** Lists the files linked from `page`. */
  def listFiles(url: String, page: String): Seq[String] =
    listElements(url, page, directories = false)
}

View File

@ -1,68 +0,0 @@
package coursier.util
import coursier.core.Versions
object Xml {
  /** A representation of an XML node/document, with different implementations on JVM and JS */
  trait Node {
    def label: String
    /** Namespace / key / value */
    def attributes: Seq[(String, String, String)]
    def children: Seq[Node]
    def isText: Boolean
    def textContent: String
    def isElement: Boolean
    /** Key/value pairs of the attributes declared under the given namespace. */
    def attributesFromNamespace(namespace: String): Seq[(String, String)] =
      attributes.collect {
        case (`namespace`, k, v) =>
          k -> v
      }
    // key -> value lookup, ignoring namespaces (later keys win on collision)
    lazy val attributesMap = attributes.map { case (_, k, v) => k -> v }.toMap
    /** Looks up an attribute by key, returning an error message if absent. */
    def attribute(name: String): Either[String, String] =
      attributesMap.get(name) match {
        case None => Left(s"Missing attribute $name")
        case Some(value) => Right(value)
      }
  }
  object Node {
    /** A node with no content at all — neither text nor element. */
    val empty: Node =
      new Node {
        val isText = false
        val isElement = false
        val children = Nil
        val label = ""
        val attributes = Nil
        val textContent = ""
      }
  }
  /** Extractor matching text nodes and yielding their content. */
  object Text {
    def unapply(n: Node): Option[String] =
      if (n.isText) Some(n.textContent)
      else None
  }
  /** Returns the text content of the first child of `elem` labelled `label`,
    * or an error mentioning `description` if no such text is found. */
  def text(elem: Node, label: String, description: String): Either[String, String] =
    elem.children
      .find(_.label == label)
      .flatMap(_.children.collectFirst{case Text(t) => t})
      .toRight(s"$description not found")
  /** Parses a 14-digit `yyyyMMddHHmmss` timestamp (Maven metadata format). */
  def parseDateTime(s: String): Option[Versions.DateTime] =
    if (s.length == 14 && s.forall(_.isDigit))
      Some(Versions.DateTime(
        s.substring(0, 4).toInt,
        s.substring(4, 6).toInt,
        s.substring(6, 8).toInt,
        s.substring(8, 10).toInt,
        s.substring(10, 12).toInt,
        s.substring(12, 14).toInt
      ))
    else
      None
}

BIN
coursier

Binary file not shown.

View File

@ -1,29 +0,0 @@
@REM https://github.com/xerial/sbt-pack/blob/master/src/main/templates/launch-bat.mustache
@REM would be worth getting more inspiration from
@REM Downloads the coursier launcher next to this script on first run,
@REM then delegates to it with all command-line arguments.
@echo off
SET ERROR_CODE=0
@REM %~dp0 already ends with a backslash, so no separator is needed
SET "LAUNCHER_PATH=%~dp0coursier"
@REM Quote the path everywhere: the install directory may contain spaces
IF NOT EXIST "%LAUNCHER_PATH%" (
bitsadmin /transfer "DownloadCoursierLauncher" https://github.com/coursier/coursier/raw/master/coursier "%LAUNCHER_PATH%"
)
SET CMD_LINE_ARGS=%*
java -jar "%LAUNCHER_PATH%" %CMD_LINE_ARGS%
IF ERRORLEVEL 1 GOTO error
GOTO end
:error
SET ERROR_CODE=1
:end
SET LAUNCHER_PATH=
SET CMD_LINE_ARGS=
EXIT /B %ERROR_CODE%

@ -1 +0,0 @@
Subproject commit d302b1e93963c81ed511e072a52e95251b5d078b

Some files were not shown because too many files have changed in this diff Show More