mirror of https://github.com/sbt/sbt.git
Merge pull request #748 from coursier/topic/update-cli
Switch the cli module to scala 2.12 and case-app 2.0.x
This commit is contained in:
commit
335873a292
|
|
@ -10,18 +10,18 @@ script:
|
|||
# - bash <(curl -s https://codecov.io/bash)
|
||||
matrix:
|
||||
include:
|
||||
- env: SCALA_VERSION=2.12.4 PUBLISH=1
|
||||
- env: SCALA_VERSION=2.12.4 NATIVE=1 PUBLISH=1
|
||||
os: linux
|
||||
jdk: oraclejdk8
|
||||
sudo: required
|
||||
before_install:
|
||||
- curl https://raw.githubusercontent.com/scala-native/scala-native/v0.3.6/bin/travis_setup.sh | bash -x
|
||||
services:
|
||||
- docker
|
||||
- env: SCALA_VERSION=2.11.11 PUBLISH=1
|
||||
os: linux
|
||||
jdk: oraclejdk8
|
||||
sudo: required
|
||||
before_install:
|
||||
- curl https://raw.githubusercontent.com/scala-native/scala-native/e9df70ce671c58f34eab530a44875d6f818ba8fe/bin/travis_setup.sh | bash -x
|
||||
services:
|
||||
- docker
|
||||
- env: SCALA_VERSION=2.10.6 PUBLISH=1
|
||||
|
|
@ -57,7 +57,7 @@ matrix:
|
|||
jdk: oraclejdk8
|
||||
script:
|
||||
# Sanity check for Pants build path.
|
||||
- ./pants run cli/src/main/scala-2.11:coursier-cli -- fetch --help
|
||||
- ./pants run cli/src/main/scala-2.12:coursier-cli -- fetch --help
|
||||
env:
|
||||
global:
|
||||
- secure: miHFMwVRD/yjOLy794nOwc2lJTMyL5O0MXABT9ksg5ejQy1FrFVc2YH86Agp80W02/lGLGl0qWCiK1TBcs9q4Apt01nkD1a/0/iuTRm//bdhnu8BbRxFITf+2cyYJVytKPsF585aHldMv1rwZs3TDaTzEEecAEki5r50yyTVo7ycG0lVj9aVWXerKRMIT54Wb8M6nqbyRB1jGWT0ETNU13vOvQznPTUXQG5hsiKnGYRf8T3umOMdOHpV0rvdwYqAIMsikaAFcYCS5P/pLXMtmRHICH9KUG8TV/ST07p1BXtbBg9y1Q+lpnXotXh4ZNoWOp8B6v7fxJ/WlLYTDROWCiHJ4s2V4Di00db/nW4OWrEEBlrh7vJ/npZqyt9V9YeNv6alxi+DCESwusgvD4Cx5c3zh+2X6RB6BYwWHlFnd80rmsLe4R4fFUcc8E/ZR9vUFjP1CsQKqfJ5yfKR6V+n8jK8FjLpoaU9PHPo2H4V3FZM/fCLcxhE37vfaYI7/O7MqE/cdGpZIuz7g3c4toWCgNZJDn8iJCPmrgcbW5zbfDxvWU2K816ycgnUwSQ5dufrJpAbLNrjR1O8EPRkMDDp9bB7/4RVQvfDfP9GGoiHPHHgxGzY0Lf5bm+Bj1mRfB5/SXHd3IjhUCD9q7eD1/ANifEYALC5BJ4TB8RhQUPU8uM=
|
||||
|
|
|
|||
|
|
@ -4,8 +4,8 @@ jar_library(
|
|||
name = "scala-xml",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scala-xml",
|
||||
org = "org.scala-lang.modules",
|
||||
name = "scala-xml",
|
||||
rev = "1.0.6",
|
||||
),
|
||||
],
|
||||
|
|
@ -15,8 +15,8 @@ jar_library(
|
|||
name = "quasiQuotes",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "quasiquotes",
|
||||
org = "org.scalamacros",
|
||||
name = "quasiquotes",
|
||||
rev = "2.1.0",
|
||||
),
|
||||
],
|
||||
|
|
@ -26,8 +26,8 @@ jar_library(
|
|||
name = "fastParse",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "fastparse",
|
||||
org = "com.lihaoyi",
|
||||
name = "fastparse",
|
||||
rev = "1.0.0",
|
||||
),
|
||||
],
|
||||
|
|
@ -37,8 +37,8 @@ jar_library(
|
|||
name = "scalaz-concurrent",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scalaz-concurrent",
|
||||
org = "org.scalaz",
|
||||
name = "scalaz-concurrent",
|
||||
rev = "7.2.16",
|
||||
),
|
||||
],
|
||||
|
|
@ -48,8 +48,8 @@ jar_library(
|
|||
name = "scalaz-core",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scalaz-core",
|
||||
org = "org.scalaz",
|
||||
name = "scalaz-core",
|
||||
rev = "7.2.16",
|
||||
),
|
||||
],
|
||||
|
|
@ -58,8 +58,8 @@ jar_library(
|
|||
jar_library(
|
||||
name = "jsoup",
|
||||
jars = [jar(
|
||||
name = "jsoup",
|
||||
org = "org.jsoup",
|
||||
name = "jsoup",
|
||||
rev = "1.10.3",
|
||||
)],
|
||||
)
|
||||
|
|
@ -70,8 +70,8 @@ jar_library(
|
|||
name = "cli",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scalajs-cli",
|
||||
org = "org.scala-js",
|
||||
name = "scalajs-cli",
|
||||
rev = SCALAJS_REV,
|
||||
),
|
||||
],
|
||||
|
|
@ -81,8 +81,8 @@ jar_library(
|
|||
name = "compiler",
|
||||
jars = [
|
||||
jar(
|
||||
name = "scalajs-compiler_2.11.8",
|
||||
org = "org.scala-js",
|
||||
name = "scalajs-compiler_2.11.8",
|
||||
rev = SCALAJS_REV,
|
||||
),
|
||||
],
|
||||
|
|
@ -92,8 +92,8 @@ jar_library(
|
|||
name = "library",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scalajs-library",
|
||||
org = "org.scala-js",
|
||||
name = "scalajs-library",
|
||||
rev = SCALAJS_REV,
|
||||
),
|
||||
],
|
||||
|
|
@ -103,8 +103,8 @@ jar_library(
|
|||
name = "dom",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scalajs-dom_sjs0.6",
|
||||
org = "org.scala-js",
|
||||
name = "scalajs-dom_sjs0.6",
|
||||
rev = "0.9.1",
|
||||
),
|
||||
],
|
||||
|
|
@ -114,9 +114,9 @@ jar_library(
|
|||
name = "caseapp",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "case-app",
|
||||
org = "com.github.alexarchambault",
|
||||
rev = "1.1.3",
|
||||
name = "case-app",
|
||||
rev = "2.0.0-M3",
|
||||
),
|
||||
],
|
||||
)
|
||||
|
|
@ -125,9 +125,9 @@ jar_library(
|
|||
name = "argonaut-shapeless",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "argonaut-shapeless_6.2",
|
||||
org = "com.github.alexarchambault",
|
||||
rev = "1.2.0-M6",
|
||||
name = "argonaut-shapeless_6.2",
|
||||
rev = "1.2.0-M8",
|
||||
),
|
||||
],
|
||||
)
|
||||
|
|
@ -136,8 +136,8 @@ jar_library(
|
|||
name = "soc",
|
||||
jars = [
|
||||
jar(
|
||||
name = "directories",
|
||||
org = "io.github.soc",
|
||||
name = "directories",
|
||||
rev = "5",
|
||||
),
|
||||
],
|
||||
|
|
@ -146,15 +146,20 @@ jar_library(
|
|||
jar_library(
|
||||
name = "scala-native",
|
||||
jars = [
|
||||
jar(
|
||||
name = "nativelib_native0.3_2.11",
|
||||
scala_jar(
|
||||
org = "org.scala-native",
|
||||
rev = "0.3.3",
|
||||
name = "nir",
|
||||
rev = "0.3.6",
|
||||
),
|
||||
jar(
|
||||
name = "tools_2.11",
|
||||
org = "io.get-coursier.scala-native",
|
||||
rev = "0.3.0-coursier-1",
|
||||
scala_jar(
|
||||
org = "org.scala-native",
|
||||
name = "tools",
|
||||
rev = "0.3.6",
|
||||
),
|
||||
scala_jar(
|
||||
org = "org.scala-native",
|
||||
name = "util",
|
||||
rev = "0.3.6",
|
||||
),
|
||||
],
|
||||
)
|
||||
|
|
@ -163,8 +168,8 @@ jar_library(
|
|||
name = "utest",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "utest",
|
||||
org = "com.lihaoyi",
|
||||
name = "utest",
|
||||
rev = "0.5.4",
|
||||
),
|
||||
],
|
||||
|
|
@ -174,8 +179,8 @@ jar_library(
|
|||
name = "async",
|
||||
jars = [
|
||||
scala_jar(
|
||||
name = "scala-async",
|
||||
org = "org.scala-lang.modules",
|
||||
name = "scala-async",
|
||||
rev = "0.9.7",
|
||||
),
|
||||
],
|
||||
|
|
|
|||
|
|
@ -0,0 +1,21 @@
|
|||
jar_library(
|
||||
name = "scala-library",
|
||||
jars = [
|
||||
jar(
|
||||
org = "org.scala-lang",
|
||||
name = "scala-library",
|
||||
rev = "2.12.4",
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
jar_library(
|
||||
name = "scalac",
|
||||
jars = [
|
||||
jar(
|
||||
org = "org.scala-lang",
|
||||
name = "scala-compiler",
|
||||
rev = "2.12.4",
|
||||
),
|
||||
],
|
||||
)
|
||||
|
|
@ -15,10 +15,10 @@ install:
|
|||
- cmd: SET SBT_OPTS=-XX:MaxPermSize=2g -Xmx4g
|
||||
- git submodule update --init --recursive
|
||||
build_script:
|
||||
- sbt ++2.11.11 clean compile coreJVM/publishLocal cli/publishLocal
|
||||
- sbt ++2.11.12 clean compile coreJVM/publishLocal
|
||||
- sbt ++2.10.6 clean compile
|
||||
- sbt ++2.12.4 coreJVM/publishLocal cache/publishLocal extra/publishLocal sbt-shared/publishLocal # to make the scripted sbt 1.0 tests happy
|
||||
- sbt ++2.10.6 coreJVM/publishLocal cache/publishLocal extra/publishLocal sbt-shared/publishLocal # to make the scripted sbt 0.13 tests happy
|
||||
- sbt ++2.12.4 coreJVM/publishLocal cache/publishLocal cli/publishLocal extra/publishLocal sbt-shared/publishLocal
|
||||
- sbt ++2.10.6 coreJVM/publishLocal cache/publishLocal extra/publishLocal sbt-shared/publishLocal
|
||||
test_script:
|
||||
- ps: Start-Job -filepath .\scripts\start-it-auth-server.ps1 -ArgumentList $pwd
|
||||
- ps: Start-Sleep -s 15 # wait for the first server to have downloaded its dependencies
|
||||
|
|
|
|||
23
build.sbt
23
build.sbt
|
|
@ -3,8 +3,6 @@ import Aliases._
|
|||
import Settings._
|
||||
import Publish._
|
||||
|
||||
parallelExecution.in(Global) := false
|
||||
|
||||
lazy val core = crossProject
|
||||
.disablePlugins(ScriptedPlugin)
|
||||
.jvmConfigure(_.enablePlugins(ShadingPlugin))
|
||||
|
|
@ -126,12 +124,13 @@ lazy val extra = project
|
|||
coursierPrefix,
|
||||
shading,
|
||||
libs ++= {
|
||||
if (scalaBinaryVersion.value == "2.11")
|
||||
if (scalaBinaryVersion.value == "2.12")
|
||||
Seq(
|
||||
Deps.scalaNativeTools % "shaded",
|
||||
// brought by only tools, so should be automaticaly shaded,
|
||||
// but issues in ShadingPlugin (with things published locally?)
|
||||
// seem to require explicit shading...
|
||||
// Still applies?
|
||||
// brought by only tools, so should be automatically shaded,
|
||||
// but issues in ShadingPlugin (with things published locally?)
|
||||
// seem to require explicit shading...
|
||||
Deps.scalaNativeNir % "shaded",
|
||||
Deps.scalaNativeUtil % "shaded",
|
||||
Deps.fastParse % "shaded"
|
||||
|
|
@ -156,11 +155,11 @@ lazy val cli = project
|
|||
.enablePlugins(PackPlugin, SbtProguard)
|
||||
.settings(
|
||||
shared,
|
||||
dontPublishIn("2.10", "2.12"),
|
||||
dontPublishIn("2.10", "2.11"),
|
||||
coursierPrefix,
|
||||
unmanagedResources.in(Test) += packageBin.in(bootstrap).in(Compile).value,
|
||||
libs ++= {
|
||||
if (scalaBinaryVersion.value == "2.11")
|
||||
if (scalaBinaryVersion.value == "2.12")
|
||||
Seq(
|
||||
Deps.caseApp,
|
||||
Deps.argonautShapeless,
|
||||
|
|
@ -170,6 +169,12 @@ lazy val cli = project
|
|||
else
|
||||
Seq()
|
||||
},
|
||||
mainClass.in(Compile) := {
|
||||
if (scalaBinaryVersion.value == "2.12")
|
||||
Some("coursier.cli.Coursier")
|
||||
else
|
||||
None
|
||||
},
|
||||
addBootstrapJarAsResource,
|
||||
proguardedCli
|
||||
)
|
||||
|
|
@ -451,7 +456,7 @@ lazy val proguardedCli = Seq(
|
|||
javaOptions.in(Proguard, proguard) := Seq("-Xmx3172M"),
|
||||
artifactPath.in(Proguard) := proguardDirectory.in(Proguard).value / "coursier-standalone.jar",
|
||||
artifacts ++= {
|
||||
if (scalaBinaryVersion.value == "2.11")
|
||||
if (scalaBinaryVersion.value == "2.12")
|
||||
Seq(proguardedArtifact.value)
|
||||
else
|
||||
Nil
|
||||
|
|
|
|||
|
|
@ -1,246 +0,0 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, FileInputStream, IOException}
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.attribute.PosixFilePermission
|
||||
import java.util.Properties
|
||||
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
|
||||
|
||||
import caseapp._
|
||||
import coursier.cli.util.Zip
|
||||
import coursier.internal.FileUtil
|
||||
|
||||
case class Bootstrap(
|
||||
@Recurse
|
||||
artifactOptions: ArtifactOptions,
|
||||
@Recurse
|
||||
options: BootstrapOptions
|
||||
) extends App {
|
||||
|
||||
import scala.collection.JavaConverters._
|
||||
|
||||
val helper = new Helper(
|
||||
options.common,
|
||||
remainingArgs,
|
||||
isolated = options.isolated,
|
||||
warnBaseLoaderNotFound = false
|
||||
)
|
||||
|
||||
val output0 = new File(options.output)
|
||||
if (!options.force && output0.exists()) {
|
||||
Console.err.println(s"Error: ${options.output} already exists, use -f option to force erasing it.")
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
val mainClass =
|
||||
if (options.mainClass.isEmpty)
|
||||
helper.retainedMainClass
|
||||
else
|
||||
options.mainClass
|
||||
|
||||
if (options.native) {
|
||||
|
||||
val files = helper.fetch(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
)
|
||||
|
||||
val log: String => Unit =
|
||||
if (options.common.verbosityLevel >= 0)
|
||||
s => Console.err.println(s)
|
||||
else
|
||||
_ => ()
|
||||
|
||||
val tmpDir = new File(options.target)
|
||||
|
||||
try {
|
||||
coursier.extra.Native.create(
|
||||
mainClass,
|
||||
files,
|
||||
output0,
|
||||
tmpDir,
|
||||
log,
|
||||
verbosity = options.common.verbosityLevel
|
||||
)
|
||||
} finally {
|
||||
if (!options.keepTarget)
|
||||
coursier.extra.Native.deleteRecursive(tmpDir)
|
||||
}
|
||||
} else {
|
||||
|
||||
val (validProperties, wrongProperties) = options.property.partition(_.contains("="))
|
||||
if (wrongProperties.nonEmpty) {
|
||||
Console.err.println(s"Wrong -P / --property option(s):\n${wrongProperties.mkString("\n")}")
|
||||
sys.exit(255)
|
||||
}
|
||||
|
||||
val properties0 = validProperties.map { s =>
|
||||
val idx = s.indexOf('=')
|
||||
assert(idx >= 0)
|
||||
(s.take(idx), s.drop(idx + 1))
|
||||
}
|
||||
|
||||
val bootstrapJar =
|
||||
Option(Thread.currentThread().getContextClassLoader.getResourceAsStream("bootstrap.jar")) match {
|
||||
case Some(is) => Cache.readFullySync(is)
|
||||
case None =>
|
||||
Console.err.println(s"Error: bootstrap JAR not found")
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
val isolatedDeps = options.isolated.isolatedDeps(options.common.scalaVersion)
|
||||
|
||||
val (_, isolatedArtifactFiles) =
|
||||
options.isolated.targets.foldLeft((Vector.empty[String], Map.empty[String, (Seq[String], Seq[File])])) {
|
||||
case ((done, acc), target) =>
|
||||
val subRes = helper.res.subset(isolatedDeps.getOrElse(target, Nil).toSet)
|
||||
|
||||
val (done0, subUrls, subFiles) =
|
||||
if (options.standalone) {
|
||||
val subFiles0 = helper.fetch(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = artifactOptions.artifactTypes(sources = false, javadoc = false),
|
||||
subset = isolatedDeps.getOrElse(target, Seq.empty).toSet
|
||||
)
|
||||
|
||||
(done, Nil, subFiles0)
|
||||
} else {
|
||||
val subArtifacts0 = subRes.dependencyArtifacts.map(_._2)
|
||||
val artifactTypes = artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
val subArtifacts =
|
||||
if (artifactTypes("*"))
|
||||
subArtifacts0
|
||||
else
|
||||
subArtifacts0.filter(a => artifactTypes(a.`type`))
|
||||
val filteredSubArtifacts = subArtifacts.map(_.url).diff(done)
|
||||
(done ++ filteredSubArtifacts, filteredSubArtifacts, Nil)
|
||||
}
|
||||
|
||||
val updatedAcc = acc + (target -> (subUrls, subFiles))
|
||||
|
||||
(done0, updatedAcc)
|
||||
}
|
||||
|
||||
val (urls, files) =
|
||||
helper.fetchMap(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
).toList.foldLeft((List.empty[String], List.empty[File])){
|
||||
case ((urls, files), (url, file)) =>
|
||||
if (options.standalone) (urls, file :: files)
|
||||
else if (url.startsWith("file:/")) (urls, file :: files)
|
||||
else (url :: urls, files)
|
||||
}
|
||||
|
||||
val isolatedUrls = isolatedArtifactFiles.map { case (k, (v, _)) => k -> v }
|
||||
val isolatedFiles = isolatedArtifactFiles.map { case (k, (_, v)) => k -> v }
|
||||
|
||||
val buffer = new ByteArrayOutputStream
|
||||
|
||||
val bootstrapZip = new ZipInputStream(new ByteArrayInputStream(bootstrapJar))
|
||||
val outputZip = new ZipOutputStream(buffer)
|
||||
|
||||
for ((ent, data) <- Zip.zipEntries(bootstrapZip)) {
|
||||
outputZip.putNextEntry(ent)
|
||||
outputZip.write(data)
|
||||
outputZip.closeEntry()
|
||||
}
|
||||
|
||||
|
||||
val time = System.currentTimeMillis()
|
||||
|
||||
def putStringEntry(name: String, content: String): Unit = {
|
||||
val entry = new ZipEntry(name)
|
||||
entry.setTime(time)
|
||||
|
||||
outputZip.putNextEntry(entry)
|
||||
outputZip.write(content.getBytes("UTF-8"))
|
||||
outputZip.closeEntry()
|
||||
}
|
||||
|
||||
def putEntryFromFile(name: String, f: File): Unit = {
|
||||
val entry = new ZipEntry(name)
|
||||
entry.setTime(f.lastModified())
|
||||
|
||||
outputZip.putNextEntry(entry)
|
||||
outputZip.write(Cache.readFullySync(new FileInputStream(f)))
|
||||
outputZip.closeEntry()
|
||||
}
|
||||
|
||||
putStringEntry("bootstrap-jar-urls", urls.mkString("\n"))
|
||||
|
||||
if (options.isolated.anyIsolatedDep) {
|
||||
putStringEntry("bootstrap-isolation-ids", options.isolated.targets.mkString("\n"))
|
||||
|
||||
for (target <- options.isolated.targets) {
|
||||
val urls = isolatedUrls.getOrElse(target, Nil)
|
||||
val files = isolatedFiles.getOrElse(target, Nil)
|
||||
putStringEntry(s"bootstrap-isolation-$target-jar-urls", urls.mkString("\n"))
|
||||
putStringEntry(s"bootstrap-isolation-$target-jar-resources", files.map(pathFor).mkString("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
def pathFor(f: File) = s"jars/${f.getName}"
|
||||
|
||||
for (f <- files)
|
||||
putEntryFromFile(pathFor(f), f)
|
||||
|
||||
putStringEntry("bootstrap-jar-resources", files.map(pathFor).mkString("\n"))
|
||||
|
||||
val propsEntry = new ZipEntry("bootstrap.properties")
|
||||
propsEntry.setTime(time)
|
||||
|
||||
val properties = new Properties
|
||||
properties.setProperty("bootstrap.mainClass", mainClass)
|
||||
|
||||
outputZip.putNextEntry(propsEntry)
|
||||
properties.store(outputZip, "")
|
||||
outputZip.closeEntry()
|
||||
|
||||
outputZip.close()
|
||||
|
||||
// escaping of javaOpt possibly a bit loose :-|
|
||||
val shellPreamble = Seq(
|
||||
"#!/usr/bin/env sh",
|
||||
"exec java -jar " + options.javaOpt.map(s => "'" + s.replace("'", "\\'") + "'").mkString(" ") + " \"$0\" \"$@\""
|
||||
).mkString("", "\n", "\n")
|
||||
|
||||
try FileUtil.write(output0, shellPreamble.getBytes("UTF-8") ++ buffer.toByteArray)
|
||||
catch { case e: IOException =>
|
||||
Console.err.println(s"Error while writing $output0${Option(e.getMessage).fold("")(" (" + _ + ")")}")
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
try {
|
||||
val perms = Files.getPosixFilePermissions(output0.toPath).asScala.toSet
|
||||
|
||||
var newPerms = perms
|
||||
if (perms(PosixFilePermission.OWNER_READ))
|
||||
newPerms += PosixFilePermission.OWNER_EXECUTE
|
||||
if (perms(PosixFilePermission.GROUP_READ))
|
||||
newPerms += PosixFilePermission.GROUP_EXECUTE
|
||||
if (perms(PosixFilePermission.OTHERS_READ))
|
||||
newPerms += PosixFilePermission.OTHERS_EXECUTE
|
||||
|
||||
if (newPerms != perms)
|
||||
Files.setPosixFilePermissions(
|
||||
output0.toPath,
|
||||
newPerms.asJava
|
||||
)
|
||||
} catch {
|
||||
case e: UnsupportedOperationException =>
|
||||
// Ignored
|
||||
case e: IOException =>
|
||||
Console.err.println(
|
||||
s"Error while making $output0 executable" +
|
||||
Option(e.getMessage).fold("")(" (" + _ + ")")
|
||||
)
|
||||
sys.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,128 +1,6 @@
|
|||
package coursier
|
||||
package cli
|
||||
package coursier.cli
|
||||
|
||||
import caseapp._
|
||||
import caseapp.core.{ ArgsApp, CommandMessages, CommandsMessages }
|
||||
import caseapp.core.util.pascalCaseSplit
|
||||
import caseapp.util.AnnotationOption
|
||||
|
||||
import shapeless._
|
||||
import shapeless.labelled.FieldType
|
||||
import shapeless.union.Union
|
||||
|
||||
// Temporary, see comment in Coursier below
|
||||
final case class CoursierCommandHelper(
|
||||
command: CoursierCommandHelper.U
|
||||
) extends ArgsApp {
|
||||
def setRemainingArgs(remainingArgs: Seq[String], extraArgs: Seq[String]): Unit =
|
||||
command.unify.setRemainingArgs(remainingArgs, extraArgs)
|
||||
def remainingArgs: Seq[String] =
|
||||
command.unify.remainingArgs
|
||||
def apply(): Unit =
|
||||
command.unify.apply()
|
||||
}
|
||||
|
||||
object CoursierCommandHelper {
|
||||
type U = Union.`'bootstrap -> Bootstrap, 'fetch -> Fetch, 'launch -> Launch, 'resolve -> Resolve, 'sparksubmit -> SparkSubmit`.T
|
||||
|
||||
// Partially deriving these ones manually, to circumvent more-or-less random failures during auto derivation
|
||||
// Only running into those with the new custom sbt launcher though :-|
|
||||
|
||||
implicit def commandParser: CommandParser[CoursierCommandHelper] =
|
||||
CommandParser.ccons(
|
||||
Witness('bootstrap),
|
||||
AnnotationOption[CommandName, Bootstrap],
|
||||
Parser[Bootstrap],
|
||||
CommandParser.ccons(
|
||||
Witness('fetch),
|
||||
AnnotationOption[CommandName, Fetch],
|
||||
Parser[Fetch],
|
||||
CommandParser.ccons(
|
||||
Witness('launch),
|
||||
AnnotationOption[CommandName, Launch],
|
||||
Parser[Launch],
|
||||
CommandParser.ccons(
|
||||
Witness('resolve),
|
||||
AnnotationOption[CommandName, Resolve],
|
||||
Parser[Resolve],
|
||||
CommandParser.ccons(
|
||||
Witness('sparksubmit),
|
||||
AnnotationOption[CommandName, SparkSubmit],
|
||||
Parser[SparkSubmit],
|
||||
CommandParser.cnil
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
).map(CoursierCommandHelper(_))
|
||||
|
||||
|
||||
// Cut-n-pasted from caseapp.core.CommandsMessages.ccons, fixing the type of argsName
|
||||
private def commandsMessagesCCons[K <: Symbol, H, T <: Coproduct]
|
||||
(implicit
|
||||
key: Witness.Aux[K],
|
||||
commandName: AnnotationOption[CommandName, H],
|
||||
parser: Strict[Parser[H]],
|
||||
argsName: AnnotationOption[ArgsName, H],
|
||||
tail: CommandsMessages[T]
|
||||
): CommandsMessages[FieldType[K, H] :+: T] = {
|
||||
// FIXME Duplicated in CommandParser.ccons
|
||||
val name = commandName().map(_.commandName).getOrElse {
|
||||
pascalCaseSplit(key.value.name.toList.takeWhile(_ != '$'))
|
||||
.map(_.toLowerCase)
|
||||
.mkString("-")
|
||||
}
|
||||
|
||||
CommandsMessages((name -> CommandMessages(
|
||||
parser.value.args,
|
||||
argsName().map(_.argsName)
|
||||
)) +: tail.messages)
|
||||
}
|
||||
|
||||
|
||||
implicit def commandsMessages: CommandsMessages[CoursierCommandHelper] =
|
||||
CommandsMessages(
|
||||
commandsMessagesCCons(
|
||||
Witness('bootstrap),
|
||||
AnnotationOption[CommandName, Bootstrap],
|
||||
Parser[Bootstrap],
|
||||
AnnotationOption[ArgsName, Bootstrap],
|
||||
commandsMessagesCCons(
|
||||
Witness('fetch),
|
||||
AnnotationOption[CommandName, Fetch],
|
||||
Parser[Fetch],
|
||||
AnnotationOption[ArgsName, Fetch],
|
||||
commandsMessagesCCons(
|
||||
Witness('launch),
|
||||
AnnotationOption[CommandName, Launch],
|
||||
Parser[Launch],
|
||||
AnnotationOption[ArgsName, Launch],
|
||||
commandsMessagesCCons(
|
||||
Witness('resolve),
|
||||
AnnotationOption[CommandName, Resolve],
|
||||
Parser[Resolve],
|
||||
AnnotationOption[ArgsName, Resolve],
|
||||
commandsMessagesCCons(
|
||||
Witness('sparksubmit),
|
||||
AnnotationOption[CommandName, SparkSubmit],
|
||||
Parser[SparkSubmit],
|
||||
AnnotationOption[ArgsName, SparkSubmit],
|
||||
CommandsMessages.cnil
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
).messages
|
||||
)
|
||||
}
|
||||
|
||||
object Coursier extends CommandAppOf[
|
||||
// Temporary using CoursierCommandHelper instead of the union type, until case-app
|
||||
// supports the latter directly.
|
||||
// Union.`'bootstrap -> Bootstrap, 'fetch -> Fetch, 'launch -> Launch, 'resolve -> Resolve`.T
|
||||
CoursierCommandHelper
|
||||
] {
|
||||
override def appName = "Coursier"
|
||||
override def progName = "coursier"
|
||||
override def appVersion = coursier.util.Properties.version
|
||||
// dummy app to keep proguard quiet in 2.11
|
||||
object Coursier {
|
||||
def main(args: Array[String]): Unit = {}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,41 +0,0 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import java.io.File
|
||||
|
||||
import caseapp._
|
||||
|
||||
import scala.language.reflectiveCalls
|
||||
|
||||
case class Fetch(
|
||||
@Recurse
|
||||
options: FetchOptions
|
||||
) extends App {
|
||||
|
||||
val helper = new Helper(options.common, remainingArgs, ignoreErrors = options.artifactOptions.force)
|
||||
|
||||
val files0 = helper.fetch(
|
||||
sources = options.sources,
|
||||
javadoc = options.javadoc,
|
||||
artifactTypes = options.artifactOptions.artifactTypes(
|
||||
options.sources || options.common.classifier0("sources"),
|
||||
options.javadoc || options.common.classifier0("javadoc")
|
||||
)
|
||||
)
|
||||
|
||||
// Some progress lines seem to be scraped without this.
|
||||
Console.out.flush()
|
||||
|
||||
val out =
|
||||
if (options.classpath)
|
||||
files0
|
||||
.map(_.toString)
|
||||
.mkString(File.pathSeparator)
|
||||
else
|
||||
files0
|
||||
.map(_.toString)
|
||||
.mkString("\n")
|
||||
|
||||
println(out)
|
||||
|
||||
}
|
||||
|
|
@ -1,108 +0,0 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import java.io.File
|
||||
import java.net.{ URL, URLClassLoader }
|
||||
|
||||
import caseapp._
|
||||
|
||||
object Launch {
|
||||
|
||||
def run(
|
||||
loader: ClassLoader,
|
||||
mainClass: String,
|
||||
args: Seq[String],
|
||||
verbosity: Int,
|
||||
beforeMain: => Unit = ()
|
||||
): Unit = {
|
||||
|
||||
val cls =
|
||||
try loader.loadClass(mainClass)
|
||||
catch { case e: ClassNotFoundException =>
|
||||
Helper.errPrintln(s"Error: class $mainClass not found")
|
||||
sys.exit(255)
|
||||
}
|
||||
val method =
|
||||
try cls.getMethod("main", classOf[Array[String]])
|
||||
catch { case e: NoSuchMethodException =>
|
||||
Helper.errPrintln(s"Error: method main not found in $mainClass")
|
||||
sys.exit(255)
|
||||
}
|
||||
method.setAccessible(true)
|
||||
|
||||
if (verbosity >= 2)
|
||||
Helper.errPrintln(s"Launching $mainClass ${args.mkString(" ")}")
|
||||
else if (verbosity == 1)
|
||||
Helper.errPrintln(s"Launching")
|
||||
|
||||
beforeMain
|
||||
|
||||
Thread.currentThread().setContextClassLoader(loader)
|
||||
try method.invoke(null, args.toArray)
|
||||
catch {
|
||||
case e: java.lang.reflect.InvocationTargetException =>
|
||||
throw Option(e.getCause).getOrElse(e)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class IsolatedClassLoader(
|
||||
urls: Array[URL],
|
||||
parent: ClassLoader,
|
||||
isolationTargets: Array[String]
|
||||
) extends URLClassLoader(urls, parent) {
|
||||
|
||||
/**
|
||||
* Applications wanting to access an isolated `ClassLoader` should inspect the hierarchy of
|
||||
* loaders, and look into each of them for this method, by reflection. Then they should
|
||||
* call it (still by reflection), and look for an agreed in advance target in it. If it is found,
|
||||
* then the corresponding `ClassLoader` is the one with isolated dependencies.
|
||||
*/
|
||||
def getIsolationTargets: Array[String] = isolationTargets
|
||||
|
||||
}
|
||||
|
||||
// should be in case-app somehow
|
||||
trait ExtraArgsApp extends caseapp.core.DefaultArgsApp {
|
||||
private var remainingArgs1 = Seq.empty[String]
|
||||
private var extraArgs1 = Seq.empty[String]
|
||||
|
||||
override def setRemainingArgs(remainingArgs: Seq[String], extraArgs: Seq[String]): Unit = {
|
||||
remainingArgs1 = remainingArgs
|
||||
extraArgs1 = extraArgs
|
||||
}
|
||||
|
||||
override def remainingArgs: Seq[String] =
|
||||
remainingArgs1
|
||||
def extraArgs: Seq[String] =
|
||||
extraArgs1
|
||||
}
|
||||
|
||||
final case class Launch(
|
||||
@Recurse
|
||||
options: LaunchOptions
|
||||
) extends App with ExtraArgsApp {
|
||||
|
||||
val userArgs = extraArgs
|
||||
|
||||
val helper = new Helper(
|
||||
options.common,
|
||||
remainingArgs ++ options.isolated.rawIsolated.map { case (_, dep) => dep },
|
||||
extraJars = options.extraJars.map(new File(_)),
|
||||
isolated = options.isolated
|
||||
)
|
||||
|
||||
val mainClass =
|
||||
if (options.mainClass.isEmpty)
|
||||
helper.retainedMainClass
|
||||
else
|
||||
options.mainClass
|
||||
|
||||
Launch.run(
|
||||
helper.loader,
|
||||
mainClass,
|
||||
userArgs,
|
||||
options.common.verbosityLevel
|
||||
)
|
||||
}
|
||||
|
|
@ -1,304 +0,0 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
|
||||
|
||||
import coursier.core.ResolutionProcess
|
||||
import coursier.util.Parse
|
||||
|
||||
final case class CommonOptions(
|
||||
@Help("Keep optional dependencies (Maven)")
|
||||
keepOptional: Boolean = false,
|
||||
@Help("Download mode (default: missing, that is fetch things missing from cache)")
|
||||
@Value("offline|update-changing|update|missing|force")
|
||||
@Short("m")
|
||||
mode: String = "",
|
||||
@Help("TTL duration (e.g. \"24 hours\")")
|
||||
@Value("duration")
|
||||
@Short("l")
|
||||
ttl: String = "",
|
||||
@Help("Quiet output")
|
||||
@Short("q")
|
||||
quiet: Boolean = false,
|
||||
@Help("Increase verbosity (specify several times to increase more)")
|
||||
@Short("v")
|
||||
verbose: Int @@ Counter = Tag.of(0),
|
||||
@Help("Force display of progress bars")
|
||||
@Short("P")
|
||||
progress: Boolean = false,
|
||||
@Help("Maximum number of resolution iterations (specify a negative value for unlimited, default: 100)")
|
||||
@Short("N")
|
||||
maxIterations: Int = ResolutionProcess.defaultMaxIterations,
|
||||
@Help("Repository - for multiple repositories, separate with comma and/or add this option multiple times (e.g. -r central,ivy2local -r sonatype-snapshots, or equivalently -r central,ivy2local,sonatype-snapshots)")
|
||||
@Value("maven|sonatype:$repo|ivy2local|bintray:$org/$repo|bintray-ivy:$org/$repo|typesafe:ivy-$repo|typesafe:$repo|sbt-plugin:$repo|ivy:$pattern")
|
||||
@Short("r")
|
||||
repository: List[String] = Nil,
|
||||
@Help("Do not add default repositories (~/.ivy2/local, and Central)")
|
||||
noDefault: Boolean = false,
|
||||
@Help("Modify names in Maven repository paths for SBT plugins")
|
||||
sbtPluginHack: Boolean = true,
|
||||
@Help("Drop module attributes starting with 'info.' - these are sometimes used by projects built with SBT")
|
||||
dropInfoAttr: Boolean = false,
|
||||
@Help("Force module version")
|
||||
@Value("organization:name:forcedVersion")
|
||||
@Short("V")
|
||||
forceVersion: List[String] = Nil,
|
||||
@Help("Exclude module")
|
||||
@Value("organization:name")
|
||||
@Short("E")
|
||||
@Help("Global level exclude")
|
||||
exclude: List[String] = Nil,
|
||||
|
||||
@Short("x")
|
||||
@Help("Path to the local exclusion file. " +
|
||||
"Syntax: <org:name>--<org:name>. `--` means minus. Example file content:\n\t" +
|
||||
"\tcom.twitter.penguin:korean-text--com.twitter:util-tunable-internal_2.11\n\t" +
|
||||
"\torg.apache.commons:commons-math--com.twitter.search:core-query-nodes\n\t" +
|
||||
"Behavior: If root module A excludes module X, but root module B requires X, module X will still be fetched.")
|
||||
localExcludeFile: String = "",
|
||||
@Help("Default scala version")
|
||||
@Short("e")
|
||||
scalaVersion: String = scala.util.Properties.versionNumberString,
|
||||
@Help("Add intransitive dependencies")
|
||||
intransitive: List[String] = Nil,
|
||||
@Help("Classifiers that should be fetched")
|
||||
@Value("classifier1,classifier2,...")
|
||||
@Short("C")
|
||||
classifier: List[String] = Nil,
|
||||
@Help("Default configuration (default(compile) by default)")
|
||||
@Value("configuration")
|
||||
@Short("c")
|
||||
defaultConfiguration: String = "default(compile)",
|
||||
@Help("Maximum number of parallel downloads (default: 6)")
|
||||
@Short("n")
|
||||
parallel: Int = 6,
|
||||
@Help("Checksums")
|
||||
@Value("checksum1,checksum2,... - end with none to allow for no checksum validation if none are available")
|
||||
checksum: List[String] = Nil,
|
||||
@Help("Print the duration of each iteration of the resolution")
|
||||
@Short("B")
|
||||
@Value("Number of warm-up resolutions - if negative, doesn't print per iteration benchmark (less overhead)")
|
||||
benchmark: Int = 0,
|
||||
@Help("Print dependencies as a tree")
|
||||
@Short("t")
|
||||
tree: Boolean = false,
|
||||
@Help("Print dependencies as an inversed tree (dependees as children)")
|
||||
@Short("T")
|
||||
reverseTree: Boolean = false,
|
||||
@Help("Enable profile")
|
||||
@Value("profile")
|
||||
@Short("F")
|
||||
profile: List[String] = Nil,
|
||||
|
||||
@Help("Specify path for json output")
|
||||
@Short("j")
|
||||
jsonOutputFile: String = "",
|
||||
|
||||
@Help("Swap the mainline Scala JARs by Typelevel ones")
|
||||
typelevel: Boolean = false,
|
||||
@Recurse
|
||||
cacheOptions: CacheOptions = CacheOptions()
|
||||
) {
|
||||
val verbosityLevel = Tag.unwrap(verbose) - (if (quiet) 1 else 0)
|
||||
lazy val classifier0 = classifier.flatMap(_.split(',')).filter(_.nonEmpty).toSet
|
||||
}
|
||||
|
||||
/** Options shared by every command that reads or writes the local artifact cache. */
final case class CacheOptions(
  @Help("Cache directory (defaults to environment variable COURSIER_CACHE or ~/.coursier/cache/v1)")
  @Short("C")
    cache: String = Cache.default.toString
)
|
||||
|
||||
/**
 * Options describing dependencies that should be loaded in isolated class loaders,
 * grouped by user-chosen target IDs ("target:dependency" pairs).
 */
final case class IsolatedLoaderOptions(
  @Value("target:dependency")
  @Short("I")
    isolated: List[String] = Nil,
  @Help("Comma-separated isolation targets")
  @Short("i")
    isolateTarget: List[String] = Nil
) {

  /** Whether any isolation-related option was supplied at all. */
  def anyIsolatedDep = isolateTarget.nonEmpty || isolated.nonEmpty

  /**
   * Validated isolation target IDs. IDs containing ':' are rejected (exit code 255).
   * Falls back to a single "default" target when none were given.
   */
  lazy val targets = {
    val ids = isolateTarget
      .flatMap(_.split(','))
      .filter(_.nonEmpty)
    val (invalid, valid) = ids.partition(_.contains(":"))
    if (invalid.nonEmpty) {
      Console.err.println(s"Invalid target IDs:")
      invalid.foreach(t => Console.err.println(s"  $t"))
      sys.exit(255)
    }
    if (valid.isEmpty)
      Array("default")
    else
      valid.toArray
  }

  // --isolated values split into those whose prefix matches a known target and the rest.
  lazy val (validIsolated, unrecognizedIsolated) =
    isolated.partition(s => targets.exists(t => s.startsWith(t + ":")))

  /** Exits (255) if some --isolated values reference unknown targets. */
  def check() = {
    if (unrecognizedIsolated.nonEmpty) {
      Console.err.println(s"Unrecognized isolation targets in:")
      unrecognizedIsolated.foreach(i => Console.err.println(s"  $i"))
      sys.exit(255)
    }
  }

  /** (target, raw dependency string) pairs, splitting each value at the first ':'. */
  lazy val rawIsolated = validIsolated.map { s =>
    val Array(target, dep) = s.split(":", 2)
    target -> dep
  }

  /** Parsed (module, version) pairs per target; exits (255) on parse errors. */
  def isolatedModuleVersions(defaultScalaVersion: String) =
    rawIsolated.groupBy { case (t, _) => t }.map {
      case (t, l) =>
        val (errors, modVers) = Parse.moduleVersions(l.map { case (_, d) => d }, defaultScalaVersion)

        if (errors.nonEmpty) {
          errors.foreach(Console.err.println)
          sys.exit(255)
        }

        t -> modVers
    }

  /** Per-target dependencies, in the "runtime" configuration with blank attributes. */
  def isolatedDeps(defaultScalaVersion: String) =
    isolatedModuleVersions(defaultScalaVersion).map {
      case (t, l) =>
        t -> l.map {
          case (mod, ver) =>
            Dependency(
              mod,
              ver,
              configuration = "runtime",
              attributes = Attributes("", "")
            )
        }
    }

}
|
||||
|
||||
object ArtifactOptions {
  /** Artifact types retained when the user doesn't request any explicitly. */
  def defaultArtifactTypes = Set("jar", "bundle", "test-jar")
}
|
||||
|
||||
/** Options selecting which artifact types get downloaded. */
final case class ArtifactOptions(
  @Help("Artifact types that should be retained (e.g. jar, src, doc, etc.) - defaults to jar,bundle")
  @Value("type1,type2,...")
  @Short("A")
    artifactType: List[String] = Nil,
  @Help("Fetch artifacts even if the resolution is errored")
    force: Boolean = false
) {
  /**
   * Effective set of artifact types to keep.
   * With no explicit types: "src"/"doc" when sources/javadoc were requested,
   * else the defaults. An explicit "*" collapses to the wildcard set.
   */
  def artifactTypes(sources: Boolean, javadoc: Boolean) = {
    val requested = artifactType
      .flatMap(_.split(','))
      .filter(_.nonEmpty)
      .toSet

    if (requested.isEmpty) {
      if (sources || javadoc) {
        val srcTypes = if (sources) Set("src") else Set.empty[String]
        val docTypes = if (javadoc) Set("doc") else Set.empty[String]
        srcTypes ++ docTypes
      } else
        ArtifactOptions.defaultArtifactTypes
    } else if (requested("*"))
      Set("*")
    else
      requested
  }
}
|
||||
|
||||
/** Options of the `fetch` command. */
final case class FetchOptions(
  @Help("Fetch source artifacts")
  @Short("S")
    sources: Boolean = false,
  @Help("Fetch javadoc artifacts")
  @Short("D")
    javadoc: Boolean = false,
  @Help("Print java -cp compatible output")
  @Short("p")
    classpath: Boolean = false,
  @Recurse
    artifactOptions: ArtifactOptions = ArtifactOptions(),
  @Recurse
    common: CommonOptions = CommonOptions()
)
|
||||
|
||||
/** Options of the `launch` command. */
final case class LaunchOptions(
  @Short("M")
  @Short("main")
    mainClass: String = "",
  @Short("J")
  @Help("Extra JARs to be added to the classpath of the launched application. Directories accepted too.")
    extraJars: List[String] = Nil,
  @Recurse
    isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
  @Recurse
    common: CommonOptions = CommonOptions()
)
|
||||
|
||||
/** Options of the `bootstrap` command (launcher generation, JVM or native). */
final case class BootstrapOptions(
  @Short("M")
  @Short("main")
    mainClass: String = "",
  @Short("o")
    output: String = "bootstrap",
  @Short("f")
    force: Boolean = false,
  @Help("Generate a standalone launcher, with all JARs included, instead of one downloading its dependencies on startup.")
  @Short("s")
    standalone: Boolean = false,
  @Help("Set Java properties in the generated launcher.")
  @Value("key=value")
  @Short("D")
    property: List[String] = Nil,
  @Help("Set Java command-line options in the generated launcher.")
  @Value("option")
  @Short("J")
    javaOpt: List[String] = Nil,
  @Help("Generate native launcher")
  @Short("S")
    native: Boolean = false,
  @Help("Native compilation target directory")
  @Short("d")
    target: String = "native-target",
  @Help("Don't wipe native compilation target directory (for debug purposes)")
    keepTarget: Boolean = false,
  @Recurse
    isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
  @Recurse
    common: CommonOptions = CommonOptions()
)
|
||||
|
||||
/** Options of the `spark-submit` command. */
final case class SparkSubmitOptions(
  @Short("M")
  @Short("main")
  @Help("Main class to be launched (optional if in manifest)")
    mainClass: String = "",
  @Short("J")
  @Help("Extra JARs to be added in the classpath of the job")
    extraJars: List[String] = Nil,
  @Help("If master is yarn-cluster, write YARN app ID to a file. (The ID is deduced from the spark-submit output.)")
  @Value("file")
    yarnIdFile: String = "",
  @Help("Generate Spark Yarn assembly (Spark 1.x) or fetch Spark Yarn jars (Spark 2.x), and supply those to Spark via conf. (Default: true)")
    autoAssembly: Boolean = true,
  @Help("Include default dependencies in Spark Yarn assembly or jars (see --auto-assembly). If --auto-assembly is false, the corresponding dependencies will still be shunted from the job classpath if this option is true. (Default: same as --auto-assembly)")
    defaultAssemblyDependencies: Option[Boolean] = None,
  assemblyDependencies: List[String] = Nil,
  sparkAssemblyDependencies: List[String] = Nil,
  noDefaultSubmitDependencies: Boolean = false,
  submitDependencies: List[String] = Nil,
  @Help("Spark version - if empty, deduced from the job classpath. (Default: empty)")
    sparkVersion: String = "",
  @Help("YARN version - only used with Spark 2. (Default: 2.7.3)")
    yarnVersion: String = "2.7.3",
  @Help("Maximum idle time of spark-submit (time with no output). Exit early if no output from spark-submit for more than this duration. Set to 0 for unlimited. (Default: 0)")
  @Value("seconds")
    maxIdleTime: Int = 0,
  @Recurse
    artifactOptions: ArtifactOptions = ArtifactOptions(),
  @Recurse
    common: CommonOptions = CommonOptions()
)
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import caseapp._
|
||||
|
||||
final case class Resolve(
|
||||
@Recurse
|
||||
common: CommonOptions
|
||||
) extends App {
|
||||
|
||||
// the `val helper = ` part is needed because of DelayedInit it seems
|
||||
val helper = new Helper(common, remainingArgs, printResultStdout = true)
|
||||
|
||||
}
|
||||
|
|
@ -1,352 +0,0 @@
|
|||
package coursier.cli
|
||||
|
||||
import java.io.{PrintStream, BufferedReader, File, PipedInputStream, PipedOutputStream, InputStream, InputStreamReader}
|
||||
import java.net.URLClassLoader
|
||||
|
||||
import caseapp._
|
||||
|
||||
import coursier.{ Attributes, Dependency }
|
||||
import coursier.cli.spark.{ Assembly, Submit }
|
||||
import coursier.internal.FileUtil
|
||||
import coursier.util.Parse
|
||||
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
object SparkSubmit {
|
||||
|
||||
def scalaSparkVersions(dependencies: Iterable[Dependency]): Either[String, (String, String)] = {
|
||||
|
||||
val sparkCoreMods = dependencies.collect {
|
||||
case dep if dep.module.organization == "org.apache.spark" &&
|
||||
(dep.module.name == "spark-core_2.10" || dep.module.name == "spark-core_2.11") =>
|
||||
(dep.module, dep.version)
|
||||
}
|
||||
|
||||
if (sparkCoreMods.isEmpty)
|
||||
Left("Cannot find spark among dependencies")
|
||||
else if (sparkCoreMods.size == 1) {
|
||||
val scalaVersion = sparkCoreMods.head._1.name match {
|
||||
case "spark-core_2.10" => "2.10"
|
||||
case "spark-core_2.11" => "2.11"
|
||||
case _ => throw new Exception("Cannot happen")
|
||||
}
|
||||
|
||||
val sparkVersion = sparkCoreMods.head._2
|
||||
|
||||
Right((scalaVersion, sparkVersion))
|
||||
} else
|
||||
Left(s"Found several spark code modules among dependencies (${sparkCoreMods.mkString(", ")})")
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Submits spark applications.
|
||||
*
|
||||
* Can be run with no spark distributions around.
|
||||
*
|
||||
* @author Alexandre Archambault
|
||||
* @author Han Ju
|
||||
*/
|
||||
@CommandName("spark-submit")
|
||||
final case class SparkSubmit(
|
||||
@Recurse
|
||||
options: SparkSubmitOptions
|
||||
) extends App with ExtraArgsApp {
|
||||
|
||||
val rawExtraJars = options.extraJars.map(new File(_))
|
||||
|
||||
val extraDirs = rawExtraJars.filter(_.isDirectory)
|
||||
if (extraDirs.nonEmpty) {
|
||||
Console.err.println(s"Error: directories not allowed in extra job JARs.")
|
||||
Console.err.println(extraDirs.map(" " + _).mkString("\n"))
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
val helper: Helper = new Helper(
|
||||
options.common,
|
||||
remainingArgs,
|
||||
extraJars = rawExtraJars
|
||||
)
|
||||
val jars =
|
||||
helper.fetch(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
) ++ options.extraJars.map(new File(_))
|
||||
|
||||
val (scalaVersion, sparkVersion) =
|
||||
if (options.sparkVersion.isEmpty)
|
||||
SparkSubmit.scalaSparkVersions(helper.res.dependencies) match {
|
||||
case Left(err) =>
|
||||
Console.err.println(
|
||||
s"Cannot get spark / scala versions from dependencies: $err\n" +
|
||||
"Set them via --scala-version or --spark-version"
|
||||
)
|
||||
sys.exit(1)
|
||||
case Right(versions) => versions
|
||||
}
|
||||
else
|
||||
(options.common.scalaVersion, options.sparkVersion)
|
||||
|
||||
val (sparkYarnExtraConf, sparkBaseJars) =
|
||||
if (!options.autoAssembly || sparkVersion.startsWith("2.")) {
|
||||
|
||||
val assemblyJars = Assembly.sparkJars(
|
||||
scalaVersion,
|
||||
sparkVersion,
|
||||
options.yarnVersion,
|
||||
options.defaultAssemblyDependencies.getOrElse(options.autoAssembly),
|
||||
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
|
||||
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
|
||||
options.common,
|
||||
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
)
|
||||
|
||||
val extraConf =
|
||||
if (options.autoAssembly && sparkVersion.startsWith("2."))
|
||||
Seq(
|
||||
"spark.yarn.jars" -> assemblyJars.map(_.getAbsolutePath).mkString(",")
|
||||
)
|
||||
else
|
||||
Nil
|
||||
|
||||
(extraConf, assemblyJars)
|
||||
} else {
|
||||
|
||||
val assemblyAndJarsOrError = Assembly.spark(
|
||||
scalaVersion,
|
||||
sparkVersion,
|
||||
options.yarnVersion,
|
||||
options.defaultAssemblyDependencies.getOrElse(true),
|
||||
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
|
||||
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
|
||||
options.common,
|
||||
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
)
|
||||
|
||||
val (assembly, assemblyJars) = assemblyAndJarsOrError match {
|
||||
case Left(err) =>
|
||||
Console.err.println(s"Cannot get spark assembly: $err")
|
||||
sys.exit(1)
|
||||
case Right(res) => res
|
||||
}
|
||||
|
||||
val extraConf = Seq(
|
||||
"spark.yarn.jar" -> assembly.getAbsolutePath
|
||||
)
|
||||
|
||||
(extraConf, assemblyJars)
|
||||
}
|
||||
|
||||
|
||||
val idx = {
|
||||
val idx0 = extraArgs.indexOf("--")
|
||||
if (idx0 < 0)
|
||||
extraArgs.length
|
||||
else
|
||||
idx0
|
||||
}
|
||||
|
||||
assert(idx >= 0)
|
||||
|
||||
val sparkOpts = extraArgs.take(idx)
|
||||
val jobArgs = extraArgs.drop(idx + 1)
|
||||
|
||||
val mainClass =
|
||||
if (options.mainClass.isEmpty)
|
||||
helper.retainedMainClass
|
||||
else
|
||||
options.mainClass
|
||||
|
||||
val mainJar = helper
|
||||
.loader
|
||||
.loadClass(mainClass) // FIXME Check for errors, provide a nicer error message in that case
|
||||
.getProtectionDomain
|
||||
.getCodeSource
|
||||
.getLocation
|
||||
.getPath // TODO Safety check: protocol must be file
|
||||
|
||||
val (check, extraJars0) = jars.partition(_.getAbsolutePath == mainJar)
|
||||
|
||||
val extraJars = extraJars0.filterNot(sparkBaseJars.toSet)
|
||||
|
||||
if (check.isEmpty)
|
||||
Console.err.println(
|
||||
s"Warning: cannot find back $mainJar among the dependencies JARs (likely a coursier bug)"
|
||||
)
|
||||
|
||||
val extraSparkOpts = sparkYarnExtraConf.flatMap {
|
||||
case (k, v) => Seq(
|
||||
"--conf", s"$k=$v"
|
||||
)
|
||||
}
|
||||
|
||||
val extraJarsOptions =
|
||||
if (extraJars.isEmpty)
|
||||
Nil
|
||||
else
|
||||
Seq("--jars", extraJars.mkString(","))
|
||||
|
||||
val mainClassOptions = Seq("--class", mainClass)
|
||||
|
||||
val sparkSubmitOptions = sparkOpts ++ extraSparkOpts ++ extraJarsOptions ++ mainClassOptions ++
|
||||
Seq(mainJar) ++ jobArgs
|
||||
|
||||
val submitCp = Submit.cp(
|
||||
scalaVersion,
|
||||
sparkVersion,
|
||||
options.noDefaultSubmitDependencies,
|
||||
options.submitDependencies.flatMap(_.split(",")).filter(_.nonEmpty),
|
||||
options.artifactOptions.artifactTypes(sources = false, javadoc = false),
|
||||
options.common
|
||||
)
|
||||
|
||||
val submitLoader = new URLClassLoader(
|
||||
submitCp.map(_.toURI.toURL).toArray,
|
||||
helper.baseLoader
|
||||
)
|
||||
|
||||
Launch.run(
|
||||
submitLoader,
|
||||
Submit.mainClassName,
|
||||
sparkSubmitOptions,
|
||||
options.common.verbosityLevel,
|
||||
{
|
||||
if (options.common.verbosityLevel >= 1)
|
||||
Console.err.println(
|
||||
s"Launching spark-submit with arguments:\n" +
|
||||
sparkSubmitOptions.map(" " + _).mkString("\n")
|
||||
)
|
||||
|
||||
OutputHelper.handleOutput(
|
||||
Some(options.yarnIdFile).filter(_.nonEmpty).map(new File(_)),
|
||||
Some(options.maxIdleTime).filter(_ > 0)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
object OutputHelper {
|
||||
|
||||
def outputInspectThread(
|
||||
name: String,
|
||||
from: InputStream,
|
||||
to: PrintStream,
|
||||
handlers: Seq[String => Unit]
|
||||
) = {
|
||||
|
||||
val t = new Thread {
|
||||
override def run() = {
|
||||
val in = new BufferedReader(new InputStreamReader(from))
|
||||
var line: String = null
|
||||
while ({
|
||||
line = in.readLine()
|
||||
line != null
|
||||
}) {
|
||||
to.println(line)
|
||||
handlers.foreach(_(line))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
t.setName(name)
|
||||
t.setDaemon(true)
|
||||
|
||||
t
|
||||
}
|
||||
|
||||
|
||||
def handleOutput(yarnAppFileOpt: Option[File], maxIdleTimeOpt: Option[Int]): Unit = {
|
||||
|
||||
var handlers = Seq.empty[String => Unit]
|
||||
var threads = Seq.empty[Thread]
|
||||
|
||||
for (yarnAppFile <- yarnAppFileOpt) {
|
||||
|
||||
val Pattern = ".*Application report for ([^ ]+) .*".r
|
||||
|
||||
@volatile var written = false
|
||||
val lock = new AnyRef
|
||||
def handleMessage(s: String): Unit =
|
||||
if (!written)
|
||||
s match {
|
||||
case Pattern(id) =>
|
||||
lock.synchronized {
|
||||
if (!written) {
|
||||
println(s"Detected YARN app ID $id")
|
||||
Option(yarnAppFile.getParentFile).foreach(_.mkdirs())
|
||||
FileUtil.write(yarnAppFile, id.getBytes("UTF-8"))
|
||||
written = true
|
||||
}
|
||||
}
|
||||
case _ =>
|
||||
}
|
||||
|
||||
val f = { line: String =>
|
||||
try handleMessage(line)
|
||||
catch {
|
||||
case NonFatal(_) =>
|
||||
}
|
||||
}
|
||||
|
||||
handlers = handlers :+ f
|
||||
}
|
||||
|
||||
for (maxIdleTime <- maxIdleTimeOpt if maxIdleTime > 0) {
|
||||
|
||||
@volatile var lastMessageTs = -1L
|
||||
|
||||
def updateLastMessageTs() = {
|
||||
lastMessageTs = System.currentTimeMillis()
|
||||
}
|
||||
|
||||
val checkThread = new Thread {
|
||||
override def run() =
|
||||
try {
|
||||
while (true) {
|
||||
lastMessageTs = -1L
|
||||
Thread.sleep(maxIdleTime * 1000L)
|
||||
if (lastMessageTs < 0) {
|
||||
Console.err.println(s"No output from spark-submit for more than $maxIdleTime s, exiting")
|
||||
sys.exit(1)
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
case t: Throwable =>
|
||||
Console.err.println(s"Caught $t in check spark-submit output thread!")
|
||||
throw t
|
||||
}
|
||||
}
|
||||
|
||||
checkThread.setName("check-spark-submit-output")
|
||||
checkThread.setDaemon(true)
|
||||
|
||||
threads = threads :+ checkThread
|
||||
|
||||
val f = { line: String =>
|
||||
updateLastMessageTs()
|
||||
}
|
||||
|
||||
handlers = handlers :+ f
|
||||
}
|
||||
|
||||
def createThread(name: String, replaces: PrintStream, install: PrintStream => Unit): Thread = {
|
||||
val in = new PipedInputStream
|
||||
val out = new PipedOutputStream(in)
|
||||
install(new PrintStream(out))
|
||||
outputInspectThread(name, in, replaces, handlers)
|
||||
}
|
||||
|
||||
if (handlers.nonEmpty) {
|
||||
threads = threads ++ Seq(
|
||||
createThread("inspect-out", System.out, System.setOut),
|
||||
createThread("inspect-err", System.err, System.setErr)
|
||||
)
|
||||
|
||||
threads.foreach(_.start())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -6,10 +6,11 @@ scala_library(
|
|||
"cache/src/main/scala:cache",
|
||||
"core:core",
|
||||
"extra/src/main/scala/coursier/extra:extra",
|
||||
"extra/src/main/scala-2.11/coursier/extra:native",
|
||||
"extra/src/main/scala-2.12/coursier/extra:native",
|
||||
":util",
|
||||
],
|
||||
sources = globs(
|
||||
"coursier/cli/options/*.scala",
|
||||
"coursier/cli/scaladex/*.scala",
|
||||
"coursier/cli/spark/*.scala",
|
||||
"coursier/cli/*.scala",
|
||||
|
|
@ -0,0 +1,245 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, FileInputStream, IOException}
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.attribute.PosixFilePermission
|
||||
import java.util.Properties
|
||||
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
|
||||
|
||||
import caseapp._
|
||||
import coursier.cli.options.BootstrapOptions
|
||||
import coursier.cli.util.Zip
|
||||
import coursier.internal.FileUtil
|
||||
|
||||
import scala.collection.JavaConverters._
|
||||
|
||||
object Bootstrap extends CaseApp[BootstrapOptions] {
|
||||
|
||||
def run(options: BootstrapOptions, args: RemainingArgs): Unit = {
|
||||
|
||||
val helper = new Helper(
|
||||
options.options.common,
|
||||
args.all,
|
||||
isolated = options.options.isolated,
|
||||
warnBaseLoaderNotFound = false
|
||||
)
|
||||
|
||||
val output0 = new File(options.options.output)
|
||||
if (!options.options.force && output0.exists()) {
|
||||
Console.err.println(s"Error: ${options.options.output} already exists, use -f option to force erasing it.")
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
val mainClass =
|
||||
if (options.options.mainClass.isEmpty)
|
||||
helper.retainedMainClass
|
||||
else
|
||||
options.options.mainClass
|
||||
|
||||
if (options.options.native) {
|
||||
|
||||
val files = helper.fetch(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
)
|
||||
|
||||
val log: String => Unit =
|
||||
if (options.options.common.verbosityLevel >= 0)
|
||||
s => Console.err.println(s)
|
||||
else
|
||||
_ => ()
|
||||
|
||||
val tmpDir = new File(options.options.target)
|
||||
|
||||
try {
|
||||
coursier.extra.Native.create(
|
||||
mainClass,
|
||||
files,
|
||||
output0,
|
||||
tmpDir,
|
||||
log,
|
||||
verbosity = options.options.common.verbosityLevel
|
||||
)
|
||||
} finally {
|
||||
if (!options.options.keepTarget)
|
||||
coursier.extra.Native.deleteRecursive(tmpDir)
|
||||
}
|
||||
} else {
|
||||
|
||||
val (validProperties, wrongProperties) = options.options.property.partition(_.contains("="))
|
||||
if (wrongProperties.nonEmpty) {
|
||||
Console.err.println(s"Wrong -P / --property option(s):\n${wrongProperties.mkString("\n")}")
|
||||
sys.exit(255)
|
||||
}
|
||||
|
||||
val properties0 = validProperties.map { s =>
|
||||
val idx = s.indexOf('=')
|
||||
assert(idx >= 0)
|
||||
(s.take(idx), s.drop(idx + 1))
|
||||
}
|
||||
|
||||
val bootstrapJar =
|
||||
Option(Thread.currentThread().getContextClassLoader.getResourceAsStream("bootstrap.jar")) match {
|
||||
case Some(is) => Cache.readFullySync(is)
|
||||
case None =>
|
||||
Console.err.println(s"Error: bootstrap JAR not found")
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
val isolatedDeps = options.options.isolated.isolatedDeps(options.options.common.scalaVersion)
|
||||
|
||||
val (_, isolatedArtifactFiles) =
|
||||
options.options.isolated.targets.foldLeft((Vector.empty[String], Map.empty[String, (Seq[String], Seq[File])])) {
|
||||
case ((done, acc), target) =>
|
||||
val subRes = helper.res.subset(isolatedDeps.getOrElse(target, Nil).toSet)
|
||||
|
||||
val (done0, subUrls, subFiles) =
|
||||
if (options.options.standalone) {
|
||||
val subFiles0 = helper.fetch(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false),
|
||||
subset = isolatedDeps.getOrElse(target, Seq.empty).toSet
|
||||
)
|
||||
|
||||
(done, Nil, subFiles0)
|
||||
} else {
|
||||
val subArtifacts0 = subRes.dependencyArtifacts.map(_._2)
|
||||
val artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
val subArtifacts =
|
||||
if (artifactTypes("*"))
|
||||
subArtifacts0
|
||||
else
|
||||
subArtifacts0.filter(a => artifactTypes(a.`type`))
|
||||
val filteredSubArtifacts = subArtifacts.map(_.url).diff(done)
|
||||
(done ++ filteredSubArtifacts, filteredSubArtifacts, Nil)
|
||||
}
|
||||
|
||||
val updatedAcc = acc + (target -> (subUrls, subFiles))
|
||||
|
||||
(done0, updatedAcc)
|
||||
}
|
||||
|
||||
val (urls, files) =
|
||||
helper.fetchMap(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
).toList.foldLeft((List.empty[String], List.empty[File])){
|
||||
case ((urls, files), (url, file)) =>
|
||||
if (options.options.standalone) (urls, file :: files)
|
||||
else if (url.startsWith("file:/")) (urls, file :: files)
|
||||
else (url :: urls, files)
|
||||
}
|
||||
|
||||
val isolatedUrls = isolatedArtifactFiles.map { case (k, (v, _)) => k -> v }
|
||||
val isolatedFiles = isolatedArtifactFiles.map { case (k, (_, v)) => k -> v }
|
||||
|
||||
val buffer = new ByteArrayOutputStream
|
||||
|
||||
val bootstrapZip = new ZipInputStream(new ByteArrayInputStream(bootstrapJar))
|
||||
val outputZip = new ZipOutputStream(buffer)
|
||||
|
||||
for ((ent, data) <- Zip.zipEntries(bootstrapZip)) {
|
||||
outputZip.putNextEntry(ent)
|
||||
outputZip.write(data)
|
||||
outputZip.closeEntry()
|
||||
}
|
||||
|
||||
|
||||
val time = System.currentTimeMillis()
|
||||
|
||||
def putStringEntry(name: String, content: String): Unit = {
|
||||
val entry = new ZipEntry(name)
|
||||
entry.setTime(time)
|
||||
|
||||
outputZip.putNextEntry(entry)
|
||||
outputZip.write(content.getBytes("UTF-8"))
|
||||
outputZip.closeEntry()
|
||||
}
|
||||
|
||||
def putEntryFromFile(name: String, f: File): Unit = {
|
||||
val entry = new ZipEntry(name)
|
||||
entry.setTime(f.lastModified())
|
||||
|
||||
outputZip.putNextEntry(entry)
|
||||
outputZip.write(Cache.readFullySync(new FileInputStream(f)))
|
||||
outputZip.closeEntry()
|
||||
}
|
||||
|
||||
putStringEntry("bootstrap-jar-urls", urls.mkString("\n"))
|
||||
|
||||
if (options.options.isolated.anyIsolatedDep) {
|
||||
putStringEntry("bootstrap-isolation-ids", options.options.isolated.targets.mkString("\n"))
|
||||
|
||||
for (target <- options.options.isolated.targets) {
|
||||
val urls = isolatedUrls.getOrElse(target, Nil)
|
||||
val files = isolatedFiles.getOrElse(target, Nil)
|
||||
putStringEntry(s"bootstrap-isolation-$target-jar-urls", urls.mkString("\n"))
|
||||
putStringEntry(s"bootstrap-isolation-$target-jar-resources", files.map(pathFor).mkString("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
def pathFor(f: File) = s"jars/${f.getName}"
|
||||
|
||||
for (f <- files)
|
||||
putEntryFromFile(pathFor(f), f)
|
||||
|
||||
putStringEntry("bootstrap-jar-resources", files.map(pathFor).mkString("\n"))
|
||||
|
||||
val propsEntry = new ZipEntry("bootstrap.properties")
|
||||
propsEntry.setTime(time)
|
||||
|
||||
val properties = new Properties
|
||||
properties.setProperty("bootstrap.mainClass", mainClass)
|
||||
|
||||
outputZip.putNextEntry(propsEntry)
|
||||
properties.store(outputZip, "")
|
||||
outputZip.closeEntry()
|
||||
|
||||
outputZip.close()
|
||||
|
||||
// escaping of javaOpt possibly a bit loose :-|
|
||||
val shellPreamble = Seq(
|
||||
"#!/usr/bin/env sh",
|
||||
"exec java -jar " + options.options.javaOpt.map(s => "'" + s.replace("'", "\\'") + "'").mkString(" ") + " \"$0\" \"$@\""
|
||||
).mkString("", "\n", "\n")
|
||||
|
||||
try FileUtil.write(output0, shellPreamble.getBytes("UTF-8") ++ buffer.toByteArray)
|
||||
catch { case e: IOException =>
|
||||
Console.err.println(s"Error while writing $output0${Option(e.getMessage).fold("")(" (" + _ + ")")}")
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
try {
|
||||
val perms = Files.getPosixFilePermissions(output0.toPath).asScala.toSet
|
||||
|
||||
var newPerms = perms
|
||||
if (perms(PosixFilePermission.OWNER_READ))
|
||||
newPerms += PosixFilePermission.OWNER_EXECUTE
|
||||
if (perms(PosixFilePermission.GROUP_READ))
|
||||
newPerms += PosixFilePermission.GROUP_EXECUTE
|
||||
if (perms(PosixFilePermission.OTHERS_READ))
|
||||
newPerms += PosixFilePermission.OTHERS_EXECUTE
|
||||
|
||||
if (newPerms != perms)
|
||||
Files.setPosixFilePermissions(
|
||||
output0.toPath,
|
||||
newPerms.asJava
|
||||
)
|
||||
} catch {
|
||||
case e: UnsupportedOperationException =>
|
||||
// Ignored
|
||||
case e: IOException =>
|
||||
Console.err.println(
|
||||
s"Error while making $output0 executable" +
|
||||
Option(e.getMessage).fold("")(" (" + _ + ")")
|
||||
)
|
||||
sys.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,6 +1,29 @@
|
|||
package coursier.cli
|
||||
package coursier
|
||||
package cli
|
||||
|
||||
import caseapp.core.app.CommandAppA
|
||||
import shapeless._
|
||||
|
||||
object Coursier extends CommandAppA(CoursierCommand.parser, CoursierCommand.help) {
|
||||
|
||||
override val appName = "Coursier"
|
||||
override val progName = "coursier"
|
||||
override val appVersion = coursier.util.Properties.version
|
||||
|
||||
def runA =
|
||||
args => {
|
||||
case Inl(bootstrapOptions) =>
|
||||
Bootstrap.run(bootstrapOptions, args)
|
||||
case Inr(Inl(fetchOptions)) =>
|
||||
Fetch.run(fetchOptions, args)
|
||||
case Inr(Inr(Inl(launchOptions))) =>
|
||||
Launch.run(launchOptions, args)
|
||||
case Inr(Inr(Inr(Inl(resolveOptions)))) =>
|
||||
Resolve.run(resolveOptions, args)
|
||||
case Inr(Inr(Inr(Inr(Inl(sparkSubmitOptions))))) =>
|
||||
SparkSubmit.run(sparkSubmitOptions, args)
|
||||
case Inr(Inr(Inr(Inr(Inr(cnil))))) =>
|
||||
cnil.impossible
|
||||
}
|
||||
|
||||
// dummy app to keep proguard quiet in 2.12
|
||||
object Coursier {
|
||||
def main(args: Array[String]): Unit = {}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,26 @@
|
|||
package coursier.cli
|
||||
|
||||
import caseapp.CommandParser
|
||||
import caseapp.core.help.CommandsHelp
|
||||
|
||||
/** Aggregates the CLI sub-commands into a single case-app parser and help. */
object CoursierCommand {

  // The nil/add builder accumulates in reverse; `.reverse` presumably restores
  // the declaration order below — confirm against case-app's CommandParser docs.
  val parser =
    CommandParser.nil
      .add(Bootstrap)
      .add(Fetch)
      .add(Launch)
      .add(Resolve)
      .add(SparkSubmit)
      .reverse

  val help =
    CommandsHelp.nil
      .add(Bootstrap)
      .add(Fetch)
      .add(Launch)
      .add(Resolve)
      .add(SparkSubmit)
      .reverse

}
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import java.io.File
|
||||
|
||||
import caseapp._
|
||||
import coursier.cli.options.FetchOptions
|
||||
|
||||
import scala.language.reflectiveCalls
|
||||
|
||||
/** Performs the resolution and artifact download for the `fetch` command. */
final class Fetch(options: FetchOptions, args: RemainingArgs) {

  // Resolution errors are tolerated when --force was passed.
  val helper = new Helper(options.common, args.all, ignoreErrors = options.artifactOptions.force)

  // Files fetched for the requested artifact types; a "sources"/"javadoc"
  // classifier counts the same as the corresponding flag.
  val files0 = {
    val wantSources = options.sources || options.common.classifier0("sources")
    val wantJavadoc = options.javadoc || options.common.classifier0("javadoc")
    helper.fetch(
      sources = options.sources,
      javadoc = options.javadoc,
      artifactTypes = options.artifactOptions.artifactTypes(wantSources, wantJavadoc)
    )
  }

}
|
||||
|
||||
/** `fetch` command entry point: resolves, downloads, and prints the file paths. */
object Fetch extends CaseApp[FetchOptions] {

  def apply(options: FetchOptions, args: RemainingArgs): Fetch =
    new Fetch(options, args)

  def run(options: FetchOptions, args: RemainingArgs): Unit = {

    val fetch = Fetch(options, args)

    // Some progress lines seem to be scraped without this.
    Console.out.flush()

    // One path per line by default, or a java -cp compatible single line.
    val separator =
      if (options.classpath) File.pathSeparator
      else "\n"

    val out = fetch
      .files0
      .map(_.toString)
      .mkString(separator)

    println(out)
  }

}
|
||||
|
|
@ -6,6 +6,7 @@ import java.net.{URL, URLClassLoader}
|
|||
import java.util.concurrent.Executors
|
||||
import java.util.jar.{Manifest => JManifest}
|
||||
|
||||
import coursier.cli.options.{CommonOptions, IsolatedLoaderOptions}
|
||||
import coursier.cli.scaladex.Scaladex
|
||||
import coursier.cli.util.{JsonElem, JsonPrintRequirement, JsonReport}
|
||||
import coursier.extra.Typelevel
|
||||
|
|
@ -53,28 +54,6 @@ object Helper {
|
|||
}
|
||||
}
|
||||
|
||||
object Util {
|
||||
|
||||
def prematureExit(msg: String): Nothing = {
|
||||
Console.err.println(msg)
|
||||
sys.exit(255)
|
||||
}
|
||||
|
||||
def prematureExitIf(cond: Boolean)(msg: => String): Unit =
|
||||
if (cond)
|
||||
prematureExit(msg)
|
||||
|
||||
def exit(msg: String): Nothing = {
|
||||
Console.err.println(msg)
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
def exitIf(cond: Boolean)(msg: => String): Unit =
|
||||
if (cond)
|
||||
exit(msg)
|
||||
|
||||
}
|
||||
|
||||
class Helper(
|
||||
common: CommonOptions,
|
||||
rawDependencies: Seq[String],
|
||||
|
|
@ -265,12 +244,12 @@ class Helper(
|
|||
lines.map({ str =>
|
||||
val parent_and_child = str.split("--")
|
||||
if (parent_and_child.length != 2) {
|
||||
throw SoftExcludeParsingException(s"Failed to parse $str")
|
||||
throw new SoftExcludeParsingException(s"Failed to parse $str")
|
||||
}
|
||||
|
||||
val child_org_name = parent_and_child(1).split(":")
|
||||
if (child_org_name.length != 2) {
|
||||
throw SoftExcludeParsingException(s"Failed to parse $child_org_name")
|
||||
throw new SoftExcludeParsingException(s"Failed to parse $child_org_name")
|
||||
}
|
||||
|
||||
(parent_and_child(0), (child_org_name(0), child_org_name(1)))
|
||||
|
|
@ -853,7 +832,3 @@ class Helper(
|
|||
mainClass
|
||||
}
|
||||
}
|
||||
|
||||
/** Signals a malformed soft-exclude specification. */
case class SoftExcludeParsingException(
  private val message: String = "",
  private val cause: Throwable = None.orNull
) extends Exception(message, cause)
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
package coursier.cli
|
||||
|
||||
import java.net.{URL, URLClassLoader}
|
||||
|
||||
|
||||
class IsolatedClassLoader(
|
||||
urls: Array[URL],
|
||||
parent: ClassLoader,
|
||||
isolationTargets: Array[String]
|
||||
) extends URLClassLoader(urls, parent) {
|
||||
|
||||
/**
|
||||
* Applications wanting to access an isolated `ClassLoader` should inspect the hierarchy of
|
||||
* loaders, and look into each of them for this method, by reflection. Then they should
|
||||
* call it (still by reflection), and look for an agreed in advance target in it. If it is found,
|
||||
* then the corresponding `ClassLoader` is the one with isolated dependencies.
|
||||
*/
|
||||
def getIsolationTargets: Array[String] = isolationTargets
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import java.io.File
|
||||
|
||||
import caseapp._
|
||||
import coursier.cli.options.LaunchOptions
|
||||
|
||||
object Launch extends CaseApp[LaunchOptions] {
|
||||
|
||||
def apply(
|
||||
loader: ClassLoader,
|
||||
mainClass: String,
|
||||
args: Seq[String],
|
||||
verbosity: Int,
|
||||
beforeMain: => Unit = ()
|
||||
): Unit = {
|
||||
|
||||
val cls =
|
||||
try loader.loadClass(mainClass)
|
||||
catch { case e: ClassNotFoundException =>
|
||||
Helper.errPrintln(s"Error: class $mainClass not found")
|
||||
sys.exit(255)
|
||||
}
|
||||
val method =
|
||||
try cls.getMethod("main", classOf[Array[String]])
|
||||
catch { case e: NoSuchMethodException =>
|
||||
Helper.errPrintln(s"Error: method main not found in $mainClass")
|
||||
sys.exit(255)
|
||||
}
|
||||
method.setAccessible(true)
|
||||
|
||||
if (verbosity >= 2)
|
||||
Helper.errPrintln(s"Launching $mainClass ${args.mkString(" ")}")
|
||||
else if (verbosity == 1)
|
||||
Helper.errPrintln(s"Launching")
|
||||
|
||||
beforeMain
|
||||
|
||||
Thread.currentThread().setContextClassLoader(loader)
|
||||
try method.invoke(null, args.toArray)
|
||||
catch {
|
||||
case e: java.lang.reflect.InvocationTargetException =>
|
||||
throw Option(e.getCause).getOrElse(e)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def run(options: LaunchOptions, args: RemainingArgs): Unit = {
|
||||
|
||||
val userArgs = args.unparsed
|
||||
|
||||
val helper = new Helper(
|
||||
options.common,
|
||||
args.remaining ++ options.isolated.rawIsolated.map { case (_, dep) => dep },
|
||||
extraJars = options.extraJars.map(new File(_)),
|
||||
isolated = options.isolated
|
||||
)
|
||||
|
||||
val mainClass =
|
||||
if (options.mainClass.isEmpty)
|
||||
helper.retainedMainClass
|
||||
else
|
||||
options.mainClass
|
||||
|
||||
Launch(
|
||||
helper.loader,
|
||||
mainClass,
|
||||
userArgs,
|
||||
options.common.verbosityLevel
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
package coursier
|
||||
package cli
|
||||
|
||||
import caseapp._
|
||||
import coursier.cli.options.ResolveOptions
|
||||
|
||||
object Resolve extends CaseApp[ResolveOptions] {
|
||||
|
||||
def run(options: ResolveOptions, args: RemainingArgs): Unit = {
|
||||
new Helper(options.common, args.all, printResultStdout = true)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
package coursier.cli
|
||||
|
||||
final class SoftExcludeParsingException(
|
||||
private val message: String = "",
|
||||
private val cause: Throwable = None.orNull
|
||||
) extends Exception(message, cause)
|
||||
|
|
@ -0,0 +1,129 @@
|
|||
package coursier.cli
|
||||
|
||||
import java.io.{BufferedReader, File, InputStream, InputStreamReader, PipedInputStream, PipedOutputStream, PrintStream}
|
||||
|
||||
import coursier.internal.FileUtil
|
||||
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
object SparkOutputHelper {
|
||||
|
||||
def outputInspectThread(
|
||||
name: String,
|
||||
from: InputStream,
|
||||
to: PrintStream,
|
||||
handlers: Seq[String => Unit]
|
||||
) = {
|
||||
|
||||
val t = new Thread {
|
||||
override def run() = {
|
||||
val in = new BufferedReader(new InputStreamReader(from))
|
||||
var line: String = null
|
||||
while ({
|
||||
line = in.readLine()
|
||||
line != null
|
||||
}) {
|
||||
to.println(line)
|
||||
handlers.foreach(_(line))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
t.setName(name)
|
||||
t.setDaemon(true)
|
||||
|
||||
t
|
||||
}
|
||||
|
||||
|
||||
def handleOutput(yarnAppFileOpt: Option[File], maxIdleTimeOpt: Option[Int]): Unit = {
|
||||
|
||||
var handlers = Seq.empty[String => Unit]
|
||||
var threads = Seq.empty[Thread]
|
||||
|
||||
for (yarnAppFile <- yarnAppFileOpt) {
|
||||
|
||||
val Pattern = ".*Application report for ([^ ]+) .*".r
|
||||
|
||||
@volatile var written = false
|
||||
val lock = new AnyRef
|
||||
def handleMessage(s: String): Unit =
|
||||
if (!written)
|
||||
s match {
|
||||
case Pattern(id) =>
|
||||
lock.synchronized {
|
||||
if (!written) {
|
||||
println(s"Detected YARN app ID $id")
|
||||
Option(yarnAppFile.getParentFile).foreach(_.mkdirs())
|
||||
FileUtil.write(yarnAppFile, id.getBytes("UTF-8"))
|
||||
written = true
|
||||
}
|
||||
}
|
||||
case _ =>
|
||||
}
|
||||
|
||||
val f = { line: String =>
|
||||
try handleMessage(line)
|
||||
catch {
|
||||
case NonFatal(_) =>
|
||||
}
|
||||
}
|
||||
|
||||
handlers = handlers :+ f
|
||||
}
|
||||
|
||||
for (maxIdleTime <- maxIdleTimeOpt if maxIdleTime > 0) {
|
||||
|
||||
@volatile var lastMessageTs = -1L
|
||||
|
||||
def updateLastMessageTs() = {
|
||||
lastMessageTs = System.currentTimeMillis()
|
||||
}
|
||||
|
||||
val checkThread = new Thread {
|
||||
override def run() =
|
||||
try {
|
||||
while (true) {
|
||||
lastMessageTs = -1L
|
||||
Thread.sleep(maxIdleTime * 1000L)
|
||||
if (lastMessageTs < 0) {
|
||||
Console.err.println(s"No output from spark-submit for more than $maxIdleTime s, exiting")
|
||||
sys.exit(1)
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
case t: Throwable =>
|
||||
Console.err.println(s"Caught $t in check spark-submit output thread!")
|
||||
throw t
|
||||
}
|
||||
}
|
||||
|
||||
checkThread.setName("check-spark-submit-output")
|
||||
checkThread.setDaemon(true)
|
||||
|
||||
threads = threads :+ checkThread
|
||||
|
||||
val f = { line: String =>
|
||||
updateLastMessageTs()
|
||||
}
|
||||
|
||||
handlers = handlers :+ f
|
||||
}
|
||||
|
||||
def createThread(name: String, replaces: PrintStream, install: PrintStream => Unit): Thread = {
|
||||
val in = new PipedInputStream
|
||||
val out = new PipedOutputStream(in)
|
||||
install(new PrintStream(out))
|
||||
outputInspectThread(name, in, replaces, handlers)
|
||||
}
|
||||
|
||||
if (handlers.nonEmpty) {
|
||||
threads = threads ++ Seq(
|
||||
createThread("inspect-out", System.out, System.setOut),
|
||||
createThread("inspect-err", System.err, System.setErr)
|
||||
)
|
||||
|
||||
threads.foreach(_.start())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,222 @@
|
|||
package coursier.cli
|
||||
|
||||
import java.io.File
|
||||
import java.net.URLClassLoader
|
||||
|
||||
import caseapp._
|
||||
import coursier.Dependency
|
||||
import coursier.cli.options.SparkSubmitOptions
|
||||
import coursier.cli.spark.{Assembly, Submit}
|
||||
|
||||
|
||||
/**
|
||||
* Submits spark applications.
|
||||
*
|
||||
* Can be run with no spark distributions around.
|
||||
*
|
||||
* @author Alexandre Archambault
|
||||
* @author Han Ju
|
||||
*/
|
||||
object SparkSubmit extends CaseApp[SparkSubmitOptions] {
|
||||
|
||||
def scalaSparkVersions(dependencies: Iterable[Dependency]): Either[String, (String, String)] = {
|
||||
|
||||
val sparkCoreMods = dependencies.collect {
|
||||
case dep if dep.module.organization == "org.apache.spark" &&
|
||||
(dep.module.name == "spark-core_2.10" || dep.module.name == "spark-core_2.11") =>
|
||||
(dep.module, dep.version)
|
||||
}
|
||||
|
||||
if (sparkCoreMods.isEmpty)
|
||||
Left("Cannot find spark among dependencies")
|
||||
else if (sparkCoreMods.size == 1) {
|
||||
val scalaVersion = sparkCoreMods.head._1.name match {
|
||||
case "spark-core_2.10" => "2.10"
|
||||
case "spark-core_2.11" => "2.11"
|
||||
case _ => throw new Exception("Cannot happen")
|
||||
}
|
||||
|
||||
val sparkVersion = sparkCoreMods.head._2
|
||||
|
||||
Right((scalaVersion, sparkVersion))
|
||||
} else
|
||||
Left(s"Found several spark code modules among dependencies (${sparkCoreMods.mkString(", ")})")
|
||||
|
||||
}
|
||||
|
||||
|
||||
def run(options: SparkSubmitOptions, args: RemainingArgs): Unit = {
|
||||
|
||||
val rawExtraJars = options.extraJars.map(new File(_))
|
||||
|
||||
val extraDirs = rawExtraJars.filter(_.isDirectory)
|
||||
if (extraDirs.nonEmpty) {
|
||||
Console.err.println(s"Error: directories not allowed in extra job JARs.")
|
||||
Console.err.println(extraDirs.map(" " + _).mkString("\n"))
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
val helper: Helper = new Helper(
|
||||
options.common,
|
||||
args.remaining,
|
||||
extraJars = rawExtraJars
|
||||
)
|
||||
val jars =
|
||||
helper.fetch(
|
||||
sources = false,
|
||||
javadoc = false,
|
||||
artifactTypes = options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
) ++ options.extraJars.map(new File(_))
|
||||
|
||||
val (scalaVersion, sparkVersion) =
|
||||
if (options.sparkVersion.isEmpty)
|
||||
SparkSubmit.scalaSparkVersions(helper.res.dependencies) match {
|
||||
case Left(err) =>
|
||||
Console.err.println(
|
||||
s"Cannot get spark / scala versions from dependencies: $err\n" +
|
||||
"Set them via --scala-version or --spark-version"
|
||||
)
|
||||
sys.exit(1)
|
||||
case Right(versions) => versions
|
||||
}
|
||||
else
|
||||
(options.common.scalaVersion, options.sparkVersion)
|
||||
|
||||
val (sparkYarnExtraConf, sparkBaseJars) =
|
||||
if (!options.autoAssembly || sparkVersion.startsWith("2.")) {
|
||||
|
||||
val assemblyJars = Assembly.sparkJars(
|
||||
scalaVersion,
|
||||
sparkVersion,
|
||||
options.yarnVersion,
|
||||
options.defaultAssemblyDependencies.getOrElse(options.autoAssembly),
|
||||
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
|
||||
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
|
||||
options.common,
|
||||
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
)
|
||||
|
||||
val extraConf =
|
||||
if (options.autoAssembly && sparkVersion.startsWith("2."))
|
||||
Seq(
|
||||
"spark.yarn.jars" -> assemblyJars.map(_.getAbsolutePath).mkString(",")
|
||||
)
|
||||
else
|
||||
Nil
|
||||
|
||||
(extraConf, assemblyJars)
|
||||
} else {
|
||||
|
||||
val assemblyAndJarsOrError = Assembly.spark(
|
||||
scalaVersion,
|
||||
sparkVersion,
|
||||
options.yarnVersion,
|
||||
options.defaultAssemblyDependencies.getOrElse(true),
|
||||
options.assemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty) ++
|
||||
options.sparkAssemblyDependencies.flatMap(_.split(",")).filter(_.nonEmpty).map(_ + s":$sparkVersion"),
|
||||
options.common,
|
||||
options.artifactOptions.artifactTypes(sources = false, javadoc = false)
|
||||
)
|
||||
|
||||
val (assembly, assemblyJars) = assemblyAndJarsOrError match {
|
||||
case Left(err) =>
|
||||
Console.err.println(s"Cannot get spark assembly: $err")
|
||||
sys.exit(1)
|
||||
case Right(res) => res
|
||||
}
|
||||
|
||||
val extraConf = Seq(
|
||||
"spark.yarn.jar" -> assembly.getAbsolutePath
|
||||
)
|
||||
|
||||
(extraConf, assemblyJars)
|
||||
}
|
||||
|
||||
|
||||
val idx = {
|
||||
val idx0 = args.unparsed.indexOf("--")
|
||||
if (idx0 < 0)
|
||||
args.unparsed.length
|
||||
else
|
||||
idx0
|
||||
}
|
||||
|
||||
assert(idx >= 0)
|
||||
|
||||
val sparkOpts = args.unparsed.take(idx)
|
||||
val jobArgs = args.unparsed.drop(idx + 1)
|
||||
|
||||
val mainClass =
|
||||
if (options.mainClass.isEmpty)
|
||||
helper.retainedMainClass
|
||||
else
|
||||
options.mainClass
|
||||
|
||||
val mainJar = helper
|
||||
.loader
|
||||
.loadClass(mainClass) // FIXME Check for errors, provide a nicer error message in that case
|
||||
.getProtectionDomain
|
||||
.getCodeSource
|
||||
.getLocation
|
||||
.getPath // TODO Safety check: protocol must be file
|
||||
|
||||
val (check, extraJars0) = jars.partition(_.getAbsolutePath == mainJar)
|
||||
|
||||
val extraJars = extraJars0.filterNot(sparkBaseJars.toSet)
|
||||
|
||||
if (check.isEmpty)
|
||||
Console.err.println(
|
||||
s"Warning: cannot find back $mainJar among the dependencies JARs (likely a coursier bug)"
|
||||
)
|
||||
|
||||
val extraSparkOpts = sparkYarnExtraConf.flatMap {
|
||||
case (k, v) => Seq(
|
||||
"--conf", s"$k=$v"
|
||||
)
|
||||
}
|
||||
|
||||
val extraJarsOptions =
|
||||
if (extraJars.isEmpty)
|
||||
Nil
|
||||
else
|
||||
Seq("--jars", extraJars.mkString(","))
|
||||
|
||||
val mainClassOptions = Seq("--class", mainClass)
|
||||
|
||||
val sparkSubmitOptions = sparkOpts ++ extraSparkOpts ++ extraJarsOptions ++ mainClassOptions ++
|
||||
Seq(mainJar) ++ jobArgs
|
||||
|
||||
val submitCp = Submit.cp(
|
||||
scalaVersion,
|
||||
sparkVersion,
|
||||
options.noDefaultSubmitDependencies,
|
||||
options.submitDependencies.flatMap(_.split(",")).filter(_.nonEmpty),
|
||||
options.artifactOptions.artifactTypes(sources = false, javadoc = false),
|
||||
options.common
|
||||
)
|
||||
|
||||
val submitLoader = new URLClassLoader(
|
||||
submitCp.map(_.toURI.toURL).toArray,
|
||||
helper.baseLoader
|
||||
)
|
||||
|
||||
Launch(
|
||||
submitLoader,
|
||||
Submit.mainClassName,
|
||||
sparkSubmitOptions,
|
||||
options.common.verbosityLevel,
|
||||
{
|
||||
if (options.common.verbosityLevel >= 1)
|
||||
Console.err.println(
|
||||
s"Launching spark-submit with arguments:\n" +
|
||||
sparkSubmitOptions.map(" " + _).mkString("\n")
|
||||
)
|
||||
|
||||
SparkOutputHelper.handleOutput(
|
||||
Some(options.yarnIdFile).filter(_.nonEmpty).map(new File(_)),
|
||||
Some(options.maxIdleTime).filter(_ > 0)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
package coursier.cli
|
||||
|
||||
object Util {
|
||||
|
||||
def prematureExit(msg: String): Nothing = {
|
||||
Console.err.println(msg)
|
||||
sys.exit(255)
|
||||
}
|
||||
|
||||
def prematureExitIf(cond: Boolean)(msg: => String): Unit =
|
||||
if (cond)
|
||||
prematureExit(msg)
|
||||
|
||||
def exit(msg: String): Nothing = {
|
||||
Console.err.println(msg)
|
||||
sys.exit(1)
|
||||
}
|
||||
|
||||
def exitIf(cond: Boolean)(msg: => String): Unit =
|
||||
if (cond)
|
||||
exit(msg)
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
|
||||
|
||||
object ArtifactOptions {
|
||||
def defaultArtifactTypes = Set("jar", "bundle", "test-jar")
|
||||
|
||||
implicit val parser = Parser[ArtifactOptions]
|
||||
implicit val help = caseapp.core.help.Help[ArtifactOptions]
|
||||
}
|
||||
|
||||
final case class ArtifactOptions(
|
||||
@Help("Artifact types that should be retained (e.g. jar, src, doc, etc.) - defaults to jar,bundle")
|
||||
@Value("type1,type2,...")
|
||||
@Short("A")
|
||||
artifactType: List[String] = Nil,
|
||||
@Help("Fetch artifacts even if the resolution is errored")
|
||||
force: Boolean = false
|
||||
) {
|
||||
def artifactTypes(sources: Boolean, javadoc: Boolean) = {
|
||||
val types0 = artifactType
|
||||
.flatMap(_.split(','))
|
||||
.filter(_.nonEmpty)
|
||||
.toSet
|
||||
|
||||
if (types0.isEmpty) {
|
||||
if (sources || javadoc)
|
||||
Some("src").filter(_ => sources).toSet ++ Some("doc").filter(_ => javadoc)
|
||||
else
|
||||
ArtifactOptions.defaultArtifactTypes
|
||||
} else if (types0("*"))
|
||||
Set("*")
|
||||
else
|
||||
types0
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{Parser, Recurse}
|
||||
|
||||
final case class BootstrapOptions(
|
||||
@Recurse
|
||||
artifactOptions: ArtifactOptions,
|
||||
@Recurse
|
||||
options: BootstrapSpecificOptions
|
||||
)
|
||||
|
||||
object BootstrapOptions {
|
||||
implicit val parser = Parser[BootstrapOptions]
|
||||
implicit val help = caseapp.core.help.Help[BootstrapOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
|
||||
|
||||
final case class BootstrapSpecificOptions(
|
||||
@Short("M")
|
||||
@Short("main")
|
||||
mainClass: String = "",
|
||||
@Short("o")
|
||||
output: String = "bootstrap",
|
||||
@Short("f")
|
||||
force: Boolean = false,
|
||||
@Help("Generate a standalone launcher, with all JARs included, instead of one downloading its dependencies on startup.")
|
||||
@Short("s")
|
||||
standalone: Boolean = false,
|
||||
@Help("Set Java properties in the generated launcher.")
|
||||
@Value("key=value")
|
||||
@Short("D")
|
||||
property: List[String] = Nil,
|
||||
@Help("Set Java command-line options in the generated launcher.")
|
||||
@Value("option")
|
||||
@Short("J")
|
||||
javaOpt: List[String] = Nil,
|
||||
@Help("Generate native launcher")
|
||||
@Short("S")
|
||||
native: Boolean = false,
|
||||
@Help("Native compilation target directory")
|
||||
@Short("d")
|
||||
target: String = "native-target",
|
||||
@Help("Don't wipe native compilation target directory (for debug purposes)")
|
||||
keepTarget: Boolean = false,
|
||||
@Recurse
|
||||
isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
|
||||
@Recurse
|
||||
common: CommonOptions = CommonOptions()
|
||||
)
|
||||
|
||||
object BootstrapSpecificOptions {
|
||||
implicit val parser = Parser[BootstrapSpecificOptions]
|
||||
implicit val help = caseapp.core.help.Help[BootstrapSpecificOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ExtraName => Short, HelpMessage => Help, _}
|
||||
import coursier.Cache
|
||||
|
||||
final case class CacheOptions(
|
||||
@Help("Cache directory (defaults to environment variable COURSIER_CACHE or ~/.coursier/cache/v1)")
|
||||
@Short("C")
|
||||
cache: String = Cache.default.toString
|
||||
)
|
||||
|
||||
object CacheOptions {
|
||||
implicit val parser = Parser[CacheOptions]
|
||||
implicit val help = caseapp.core.help.Help[CacheOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
|
||||
|
||||
import coursier.core.ResolutionProcess
|
||||
|
||||
final case class CommonOptions(
|
||||
@Help("Keep optional dependencies (Maven)")
|
||||
keepOptional: Boolean = false,
|
||||
@Help("Download mode (default: missing, that is fetch things missing from cache)")
|
||||
@Value("offline|update-changing|update|missing|force")
|
||||
@Short("m")
|
||||
mode: String = "",
|
||||
@Help("TTL duration (e.g. \"24 hours\")")
|
||||
@Value("duration")
|
||||
@Short("l")
|
||||
ttl: String = "",
|
||||
@Help("Quiet output")
|
||||
@Short("q")
|
||||
quiet: Boolean = false,
|
||||
@Help("Increase verbosity (specify several times to increase more)")
|
||||
@Short("v")
|
||||
verbose: Int @@ Counter = Tag.of(0),
|
||||
@Help("Force display of progress bars")
|
||||
@Short("P")
|
||||
progress: Boolean = false,
|
||||
@Help("Maximum number of resolution iterations (specify a negative value for unlimited, default: 100)")
|
||||
@Short("N")
|
||||
maxIterations: Int = ResolutionProcess.defaultMaxIterations,
|
||||
@Help("Repository - for multiple repositories, separate with comma and/or add this option multiple times (e.g. -r central,ivy2local -r sonatype-snapshots, or equivalently -r central,ivy2local,sonatype-snapshots)")
|
||||
@Value("maven|sonatype:$repo|ivy2local|bintray:$org/$repo|bintray-ivy:$org/$repo|typesafe:ivy-$repo|typesafe:$repo|sbt-plugin:$repo|ivy:$pattern")
|
||||
@Short("r")
|
||||
repository: List[String] = Nil,
|
||||
@Help("Do not add default repositories (~/.ivy2/local, and Central)")
|
||||
noDefault: Boolean = false,
|
||||
@Help("Modify names in Maven repository paths for SBT plugins")
|
||||
sbtPluginHack: Boolean = true,
|
||||
@Help("Drop module attributes starting with 'info.' - these are sometimes used by projects built with SBT")
|
||||
dropInfoAttr: Boolean = false,
|
||||
@Help("Force module version")
|
||||
@Value("organization:name:forcedVersion")
|
||||
@Short("V")
|
||||
forceVersion: List[String] = Nil,
|
||||
@Help("Exclude module")
|
||||
@Value("organization:name")
|
||||
@Short("E")
|
||||
@Help("Global level exclude")
|
||||
exclude: List[String] = Nil,
|
||||
|
||||
@Short("x")
|
||||
@Help("Path to the local exclusion file. " +
|
||||
"Syntax: <org:name>--<org:name>. `--` means minus. Example file content:\n\t" +
|
||||
"\tcom.twitter.penguin:korean-text--com.twitter:util-tunable-internal_2.11\n\t" +
|
||||
"\torg.apache.commons:commons-math--com.twitter.search:core-query-nodes\n\t" +
|
||||
"Behavior: If root module A excludes module X, but root module B requires X, module X will still be fetched.")
|
||||
localExcludeFile: String = "",
|
||||
@Help("Default scala version")
|
||||
@Short("e")
|
||||
scalaVersion: String = scala.util.Properties.versionNumberString,
|
||||
@Help("Add intransitive dependencies")
|
||||
intransitive: List[String] = Nil,
|
||||
@Help("Classifiers that should be fetched")
|
||||
@Value("classifier1,classifier2,...")
|
||||
@Short("C")
|
||||
classifier: List[String] = Nil,
|
||||
@Help("Default configuration (default(compile) by default)")
|
||||
@Value("configuration")
|
||||
@Short("c")
|
||||
defaultConfiguration: String = "default(compile)",
|
||||
@Help("Maximum number of parallel downloads (default: 6)")
|
||||
@Short("n")
|
||||
parallel: Int = 6,
|
||||
@Help("Checksums")
|
||||
@Value("checksum1,checksum2,... - end with none to allow for no checksum validation if none are available")
|
||||
checksum: List[String] = Nil,
|
||||
@Help("Print the duration of each iteration of the resolution")
|
||||
@Short("B")
|
||||
@Value("Number of warm-up resolutions - if negative, doesn't print per iteration benchmark (less overhead)")
|
||||
benchmark: Int = 0,
|
||||
@Help("Print dependencies as a tree")
|
||||
@Short("t")
|
||||
tree: Boolean = false,
|
||||
@Help("Print dependencies as an inversed tree (dependees as children)")
|
||||
@Short("T")
|
||||
reverseTree: Boolean = false,
|
||||
@Help("Enable profile")
|
||||
@Value("profile")
|
||||
@Short("F")
|
||||
profile: List[String] = Nil,
|
||||
|
||||
@Help("Specify path for json output")
|
||||
@Short("j")
|
||||
jsonOutputFile: String = "",
|
||||
|
||||
@Help("Swap the mainline Scala JARs by Typelevel ones")
|
||||
typelevel: Boolean = false,
|
||||
@Recurse
|
||||
cacheOptions: CacheOptions = CacheOptions()
|
||||
) {
|
||||
val verbosityLevel = Tag.unwrap(verbose) - (if (quiet) 1 else 0)
|
||||
lazy val classifier0 = classifier.flatMap(_.split(',')).filter(_.nonEmpty).toSet
|
||||
}
|
||||
|
||||
object CommonOptions {
|
||||
implicit val parser = Parser[CommonOptions]
|
||||
implicit val help = caseapp.core.help.Help[CommonOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ExtraName => Short, _ }
|
||||
|
||||
final case class FetchOptions(
|
||||
@Help("Fetch source artifacts")
|
||||
@Short("S")
|
||||
sources: Boolean = false,
|
||||
@Help("Fetch javadoc artifacts")
|
||||
@Short("D")
|
||||
javadoc: Boolean = false,
|
||||
@Help("Print java -cp compatible output")
|
||||
@Short("p")
|
||||
classpath: Boolean = false,
|
||||
@Recurse
|
||||
artifactOptions: ArtifactOptions = ArtifactOptions(),
|
||||
@Recurse
|
||||
common: CommonOptions = CommonOptions()
|
||||
)
|
||||
|
||||
object FetchOptions {
|
||||
implicit val parser = Parser[FetchOptions]
|
||||
implicit val help = caseapp.core.help.Help[FetchOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ExtraName => Short, HelpMessage => Help, ValueDescription => Value, _}
|
||||
import coursier.{Attributes, Dependency}
|
||||
import coursier.util.Parse
|
||||
|
||||
|
||||
final case class IsolatedLoaderOptions(
|
||||
@Value("target:dependency")
|
||||
@Short("I")
|
||||
isolated: List[String] = Nil,
|
||||
@Help("Comma-separated isolation targets")
|
||||
@Short("i")
|
||||
isolateTarget: List[String] = Nil
|
||||
) {
|
||||
|
||||
def anyIsolatedDep = isolateTarget.nonEmpty || isolated.nonEmpty
|
||||
|
||||
lazy val targets = {
|
||||
val l = isolateTarget.flatMap(_.split(',')).filter(_.nonEmpty)
|
||||
val (invalid, valid) = l.partition(_.contains(":"))
|
||||
if (invalid.nonEmpty) {
|
||||
Console.err.println(s"Invalid target IDs:")
|
||||
for (t <- invalid)
|
||||
Console.err.println(s" $t")
|
||||
sys.exit(255)
|
||||
}
|
||||
if (valid.isEmpty)
|
||||
Array("default")
|
||||
else
|
||||
valid.toArray
|
||||
}
|
||||
|
||||
lazy val (validIsolated, unrecognizedIsolated) = isolated.partition(s => targets.exists(t => s.startsWith(t + ":")))
|
||||
|
||||
def check() = {
|
||||
if (unrecognizedIsolated.nonEmpty) {
|
||||
Console.err.println(s"Unrecognized isolation targets in:")
|
||||
for (i <- unrecognizedIsolated)
|
||||
Console.err.println(s" $i")
|
||||
sys.exit(255)
|
||||
}
|
||||
}
|
||||
|
||||
lazy val rawIsolated = validIsolated.map { s =>
|
||||
val Array(target, dep) = s.split(":", 2)
|
||||
target -> dep
|
||||
}
|
||||
|
||||
def isolatedModuleVersions(defaultScalaVersion: String) = rawIsolated.groupBy { case (t, _) => t }.map {
|
||||
case (t, l) =>
|
||||
val (errors, modVers) = Parse.moduleVersions(l.map { case (_, d) => d }, defaultScalaVersion)
|
||||
|
||||
if (errors.nonEmpty) {
|
||||
errors.foreach(Console.err.println)
|
||||
sys.exit(255)
|
||||
}
|
||||
|
||||
t -> modVers
|
||||
}
|
||||
|
||||
def isolatedDeps(defaultScalaVersion: String) =
|
||||
isolatedModuleVersions(defaultScalaVersion).map {
|
||||
case (t, l) =>
|
||||
t -> l.map {
|
||||
case (mod, ver) =>
|
||||
Dependency(
|
||||
mod,
|
||||
ver,
|
||||
configuration = "runtime",
|
||||
attributes = Attributes("", "")
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
object IsolatedLoaderOptions {
|
||||
implicit val parser = Parser[IsolatedLoaderOptions]
|
||||
implicit val help = caseapp.core.help.Help[IsolatedLoaderOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ExtraName => Short, _ }
|
||||
|
||||
final case class LaunchOptions(
|
||||
@Short("M")
|
||||
@Short("main")
|
||||
mainClass: String = "",
|
||||
@Short("J")
|
||||
@Help("Extra JARs to be added to the classpath of the launched application. Directories accepted too.")
|
||||
extraJars: List[String] = Nil,
|
||||
@Recurse
|
||||
isolated: IsolatedLoaderOptions = IsolatedLoaderOptions(),
|
||||
@Recurse
|
||||
common: CommonOptions = CommonOptions()
|
||||
)
|
||||
|
||||
object LaunchOptions {
|
||||
implicit val parser = Parser[LaunchOptions]
|
||||
implicit val help = caseapp.core.help.Help[LaunchOptions]
|
||||
}
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp._
|
||||
|
||||
final case class ResolveOptions(
|
||||
@Recurse
|
||||
common: CommonOptions = CommonOptions()
|
||||
)
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
package coursier.cli.options
|
||||
|
||||
import caseapp.{ HelpMessage => Help, ValueDescription => Value, ExtraName => Short, _ }
|
||||
|
||||
final case class SparkSubmitOptions(
|
||||
@Short("M")
|
||||
@Short("main")
|
||||
@Help("Main class to be launched (optional if in manifest)")
|
||||
mainClass: String = "",
|
||||
@Short("J")
|
||||
@Help("Extra JARs to be added in the classpath of the job")
|
||||
extraJars: List[String] = Nil,
|
||||
@Help("If master is yarn-cluster, write YARN app ID to a file. (The ID is deduced from the spark-submit output.)")
|
||||
@Value("file")
|
||||
yarnIdFile: String = "",
|
||||
@Help("Generate Spark Yarn assembly (Spark 1.x) or fetch Spark Yarn jars (Spark 2.x), and supply those to Spark via conf. (Default: true)")
|
||||
autoAssembly: Boolean = true,
|
||||
@Help("Include default dependencies in Spark Yarn assembly or jars (see --auto-assembly). If --auto-assembly is false, the corresponding dependencies will still be shunted from the job classpath if this option is true. (Default: same as --auto-assembly)")
|
||||
defaultAssemblyDependencies: Option[Boolean] = None,
|
||||
assemblyDependencies: List[String] = Nil,
|
||||
sparkAssemblyDependencies: List[String] = Nil,
|
||||
noDefaultSubmitDependencies: Boolean = false,
|
||||
submitDependencies: List[String] = Nil,
|
||||
@Help("Spark version - if empty, deduced from the job classpath. (Default: empty)")
|
||||
sparkVersion: String = "",
|
||||
@Help("YARN version - only used with Spark 2. (Default: 2.7.3)")
|
||||
yarnVersion: String = "2.7.3",
|
||||
@Help("Maximum idle time of spark-submit (time with no output). Exit early if no output from spark-submit for more than this duration. Set to 0 for unlimited. (Default: 0)")
|
||||
@Value("seconds")
|
||||
maxIdleTime: Int = 0,
|
||||
@Recurse
|
||||
artifactOptions: ArtifactOptions = ArtifactOptions(),
|
||||
@Recurse
|
||||
common: CommonOptions = CommonOptions()
|
||||
)
|
||||
|
||||
object SparkSubmitOptions {
|
||||
implicit val parser = Parser[SparkSubmitOptions]
|
||||
implicit val help = caseapp.core.help.Help[SparkSubmitOptions]
|
||||
}
|
||||
|
|
@ -8,7 +8,8 @@ import java.util.regex.Pattern
|
|||
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
|
||||
|
||||
import coursier.Cache
|
||||
import coursier.cli.{CommonOptions, Helper}
|
||||
import coursier.cli.Helper
|
||||
import coursier.cli.options.CommonOptions
|
||||
import coursier.cli.util.Zip
|
||||
import coursier.internal.FileUtil
|
||||
|
||||
|
|
@ -2,7 +2,8 @@ package coursier.cli.spark
|
|||
|
||||
import java.io.File
|
||||
|
||||
import coursier.cli.{ CommonOptions, Helper }
|
||||
import coursier.cli.Helper
|
||||
import coursier.cli.options.CommonOptions
|
||||
|
||||
object Submit {
|
||||
|
||||
|
|
@ -2,7 +2,7 @@ junit_tests(
|
|||
name = "test",
|
||||
dependencies = [
|
||||
"3rdparty/jvm:scalatest",
|
||||
"cli/src/main/scala-2.11:cli",
|
||||
"cli/src/main/scala-2.12:cli",
|
||||
],
|
||||
sources = globs("*.scala"),
|
||||
)
|
||||
|
|
@ -5,6 +5,8 @@ import java.util.zip.ZipInputStream
|
|||
|
||||
import argonaut.Argonaut._
|
||||
import coursier.cli.util.{DepNode, ReportNode}
|
||||
import caseapp.core.RemainingArgs
|
||||
import coursier.cli.options._
|
||||
import org.junit.runner.RunWith
|
||||
import org.scalatest.FlatSpec
|
||||
import org.scalatest.junit.JUnitRunner
|
||||
|
|
@ -38,26 +40,10 @@ class CliIntegrationTest extends FlatSpec {
|
|||
}
|
||||
}
|
||||
|
||||
trait TestOnlyExtraArgsApp extends caseapp.core.DefaultArgsApp {
|
||||
private var remainingArgs1 = Seq.empty[String]
|
||||
private var extraArgs1 = Seq.empty[String]
|
||||
|
||||
override def setRemainingArgs(remainingArgs: Seq[String], extraArgs: Seq[String]): Unit = {
|
||||
remainingArgs1 = remainingArgs
|
||||
}
|
||||
|
||||
override def remainingArgs: Seq[String] = remainingArgs1
|
||||
|
||||
def extraArgs: Seq[String] =
|
||||
extraArgs1
|
||||
}
|
||||
|
||||
"Normal fetch" should "get all files" in {
|
||||
|
||||
val fetchOpt = FetchOptions(common = CommonOptions())
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("junit:junit:4.12"), Seq())
|
||||
fetch.apply()
|
||||
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.12"), Seq()))
|
||||
assert(fetch.files0.map(_.getName).toSet.equals(Set("junit-4.12.jar", "hamcrest-core-1.3.jar")))
|
||||
|
||||
}
|
||||
|
|
@ -68,9 +54,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("junit:junit:4.12"), Seq())
|
||||
fetch.apply()
|
||||
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("junit:junit:4.12"), Seq()))
|
||||
val filesFetched = fetch.files0.map(_.getName).toSet
|
||||
val expected = Set("junit-4.12.jar")
|
||||
assert(filesFetched.equals(expected), s"files fetched: $filesFetched not matching expected: $expected")
|
||||
|
|
@ -101,9 +85,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.avro:avro:1.7.4"), Seq())
|
||||
fetch.apply()
|
||||
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("org.apache.avro:avro:1.7.4"), Seq()))
|
||||
|
||||
val filesFetched = fetch.files0.map(_.getName).toSet
|
||||
assert(!filesFetched.contains("xz-1.0.jar"))
|
||||
|
|
@ -151,9 +133,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(localExcludeFile = file.getAbsolutePath, jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.avro:avro:1.7.4", "org.apache.commons:commons-compress:1.4.1"), Seq())
|
||||
fetch.apply()
|
||||
val fetch = Fetch(fetchOpt, RemainingArgs(Seq("org.apache.avro:avro:1.7.4", "org.apache.commons:commons-compress:1.4.1"), Seq()))
|
||||
val filesFetched = fetch.files0.map(_.getName).toSet
|
||||
assert(filesFetched.contains("xz-1.0.jar"))
|
||||
|
||||
|
|
@ -185,9 +165,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.commons:commons-compress:1.4.1", "org.tukaani:xz:1.1"), Seq())
|
||||
fetch.apply()
|
||||
Fetch.run(fetchOpt, RemainingArgs(Seq("org.apache.commons:commons-compress:1.4.1", "org.tukaani:xz:1.1"), Seq()))
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
assert(node.conflict_resolution.isEmpty)
|
||||
|
|
@ -208,9 +186,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.commons:commons-compress:1.5", "org.tukaani:xz:1.1"), Seq())
|
||||
fetch.apply()
|
||||
Fetch.run(fetchOpt, RemainingArgs(Seq("org.apache.commons:commons-compress:1.5", "org.tukaani:xz:1.1"), Seq()))
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
assert(node.conflict_resolution == Map("org.tukaani:xz:1.1" -> "org.tukaani:xz:1.2"))
|
||||
|
|
@ -230,9 +206,10 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.commons:commons-compress:1.5,classifier=tests"), Seq())
|
||||
fetch.apply()
|
||||
Fetch.run(
|
||||
fetchOpt,
|
||||
RemainingArgs(Seq("org.apache.commons:commons-compress:1.5,classifier=tests"), Seq())
|
||||
)
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
|
||||
|
|
@ -259,12 +236,16 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(
|
||||
Seq("org.apache.commons:commons-compress:1.5,classifier=tests",
|
||||
"org.apache.commons:commons-compress:1.5"),
|
||||
Seq())
|
||||
fetch.apply()
|
||||
Fetch.run(
|
||||
fetchOpt,
|
||||
RemainingArgs(
|
||||
Seq(
|
||||
"org.apache.commons:commons-compress:1.5,classifier=tests",
|
||||
"org.apache.commons:commons-compress:1.5"
|
||||
),
|
||||
Seq()
|
||||
)
|
||||
)
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
|
||||
|
|
@ -293,8 +274,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, intransitive = List("org.apache.commons:commons-compress:1.5"))
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.apply()
|
||||
Fetch.run(fetchOpt, RemainingArgs(Nil, Nil))
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
val compressNode = node.dependencies.find(_.coord == "org.apache.commons:commons-compress:1.5")
|
||||
|
|
@ -319,9 +299,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, intransitive = List("org.apache.commons:commons-compress:1.5,classifier=tests"))
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq(), Seq())
|
||||
fetch.apply()
|
||||
Fetch.run(fetchOpt, RemainingArgs(Seq(), Seq()))
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
|
||||
|
|
@ -347,9 +325,10 @@ class CliIntegrationTest extends FlatSpec {
|
|||
val commonOpt = CommonOptions(jsonOutputFile = jsonFile.getPath, forceVersion = List("org.apache.commons:commons-compress:1.4.1"))
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.commons:commons-compress:1.5,classifier=tests"), Seq())
|
||||
fetch.apply()
|
||||
Fetch(
|
||||
fetchOpt,
|
||||
RemainingArgs(Seq("org.apache.commons:commons-compress:1.5,classifier=tests"), Seq())
|
||||
)
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
|
||||
|
|
@ -380,9 +359,7 @@ class CliIntegrationTest extends FlatSpec {
|
|||
forceVersion = List("org.apache.commons:commons-compress:1.4.1"))
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq(), Seq())
|
||||
fetch.apply()
|
||||
Fetch.run(fetchOpt, RemainingArgs(Seq(), Seq()))
|
||||
|
||||
val node: ReportNode = getReportFromJson(jsonFile)
|
||||
|
||||
|
|
@ -406,9 +383,10 @@ class CliIntegrationTest extends FlatSpec {
|
|||
)
|
||||
val fetchOpt = FetchOptions(common = commonOpt)
|
||||
|
||||
val fetch = new Fetch(fetchOpt) with TestOnlyExtraArgsApp
|
||||
fetch.setRemainingArgs(Seq("org.apache.spark:spark-core_2.10:2.2.1"), Seq())
|
||||
fetch.apply()
|
||||
Fetch(
|
||||
fetchOpt,
|
||||
RemainingArgs(Seq("org.apache.spark:spark-core_2.10:2.2.1"), Seq())
|
||||
)
|
||||
|
||||
val node = getReportFromJson(jsonFile)
|
||||
|
||||
|
|
@ -437,15 +415,18 @@ class CliIntegrationTest extends FlatSpec {
|
|||
isolateTarget = List("foo"),
|
||||
isolated = List("foo:org.scalameta:trees_2.12:1.7.0")
|
||||
)
|
||||
val bootstrapOptions = BootstrapOptions(
|
||||
val bootstrapSpecificOptions = BootstrapSpecificOptions(
|
||||
output = bootstrapFile.getPath,
|
||||
isolated = isolatedLoaderOptions,
|
||||
force = true
|
||||
force = true,
|
||||
common = common
|
||||
)
|
||||
val bootstrapOptions = BootstrapOptions(artifactOptions, bootstrapSpecificOptions)
|
||||
|
||||
val bootstrap = new Bootstrap(artifactOptions, bootstrapOptions) with TestOnlyExtraArgsApp
|
||||
bootstrap.setRemainingArgs(Seq("com.geirsson:scalafmt-cli_2.12:1.4.0"), Seq())
|
||||
bootstrap.apply()
|
||||
Bootstrap.run(
|
||||
bootstrapOptions,
|
||||
RemainingArgs(Seq("com.geirsson:scalafmt-cli_2.12:1.4.0"), Seq())
|
||||
)
|
||||
|
||||
var fis: InputStream = null
|
||||
|
||||
|
|
@ -2,6 +2,7 @@ package coursier.cli
|
|||
|
||||
import java.io.{File, FileWriter}
|
||||
|
||||
import coursier.cli.options.CommonOptions
|
||||
import org.junit.runner.RunWith
|
||||
import org.scalatest.FlatSpec
|
||||
import org.scalatest.junit.JUnitRunner
|
||||
|
|
@ -2,6 +2,7 @@ scala_library(
|
|||
name = "extra",
|
||||
dependencies = [
|
||||
"core:core",
|
||||
"3rdparty/jvm:scala-native",
|
||||
],
|
||||
sources = rglobs("*.scala"),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -12,7 +12,8 @@ write_to: ["%(local_artifact_cache)s"]
|
|||
options: ['-Xmx4g', '-XX:MaxMetaspaceSize=256m']
|
||||
|
||||
[scala-platform]
|
||||
version: 2.11
|
||||
version: custom
|
||||
suffix_version: 2.12
|
||||
|
||||
[jvm-platform]
|
||||
default_platform: java8
|
||||
|
|
|
|||
|
|
@ -10,9 +10,9 @@ object Deps {
|
|||
def jsoup = "org.jsoup" % "jsoup" % "1.10.3"
|
||||
def scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.0.6"
|
||||
def scalazConcurrent = "org.scalaz" %% "scalaz-concurrent" % SharedVersions.scalaz
|
||||
def caseApp = "com.github.alexarchambault" %% "case-app" % "1.1.3"
|
||||
def caseApp = "com.github.alexarchambault" %% "case-app" % "2.0.0-M3"
|
||||
def okhttpUrlConnection = "com.squareup.okhttp" % "okhttp-urlconnection" % "2.7.5"
|
||||
def argonautShapeless = "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M6"
|
||||
def argonautShapeless = "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M8"
|
||||
def jackson = "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.8.4"
|
||||
def scalatest = "org.scalatest" %% "scalatest" % "3.0.0"
|
||||
def junit = "junit" % "junit" % "4.12"
|
||||
|
|
@ -42,7 +42,7 @@ object Deps {
|
|||
"org.slf4j" % "slf4j-api" % "1.7.25"
|
||||
)
|
||||
|
||||
def scalaNativeNir = "io.get-coursier.scala-native" %% "nir" % SharedVersions.scalaNative
|
||||
def scalaNativeTools = "io.get-coursier.scala-native" %% "tools" % SharedVersions.scalaNative
|
||||
def scalaNativeUtil = "io.get-coursier.scala-native" %% "util" % SharedVersions.scalaNative
|
||||
def scalaNativeNir = "org.scala-native" %% "nir" % SharedVersions.scalaNative
|
||||
def scalaNativeTools = "org.scala-native" %% "tools" % SharedVersions.scalaNative
|
||||
def scalaNativeUtil = "org.scala-native" %% "util" % SharedVersions.scalaNative
|
||||
}
|
||||
|
|
|
|||
|
|
@ -266,7 +266,7 @@ object Settings {
|
|||
lazy val addProguardedJar = {
|
||||
|
||||
val extra = Def.taskDyn[Map[Artifact, File]] {
|
||||
if (scalaBinaryVersion.value == "2.11")
|
||||
if (scalaBinaryVersion.value == "2.12")
|
||||
Def.task(Map(proguardedArtifact.value -> proguardedJar.value))
|
||||
else
|
||||
Def.task(Map())
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ object SharedVersions {
|
|||
def asm = "5.2"
|
||||
def fastParse = "1.0.0"
|
||||
def proguard = "5.3.3"
|
||||
def scalaNative = "0.3.0-coursier-1"
|
||||
def scalaNative = "0.3.6"
|
||||
def scalaz = "7.2.16"
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ CACHE_VERSION=v1
|
|||
SBTPACK_LAUNCHER="$(dirname "$0")/../cli/target/pack/bin/coursier"
|
||||
|
||||
if [ ! -f "$SBTPACK_LAUNCHER" ]; then
|
||||
sbt ++2.11.11 "project cli" pack
|
||||
sbt ++2.12.4 "project cli" pack
|
||||
fi
|
||||
|
||||
"$SBTPACK_LAUNCHER" bootstrap \
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ launchProxyRepos() {
|
|||
|
||||
integrationTestsRequirements() {
|
||||
# Required for ~/.ivy2/local repo tests
|
||||
sbt ++2.11.11 coreJVM/publishLocal cli/publishLocal
|
||||
sbt ++2.11.12 coreJVM/publishLocal ++2.12.4 cli/publishLocal
|
||||
|
||||
# Required for HTTP authentication tests
|
||||
launchTestRepo --port 8080 --list-pages
|
||||
|
|
@ -134,32 +134,6 @@ checkBinaryCompatibility() {
|
|||
sbt ++${SCALA_VERSION} coreJVM/mimaReportBinaryIssues cache/mimaReportBinaryIssues
|
||||
}
|
||||
|
||||
testLauncherJava6() {
|
||||
sbt ++${SCALA_VERSION} "project cli" pack
|
||||
|
||||
# Via docker, getting errors like
|
||||
# standard_init_linux.go:178: exec user process caused "exec format error"
|
||||
# because of the initial empty line in the sbt-pack launchers.
|
||||
# Required until something like https://github.com/xerial/sbt-pack/pull/120
|
||||
# gets merged.
|
||||
local DIR="cli/target/pack/bin"
|
||||
mv "$DIR/coursier" "$DIR/coursier.orig"
|
||||
sed '1{/^$/d}' < "$DIR/coursier.orig" > "$DIR/coursier"
|
||||
chmod +x "$DIR/coursier"
|
||||
|
||||
docker run -it --rm \
|
||||
-v $(pwd)/cli/target/pack:/opt/coursier \
|
||||
-e CI=true \
|
||||
openjdk:6-jre \
|
||||
/opt/coursier/bin/coursier fetch org.scalacheck::scalacheck:1.13.4
|
||||
|
||||
docker run -it --rm \
|
||||
-v $(pwd)/cli/target/pack:/opt/coursier \
|
||||
-e CI=true \
|
||||
openjdk:6-jre \
|
||||
/opt/coursier/bin/coursier launch --help
|
||||
}
|
||||
|
||||
testSbtCoursierJava6() {
|
||||
sbt ++${SCALA_VERSION} coreJVM/publishLocal cache/publishLocal extra/publishLocal sbt-coursier/publishLocal
|
||||
|
||||
|
|
@ -211,7 +185,7 @@ publish() {
|
|||
}
|
||||
|
||||
testBootstrap() {
|
||||
if is211; then
|
||||
if is212; then
|
||||
sbt ++${SCALA_VERSION} "project cli" pack
|
||||
cli/target/pack/bin/coursier bootstrap -o cs-echo io.get-coursier:echo:1.0.0
|
||||
if [ "$(./cs-echo foo)" != foo ]; then
|
||||
|
|
@ -222,10 +196,10 @@ testBootstrap() {
|
|||
}
|
||||
|
||||
testNativeBootstrap() {
|
||||
if is211; then
|
||||
if is212 && [ "$NATIVE" = "1" ]; then
|
||||
sbt ++${SCALA_VERSION} "project cli" pack
|
||||
cli/target/pack/bin/coursier bootstrap -S -o native-test io.get-coursier.scala-native::sandbox_native0.3:0.3.0-coursier-1
|
||||
if [ "$(./native-test)" != "Hello, World!" ]; then
|
||||
cli/target/pack/bin/coursier bootstrap -S -o native-echo io.get-coursier:echo_native0.3_2.11:1.0.1
|
||||
if [ "$(./native-echo -n foo a)" != "foo a" ]; then
|
||||
echo "Error: unexpected output from native test bootstrap." 1>&2
|
||||
exit 1
|
||||
fi
|
||||
|
|
@ -275,10 +249,6 @@ else
|
|||
|
||||
validateReadme
|
||||
checkBinaryCompatibility
|
||||
|
||||
if is211; then
|
||||
testLauncherJava6
|
||||
fi
|
||||
fi
|
||||
|
||||
# Not using a jdk6 matrix entry with Travis as some sources of coursier require Java 7 to compile
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ object IvyLocalTests extends TestSuite {
|
|||
'uniqueArtifacts - async {
|
||||
|
||||
val res = await(CentralTests.resolve(
|
||||
Set(Dependency(Module("io.get-coursier", "coursier-cli_2.11"), version, transitive = false)),
|
||||
Set(Dependency(Module("io.get-coursier", "coursier-cli_2.12"), version, transitive = false)),
|
||||
extraRepos = extraRepos
|
||||
))
|
||||
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
Subproject commit 3ff24a70a65a09243110905735cc67c87588ce4d
|
||||
Subproject commit 03da8397c0239f19521779d8ee2bdc253d19bb57
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
io.get-coursier:coursier_2.11:1.0.2-SNAPSHOT:compile
|
||||
org.scala-lang:scala-library:2.11.11:default
|
||||
org.scala-lang:scala-library:2.11.12:default
|
||||
org.scala-lang.modules:scala-xml_2.11:1.0.6:default
|
||||
org.scalaz:scalaz-core_2.11:7.2.16:default
|
||||
|
|
|
|||
|
|
@ -45,9 +45,12 @@ abstract class CentralTests extends TestSuite {
|
|||
.run(fetch0)
|
||||
.map { res =>
|
||||
|
||||
assert(res.metadataErrors.isEmpty)
|
||||
assert(res.conflicts.isEmpty)
|
||||
assert(res.isDone)
|
||||
val metadataErrors = res.metadataErrors
|
||||
val conflicts = res.conflicts
|
||||
val isDone = res.isDone
|
||||
assert(metadataErrors.isEmpty)
|
||||
assert(conflicts.isEmpty)
|
||||
assert(isDone)
|
||||
|
||||
res
|
||||
}
|
||||
|
|
@ -180,9 +183,12 @@ abstract class CentralTests extends TestSuite {
|
|||
): Future[T] = async {
|
||||
val res = await(resolve(deps, extraRepos = extraRepos))
|
||||
|
||||
assert(res.metadataErrors.isEmpty)
|
||||
assert(res.conflicts.isEmpty)
|
||||
assert(res.isDone)
|
||||
val metadataErrors = res.metadataErrors
|
||||
val conflicts = res.conflicts
|
||||
val isDone = res.isDone
|
||||
assert(metadataErrors.isEmpty)
|
||||
assert(conflicts.isEmpty)
|
||||
assert(isDone)
|
||||
|
||||
val artifacts = classifierOpt
|
||||
.fold(res.dependencyArtifacts(withOptional = optional))(c => res.dependencyClassifiersArtifacts(Seq(c)))
|
||||
|
|
@ -575,9 +581,12 @@ abstract class CentralTests extends TestSuite {
|
|||
|
||||
val res = await(resolve(deps))
|
||||
|
||||
assert(res.metadataErrors.isEmpty)
|
||||
assert(res.conflicts.isEmpty)
|
||||
assert(res.isDone)
|
||||
val metadataErrors = res.metadataErrors
|
||||
val conflicts = res.conflicts
|
||||
val isDone = res.isDone
|
||||
assert(metadataErrors.isEmpty)
|
||||
assert(conflicts.isEmpty)
|
||||
assert(isDone)
|
||||
|
||||
val artifacts = res.artifacts
|
||||
|
||||
|
|
@ -610,9 +619,12 @@ abstract class CentralTests extends TestSuite {
|
|||
|
||||
val res = await(resolve(deps))
|
||||
|
||||
assert(res.metadataErrors.isEmpty)
|
||||
assert(res.conflicts.isEmpty)
|
||||
assert(res.isDone)
|
||||
val metadataErrors = res.metadataErrors
|
||||
val conflicts = res.conflicts
|
||||
val isDone = res.isDone
|
||||
assert(metadataErrors.isEmpty)
|
||||
assert(conflicts.isEmpty)
|
||||
assert(isDone)
|
||||
|
||||
val dependencyArtifacts = res.dependencyArtifacts(withOptional = true)
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue