diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..98cf84fdb --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,3 @@ +# git blame should ignore the following commits: +# - scalafmt lm-core, lm-ivy and lm-coursier +30248a451385540a604505911985b44b7a9f0d73 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f765e58b0..903779edf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,6 +48,14 @@ jobs: java: 8 distribution: zulu jobtype: 9 + - os: ubuntu-latest + java: 8 + distribution: temurin + jobtype: 10 + - os: windows-latest + java: 8 + distribution: temurin + jobtype: 10 runs-on: ${{ matrix.os }} env: JAVA_OPTS: -Xms800M -Xmx2G -Xss6M -XX:ReservedCodeCacheSize=128M -server -Dsbt.io.virtual=false -Dfile.encoding=UTF-8 @@ -59,20 +67,22 @@ jobs: JDK11: adopt@1.11.0-9 SPARK_LOCAL_IP: "127.0.0.1" steps: + - name: Don't convert LF to CRLF during checkout + if: runner.os == 'Windows' + run: | + git config --global core.autocrlf false + git config --global core.eol lf - name: Checkout sbt/sbt uses: actions/checkout@v4 + with: + fetch-depth: 0 + submodules: true - name: Checkout sbt/io uses: actions/checkout@v4 with: repository: sbt/io ref: develop path: io - - name: Checkout sbt/librarymanagement - uses: actions/checkout@v4 - with: - repository: sbt/librarymanagement - ref: develop - path: librarymanagement - name: Checkout sbt/zinc uses: actions/checkout@v4 with: @@ -172,7 +182,7 @@ jobs: # cd io # sbt -v -Dsbt.build.version=${BUILD_VERSION} +publishLocal # cd ../ - # sbt -Dsbtlm.path=$HOME/work/sbt/sbt/librarymanagement -Dsbtzinc.path=$HOME/work/sbt/sbt/zinc -Dsbt.build.version=$BUILD_VERSION -Dsbt.build.fatal=false "+lowerUtils/publishLocal; {librarymanagement}/publishLocal; {zinc}/publishLocal; upperModules/publishLocal" + # sbt -Dsbtzinc.path=$HOME/work/sbt/sbt/zinc -Dsbt.build.version=$BUILD_VERSION -Dsbt.build.fatal=false "+lowerUtils/publishLocal; {zinc}/publishLocal; upperModules/publishLocal" # rm -r $(find $HOME/.sbt/boot -name "*-SNAPSHOT") || true # sbt -v -Dsbt.version=$BUILD_VERSION "++$SCALA_213; all $UTIL_TESTS; ++$SCALA_212; all $UTIL_TESTS; scripted actions/* source-dependencies/*1of3 dependency-management/*1of4 java/*" - name: Build and test (7) @@ -216,6 +226,10 @@ jobs: cd citest ./test.bat test3/test3.bat + - name: Build and test lm-coursier (10) + if: ${{ matrix.jobtype == 10 }} + shell: bash + run: scripts/lm-coursier-ci.sh - name: Cleanup shell: bash run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 767d58986..1c9a644c3 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -28,12 +28,6 @@ jobs: repository: sbt/io ref: develop path: io - - name: Checkout sbt/librarymanagement - uses: actions/checkout@v4 - with: - repository: sbt/librarymanagement - ref: develop - path: librarymanagement - name: Checkout sbt/zinc uses: actions/checkout@v4 with: @@ -55,7 +49,7 @@ jobs: cd io sbt -v +publish cd ../ - sbt -Dsbtlm.path=$HOME/work/sbt/sbt/librarymanagement -Dsbtzinc.path=$HOME/work/sbt/sbt/zinc -Dsbt.build.fatal=false "+lowerUtils/publish; {librarymanagement}/publish; {zinc}/publish; upperModules/publish; bundledLauncherProj/publish" + sbt -Dsbtzinc.path=$HOME/work/sbt/sbt/zinc -Dsbt.build.fatal=false "+lowerUtils/publish; {zinc}/publish; upperModules/publish; bundledLauncherProj/publish" rm -rf "$HOME/.ivy2/local" || true rm -r $(find $HOME/.sbt/boot -name "*-SNAPSHOT") || true find $HOME/Library/Caches/Coursier/v1 -name 
"ivydata-*.properties" -delete || true diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..5738b50ae --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "lm-coursier/metadata"] + path = lm-coursier/metadata + url = https://github.com/coursier/handmade-metadata.git diff --git a/.scala-steward.conf b/.scala-steward.conf new file mode 100644 index 000000000..d9a4607f5 --- /dev/null +++ b/.scala-steward.conf @@ -0,0 +1,3 @@ +updates.pin = [ + { groupId = "org.slf4j", artifactId="slf4j-api", version = "1." } +] diff --git a/DEVELOPING.md b/DEVELOPING.md index a0d41f26a..7c4e315dc 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -45,7 +45,7 @@ When working on a change that requires changing one or more sub modules, the sou ``` $ mkdir sbt-modules $ cd sbt-modules - $ for i in sbt io librarymanagement zinc; do \ + $ for i in sbt io zinc; do \ git clone https://github.com/sbt/$i.git && (cd $i; git checkout -b develop origin/develop) done $ cd sbt diff --git a/README.md b/README.md index b169ba27d..4c754d767 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,6 @@ [asking]: https://stackoverflow.com/questions/ask?tags=sbt [LICENSE]: LICENSE [sbt/io]: https://github.com/sbt/io - [sbt/librarymanagement]: https://github.com/sbt/librarymanagement [sbt/zinc]: https://github.com/sbt/zinc [sbt/sbt]: https://github.com/sbt/sbt @@ -29,7 +28,6 @@ This is the 1.x series of sbt. The source code of sbt is split across several GitHub repositories, including this one. - [sbt/io][sbt/io] hosts `sbt.io` module. -- [sbt/librarymanagement][sbt/librarymanagement] hosts `sbt.librarymanagement` module that wraps Ivy. - [sbt/zinc][sbt/zinc] hosts Zinc, an incremental compiler for Scala. - [sbt/sbt][sbt/sbt], this repository hosts modules that implements the build tool. diff --git a/build.sbt b/build.sbt index 9d39e3540..c1eda5dff 100644 --- a/build.sbt +++ b/build.sbt @@ -1,20 +1,19 @@ import Dependencies._ -import Util._ import com.typesafe.tools.mima.core.ProblemFilters._ import com.typesafe.tools.mima.core._ import local.Scripted import java.nio.file.{ Files, Path => JPath } import java.util.Locale - -import scala.util.Try +import sbt.internal.inc.Analysis +import com.eed3si9n.jarjarabrams.ModuleCoordinate // ThisBuild settings take lower precedence, // but can be shared across the multi projects. ThisBuild / version := { - val v = "2.0.0-alpha12-SNAPSHOT" + val v = "2.0.0-SNAPSHOT" nightlyVersion.getOrElse(v) } -ThisBuild / version2_13 := "2.0.0-alpha10-SNAPSHOT" +ThisBuild / Utils.version2_13 := "2.0.0-SNAPSHOT" ThisBuild / versionScheme := Some("early-semver") ThisBuild / scalafmtOnCompile := !(Global / insideCI).value ThisBuild / Test / scalafmtOnCompile := !(Global / insideCI).value @@ -49,11 +48,9 @@ ThisBuild / libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" Global / semanticdbEnabled := !(Global / insideCI).value // Change main/src/main/scala/sbt/plugins/SemanticdbPlugin.scala too, if you change this. 
Global / semanticdbVersion := "4.7.8" -val excludeLint = SettingKey[Set[Def.KeyedInitialize[_]]]("excludeLintKeys") -Global / excludeLint := (Global / excludeLint).?.value.getOrElse(Set.empty) -Global / excludeLint += componentID -Global / excludeLint += scriptedBufferLog -Global / excludeLint += checkPluginCross +Global / excludeLintKeys += Utils.componentID +Global / excludeLintKeys += scriptedBufferLog +Global / excludeLintKeys += checkPluginCross ThisBuild / evictionErrorLevel := Level.Info def commonSettings: Seq[Setting[_]] = Def.settings( @@ -68,13 +65,13 @@ def commonSettings: Seq[Setting[_]] = Def.settings( ) ), scalaVersion := baseScalaVersion, - componentID := None, + Utils.componentID := None, resolvers += Resolver.typesafeIvyRepo("releases").withName("typesafe-sbt-build-ivy-releases"), resolvers ++= Resolver.sonatypeOssRepos("snapshots"), resolvers ++= Resolver.sonatypeOssRepos("snapshots"), testFrameworks += TestFramework("hedgehog.sbt.Framework"), testFrameworks += TestFramework("verify.runner.Framework"), - Global / concurrentRestrictions += Util.testExclusiveRestriction, + Global / concurrentRestrictions += Utils.testExclusiveRestriction, Test / testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"), Test / testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"), compile / javacOptions ++= Seq("-Xlint", "-Xlint:-serial"), @@ -116,11 +113,10 @@ def utilCommonSettings: Seq[Setting[_]] = Def.settings( ) def minimalSettings: Seq[Setting[_]] = - commonSettings ++ customCommands ++ - publishPomSettings + commonSettings ++ customCommands ++ Utils.publishPomSettings def baseSettings: Seq[Setting[_]] = - minimalSettings ++ Seq(projectComponent) ++ baseScalacOptions ++ Licensed.settings + minimalSettings ++ Seq(Utils.projectComponent) ++ Utils.baseScalacOptions ++ Licensed.settings def testedBaseSettings: Seq[Setting[_]] = baseSettings ++ testDependencies @@ -202,14 +198,13 @@ lazy val sbtRoot: Project = (project in file(".")) |!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!""".stripMargin else "") }, - Util.baseScalacOptions, + Utils.baseScalacOptions, Docs.settings, scalacOptions += "-Ymacro-expand:none", // for both sxr and doc - Util.publishPomSettings, + Utils.publishPomSettings, otherRootSettings, - publish := {}, + Utils.noPublish, publishLocal := {}, - publish / skip := true, Global / commands += Command .single("sbtOn")((state, dir) => s"sbtProj/test:runMain sbt.RunFromSourceMain $dir" :: state), mimaSettings, @@ -312,7 +307,7 @@ val logicProj = (project in file("internal") / "util-logic") // format from which Java sources are generated by the datatype generator Projproject lazy val utilInterface = (project in file("internal") / "util-interface").settings( baseSettings, - javaOnlySettings, + Utils.javaOnlySettings, crossPaths := false, autoScalaLibrary := false, Compile / doc / javacOptions := Nil, @@ -346,7 +341,7 @@ lazy val utilCore = project Seq("org.scala-lang" % "scala-reflect" % scalaVersion.value) } else Seq.empty }, - Util.keywordsSettings, + Utils.keywordsSettings, utilMimaSettings ) @@ -574,7 +569,7 @@ lazy val stdTaskProj = (project in file("tasks-standard")) .settings( testedBaseSettings, name := "Task System", - testExclusive, + Utils.testExclusive, mimaSettings, mimaBinaryIssueFilters ++= Seq( // unused private[sbt] @@ -627,7 +622,8 @@ lazy val scriptedSbtProj = (project in file("scripted-sbt")) mimaSettings, scriptedSbtMimaSettings, ) - .configure(addSbtIO, addSbtCompilerInterface, addSbtLmCore) + .dependsOn(lmCore) + 
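Throughout this file the old addSbtLmCore / addSbtLmIvy configure-helpers, which pulled librarymanagement in from outside the build, are replaced by ordinary inter-project dependencies such as the .dependsOn(lmCore) just above, so the lm sources now compile together with the rest of sbt. A minimal before/after sketch in build.sbt style, assuming the removed helper simply added the published artifact (the real helpers live in the meta-build and are not part of this diff; coordinates, version and the someProj name are illustrative):

// Before: librarymanagement resolved as a published binary via a configure-helper.
def addSbtLmCore(p: Project): Project =
  p.settings(libraryDependencies += "org.scala-sbt" %% "librarymanagement-core" % "1.9.3")

// After: lm-core is a project in this build, so local changes are picked up on recompile.
lazy val someProj = project.dependsOn(lmCore)  // someProj is a placeholder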
.configure(addSbtIO, addSbtCompilerInterface) lazy val dependencyTreeProj = (project in file("dependency-tree")) .dependsOn(sbtProj) @@ -685,12 +681,12 @@ lazy val actionsProj = (project in file("main-actions")) exclude[ReversedMissingMethodProblem]("sbt.compiler.Eval#EvalType.sourceName"), ), ) + .dependsOn(lmCore) .configure( addSbtIO, addSbtCompilerInterface, addSbtCompilerClasspath, addSbtCompilerApiInfo, - addSbtLmCore, addSbtZinc ) @@ -791,11 +787,11 @@ lazy val commandProj = (project in file("main-command")) } }, ) + .dependsOn(lmCore) .configure( addSbtIO, addSbtCompilerInterface, addSbtCompilerClasspath, - addSbtLmCore, addSbtZinc ) @@ -871,12 +867,8 @@ lazy val mainSettingsProj = (project in file("main-settings")) exclude[IncompatibleSignatureProblem]("sbt.TupleSyntax.t*ToTable*"), ), ) - .configure( - addSbtIO, - addSbtCompilerInterface, - addSbtCompilerClasspath, - addSbtLmCore - ) + .dependsOn(lmCore) + .configure(addSbtIO, addSbtCompilerInterface, addSbtCompilerClasspath) lazy val zincLmIntegrationProj = (project in file("zinc-lm-integration")) .settings( @@ -892,7 +884,8 @@ lazy val zincLmIntegrationProj = (project in file("zinc-lm-integration")) ), libraryDependencies += launcherInterface, ) - .configure(addSbtZincCompileCore, addSbtLmCore, addSbtLmIvyTest) + .dependsOn(lmCore, lmIvy) + .configure(addSbtZincCompileCore) lazy val buildFileProj = (project in file("buildfile")) .dependsOn( @@ -903,13 +896,8 @@ lazy val buildFileProj = (project in file("buildfile")) name := "build file", libraryDependencies ++= Seq(scalaCompiler), ) - .configure( - addSbtIO, - addSbtLmCore, - addSbtLmIvy, - addSbtCompilerInterface, - addSbtZincCompileCore - ) + .dependsOn(lmCore, lmIvy) + .configure(addSbtIO, addSbtCompilerInterface, addSbtZincCompileCore) // The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions. 
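Several subprojects in this file (sbtRoot above; serverTestProj, sbtClientProj, lowerUtils, upperModules and the lm-coursier projects below) now pull in a shared Utils.noPublish settings bundle instead of repeating publish := {} and publish / skip := true. Its definition sits in the meta-build and is not shown in this diff; a plausible shape, stated only as an assumption, is:

// project/Utils.scala (sketch; the actual definition is not part of this diff)
import sbt._
import sbt.Keys._

object Utils {
  val noPublish: Seq[Setting[_]] = Seq(
    publish := {},
    publish / skip := true,
  )
}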
lazy val mainProj = (project in file("main")) @@ -941,7 +929,6 @@ lazy val mainProj = (project in file("main")) sjsonNewCore.value, launcherInterface, caffeine, - lmCoursierShaded, ) ++ log4jModules), libraryDependencies ++= (scalaVersion.value match { case v if v.startsWith("2.12.") => List() @@ -958,13 +945,8 @@ lazy val mainProj = (project in file("main")) // mimaSettings, // mimaBinaryIssueFilters ++= Vector(), ) - .configure( - addSbtIO, - addSbtLmCore, - addSbtLmIvy, - addSbtCompilerInterface, - addSbtZincCompileCore - ) + .dependsOn(lmCore, lmIvy, lmCoursierShadedPublishing) + .configure(addSbtIO, addSbtCompilerInterface, addSbtZincCompileCore) // Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object // technically, we need a dependency on all of mainProj's dependencies, but we don't do that since this is strictly an integration project @@ -977,7 +959,7 @@ lazy val sbtProj = (project in file("sbt-app")) normalizedName := "sbt", version := { if (scalaVersion.value == baseScalaVersion) version.value - else version2_13.value + else Utils.version2_13.value }, crossPaths := false, crossTarget := { target.value / scalaVersion.value }, @@ -1004,7 +986,7 @@ lazy val serverTestProj = (project in file("server-test")) .dependsOn(sbtProj % "compile->test", scriptedSbtProj % "compile->test") .settings( testedBaseSettings, - publish / skip := true, + Utils.noPublish, // make server tests serial Test / watchTriggers += baseDirectory.value.toGlob / "src" / "server-test" / **, Test / parallelExecution := false, @@ -1050,7 +1032,7 @@ lazy val sbtClientProj = (project in file("client")) .dependsOn(commandProj) .settings( commonSettings, - publish / skip := true, + Utils.noPublish, name := "sbt-client", mimaPreviousArtifacts := Set.empty, crossPaths := false, @@ -1135,7 +1117,7 @@ lazy val sbtBig = (project in file(".big")) lazy val lowerUtils = (project in (file("internal") / "lower")) .aggregate(lowerUtilProjects.map(p => LocalProject(p.id)): _*) .settings( - publish / skip := true + Utils.noPublish ) lazy val upperModules = (project in (file("internal") / "upper")) @@ -1144,7 +1126,7 @@ lazy val upperModules = (project in (file("internal") / "upper")) diff Seq(bundledLauncherProj)).map(p => LocalProject(p.id)): _* ) .settings( - publish / skip := true + Utils.noPublish ) lazy val sbtIgnoredProblems = { @@ -1233,6 +1215,11 @@ def allProjects = collectionProj, coreMacrosProj, remoteCacheProj, + lmCore, + lmIvy, + lmCoursierDefinitions, + lmCoursier, + lmCoursierShaded, ) ++ lowerUtilProjects // These need to be cross published to 2.12 and 2.13 for Zinc @@ -1355,11 +1342,257 @@ def customCommands: Seq[Setting[_]] = Seq( }, ) -ThisBuild / pomIncludeRepository := { _ => - false -} +ThisBuild / pomIncludeRepository := (_ => false) // drop repos other than Maven Central from POM ThisBuild / publishTo := { val nexus = "https://oss.sonatype.org/" Some("releases" at nexus + "service/local/staging/deploy/maven2") } ThisBuild / publishMavenStyle := true + +def lmTestSettings: Seq[Setting[_]] = Def.settings( + Test / classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.Flat, + Test / parallelExecution := false +) + +lazy val lmCore = (project in file("lm-core")) + // .enablePlugins(ContrabandPlugin, JsonCodecPlugin) + .settings( + commonSettings, + lmTestSettings, + name := "librarymanagement-core", + contrabandSjsonNewVersion := sjsonNewVersion, + libraryDependencies ++= Seq( + jsch, + // scalaReflect.value, + // scalaCompiler.value, + 
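// jsch backs the ssh/sftp resolvers available to Ivy-based resolution, and the
// Gigahorse Apache-HTTP client below handles HTTP(S) metadata and artifact
// downloads (assumed roles, inferred from how librarymanagement uses these libraries).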
launcherInterface, + gigahorseApacheHttp, + scalaXml.value, + sjsonNewScalaJson.value % Optional, + sjsonNewCore.value % Optional, + scalaTest % Test, + scalaCheck % Test, + scalaVerify % Test, + ), + Compile / resourceGenerators += Def + .task( + Utils.generateVersionFile( + version.value, + resourceManaged.value, + streams.value, + (Compile / compile).value.asInstanceOf[Analysis] + ) + ) + .taskValue, + Compile / scalacOptions ++= (scalaVersion.value match { + case v if v.startsWith("2.12.") => List("-Ywarn-unused:-locals,-explicits,-privates") + case _ => List() + }), + Compile / unmanagedSourceDirectories += + baseDirectory.value / "src" / "main" / "contraband-scala", + Compile / generateContrabands / sourceManaged := baseDirectory.value / "src" / "main" / "contraband-scala", + Compile / generateContrabands / contrabandFormatsForType := DatatypeConfig.getFormats, + // WORKAROUND sbt/sbt#2205 include managed sources in packageSrc + Compile / packageSrc / mappings ++= { + val srcs = (Compile / managedSources).value + val sdirs = (Compile / managedSourceDirectories).value + val base = baseDirectory.value + import Path._ + (((srcs --- sdirs --- base) pair (relativeTo(sdirs) | relativeTo(base) | flat)) toSeq) + }, + ) + .dependsOn(utilLogging, utilPosition, utilCache) + .configure(addSbtIO, addSbtCompilerInterface) + +lazy val lmIvy = (project in file("lm-ivy")) + // .enablePlugins(ContrabandPlugin, JsonCodecPlugin) + .dependsOn(lmCore) + .settings( + commonSettings, + lmTestSettings, + name := "librarymanagement-ivy", + contrabandSjsonNewVersion := sjsonNewVersion, + libraryDependencies ++= Seq( + ivy, + sjsonNewScalaJson.value, + sjsonNewCore.value, + scalaTest % Test, + scalaCheck % Test, + scalaVerify % Test, + ), + Compile / unmanagedSourceDirectories += + baseDirectory.value / "src" / "main" / "contraband-scala", + Compile / generateContrabands / sourceManaged := baseDirectory.value / "src" / "main" / "contraband-scala", + Compile / generateContrabands / contrabandFormatsForType := DatatypeConfig.getFormats, + Test / classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.Flat + ) + +lazy val lmCoursierSettings: Seq[Setting[_]] = Def.settings( + baseSettings, + headerLicense := Some( + HeaderLicense.Custom( + """|sbt + |Copyright 2024, Scala Center + |Copyright 2015 - 2023, Alexandre Archambault + |Licensed under Apache License 2.0 (see LICENSE) + |""".stripMargin + ) + ), + developers += + Developer( + "alexarchambault", + "Alexandre Archambault", + "", + url("https://github.com/alexarchambault") + ), +) + +lazy val lmCoursierDependencies = Def.settings( + libraryDependencies ++= Seq( + coursier, + coursierSbtMavenRepo, + "io.get-coursier.jniutils" % "windows-jni-utils-lmcoursier" % jniUtilsVersion, + "net.hamnaberg" %% "dataclass-annotation" % dataclassScalafixVersion % Provided, + "org.scalatest" %% "scalatest" % "3.2.19" % Test, + ), + excludeDependencies ++= Seq( + ExclusionRule("org.scala-lang.modules", "scala-xml_2.13"), + ExclusionRule("org.scala-lang.modules", "scala-collection-compat_2.13") + ), +) + +def dataclassGen(data: Reference) = Def.taskDyn { + val root = (ThisBuild / baseDirectory).value.toURI.toString + val from = (data / Compile / sourceDirectory).value + val to = (Compile / sourceManaged).value + val outFrom = from.toURI.toString.stripSuffix("/").stripPrefix(root) + val outTo = to.toURI.toString.stripSuffix("/").stripPrefix(root) + val _ = (data / Compile / compile).value + Def.task { + val _ = (data / Compile / scalafix) + .toTask(s" --rules GenerateDataClass 
--out-from=$outFrom --out-to=$outTo") + .value + (to ** "*.scala").get + } +} + +lazy val lmCoursierDefinitions = project + .in(file("lm-coursier/definitions")) + .disablePlugins(MimaPlugin) + .settings( + lmCoursierSettings, + semanticdbEnabled := true, + semanticdbVersion := scalafixSemanticdb.revision, + scalafixDependencies += "net.hamnaberg" %% "dataclass-scalafix" % dataclassScalafixVersion, + libraryDependencies ++= Seq( + coursier, + "net.hamnaberg" %% "dataclass-annotation" % dataclassScalafixVersion % Provided, + ), + conflictWarning := ConflictWarning.disable, + Utils.noPublish, + ) + .dependsOn(lmIvy % "provided") + +lazy val lmCoursier = project + .in(file("lm-coursier")) + .settings( + lmCoursierSettings, + Mima.settings, + Mima.lmCoursierFilters, + lmCoursierDependencies, + Compile / sourceGenerators += dataclassGen(lmCoursierDefinitions).taskValue, + ) + .dependsOn( + // We depend on lmIvy rather than just lmCore to handle the ModuleDescriptor + // passed to DependencyResolutionInterface.update, which is an IvySbt#Module + // (seems DependencyResolutionInterface.moduleDescriptor is ignored). + lmIvy + ) + +lazy val lmCoursierShaded = project + .in(file("lm-coursier/target/shaded-module")) + .settings( + lmCoursierSettings, + Mima.settings, + Mima.lmCoursierFilters, + Mima.lmCoursierShadedFilters, + Compile / sources := (lmCoursier / Compile / sources).value, + lmCoursierDependencies, + conflictWarning := ConflictWarning.disable, + Utils.noPublish, + // shadedModules ++= Set( + // "io.get-coursier" %% "coursier", + // "io.get-coursier" %% "coursier-sbt-maven-repository", + // "io.get-coursier.jniutils" % "windows-jni-utils-lmcoursier" + // ), + // validNamespaces += "lmcoursier", + // validEntries ++= Set( + // // FIXME Ideally, we should just strip those from the resulting JAR… + // "README", // from google-collections via plexus-archiver (see below) + // // from plexus-util via plexus-archiver (see below) + // "licenses/extreme.indiana.edu.license.TXT", + // "licenses/javolution.license.TXT", + // "licenses/thoughtworks.TXT", + // "licenses/", + // ), + assemblyShadeRules := { + val namespacesToShade = Seq( + "coursier", + "org.fusesource", + "macrocompat", + "io.github.alexarchambault.windowsansi", + "concurrentrefhashmap", + "com.github.ghik", + // pulled by the plexus-archiver stuff that coursier-cache + // depends on for now… can hopefully be removed in the future + "com.google.common", + "com.jcraft", + "com.lmax", + "org.apache.commons", + "org.apache.xbean", + "org.codehaus", + "org.iq80", + "org.tukaani", + "com.github.plokhotnyuk.jsoniter_scala", + "scala.cli", + "com.github.luben.zstd", + "javax.inject" // hope shading this is fine… It's probably pulled via plexus-archiver, that sbt shouldn't use anyway… + ) + namespacesToShade.map { ns => + ShadeRule.rename(ns + ".**" -> s"lmcoursier.internal.shaded.$ns.@1").inAll + } + }, + assemblyMergeStrategy := { + case PathList("lmcoursier", "internal", "shaded", "org", "fusesource", _*) => + MergeStrategy.first + // case PathList("lmcoursier", "internal", "shaded", "package.class") => MergeStrategy.first + // case PathList("lmcoursier", "internal", "shaded", "package$.class") => MergeStrategy.first + case PathList("com", "github") => MergeStrategy.discard + case PathList("com", "jcraft") => MergeStrategy.discard + case PathList("com", "lmax") => MergeStrategy.discard + case PathList("com", "sun") => MergeStrategy.discard + case PathList("com", "swoval") => MergeStrategy.discard + case PathList("com", "typesafe") => 
MergeStrategy.discard + case PathList("gigahorse") => MergeStrategy.discard + case PathList("jline") => MergeStrategy.discard + case PathList("scala") => MergeStrategy.discard + case PathList("sjsonnew") => MergeStrategy.discard + case PathList("xsbti") => MergeStrategy.discard + case PathList("META-INF", "native", _*) => MergeStrategy.first + case "META-INF/services/lmcoursier.internal.shaded.coursier.jniutils.NativeApi" => + MergeStrategy.first + case x => + val oldStrategy = (ThisBuild / assemblyMergeStrategy).value + oldStrategy(x) + } + ) + .dependsOn(lmIvy % "provided") + +lazy val lmCoursierShadedPublishing = project + .in(file("lm-coursier/target/shaded-publishing-module")) + .settings( + scalaVersion := scala3, + name := "librarymanagement-coursier", + Compile / packageBin := (lmCoursierShaded / assembly).value, + Compile / exportedProducts := Seq(Attributed.blank((Compile / packageBin).value)) + ) diff --git a/buildfile/src/main/scala/sbt/internal/Eval.scala b/buildfile/src/main/scala/sbt/internal/Eval.scala index aa2fb957b..06ae18ae8 100644 --- a/buildfile/src/main/scala/sbt/internal/Eval.scala +++ b/buildfile/src/main/scala/sbt/internal/Eval.scala @@ -43,7 +43,7 @@ class Eval( case None => VirtualDirectory("output") private val classpathString = (backingDir.toList ++ classpath) .map(_.toString) - .mkString(":") + .mkString(java.io.File.pathSeparator) private lazy val driver: EvalDriver = new EvalDriver private lazy val reporter: EvalReporter = mkReporter match case Some(f) => f() diff --git a/lm-core/NOTICE b/lm-core/NOTICE new file mode 100644 index 000000000..dd0df7b5a --- /dev/null +++ b/lm-core/NOTICE @@ -0,0 +1,28 @@ +Simple Build Tool: Ivy Interface Component +Copyright 2008, 2009, 2010 Mark Harrah +Licensed under BSD-style license (see LICENSE) + +Portions based on Apache Ivy, +licensed under the Apache License, Version 2.0 (see licenses/LICENSE_Apache) + +It requires the following notice: + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). + +Portions of Ivy were originally developed by +Jayasoft SARL (http://www.jayasoft.fr/) +and are licensed to the Apache Software Foundation under the +"Software Grant License Agreement" + + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/ConfigurationReportLite.scala b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/ConfigurationReportLite.scala new file mode 100644 index 000000000..d277f7ca2 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/ConfigurationReportLite.scala @@ -0,0 +1,36 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.internal.librarymanagement +final class ConfigurationReportLite private ( + val configuration: String, + val details: Vector[sbt.librarymanagement.OrganizationArtifactReport]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ConfigurationReportLite => (this.configuration == x.configuration) && (this.details == x.details) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.internal.librarymanagement.ConfigurationReportLite".##) + configuration.##) + details.##) + } + override def toString: String = { + "ConfigurationReportLite(" + configuration + ", " + details + ")" + } + private[this] def copy(configuration: String = configuration, details: Vector[sbt.librarymanagement.OrganizationArtifactReport] = details): ConfigurationReportLite = { + new ConfigurationReportLite(configuration, details) + } + def withConfiguration(configuration: String): ConfigurationReportLite = { + copy(configuration = configuration) + } + def withDetails(details: Vector[sbt.librarymanagement.OrganizationArtifactReport]): ConfigurationReportLite = { + copy(details = details) + } +} +object ConfigurationReportLite { + + def apply(configuration: String, details: Vector[sbt.librarymanagement.OrganizationArtifactReport]): ConfigurationReportLite = new ConfigurationReportLite(configuration, details) +} diff --git a/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/SemComparator.scala b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/SemComparator.scala new file mode 100644 index 000000000..25765e8f8 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/SemComparator.scala @@ -0,0 +1,56 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.internal.librarymanagement +final class SemComparator private ( + val op: sbt.internal.librarymanagement.SemSelOperator, + val major: Option[Long], + val minor: Option[Long], + val patch: Option[Long], + val tags: Seq[String]) extends sbt.internal.librarymanagement.SemComparatorExtra with Serializable { + def matches(version: sbt.librarymanagement.VersionNumber): Boolean = this.matchesImpl(version) + def expandWildcard: Seq[SemComparator] = { + if (op == sbt.internal.librarymanagement.SemSelOperator.Eq && !allFieldsSpecified) { + Seq( + this.withOp(sbt.internal.librarymanagement.SemSelOperator.Gte), + this.withOp(sbt.internal.librarymanagement.SemSelOperator.Lte) + ) + } else { Seq(this) } + } + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SemComparator => (this.op == x.op) && (this.major == x.major) && (this.minor == x.minor) && (this.patch == x.patch) && (this.tags == x.tags) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.internal.librarymanagement.SemComparator".##) + op.##) + major.##) + minor.##) + patch.##) + tags.##) + } + override def toString: String = { + this.toStringImpl + } + private[this] def copy(op: sbt.internal.librarymanagement.SemSelOperator = op, major: Option[Long] = major, minor: Option[Long] = minor, patch: Option[Long] = patch, tags: Seq[String] = tags): SemComparator = { + new SemComparator(op, major, minor, patch, tags) + } + def withOp(op: sbt.internal.librarymanagement.SemSelOperator): SemComparator = { + copy(op = op) + } + def withMajor(major: Option[Long]): SemComparator = { + copy(major = major) + } + def withMinor(minor: Option[Long]): SemComparator = { + copy(minor = minor) + } + def withPatch(patch: Option[Long]): SemComparator = { + copy(patch = patch) + } + def withTags(tags: Seq[String]): SemComparator = { + copy(tags = tags) + } +} +object SemComparator extends sbt.internal.librarymanagement.SemComparatorFunctions { + def apply(comparator: String): SemComparator = parse(comparator) + def apply(op: sbt.internal.librarymanagement.SemSelOperator, major: Option[Long], minor: Option[Long], patch: Option[Long], tags: Seq[String]): SemComparator = new SemComparator(op, major, minor, patch, tags) +} diff --git a/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/SemSelAndChunk.scala b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/SemSelAndChunk.scala new file mode 100644 index 000000000..820fc3231 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/SemSelAndChunk.scala @@ -0,0 +1,32 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.internal.librarymanagement +final class SemSelAndChunk private ( + val comparators: Seq[sbt.internal.librarymanagement.SemComparator]) extends Serializable { + def matches(version: sbt.librarymanagement.VersionNumber): Boolean = comparators.forall(_.matches(version)) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SemSelAndChunk => (this.comparators == x.comparators) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (17 + "sbt.internal.librarymanagement.SemSelAndChunk".##) + comparators.##) + } + override def toString: String = { + comparators.map(_.toString).mkString(" ") + } + private[this] def copy(comparators: Seq[sbt.internal.librarymanagement.SemComparator] = comparators): SemSelAndChunk = { + new SemSelAndChunk(comparators) + } + def withComparators(comparators: Seq[sbt.internal.librarymanagement.SemComparator]): SemSelAndChunk = { + copy(comparators = comparators) + } +} +object SemSelAndChunk extends sbt.internal.librarymanagement.SemSelAndChunkFunctions { + def apply(andClauseToken: String): SemSelAndChunk = parse(andClauseToken) + def apply(comparators: Seq[sbt.internal.librarymanagement.SemComparator]): SemSelAndChunk = new SemSelAndChunk(comparators) +} diff --git a/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/UpdateReportLite.scala b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/UpdateReportLite.scala new file mode 100644 index 000000000..f5040d97f --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/internal/librarymanagement/UpdateReportLite.scala @@ -0,0 +1,32 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.internal.librarymanagement +final class UpdateReportLite private ( + val configurations: Vector[sbt.internal.librarymanagement.ConfigurationReportLite]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: UpdateReportLite => (this.configurations == x.configurations) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (17 + "sbt.internal.librarymanagement.UpdateReportLite".##) + configurations.##) + } + override def toString: String = { + "UpdateReportLite(" + configurations + ")" + } + private[this] def copy(configurations: Vector[sbt.internal.librarymanagement.ConfigurationReportLite] = configurations): UpdateReportLite = { + new UpdateReportLite(configurations) + } + def withConfigurations(configurations: Vector[sbt.internal.librarymanagement.ConfigurationReportLite]): UpdateReportLite = { + copy(configurations = configurations) + } +} +object UpdateReportLite { + + def apply(configurations: Vector[sbt.internal.librarymanagement.ConfigurationReportLite]): UpdateReportLite = new UpdateReportLite(configurations) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/Artifact.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Artifact.scala new file mode 100644 index 000000000..e7d033043 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Artifact.scala @@ -0,0 +1,67 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class Artifact private ( + val name: String, + val `type`: String, + val extension: String, + val classifier: Option[String], + val configurations: Vector[sbt.librarymanagement.ConfigRef], + val url: Option[java.net.URI], + val extraAttributes: Map[String, String], + val checksum: Option[sbt.librarymanagement.Checksum], + val allowInsecureProtocol: Boolean) extends sbt.librarymanagement.ArtifactExtra with Serializable { + private[sbt] def validateProtocol(logger: sbt.util.Logger): Boolean = Resolver.validateArtifact(this, logger) + private def this(name: String) = this(name, Artifact.DefaultType, Artifact.DefaultExtension, None, Vector.empty, None, Map.empty, None, false) + private def this(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Vector[sbt.librarymanagement.ConfigRef], url: Option[java.net.URI], extraAttributes: Map[String, String], checksum: Option[sbt.librarymanagement.Checksum]) = this(name, `type`, extension, classifier, configurations, url, extraAttributes, checksum, false) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: Artifact => (this.name == x.name) && (this.`type` == x.`type`) && (this.extension == x.extension) && (this.classifier == x.classifier) && (this.configurations == x.configurations) && (this.url == x.url) && (this.extraAttributes == x.extraAttributes) && (this.checksum == x.checksum) && (this.allowInsecureProtocol == x.allowInsecureProtocol) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.Artifact".##) + name.##) + `type`.##) + extension.##) + classifier.##) + configurations.##) + url.##) + extraAttributes.##) + checksum.##) + allowInsecureProtocol.##) + } + override def toString: String = { + "Artifact(" + name + ", " + `type` + ", " + extension + ", " + classifier + ", " + configurations + ", " + url + ", " + extraAttributes + ", " + checksum + ", " + allowInsecureProtocol + ")" + } + private[this] def copy(name: String = name, `type`: String = `type`, extension: String = extension, classifier: Option[String] = classifier, configurations: Vector[sbt.librarymanagement.ConfigRef] = configurations, url: Option[java.net.URI] = url, extraAttributes: Map[String, String] = extraAttributes, checksum: Option[sbt.librarymanagement.Checksum] = checksum, allowInsecureProtocol: Boolean = allowInsecureProtocol): Artifact = { + new Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes, checksum, allowInsecureProtocol) + } + def withName(name: String): Artifact = { + copy(name = name) + } + def withType(`type`: String): Artifact = { + copy(`type` = `type`) + } + def withExtension(extension: String): Artifact = { + copy(extension = extension) + } + def withClassifier(classifier: Option[String]): Artifact = { + copy(classifier = classifier) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.ConfigRef]): Artifact = { + copy(configurations = configurations) + } + def withUrl(url: Option[java.net.URI]): Artifact = { + copy(url = url) + } + def withExtraAttributes(extraAttributes: Map[String, String]): Artifact = { + copy(extraAttributes = extraAttributes) + } + def withChecksum(checksum: Option[sbt.librarymanagement.Checksum]): Artifact = { + copy(checksum = checksum) + } + def withAllowInsecureProtocol(allowInsecureProtocol: Boolean): Artifact = { + 
copy(allowInsecureProtocol = allowInsecureProtocol) + } +} +object Artifact extends sbt.librarymanagement.ArtifactFunctions { + + def apply(name: String): Artifact = new Artifact(name) + def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Vector[sbt.librarymanagement.ConfigRef], url: Option[java.net.URI], extraAttributes: Map[String, String], checksum: Option[sbt.librarymanagement.Checksum]): Artifact = new Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes, checksum) + def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Vector[sbt.librarymanagement.ConfigRef], url: Option[java.net.URI], extraAttributes: Map[String, String], checksum: Option[sbt.librarymanagement.Checksum], allowInsecureProtocol: Boolean): Artifact = new Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes, checksum, allowInsecureProtocol) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactFormats.scala new file mode 100644 index 000000000..6b92852cf --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactFormats.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ArtifactFormats { self: sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val ArtifactFormat: JsonFormat[sbt.librarymanagement.Artifact] = new JsonFormat[sbt.librarymanagement.Artifact] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.Artifact = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val `type` = unbuilder.readField[String]("type") + val extension = unbuilder.readField[String]("extension") + val classifier = unbuilder.readField[Option[String]]("classifier") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.ConfigRef]]("configurations") + val url = unbuilder.readField[Option[java.net.URI]]("url") + val extraAttributes = unbuilder.readField[Map[String, String]]("extraAttributes") + val checksum = unbuilder.readField[Option[sbt.librarymanagement.Checksum]]("checksum") + val allowInsecureProtocol = unbuilder.readField[Boolean]("allowInsecureProtocol") + unbuilder.endObject() + sbt.librarymanagement.Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes, checksum, allowInsecureProtocol) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Artifact, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("type", obj.`type`) + builder.addField("extension", obj.extension) + builder.addField("classifier", obj.classifier) + builder.addField("configurations", obj.configurations) + builder.addField("url", obj.url) + builder.addField("extraAttributes", obj.extraAttributes) + builder.addField("checksum", obj.checksum) + builder.addField("allowInsecureProtocol", obj.allowInsecureProtocol) + builder.endObject() + } +} +} diff --git 
a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactTypeFilter.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactTypeFilter.scala new file mode 100644 index 000000000..6d9676e75 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactTypeFilter.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Work around the inadequacy of Ivy's ArtifactTypeFilter (that it cannot reverse a filter) + * @param types Represents the artifact types that we should try to resolve for (as in the allowed values of + `artifact[type]` from a dependency `` section). One can use this to filter + source / doc artifacts. + * @param inverted Whether to invert the types filter (i.e. allow only types NOT in the set) + */ +final class ArtifactTypeFilter private ( + val types: Set[String], + val inverted: Boolean) extends sbt.librarymanagement.ArtifactTypeFilterExtra with Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ArtifactTypeFilter => (this.types == x.types) && (this.inverted == x.inverted) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.ArtifactTypeFilter".##) + types.##) + inverted.##) + } + override def toString: String = { + "ArtifactTypeFilter(" + types + ", " + inverted + ")" + } + private[this] def copy(types: Set[String] = types, inverted: Boolean = inverted): ArtifactTypeFilter = { + new ArtifactTypeFilter(types, inverted) + } + def withTypes(types: Set[String]): ArtifactTypeFilter = { + copy(types = types) + } + def withInverted(inverted: Boolean): ArtifactTypeFilter = { + copy(inverted = inverted) + } +} +object ArtifactTypeFilter extends sbt.librarymanagement.ArtifactTypeFilterFunctions { + + def apply(types: Set[String], inverted: Boolean): ArtifactTypeFilter = new ArtifactTypeFilter(types, inverted) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactTypeFilterFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactTypeFilterFormats.scala new file mode 100644 index 000000000..75e185a13 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ArtifactTypeFilterFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ArtifactTypeFilterFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val ArtifactTypeFilterFormat: JsonFormat[sbt.librarymanagement.ArtifactTypeFilter] = new JsonFormat[sbt.librarymanagement.ArtifactTypeFilter] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ArtifactTypeFilter = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val types = unbuilder.readField[Set[String]]("types") + val inverted = unbuilder.readField[Boolean]("inverted") + unbuilder.endObject() + sbt.librarymanagement.ArtifactTypeFilter(types, inverted) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ArtifactTypeFilter, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("types", obj.types) + builder.addField("inverted", obj.inverted) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/Caller.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Caller.scala new file mode 100644 index 000000000..30b0b9372 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Caller.scala @@ -0,0 +1,56 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class Caller private ( + val caller: sbt.librarymanagement.ModuleID, + val callerConfigurations: Vector[sbt.librarymanagement.ConfigRef], + val callerExtraAttributes: Map[String, String], + val isForceDependency: Boolean, + val isChangingDependency: Boolean, + val isTransitiveDependency: Boolean, + val isDirectlyForceDependency: Boolean) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: Caller => (this.caller == x.caller) && (this.callerConfigurations == x.callerConfigurations) && (this.callerExtraAttributes == x.callerExtraAttributes) && (this.isForceDependency == x.isForceDependency) && (this.isChangingDependency == x.isChangingDependency) && (this.isTransitiveDependency == x.isTransitiveDependency) && (this.isDirectlyForceDependency == x.isDirectlyForceDependency) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.Caller".##) + caller.##) + callerConfigurations.##) + callerExtraAttributes.##) + isForceDependency.##) + isChangingDependency.##) + isTransitiveDependency.##) + isDirectlyForceDependency.##) + } + override def toString: String = { + s"$caller" + } + private[this] def copy(caller: sbt.librarymanagement.ModuleID = caller, callerConfigurations: Vector[sbt.librarymanagement.ConfigRef] = callerConfigurations, callerExtraAttributes: Map[String, String] = callerExtraAttributes, isForceDependency: Boolean = isForceDependency, isChangingDependency: Boolean = isChangingDependency, isTransitiveDependency: Boolean = isTransitiveDependency, isDirectlyForceDependency: Boolean = isDirectlyForceDependency): Caller = { + new Caller(caller, callerConfigurations, callerExtraAttributes, isForceDependency, isChangingDependency, isTransitiveDependency, isDirectlyForceDependency) + } + def withCaller(caller: sbt.librarymanagement.ModuleID): Caller = { + copy(caller = caller) + } + def 
withCallerConfigurations(callerConfigurations: Vector[sbt.librarymanagement.ConfigRef]): Caller = { + copy(callerConfigurations = callerConfigurations) + } + def withCallerExtraAttributes(callerExtraAttributes: Map[String, String]): Caller = { + copy(callerExtraAttributes = callerExtraAttributes) + } + def withIsForceDependency(isForceDependency: Boolean): Caller = { + copy(isForceDependency = isForceDependency) + } + def withIsChangingDependency(isChangingDependency: Boolean): Caller = { + copy(isChangingDependency = isChangingDependency) + } + def withIsTransitiveDependency(isTransitiveDependency: Boolean): Caller = { + copy(isTransitiveDependency = isTransitiveDependency) + } + def withIsDirectlyForceDependency(isDirectlyForceDependency: Boolean): Caller = { + copy(isDirectlyForceDependency = isDirectlyForceDependency) + } +} +object Caller { + + def apply(caller: sbt.librarymanagement.ModuleID, callerConfigurations: Vector[sbt.librarymanagement.ConfigRef], callerExtraAttributes: Map[String, String], isForceDependency: Boolean, isChangingDependency: Boolean, isTransitiveDependency: Boolean, isDirectlyForceDependency: Boolean): Caller = new Caller(caller, callerConfigurations, callerExtraAttributes, isForceDependency, isChangingDependency, isTransitiveDependency, isDirectlyForceDependency) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/CallerFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/CallerFormats.scala new file mode 100644 index 000000000..5b3f57d80 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/CallerFormats.scala @@ -0,0 +1,39 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait CallerFormats { self: sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats => +implicit lazy val CallerFormat: JsonFormat[sbt.librarymanagement.Caller] = new JsonFormat[sbt.librarymanagement.Caller] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.Caller = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val caller = unbuilder.readField[sbt.librarymanagement.ModuleID]("caller") + val callerConfigurations = unbuilder.readField[Vector[sbt.librarymanagement.ConfigRef]]("callerConfigurations") + val callerExtraAttributes = unbuilder.readField[Map[String, String]]("callerExtraAttributes") + val isForceDependency = unbuilder.readField[Boolean]("isForceDependency") + val isChangingDependency = unbuilder.readField[Boolean]("isChangingDependency") + val isTransitiveDependency = unbuilder.readField[Boolean]("isTransitiveDependency") + val isDirectlyForceDependency = unbuilder.readField[Boolean]("isDirectlyForceDependency") + unbuilder.endObject() + sbt.librarymanagement.Caller(caller, callerConfigurations, callerExtraAttributes, isForceDependency, 
isChangingDependency, isTransitiveDependency, isDirectlyForceDependency) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Caller, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("caller", obj.caller) + builder.addField("callerConfigurations", obj.callerConfigurations) + builder.addField("callerExtraAttributes", obj.callerExtraAttributes) + builder.addField("isForceDependency", obj.isForceDependency) + builder.addField("isChangingDependency", obj.isChangingDependency) + builder.addField("isTransitiveDependency", obj.isTransitiveDependency) + builder.addField("isDirectlyForceDependency", obj.isDirectlyForceDependency) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChainedResolver.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChainedResolver.scala new file mode 100644 index 000000000..b91c5db8e --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChainedResolver.scala @@ -0,0 +1,36 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class ChainedResolver private ( + name: String, + val resolvers: Vector[sbt.librarymanagement.Resolver]) extends sbt.librarymanagement.Resolver(name) with Serializable { + private[sbt] override def validateProtocol(logger: sbt.util.Logger): Boolean = !resolvers.forall(!_.validateProtocol(logger)) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ChainedResolver => (this.name == x.name) && (this.resolvers == x.resolvers) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.ChainedResolver".##) + name.##) + resolvers.##) + } + override def toString: String = { + "ChainedResolver(" + name + ", " + resolvers + ")" + } + private[this] def copy(name: String = name, resolvers: Vector[sbt.librarymanagement.Resolver] = resolvers): ChainedResolver = { + new ChainedResolver(name, resolvers) + } + def withName(name: String): ChainedResolver = { + copy(name = name) + } + def withResolvers(resolvers: Vector[sbt.librarymanagement.Resolver]): ChainedResolver = { + copy(resolvers = resolvers) + } +} +object ChainedResolver { + + def apply(name: String, resolvers: Vector[sbt.librarymanagement.Resolver]): ChainedResolver = new ChainedResolver(name, resolvers) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChainedResolverFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChainedResolverFormats.scala new file mode 100644 index 000000000..61cf796da --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChainedResolverFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ChainedResolverFormats { self: sbt.librarymanagement.ResolverFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val ChainedResolverFormat: JsonFormat[sbt.librarymanagement.ChainedResolver] = new JsonFormat[sbt.librarymanagement.ChainedResolver] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ChainedResolver = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val resolvers = unbuilder.readField[Vector[sbt.librarymanagement.Resolver]]("resolvers") + unbuilder.endObject() + sbt.librarymanagement.ChainedResolver(name, resolvers) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ChainedResolver, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("resolvers", obj.resolvers) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/Checksum.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Checksum.scala new file mode 100644 index 000000000..13676bc3d --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Checksum.scala @@ -0,0 +1,36 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class Checksum private ( + val digest: String, + val `type`: String) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: Checksum => (this.digest == x.digest) && (this.`type` == x.`type`) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.Checksum".##) + digest.##) + `type`.##) + } + override def toString: String = { + "Checksum(" + digest + ", " + `type` + ")" + } + private[this] def copy(digest: String = digest, `type`: String = `type`): Checksum = { + new Checksum(digest, `type`) + } + def withDigest(digest: String): Checksum = { + copy(digest = digest) + } + def withType(`type`: String): Checksum = { + copy(`type` = `type`) + } +} +object Checksum { + + def apply(digest: String, `type`: String): Checksum = new Checksum(digest, `type`) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChecksumFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChecksumFormats.scala new file mode 100644 index 000000000..365bf5770 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ChecksumFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ChecksumFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val ChecksumFormat: JsonFormat[sbt.librarymanagement.Checksum] = new JsonFormat[sbt.librarymanagement.Checksum] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.Checksum = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val digest = unbuilder.readField[String]("digest") + val `type` = unbuilder.readField[String]("type") + unbuilder.endObject() + sbt.librarymanagement.Checksum(digest, `type`) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Checksum, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("digest", obj.digest) + builder.addField("type", obj.`type`) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReport.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReport.scala new file mode 100644 index 000000000..99b421dd7 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReport.scala @@ -0,0 +1,48 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Provides information about resolution of a single configuration. + * @param configuration the configuration this report is for. + * @param modules a sequence containing one report for each module resolved for this configuration. + * @param details a sequence containing one report for each org/name, which may or may not be part of the final resolution. 
+ */ +final class ConfigurationReport private ( + val configuration: sbt.librarymanagement.ConfigRef, + val modules: Vector[sbt.librarymanagement.ModuleReport], + val details: Vector[sbt.librarymanagement.OrganizationArtifactReport]) extends sbt.librarymanagement.ConfigurationReportExtra with Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ConfigurationReport => (this.configuration == x.configuration) && (this.modules == x.modules) && (this.details == x.details) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ConfigurationReport".##) + configuration.##) + modules.##) + details.##) + } + override def toString: String = { + s"\t$configuration:\n" + + (if (details.isEmpty) modules.mkString + details.flatMap(_.modules).filter(_.evicted).map("\t\t(EVICTED) " + _ + "\n").mkString + else details.mkString) + } + private[this] def copy(configuration: sbt.librarymanagement.ConfigRef = configuration, modules: Vector[sbt.librarymanagement.ModuleReport] = modules, details: Vector[sbt.librarymanagement.OrganizationArtifactReport] = details): ConfigurationReport = { + new ConfigurationReport(configuration, modules, details) + } + def withConfiguration(configuration: sbt.librarymanagement.ConfigRef): ConfigurationReport = { + copy(configuration = configuration) + } + def withModules(modules: Vector[sbt.librarymanagement.ModuleReport]): ConfigurationReport = { + copy(modules = modules) + } + def withDetails(details: Vector[sbt.librarymanagement.OrganizationArtifactReport]): ConfigurationReport = { + copy(details = details) + } +} +object ConfigurationReport { + + def apply(configuration: sbt.librarymanagement.ConfigRef, modules: Vector[sbt.librarymanagement.ModuleReport], details: Vector[sbt.librarymanagement.OrganizationArtifactReport]): ConfigurationReport = new ConfigurationReport(configuration, modules, details) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReportFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReportFormats.scala new file mode 100644 index 000000000..c8090c6e1 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReportFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ConfigurationReportFormats { self: sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ModuleReportFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats with sbt.librarymanagement.OrganizationArtifactReportFormats => +implicit lazy val ConfigurationReportFormat: JsonFormat[sbt.librarymanagement.ConfigurationReport] = new JsonFormat[sbt.librarymanagement.ConfigurationReport] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ConfigurationReport = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val configuration = unbuilder.readField[sbt.librarymanagement.ConfigRef]("configuration") + val modules = unbuilder.readField[Vector[sbt.librarymanagement.ModuleReport]]("modules") + val details = unbuilder.readField[Vector[sbt.librarymanagement.OrganizationArtifactReport]]("details") + unbuilder.endObject() + sbt.librarymanagement.ConfigurationReport(configuration, modules, details) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ConfigurationReport, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("configuration", obj.configuration) + builder.addField("modules", obj.modules) + builder.addField("details", obj.details) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReportLiteFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReportLiteFormats.scala new file mode 100644 index 000000000..b0181f495 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConfigurationReportLiteFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ConfigurationReportLiteFormats { self: sbt.librarymanagement.OrganizationArtifactReportFormats with sbt.librarymanagement.ModuleReportFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats => +implicit lazy val ConfigurationReportLiteFormat: JsonFormat[sbt.internal.librarymanagement.ConfigurationReportLite] = new JsonFormat[sbt.internal.librarymanagement.ConfigurationReportLite] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.librarymanagement.ConfigurationReportLite = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val configuration = unbuilder.readField[String]("configuration") + val details = unbuilder.readField[Vector[sbt.librarymanagement.OrganizationArtifactReport]]("details") + unbuilder.endObject() + sbt.internal.librarymanagement.ConfigurationReportLite(configuration, details) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.internal.librarymanagement.ConfigurationReportLite, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("configuration", obj.configuration) + builder.addField("details", obj.details) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConflictManager.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConflictManager.scala new file mode 100644 index 000000000..cef310494 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConflictManager.scala @@ -0,0 +1,42 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** See http://ant.apache.org/ivy/history/latest-milestone/settings/conflict-managers.html for details of the different conflict managers. 
*/ +final class ConflictManager private ( + val name: String, + val organization: String, + val module: String) extends Serializable { + + private def this(name: String) = this(name, "*", "*") + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ConflictManager => (this.name == x.name) && (this.organization == x.organization) && (this.module == x.module) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ConflictManager".##) + name.##) + organization.##) + module.##) + } + override def toString: String = { + "ConflictManager(" + name + ", " + organization + ", " + module + ")" + } + private[this] def copy(name: String = name, organization: String = organization, module: String = module): ConflictManager = { + new ConflictManager(name, organization, module) + } + def withName(name: String): ConflictManager = { + copy(name = name) + } + def withOrganization(organization: String): ConflictManager = { + copy(organization = organization) + } + def withModule(module: String): ConflictManager = { + copy(module = module) + } +} +object ConflictManager extends sbt.librarymanagement.ConflictManagerFunctions { + + def apply(name: String): ConflictManager = new ConflictManager(name) + def apply(name: String, organization: String, module: String): ConflictManager = new ConflictManager(name, organization, module) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConflictManagerFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConflictManagerFormats.scala new file mode 100644 index 000000000..7511f4c4b --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ConflictManagerFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ConflictManagerFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val ConflictManagerFormat: JsonFormat[sbt.librarymanagement.ConflictManager] = new JsonFormat[sbt.librarymanagement.ConflictManager] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ConflictManager = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val organization = unbuilder.readField[String]("organization") + val module = unbuilder.readField[String]("module") + unbuilder.endObject() + sbt.librarymanagement.ConflictManager(name, organization, module) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ConflictManager, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("organization", obj.organization) + builder.addField("module", obj.module) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/Developer.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Developer.scala new file mode 100644 index 000000000..0afb34a0f --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Developer.scala @@ -0,0 +1,44 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class Developer private ( + val id: String, + val name: String, + val email: String, + val url: java.net.URI) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: Developer => (this.id == x.id) && (this.name == x.name) && (this.email == x.email) && (this.url == x.url) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.Developer".##) + id.##) + name.##) + email.##) + url.##) + } + override def toString: String = { + "Developer(" + id + ", " + name + ", " + email + ", " + url + ")" + } + private[this] def copy(id: String = id, name: String = name, email: String = email, url: java.net.URI = url): Developer = { + new Developer(id, name, email, url) + } + def withId(id: String): Developer = { + copy(id = id) + } + def withName(name: String): Developer = { + copy(name = name) + } + def withEmail(email: String): Developer = { + copy(email = email) + } + def withUrl(url: java.net.URI): Developer = { + copy(url = url) + } +} +object Developer { + + def apply(id: String, name: String, email: String, url: java.net.URI): Developer = new Developer(id, name, email, url) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/DeveloperFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/DeveloperFormats.scala new file mode 100644 index 000000000..3a7cdbf60 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/DeveloperFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait DeveloperFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val DeveloperFormat: JsonFormat[sbt.librarymanagement.Developer] = new JsonFormat[sbt.librarymanagement.Developer] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.Developer = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val id = unbuilder.readField[String]("id") + val name = unbuilder.readField[String]("name") + val email = unbuilder.readField[String]("email") + val url = unbuilder.readField[java.net.URI]("url") + unbuilder.endObject() + sbt.librarymanagement.Developer(id, name, email, url) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Developer, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("id", obj.id) + builder.addField("name", obj.name) + builder.addField("email", obj.email) + builder.addField("url", obj.url) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileConfiguration.scala new file mode 100644 index 000000000..daf2e6d3c --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileConfiguration.scala @@ -0,0 +1,41 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** Configuration specific to an Ivy filesystem resolver. 
*/ +final class FileConfiguration private ( + val isLocal: Boolean, + val isTransactional: Option[Boolean]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: FileConfiguration => (this.isLocal == x.isLocal) && (this.isTransactional == x.isTransactional) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.FileConfiguration".##) + isLocal.##) + isTransactional.##) + } + override def toString: String = { + "FileConfiguration(" + isLocal + ", " + isTransactional + ")" + } + private[this] def copy(isLocal: Boolean = isLocal, isTransactional: Option[Boolean] = isTransactional): FileConfiguration = { + new FileConfiguration(isLocal, isTransactional) + } + def withIsLocal(isLocal: Boolean): FileConfiguration = { + copy(isLocal = isLocal) + } + def withIsTransactional(isTransactional: Option[Boolean]): FileConfiguration = { + copy(isTransactional = isTransactional) + } + def withIsTransactional(isTransactional: Boolean): FileConfiguration = { + copy(isTransactional = Option(isTransactional)) + } +} +object FileConfiguration { + + def apply(isLocal: Boolean, isTransactional: Option[Boolean]): FileConfiguration = new FileConfiguration(isLocal, isTransactional) + def apply(isLocal: Boolean, isTransactional: Boolean): FileConfiguration = new FileConfiguration(isLocal, Option(isTransactional)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileConfigurationFormats.scala new file mode 100644 index 000000000..be208ebec --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileConfigurationFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait FileConfigurationFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val FileConfigurationFormat: JsonFormat[sbt.librarymanagement.FileConfiguration] = new JsonFormat[sbt.librarymanagement.FileConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.FileConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val isLocal = unbuilder.readField[Boolean]("isLocal") + val isTransactional = unbuilder.readField[Option[Boolean]]("isTransactional") + unbuilder.endObject() + sbt.librarymanagement.FileConfiguration(isLocal, isTransactional) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.FileConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("isLocal", obj.isLocal) + builder.addField("isTransactional", obj.isTransactional) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileRepository.scala new file mode 100644 index 000000000..ca1fb7b7c --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileRepository.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** sbt interface for an Ivy filesystem repository. 
More convenient construction is done using Resolver.file. */ +final class FileRepository private ( + name: String, + patterns: sbt.librarymanagement.Patterns, + val configuration: sbt.librarymanagement.FileConfiguration) extends sbt.librarymanagement.PatternsBasedRepository(name, patterns) with Serializable { + def this(name: String, configuration: sbt.librarymanagement.FileConfiguration, patterns: sbt.librarymanagement.Patterns) = + this(name, patterns, configuration) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: FileRepository => (this.name == x.name) && (this.patterns == x.patterns) && (this.configuration == x.configuration) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.FileRepository".##) + name.##) + patterns.##) + configuration.##) + } + override def toString: String = { + "FileRepository(" + name + ", " + patterns + ", " + configuration + ")" + } + private[this] def copy(name: String = name, patterns: sbt.librarymanagement.Patterns = patterns, configuration: sbt.librarymanagement.FileConfiguration = configuration): FileRepository = { + new FileRepository(name, patterns, configuration) + } + def withName(name: String): FileRepository = { + copy(name = name) + } + def withPatterns(patterns: sbt.librarymanagement.Patterns): FileRepository = { + copy(patterns = patterns) + } + def withConfiguration(configuration: sbt.librarymanagement.FileConfiguration): FileRepository = { + copy(configuration = configuration) + } +} +object FileRepository { + def apply(name: String, configuration: sbt.librarymanagement.FileConfiguration, patterns: sbt.librarymanagement.Patterns) = + new FileRepository(name, patterns, configuration) + def apply(name: String, patterns: sbt.librarymanagement.Patterns, configuration: sbt.librarymanagement.FileConfiguration): FileRepository = new FileRepository(name, patterns, configuration) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileRepositoryFormats.scala new file mode 100644 index 000000000..da1e0c26f --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/FileRepositoryFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait FileRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.FileConfigurationFormats => +implicit lazy val FileRepositoryFormat: JsonFormat[sbt.librarymanagement.FileRepository] = new JsonFormat[sbt.librarymanagement.FileRepository] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.FileRepository = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val patterns = unbuilder.readField[sbt.librarymanagement.Patterns]("patterns") + val configuration = unbuilder.readField[sbt.librarymanagement.FileConfiguration]("configuration") + unbuilder.endObject() + sbt.librarymanagement.FileRepository(name, patterns, configuration) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.FileRepository, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("patterns", obj.patterns) + builder.addField("configuration", obj.configuration) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersConfiguration.scala new file mode 100644 index 000000000..85deb700c --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersConfiguration.scala @@ -0,0 +1,48 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class GetClassifiersConfiguration private ( + val module: sbt.librarymanagement.GetClassifiersModule, + val excludes: Vector[scala.Tuple2[sbt.librarymanagement.ModuleID, scala.Vector[sbt.librarymanagement.ConfigRef]]], + val updateConfiguration: sbt.librarymanagement.UpdateConfiguration, + val sourceArtifactTypes: Vector[String], + val docArtifactTypes: Vector[String]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: GetClassifiersConfiguration => (this.module == x.module) && (this.excludes == x.excludes) && (this.updateConfiguration == x.updateConfiguration) && (this.sourceArtifactTypes == x.sourceArtifactTypes) && (this.docArtifactTypes == x.docArtifactTypes) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.GetClassifiersConfiguration".##) + module.##) + excludes.##) + updateConfiguration.##) + sourceArtifactTypes.##) + docArtifactTypes.##) + } + override def toString: String = { + "GetClassifiersConfiguration(" + module + ", " + excludes + ", " + updateConfiguration + ", " + sourceArtifactTypes + ", " + docArtifactTypes + ")" + } + private[this] def copy(module: sbt.librarymanagement.GetClassifiersModule = module, excludes: Vector[scala.Tuple2[sbt.librarymanagement.ModuleID, scala.Vector[sbt.librarymanagement.ConfigRef]]] = excludes, updateConfiguration: sbt.librarymanagement.UpdateConfiguration = updateConfiguration, sourceArtifactTypes: Vector[String] = sourceArtifactTypes, docArtifactTypes: Vector[String] = docArtifactTypes): GetClassifiersConfiguration = { + new GetClassifiersConfiguration(module, excludes, updateConfiguration, sourceArtifactTypes, docArtifactTypes) + } + def withModule(module: sbt.librarymanagement.GetClassifiersModule): GetClassifiersConfiguration = { + copy(module = module) + } + def withExcludes(excludes: Vector[scala.Tuple2[sbt.librarymanagement.ModuleID, scala.Vector[sbt.librarymanagement.ConfigRef]]]): GetClassifiersConfiguration = { + copy(excludes = excludes) + } + def withUpdateConfiguration(updateConfiguration: sbt.librarymanagement.UpdateConfiguration): GetClassifiersConfiguration = { + copy(updateConfiguration = updateConfiguration) + } + def withSourceArtifactTypes(sourceArtifactTypes: Vector[String]): GetClassifiersConfiguration = { + copy(sourceArtifactTypes = sourceArtifactTypes) + } + def withDocArtifactTypes(docArtifactTypes: Vector[String]): GetClassifiersConfiguration = { + copy(docArtifactTypes = docArtifactTypes) + } +} +object GetClassifiersConfiguration { + + def apply(module: sbt.librarymanagement.GetClassifiersModule, excludes: Vector[scala.Tuple2[sbt.librarymanagement.ModuleID, scala.Vector[sbt.librarymanagement.ConfigRef]]], updateConfiguration: sbt.librarymanagement.UpdateConfiguration, sourceArtifactTypes: Vector[String], docArtifactTypes: Vector[String]): GetClassifiersConfiguration = new GetClassifiersConfiguration(module, excludes, updateConfiguration, sourceArtifactTypes, docArtifactTypes) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersConfigurationFormats.scala new file mode 100644 index 000000000..c76d4686a --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersConfigurationFormats.scala @@ -0,0 +1,35 @@ +/** + * This code is generated using 
[[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait GetClassifiersConfigurationFormats { self: sbt.librarymanagement.GetClassifiersModuleFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.ScalaModuleInfoFormats with sbt.librarymanagement.ConfigurationFormats with sbt.librarymanagement.UpdateConfigurationFormats with sbt.librarymanagement.RetrieveConfigurationFormats with sbt.librarymanagement.UpdateLoggingFormats with sbt.internal.librarymanagement.formats.LogicalClockFormats with sbt.librarymanagement.ArtifactTypeFilterFormats => +implicit lazy val GetClassifiersConfigurationFormat: JsonFormat[sbt.librarymanagement.GetClassifiersConfiguration] = new JsonFormat[sbt.librarymanagement.GetClassifiersConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.GetClassifiersConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val module = unbuilder.readField[sbt.librarymanagement.GetClassifiersModule]("module") + val excludes = unbuilder.readField[Vector[scala.Tuple2[sbt.librarymanagement.ModuleID, scala.Vector[sbt.librarymanagement.ConfigRef]]]]("excludes") + val updateConfiguration = unbuilder.readField[sbt.librarymanagement.UpdateConfiguration]("updateConfiguration") + val sourceArtifactTypes = unbuilder.readField[Vector[String]]("sourceArtifactTypes") + val docArtifactTypes = unbuilder.readField[Vector[String]]("docArtifactTypes") + unbuilder.endObject() + sbt.librarymanagement.GetClassifiersConfiguration(module, excludes, updateConfiguration, sourceArtifactTypes, docArtifactTypes) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.GetClassifiersConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("module", obj.module) + builder.addField("excludes", obj.excludes) + builder.addField("updateConfiguration", obj.updateConfiguration) + builder.addField("sourceArtifactTypes", obj.sourceArtifactTypes) + builder.addField("docArtifactTypes", obj.docArtifactTypes) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersModule.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersModule.scala new file mode 100644 index 000000000..dfdaa72c1 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersModule.scala @@ -0,0 +1,52 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class GetClassifiersModule private ( + val id: sbt.librarymanagement.ModuleID, + val scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], + val dependencies: Vector[sbt.librarymanagement.ModuleID], + val configurations: Vector[sbt.librarymanagement.Configuration], + val classifiers: Vector[String]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: GetClassifiersModule => (this.id == x.id) && (this.scalaModuleInfo == x.scalaModuleInfo) && (this.dependencies == x.dependencies) && (this.configurations == x.configurations) && (this.classifiers == x.classifiers) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.GetClassifiersModule".##) + id.##) + scalaModuleInfo.##) + dependencies.##) + configurations.##) + classifiers.##) + } + override def toString: String = { + "GetClassifiersModule(" + id + ", " + scalaModuleInfo + ", " + dependencies + ", " + configurations + ", " + classifiers + ")" + } + private[this] def copy(id: sbt.librarymanagement.ModuleID = id, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo] = scalaModuleInfo, dependencies: Vector[sbt.librarymanagement.ModuleID] = dependencies, configurations: Vector[sbt.librarymanagement.Configuration] = configurations, classifiers: Vector[String] = classifiers): GetClassifiersModule = { + new GetClassifiersModule(id, scalaModuleInfo, dependencies, configurations, classifiers) + } + def withId(id: sbt.librarymanagement.ModuleID): GetClassifiersModule = { + copy(id = id) + } + def withScalaModuleInfo(scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo]): GetClassifiersModule = { + copy(scalaModuleInfo = scalaModuleInfo) + } + def withScalaModuleInfo(scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo): GetClassifiersModule = { + copy(scalaModuleInfo = Option(scalaModuleInfo)) + } + def withDependencies(dependencies: Vector[sbt.librarymanagement.ModuleID]): GetClassifiersModule = { + copy(dependencies = dependencies) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.Configuration]): GetClassifiersModule = { + copy(configurations = configurations) + } + def withClassifiers(classifiers: Vector[String]): GetClassifiersModule = { + copy(classifiers = classifiers) + } +} +object GetClassifiersModule { + + def apply(id: sbt.librarymanagement.ModuleID, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], dependencies: Vector[sbt.librarymanagement.ModuleID], configurations: Vector[sbt.librarymanagement.Configuration], classifiers: Vector[String]): GetClassifiersModule = new GetClassifiersModule(id, scalaModuleInfo, dependencies, configurations, classifiers) + def apply(id: sbt.librarymanagement.ModuleID, scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo, dependencies: Vector[sbt.librarymanagement.ModuleID], configurations: Vector[sbt.librarymanagement.Configuration], classifiers: Vector[String]): GetClassifiersModule = new GetClassifiersModule(id, Option(scalaModuleInfo), dependencies, configurations, classifiers) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersModuleFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersModuleFormats.scala new file mode 100644 index 000000000..bd9475f86 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/GetClassifiersModuleFormats.scala 
@@ -0,0 +1,35 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait GetClassifiersModuleFormats { self: sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.ScalaModuleInfoFormats with sbt.librarymanagement.ConfigurationFormats => +implicit lazy val GetClassifiersModuleFormat: JsonFormat[sbt.librarymanagement.GetClassifiersModule] = new JsonFormat[sbt.librarymanagement.GetClassifiersModule] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.GetClassifiersModule = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val id = unbuilder.readField[sbt.librarymanagement.ModuleID]("id") + val scalaModuleInfo = unbuilder.readField[Option[sbt.librarymanagement.ScalaModuleInfo]]("scalaModuleInfo") + val dependencies = unbuilder.readField[Vector[sbt.librarymanagement.ModuleID]]("dependencies") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.Configuration]]("configurations") + val classifiers = unbuilder.readField[Vector[String]]("classifiers") + unbuilder.endObject() + sbt.librarymanagement.GetClassifiersModule(id, scalaModuleInfo, dependencies, configurations, classifiers) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.GetClassifiersModule, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("id", obj.id) + builder.addField("scalaModuleInfo", obj.scalaModuleInfo) + builder.addField("dependencies", obj.dependencies) + builder.addField("configurations", obj.configurations) + builder.addField("classifiers", obj.classifiers) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/InclExclRule.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/InclExclRule.scala new file mode 100644 index 000000000..23da54f26 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/InclExclRule.scala @@ -0,0 +1,58 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Defines a rule to either: + * + * The use case that is applied depends on the parameter name which it is passed to, but the + * filter has the same fields in both cases. 
+ */ +final class InclExclRule private ( + val organization: String, + val name: String, + val artifact: String, + val configurations: Vector[sbt.librarymanagement.ConfigRef], + val crossVersion: sbt.librarymanagement.CrossVersion) extends Serializable { + + private def this() = this("*", "*", "*", Vector.empty, sbt.librarymanagement.Disabled()) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: InclExclRule => (this.organization == x.organization) && (this.name == x.name) && (this.artifact == x.artifact) && (this.configurations == x.configurations) && (this.crossVersion == x.crossVersion) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.InclExclRule".##) + organization.##) + name.##) + artifact.##) + configurations.##) + crossVersion.##) + } + override def toString: String = { + "InclExclRule(" + organization + ", " + name + ", " + artifact + ", " + configurations + ", " + crossVersion + ")" + } + private[this] def copy(organization: String = organization, name: String = name, artifact: String = artifact, configurations: Vector[sbt.librarymanagement.ConfigRef] = configurations, crossVersion: sbt.librarymanagement.CrossVersion = crossVersion): InclExclRule = { + new InclExclRule(organization, name, artifact, configurations, crossVersion) + } + def withOrganization(organization: String): InclExclRule = { + copy(organization = organization) + } + def withName(name: String): InclExclRule = { + copy(name = name) + } + def withArtifact(artifact: String): InclExclRule = { + copy(artifact = artifact) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.ConfigRef]): InclExclRule = { + copy(configurations = configurations) + } + def withCrossVersion(crossVersion: sbt.librarymanagement.CrossVersion): InclExclRule = { + copy(crossVersion = crossVersion) + } +} +object InclExclRule extends sbt.librarymanagement.InclExclRuleFunctions { + + def apply(): InclExclRule = new InclExclRule() + def apply(organization: String, name: String, artifact: String, configurations: Vector[sbt.librarymanagement.ConfigRef], crossVersion: sbt.librarymanagement.CrossVersion): InclExclRule = new InclExclRule(organization, name, artifact, configurations, crossVersion) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/InclExclRuleFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/InclExclRuleFormats.scala new file mode 100644 index 000000000..837b478e9 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/InclExclRuleFormats.scala @@ -0,0 +1,35 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait InclExclRuleFormats { self: sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sjsonnew.BasicJsonProtocol => +implicit lazy val InclExclRuleFormat: JsonFormat[sbt.librarymanagement.InclExclRule] = new JsonFormat[sbt.librarymanagement.InclExclRule] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.InclExclRule = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val organization = unbuilder.readField[String]("organization") + val name = unbuilder.readField[String]("name") + val artifact = unbuilder.readField[String]("artifact") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.ConfigRef]]("configurations") + val crossVersion = unbuilder.readField[sbt.librarymanagement.CrossVersion]("crossVersion") + unbuilder.endObject() + sbt.librarymanagement.InclExclRule(organization, name, artifact, configurations, crossVersion) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.InclExclRule, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("organization", obj.organization) + builder.addField("name", obj.name) + builder.addField("artifact", obj.artifact) + builder.addField("configurations", obj.configurations) + builder.addField("crossVersion", obj.crossVersion) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/IvyFileConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/IvyFileConfiguration.scala new file mode 100644 index 000000000..0611fbabf --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/IvyFileConfiguration.scala @@ -0,0 +1,49 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class IvyFileConfiguration private ( + validate: Boolean, + scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], + val file: java.io.File, + val autoScalaTools: Boolean) extends sbt.librarymanagement.ModuleSettings(validate, scalaModuleInfo) with Serializable { + + private def this(file: java.io.File, autoScalaTools: Boolean) = this(false, None, file, autoScalaTools) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: IvyFileConfiguration => (this.validate == x.validate) && (this.scalaModuleInfo == x.scalaModuleInfo) && (this.file == x.file) && (this.autoScalaTools == x.autoScalaTools) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.IvyFileConfiguration".##) + validate.##) + scalaModuleInfo.##) + file.##) + autoScalaTools.##) + } + override def toString: String = { + "IvyFileConfiguration(" + validate + ", " + scalaModuleInfo + ", " + file + ", " + autoScalaTools + ")" + } + private[this] def copy(validate: Boolean = validate, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo] = scalaModuleInfo, file: java.io.File = file, autoScalaTools: Boolean = autoScalaTools): IvyFileConfiguration = { + new IvyFileConfiguration(validate, scalaModuleInfo, file, autoScalaTools) + } + def withValidate(validate: Boolean): IvyFileConfiguration = { + copy(validate = validate) + } + def withScalaModuleInfo(scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo]): IvyFileConfiguration = { + copy(scalaModuleInfo = scalaModuleInfo) + } + def withScalaModuleInfo(scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo): IvyFileConfiguration = { + copy(scalaModuleInfo = Option(scalaModuleInfo)) + } + def withFile(file: java.io.File): IvyFileConfiguration = { + copy(file = file) + } + def withAutoScalaTools(autoScalaTools: Boolean): IvyFileConfiguration = { + copy(autoScalaTools = autoScalaTools) + } +} +object IvyFileConfiguration { + + def apply(file: java.io.File, autoScalaTools: Boolean): IvyFileConfiguration = new IvyFileConfiguration(file, autoScalaTools) + def apply(validate: Boolean, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], file: java.io.File, autoScalaTools: Boolean): IvyFileConfiguration = new IvyFileConfiguration(validate, scalaModuleInfo, file, autoScalaTools) + def apply(validate: Boolean, scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo, file: java.io.File, autoScalaTools: Boolean): IvyFileConfiguration = new IvyFileConfiguration(validate, Option(scalaModuleInfo), file, autoScalaTools) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/IvyFileConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/IvyFileConfigurationFormats.scala new file mode 100644 index 000000000..990f2b9c0 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/IvyFileConfigurationFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait IvyFileConfigurationFormats { self: sbt.librarymanagement.ScalaModuleInfoFormats with sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val IvyFileConfigurationFormat: JsonFormat[sbt.librarymanagement.IvyFileConfiguration] = new JsonFormat[sbt.librarymanagement.IvyFileConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.IvyFileConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val validate = unbuilder.readField[Boolean]("validate") + val scalaModuleInfo = unbuilder.readField[Option[sbt.librarymanagement.ScalaModuleInfo]]("scalaModuleInfo") + val file = unbuilder.readField[java.io.File]("file") + val autoScalaTools = unbuilder.readField[Boolean]("autoScalaTools") + unbuilder.endObject() + sbt.librarymanagement.IvyFileConfiguration(validate, scalaModuleInfo, file, autoScalaTools) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.IvyFileConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("validate", obj.validate) + builder.addField("scalaModuleInfo", obj.scalaModuleInfo) + builder.addField("file", obj.file) + builder.addField("autoScalaTools", obj.autoScalaTools) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/KeyFileAuthentication.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/KeyFileAuthentication.scala new file mode 100644 index 000000000..7366734e7 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/KeyFileAuthentication.scala @@ -0,0 +1,44 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class KeyFileAuthentication private ( + val user: String, + val keyfile: java.io.File, + val password: Option[String]) extends sbt.librarymanagement.SshAuthentication() with Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: KeyFileAuthentication => (this.user == x.user) && (this.keyfile == x.keyfile) && (this.password == x.password) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.KeyFileAuthentication".##) + user.##) + keyfile.##) + password.##) + } + override def toString: String = { + "KeyFileAuthentication(" + user + ", " + keyfile + ", " + password + ")" + } + private[this] def copy(user: String = user, keyfile: java.io.File = keyfile, password: Option[String] = password): KeyFileAuthentication = { + new KeyFileAuthentication(user, keyfile, password) + } + def withUser(user: String): KeyFileAuthentication = { + copy(user = user) + } + def withKeyfile(keyfile: java.io.File): KeyFileAuthentication = { + copy(keyfile = keyfile) + } + def withPassword(password: Option[String]): KeyFileAuthentication = { + copy(password = password) + } + def withPassword(password: String): KeyFileAuthentication = { + copy(password = Option(password)) + } +} +object KeyFileAuthentication { + + def apply(user: String, keyfile: java.io.File, password: Option[String]): KeyFileAuthentication = new KeyFileAuthentication(user, keyfile, password) + def apply(user: String, keyfile: java.io.File, password: String): KeyFileAuthentication = new KeyFileAuthentication(user, keyfile, Option(password)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/KeyFileAuthenticationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/KeyFileAuthenticationFormats.scala new file mode 100644 index 000000000..8f99b0e10 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/KeyFileAuthenticationFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait KeyFileAuthenticationFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val KeyFileAuthenticationFormat: JsonFormat[sbt.librarymanagement.KeyFileAuthentication] = new JsonFormat[sbt.librarymanagement.KeyFileAuthentication] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.KeyFileAuthentication = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val user = unbuilder.readField[String]("user") + val keyfile = unbuilder.readField[java.io.File]("keyfile") + val password = unbuilder.readField[Option[String]]("password") + unbuilder.endObject() + sbt.librarymanagement.KeyFileAuthentication(user, keyfile, password) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.KeyFileAuthentication, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("user", obj.user) + builder.addField("keyfile", obj.keyfile) + builder.addField("password", obj.password) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/LibraryManagementCodec.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/LibraryManagementCodec.scala new file mode 100644 index 000000000..7334fd859 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/LibraryManagementCodec.scala @@ -0,0 +1,66 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +trait LibraryManagementCodec extends sbt.librarymanagement.ConfigRefFormats + with sjsonnew.BasicJsonProtocol + with sbt.librarymanagement.RetrieveConfigurationFormats + with sbt.librarymanagement.UpdateLoggingFormats + with sbt.internal.librarymanagement.formats.LogicalClockFormats + with sbt.librarymanagement.ArtifactTypeFilterFormats + with sbt.librarymanagement.UpdateConfigurationFormats + with sbt.librarymanagement.ChecksumFormats + with sbt.librarymanagement.ArtifactFormats + with sbt.librarymanagement.CrossVersionFormats + with sbt.librarymanagement.DisabledFormats + with sbt.librarymanagement.BinaryFormats + with sbt.librarymanagement.ConstantFormats + with sbt.librarymanagement.PatchFormats + with sbt.librarymanagement.FullFormats + with sbt.librarymanagement.For3Use2_13Formats + with sbt.librarymanagement.For2_13Use3Formats + with sbt.librarymanagement.InclExclRuleFormats + with sbt.librarymanagement.ModuleIDFormats + with sbt.librarymanagement.ConfigurationFormats + with sbt.librarymanagement.ScalaModuleInfoFormats + with sbt.librarymanagement.GetClassifiersModuleFormats + with sbt.librarymanagement.GetClassifiersConfigurationFormats + with sbt.librarymanagement.PublishConfigurationFormats + with sbt.librarymanagement.CallerFormats + with sbt.librarymanagement.ModuleReportFormats + with sbt.librarymanagement.OrganizationArtifactReportFormats + with sbt.librarymanagement.ConfigurationReportFormats + with sbt.librarymanagement.ConflictManagerFormats + with sbt.librarymanagement.DeveloperFormats + with sbt.librarymanagement.FileConfigurationFormats + with sbt.librarymanagement.ChainedResolverFormats + with sbt.librarymanagement.MavenRepoFormats + with sbt.librarymanagement.MavenCacheFormats + with sbt.librarymanagement.PatternsFormats + with sbt.librarymanagement.FileRepositoryFormats + 
with sbt.librarymanagement.URLRepositoryFormats + with sbt.librarymanagement.PasswordAuthenticationFormats + with sbt.librarymanagement.KeyFileAuthenticationFormats + with sbt.librarymanagement.SshAuthenticationFormats + with sbt.librarymanagement.SshConnectionFormats + with sbt.librarymanagement.SshRepositoryFormats + with sbt.librarymanagement.SftpRepositoryFormats + with sbt.librarymanagement.ResolverFormats + with sbt.librarymanagement.ModuleConfigurationFormats + with sbt.librarymanagement.ScmInfoFormats + with sbt.librarymanagement.ModuleInfoFormats + with sbt.librarymanagement.IvyFileConfigurationFormats + with sbt.librarymanagement.PomConfigurationFormats + with sbt.internal.librarymanagement.formats.NodeSeqFormat + with sbt.librarymanagement.ModuleDescriptorConfigurationFormats + with sbt.librarymanagement.ModuleSettingsFormats + with sbt.librarymanagement.MavenRepositoryFormats + with sbt.librarymanagement.PatternsBasedRepositoryFormats + with sbt.librarymanagement.SshBasedRepositoryFormats + with sbt.librarymanagement.UpdateStatsFormats + with sbt.librarymanagement.UpdateReportFormats + with sbt.librarymanagement.ConfigurationReportLiteFormats + with sbt.librarymanagement.UpdateReportLiteFormats +object LibraryManagementCodec extends LibraryManagementCodec \ No newline at end of file diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MakePomConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MakePomConfiguration.scala new file mode 100644 index 000000000..320fe3722 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MakePomConfiguration.scala @@ -0,0 +1,73 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class MakePomConfiguration private ( + val file: Option[java.io.File], + val moduleInfo: Option[sbt.librarymanagement.ModuleInfo], + val configurations: Option[scala.Vector[sbt.librarymanagement.Configuration]], + val extra: Option[scala.xml.NodeSeq], + val process: scala.Function1[scala.xml.Node, scala.xml.Node], + val filterRepositories: scala.Function1[sbt.librarymanagement.MavenRepository, Boolean], + val allRepositories: Boolean, + val includeTypes: Set[String]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: MakePomConfiguration => (this.file == x.file) && (this.moduleInfo == x.moduleInfo) && (this.configurations == x.configurations) && (this.extra == x.extra) && (this.process == x.process) && (this.filterRepositories == x.filterRepositories) && (this.allRepositories == x.allRepositories) && (this.includeTypes == x.includeTypes) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.MakePomConfiguration".##) + file.##) + moduleInfo.##) + configurations.##) + extra.##) + process.##) + filterRepositories.##) + allRepositories.##) + includeTypes.##) + } + override def toString: String = { + "MakePomConfiguration(" + file + ", " + moduleInfo + ", " + configurations + ", " + extra + ", " + process + ", " + filterRepositories + ", " + allRepositories + ", " + includeTypes + ")" + } + private[this] def copy(file: Option[java.io.File] = file, moduleInfo: Option[sbt.librarymanagement.ModuleInfo] = moduleInfo, configurations: Option[scala.Vector[sbt.librarymanagement.Configuration]] = configurations, extra: Option[scala.xml.NodeSeq] = 
extra, process: scala.Function1[scala.xml.Node, scala.xml.Node] = process, filterRepositories: scala.Function1[sbt.librarymanagement.MavenRepository, Boolean] = filterRepositories, allRepositories: Boolean = allRepositories, includeTypes: Set[String] = includeTypes): MakePomConfiguration = { + new MakePomConfiguration(file, moduleInfo, configurations, extra, process, filterRepositories, allRepositories, includeTypes) + } + def withFile(file: Option[java.io.File]): MakePomConfiguration = { + copy(file = file) + } + def withFile(file: java.io.File): MakePomConfiguration = { + copy(file = Option(file)) + } + def withModuleInfo(moduleInfo: Option[sbt.librarymanagement.ModuleInfo]): MakePomConfiguration = { + copy(moduleInfo = moduleInfo) + } + def withModuleInfo(moduleInfo: sbt.librarymanagement.ModuleInfo): MakePomConfiguration = { + copy(moduleInfo = Option(moduleInfo)) + } + def withConfigurations(configurations: Option[scala.Vector[sbt.librarymanagement.Configuration]]): MakePomConfiguration = { + copy(configurations = configurations) + } + def withConfigurations(configurations: scala.Vector[sbt.librarymanagement.Configuration]): MakePomConfiguration = { + copy(configurations = Option(configurations)) + } + def withExtra(extra: Option[scala.xml.NodeSeq]): MakePomConfiguration = { + copy(extra = extra) + } + def withExtra(extra: scala.xml.NodeSeq): MakePomConfiguration = { + copy(extra = Option(extra)) + } + def withProcess(process: scala.Function1[scala.xml.Node, scala.xml.Node]): MakePomConfiguration = { + copy(process = process) + } + def withFilterRepositories(filterRepositories: scala.Function1[sbt.librarymanagement.MavenRepository, Boolean]): MakePomConfiguration = { + copy(filterRepositories = filterRepositories) + } + def withAllRepositories(allRepositories: Boolean): MakePomConfiguration = { + copy(allRepositories = allRepositories) + } + def withIncludeTypes(includeTypes: Set[String]): MakePomConfiguration = { + copy(includeTypes = includeTypes) + } +} +object MakePomConfiguration extends sbt.librarymanagement.MakePomConfigurationFunctions { + + def apply(file: Option[java.io.File], moduleInfo: Option[sbt.librarymanagement.ModuleInfo], configurations: Option[scala.Vector[sbt.librarymanagement.Configuration]], extra: Option[scala.xml.NodeSeq], process: scala.Function1[scala.xml.Node, scala.xml.Node], filterRepositories: scala.Function1[sbt.librarymanagement.MavenRepository, Boolean], allRepositories: Boolean, includeTypes: Set[String]): MakePomConfiguration = new MakePomConfiguration(file, moduleInfo, configurations, extra, process, filterRepositories, allRepositories, includeTypes) + def apply(file: java.io.File, moduleInfo: sbt.librarymanagement.ModuleInfo, configurations: scala.Vector[sbt.librarymanagement.Configuration], extra: scala.xml.NodeSeq, process: scala.Function1[scala.xml.Node, scala.xml.Node], filterRepositories: scala.Function1[sbt.librarymanagement.MavenRepository, Boolean], allRepositories: Boolean, includeTypes: Set[String]): MakePomConfiguration = new MakePomConfiguration(Option(file), Option(moduleInfo), Option(configurations), Option(extra), process, filterRepositories, allRepositories, includeTypes) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenCache.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenCache.scala new file mode 100644 index 000000000..dc69f0abe --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenCache.scala @@ -0,0 +1,51 @@ +/** + * This code is generated using 
[[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * An instance of maven CACHE directory. You cannot treat a cache directory the same as a remote repository because + * the metadata is different (see Aether ML discussion). + */ +final class MavenCache private ( + name: String, + root: String, + localIfFile: Boolean, + val rootFile: java.io.File) extends sbt.librarymanagement.MavenRepository(name, root, localIfFile) with Serializable { + def this(name: String, rootFile: java.io.File) = this(name, rootFile.toURI.toURL.toString, true, rootFile) + override def isCache: Boolean = true + override def allowInsecureProtocol: Boolean = false + private def this(name: String, root: String, rootFile: java.io.File) = this(name, root, true, rootFile) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: MavenCache => (this.name == x.name) && (this.root == x.root) && (this.localIfFile == x.localIfFile) && (this.rootFile == x.rootFile) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.MavenCache".##) + name.##) + root.##) + localIfFile.##) + rootFile.##) + } + override def toString: String = { + s"cache:$name: ${rootFile.getAbsolutePath}" + } + private[this] def copy(name: String = name, root: String = root, localIfFile: Boolean = localIfFile, rootFile: java.io.File = rootFile): MavenCache = { + new MavenCache(name, root, localIfFile, rootFile) + } + def withName(name: String): MavenCache = { + copy(name = name) + } + def withRoot(root: String): MavenCache = { + copy(root = root) + } + def withLocalIfFile(localIfFile: Boolean): MavenCache = { + copy(localIfFile = localIfFile) + } + def withRootFile(rootFile: java.io.File): MavenCache = { + copy(rootFile = rootFile) + } +} +object MavenCache { + def apply(name: String, rootFile: java.io.File): MavenCache = new MavenCache(name, rootFile) + def apply(name: String, root: String, rootFile: java.io.File): MavenCache = new MavenCache(name, root, rootFile) + def apply(name: String, root: String, localIfFile: Boolean, rootFile: java.io.File): MavenCache = new MavenCache(name, root, localIfFile, rootFile) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenCacheFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenCacheFormats.scala new file mode 100644 index 000000000..9b28abfda --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenCacheFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait MavenCacheFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val MavenCacheFormat: JsonFormat[sbt.librarymanagement.MavenCache] = new JsonFormat[sbt.librarymanagement.MavenCache] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.MavenCache = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val root = unbuilder.readField[String]("root") + val localIfFile = unbuilder.readField[Boolean]("localIfFile") + val rootFile = unbuilder.readField[java.io.File]("rootFile") + unbuilder.endObject() + sbt.librarymanagement.MavenCache(name, root, localIfFile, rootFile) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.MavenCache, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("root", obj.root) + builder.addField("localIfFile", obj.localIfFile) + builder.addField("rootFile", obj.rootFile) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepo.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepo.scala new file mode 100644 index 000000000..b8fa71575 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepo.scala @@ -0,0 +1,50 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** This is the internal implementation of actual Maven Repository (as opposed to a file cache). 
*/ +final class MavenRepo private ( + name: String, + root: String, + localIfFile: Boolean, + val _allowInsecureProtocol: Boolean) extends sbt.librarymanagement.MavenRepository(name, root, localIfFile) with Serializable { + override def isCache: Boolean = false + override def allowInsecureProtocol: Boolean = _allowInsecureProtocol + private[sbt] override def validateProtocol(logger: sbt.util.Logger): Boolean = Resolver.validateMavenRepo(this, logger) + private def this(name: String, root: String) = this(name, root, true, false) + private def this(name: String, root: String, localIfFile: Boolean) = this(name, root, localIfFile, false) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: MavenRepo => (this.name == x.name) && (this.root == x.root) && (this.localIfFile == x.localIfFile) && (this._allowInsecureProtocol == x._allowInsecureProtocol) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.MavenRepo".##) + name.##) + root.##) + localIfFile.##) + _allowInsecureProtocol.##) + } + override def toString: String = { + s"$name: $root" + } + private[this] def copy(name: String = name, root: String = root, localIfFile: Boolean = localIfFile, _allowInsecureProtocol: Boolean = _allowInsecureProtocol): MavenRepo = { + new MavenRepo(name, root, localIfFile, _allowInsecureProtocol) + } + def withName(name: String): MavenRepo = { + copy(name = name) + } + def withRoot(root: String): MavenRepo = { + copy(root = root) + } + def withLocalIfFile(localIfFile: Boolean): MavenRepo = { + copy(localIfFile = localIfFile) + } + def with_allowInsecureProtocol(_allowInsecureProtocol: Boolean): MavenRepo = { + copy(_allowInsecureProtocol = _allowInsecureProtocol) + } +} +object MavenRepo { + + def apply(name: String, root: String): MavenRepo = new MavenRepo(name, root) + def apply(name: String, root: String, localIfFile: Boolean): MavenRepo = new MavenRepo(name, root, localIfFile) + def apply(name: String, root: String, localIfFile: Boolean, _allowInsecureProtocol: Boolean): MavenRepo = new MavenRepo(name, root, localIfFile, _allowInsecureProtocol) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepoFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepoFormats.scala new file mode 100644 index 000000000..21da41dcd --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepoFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait MavenRepoFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val MavenRepoFormat: JsonFormat[sbt.librarymanagement.MavenRepo] = new JsonFormat[sbt.librarymanagement.MavenRepo] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.MavenRepo = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val root = unbuilder.readField[String]("root") + val localIfFile = unbuilder.readField[Boolean]("localIfFile") + val _allowInsecureProtocol = unbuilder.readField[Boolean]("_allowInsecureProtocol") + unbuilder.endObject() + sbt.librarymanagement.MavenRepo(name, root, localIfFile, _allowInsecureProtocol) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.MavenRepo, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("root", obj.root) + builder.addField("localIfFile", obj.localIfFile) + builder.addField("_allowInsecureProtocol", obj._allowInsecureProtocol) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepository.scala new file mode 100644 index 000000000..7618f2b02 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepository.scala @@ -0,0 +1,35 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** An instance of a remote maven repository. Note: This will use Aether/Maven to resolve artifacts. */ +abstract class MavenRepository( + name: String, + val root: String, + val localIfFile: Boolean) extends sbt.librarymanagement.Resolver(name) with Serializable { + def isCache: Boolean + def allowInsecureProtocol: Boolean + def withAllowInsecureProtocol(allowInsecureProtocol: Boolean): MavenRepository = + this match { + case x: MavenRepo => x.with_allowInsecureProtocol(allowInsecureProtocol) + case x: MavenCache => x + } + def this(name: String, root: String) = this(name, root, true) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: MavenRepository => (this.name == x.name) && (this.root == x.root) && (this.localIfFile == x.localIfFile) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.MavenRepository".##) + name.##) + root.##) + localIfFile.##) + } + override def toString: String = { + "MavenRepository(" + name + ", " + root + ", " + localIfFile + ")" + } +} +object MavenRepository extends sbt.librarymanagement.MavenRepositoryFunctions { + +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepositoryFormats.scala new file mode 100644 index 000000000..34e8f0975 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/MavenRepositoryFormats.scala @@ -0,0 +1,11 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement + +import _root_.sjsonnew.JsonFormat +trait MavenRepositoryFormats { self: sjsonnew.BasicJsonProtocol with sbt.librarymanagement.MavenRepoFormats with sbt.librarymanagement.MavenCacheFormats => +implicit lazy val MavenRepositoryFormat: JsonFormat[sbt.librarymanagement.MavenRepository] = flatUnionFormat2[sbt.librarymanagement.MavenRepository, sbt.librarymanagement.MavenRepo, sbt.librarymanagement.MavenCache]("type") +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleConfiguration.scala new file mode 100644 index 000000000..7b4075929 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleConfiguration.scala @@ -0,0 +1,45 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class ModuleConfiguration private ( + val organization: String, + val name: String, + val revision: String, + val resolver: sbt.librarymanagement.Resolver) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ModuleConfiguration => (this.organization == x.organization) && (this.name == x.name) && (this.revision == x.revision) && (this.resolver == x.resolver) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ModuleConfiguration".##) + organization.##) + name.##) + revision.##) + resolver.##) + } + override def toString: String = { + "ModuleConfiguration(" + organization + ", " + name + ", " + revision + ", " + resolver + ")" + } + private[this] def copy(organization: String = organization, name: String = name, revision: String = revision, resolver: sbt.librarymanagement.Resolver = resolver): ModuleConfiguration = { + new ModuleConfiguration(organization, name, revision, resolver) + } + def withOrganization(organization: String): ModuleConfiguration = { + copy(organization = organization) + } + def withName(name: String): ModuleConfiguration = { + copy(name = name) + } + def withRevision(revision: String): ModuleConfiguration = { + copy(revision = revision) + } + def withResolver(resolver: sbt.librarymanagement.Resolver): ModuleConfiguration = { + copy(resolver = resolver) + } +} +object ModuleConfiguration { + def apply(org: String, resolver: sbt.librarymanagement.Resolver): ModuleConfiguration = apply(org, "*", "*", resolver) + def apply(org: String, name: String, resolver: sbt.librarymanagement.Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver) + def apply(organization: String, name: String, revision: String, resolver: sbt.librarymanagement.Resolver): ModuleConfiguration = new ModuleConfiguration(organization, name, revision, resolver) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleConfigurationFormats.scala new file mode 100644 index 000000000..1f34ab28f --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleConfigurationFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ModuleConfigurationFormats { self: sbt.librarymanagement.ResolverFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val ModuleConfigurationFormat: JsonFormat[sbt.librarymanagement.ModuleConfiguration] = new JsonFormat[sbt.librarymanagement.ModuleConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ModuleConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val organization = unbuilder.readField[String]("organization") + val name = unbuilder.readField[String]("name") + val revision = unbuilder.readField[String]("revision") + val resolver = unbuilder.readField[sbt.librarymanagement.Resolver]("resolver") + unbuilder.endObject() + sbt.librarymanagement.ModuleConfiguration(organization, name, revision, resolver) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ModuleConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("organization", obj.organization) + builder.addField("name", obj.name) + builder.addField("revision", obj.revision) + builder.addField("resolver", obj.resolver) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleDescriptorConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleDescriptorConfiguration.scala new file mode 100644 index 000000000..11d51997d --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleDescriptorConfiguration.scala @@ -0,0 +1,80 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class ModuleDescriptorConfiguration private ( + validate: Boolean, + scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], + val module: sbt.librarymanagement.ModuleID, + val moduleInfo: sbt.librarymanagement.ModuleInfo, + val dependencies: Vector[sbt.librarymanagement.ModuleID], + val overrides: Vector[sbt.librarymanagement.ModuleID], + val excludes: Vector[sbt.librarymanagement.InclExclRule], + val ivyXML: scala.xml.NodeSeq, + val configurations: Vector[sbt.librarymanagement.Configuration], + val defaultConfiguration: Option[sbt.librarymanagement.Configuration], + val conflictManager: sbt.librarymanagement.ConflictManager) extends sbt.librarymanagement.ModuleSettings(validate, scalaModuleInfo) with Serializable { + + private def this(module: sbt.librarymanagement.ModuleID, moduleInfo: sbt.librarymanagement.ModuleInfo) = this(false, None, module, moduleInfo, Vector.empty, Vector.empty, Vector.empty, scala.xml.NodeSeq.Empty, sbt.librarymanagement.Configurations.default, Option(sbt.librarymanagement.Configurations.Compile), sbt.librarymanagement.ConflictManager.default) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ModuleDescriptorConfiguration => (this.validate == x.validate) && (this.scalaModuleInfo == x.scalaModuleInfo) && (this.module == x.module) && (this.moduleInfo == x.moduleInfo) && (this.dependencies == x.dependencies) && (this.overrides == x.overrides) && (this.excludes == x.excludes) && (this.ivyXML == x.ivyXML) && (this.configurations == x.configurations) && (this.defaultConfiguration == x.defaultConfiguration) && (this.conflictManager == x.conflictManager) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ModuleDescriptorConfiguration".##) + validate.##) + scalaModuleInfo.##) + module.##) + moduleInfo.##) + dependencies.##) + overrides.##) + excludes.##) + ivyXML.##) + configurations.##) + defaultConfiguration.##) + conflictManager.##) + } + override def toString: String = { + "ModuleDescriptorConfiguration(" + validate + ", " + scalaModuleInfo + ", " + module + ", " + moduleInfo + ", " + dependencies + ", " + overrides + ", " + excludes + ", " + ivyXML + ", " + configurations + ", " + defaultConfiguration + ", " + conflictManager + ")" + } + private[this] def copy(validate: Boolean = validate, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo] = scalaModuleInfo, module: sbt.librarymanagement.ModuleID = module, moduleInfo: sbt.librarymanagement.ModuleInfo = moduleInfo, dependencies: Vector[sbt.librarymanagement.ModuleID] = dependencies, overrides: Vector[sbt.librarymanagement.ModuleID] = overrides, excludes: Vector[sbt.librarymanagement.InclExclRule] = excludes, ivyXML: scala.xml.NodeSeq = ivyXML, configurations: Vector[sbt.librarymanagement.Configuration] = configurations, defaultConfiguration: Option[sbt.librarymanagement.Configuration] = defaultConfiguration, conflictManager: sbt.librarymanagement.ConflictManager = conflictManager): ModuleDescriptorConfiguration = { + new ModuleDescriptorConfiguration(validate, scalaModuleInfo, module, moduleInfo, dependencies, overrides, excludes, ivyXML, configurations, defaultConfiguration, conflictManager) + } + def withValidate(validate: Boolean): ModuleDescriptorConfiguration = { + copy(validate = validate) + } + def withScalaModuleInfo(scalaModuleInfo: 
Option[sbt.librarymanagement.ScalaModuleInfo]): ModuleDescriptorConfiguration = { + copy(scalaModuleInfo = scalaModuleInfo) + } + def withScalaModuleInfo(scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo): ModuleDescriptorConfiguration = { + copy(scalaModuleInfo = Option(scalaModuleInfo)) + } + def withModule(module: sbt.librarymanagement.ModuleID): ModuleDescriptorConfiguration = { + copy(module = module) + } + def withModuleInfo(moduleInfo: sbt.librarymanagement.ModuleInfo): ModuleDescriptorConfiguration = { + copy(moduleInfo = moduleInfo) + } + def withDependencies(dependencies: Vector[sbt.librarymanagement.ModuleID]): ModuleDescriptorConfiguration = { + copy(dependencies = dependencies) + } + def withOverrides(overrides: Vector[sbt.librarymanagement.ModuleID]): ModuleDescriptorConfiguration = { + copy(overrides = overrides) + } + def withExcludes(excludes: Vector[sbt.librarymanagement.InclExclRule]): ModuleDescriptorConfiguration = { + copy(excludes = excludes) + } + def withIvyXML(ivyXML: scala.xml.NodeSeq): ModuleDescriptorConfiguration = { + copy(ivyXML = ivyXML) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.Configuration]): ModuleDescriptorConfiguration = { + copy(configurations = configurations) + } + def withDefaultConfiguration(defaultConfiguration: Option[sbt.librarymanagement.Configuration]): ModuleDescriptorConfiguration = { + copy(defaultConfiguration = defaultConfiguration) + } + def withDefaultConfiguration(defaultConfiguration: sbt.librarymanagement.Configuration): ModuleDescriptorConfiguration = { + copy(defaultConfiguration = Option(defaultConfiguration)) + } + def withConflictManager(conflictManager: sbt.librarymanagement.ConflictManager): ModuleDescriptorConfiguration = { + copy(conflictManager = conflictManager) + } +} +object ModuleDescriptorConfiguration extends sbt.librarymanagement.InlineConfigurationFunctions { + + def apply(module: sbt.librarymanagement.ModuleID, moduleInfo: sbt.librarymanagement.ModuleInfo): ModuleDescriptorConfiguration = new ModuleDescriptorConfiguration(module, moduleInfo) + def apply(validate: Boolean, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], module: sbt.librarymanagement.ModuleID, moduleInfo: sbt.librarymanagement.ModuleInfo, dependencies: Vector[sbt.librarymanagement.ModuleID], overrides: Vector[sbt.librarymanagement.ModuleID], excludes: Vector[sbt.librarymanagement.InclExclRule], ivyXML: scala.xml.NodeSeq, configurations: Vector[sbt.librarymanagement.Configuration], defaultConfiguration: Option[sbt.librarymanagement.Configuration], conflictManager: sbt.librarymanagement.ConflictManager): ModuleDescriptorConfiguration = new ModuleDescriptorConfiguration(validate, scalaModuleInfo, module, moduleInfo, dependencies, overrides, excludes, ivyXML, configurations, defaultConfiguration, conflictManager) + def apply(validate: Boolean, scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo, module: sbt.librarymanagement.ModuleID, moduleInfo: sbt.librarymanagement.ModuleInfo, dependencies: Vector[sbt.librarymanagement.ModuleID], overrides: Vector[sbt.librarymanagement.ModuleID], excludes: Vector[sbt.librarymanagement.InclExclRule], ivyXML: scala.xml.NodeSeq, configurations: Vector[sbt.librarymanagement.Configuration], defaultConfiguration: sbt.librarymanagement.Configuration, conflictManager: sbt.librarymanagement.ConflictManager): ModuleDescriptorConfiguration = new ModuleDescriptorConfiguration(validate, Option(scalaModuleInfo), module, moduleInfo, dependencies, overrides, excludes, ivyXML, 
configurations, Option(defaultConfiguration), conflictManager) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleDescriptorConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleDescriptorConfigurationFormats.scala new file mode 100644 index 000000000..e774c1a5c --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleDescriptorConfigurationFormats.scala @@ -0,0 +1,47 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ModuleDescriptorConfigurationFormats { self: sbt.librarymanagement.ScalaModuleInfoFormats with sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.ModuleInfoFormats with sbt.librarymanagement.ScmInfoFormats with sbt.librarymanagement.DeveloperFormats with sbt.internal.librarymanagement.formats.NodeSeqFormat with sbt.librarymanagement.ConflictManagerFormats => +implicit lazy val ModuleDescriptorConfigurationFormat: JsonFormat[sbt.librarymanagement.ModuleDescriptorConfiguration] = new JsonFormat[sbt.librarymanagement.ModuleDescriptorConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ModuleDescriptorConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val validate = unbuilder.readField[Boolean]("validate") + val scalaModuleInfo = unbuilder.readField[Option[sbt.librarymanagement.ScalaModuleInfo]]("scalaModuleInfo") + val module = unbuilder.readField[sbt.librarymanagement.ModuleID]("module") + val moduleInfo = unbuilder.readField[sbt.librarymanagement.ModuleInfo]("moduleInfo") + val dependencies = unbuilder.readField[Vector[sbt.librarymanagement.ModuleID]]("dependencies") + val overrides = unbuilder.readField[Vector[sbt.librarymanagement.ModuleID]]("overrides") + val excludes = unbuilder.readField[Vector[sbt.librarymanagement.InclExclRule]]("excludes") + val ivyXML = unbuilder.readField[scala.xml.NodeSeq]("ivyXML") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.Configuration]]("configurations") + val defaultConfiguration = unbuilder.readField[Option[sbt.librarymanagement.Configuration]]("defaultConfiguration") + val conflictManager = unbuilder.readField[sbt.librarymanagement.ConflictManager]("conflictManager") + unbuilder.endObject() + sbt.librarymanagement.ModuleDescriptorConfiguration(validate, scalaModuleInfo, module, moduleInfo, dependencies, overrides, excludes, ivyXML, configurations, defaultConfiguration, conflictManager) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ModuleDescriptorConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("validate", 
obj.validate) + builder.addField("scalaModuleInfo", obj.scalaModuleInfo) + builder.addField("module", obj.module) + builder.addField("moduleInfo", obj.moduleInfo) + builder.addField("dependencies", obj.dependencies) + builder.addField("overrides", obj.overrides) + builder.addField("excludes", obj.excludes) + builder.addField("ivyXML", obj.ivyXML) + builder.addField("configurations", obj.configurations) + builder.addField("defaultConfiguration", obj.defaultConfiguration) + builder.addField("conflictManager", obj.conflictManager) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleID.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleID.scala new file mode 100644 index 000000000..ed282ac6e --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleID.scala @@ -0,0 +1,87 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class ModuleID private ( + val organization: String, + val name: String, + val revision: String, + val configurations: Option[String], + val isChanging: Boolean, + val isTransitive: Boolean, + val isForce: Boolean, + val explicitArtifacts: Vector[sbt.librarymanagement.Artifact], + val inclusions: Vector[sbt.librarymanagement.InclExclRule], + val exclusions: Vector[sbt.librarymanagement.InclExclRule], + val extraAttributes: Map[String, String], + val crossVersion: sbt.librarymanagement.CrossVersion, + val branchName: Option[String], + val platformOpt: Option[String]) extends sbt.librarymanagement.ModuleIDExtra with Serializable { + + private def this(organization: String, name: String, revision: String) = this(organization, name, revision, None, false, true, false, Vector.empty, Vector.empty, Vector.empty, Map.empty, sbt.librarymanagement.Disabled(), None, None) + private def this(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean, isForce: Boolean, explicitArtifacts: Vector[sbt.librarymanagement.Artifact], inclusions: Vector[sbt.librarymanagement.InclExclRule], exclusions: Vector[sbt.librarymanagement.InclExclRule], extraAttributes: Map[String, String], crossVersion: sbt.librarymanagement.CrossVersion, branchName: Option[String]) = this(organization, name, revision, configurations, isChanging, isTransitive, isForce, explicitArtifacts, inclusions, exclusions, extraAttributes, crossVersion, branchName, None) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ModuleID => (this.organization == x.organization) && (this.name == x.name) && (this.revision == x.revision) && (this.configurations == x.configurations) && (this.isChanging == x.isChanging) && (this.isTransitive == x.isTransitive) && (this.isForce == x.isForce) && (this.explicitArtifacts == x.explicitArtifacts) && (this.inclusions == x.inclusions) && (this.exclusions == x.exclusions) && (this.extraAttributes == x.extraAttributes) && (this.crossVersion == x.crossVersion) && (this.branchName == x.branchName) && (this.platformOpt == x.platformOpt) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ModuleID".##) + organization.##) + name.##) + revision.##) + configurations.##) + isChanging.##) + isTransitive.##) + isForce.##) + explicitArtifacts.##) + inclusions.##) + 
exclusions.##) + extraAttributes.##) + crossVersion.##) + branchName.##) + platformOpt.##) + } + override def toString: String = { + this.toStringImpl + } + private[this] def copy(organization: String = organization, name: String = name, revision: String = revision, configurations: Option[String] = configurations, isChanging: Boolean = isChanging, isTransitive: Boolean = isTransitive, isForce: Boolean = isForce, explicitArtifacts: Vector[sbt.librarymanagement.Artifact] = explicitArtifacts, inclusions: Vector[sbt.librarymanagement.InclExclRule] = inclusions, exclusions: Vector[sbt.librarymanagement.InclExclRule] = exclusions, extraAttributes: Map[String, String] = extraAttributes, crossVersion: sbt.librarymanagement.CrossVersion = crossVersion, branchName: Option[String] = branchName, platformOpt: Option[String] = platformOpt): ModuleID = { + new ModuleID(organization, name, revision, configurations, isChanging, isTransitive, isForce, explicitArtifacts, inclusions, exclusions, extraAttributes, crossVersion, branchName, platformOpt) + } + def withOrganization(organization: String): ModuleID = { + copy(organization = organization) + } + def withName(name: String): ModuleID = { + copy(name = name) + } + def withRevision(revision: String): ModuleID = { + copy(revision = revision) + } + def withConfigurations(configurations: Option[String]): ModuleID = { + copy(configurations = configurations) + } + def withIsChanging(isChanging: Boolean): ModuleID = { + copy(isChanging = isChanging) + } + def withIsTransitive(isTransitive: Boolean): ModuleID = { + copy(isTransitive = isTransitive) + } + def withIsForce(isForce: Boolean): ModuleID = { + copy(isForce = isForce) + } + def withExplicitArtifacts(explicitArtifacts: Vector[sbt.librarymanagement.Artifact]): ModuleID = { + copy(explicitArtifacts = explicitArtifacts) + } + def withInclusions(inclusions: Vector[sbt.librarymanagement.InclExclRule]): ModuleID = { + copy(inclusions = inclusions) + } + def withExclusions(exclusions: Vector[sbt.librarymanagement.InclExclRule]): ModuleID = { + copy(exclusions = exclusions) + } + def withExtraAttributes(extraAttributes: Map[String, String]): ModuleID = { + copy(extraAttributes = extraAttributes) + } + def withCrossVersion(crossVersion: sbt.librarymanagement.CrossVersion): ModuleID = { + copy(crossVersion = crossVersion) + } + def withBranchName(branchName: Option[String]): ModuleID = { + copy(branchName = branchName) + } + def withPlatformOpt(platformOpt: Option[String]): ModuleID = { + copy(platformOpt = platformOpt) + } +} +object ModuleID extends sbt.librarymanagement.ModuleIDFunctions { + + def apply(organization: String, name: String, revision: String): ModuleID = new ModuleID(organization, name, revision) + def apply(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean, isForce: Boolean, explicitArtifacts: Vector[sbt.librarymanagement.Artifact], inclusions: Vector[sbt.librarymanagement.InclExclRule], exclusions: Vector[sbt.librarymanagement.InclExclRule], extraAttributes: Map[String, String], crossVersion: sbt.librarymanagement.CrossVersion, branchName: Option[String]): ModuleID = new ModuleID(organization, name, revision, configurations, isChanging, isTransitive, isForce, explicitArtifacts, inclusions, exclusions, extraAttributes, crossVersion, branchName) + def apply(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean, isForce: Boolean, 
explicitArtifacts: Vector[sbt.librarymanagement.Artifact], inclusions: Vector[sbt.librarymanagement.InclExclRule], exclusions: Vector[sbt.librarymanagement.InclExclRule], extraAttributes: Map[String, String], crossVersion: sbt.librarymanagement.CrossVersion, branchName: Option[String], platformOpt: Option[String]): ModuleID = new ModuleID(organization, name, revision, configurations, isChanging, isTransitive, isForce, explicitArtifacts, inclusions, exclusions, extraAttributes, crossVersion, branchName, platformOpt) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleIDFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleIDFormats.scala new file mode 100644 index 000000000..62e93e147 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleIDFormats.scala @@ -0,0 +1,53 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ModuleIDFormats { self: sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats => +implicit lazy val ModuleIDFormat: JsonFormat[sbt.librarymanagement.ModuleID] = new JsonFormat[sbt.librarymanagement.ModuleID] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ModuleID = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val organization = unbuilder.readField[String]("organization") + val name = unbuilder.readField[String]("name") + val revision = unbuilder.readField[String]("revision") + val configurations = unbuilder.readField[Option[String]]("configurations") + val isChanging = unbuilder.readField[Boolean]("isChanging") + val isTransitive = unbuilder.readField[Boolean]("isTransitive") + val isForce = unbuilder.readField[Boolean]("isForce") + val explicitArtifacts = unbuilder.readField[Vector[sbt.librarymanagement.Artifact]]("explicitArtifacts") + val inclusions = unbuilder.readField[Vector[sbt.librarymanagement.InclExclRule]]("inclusions") + val exclusions = unbuilder.readField[Vector[sbt.librarymanagement.InclExclRule]]("exclusions") + val extraAttributes = unbuilder.readField[Map[String, String]]("extraAttributes") + val crossVersion = unbuilder.readField[sbt.librarymanagement.CrossVersion]("crossVersion") + val branchName = unbuilder.readField[Option[String]]("branchName") + val platformOpt = unbuilder.readField[Option[String]]("platformOpt") + unbuilder.endObject() + sbt.librarymanagement.ModuleID(organization, name, revision, configurations, isChanging, isTransitive, isForce, explicitArtifacts, inclusions, exclusions, extraAttributes, crossVersion, branchName, platformOpt) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ModuleID, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("organization", obj.organization) + builder.addField("name", 
obj.name) + builder.addField("revision", obj.revision) + builder.addField("configurations", obj.configurations) + builder.addField("isChanging", obj.isChanging) + builder.addField("isTransitive", obj.isTransitive) + builder.addField("isForce", obj.isForce) + builder.addField("explicitArtifacts", obj.explicitArtifacts) + builder.addField("inclusions", obj.inclusions) + builder.addField("exclusions", obj.exclusions) + builder.addField("extraAttributes", obj.extraAttributes) + builder.addField("crossVersion", obj.crossVersion) + builder.addField("branchName", obj.branchName) + builder.addField("platformOpt", obj.platformOpt) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleInfo.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleInfo.scala new file mode 100644 index 000000000..46f2d6f7f --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleInfo.scala @@ -0,0 +1,66 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** Additional information about a project module */ +final class ModuleInfo private ( + val nameFormal: String, + val description: String, + val homepage: Option[java.net.URI], + val startYear: Option[Int], + val licenses: Vector[scala.Tuple2[String, java.net.URI]], + val organizationName: String, + val organizationHomepage: Option[java.net.URI], + val scmInfo: Option[sbt.librarymanagement.ScmInfo], + val developers: Vector[sbt.librarymanagement.Developer]) extends Serializable { + + private def this(nameFormal: String) = this(nameFormal, "", None, None, Vector.empty, "", None, None, Vector.empty) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ModuleInfo => (this.nameFormal == x.nameFormal) && (this.description == x.description) && (this.homepage == x.homepage) && (this.startYear == x.startYear) && (this.licenses == x.licenses) && (this.organizationName == x.organizationName) && (this.organizationHomepage == x.organizationHomepage) && (this.scmInfo == x.scmInfo) && (this.developers == x.developers) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ModuleInfo".##) + nameFormal.##) + description.##) + homepage.##) + startYear.##) + licenses.##) + organizationName.##) + organizationHomepage.##) + scmInfo.##) + developers.##) + } + override def toString: String = { + "ModuleInfo(" + nameFormal + ", " + description + ", " + homepage + ", " + startYear + ", " + licenses + ", " + organizationName + ", " + organizationHomepage + ", " + scmInfo + ", " + developers + ")" + } + private[this] def copy(nameFormal: String = nameFormal, description: String = description, homepage: Option[java.net.URI] = homepage, startYear: Option[Int] = startYear, licenses: Vector[scala.Tuple2[String, java.net.URI]] = licenses, organizationName: String = organizationName, organizationHomepage: Option[java.net.URI] = organizationHomepage, scmInfo: Option[sbt.librarymanagement.ScmInfo] = scmInfo, developers: Vector[sbt.librarymanagement.Developer] = developers): ModuleInfo = { + new ModuleInfo(nameFormal, description, homepage, startYear, licenses, organizationName, organizationHomepage, scmInfo, developers) + } + def withNameFormal(nameFormal: String): ModuleInfo = { + copy(nameFormal = nameFormal) + } + def withDescription(description: String): ModuleInfo 
= { + copy(description = description) + } + def withHomepage(homepage: Option[java.net.URI]): ModuleInfo = { + copy(homepage = homepage) + } + def withStartYear(startYear: Option[Int]): ModuleInfo = { + copy(startYear = startYear) + } + def withLicenses(licenses: Vector[scala.Tuple2[String, java.net.URI]]): ModuleInfo = { + copy(licenses = licenses) + } + def withOrganizationName(organizationName: String): ModuleInfo = { + copy(organizationName = organizationName) + } + def withOrganizationHomepage(organizationHomepage: Option[java.net.URI]): ModuleInfo = { + copy(organizationHomepage = organizationHomepage) + } + def withScmInfo(scmInfo: Option[sbt.librarymanagement.ScmInfo]): ModuleInfo = { + copy(scmInfo = scmInfo) + } + def withDevelopers(developers: Vector[sbt.librarymanagement.Developer]): ModuleInfo = { + copy(developers = developers) + } +} +object ModuleInfo { + + def apply(nameFormal: String): ModuleInfo = new ModuleInfo(nameFormal) + def apply(nameFormal: String, description: String, homepage: Option[java.net.URI], startYear: Option[Int], licenses: Vector[scala.Tuple2[String, java.net.URI]], organizationName: String, organizationHomepage: Option[java.net.URI], scmInfo: Option[sbt.librarymanagement.ScmInfo], developers: Vector[sbt.librarymanagement.Developer]): ModuleInfo = new ModuleInfo(nameFormal, description, homepage, startYear, licenses, organizationName, organizationHomepage, scmInfo, developers) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleInfoFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleInfoFormats.scala new file mode 100644 index 000000000..aa586ebca --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleInfoFormats.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ModuleInfoFormats { self: sbt.librarymanagement.ScmInfoFormats with sbt.librarymanagement.DeveloperFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val ModuleInfoFormat: JsonFormat[sbt.librarymanagement.ModuleInfo] = new JsonFormat[sbt.librarymanagement.ModuleInfo] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ModuleInfo = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val nameFormal = unbuilder.readField[String]("nameFormal") + val description = unbuilder.readField[String]("description") + val homepage = unbuilder.readField[Option[java.net.URI]]("homepage") + val startYear = unbuilder.readField[Option[Int]]("startYear") + val licenses = unbuilder.readField[Vector[scala.Tuple2[String, java.net.URI]]]("licenses") + val organizationName = unbuilder.readField[String]("organizationName") + val organizationHomepage = unbuilder.readField[Option[java.net.URI]]("organizationHomepage") + val scmInfo = unbuilder.readField[Option[sbt.librarymanagement.ScmInfo]]("scmInfo") + val developers = unbuilder.readField[Vector[sbt.librarymanagement.Developer]]("developers") + unbuilder.endObject() + sbt.librarymanagement.ModuleInfo(nameFormal, description, homepage, startYear, licenses, organizationName, organizationHomepage, scmInfo, developers) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ModuleInfo, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("nameFormal", obj.nameFormal) + builder.addField("description", obj.description) + builder.addField("homepage", obj.homepage) + builder.addField("startYear", obj.startYear) + builder.addField("licenses", obj.licenses) + builder.addField("organizationName", obj.organizationName) + builder.addField("organizationHomepage", obj.organizationHomepage) + builder.addField("scmInfo", obj.scmInfo) + builder.addField("developers", obj.developers) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleReport.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleReport.scala new file mode 100644 index 000000000..2c3386942 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleReport.scala @@ -0,0 +1,106 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Provides information about the resolution of a module. + * This information is in the context of a specific configuration. 
+ */ +final class ModuleReport private ( + val module: sbt.librarymanagement.ModuleID, + val artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], + val missingArtifacts: Vector[sbt.librarymanagement.Artifact], + val status: Option[String], + val publicationDate: Option[java.util.Calendar], + val resolver: Option[String], + val artifactResolver: Option[String], + val evicted: Boolean, + val evictedData: Option[String], + val evictedReason: Option[String], + val problem: Option[String], + val homepage: Option[String], + val extraAttributes: Map[String, String], + val isDefault: Option[Boolean], + val branch: Option[String], + val configurations: Vector[sbt.librarymanagement.ConfigRef], + val licenses: Vector[scala.Tuple2[String, Option[String]]], + val callers: Vector[sbt.librarymanagement.Caller]) extends sbt.librarymanagement.ModuleReportExtra with Serializable { + + private def this(module: sbt.librarymanagement.ModuleID, artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], missingArtifacts: Vector[sbt.librarymanagement.Artifact]) = this(module, artifacts, missingArtifacts, None, None, None, None, false, None, None, None, None, Map.empty, None, None, Vector.empty, Vector.empty, Vector.empty) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ModuleReport => (this.module == x.module) && (this.artifacts == x.artifacts) && (this.missingArtifacts == x.missingArtifacts) && (this.status == x.status) && (this.publicationDate == x.publicationDate) && (this.resolver == x.resolver) && (this.artifactResolver == x.artifactResolver) && (this.evicted == x.evicted) && (this.evictedData == x.evictedData) && (this.evictedReason == x.evictedReason) && (this.problem == x.problem) && (this.homepage == x.homepage) && (this.extraAttributes == x.extraAttributes) && (this.isDefault == x.isDefault) && (this.branch == x.branch) && (this.configurations == x.configurations) && (this.licenses == x.licenses) && (this.callers == x.callers) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ModuleReport".##) + module.##) + artifacts.##) + missingArtifacts.##) + status.##) + publicationDate.##) + resolver.##) + artifactResolver.##) + evicted.##) + evictedData.##) + evictedReason.##) + problem.##) + homepage.##) + extraAttributes.##) + isDefault.##) + branch.##) + configurations.##) + licenses.##) + callers.##) + } + override def toString: String = { + s"\t\t$module: " + + (if (arts.size <= 1) "" else "\n\t\t\t") + arts.mkString("\n\t\t\t") + "\n" + } + private[this] def copy(module: sbt.librarymanagement.ModuleID = module, artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]] = artifacts, missingArtifacts: Vector[sbt.librarymanagement.Artifact] = missingArtifacts, status: Option[String] = status, publicationDate: Option[java.util.Calendar] = publicationDate, resolver: Option[String] = resolver, artifactResolver: Option[String] = artifactResolver, evicted: Boolean = evicted, evictedData: Option[String] = evictedData, evictedReason: Option[String] = evictedReason, problem: Option[String] = problem, homepage: Option[String] = homepage, extraAttributes: Map[String, String] = extraAttributes, isDefault: Option[Boolean] = isDefault, branch: Option[String] = branch, configurations: Vector[sbt.librarymanagement.ConfigRef] = configurations, licenses: 
Vector[scala.Tuple2[String, Option[String]]] = licenses, callers: Vector[sbt.librarymanagement.Caller] = callers): ModuleReport = { + new ModuleReport(module, artifacts, missingArtifacts, status, publicationDate, resolver, artifactResolver, evicted, evictedData, evictedReason, problem, homepage, extraAttributes, isDefault, branch, configurations, licenses, callers) + } + def withModule(module: sbt.librarymanagement.ModuleID): ModuleReport = { + copy(module = module) + } + def withArtifacts(artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]]): ModuleReport = { + copy(artifacts = artifacts) + } + def withMissingArtifacts(missingArtifacts: Vector[sbt.librarymanagement.Artifact]): ModuleReport = { + copy(missingArtifacts = missingArtifacts) + } + def withStatus(status: Option[String]): ModuleReport = { + copy(status = status) + } + def withPublicationDate(publicationDate: Option[java.util.Calendar]): ModuleReport = { + copy(publicationDate = publicationDate) + } + def withResolver(resolver: Option[String]): ModuleReport = { + copy(resolver = resolver) + } + def withArtifactResolver(artifactResolver: Option[String]): ModuleReport = { + copy(artifactResolver = artifactResolver) + } + def withEvicted(evicted: Boolean): ModuleReport = { + copy(evicted = evicted) + } + def withEvictedData(evictedData: Option[String]): ModuleReport = { + copy(evictedData = evictedData) + } + def withEvictedReason(evictedReason: Option[String]): ModuleReport = { + copy(evictedReason = evictedReason) + } + def withProblem(problem: Option[String]): ModuleReport = { + copy(problem = problem) + } + def withHomepage(homepage: Option[String]): ModuleReport = { + copy(homepage = homepage) + } + def withExtraAttributes(extraAttributes: Map[String, String]): ModuleReport = { + copy(extraAttributes = extraAttributes) + } + def withIsDefault(isDefault: Option[Boolean]): ModuleReport = { + copy(isDefault = isDefault) + } + def withBranch(branch: Option[String]): ModuleReport = { + copy(branch = branch) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.ConfigRef]): ModuleReport = { + copy(configurations = configurations) + } + def withLicenses(licenses: Vector[scala.Tuple2[String, Option[String]]]): ModuleReport = { + copy(licenses = licenses) + } + def withCallers(callers: Vector[sbt.librarymanagement.Caller]): ModuleReport = { + copy(callers = callers) + } +} +object ModuleReport { + + def apply(module: sbt.librarymanagement.ModuleID, artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], missingArtifacts: Vector[sbt.librarymanagement.Artifact]): ModuleReport = new ModuleReport(module, artifacts, missingArtifacts) + def apply(module: sbt.librarymanagement.ModuleID, artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], missingArtifacts: Vector[sbt.librarymanagement.Artifact], status: Option[String], publicationDate: Option[java.util.Calendar], resolver: Option[String], artifactResolver: Option[String], evicted: Boolean, evictedData: Option[String], evictedReason: Option[String], problem: Option[String], homepage: Option[String], extraAttributes: Map[String, String], isDefault: Option[Boolean], branch: Option[String], configurations: Vector[sbt.librarymanagement.ConfigRef], licenses: Vector[scala.Tuple2[String, Option[String]]], callers: Vector[sbt.librarymanagement.Caller]): ModuleReport = new ModuleReport(module, artifacts, missingArtifacts, status, publicationDate, resolver, artifactResolver, evicted, evictedData, evictedReason, 
problem, homepage, extraAttributes, isDefault, branch, configurations, licenses, callers) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleReportFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleReportFormats.scala new file mode 100644 index 000000000..43d2a196f --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleReportFormats.scala @@ -0,0 +1,61 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ModuleReportFormats { self: sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats => +implicit lazy val ModuleReportFormat: JsonFormat[sbt.librarymanagement.ModuleReport] = new JsonFormat[sbt.librarymanagement.ModuleReport] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ModuleReport = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val module = unbuilder.readField[sbt.librarymanagement.ModuleID]("module") + val artifacts = unbuilder.readField[Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]]]("artifacts") + val missingArtifacts = unbuilder.readField[Vector[sbt.librarymanagement.Artifact]]("missingArtifacts") + val status = unbuilder.readField[Option[String]]("status") + val publicationDate = unbuilder.readField[Option[java.util.Calendar]]("publicationDate") + val resolver = unbuilder.readField[Option[String]]("resolver") + val artifactResolver = unbuilder.readField[Option[String]]("artifactResolver") + val evicted = unbuilder.readField[Boolean]("evicted") + val evictedData = unbuilder.readField[Option[String]]("evictedData") + val evictedReason = unbuilder.readField[Option[String]]("evictedReason") + val problem = unbuilder.readField[Option[String]]("problem") + val homepage = unbuilder.readField[Option[String]]("homepage") + val extraAttributes = unbuilder.readField[Map[String, String]]("extraAttributes") + val isDefault = unbuilder.readField[Option[Boolean]]("isDefault") + val branch = unbuilder.readField[Option[String]]("branch") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.ConfigRef]]("configurations") + val licenses = unbuilder.readField[Vector[scala.Tuple2[String, Option[String]]]]("licenses") + val callers = unbuilder.readField[Vector[sbt.librarymanagement.Caller]]("callers") + unbuilder.endObject() + sbt.librarymanagement.ModuleReport(module, artifacts, missingArtifacts, status, publicationDate, resolver, artifactResolver, evicted, evictedData, evictedReason, problem, homepage, extraAttributes, isDefault, branch, configurations, licenses, callers) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ModuleReport, builder: Builder[J]): Unit = { + 
builder.beginObject() + builder.addField("module", obj.module) + builder.addField("artifacts", obj.artifacts) + builder.addField("missingArtifacts", obj.missingArtifacts) + builder.addField("status", obj.status) + builder.addField("publicationDate", obj.publicationDate) + builder.addField("resolver", obj.resolver) + builder.addField("artifactResolver", obj.artifactResolver) + builder.addField("evicted", obj.evicted) + builder.addField("evictedData", obj.evictedData) + builder.addField("evictedReason", obj.evictedReason) + builder.addField("problem", obj.problem) + builder.addField("homepage", obj.homepage) + builder.addField("extraAttributes", obj.extraAttributes) + builder.addField("isDefault", obj.isDefault) + builder.addField("branch", obj.branch) + builder.addField("configurations", obj.configurations) + builder.addField("licenses", obj.licenses) + builder.addField("callers", obj.callers) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleSettings.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleSettings.scala new file mode 100644 index 000000000..5fac9f2f3 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleSettings.scala @@ -0,0 +1,27 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +abstract class ModuleSettings( + val validate: Boolean, + val scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo]) extends Serializable { + + def this() = this(false, None) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ModuleSettings => (this.validate == x.validate) && (this.scalaModuleInfo == x.scalaModuleInfo) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.ModuleSettings".##) + validate.##) + scalaModuleInfo.##) + } + override def toString: String = { + "ModuleSettings(" + validate + ", " + scalaModuleInfo + ")" + } +} +object ModuleSettings { + +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleSettingsFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleSettingsFormats.scala new file mode 100644 index 000000000..f72bc1a96 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ModuleSettingsFormats.scala @@ -0,0 +1,11 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement + +import _root_.sjsonnew.JsonFormat +trait ModuleSettingsFormats { self: sbt.librarymanagement.ScalaModuleInfoFormats with sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.IvyFileConfigurationFormats with sbt.librarymanagement.PomConfigurationFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.ModuleInfoFormats with sbt.librarymanagement.ScmInfoFormats with sbt.librarymanagement.DeveloperFormats with sbt.internal.librarymanagement.formats.NodeSeqFormat with sbt.librarymanagement.ConflictManagerFormats with sbt.librarymanagement.ModuleDescriptorConfigurationFormats => +implicit lazy val ModuleSettingsFormat: JsonFormat[sbt.librarymanagement.ModuleSettings] = flatUnionFormat3[sbt.librarymanagement.ModuleSettings, sbt.librarymanagement.IvyFileConfiguration, sbt.librarymanagement.PomConfiguration, sbt.librarymanagement.ModuleDescriptorConfiguration]("type") +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/OrganizationArtifactReport.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/OrganizationArtifactReport.scala new file mode 100644 index 000000000..83624f2c8 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/OrganizationArtifactReport.scala @@ -0,0 +1,52 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * OrganizationArtifactReport represents an organization+name entry in an Ivy resolution report. + * In sbt's terminology, a "module" consists of organization, name, and version. + * In Ivy's, a "module" means just organization and name, and the ones including version numbers + * are called revisions. + * + * A sequence of OrganizationArtifactReport called details is newly added to ConfigurationReport, replacing evicted. + * (Note that the old evicted was just a seq of ModuleIDs.) + * OrganizationArtifactReport groups the ModuleReports of both winners and evicted modules by their organization and name, + * which can be used to calculate detailed eviction warnings etc.
+ */ +final class OrganizationArtifactReport private ( + val organization: String, + val name: String, + val modules: Vector[sbt.librarymanagement.ModuleReport]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: OrganizationArtifactReport => (this.organization == x.organization) && (this.name == x.name) && (this.modules == x.modules) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.OrganizationArtifactReport".##) + organization.##) + name.##) + modules.##) + } + override def toString: String = { + val details = modules map { _.detailReport } + s"\t$organization:$name\n${details.mkString}\n" + } + private[this] def copy(organization: String = organization, name: String = name, modules: Vector[sbt.librarymanagement.ModuleReport] = modules): OrganizationArtifactReport = { + new OrganizationArtifactReport(organization, name, modules) + } + def withOrganization(organization: String): OrganizationArtifactReport = { + copy(organization = organization) + } + def withName(name: String): OrganizationArtifactReport = { + copy(name = name) + } + def withModules(modules: Vector[sbt.librarymanagement.ModuleReport]): OrganizationArtifactReport = { + copy(modules = modules) + } +} +object OrganizationArtifactReport { + + def apply(organization: String, name: String, modules: Vector[sbt.librarymanagement.ModuleReport]): OrganizationArtifactReport = new OrganizationArtifactReport(organization, name, modules) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/OrganizationArtifactReportFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/OrganizationArtifactReportFormats.scala new file mode 100644 index 000000000..2744325a2 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/OrganizationArtifactReportFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait OrganizationArtifactReportFormats { self: sbt.librarymanagement.ModuleReportFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats => +implicit lazy val OrganizationArtifactReportFormat: JsonFormat[sbt.librarymanagement.OrganizationArtifactReport] = new JsonFormat[sbt.librarymanagement.OrganizationArtifactReport] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.OrganizationArtifactReport = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val organization = unbuilder.readField[String]("organization") + val name = unbuilder.readField[String]("name") + val modules = unbuilder.readField[Vector[sbt.librarymanagement.ModuleReport]]("modules") + unbuilder.endObject() + sbt.librarymanagement.OrganizationArtifactReport(organization, name, modules) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.OrganizationArtifactReport, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("organization", obj.organization) + builder.addField("name", obj.name) + builder.addField("modules", obj.modules) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PasswordAuthentication.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PasswordAuthentication.scala new file mode 100644 index 000000000..56a07dc60 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PasswordAuthentication.scala @@ -0,0 +1,40 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class PasswordAuthentication private ( + val user: String, + val password: Option[String]) extends sbt.librarymanagement.SshAuthentication() with Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: PasswordAuthentication => (this.user == x.user) && (this.password == x.password) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.PasswordAuthentication".##) + user.##) + password.##) + } + override def toString: String = { + "PasswordAuthentication(" + user + ", " + password + ")" + } + private[this] def copy(user: String = user, password: Option[String] = password): PasswordAuthentication = { + new PasswordAuthentication(user, password) + } + def withUser(user: String): PasswordAuthentication = { + copy(user = user) + } + def withPassword(password: Option[String]): PasswordAuthentication = { + copy(password = password) + } + def withPassword(password: String): PasswordAuthentication = { + copy(password = Option(password)) + } +} +object PasswordAuthentication { + + def apply(user: String, password: Option[String]): PasswordAuthentication = new PasswordAuthentication(user, password) + def apply(user: String, password: String): PasswordAuthentication = new PasswordAuthentication(user, Option(password)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PasswordAuthenticationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PasswordAuthenticationFormats.scala new file mode 100644 index 000000000..646853d49 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PasswordAuthenticationFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait PasswordAuthenticationFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val PasswordAuthenticationFormat: JsonFormat[sbt.librarymanagement.PasswordAuthentication] = new JsonFormat[sbt.librarymanagement.PasswordAuthentication] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.PasswordAuthentication = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val user = unbuilder.readField[String]("user") + val password = unbuilder.readField[Option[String]]("password") + unbuilder.endObject() + sbt.librarymanagement.PasswordAuthentication(user, password) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.PasswordAuthentication, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("user", obj.user) + builder.addField("password", obj.password) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/Patterns.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Patterns.scala new file mode 100644 index 000000000..a87caca63 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Patterns.scala @@ -0,0 +1,50 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class Patterns private ( + val ivyPatterns: Vector[String], + val artifactPatterns: Vector[String], + val isMavenCompatible: Boolean, + val descriptorOptional: Boolean, + val skipConsistencyCheck: Boolean) extends Serializable { + + private def this() = this(Vector.empty, Vector.empty, true, false, false) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: Patterns => (this.ivyPatterns == x.ivyPatterns) && (this.artifactPatterns == x.artifactPatterns) && (this.isMavenCompatible == x.isMavenCompatible) && (this.descriptorOptional == x.descriptorOptional) && (this.skipConsistencyCheck == x.skipConsistencyCheck) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.Patterns".##) + ivyPatterns.##) + artifactPatterns.##) + isMavenCompatible.##) + descriptorOptional.##) + skipConsistencyCheck.##) + } + override def toString: String = { + "Patterns(ivyPatterns=%s, artifactPatterns=%s, isMavenCompatible=%s, descriptorOptional=%s, skipConsistencyCheck=%s)".format( + ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) + } + private[this] def copy(ivyPatterns: Vector[String] = ivyPatterns, artifactPatterns: Vector[String] = artifactPatterns, isMavenCompatible: Boolean = isMavenCompatible, descriptorOptional: Boolean = descriptorOptional, skipConsistencyCheck: Boolean = skipConsistencyCheck): Patterns = { + new Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) + } + def withIvyPatterns(ivyPatterns: Vector[String]): Patterns = { + copy(ivyPatterns = ivyPatterns) + } + def withArtifactPatterns(artifactPatterns: Vector[String]): Patterns = { + copy(artifactPatterns = artifactPatterns) + } + def withIsMavenCompatible(isMavenCompatible: Boolean): Patterns = { + copy(isMavenCompatible = isMavenCompatible) + } + def withDescriptorOptional(descriptorOptional: Boolean): Patterns = { + copy(descriptorOptional = descriptorOptional) + } + def withSkipConsistencyCheck(skipConsistencyCheck: Boolean): Patterns = { + copy(skipConsistencyCheck = skipConsistencyCheck) + } +} +object Patterns extends sbt.librarymanagement.PatternsFunctions { + + def apply(): Patterns = new Patterns() + def apply(ivyPatterns: Vector[String], artifactPatterns: Vector[String], isMavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean): Patterns = new Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsBasedRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsBasedRepository.scala new file mode 100644 index 000000000..494baf2e4 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsBasedRepository.scala @@ -0,0 +1,28 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** sbt interface to an Ivy repository based on patterns, which is most Ivy repositories. 
*/ +abstract class PatternsBasedRepository( + name: String, + val patterns: sbt.librarymanagement.Patterns) extends sbt.librarymanagement.Resolver(name) with Serializable { + + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: PatternsBasedRepository => (this.name == x.name) && (this.patterns == x.patterns) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.PatternsBasedRepository".##) + name.##) + patterns.##) + } + override def toString: String = { + "PatternsBasedRepository(" + name + ", " + patterns + ")" + } +} +object PatternsBasedRepository { + +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsBasedRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsBasedRepositoryFormats.scala new file mode 100644 index 000000000..843e70f3d --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsBasedRepositoryFormats.scala @@ -0,0 +1,11 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement + +import _root_.sjsonnew.JsonFormat +trait PatternsBasedRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.FileConfigurationFormats with sbt.librarymanagement.FileRepositoryFormats with sbt.librarymanagement.URLRepositoryFormats with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats with sbt.librarymanagement.SshRepositoryFormats with sbt.librarymanagement.SftpRepositoryFormats => +implicit lazy val PatternsBasedRepositoryFormat: JsonFormat[sbt.librarymanagement.PatternsBasedRepository] = flatUnionFormat4[sbt.librarymanagement.PatternsBasedRepository, sbt.librarymanagement.FileRepository, sbt.librarymanagement.URLRepository, sbt.librarymanagement.SshRepository, sbt.librarymanagement.SftpRepository]("type") +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsFormats.scala new file mode 100644 index 000000000..fe190bc92 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PatternsFormats.scala @@ -0,0 +1,35 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait PatternsFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val PatternsFormat: JsonFormat[sbt.librarymanagement.Patterns] = new JsonFormat[sbt.librarymanagement.Patterns] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.Patterns = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val ivyPatterns = unbuilder.readField[Vector[String]]("ivyPatterns") + val artifactPatterns = unbuilder.readField[Vector[String]]("artifactPatterns") + val isMavenCompatible = unbuilder.readField[Boolean]("isMavenCompatible") + val descriptorOptional = unbuilder.readField[Boolean]("descriptorOptional") + val skipConsistencyCheck = unbuilder.readField[Boolean]("skipConsistencyCheck") + unbuilder.endObject() + sbt.librarymanagement.Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Patterns, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("ivyPatterns", obj.ivyPatterns) + builder.addField("artifactPatterns", obj.artifactPatterns) + builder.addField("isMavenCompatible", obj.isMavenCompatible) + builder.addField("descriptorOptional", obj.descriptorOptional) + builder.addField("skipConsistencyCheck", obj.skipConsistencyCheck) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PomConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PomConfiguration.scala new file mode 100644 index 000000000..441ce8b64 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PomConfiguration.scala @@ -0,0 +1,49 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class PomConfiguration private ( + validate: Boolean, + scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], + val file: java.io.File, + val autoScalaTools: Boolean) extends sbt.librarymanagement.ModuleSettings(validate, scalaModuleInfo) with Serializable { + + private def this(file: java.io.File, autoScalaTools: Boolean) = this(false, None, file, autoScalaTools) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: PomConfiguration => (this.validate == x.validate) && (this.scalaModuleInfo == x.scalaModuleInfo) && (this.file == x.file) && (this.autoScalaTools == x.autoScalaTools) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.PomConfiguration".##) + validate.##) + scalaModuleInfo.##) + file.##) + autoScalaTools.##) + } + override def toString: String = { + "PomConfiguration(" + validate + ", " + scalaModuleInfo + ", " + file + ", " + autoScalaTools + ")" + } + private[this] def copy(validate: Boolean = validate, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo] = scalaModuleInfo, file: java.io.File = file, autoScalaTools: Boolean = autoScalaTools): PomConfiguration = { + new PomConfiguration(validate, scalaModuleInfo, file, autoScalaTools) + } + def withValidate(validate: Boolean): PomConfiguration = { + copy(validate = validate) + } + def withScalaModuleInfo(scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo]): PomConfiguration = { + copy(scalaModuleInfo = scalaModuleInfo) + } + def withScalaModuleInfo(scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo): PomConfiguration = { + copy(scalaModuleInfo = Option(scalaModuleInfo)) + } + def withFile(file: java.io.File): PomConfiguration = { + copy(file = file) + } + def withAutoScalaTools(autoScalaTools: Boolean): PomConfiguration = { + copy(autoScalaTools = autoScalaTools) + } +} +object PomConfiguration { + + def apply(file: java.io.File, autoScalaTools: Boolean): PomConfiguration = new PomConfiguration(file, autoScalaTools) + def apply(validate: Boolean, scalaModuleInfo: Option[sbt.librarymanagement.ScalaModuleInfo], file: java.io.File, autoScalaTools: Boolean): PomConfiguration = new PomConfiguration(validate, scalaModuleInfo, file, autoScalaTools) + def apply(validate: Boolean, scalaModuleInfo: sbt.librarymanagement.ScalaModuleInfo, file: java.io.File, autoScalaTools: Boolean): PomConfiguration = new PomConfiguration(validate, Option(scalaModuleInfo), file, autoScalaTools) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PomConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PomConfigurationFormats.scala new file mode 100644 index 000000000..229957379 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PomConfigurationFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait PomConfigurationFormats { self: sbt.librarymanagement.ScalaModuleInfoFormats with sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val PomConfigurationFormat: JsonFormat[sbt.librarymanagement.PomConfiguration] = new JsonFormat[sbt.librarymanagement.PomConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.PomConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val validate = unbuilder.readField[Boolean]("validate") + val scalaModuleInfo = unbuilder.readField[Option[sbt.librarymanagement.ScalaModuleInfo]]("scalaModuleInfo") + val file = unbuilder.readField[java.io.File]("file") + val autoScalaTools = unbuilder.readField[Boolean]("autoScalaTools") + unbuilder.endObject() + sbt.librarymanagement.PomConfiguration(validate, scalaModuleInfo, file, autoScalaTools) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.PomConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("validate", obj.validate) + builder.addField("scalaModuleInfo", obj.scalaModuleInfo) + builder.addField("file", obj.file) + builder.addField("autoScalaTools", obj.autoScalaTools) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PublishConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PublishConfiguration.scala new file mode 100644 index 000000000..63864882c --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PublishConfiguration.scala @@ -0,0 +1,81 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class PublishConfiguration private ( + val publishMavenStyle: Boolean, + val deliverIvyPattern: Option[String], + val status: Option[String], + val configurations: Option[scala.Vector[sbt.librarymanagement.ConfigRef]], + val resolverName: Option[String], + val artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], + val checksums: scala.Vector[String], + val logging: Option[sbt.librarymanagement.UpdateLogging], + val overwrite: Boolean) extends Serializable { + + private def this() = this(true, None, None, None, None, Vector(), Vector("sha1", "md5"), None, false) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: PublishConfiguration => (this.publishMavenStyle == x.publishMavenStyle) && (this.deliverIvyPattern == x.deliverIvyPattern) && (this.status == x.status) && (this.configurations == x.configurations) && (this.resolverName == x.resolverName) && (this.artifacts == x.artifacts) && (this.checksums == x.checksums) && (this.logging == x.logging) && (this.overwrite == x.overwrite) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.PublishConfiguration".##) + publishMavenStyle.##) + deliverIvyPattern.##) + status.##) + configurations.##) + resolverName.##) + artifacts.##) + checksums.##) + logging.##) + overwrite.##) + } + override def toString: String = { + "PublishConfiguration(" + publishMavenStyle + ", " + deliverIvyPattern + ", " + status + ", " + configurations + ", " + resolverName + ", " + artifacts + ", " + checksums + ", " + logging + ", " + overwrite + ")" + } + private[this] def copy(publishMavenStyle: Boolean = publishMavenStyle, deliverIvyPattern: Option[String] = deliverIvyPattern, status: Option[String] = status, configurations: Option[scala.Vector[sbt.librarymanagement.ConfigRef]] = configurations, resolverName: Option[String] = resolverName, artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]] = artifacts, checksums: scala.Vector[String] = checksums, logging: Option[sbt.librarymanagement.UpdateLogging] = logging, overwrite: Boolean = overwrite): PublishConfiguration = { + new PublishConfiguration(publishMavenStyle, deliverIvyPattern, status, configurations, resolverName, artifacts, checksums, logging, overwrite) + } + def withPublishMavenStyle(publishMavenStyle: Boolean): PublishConfiguration = { + copy(publishMavenStyle = publishMavenStyle) + } + def withDeliverIvyPattern(deliverIvyPattern: Option[String]): PublishConfiguration = { + copy(deliverIvyPattern = deliverIvyPattern) + } + def withDeliverIvyPattern(deliverIvyPattern: String): PublishConfiguration = { + copy(deliverIvyPattern = Option(deliverIvyPattern)) + } + def withStatus(status: Option[String]): PublishConfiguration = { + copy(status = status) + } + def withStatus(status: String): PublishConfiguration = { + copy(status = Option(status)) + } + def withConfigurations(configurations: Option[scala.Vector[sbt.librarymanagement.ConfigRef]]): PublishConfiguration = { + copy(configurations = configurations) + } + def withConfigurations(configurations: scala.Vector[sbt.librarymanagement.ConfigRef]): PublishConfiguration = { + copy(configurations = Option(configurations)) + } + def withResolverName(resolverName: Option[String]): PublishConfiguration = { + copy(resolverName = resolverName) + } + def withResolverName(resolverName: String): PublishConfiguration = { 
+ copy(resolverName = Option(resolverName)) + } + def withArtifacts(artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]]): PublishConfiguration = { + copy(artifacts = artifacts) + } + def withChecksums(checksums: scala.Vector[String]): PublishConfiguration = { + copy(checksums = checksums) + } + def withLogging(logging: Option[sbt.librarymanagement.UpdateLogging]): PublishConfiguration = { + copy(logging = logging) + } + def withLogging(logging: sbt.librarymanagement.UpdateLogging): PublishConfiguration = { + copy(logging = Option(logging)) + } + def withOverwrite(overwrite: Boolean): PublishConfiguration = { + copy(overwrite = overwrite) + } +} +object PublishConfiguration { + + def apply(): PublishConfiguration = new PublishConfiguration() + def apply(publishMavenStyle: Boolean, deliverIvyPattern: Option[String], status: Option[String], configurations: Option[scala.Vector[sbt.librarymanagement.ConfigRef]], resolverName: Option[String], artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], checksums: scala.Vector[String], logging: Option[sbt.librarymanagement.UpdateLogging], overwrite: Boolean): PublishConfiguration = new PublishConfiguration(publishMavenStyle, deliverIvyPattern, status, configurations, resolverName, artifacts, checksums, logging, overwrite) + def apply(publishMavenStyle: Boolean, deliverIvyPattern: String, status: String, configurations: scala.Vector[sbt.librarymanagement.ConfigRef], resolverName: String, artifacts: Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]], checksums: scala.Vector[String], logging: sbt.librarymanagement.UpdateLogging, overwrite: Boolean): PublishConfiguration = new PublishConfiguration(publishMavenStyle, Option(deliverIvyPattern), Option(status), Option(configurations), Option(resolverName), artifacts, checksums, Option(logging), overwrite) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/PublishConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PublishConfigurationFormats.scala new file mode 100644 index 000000000..d1fde3d18 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/PublishConfigurationFormats.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait PublishConfigurationFormats { self: sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.UpdateLoggingFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val PublishConfigurationFormat: JsonFormat[sbt.librarymanagement.PublishConfiguration] = new JsonFormat[sbt.librarymanagement.PublishConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.PublishConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val publishMavenStyle = unbuilder.readField[Boolean]("publishMavenStyle") + val deliverIvyPattern = unbuilder.readField[Option[String]]("deliverIvyPattern") + val status = unbuilder.readField[Option[String]]("status") + val configurations = unbuilder.readField[Option[scala.Vector[sbt.librarymanagement.ConfigRef]]]("configurations") + val resolverName = unbuilder.readField[Option[String]]("resolverName") + val artifacts = unbuilder.readField[Vector[scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]]]("artifacts") + val checksums = unbuilder.readField[scala.Vector[String]]("checksums") + val logging = unbuilder.readField[Option[sbt.librarymanagement.UpdateLogging]]("logging") + val overwrite = unbuilder.readField[Boolean]("overwrite") + unbuilder.endObject() + sbt.librarymanagement.PublishConfiguration(publishMavenStyle, deliverIvyPattern, status, configurations, resolverName, artifacts, checksums, logging, overwrite) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.PublishConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("publishMavenStyle", obj.publishMavenStyle) + builder.addField("deliverIvyPattern", obj.deliverIvyPattern) + builder.addField("status", obj.status) + builder.addField("configurations", obj.configurations) + builder.addField("resolverName", obj.resolverName) + builder.addField("artifacts", obj.artifacts) + builder.addField("checksums", obj.checksums) + builder.addField("logging", obj.logging) + builder.addField("overwrite", obj.overwrite) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/Resolver.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Resolver.scala new file mode 100644 index 000000000..13c57e0d6 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/Resolver.scala @@ -0,0 +1,27 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +abstract class Resolver( + val name: String) extends Serializable { + /** check for HTTP */ + private[sbt] def validateProtocol(logger: sbt.util.Logger): Boolean = false + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: Resolver => (this.name == x.name) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (17 + "sbt.librarymanagement.Resolver".##) + name.##) + } + override def toString: String = { + "Resolver(" + name + ")" + } +} +object Resolver extends sbt.librarymanagement.ResolverFunctions { + +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ResolverFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ResolverFormats.scala new file mode 100644 index 000000000..5569ef568 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ResolverFormats.scala @@ -0,0 +1,11 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement + +import _root_.sjsonnew.JsonFormat +trait ResolverFormats { self: sjsonnew.BasicJsonProtocol with sbt.librarymanagement.ChainedResolverFormats with sbt.librarymanagement.MavenRepoFormats with sbt.librarymanagement.MavenCacheFormats with sbt.librarymanagement.PatternsFormats with sbt.librarymanagement.FileConfigurationFormats with sbt.librarymanagement.FileRepositoryFormats with sbt.librarymanagement.URLRepositoryFormats with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats with sbt.librarymanagement.SshRepositoryFormats with sbt.librarymanagement.SftpRepositoryFormats => +implicit lazy val ResolverFormat: JsonFormat[sbt.librarymanagement.Resolver] = flatUnionFormat7[sbt.librarymanagement.Resolver, sbt.librarymanagement.ChainedResolver, sbt.librarymanagement.MavenRepo, sbt.librarymanagement.MavenCache, sbt.librarymanagement.FileRepository, sbt.librarymanagement.URLRepository, sbt.librarymanagement.SshRepository, sbt.librarymanagement.SftpRepository]("type") +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/RetrieveConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/RetrieveConfiguration.scala new file mode 100644 index 000000000..1aa3a2e22 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/RetrieveConfiguration.scala @@ -0,0 +1,58 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class RetrieveConfiguration private ( + val retrieveDirectory: Option[java.io.File], + val outputPattern: Option[String], + val sync: Boolean, + val configurationsToRetrieve: Option[scala.Vector[sbt.librarymanagement.ConfigRef]]) extends Serializable { + + private def this() = this(None, None, false, None) + private def this(retrieveDirectory: Option[java.io.File], outputPattern: Option[String]) = this(retrieveDirectory, outputPattern, false, None) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: RetrieveConfiguration => (this.retrieveDirectory == x.retrieveDirectory) && (this.outputPattern == x.outputPattern) && (this.sync == x.sync) && (this.configurationsToRetrieve == x.configurationsToRetrieve) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.RetrieveConfiguration".##) + retrieveDirectory.##) + outputPattern.##) + sync.##) + configurationsToRetrieve.##) + } + override def toString: String = { + "RetrieveConfiguration(" + retrieveDirectory + ", " + outputPattern + ", " + sync + ", " + configurationsToRetrieve + ")" + } + private[this] def copy(retrieveDirectory: Option[java.io.File] = retrieveDirectory, outputPattern: Option[String] = outputPattern, sync: Boolean = sync, configurationsToRetrieve: Option[scala.Vector[sbt.librarymanagement.ConfigRef]] = configurationsToRetrieve): RetrieveConfiguration = { + new RetrieveConfiguration(retrieveDirectory, outputPattern, sync, configurationsToRetrieve) + } + def withRetrieveDirectory(retrieveDirectory: Option[java.io.File]): RetrieveConfiguration = { + copy(retrieveDirectory = retrieveDirectory) + } + def withRetrieveDirectory(retrieveDirectory: java.io.File): RetrieveConfiguration = { + copy(retrieveDirectory = Option(retrieveDirectory)) + } + def withOutputPattern(outputPattern: Option[String]): RetrieveConfiguration = { + copy(outputPattern = outputPattern) + } + def withOutputPattern(outputPattern: String): RetrieveConfiguration = { + copy(outputPattern = Option(outputPattern)) + } + def withSync(sync: Boolean): RetrieveConfiguration = { + copy(sync = sync) + } + def withConfigurationsToRetrieve(configurationsToRetrieve: Option[scala.Vector[sbt.librarymanagement.ConfigRef]]): RetrieveConfiguration = { + copy(configurationsToRetrieve = configurationsToRetrieve) + } + def withConfigurationsToRetrieve(configurationsToRetrieve: scala.Vector[sbt.librarymanagement.ConfigRef]): RetrieveConfiguration = { + copy(configurationsToRetrieve = Option(configurationsToRetrieve)) + } +} +object RetrieveConfiguration { + + def apply(): RetrieveConfiguration = new RetrieveConfiguration() + def apply(retrieveDirectory: Option[java.io.File], outputPattern: Option[String]): RetrieveConfiguration = new RetrieveConfiguration(retrieveDirectory, outputPattern) + def apply(retrieveDirectory: java.io.File, outputPattern: String): RetrieveConfiguration = new RetrieveConfiguration(Option(retrieveDirectory), Option(outputPattern)) + def apply(retrieveDirectory: Option[java.io.File], outputPattern: Option[String], sync: Boolean, configurationsToRetrieve: Option[scala.Vector[sbt.librarymanagement.ConfigRef]]): RetrieveConfiguration = new RetrieveConfiguration(retrieveDirectory, outputPattern, sync, configurationsToRetrieve) + def apply(retrieveDirectory: java.io.File, outputPattern: String, sync: Boolean, configurationsToRetrieve: scala.Vector[sbt.librarymanagement.ConfigRef]): 
RetrieveConfiguration = new RetrieveConfiguration(Option(retrieveDirectory), Option(outputPattern), sync, Option(configurationsToRetrieve)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/RetrieveConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/RetrieveConfigurationFormats.scala new file mode 100644 index 000000000..9505bf2ef --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/RetrieveConfigurationFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait RetrieveConfigurationFormats { self: sbt.librarymanagement.ConfigRefFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val RetrieveConfigurationFormat: JsonFormat[sbt.librarymanagement.RetrieveConfiguration] = new JsonFormat[sbt.librarymanagement.RetrieveConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.RetrieveConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val retrieveDirectory = unbuilder.readField[Option[java.io.File]]("retrieveDirectory") + val outputPattern = unbuilder.readField[Option[String]]("outputPattern") + val sync = unbuilder.readField[Boolean]("sync") + val configurationsToRetrieve = unbuilder.readField[Option[scala.Vector[sbt.librarymanagement.ConfigRef]]]("configurationsToRetrieve") + unbuilder.endObject() + sbt.librarymanagement.RetrieveConfiguration(retrieveDirectory, outputPattern, sync, configurationsToRetrieve) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.RetrieveConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("retrieveDirectory", obj.retrieveDirectory) + builder.addField("outputPattern", obj.outputPattern) + builder.addField("sync", obj.sync) + builder.addField("configurationsToRetrieve", obj.configurationsToRetrieve) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScalaModuleInfo.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScalaModuleInfo.scala new file mode 100644 index 000000000..0e4db8751 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScalaModuleInfo.scala @@ -0,0 +1,67 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class ScalaModuleInfo private ( + val scalaFullVersion: String, + val scalaBinaryVersion: String, + val configurations: Vector[sbt.librarymanagement.Configuration], + val checkExplicit: Boolean, + val filterImplicit: Boolean, + val overrideScalaVersion: Boolean, + val scalaOrganization: String, + val scalaArtifacts: scala.Vector[String], + val platform: Option[String]) extends Serializable { + + private def this(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Vector[sbt.librarymanagement.Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean) = this(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion, sbt.librarymanagement.ScalaArtifacts.Organization, sbt.librarymanagement.ScalaArtifacts.Artifacts, None) + private def this(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Vector[sbt.librarymanagement.Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean, scalaOrganization: String, scalaArtifacts: scala.Vector[String]) = this(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion, scalaOrganization, scalaArtifacts, None) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ScalaModuleInfo => (this.scalaFullVersion == x.scalaFullVersion) && (this.scalaBinaryVersion == x.scalaBinaryVersion) && (this.configurations == x.configurations) && (this.checkExplicit == x.checkExplicit) && (this.filterImplicit == x.filterImplicit) && (this.overrideScalaVersion == x.overrideScalaVersion) && (this.scalaOrganization == x.scalaOrganization) && (this.scalaArtifacts == x.scalaArtifacts) && (this.platform == x.platform) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ScalaModuleInfo".##) + scalaFullVersion.##) + scalaBinaryVersion.##) + configurations.##) + checkExplicit.##) + filterImplicit.##) + overrideScalaVersion.##) + scalaOrganization.##) + scalaArtifacts.##) + platform.##) + } + override def toString: String = { + "ScalaModuleInfo(" + scalaFullVersion + ", " + scalaBinaryVersion + ", " + configurations + ", " + checkExplicit + ", " + filterImplicit + ", " + overrideScalaVersion + ", " + scalaOrganization + ", " + scalaArtifacts + ", " + platform + ")" + } + private[this] def copy(scalaFullVersion: String = scalaFullVersion, scalaBinaryVersion: String = scalaBinaryVersion, configurations: Vector[sbt.librarymanagement.Configuration] = configurations, checkExplicit: Boolean = checkExplicit, filterImplicit: Boolean = filterImplicit, overrideScalaVersion: Boolean = overrideScalaVersion, scalaOrganization: String = scalaOrganization, scalaArtifacts: scala.Vector[String] = scalaArtifacts, platform: Option[String] = platform): ScalaModuleInfo = { + new ScalaModuleInfo(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion, scalaOrganization, scalaArtifacts, platform) + } + def withScalaFullVersion(scalaFullVersion: String): ScalaModuleInfo = { + copy(scalaFullVersion = scalaFullVersion) + } + def withScalaBinaryVersion(scalaBinaryVersion: String): ScalaModuleInfo = { + copy(scalaBinaryVersion = scalaBinaryVersion) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.Configuration]): ScalaModuleInfo = { + 
copy(configurations = configurations) + } + def withCheckExplicit(checkExplicit: Boolean): ScalaModuleInfo = { + copy(checkExplicit = checkExplicit) + } + def withFilterImplicit(filterImplicit: Boolean): ScalaModuleInfo = { + copy(filterImplicit = filterImplicit) + } + def withOverrideScalaVersion(overrideScalaVersion: Boolean): ScalaModuleInfo = { + copy(overrideScalaVersion = overrideScalaVersion) + } + def withScalaOrganization(scalaOrganization: String): ScalaModuleInfo = { + copy(scalaOrganization = scalaOrganization) + } + def withScalaArtifacts(scalaArtifacts: scala.Vector[String]): ScalaModuleInfo = { + copy(scalaArtifacts = scalaArtifacts) + } + def withPlatform(platform: Option[String]): ScalaModuleInfo = { + copy(platform = platform) + } +} +object ScalaModuleInfo { + + def apply(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Vector[sbt.librarymanagement.Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean): ScalaModuleInfo = new ScalaModuleInfo(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion) + def apply(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Vector[sbt.librarymanagement.Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean, scalaOrganization: String, scalaArtifacts: scala.Vector[String]): ScalaModuleInfo = new ScalaModuleInfo(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion, scalaOrganization, scalaArtifacts) + def apply(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Vector[sbt.librarymanagement.Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean, scalaOrganization: String, scalaArtifacts: scala.Vector[String], platform: Option[String]): ScalaModuleInfo = new ScalaModuleInfo(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion, scalaOrganization, scalaArtifacts, platform) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScalaModuleInfoFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScalaModuleInfoFormats.scala new file mode 100644 index 000000000..2c2602605 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScalaModuleInfoFormats.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ScalaModuleInfoFormats { self: sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val ScalaModuleInfoFormat: JsonFormat[sbt.librarymanagement.ScalaModuleInfo] = new JsonFormat[sbt.librarymanagement.ScalaModuleInfo] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ScalaModuleInfo = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val scalaFullVersion = unbuilder.readField[String]("scalaFullVersion") + val scalaBinaryVersion = unbuilder.readField[String]("scalaBinaryVersion") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.Configuration]]("configurations") + val checkExplicit = unbuilder.readField[Boolean]("checkExplicit") + val filterImplicit = unbuilder.readField[Boolean]("filterImplicit") + val overrideScalaVersion = unbuilder.readField[Boolean]("overrideScalaVersion") + val scalaOrganization = unbuilder.readField[String]("scalaOrganization") + val scalaArtifacts = unbuilder.readField[scala.Vector[String]]("scalaArtifacts") + val platform = unbuilder.readField[Option[String]]("platform") + unbuilder.endObject() + sbt.librarymanagement.ScalaModuleInfo(scalaFullVersion, scalaBinaryVersion, configurations, checkExplicit, filterImplicit, overrideScalaVersion, scalaOrganization, scalaArtifacts, platform) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ScalaModuleInfo, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("scalaFullVersion", obj.scalaFullVersion) + builder.addField("scalaBinaryVersion", obj.scalaBinaryVersion) + builder.addField("configurations", obj.configurations) + builder.addField("checkExplicit", obj.checkExplicit) + builder.addField("filterImplicit", obj.filterImplicit) + builder.addField("overrideScalaVersion", obj.overrideScalaVersion) + builder.addField("scalaOrganization", obj.scalaOrganization) + builder.addField("scalaArtifacts", obj.scalaArtifacts) + builder.addField("platform", obj.platform) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScmInfo.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScmInfo.scala new file mode 100644 index 000000000..f91a5bbf1 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScmInfo.scala @@ -0,0 +1,46 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** Basic SCM information for a project module */ +final class ScmInfo private ( + val browseUrl: java.net.URI, + val connection: String, + val devConnection: Option[String]) extends Serializable { + + private def this(browseUrl: java.net.URI, connection: String) = this(browseUrl, connection, None) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ScmInfo => (this.browseUrl == x.browseUrl) && (this.connection == x.connection) && (this.devConnection == x.devConnection) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ScmInfo".##) + browseUrl.##) + connection.##) + devConnection.##) + } + override def toString: String = { + "ScmInfo(" + browseUrl + ", " + connection + ", " + devConnection + ")" + } + private[this] def copy(browseUrl: java.net.URI = browseUrl, connection: String = connection, devConnection: Option[String] = devConnection): ScmInfo = { + new ScmInfo(browseUrl, connection, devConnection) + } + def withBrowseUrl(browseUrl: java.net.URI): ScmInfo = { + copy(browseUrl = browseUrl) + } + def withConnection(connection: String): ScmInfo = { + copy(connection = connection) + } + def withDevConnection(devConnection: Option[String]): ScmInfo = { + copy(devConnection = devConnection) + } + def withDevConnection(devConnection: String): ScmInfo = { + copy(devConnection = Option(devConnection)) + } +} +object ScmInfo { + + def apply(browseUrl: java.net.URI, connection: String): ScmInfo = new ScmInfo(browseUrl, connection) + def apply(browseUrl: java.net.URI, connection: String, devConnection: Option[String]): ScmInfo = new ScmInfo(browseUrl, connection, devConnection) + def apply(browseUrl: java.net.URI, connection: String, devConnection: String): ScmInfo = new ScmInfo(browseUrl, connection, Option(devConnection)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScmInfoFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScmInfoFormats.scala new file mode 100644 index 000000000..70e7f40f0 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/ScmInfoFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ScmInfoFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val ScmInfoFormat: JsonFormat[sbt.librarymanagement.ScmInfo] = new JsonFormat[sbt.librarymanagement.ScmInfo] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ScmInfo = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val browseUrl = unbuilder.readField[java.net.URI]("browseUrl") + val connection = unbuilder.readField[String]("connection") + val devConnection = unbuilder.readField[Option[String]]("devConnection") + unbuilder.endObject() + sbt.librarymanagement.ScmInfo(browseUrl, connection, devConnection) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ScmInfo, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("browseUrl", obj.browseUrl) + builder.addField("connection", obj.connection) + builder.addField("devConnection", obj.devConnection) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SemanticSelector.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SemanticSelector.scala new file mode 100644 index 000000000..6ebc65a99 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SemanticSelector.scala @@ -0,0 +1,90 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Semantic version selector API to check if the VersionNumber satisfies + * conditions described by a semantic version selector. + * + * A `comparator` generally consists of an operator and a version specifier. + * The set of operators is + * - `<`: Less than + * - `<=`: Less than or equal to + * - `>`: Greater than + * - `>=`: Greater than or equal to + * - `=`: Equal + * If no operator is specified, `=` is assumed. + * + * If minor or patch versions are not specified, some numbers are assumed. + * - `<=1.0` is equivalent to `<1.1.0`. + * - `<1.0` is equivalent to `<1.0.0`. + * - `>=1.0` is equivalent to `>=1.0.0`. + * - `>1.0` is equivalent to `>=1.1.0`. + * - `=1.0` is equivalent to `>=1.0 <=1.0` (so `>=1.0.0 <1.1.0`). + * + * Comparators can be combined by spaces to form the intersection set of the comparators. + * For example, `>1.2.3 <4.5.6` matches versions that are `greater than 1.2.3 AND less than 4.5.6`. + * + * The (intersection) sets of comparators can be combined by ` || ` (spaces are required) to form the + * union set of the intersection sets. So the semantic selector is in disjunctive normal form. + * + * A wildcard (`x`, `X`, `*`) can be used to match any minor or patch version. + * Actually, `1.0.x` is equivalent to `=1.0` (that is equivalent to `>=1.0.0 <1.1.0`). + * + * A hyphen range like `1.2.3 - 4.5.6` matches an inclusive set of versions. + * So `1.2.3 - 4.5.6` is equivalent to `>=1.2.3 <=4.5.6`. + * Both sides of the hyphen are required and they cannot have any operators. + * For example, `>=1.2.3 - 4.5.6` is invalid. + * + * The order of versions basically follows the rule specified in https://semver.org/#spec-item-11 + * > When major, minor, and patch are equal, a pre-release version has lower precedence + * > than a normal version. Example: 1.0.0-alpha < 1.0.0.
+ * > Precedence for two pre-release versions with the same major, minor, and patch version + * > must be determined by comparing each dot separated identifier from left to right + * > until a difference is found as follows: + * > identifiers consisting of only digits are compared numerically + * > and identifiers with letters or hyphens are compared lexically in ASCII sort order. + * > Numeric identifiers always have lower precedence than non-numeric identifiers. + * > A larger set of pre-release fields has a higher precedence than a smaller set, + * > if all of the preceding identifiers are equal. + * > Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0. + * + * The differences from the original specification are the following: + * - `SemanticVersionSelector` separates the pre-release fields by hyphen instead of dot + * - hyphen cannot be used in pre-release identifiers because it is used as the separator for pre-release fields + * + * Therefore, in order to match pre-release versions like `1.0.0-beta` + * we need to explicitly specify the pre-release identifiers like `>=1.0.0-alpha`. + */ +final class SemanticSelector private ( + val selectors: Seq[sbt.internal.librarymanagement.SemSelAndChunk]) extends Serializable { + def matches(versionNumber: VersionNumber): Boolean = selectors.exists(_.matches(versionNumber)) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SemanticSelector => (this.selectors == x.selectors) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (17 + "sbt.librarymanagement.SemanticSelector".##) + selectors.##) + } + override def toString: String = { + selectors.map(_.toString).mkString(" || ") + } + private[this] def copy(selectors: Seq[sbt.internal.librarymanagement.SemSelAndChunk] = selectors): SemanticSelector = { + new SemanticSelector(selectors) + } + def withSelectors(selectors: Seq[sbt.internal.librarymanagement.SemSelAndChunk]): SemanticSelector = { + copy(selectors = selectors) + } +} +object SemanticSelector { + def apply(selector: String): SemanticSelector = { + val orChunkTokens = selector.split("\\s+\\|\\|\\s+").map(_.trim) + val orChunks = orChunkTokens.map { chunk => sbt.internal.librarymanagement.SemSelAndChunk(chunk) } + SemanticSelector(scala.collection.immutable.ArraySeq.unsafeWrapArray(orChunks)) + } + def apply(selectors: Seq[sbt.internal.librarymanagement.SemSelAndChunk]): SemanticSelector = new SemanticSelector(selectors) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SftpRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SftpRepository.scala new file mode 100644 index 000000000..b20a2f25b --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SftpRepository.scala @@ -0,0 +1,43 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** sbt interface for an Ivy repository over sftp. More convenient construction is done using Resolver.sftp.
*/ +final class SftpRepository private ( + name: String, + patterns: sbt.librarymanagement.Patterns, + connection: sbt.librarymanagement.SshConnection) extends sbt.librarymanagement.SshBasedRepository(name, patterns, connection) with sbt.librarymanagement.SftpRepositoryExtra with Serializable { + def this(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns) = + this(name, patterns, connection) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SftpRepository => (this.name == x.name) && (this.patterns == x.patterns) && (this.connection == x.connection) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.SftpRepository".##) + name.##) + patterns.##) + connection.##) + } + override def toString: String = { + "SftpRepository(" + name + ", " + patterns + ", " + connection + ")" + } + private[this] def copy(name: String = name, patterns: sbt.librarymanagement.Patterns = patterns, connection: sbt.librarymanagement.SshConnection = connection): SftpRepository = { + new SftpRepository(name, patterns, connection) + } + def withName(name: String): SftpRepository = { + copy(name = name) + } + def withPatterns(patterns: sbt.librarymanagement.Patterns): SftpRepository = { + copy(patterns = patterns) + } + def withConnection(connection: sbt.librarymanagement.SshConnection): SftpRepository = { + copy(connection = connection) + } +} +object SftpRepository { + def apply(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns) = + new SftpRepository(name, patterns, connection) + def apply(name: String, patterns: sbt.librarymanagement.Patterns, connection: sbt.librarymanagement.SshConnection): SftpRepository = new SftpRepository(name, patterns, connection) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SftpRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SftpRepositoryFormats.scala new file mode 100644 index 000000000..9441ada31 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SftpRepositoryFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait SftpRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats => +implicit lazy val SftpRepositoryFormat: JsonFormat[sbt.librarymanagement.SftpRepository] = new JsonFormat[sbt.librarymanagement.SftpRepository] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.SftpRepository = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val patterns = unbuilder.readField[sbt.librarymanagement.Patterns]("patterns") + val connection = unbuilder.readField[sbt.librarymanagement.SshConnection]("connection") + unbuilder.endObject() + sbt.librarymanagement.SftpRepository(name, patterns, connection) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.SftpRepository, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("patterns", obj.patterns) + builder.addField("connection", obj.connection) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshAuthentication.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshAuthentication.scala new file mode 100644 index 000000000..eaeb29bf5 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshAuthentication.scala @@ -0,0 +1,25 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +abstract class SshAuthentication() extends Serializable { + + + + +override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case _: SshAuthentication => true + case _ => false +}) +override def hashCode: Int = { + 37 * (17 + "sbt.librarymanagement.SshAuthentication".##) +} +override def toString: String = { + "SshAuthentication()" +} +} +object SshAuthentication { + +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshAuthenticationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshAuthenticationFormats.scala new file mode 100644 index 000000000..2783143ec --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshAuthenticationFormats.scala @@ -0,0 +1,11 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement + +import _root_.sjsonnew.JsonFormat +trait SshAuthenticationFormats { self: sjsonnew.BasicJsonProtocol with sbt.librarymanagement.PasswordAuthenticationFormats with sbt.librarymanagement.KeyFileAuthenticationFormats => +implicit lazy val SshAuthenticationFormat: JsonFormat[sbt.librarymanagement.SshAuthentication] = flatUnionFormat2[sbt.librarymanagement.SshAuthentication, sbt.librarymanagement.PasswordAuthentication, sbt.librarymanagement.KeyFileAuthentication]("type") +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshBasedRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshBasedRepository.scala new file mode 100644 index 000000000..ec3af1b7a --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshBasedRepository.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** sbt interface for an Ivy ssh-based repository (ssh and sftp). Requires the Jsch library.. */ +abstract class SshBasedRepository( + name: String, + patterns: sbt.librarymanagement.Patterns, + val connection: sbt.librarymanagement.SshConnection) extends sbt.librarymanagement.PatternsBasedRepository(name, patterns) with sbt.librarymanagement.SshBasedRepositoryExtra with Serializable { + + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SshBasedRepository => (this.name == x.name) && (this.patterns == x.patterns) && (this.connection == x.connection) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.SshBasedRepository".##) + name.##) + patterns.##) + connection.##) + } + override def toString: String = { + "SshBasedRepository(" + name + ", " + patterns + ", " + connection + ")" + } +} +object SshBasedRepository { + +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshBasedRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshBasedRepositoryFormats.scala new file mode 100644 index 000000000..8143d7239 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshBasedRepositoryFormats.scala @@ -0,0 +1,11 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement + +import _root_.sjsonnew.JsonFormat +trait SshBasedRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats with sbt.librarymanagement.SshRepositoryFormats with sbt.librarymanagement.SftpRepositoryFormats => +implicit lazy val SshBasedRepositoryFormat: JsonFormat[sbt.librarymanagement.SshBasedRepository] = flatUnionFormat2[sbt.librarymanagement.SshBasedRepository, sbt.librarymanagement.SshRepository, sbt.librarymanagement.SftpRepository]("type") +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshConnection.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshConnection.scala new file mode 100644 index 000000000..68161ac8c --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshConnection.scala @@ -0,0 +1,50 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class SshConnection private ( + val authentication: Option[sbt.librarymanagement.SshAuthentication], + val hostname: Option[String], + val port: Option[Int]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SshConnection => (this.authentication == x.authentication) && (this.hostname == x.hostname) && (this.port == x.port) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.SshConnection".##) + authentication.##) + hostname.##) + port.##) + } + override def toString: String = { + "SshConnection(" + authentication + ", " + hostname + ", " + port + ")" + } + private[this] def copy(authentication: Option[sbt.librarymanagement.SshAuthentication] = authentication, hostname: Option[String] = hostname, port: Option[Int] = port): SshConnection = { + new SshConnection(authentication, hostname, port) + } + def withAuthentication(authentication: Option[sbt.librarymanagement.SshAuthentication]): SshConnection = { + copy(authentication = authentication) + } + def withAuthentication(authentication: sbt.librarymanagement.SshAuthentication): SshConnection = { + copy(authentication = Option(authentication)) + } + def withHostname(hostname: Option[String]): SshConnection = { + copy(hostname = hostname) + } + def withHostname(hostname: String): SshConnection = { + copy(hostname = Option(hostname)) + } + def withPort(port: Option[Int]): SshConnection = { + copy(port = port) + } + def withPort(port: Int): SshConnection = { + copy(port = Option(port)) + } +} +object SshConnection { + + def apply(authentication: Option[sbt.librarymanagement.SshAuthentication], hostname: Option[String], port: Option[Int]): SshConnection = new SshConnection(authentication, hostname, port) + def apply(authentication: sbt.librarymanagement.SshAuthentication, hostname: String, port: Int): SshConnection = new SshConnection(Option(authentication), Option(hostname), Option(port)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshConnectionFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshConnectionFormats.scala new file mode 100644 index 000000000..9daea2940 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshConnectionFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait SshConnectionFormats { self: sbt.librarymanagement.SshAuthenticationFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val SshConnectionFormat: JsonFormat[sbt.librarymanagement.SshConnection] = new JsonFormat[sbt.librarymanagement.SshConnection] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.SshConnection = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val authentication = unbuilder.readField[Option[sbt.librarymanagement.SshAuthentication]]("authentication") + val hostname = unbuilder.readField[Option[String]]("hostname") + val port = unbuilder.readField[Option[Int]]("port") + unbuilder.endObject() + sbt.librarymanagement.SshConnection(authentication, hostname, port) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.SshConnection, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("authentication", obj.authentication) + builder.addField("hostname", obj.hostname) + builder.addField("port", obj.port) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshRepository.scala new file mode 100644 index 000000000..0c7a4c5ef --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshRepository.scala @@ -0,0 +1,51 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** sbt interface for an Ivy repository over ssh. More convenient construction is done using Resolver.ssh. 
*/ +final class SshRepository private ( + name: String, + patterns: sbt.librarymanagement.Patterns, + connection: sbt.librarymanagement.SshConnection, + val publishPermissions: Option[String]) extends sbt.librarymanagement.SshBasedRepository(name, patterns, connection) with sbt.librarymanagement.SshRepositoryExtra with Serializable { + def this(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns, publishPermissions: Option[String]) = + this(name, patterns, connection, publishPermissions) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: SshRepository => (this.name == x.name) && (this.patterns == x.patterns) && (this.connection == x.connection) && (this.publishPermissions == x.publishPermissions) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.SshRepository".##) + name.##) + patterns.##) + connection.##) + publishPermissions.##) + } + override def toString: String = { + "SshRepository(" + name + ", " + patterns + ", " + connection + ", " + publishPermissions + ")" + } + private[this] def copy(name: String = name, patterns: sbt.librarymanagement.Patterns = patterns, connection: sbt.librarymanagement.SshConnection = connection, publishPermissions: Option[String] = publishPermissions): SshRepository = { + new SshRepository(name, patterns, connection, publishPermissions) + } + def withName(name: String): SshRepository = { + copy(name = name) + } + def withPatterns(patterns: sbt.librarymanagement.Patterns): SshRepository = { + copy(patterns = patterns) + } + def withConnection(connection: sbt.librarymanagement.SshConnection): SshRepository = { + copy(connection = connection) + } + def withPublishPermissions(publishPermissions: Option[String]): SshRepository = { + copy(publishPermissions = publishPermissions) + } + def withPublishPermissions(publishPermissions: String): SshRepository = { + copy(publishPermissions = Option(publishPermissions)) + } +} +object SshRepository { + def apply(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns, publishPermissions: Option[String]) = + new SshRepository(name, patterns, connection, publishPermissions) + def apply(name: String, patterns: sbt.librarymanagement.Patterns, connection: sbt.librarymanagement.SshConnection, publishPermissions: Option[String]): SshRepository = new SshRepository(name, patterns, connection, publishPermissions) + def apply(name: String, patterns: sbt.librarymanagement.Patterns, connection: sbt.librarymanagement.SshConnection, publishPermissions: String): SshRepository = new SshRepository(name, patterns, connection, Option(publishPermissions)) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshRepositoryFormats.scala new file mode 100644 index 000000000..861d359b1 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/SshRepositoryFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
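Putting the pieces above together (SshConnection, SshBasedRepository, and the SftpRepository/SshRepository factories), a repository of this family can be assembled directly from the generated apply methods. A minimal sketch: the PasswordAuthentication(user, Some(password)) shape and Resolver.ivyStylePatterns are assumptions here, and in practice the Resolver.ssh / Resolver.sftp helpers mentioned in the docs are the more convenient entry points:

  import sbt.librarymanagement._

  // Assumed constructor shape for the PasswordAuthentication subtype of SshAuthentication.
  val auth: SshAuthentication = PasswordAuthentication("deploy", Some("secret"))
  val connection = SshConnection(auth, "repo.example.com", 22)

  // Ivy-style layout; Resolver.ivyStylePatterns is assumed to provide the Patterns value.
  val sftpRepo = SftpRepository("company-sftp", Resolver.ivyStylePatterns, connection)
  val sshRepo  = SshRepository("company-ssh", Resolver.ivyStylePatterns, connection, publishPermissions = Some("0644"))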
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait SshRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats => +implicit lazy val SshRepositoryFormat: JsonFormat[sbt.librarymanagement.SshRepository] = new JsonFormat[sbt.librarymanagement.SshRepository] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.SshRepository = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val patterns = unbuilder.readField[sbt.librarymanagement.Patterns]("patterns") + val connection = unbuilder.readField[sbt.librarymanagement.SshConnection]("connection") + val publishPermissions = unbuilder.readField[Option[String]]("publishPermissions") + unbuilder.endObject() + sbt.librarymanagement.SshRepository(name, patterns, connection, publishPermissions) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.SshRepository, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("patterns", obj.patterns) + builder.addField("connection", obj.connection) + builder.addField("publishPermissions", obj.publishPermissions) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/URLRepository.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/URLRepository.scala new file mode 100644 index 000000000..6bdb70fd2 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/URLRepository.scala @@ -0,0 +1,41 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class URLRepository private ( + name: String, + patterns: sbt.librarymanagement.Patterns, + val allowInsecureProtocol: Boolean) extends sbt.librarymanagement.PatternsBasedRepository(name, patterns) with Serializable { + private[sbt] override def validateProtocol(logger: sbt.util.Logger): Boolean = Resolver.validateURLRepository(this, logger) + private def this(name: String, patterns: sbt.librarymanagement.Patterns) = this(name, patterns, false) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: URLRepository => (this.name == x.name) && (this.patterns == x.patterns) && (this.allowInsecureProtocol == x.allowInsecureProtocol) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.URLRepository".##) + name.##) + patterns.##) + allowInsecureProtocol.##) + } + override def toString: String = { + "URLRepository(" + name + ", " + patterns + ", " + allowInsecureProtocol + ")" + } + private[this] def copy(name: String = name, patterns: sbt.librarymanagement.Patterns = patterns, allowInsecureProtocol: Boolean = allowInsecureProtocol): URLRepository = { + new URLRepository(name, patterns, allowInsecureProtocol) + } + def withName(name: String): URLRepository = { + copy(name = name) + } + def withPatterns(patterns: sbt.librarymanagement.Patterns): URLRepository = { + copy(patterns = patterns) + } + def withAllowInsecureProtocol(allowInsecureProtocol: Boolean): URLRepository = { + copy(allowInsecureProtocol = allowInsecureProtocol) + } +} +object URLRepository { + + def apply(name: String, patterns: sbt.librarymanagement.Patterns): URLRepository = new URLRepository(name, patterns) + def apply(name: String, patterns: sbt.librarymanagement.Patterns, allowInsecureProtocol: Boolean): URLRepository = new URLRepository(name, patterns, allowInsecureProtocol) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/URLRepositoryFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/URLRepositoryFormats.scala new file mode 100644 index 000000000..d9bb6f32b --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/URLRepositoryFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
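URLRepository above follows the same patterns-based shape, with the added allowInsecureProtocol flag feeding the validateProtocol check. A small sketch, assuming Resolver.mavenStylePatterns as the usual convenience value for the patterns argument:

  import sbt.librarymanagement._

  // Repositories served over plain HTTP must opt in explicitly;
  // otherwise validateProtocol flags the insecure protocol.
  val internalProxy = URLRepository("internal-proxy", Resolver.mavenStylePatterns)
    .withAllowInsecureProtocol(true)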
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait URLRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol => +implicit lazy val URLRepositoryFormat: JsonFormat[sbt.librarymanagement.URLRepository] = new JsonFormat[sbt.librarymanagement.URLRepository] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.URLRepository = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + val patterns = unbuilder.readField[sbt.librarymanagement.Patterns]("patterns") + val allowInsecureProtocol = unbuilder.readField[Boolean]("allowInsecureProtocol") + unbuilder.endObject() + sbt.librarymanagement.URLRepository(name, patterns, allowInsecureProtocol) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.URLRepository, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.addField("patterns", obj.patterns) + builder.addField("allowInsecureProtocol", obj.allowInsecureProtocol) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateConfiguration.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateConfiguration.scala new file mode 100644 index 000000000..69b7e6968 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateConfiguration.scala @@ -0,0 +1,80 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * @param retrieveManaged If set to some RetrieveConfiguration, this enables retrieving dependencies to the specified directory. + Otherwise, dependencies are used directly from the cache. + * @param missingOk If set to true, it ignores when artifacts are missing. + This setting could be uses when retrieving source/javadocs jars opportunistically. + * @param logging Logging setting used specifially for library management. + * @param logicalClock The clock that may be used for caching. + * @param metadataDirectory The base directory that may be used to store metadata. 
+ */ +final class UpdateConfiguration private ( + val retrieveManaged: Option[sbt.librarymanagement.RetrieveConfiguration], + val missingOk: Boolean, + val logging: sbt.librarymanagement.UpdateLogging, + val logicalClock: sbt.librarymanagement.LogicalClock, + val metadataDirectory: Option[java.io.File], + val artifactFilter: Option[sbt.librarymanagement.ArtifactTypeFilter], + val offline: Boolean, + val frozen: Boolean) extends Serializable { + + private def this() = this(None, false, sbt.librarymanagement.UpdateLogging.Default, sbt.librarymanagement.LogicalClock.unknown, None, None, false, false) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: UpdateConfiguration => (this.retrieveManaged == x.retrieveManaged) && (this.missingOk == x.missingOk) && (this.logging == x.logging) && (this.logicalClock == x.logicalClock) && (this.metadataDirectory == x.metadataDirectory) && (this.artifactFilter == x.artifactFilter) && (this.offline == x.offline) && (this.frozen == x.frozen) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.UpdateConfiguration".##) + retrieveManaged.##) + missingOk.##) + logging.##) + logicalClock.##) + metadataDirectory.##) + artifactFilter.##) + offline.##) + frozen.##) + } + override def toString: String = { + "UpdateConfiguration(" + retrieveManaged + ", " + missingOk + ", " + logging + ", " + logicalClock + ", " + metadataDirectory + ", " + artifactFilter + ", " + offline + ", " + frozen + ")" + } + private[this] def copy(retrieveManaged: Option[sbt.librarymanagement.RetrieveConfiguration] = retrieveManaged, missingOk: Boolean = missingOk, logging: sbt.librarymanagement.UpdateLogging = logging, logicalClock: sbt.librarymanagement.LogicalClock = logicalClock, metadataDirectory: Option[java.io.File] = metadataDirectory, artifactFilter: Option[sbt.librarymanagement.ArtifactTypeFilter] = artifactFilter, offline: Boolean = offline, frozen: Boolean = frozen): UpdateConfiguration = { + new UpdateConfiguration(retrieveManaged, missingOk, logging, logicalClock, metadataDirectory, artifactFilter, offline, frozen) + } + def withRetrieveManaged(retrieveManaged: Option[sbt.librarymanagement.RetrieveConfiguration]): UpdateConfiguration = { + copy(retrieveManaged = retrieveManaged) + } + def withRetrieveManaged(retrieveManaged: sbt.librarymanagement.RetrieveConfiguration): UpdateConfiguration = { + copy(retrieveManaged = Option(retrieveManaged)) + } + def withMissingOk(missingOk: Boolean): UpdateConfiguration = { + copy(missingOk = missingOk) + } + def withLogging(logging: sbt.librarymanagement.UpdateLogging): UpdateConfiguration = { + copy(logging = logging) + } + def withLogicalClock(logicalClock: sbt.librarymanagement.LogicalClock): UpdateConfiguration = { + copy(logicalClock = logicalClock) + } + def withMetadataDirectory(metadataDirectory: Option[java.io.File]): UpdateConfiguration = { + copy(metadataDirectory = metadataDirectory) + } + def withMetadataDirectory(metadataDirectory: java.io.File): UpdateConfiguration = { + copy(metadataDirectory = Option(metadataDirectory)) + } + def withArtifactFilter(artifactFilter: Option[sbt.librarymanagement.ArtifactTypeFilter]): UpdateConfiguration = { + copy(artifactFilter = artifactFilter) + } + def withArtifactFilter(artifactFilter: sbt.librarymanagement.ArtifactTypeFilter): UpdateConfiguration = { + copy(artifactFilter = Option(artifactFilter)) + } + def withOffline(offline: Boolean): UpdateConfiguration 
= { + copy(offline = offline) + } + def withFrozen(frozen: Boolean): UpdateConfiguration = { + copy(frozen = frozen) + } +} +object UpdateConfiguration { + + def apply(): UpdateConfiguration = new UpdateConfiguration() + def apply(retrieveManaged: Option[sbt.librarymanagement.RetrieveConfiguration], missingOk: Boolean, logging: sbt.librarymanagement.UpdateLogging, logicalClock: sbt.librarymanagement.LogicalClock, metadataDirectory: Option[java.io.File], artifactFilter: Option[sbt.librarymanagement.ArtifactTypeFilter], offline: Boolean, frozen: Boolean): UpdateConfiguration = new UpdateConfiguration(retrieveManaged, missingOk, logging, logicalClock, metadataDirectory, artifactFilter, offline, frozen) + def apply(retrieveManaged: sbt.librarymanagement.RetrieveConfiguration, missingOk: Boolean, logging: sbt.librarymanagement.UpdateLogging, logicalClock: sbt.librarymanagement.LogicalClock, metadataDirectory: java.io.File, artifactFilter: sbt.librarymanagement.ArtifactTypeFilter, offline: Boolean, frozen: Boolean): UpdateConfiguration = new UpdateConfiguration(Option(retrieveManaged), missingOk, logging, logicalClock, Option(metadataDirectory), Option(artifactFilter), offline, frozen) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateConfigurationFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateConfigurationFormats.scala new file mode 100644 index 000000000..70f7d3994 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateConfigurationFormats.scala @@ -0,0 +1,41 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait UpdateConfigurationFormats { self: sbt.librarymanagement.RetrieveConfigurationFormats with sbt.librarymanagement.ConfigRefFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.UpdateLoggingFormats with sbt.internal.librarymanagement.formats.LogicalClockFormats with sbt.librarymanagement.ArtifactTypeFilterFormats => +implicit lazy val UpdateConfigurationFormat: JsonFormat[sbt.librarymanagement.UpdateConfiguration] = new JsonFormat[sbt.librarymanagement.UpdateConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.UpdateConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val retrieveManaged = unbuilder.readField[Option[sbt.librarymanagement.RetrieveConfiguration]]("retrieveManaged") + val missingOk = unbuilder.readField[Boolean]("missingOk") + val logging = unbuilder.readField[sbt.librarymanagement.UpdateLogging]("logging") + val logicalClock = unbuilder.readField[sbt.librarymanagement.LogicalClock]("logicalClock") + val metadataDirectory = unbuilder.readField[Option[java.io.File]]("metadataDirectory") + val artifactFilter = unbuilder.readField[Option[sbt.librarymanagement.ArtifactTypeFilter]]("artifactFilter") + val offline = unbuilder.readField[Boolean]("offline") + val frozen = unbuilder.readField[Boolean]("frozen") + unbuilder.endObject() + sbt.librarymanagement.UpdateConfiguration(retrieveManaged, missingOk, logging, logicalClock, metadataDirectory, artifactFilter, offline, frozen) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.UpdateConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + 
builder.addField("retrieveManaged", obj.retrieveManaged) + builder.addField("missingOk", obj.missingOk) + builder.addField("logging", obj.logging) + builder.addField("logicalClock", obj.logicalClock) + builder.addField("metadataDirectory", obj.metadataDirectory) + builder.addField("artifactFilter", obj.artifactFilter) + builder.addField("offline", obj.offline) + builder.addField("frozen", obj.frozen) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateLogging.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateLogging.scala new file mode 100644 index 000000000..ee1db908a --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateLogging.scala @@ -0,0 +1,22 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Configures logging during an 'update'. `level` determines the amount of other information logged. + * `Full` is the default and logs the most. + * `DownloadOnly` only logs what is downloaded. + * `Quiet` only displays errors. + * `Default` uses the current log level of `update` task. + */ +sealed abstract class UpdateLogging extends Serializable +object UpdateLogging { + + + case object Full extends UpdateLogging + case object DownloadOnly extends UpdateLogging + case object Quiet extends UpdateLogging + case object Default extends UpdateLogging +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateLoggingFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateLoggingFormats.scala new file mode 100644 index 000000000..8da04c8ee --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateLoggingFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait UpdateLoggingFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val UpdateLoggingFormat: JsonFormat[sbt.librarymanagement.UpdateLogging] = new JsonFormat[sbt.librarymanagement.UpdateLogging] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.UpdateLogging = { + __jsOpt match { + case Some(__js) => + unbuilder.readString(__js) match { + case "Full" => sbt.librarymanagement.UpdateLogging.Full + case "DownloadOnly" => sbt.librarymanagement.UpdateLogging.DownloadOnly + case "Quiet" => sbt.librarymanagement.UpdateLogging.Quiet + case "Default" => sbt.librarymanagement.UpdateLogging.Default + } + case None => + deserializationError("Expected JsString but found None") + } + } + override def write[J](obj: sbt.librarymanagement.UpdateLogging, builder: Builder[J]): Unit = { + val str = obj match { + case sbt.librarymanagement.UpdateLogging.Full => "Full" + case sbt.librarymanagement.UpdateLogging.DownloadOnly => "DownloadOnly" + case sbt.librarymanagement.UpdateLogging.Quiet => "Quiet" + case sbt.librarymanagement.UpdateLogging.Default => "Default" + } + builder.writeString(str) + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReport.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReport.scala new file mode 100644 index 000000000..3e8bcbe1a --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReport.scala @@ -0,0 +1,52 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +/** + * Provides information about dependency resolution. + * It does not include information about evicted modules, only about the modules ultimately selected by the conflict manager. + * This means that for a given configuration, there should only be one revision for a given organization and module name. + * @param cachedDescriptor the location of the resolved module descriptor in the cache + * @param configurations a sequence containing one report for each configuration resolved. 
+ * @param stats stats information about the update that produced this report + */ +final class UpdateReport private ( + val cachedDescriptor: java.io.File, + val configurations: Vector[sbt.librarymanagement.ConfigurationReport], + val stats: sbt.librarymanagement.UpdateStats, + val stamps: Map[String, Long]) extends sbt.librarymanagement.UpdateReportExtra with Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: UpdateReport => (this.cachedDescriptor == x.cachedDescriptor) && (this.configurations == x.configurations) && (this.stats == x.stats) && (this.stamps == x.stamps) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.UpdateReport".##) + cachedDescriptor.##) + configurations.##) + stats.##) + stamps.##) + } + override def toString: String = { + "Update report:\n\t" + stats + "\n" + configurations.mkString + } + private[this] def copy(cachedDescriptor: java.io.File = cachedDescriptor, configurations: Vector[sbt.librarymanagement.ConfigurationReport] = configurations, stats: sbt.librarymanagement.UpdateStats = stats, stamps: Map[String, Long] = stamps): UpdateReport = { + new UpdateReport(cachedDescriptor, configurations, stats, stamps) + } + def withCachedDescriptor(cachedDescriptor: java.io.File): UpdateReport = { + copy(cachedDescriptor = cachedDescriptor) + } + def withConfigurations(configurations: Vector[sbt.librarymanagement.ConfigurationReport]): UpdateReport = { + copy(configurations = configurations) + } + def withStats(stats: sbt.librarymanagement.UpdateStats): UpdateReport = { + copy(stats = stats) + } + def withStamps(stamps: Map[String, Long]): UpdateReport = { + copy(stamps = stamps) + } +} +object UpdateReport { + + def apply(cachedDescriptor: java.io.File, configurations: Vector[sbt.librarymanagement.ConfigurationReport], stats: sbt.librarymanagement.UpdateStats, stamps: Map[String, Long]): UpdateReport = new UpdateReport(cachedDescriptor, configurations, stats, stamps) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReportFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReportFormats.scala new file mode 100644 index 000000000..36ea7c27a --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReportFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
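Given the fields above, an UpdateReport can be walked directly to recover the resolved files, one ConfigurationReport per configuration. A small sketch; ConfigurationReport.modules and ModuleReport.artifacts are the repeated fields defined later in the contraband schema:

  import java.io.File
  import sbt.librarymanagement._

  // All artifact files selected by the conflict manager, grouped by configuration.
  def resolvedFiles(report: UpdateReport): Map[ConfigRef, Vector[File]] =
    report.configurations.map { configReport =>
      val files = configReport.modules.toVector.flatMap(_.artifacts).map { case (_, file) => file }
      configReport.configuration -> files
    }.toMap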
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait UpdateReportFormats { self: sbt.librarymanagement.ConfigurationReportFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ModuleReportFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats with sbt.librarymanagement.OrganizationArtifactReportFormats with sbt.librarymanagement.UpdateStatsFormats => +implicit lazy val UpdateReportFormat: JsonFormat[sbt.librarymanagement.UpdateReport] = new JsonFormat[sbt.librarymanagement.UpdateReport] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.UpdateReport = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val cachedDescriptor = unbuilder.readField[java.io.File]("cachedDescriptor") + val configurations = unbuilder.readField[Vector[sbt.librarymanagement.ConfigurationReport]]("configurations") + val stats = unbuilder.readField[sbt.librarymanagement.UpdateStats]("stats") + val stamps = unbuilder.readField[Map[String, Long]]("stamps") + unbuilder.endObject() + sbt.librarymanagement.UpdateReport(cachedDescriptor, configurations, stats, stamps) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.UpdateReport, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("cachedDescriptor", obj.cachedDescriptor) + builder.addField("configurations", obj.configurations) + builder.addField("stats", obj.stats) + builder.addField("stamps", obj.stamps) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReportLiteFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReportLiteFormats.scala new file mode 100644 index 000000000..f28ed6713 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateReportLiteFormats.scala @@ -0,0 +1,27 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait UpdateReportLiteFormats { self: sbt.librarymanagement.ConfigurationReportLiteFormats with sbt.librarymanagement.OrganizationArtifactReportFormats with sbt.librarymanagement.ModuleReportFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats => +implicit lazy val UpdateReportLiteFormat: JsonFormat[sbt.internal.librarymanagement.UpdateReportLite] = new JsonFormat[sbt.internal.librarymanagement.UpdateReportLite] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.librarymanagement.UpdateReportLite = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val configurations = unbuilder.readField[Vector[sbt.internal.librarymanagement.ConfigurationReportLite]]("configurations") + unbuilder.endObject() + sbt.internal.librarymanagement.UpdateReportLite(configurations) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.internal.librarymanagement.UpdateReportLite, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("configurations", obj.configurations) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateStats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateStats.scala new file mode 100644 index 000000000..a2ea73ced --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateStats.scala @@ -0,0 +1,44 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +final class UpdateStats private ( + val resolveTime: Long, + val downloadTime: Long, + val downloadSize: Long, + val cached: Boolean) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: UpdateStats => (this.resolveTime == x.resolveTime) && (this.downloadTime == x.downloadTime) && (this.downloadSize == x.downloadSize) && (this.cached == x.cached) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.UpdateStats".##) + resolveTime.##) + downloadTime.##) + downloadSize.##) + cached.##) + } + override def toString: String = { + Seq("Resolve time: " + resolveTime + " ms", "Download time: " + downloadTime + " ms", "Download size: " + downloadSize + " bytes").mkString(", ") + } + private[this] def copy(resolveTime: Long = resolveTime, downloadTime: Long = downloadTime, downloadSize: Long = downloadSize, cached: Boolean = cached): UpdateStats = { + new UpdateStats(resolveTime, downloadTime, downloadSize, cached) + } + def withResolveTime(resolveTime: Long): UpdateStats = { + copy(resolveTime = resolveTime) + } + def withDownloadTime(downloadTime: Long): UpdateStats = { + copy(downloadTime = downloadTime) + } + def withDownloadSize(downloadSize: Long): UpdateStats = { + copy(downloadSize = downloadSize) + } + def withCached(cached: Boolean): UpdateStats = { + copy(cached = cached) + } +} +object UpdateStats { + + def apply(resolveTime: Long, downloadTime: Long, downloadSize: Long, cached: Boolean): UpdateStats = new UpdateStats(resolveTime, downloadTime, downloadSize, cached) +} diff --git a/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateStatsFormats.scala b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateStatsFormats.scala new file mode 100644 index 000000000..7457c7a23 --- /dev/null +++ b/lm-core/src/main/contraband-scala/sbt/librarymanagement/UpdateStatsFormats.scala @@ -0,0 +1,33 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
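Each *Formats trait in this diff contributes an implicit JsonFormat that is aggregated into LibraryManagementCodec (the fullCodec named in the contraband schema below), so any of these types can be round-tripped through sjson-new. A sketch using UpdateStats; the scalajson backend is an assumption, and any sjson-new support module works the same way:

  import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
  import sbt.librarymanagement.LibraryManagementCodec._
  import sbt.librarymanagement.UpdateStats

  val stats = UpdateStats(resolveTime = 1200L, downloadTime = 3400L, downloadSize = 1048576L, cached = false)

  // Serialize with the UpdateStatsFormat below, then parse and convert back.
  val json   = Converter.toJsonUnsafe(stats)
  val text   = CompactPrinter(json)
  val parsed = Parser.parseUnsafe(text)
  val again  = Converter.fromJsonUnsafe[UpdateStats](parsed)
  assert(again == stats)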
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait UpdateStatsFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val UpdateStatsFormat: JsonFormat[sbt.librarymanagement.UpdateStats] = new JsonFormat[sbt.librarymanagement.UpdateStats] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.UpdateStats = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val resolveTime = unbuilder.readField[Long]("resolveTime") + val downloadTime = unbuilder.readField[Long]("downloadTime") + val downloadSize = unbuilder.readField[Long]("downloadSize") + val cached = unbuilder.readField[Boolean]("cached") + unbuilder.endObject() + sbt.librarymanagement.UpdateStats(resolveTime, downloadTime, downloadSize, cached) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.UpdateStats, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("resolveTime", obj.resolveTime) + builder.addField("downloadTime", obj.downloadTime) + builder.addField("downloadSize", obj.downloadSize) + builder.addField("cached", obj.cached) + builder.endObject() + } +} +} diff --git a/lm-core/src/main/contraband/librarymanagement.json b/lm-core/src/main/contraband/librarymanagement.json new file mode 100644 index 000000000..37ede5b91 --- /dev/null +++ b/lm-core/src/main/contraband/librarymanagement.json @@ -0,0 +1,855 @@ +{ + "codecNamespace": "sbt.librarymanagement", + "fullCodec": "LibraryManagementCodec", + "types": [ + { + "name": "UpdateConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { + "name": "retrieveManaged", + "doc": [ + "If set to some RetrieveConfiguration, this enables retrieving dependencies to the specified directory.", + "Otherwise, dependencies are used directly from the cache." + ], + "type": "sbt.librarymanagement.RetrieveConfiguration?", + "default": "None", + "since": "0.0.1" + }, + { + "name": "missingOk", + "doc": [ + "If set to true, it ignores when artifacts are missing.", + "This setting could be uses when retrieving source/javadocs jars opportunistically." + ], + "type": "boolean", + "default": "false", + "since": "0.0.1" + }, + { + "name": "logging", + "doc": [ + "Logging setting used specifially for library management." + ], + "type": "sbt.librarymanagement.UpdateLogging", + "default": "sbt.librarymanagement.UpdateLogging.Default", + "since": "0.0.1" + }, + { + "name": "logicalClock", + "doc": [ + "The clock that may be used for caching." + ], + "type": "sbt.librarymanagement.LogicalClock", + "default": "sbt.librarymanagement.LogicalClock.unknown", + "since": "0.0.1" + }, + { + "name": "metadataDirectory", + "doc": [ + "The base directory that may be used to store metadata." 
+ ], + "type": "java.io.File?", + "default": "None", + "since": "0.0.1" + }, + { "name": "artifactFilter", "type": "sbt.librarymanagement.ArtifactTypeFilter?", "default": "None", "since": "0.0.1" }, + { + "name": "offline", + "type": "boolean", + "default": "false", + "since": "0.0.1" + }, + { + "name": "frozen", + "type": "boolean", + "default": "false", + "since": "0.0.1" + } + ] + }, + { + "name": "RetrieveConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "retrieveDirectory", "type": "java.io.File?", "default": "None", "since": "0.0.1" }, + { "name": "outputPattern", "type": "String?", "default": "None", "since": "0.0.1" }, + { "name": "sync", "type": "boolean", "default": "false", "since": "0.0.2" }, + { "name": "configurationsToRetrieve", "type": "scala.Vector[sbt.librarymanagement.ConfigRef]?", "default": "None", "since": "0.0.2" } + ] + }, + { + "name": "GetClassifiersConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "module", "type": "sbt.librarymanagement.GetClassifiersModule" }, + { "name": "excludes", "type": "scala.Tuple2[sbt.librarymanagement.ModuleID, scala.Vector[sbt.librarymanagement.ConfigRef]]*" }, + { "name": "updateConfiguration", "type": "sbt.librarymanagement.UpdateConfiguration" }, + { "name": "sourceArtifactTypes", "type": "String*" }, + { "name": "docArtifactTypes", "type": "String*" } + ] + }, + { + "name": "GetClassifiersModule", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "id", "type": "sbt.librarymanagement.ModuleID" }, + { "name": "scalaModuleInfo", "type": "sbt.librarymanagement.ScalaModuleInfo?" }, + { "name": "dependencies", "type": "sbt.librarymanagement.ModuleID*" }, + { "name": "configurations", "type": "sbt.librarymanagement.Configuration*" }, + { "name": "classifiers", "type": "String*" } + ] + }, + { + "name": "PublishConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "publishMavenStyle", "type": "boolean", "default": "true", "since": "0.0.1" }, + { "name": "deliverIvyPattern", "type": "String?", "default": "None", "since": "0.0.1" }, + { "name": "status", "type": "String?", "default": "None", "since": "0.0.1" }, + { "name": "configurations", "type": "scala.Vector[sbt.librarymanagement.ConfigRef]?", "default": "None", "since": "0.0.1" }, + { "name": "resolverName", "type": "String?", "default": "None", "since": "0.0.1" }, + { "name": "artifacts", "type": "scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]*", "default": "Vector()", "since": "0.0.1" }, + { + "name": "checksums", + "type": "scala.Vector[String]", + "default": "Vector(\"sha1\", \"md5\")", + "since": "0.0.1" }, + { "name": "logging", "type": "sbt.librarymanagement.UpdateLogging?", "default": "None", "since": "0.0.1" }, + { "name": "overwrite", "type": "boolean", "default": "false", "since": "0.0.1" } + ] + }, + { + "name": "Artifact", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "parents": "sbt.librarymanagement.ArtifactExtra", + "fields": [ + { "name": "name", "type": "String" }, + { "name": "type", "type": "String", "default": "Artifact.DefaultType", "since": "0.0.1" }, + { "name": "extension", "type": "String", "default": "Artifact.DefaultExtension", "since": "0.0.1" }, + { "name": "classifier", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { 
"name": "configurations", "type": "sbt.librarymanagement.ConfigRef*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "url", "type": "Option[java.net.URI]", "default": "None", "since": "0.0.1" }, + { "name": "extraAttributes", "type": "Map[String, String]", "default": "Map.empty", "since": "0.0.1" }, + { "name": "checksum", "type": "Option[sbt.librarymanagement.Checksum]", "default": "None", "since": "0.0.1" }, + { "name": "allowInsecureProtocol", "type": "Boolean", "default": "false", "since": "1.3.0" } + ], + "extra": [ + "private[sbt] def validateProtocol(logger: sbt.util.Logger): Boolean = Resolver.validateArtifact(this, logger)" + ], + "parentsCompanion": "sbt.librarymanagement.ArtifactFunctions" + }, + { + "name": "ArtifactTypeFilter", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "Work around the inadequacy of Ivy's ArtifactTypeFilter (that it cannot reverse a filter)", + "parents": "sbt.librarymanagement.ArtifactTypeFilterExtra", + "fields": [ + { + "name": "types", + "doc": [ + "Represents the artifact types that we should try to resolve for (as in the allowed values of", + "`artifact[type]` from a dependency `` section). One can use this to filter", + "source / doc artifacts." + ], + "type": "Set[String]" + }, + { + "name": "inverted", + "doc": [ "Whether to invert the types filter (i.e. allow only types NOT in the set)" ], + "type": "boolean" + } + ], + "parentsCompanion": "sbt.librarymanagement.ArtifactTypeFilterFunctions" + }, + { + "name": "Caller", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "caller", "type": "sbt.librarymanagement.ModuleID" }, + { "name": "callerConfigurations", "type": "sbt.librarymanagement.ConfigRef*" }, + { "name": "callerExtraAttributes", "type": "Map[String, String]" }, + { "name": "isForceDependency", "type": "boolean" }, + { "name": "isChangingDependency", "type": "boolean" }, + { "name": "isTransitiveDependency", "type": "boolean" }, + { "name": "isDirectlyForceDependency", "type": "boolean" } + ], + "toString": "s\"$caller\"" + }, + { + "name": "ConfigurationReport", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "Provides information about resolution of a single configuration." + ], + "parents": "sbt.librarymanagement.ConfigurationReportExtra", + "fields": [ + { "name": "configuration", "type": "sbt.librarymanagement.ConfigRef", "doc": [ "the configuration this report is for." ] }, + { + "name": "modules", + "type": "sbt.librarymanagement.ModuleReport*", + "doc": [ + "a sequence containing one report for each module resolved for this configuration." + ] + }, + { + "name": "details", + "type": "sbt.librarymanagement.OrganizationArtifactReport*", + "doc": [ "a sequence containing one report for each org/name, which may or may not be part of the final resolution." 
] + } + ], + "toString": [ + "s\"\\t$configuration:\\n\" +", + "(if (details.isEmpty) modules.mkString + details.flatMap(_.modules).filter(_.evicted).map(\"\\t\\t(EVICTED) \" + _ + \"\\n\").mkString", + "else details.mkString)" + ] + }, + { + "name": "ConflictManager", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "See http://ant.apache.org/ivy/history/latest-milestone/settings/conflict-managers.html for details of the different conflict managers.", + "fields": [ + { "name": "name", "type": "String" }, + { "name": "organization", "type": "String", "default": "\"*\"", "since": "0.0.1" }, + { "name": "module", "type": "String", "default": "\"*\"", "since": "0.0.1" } + ], + "parentsCompanion": "sbt.librarymanagement.ConflictManagerFunctions" + }, + { + "name": "Developer", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "id", "type": "String" }, + { "name": "name", "type": "String" }, + { "name": "email", "type": "String" }, + { "name": "url", "type": "java.net.URI" } + ] + }, + { + "name": "FileConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "Configuration specific to an Ivy filesystem resolver.", + "fields": [ + { "name": "isLocal", "type": "boolean" }, + { "name": "isTransactional", "type": "boolean?" } + ] + }, + { + "name": "InclExclRule", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "Defines a rule to either:", + "", + "The use case that is applied depends on the parameter name which it is passed to, but the", + "filter has the same fields in both cases." + ], + "fields": [ + { "name": "organization", "type": "String", "default": "\"*\"", "since": "0.0.1" }, + { "name": "name", "type": "String", "default": "\"*\"", "since": "0.0.1" }, + { "name": "artifact", "type": "String", "default": "\"*\"", "since": "0.0.1" }, + { "name": "configurations", "type": "sbt.librarymanagement.ConfigRef*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "crossVersion", "type": "sbt.librarymanagement.CrossVersion", "default": "sbt.librarymanagement.Disabled()", "since": "0.0.1"} + ], + "parentsCompanion": "sbt.librarymanagement.InclExclRuleFunctions" + }, + { + "name": "ScalaModuleInfo", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "scalaFullVersion", "type": "String" }, + { "name": "scalaBinaryVersion", "type": "String" }, + { "name": "configurations", "type": "sbt.librarymanagement.Configuration*" }, + { "name": "checkExplicit", "type": "boolean" }, + { "name": "filterImplicit", "type": "boolean" }, + { "name": "overrideScalaVersion", "type": "boolean" }, + { "name": "scalaOrganization", "type": "String", "default": "sbt.librarymanagement.ScalaArtifacts.Organization", "since": "0.0.1" }, + { "name": "scalaArtifacts", "type": "scala.Vector[String]", "default": "sbt.librarymanagement.ScalaArtifacts.Artifacts", "since": "0.0.1" }, + { + "name": "platform", + "type": "Option[String]", + "default": "None", + "since": "2.0.0" + } + ] + }, + { + "name": "ModuleConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "organization", "type": "String" }, + { "name": "name", "type": "String" }, + { "name": "revision", "type": "String" }, + { "name": "resolver", "type": "sbt.librarymanagement.Resolver" } + ], + "extraCompanion": [ + "def apply(org: String, resolver: 
sbt.librarymanagement.Resolver): ModuleConfiguration = apply(org, \"*\", \"*\", resolver)", + "def apply(org: String, name: String, resolver: sbt.librarymanagement.Resolver): ModuleConfiguration = ModuleConfiguration(org, name, \"*\", resolver)" + ] + }, + { + "name": "ModuleID", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "parents": "sbt.librarymanagement.ModuleIDExtra", + "fields": [ + { "name": "organization", "type": "String" }, + { "name": "name", "type": "String" }, + { "name": "revision", "type": "String" }, + { "name": "configurations", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "isChanging", "type": "boolean", "default": "false", "since": "0.0.1" }, + { "name": "isTransitive", "type": "boolean", "default": "true", "since": "0.0.1" }, + { "name": "isForce", "type": "boolean", "default": "false", "since": "0.0.1" }, + { "name": "explicitArtifacts", "type": "sbt.librarymanagement.Artifact*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "inclusions", "type": "sbt.librarymanagement.InclExclRule*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "exclusions", "type": "sbt.librarymanagement.InclExclRule*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "extraAttributes", "type": "Map[String, String]", "default": "Map.empty", "since": "0.0.1" }, + { "name": "crossVersion", "type": "sbt.librarymanagement.CrossVersion", "default": "sbt.librarymanagement.Disabled()", "since": "0.0.1" }, + { "name": "branchName", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "platformOpt", "type": "Option[String]", "default": "None", "since": "2.0.0" } + ], + "toString": [ + "this.toStringImpl" + ], + "parentsCompanion": "sbt.librarymanagement.ModuleIDFunctions" + }, + { + "name": "Checksum", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "digest", "type": "String" }, + { "name": "type", "type": "String", "default": "sha1" } + ] + }, + { + "name": "ModuleInfo", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "Additional information about a project module", + "fields": [ + { "name": "nameFormal", "type": "String" }, + { "name": "description", "type": "String", "default": "\"\"", "since": "0.0.1" }, + { "name": "homepage", "type": "Option[java.net.URI]", "default": "None", "since": "0.0.1" }, + { "name": "startYear", "type": "Option[Int]", "default": "None", "since": "0.0.1" }, + { "name": "licenses", "type": "scala.Tuple2[String, java.net.URI]*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "organizationName", "type": "String", "default": "\"\"", "since": "0.0.1" }, + { "name": "organizationHomepage", "type": "Option[java.net.URI]", "default": "None", "since": "0.0.1" }, + { "name": "scmInfo", "type": "Option[sbt.librarymanagement.ScmInfo]", "default": "None", "since": "0.0.1" }, + { "name": "developers", "type": "sbt.librarymanagement.Developer*", "default": "Vector.empty", "since": "0.0.1" } + ] + }, + { + "name": "ModuleReport", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "Provides information about the resolution of a module.", + "This information is in the context of a specific configuration." 
+ ], + "parents": "sbt.librarymanagement.ModuleReportExtra", + "fields": [ + { "name": "module", "type": "sbt.librarymanagement.ModuleID" }, + { "name": "artifacts", "type": "scala.Tuple2[sbt.librarymanagement.Artifact, java.io.File]*" }, + { "name": "missingArtifacts", "type": "sbt.librarymanagement.Artifact*" }, + { "name": "status", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "publicationDate", "type": "Option[java.util.Calendar]", "default": "None", "since": "0.0.1" }, + { "name": "resolver", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "artifactResolver", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "evicted", "type": "boolean", "default": "false", "since": "0.0.1" }, + { "name": "evictedData", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "evictedReason", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "problem", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "homepage", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "extraAttributes", "type": "Map[String, String]", "default": "Map.empty", "since": "0.0.1" }, + { "name": "isDefault", "type": "Option[Boolean]", "default": "None", "since": "0.0.1" }, + { "name": "branch", "type": "Option[String]", "default": "None", "since": "0.0.1" }, + { "name": "configurations", "type": "sbt.librarymanagement.ConfigRef*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "licenses", "type": "scala.Tuple2[String, Option[String]]*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "callers", "type": "sbt.librarymanagement.Caller*", "default": "Vector.empty", "since": "0.0.1" } + ], + "toString": [ + "s\"\\t\\t$module: \" +", + "(if (arts.size <= 1) \"\" else \"\\n\\t\\t\\t\") + arts.mkString(\"\\n\\t\\t\\t\") + \"\\n\"" + ] + }, + { + "name": "ModuleSettings", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "interface", + "fields": [ + { + "name": "validate", + "type": "boolean", + "default": "false", + "since": "0.0.1" + }, + { + "name": "scalaModuleInfo", + "type": "sbt.librarymanagement.ScalaModuleInfo?", + "default": "None", + "since": "0.0.1" + } + ], + "types": [ + { + "name": "IvyFileConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "file", "type": "java.io.File" }, + { "name": "autoScalaTools", "type": "boolean" } + ] + }, + { + "name": "PomConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "file", "type": "java.io.File" }, + { "name": "autoScalaTools", "type": "boolean" } + ] + }, + { + "name": "ModuleDescriptorConfiguration", + "docs": [ + "A module descriptor that represents a subproject, formerly known as an InlineConfiguration." 
+ ], + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { + "name": "module", + "type": "sbt.librarymanagement.ModuleID" + }, + { + "name": "moduleInfo", + "type": "sbt.librarymanagement.ModuleInfo" + }, + { + "name": "dependencies", + "type": "sbt.librarymanagement.ModuleID*", + "default": "Vector.empty", + "since": "0.0.1" + }, + { + "name": "overrides", + "type": "sbt.librarymanagement.ModuleID*", + "default": "Vector.empty", + "since": "0.0.1" + }, + { "name": "excludes", "type": "sbt.librarymanagement.InclExclRule*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "ivyXML", "type": "scala.xml.NodeSeq", "default": "scala.xml.NodeSeq.Empty", "since": "0.0.1" }, + { + "name": "configurations", + "type": "sbt.librarymanagement.Configuration*", + "default": "sbt.librarymanagement.Configurations.default", + "since": "0.0.1" + }, + { + "name": "defaultConfiguration", + "type": "sbt.librarymanagement.Configuration?", + "default": "Option(sbt.librarymanagement.Configurations.Compile)", + "since": "0.0.1" + }, + { "name": "conflictManager", "type": "sbt.librarymanagement.ConflictManager", "default": "sbt.librarymanagement.ConflictManager.default", "since": "0.0.1" } + ], + "parentsCompanion": "sbt.librarymanagement.InlineConfigurationFunctions" + } + ] + }, + { + "name": "OrganizationArtifactReport", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "OrganizationArtifactReport represents an organization+name entry in Ivy resolution report.", + "In sbt's terminology, \"module\" consists of organization, name, and version.", + "In Ivy's, \"module\" means just organization and name, and the one including version numbers", + "are called revisions.", + "", + "A sequence of OrganizationArtifactReport called details is newly added to ConfigurationReport, replacing evicted.", + "(Note old evicted was just a seq of ModuleIDs).", + "OrganizationArtifactReport groups the ModuleReport of both winners and evicted reports by their organization and name,", + "which can be used to calculate detailed eviction warning etc." 
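Illustrative aside (not part of the change itself): given the records above, an eviction summary falls out of walking UpdateReport, then ConfigurationReport.details, then OrganizationArtifactReport.modules, keeping the reports whose evicted flag is set. A minimal sketch, assuming only the fields declared in this schema:

  import sbt.librarymanagement.{ ModuleID, UpdateReport }

  // Collect every module revision that lost conflict resolution, across all configurations.
  def evictedModules(report: UpdateReport): Vector[ModuleID] =
    for {
      configReport <- report.configurations   // one ConfigurationReport per configuration
      orgArt       <- configReport.details    // one OrganizationArtifactReport per (organization, name)
      moduleReport <- orgArt.modules          // winners and evicted revisions side by side
      if moduleReport.evicted
    } yield moduleReport.module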
+ ], + "fields": [ + { "name": "organization", "type": "String" }, + { "name": "name", "type": "String" }, + { "name": "modules", "type": "sbt.librarymanagement.ModuleReport*" } + ], + "toString": [ + "val details = modules map { _.detailReport }", + "s\"\\t$organization:$name\\n${details.mkString}\\n\"" + ] + }, + { + "name": "Patterns", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "ivyPatterns", "type": "String*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "artifactPatterns", "type": "String*", "default": "Vector.empty", "since": "0.0.1" }, + { "name": "isMavenCompatible", "type": "boolean", "default": "true", "since": "0.0.1" }, + { "name": "descriptorOptional", "type": "boolean", "default": "false", "since": "0.0.1" }, + { "name": "skipConsistencyCheck", "type": "boolean", "default": "false", "since": "0.0.1" } + ], + "toString": [ + "\"Patterns(ivyPatterns=%s, artifactPatterns=%s, isMavenCompatible=%s, descriptorOptional=%s, skipConsistencyCheck=%s)\".format(", + " ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck)" + ], + "parentsCompanion": "sbt.librarymanagement.PatternsFunctions" + }, + { + "name": "Resolver", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "interface", + "fields": [ + { "name": "name", "type": "String" } + ], + "extra": [ + "/** check for HTTP */", + "private[sbt] def validateProtocol(logger: sbt.util.Logger): Boolean = false" + ], + "types": [ + { + "name": "ChainedResolver", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "resolvers", "type": "sbt.librarymanagement.Resolver*" } + ], + "extra": [ + "private[sbt] override def validateProtocol(logger: sbt.util.Logger): Boolean = !resolvers.forall(!_.validateProtocol(logger))" + ] + }, + { + "name": "MavenRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "interface", + "doc": "An instance of a remote maven repository. Note: This will use Aether/Maven to resolve artifacts.", + "fields": [ + { "name": "root", "type": "String" }, + { "name": "localIfFile", "type": "boolean", "default": "true", "since": "0.0.1" } + ], + "types": [ + { + "name": "MavenRepo", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "This is the internal implementation of actual Maven Repository (as opposed to a file cache)." + ], + "fields": [ + { "name": "_allowInsecureProtocol", "type": "Boolean", "default": "false", "since": "1.3.0" } + ], + "extra": [ + "override def isCache: Boolean = false", + "override def allowInsecureProtocol: Boolean = _allowInsecureProtocol", + "private[sbt] override def validateProtocol(logger: sbt.util.Logger): Boolean = Resolver.validateMavenRepo(this, logger)" + ], + "toString": "s\"$name: $root\"" + }, + { + "name": "MavenCache", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "An instance of maven CACHE directory. You cannot treat a cache directory the same as a a remote repository because", + "the metadata is different (see Aether ML discussion)." 
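Illustrative aside: MavenRepo is what a plain remote repository definition resolves to, and since the 1.3.0 field above an http:// root is flagged by validateProtocol unless insecure access is explicitly opted into. A minimal sketch with a hypothetical URL, assuming the generated companion apply(name, root) and the withAllowInsecureProtocol helper defined just below:

  import sbt.librarymanagement.MavenRepository

  // Point at an internal mirror that is only reachable over plain HTTP.
  val corpMirror: MavenRepository =
    MavenRepository("corp-nexus", "http://nexus.example.com/repository/maven-public/")
      .withAllowInsecureProtocol(true)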
+ ], + "fields": [ + { "name": "rootFile", "type": "java.io.File" } + ], + "extra": [ + "def this(name: String, rootFile: java.io.File) = this(name, rootFile.toURI.toURL.toString, true, rootFile)", + "override def isCache: Boolean = true", + "override def allowInsecureProtocol: Boolean = false" + ], + "toString": "s\"cache:$name: ${rootFile.getAbsolutePath}\"", + "extraCompanion": "def apply(name: String, rootFile: java.io.File): MavenCache = new MavenCache(name, rootFile)" + } + ], + "extra": [ + "def isCache: Boolean", + "def allowInsecureProtocol: Boolean", + "def withAllowInsecureProtocol(allowInsecureProtocol: Boolean): MavenRepository =", + " this match {", + " case x: MavenRepo => x.with_allowInsecureProtocol(allowInsecureProtocol)", + " case x: MavenCache => x", + " }" + ], + "parentsCompanion": "sbt.librarymanagement.MavenRepositoryFunctions" + }, + { + "name": "PatternsBasedRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "interface", + "fields": [ + { "name": "patterns", "type": "sbt.librarymanagement.Patterns" } + ], + "doc": "sbt interface to an Ivy repository based on patterns, which is most Ivy repositories.", + "types": [ + { + "name": "FileRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "sbt interface for an Ivy filesystem repository. More convenient construction is done using Resolver.file.", + "fields": [ + { "name": "configuration", "type": "sbt.librarymanagement.FileConfiguration" } + ], + "extra": [ + "def this(name: String, configuration: sbt.librarymanagement.FileConfiguration, patterns: sbt.librarymanagement.Patterns) = ", + " this(name, patterns, configuration)" + ], + "extraCompanion": [ + "def apply(name: String, configuration: sbt.librarymanagement.FileConfiguration, patterns: sbt.librarymanagement.Patterns) = ", + " new FileRepository(name, patterns, configuration)" + ] + }, + { + "name": "URLRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "allowInsecureProtocol", "type": "boolean", "default": "false", "since": "1.3.0" } + ], + "extra": [ + "private[sbt] override def validateProtocol(logger: sbt.util.Logger): Boolean = Resolver.validateURLRepository(this, logger)" + ] + }, + { + "name": "SshBasedRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "interface", + "doc": "sbt interface for an Ivy ssh-based repository (ssh and sftp). Requires the Jsch library..", + "parents": "sbt.librarymanagement.SshBasedRepositoryExtra", + "fields": [ + { "name": "connection", "type": "sbt.librarymanagement.SshConnection" } + ], + "types": [ + { + "name": "SshRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "sbt interface for an Ivy repository over ssh. More convenient construction is done using Resolver.ssh.", + "parents": "sbt.librarymanagement.SshRepositoryExtra", + "fields": [ + { "name": "publishPermissions", "type": "String?" 
} + ], + "extra": [ + "def this(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns, publishPermissions: Option[String]) = ", + " this(name, patterns, connection, publishPermissions)" + ], + "extraCompanion": [ + "def apply(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns, publishPermissions: Option[String]) = ", + " new SshRepository(name, patterns, connection, publishPermissions)" + ] + }, + { + "name": "SftpRepository", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "sbt interface for an Ivy repository over sftp. More convenient construction is done using Resolver.sftp.", + "parents": "sbt.librarymanagement.SftpRepositoryExtra", + "extra": [ + "def this(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns) = ", + " this(name, patterns, connection)" + ], + "extraCompanion": [ + "def apply(name: String, connection: sbt.librarymanagement.SshConnection, patterns: sbt.librarymanagement.Patterns) = ", + " new SftpRepository(name, patterns, connection)" + ] + } + ] + } + ] + } + ], + "parentsCompanion": "sbt.librarymanagement.ResolverFunctions" + }, + { + "name": "ScmInfo", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": "Basic SCM information for a project module", + "fields": [ + { "name": "browseUrl", "type": "java.net.URI" }, + { "name": "connection", "type": "String" }, + { "name": "devConnection", "type": "String?", "default": "None", "since": "0.0.1" } + ] + }, + { + "name": "SshAuthentication", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "interface", + "types": [ + { + "name": "PasswordAuthentication", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "user", "type": "String" }, + { "name": "password", "type": "String?" } + ] + }, + { + "name": "KeyFileAuthentication", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "user", "type": "String" }, + { "name": "keyfile", "type": "java.io.File" }, + { "name": "password", "type": "String?" } + ] + } + ] + }, + { + "name": "SshConnection", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "authentication", "type": "sbt.librarymanagement.SshAuthentication?" }, + { "name": "hostname", "type": "String?" }, + { "name": "port", "type": "int?" } + ] + }, + { + "name": "UpdateLogging", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "enumeration", + "doc": [ + "Configures logging during an 'update'. `level` determines the amount of other information logged.", + "`Full` is the default and logs the most.", + "`DownloadOnly` only logs what is downloaded.", + "`Quiet` only displays errors.", + "`Default` uses the current log level of `update` task." + ], + "symbols": [ "Full", "DownloadOnly", "Quiet", "Default" ] + }, + { + "name": "UpdateReport", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "Provides information about dependency resolution.", + "It does not include information about evicted modules, only about the modules ultimately selected by the conflict manager.", + "This means that for a given configuration, there should only be one revision for a given organization and module name." 
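Illustrative aside: because an UpdateReport keeps one ConfigurationReport per resolved configuration, flattening it into files is a straightforward walk. A minimal sketch using only the fields declared above (configurations, modules, artifacts):

  import java.io.File
  import sbt.librarymanagement.UpdateReport

  // Resolved artifact files, grouped by configuration name (e.g. "compile", "test").
  def filesByConfiguration(report: UpdateReport): Map[String, Vector[File]] =
    report.configurations.map { configReport =>
      configReport.configuration.name ->
        configReport.modules.flatMap(_.artifacts.map { case (_, file) => file })
    }.toMap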
+ ], + "parents": "sbt.librarymanagement.UpdateReportExtra", + "fields": [ + { + "name": "cachedDescriptor", + "type": "java.io.File", + "doc": [ "the location of the resolved module descriptor in the cache" ] + }, + { + "name": "configurations", + "type": "sbt.librarymanagement.ConfigurationReport*", + "doc": [ "a sequence containing one report for each configuration resolved." ] + }, + { + "name": "stats", + "type": "sbt.librarymanagement.UpdateStats", + "doc": [ "stats information about the update that produced this report" ] + }, + { "name": "stamps", "type": "Map[String, Long]" } + ], + "toString": "\"Update report:\\n\\t\" + stats + \"\\n\" + configurations.mkString" + }, + { + "name": "UpdateStats", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "resolveTime", "type": "long" }, + { "name": "downloadTime", "type": "long" }, + { "name": "downloadSize", "type": "long" }, + { "name": "cached", "type": "boolean" } + ], + "toString": "Seq(\"Resolve time: \" + resolveTime + \" ms\", \"Download time: \" + downloadTime + \" ms\", \"Download size: \" + downloadSize + \" bytes\").mkString(\", \")" + }, + + { + "name": "ConfigurationReportLite", + "namespace": "sbt.internal.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "configuration", "type": "String" }, + { "name": "details", "type": "sbt.librarymanagement.OrganizationArtifactReport*" } + ] + }, + { + "name": "UpdateReportLite", + "namespace": "sbt.internal.librarymanagement", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "configurations", "type": "sbt.internal.librarymanagement.ConfigurationReportLite*" } + ] + } + ] +} diff --git a/lm-core/src/main/contraband/librarymanagement2.json b/lm-core/src/main/contraband/librarymanagement2.json new file mode 100644 index 000000000..ffdee8da4 --- /dev/null +++ b/lm-core/src/main/contraband/librarymanagement2.json @@ -0,0 +1,142 @@ +{ + "types": [ + { + "name": "MakePomConfiguration", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "generateCodec": false, + "fields": [ + { "name": "file", "type": "java.io.File?" }, + { "name": "moduleInfo", "type": "sbt.librarymanagement.ModuleInfo?" }, + { "name": "configurations", "type": "scala.Vector[sbt.librarymanagement.Configuration]?" }, + { "name": "extra", "type": "scala.xml.NodeSeq?" 
}, + { "name": "process", "type": "scala.Function1[scala.xml.Node, scala.xml.Node]" }, + { "name": "filterRepositories", "type": "scala.Function1[sbt.librarymanagement.MavenRepository, Boolean]" }, + { "name": "allRepositories", "type": "boolean" }, + { "name": "includeTypes", "type": "Set[String]" } + ], + "parentsCompanion": "sbt.librarymanagement.MakePomConfigurationFunctions" + }, + { + "name": "SemanticSelector", + "namespace": "sbt.librarymanagement", + "target": "Scala", + "type": "record", + "doc": [ + "Semantic version selector API to check if the VersionNumber satisfies", + "conditions described by semantic version selector.", + "", + "A `comparator` generally consist of an operator and version specifier.", + "The set of operators is", + "- `<`: Less than", + "- `<=`: Less than or equal to", + "- `>`: Greater than", + "- `>=`: Greater than or equal to", + "- `=`: Equal", + "If no operator is specified, `=` is assumed.", + "", + "If minor or patch versions are not specified, some numbers are assumed.", + "- `<=1.0` is equivalent to `<1.1.0`.", + "- `<1.0` is equivalent to `<1.0.0`.", + "- `>=1.0` is equivalent to `>=1.0.0`.", + "- `>1.0` is equivalent to `>=1.1.0`.", + "- `=1.0` is equivalent to `>=1.0 <=1.0` (so `>=1.0.0 <1.1.0`).", + "", + "Comparators can be combined by spaces to form the intersection set of the comparators.", + "For example, `>1.2.3 <4.5.6` matches versions that are `greater than 1.2.3 AND less than 4.5.6`.", + "", + "The (intersection) set of comparators can combined by ` || ` (spaces are required) to form the", + "union set of the intersection sets. So the semantic selector is in disjunctive normal form.", + "", + "Wildcard (`x`, `X`, `*`) can be used to match any number of minor or patch version.", + "Actually, `1.0.x` is equivalent to `=1.0` (that is equivalent to `>=1.0.0 <1.1.0`)", + "", + "The hyphen range like `1.2.3 - 4.5.6` matches inclusive set of versions.", + "So `1.2.3 - 4.5.6` is equivalent to `>=1.2.3 <=4.5.6`.", + "Both sides of comparators around - are required and they can not have any operators.", + "For example, `>=1.2.3 - 4.5.6` is invalid.", + "", + "The order of versions basically follows the rule specified in https://semver.org/#spec-item-11", + "> When major, minor, and patch are equal, a pre-release version has lower precedence", + "> than a normal version. 
Example: 1.0.0-alpha < 1.0.0.", + "> Precedence for two pre-release versions with the same major, minor, and patch version", + "> Must be determined by comparing each dot separated identifier from left to right", + "> until a difference is found as follows:", + "> identifiers consisting of only digits are compared numerically", + "> and identifiers with letters or hyphens are compared lexically in ASCII sort order.", + "> Numeric identifiers always have lower precedence than non-numeric identifiers.", + "> A larger set of pre-release fields has a higher precedence than a smaller set,", + "> if all of the preceding identifiers are equal.", + "> Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0.", + "", + "The differences from the original specification are following", + "- `SemanticVersionSelector` separetes the pre-release fields by hyphen instead of dot", + "- hyphen cannot be used in pre-release identifiers because it is used as separator for pre-release fields", + "", + "Therefore, in order to match pre-release versions like `1.0.0-beta`", + "we need to explicitly specify the pre-release identifiers like `>=1.0.0-alpha`." + ], + "generateCodec": false, + "fields": [ + { "name": "selectors", "type": "Seq[sbt.internal.librarymanagement.SemSelAndChunk]" } + ], + "toString": "selectors.map(_.toString).mkString(\" || \")", + "extra": "def matches(versionNumber: VersionNumber): Boolean = selectors.exists(_.matches(versionNumber))", + "extraCompanion": [ + "def apply(selector: String): SemanticSelector = {", + " val orChunkTokens = selector.split(\"\\\\s+\\\\|\\\\|\\\\s+\").map(_.trim)", + " val orChunks = orChunkTokens.map { chunk => sbt.internal.librarymanagement.SemSelAndChunk(chunk) }", + " SemanticSelector(scala.collection.immutable.ArraySeq.unsafeWrapArray(orChunks))", + "}" + ] + }, + { + "name": "SemSelAndChunk", + "namespace": "sbt.internal.librarymanagement", + "target": "Scala", + "type": "record", + "generateCodec": false, + "parentsCompanion": "sbt.internal.librarymanagement.SemSelAndChunkFunctions", + "fields": [ + { "name": "comparators", "type": "Seq[sbt.internal.librarymanagement.SemComparator]" } + ], + "toString": "comparators.map(_.toString).mkString(\" \")", + "extra": "def matches(version: sbt.librarymanagement.VersionNumber): Boolean = comparators.forall(_.matches(version))", + "extraCompanion": [ + "def apply(andClauseToken: String): SemSelAndChunk = parse(andClauseToken)" + ] + }, + { + "name": "SemComparator", + "namespace": "sbt.internal.librarymanagement", + "target": "Scala", + "type": "record", + "generateCodec": false, + "toString": "this.toStringImpl", + "parents": "sbt.internal.librarymanagement.SemComparatorExtra", + "parentsCompanion": "sbt.internal.librarymanagement.SemComparatorFunctions", + "fields": [ + { "name": "op", "type": "sbt.internal.librarymanagement.SemSelOperator" }, + { "name": "major", "type": "Option[Long]" }, + { "name": "minor", "type": "Option[Long]" }, + { "name": "patch", "type": "Option[Long]" }, + { "name": "tags", "type": "Seq[String]" } + ], + "extra": [ + "def matches(version: sbt.librarymanagement.VersionNumber): Boolean = this.matchesImpl(version)", + "def expandWildcard: Seq[SemComparator] = {", + " if (op == sbt.internal.librarymanagement.SemSelOperator.Eq && !allFieldsSpecified) {", + " Seq(", + " this.withOp(sbt.internal.librarymanagement.SemSelOperator.Gte),", + " this.withOp(sbt.internal.librarymanagement.SemSelOperator.Lte)", + " )", + " } else { 
Seq(this) }", + "}" + ], + "extraCompanion": [ + "def apply(comparator: String): SemComparator = parse(comparator)" + ] + } + ] +} diff --git a/lm-core/src/main/java/sbt/internal/librarymanagement/mavenint/SbtPomExtraProperties.java b/lm-core/src/main/java/sbt/internal/librarymanagement/mavenint/SbtPomExtraProperties.java new file mode 100644 index 000000000..9f7891301 --- /dev/null +++ b/lm-core/src/main/java/sbt/internal/librarymanagement/mavenint/SbtPomExtraProperties.java @@ -0,0 +1,26 @@ +package sbt.internal.librarymanagement.mavenint; + +/** Extra properties we dump from Aether into the properties list. */ +public class SbtPomExtraProperties { + + public static final String MAVEN_PACKAGING_KEY = "sbt.pom.packaging"; + public static final String SCALA_VERSION_KEY = "sbt.pom.scalaversion"; + public static final String SBT_VERSION_KEY = "sbt.pom.sbtversion"; + + public static final String POM_INFO_KEY_PREFIX = "info."; + public static final String POM_SCALA_VERSION = "scalaVersion"; + public static final String POM_SBT_VERSION = "sbtVersion"; + public static final String POM_API_KEY = "info.apiURL"; + public static final String VERSION_SCHEME_KEY = "info.versionScheme"; + public static final String POM_RELEASE_NOTES_KEY = "info.releaseNotesUrl"; + + public static final String LICENSE_COUNT_KEY = "license.count"; + + public static String makeLicenseName(int i) { + return "license." + i + ".name"; + } + + public static String makeLicenseUrl(int i) { + return "license." + i + ".url"; + } +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/CompatibilityWarning.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/CompatibilityWarning.scala new file mode 100644 index 000000000..3ad192577 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/CompatibilityWarning.scala @@ -0,0 +1,59 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.util.{ Level, Logger } + +import Configurations._ + +final class CompatibilityWarningOptions private[sbt] ( + val configurations: Seq[Configuration], + val level: Level.Value +) + +object CompatibilityWarningOptions { + def default: CompatibilityWarningOptions = + apply(configurations = List(Compile, Runtime), level = Level.Warn) + def apply( + configurations: List[Configuration], + level: Level.Value + ): CompatibilityWarningOptions = + new CompatibilityWarningOptions( + configurations = configurations, + level = level + ) +} + +private[sbt] object CompatibilityWarning { + def run( + config: CompatibilityWarningOptions, + module: ModuleDescriptor, + mavenStyle: Boolean, + log: Logger + ): Unit = { + if (mavenStyle) { + processIntransitive(config, module, log) + } + } + def processIntransitive( + config: CompatibilityWarningOptions, + module: ModuleDescriptor, + log: Logger + ): Unit = { + val monitoredConfigsStr: Set[String] = (config.configurations map { _.name }).toSet + def inMonitoredConfigs(configOpt: Option[String]): Boolean = + configOpt match { + case Some(c) => (c.split(",").toSet intersect monitoredConfigsStr).nonEmpty + case None => monitoredConfigsStr contains "compile" + } + module.directDependencies foreach { m => + if (!m.isTransitive && inMonitoredConfigs(m.configurations)) { + log.warn( + s"""Found intransitive dependency ($m) while publishMavenStyle is true, but Maven repositories + | do not support intransitive dependencies. Use exclusions instead so transitive dependencies + | will be correctly excluded in dependent projects. 
+ """.stripMargin + ) + } else () + } + } +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/InternalDefaults.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/InternalDefaults.scala new file mode 100644 index 000000000..277d54dba --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/InternalDefaults.scala @@ -0,0 +1,29 @@ +package sbt +package internal.librarymanagement + +import java.io.File +import sbt.librarymanagement._ +import sbt.io.syntax._ + +/** + * This is a list of functions with default values. + */ +object InternalDefaults { + val sbtOrgTemp = JsonUtil.sbtOrgTemp + val modulePrefixTemp = "temp-module-" + + def getArtifactTypeFilter(opt: Option[ArtifactTypeFilter]): ArtifactTypeFilter = + opt.getOrElse(Artifact.defaultArtifactTypeFilter) + + def defaultRetrieveDirectory: File = + (new File(".")).getAbsoluteFile / "lib_managed" + + def getRetrieveDirectory(opt: Option[File]): File = + opt.getOrElse(defaultRetrieveDirectory) + + def getRetrievePattern(opt: Option[String]): String = + opt.getOrElse(Resolver.defaultRetrievePattern) + + def getDeliverStatus(opt: Option[String]): String = + opt.getOrElse("release") +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/IvyConfigurations.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/IvyConfigurations.scala new file mode 100644 index 000000000..aac26548d --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/IvyConfigurations.scala @@ -0,0 +1,20 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.librarymanagement + +private[librarymanagement] abstract class InlineConfigurationFunctions { + def configurations( + explicitConfigurations: Iterable[Configuration], + defaultConfiguration: Option[Configuration] + ) = + if (explicitConfigurations.isEmpty) { + defaultConfiguration match { + case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil + case Some(Configurations.DefaultMavenConfiguration) => + Configurations.defaultMavenConfigurations + case _ => Nil + } + } else + explicitConfigurations +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/JsonUtil.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/JsonUtil.scala new file mode 100644 index 000000000..8d15a64cb --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/JsonUtil.scala @@ -0,0 +1,96 @@ +package sbt.internal.librarymanagement + +import java.io.File +import sbt.util.{ CacheStore, Logger } +import sbt.librarymanagement._, LibraryManagementCodec._ +import sbt.io.IO + +private[sbt] object JsonUtil { + def sbtOrgTemp = "org.scala-sbt.temp" + def fakeCallerOrganization = "org.scala-sbt.temp-callers" + + def parseUpdateReport( + path: File, + cachedDescriptor: File, + log: Logger + ): UpdateReport = { + try { + val lite = CacheStore(path).read[UpdateReportLite]() + fromLite(lite, cachedDescriptor) + } catch { + case e: Throwable => + log.error(s"Unable to parse mini graph: $path") + throw e + } + } + + def writeUpdateReport(ur: UpdateReport, graphPath: File): Unit = { + val updateReportLite = toLite(ur) + IO.createDirectory(graphPath.getParentFile) + CacheStore(graphPath).write(updateReportLite) + } + + def toLite(ur: UpdateReport): UpdateReportLite = + UpdateReportLite(ur.configurations map { cr => + ConfigurationReportLite( + cr.configuration.name, + cr.details map { oar => + OrganizationArtifactReport( + oar.organization, + oar.name, + oar.modules map { mr => + 
ModuleReport( + mr.module, + mr.artifacts, + mr.missingArtifacts, + mr.status, + mr.publicationDate, + mr.resolver, + mr.artifactResolver, + mr.evicted, + mr.evictedData, + mr.evictedReason, + mr.problem, + mr.homepage, + mr.extraAttributes, + mr.isDefault, + mr.branch, + mr.configurations, + mr.licenses, + filterOutArtificialCallers(mr.callers) + ) + } + ) + } + ) + }) + + // #1763/#2030. Caller takes up 97% of space, so we need to shrink it down, + // but there are semantics associated with some of them. + def filterOutArtificialCallers(callers: Vector[Caller]): Vector[Caller] = + if (callers.isEmpty) callers + else { + val nonArtificial = callers filter { c => + (c.caller.organization != sbtOrgTemp) && + (c.caller.organization != fakeCallerOrganization) + } + val interProj = (callers find { c => + c.caller.organization == sbtOrgTemp + }).toVector + interProj ++ nonArtificial + } + + def fromLite(lite: UpdateReportLite, cachedDescriptor: File): UpdateReport = { + val stats = UpdateStats(0L, 0L, 0L, false) + val configReports = lite.configurations map { cr => + val details = cr.details + val modules = details flatMap { + _.modules filter { mr => + !mr.evicted && mr.problem.isEmpty + } + } + ConfigurationReport(ConfigRef(cr.configuration), modules, details) + } + UpdateReport(cachedDescriptor, configReports, stats, Map.empty) + } +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/LMSysProp.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/LMSysProp.scala new file mode 100644 index 000000000..209e0cd11 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/LMSysProp.scala @@ -0,0 +1,66 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt +package internal +package librarymanagement + +import java.util.Locale +import scala.util.control.NonFatal + +object LMSysProp { + def booleanOpt(name: String): Option[Boolean] = + sys.props.get(name).flatMap { x => + x.toLowerCase(Locale.ENGLISH) match { + case "1" | "always" | "true" => Some(true) + case "0" | "never" | "false" => Some(false) + case "auto" => None + case _ => None + } + } + + def getOrFalse(name: String): Boolean = booleanOpt(name).getOrElse(false) + def getOrTrue(name: String): Boolean = booleanOpt(name).getOrElse(true) + + // System property style: + // 1. use sbt. prefix + // 2. prefer short nouns + // 3. use dot for namespacing, and avoid making dot-separated English phrase + // 4. make active/enable properties, instead of "sbt.disable." 
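// (Illustrative annotation, not part of the committed file: how booleanOpt above behaves for example -D flags.)
//   -Dsbt.repository.secure=0    -> booleanOpt == Some(false), so getOrTrue("sbt.repository.secure") is false
//   -Dsbt.repository.secure=auto -> booleanOpt == None, so getOrTrue falls back to true
//   property not set             -> booleanOpt == None, the default wins as well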
+ // + // Good: sbt.offline + // + // Bad: + // sbt.disable.interface.classloader.cache + // sbt.task.timings.on.shutdown + // sbt.skip.version.write -> sbt.genbuildprops=false + + val useSecureResolvers: Boolean = getOrTrue("sbt.repository.secure") + + lazy val modifyVersionRange: Boolean = getOrTrue("sbt.modversionrange") + + lazy val isJavaVersion9Plus: Boolean = javaVersion > 8 + lazy val javaVersion: Int = { + try { + // See Oracle section 1.5.3 at: + // https://docs.oracle.com/javase/8/docs/technotes/guides/versioning/spec/versioning2.html + val version = sys.props("java.specification.version").split("\\.").toList.map(_.toInt) + version match { + case 1 :: minor :: _ => minor + case major :: _ => major + case _ => 0 + } + } catch { + case NonFatal(_) => 0 + } + } + + lazy val useGigahorse: Boolean = getOrFalse("sbt.gigahorse") + lazy val maxPublishAttempts: Int = + java.lang.Integer.getInteger("sbt.repository.publish.attempts", 3) + +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/SemSelOperator.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/SemSelOperator.scala new file mode 100644 index 000000000..d881fe9e2 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/SemSelOperator.scala @@ -0,0 +1,17 @@ +package sbt.internal.librarymanagement +sealed abstract class SemSelOperator { + override def toString: String = this match { + case SemSelOperator.Lte => "<=" + case SemSelOperator.Lt => "<" + case SemSelOperator.Gte => ">=" + case SemSelOperator.Gt => ">" + case SemSelOperator.Eq => "=" + } +} +object SemSelOperator { + case object Lte extends SemSelOperator + case object Lt extends SemSelOperator + case object Gte extends SemSelOperator + case object Gt extends SemSelOperator + case object Eq extends SemSelOperator +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/SemanticSelectorExtra.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/SemanticSelectorExtra.scala new file mode 100644 index 000000000..2020e71fd --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/SemanticSelectorExtra.scala @@ -0,0 +1,216 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement.VersionNumber +import sbt.internal.librarymanagement.SemSelOperator.{ Lt, Lte, Gt, Gte, Eq } + +import scala.annotation.tailrec +import java.util.Locale + +private[librarymanagement] abstract class SemSelAndChunkFunctions { + protected def parse(andClauseToken: String): SemSelAndChunk = { + val comparatorTokens = + scala.collection.immutable.ArraySeq.unsafeWrapArray(andClauseToken.split("\\s+")) + val hyphenIndex = comparatorTokens.indexWhere(_ == "-") + val comparators = if (hyphenIndex == -1) { + comparatorTokens.map(SemComparator.apply) + } else { + // interpret `A.B.C - D.E.F` to `>=A.B.C <=D.E.F` + val (before, after) = comparatorTokens.splitAt(hyphenIndex) + (before.lastOption, after.drop(1).headOption) match { + case (Some(fromStr), Some(toStr)) => + // from and to can not have an operator. 
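// (Illustrative annotation: with this parse, "1.2.3 - 4.5.6" expands to the comparators
//  >=1.2.3 and <=4.5.6, while ">=1.2.3 - 4.5.6" is rejected because an endpoint carries an operator.)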
+ if (hasOperator(fromStr) || hasOperator(toStr)) { + throw new IllegalArgumentException( + s"Invalid ' - ' range, both side of comparators can not have an operator: $fromStr - $toStr" + ) + } + val from = SemComparator(fromStr) + val to = SemComparator(toStr) + val comparatorsBefore = before.dropRight(1).map(SemComparator.apply) + val comparatorsAfter = after.drop(2) match { + case tokens if !tokens.isEmpty => + parse(tokens.mkString(" ")).comparators + case _ => Seq.empty + } + from.withOp(Gte) +: to.withOp(Lte) +: + (comparatorsBefore ++ comparatorsAfter) + case _ => + throw new IllegalArgumentException( + s"Invalid ' - ' range position, both side of versions must be specified: $andClauseToken" + ) + } + } + SemSelAndChunk(comparators.flatMap(_.expandWildcard)) + } + + private[this] def hasOperator(comparator: String): Boolean = { + comparator.startsWith("<") || + comparator.startsWith(">") || + comparator.startsWith("=") + } +} + +private[librarymanagement] abstract class SemComparatorExtra { + val op: SemSelOperator + val major: Option[Long] + val minor: Option[Long] + val patch: Option[Long] + val tags: Seq[String] + + protected def toStringImpl: String = { + val versionStr = Seq(major, minor, patch) + .collect { case Some(v) => + v.toString + } + .mkString(".") + val tagsStr = if (tags.nonEmpty) s"-${tags.mkString("-")}" else "" + s"$op$versionStr$tagsStr" + } + + protected def matchesImpl(version: VersionNumber): Boolean = { + // Fill empty fields of version specifier with 0 or max value of Long. + // By filling them, SemComparator realize the properties below + // `<=1.0` is equivalent to `<1.1.0` (`<=1.0.${Long.MaxValue}`) + // `<1.0` is equivalent to `<1.0.0` + // `>=1.0` is equivalent to `>=1.0.0` + // `>1.0` is equivalent to `>=1.1.0` (`>1.0.${Long.MaxValue}`) + // + // However this fills 0 for a comparator that have `=` operator, + // a comparator that have empty part of version and `=` operator won't appear + // because of expanding it to and clause of comparators. + val assumed = op match { + case Lte => Long.MaxValue + case Lt => 0L + case Gte => 0L + case Gt => Long.MaxValue + case Eq => 0L + } + // empty fields of the version number are assumed to be 0. + val versionNumber = + (version._1.getOrElse(0L), version._2.getOrElse(0L), version._3.getOrElse(0L)) + val selector = (major.getOrElse(assumed), minor.getOrElse(assumed), patch.getOrElse(assumed)) + val normalVersionCmp = + implicitly[Ordering[(Long, Long, Long)]].compare(versionNumber, selector) + val cmp = + if (normalVersionCmp == 0) comparePreReleaseTags(version.tags, tags) + else normalVersionCmp + op match { + case Lte if cmp <= 0 => true + case Lt if cmp < 0 => true + case Gte if cmp >= 0 => true + case Gt if cmp > 0 => true + case Eq if cmp == 0 => true + case _ => false + } + } + private[this] def comparePreReleaseTags(ts1: Seq[String], ts2: Seq[String]): Int = { + // > When major, minor, and patch are equal, a pre-release version has lower precedence than a normal version. + if (ts1.isEmpty && ts2.isEmpty) 0 + else if (ts1.nonEmpty && ts2.isEmpty) -1 // ts1 is pre-release version + else if (ts1.isEmpty && ts2.nonEmpty) 1 // ts2 is pre-release version + else compareTags(ts1, ts2) + } + + @tailrec + private[this] def compareTags(ts1: Seq[String], ts2: Seq[String]): Int = { + // > A larger set of pre-release fields has a higher precedence than a smaller set, + // > if all of the preceding identifiers are equal. 
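// (Illustrative annotation of the rules above, on the hyphen-separated tag fields this file uses:)
//   comparePreReleaseTags(Seq("alpha"), Seq.empty)   < 0   // a pre-release sorts below the plain release
//   compareTags(Seq("alpha", "1"), Seq("alpha"))     > 0   // the larger set of otherwise-equal fields wins
//   compareTags(Seq("beta", "11"), Seq("beta", "2")) > 0   // all-digit fields compare numerically, 11 > 2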
+ if (ts1.isEmpty && ts2.isEmpty) 0 + else if (ts1.nonEmpty && ts2.isEmpty) 1 + else if (ts1.isEmpty && ts2.nonEmpty) -1 + else { + val ts1head = ts1.head + val ts2head = ts2.head + val cmp = (ts1head.matches("\\d+"), ts2head.matches("\\d+")) match { + // Identifiers consisting of only digits are compared numerically. + // Numeric identifiers always have lower precedence than non-numeric identifiers. + // Identifiers with letters are compared case insensitive lexical order. + case (true, true) => implicitly[Ordering[Long]].compare(ts1head.toLong, ts2head.toLong) + case (false, true) => 1 + case (true, false) => -1 + case (false, false) => + ts1head.toLowerCase(Locale.ENGLISH).compareTo(ts2head.toLowerCase(Locale.ENGLISH)) + } + if (cmp == 0) compareTags(ts1.tail, ts2.tail) + else cmp + } + } + + // Expand wildcard with `=` operator to and clause of comparators. + // `=1.0` is equivalent to `>=1.0 <=1.0` + protected def allFieldsSpecified: Boolean = + major.isDefined && minor.isDefined && patch.isDefined +} + +private[librarymanagement] abstract class SemComparatorFunctions { + private[this] val ComparatorRegex = """(?x)^ + ([<>]=?|=)? + (?:(\d+|[xX*]) + (?:\.(\d+|[xX*]) + (?:\.(\d+|[xX*]))? + )? + )((?:-\w+(?:\.\w+)*)*)$ + """.r + protected def parse(comparator: String): SemComparator = { + comparator match { + case ComparatorRegex(rawOp, rawMajor, rawMinor, rawPatch, ts) => + val opStr = Option(rawOp) + val major = Option(rawMajor) + val minor = Option(rawMinor) + val patch = Option(rawPatch) + val tags = splitDash(ts) + + // Trim wildcard(x, X, *) and re-parse it. + // By trimming it, comparator realize the property like + // `=1.2.x` is equivalent to `=1.2`. + val hasXrangeSelector = Seq(major, minor, patch).exists { + case Some(str) => str.matches("[xX*]") + case None => false + } + if (hasXrangeSelector) { + if (tags.nonEmpty) + throw new IllegalArgumentException( + s"Pre-release version requires major, minor, patch versions to be specified: $comparator" + ) + val numbers = Seq(major, minor, patch).takeWhile { + case Some(str) => str.matches("\\d+") + case None => false + } + parse( + numbers + .collect { case Some(v) => + v.toString + } + .mkString(".") + ) + } else { + if (tags.nonEmpty && (major.isEmpty || minor.isEmpty || patch.isEmpty)) + throw new IllegalArgumentException( + s"Pre-release version requires major, minor, patch versions to be specified: $comparator" + ) + val operator = opStr match { + case Some("<") => Lt + case Some("<=") => Lte + case Some(">") => Gt + case Some(">=") => Gte + case Some("=") => Eq + case None => Eq + case Some(_) => + throw new IllegalArgumentException(s"Invalid operator: $opStr") + } + SemComparator( + operator, + major.map(_.toLong), + minor.map(_.toLong), + patch.map(_.toLong), + tags + ) + } + case _ => throw new IllegalArgumentException(s"Invalid comparator: $comparator") + } + } + private[this] def splitOn[A](s: String, sep: Char): Vector[String] = + if (s eq null) Vector() + else s.split(sep).filterNot(_ == "").toVector + private[this] def splitDash(s: String) = splitOn(s, '-') +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/StringUtilities.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/StringUtilities.scala new file mode 100644 index 000000000..37c26418d --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/StringUtilities.scala @@ -0,0 +1,17 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.util.Locale + 
+object StringUtilities { + @deprecated( + "Different use cases require different normalization. Use Project.normalizeModuleID or normalizeProjectID instead.", + "0.13.0" + ) + def normalize(s: String) = s.toLowerCase(Locale.ENGLISH).replaceAll("""\W+""", "-") + def nonEmpty(s: String, label: String): Unit = + require(s.trim.length > 0, label + " cannot be empty.") + def appendable(s: String) = if (s.isEmpty) "" else "_" + s +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/UpdateClassifiersUtil.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/UpdateClassifiersUtil.scala new file mode 100644 index 000000000..9f87ff5e8 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/UpdateClassifiersUtil.scala @@ -0,0 +1,79 @@ +package sbt.internal.librarymanagement + +import java.io.File +import sbt.librarymanagement._ + +object UpdateClassifiersUtil { + + def restrictedCopy(m: ModuleID, confs: Boolean) = + ModuleID(m.organization, m.name, m.revision) + .withCrossVersion(m.crossVersion) + .withExtraAttributes(m.extraAttributes) + .withConfigurations(if (confs) m.configurations else None) + .branch(m.branchName) + + // This version adds explicit artifact + def classifiedArtifacts( + classifiers: Vector[String], + exclude: Map[ModuleID, Set[ConfigRef]], + artifacts: Vector[(String, ModuleID, Artifact, File)] + )(m: ModuleID): Option[ModuleID] = { + def sameModule(m1: ModuleID, m2: ModuleID): Boolean = + m1.organization == m2.organization && m1.name == m2.name && m1.revision == m2.revision + def explicitArtifacts = { + val arts = (artifacts collect { + case (_, x, art, _) if sameModule(m, x) && art.classifier.isDefined => art + }).distinct + if (arts.isEmpty) None + else Some(intransitiveModuleWithExplicitArts(m, arts)) + } + def hardcodedArtifacts = classifiedArtifacts(classifiers, exclude)(m) + explicitArtifacts orElse hardcodedArtifacts + } + + def classifiedArtifacts( + classifiers: Vector[String], + exclude: Map[ModuleID, Set[ConfigRef]] + )(m: ModuleID): Option[ModuleID] = { + val excluded: Set[ConfigRef] = exclude.getOrElse(restrictedCopy(m, false), Set.empty) + val exls = excluded map { _.name } + val included = classifiers filterNot exls + if (included.isEmpty) None + else { + Some( + intransitiveModuleWithExplicitArts( + module = m, + arts = classifiedArtifacts(m.name, included) + ) + ) + } + } + + def classifiedArtifacts(name: String, classifiers: Vector[String]): Vector[Artifact] = + classifiers map { c => + Artifact.classified(name, c) + } + + /** + * Explicitly set an "include all" rule (the default) because otherwise, if we declare ANY explicitArtifacts, + * [[org.apache.ivy.core.resolve.IvyNode#getArtifacts]] (in Ivy 2.3.0-rc1) will not merge in the descriptor's + * artifacts and will only keep the explicitArtifacts. + *
+ * Look for the comment saying {{{ + * // and now we filter according to include rules + * }}} + * in `IvyNode`, which iterates on `includes`, which will ordinarily be empty because higher up, in {{{ + * addAllIfNotNull(includes, usage.getDependencyIncludesSet(rootModuleConf)); + * }}} + * `usage.getDependencyIncludesSet` returns null if there are no (explicit) include rules. + */ + private def intransitiveModuleWithExplicitArts( + module: ModuleID, + arts: Vector[Artifact] + ): ModuleID = + module + .withIsTransitive(false) + .withExplicitArtifacts(arts) + .withInclusions(Vector(InclExclRule.everything)) + +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/VersionRange.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/VersionRange.scala new file mode 100644 index 000000000..3bf85a9e9 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/VersionRange.scala @@ -0,0 +1,87 @@ +package sbt +package internal +package librarymanagement + +object VersionRange { + + /** True if the revision is an ivy-range, not a complete revision. */ + def isVersionRange(revision: String): Boolean = { + (revision endsWith "+") || + (revision contains "[") || + (revision contains "]") || + (revision contains "(") || + (revision contains ")") + } + + // Assuming Ivy is used to resolve conflict, this removes the version range + // when it is open-ended to avoid dependency resolution hitting the Internet to get the latest. + // See https://github.com/sbt/sbt/issues/2954 + def stripMavenVersionRange(version: String): Option[String] = + if (isVersionRange(version)) { + val noSpace = version.replace(" ", "") + noSpace match { + case MavenVersionSetPattern(open1, x1, comma, x2, close1, _) => + // http://maven.apache.org/components/enforcer/enforcer-rules/versionRanges.html + (open1, Option(x1), Option(comma), Option(x2), close1) match { + case (_, None, _, Some(x2), "]") => Some(x2) + // a good upper bound is unknown + case (_, None, _, Some(_), ")") => None + case (_, Some(x1), _, None, _) => Some(x1) + case _ => None + } + case _ => None + } + } else None + + /** Converts Ivy revision ranges to that of Maven POM */ + def fromIvyToMavenVersion(revision: String): String = { + def plusRange(s: String, shift: Int = 0) = { + def pow(i: Int): Int = if (i > 0) 10 * pow(i - 1) else 1 + val (prefixVersion, lastVersion) = (s + "0" * shift).reverse.split("\\.", 2) match { + case Array(revLast, revRest) => + (revRest.reverse + ".", revLast.reverse) + case Array(revLast) => ("", revLast.reverse) + } + val lastVersionInt = lastVersion.toInt + s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt + pow(shift)})" + } + val DotPlusPattern = """(.+)\.\+""".r + val DotNumPlusPattern = """(.+)\.(\d+)\+""".r + val NumPlusPattern = """(\d+)\+""".r + val maxDigit = 5 + try { + revision match { + case "+" => "[0,)" + case DotPlusPattern(base) => plusRange(base) + // This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so + // we assume version ranges never go beyond 5 siginificant digits. + case NumPlusPattern(tail) => (0 until maxDigit).map(plusRange(tail, _)).mkString(",") + case DotNumPlusPattern(base, tail) => + (0 until maxDigit).map(plusRange(base + "." 
+ tail, _)).mkString(",") + case rev if rev endsWith "+" => + sys.error(s"dynamic revision '$rev' cannot be translated to POM") + case rev if startSym(rev(0)) && stopSym(rev(rev.length - 1)) => + val start = rev(0) + val stop = rev(rev.length - 1) + val mid = rev.substring(1, rev.length - 1) + (if (start == ']') "(" else start.toString) + mid + (if (stop == '[') ")" else stop) + case _ => revision + } + } catch { + case _: NumberFormatException => + // TODO - if the version doesn't meet our expectations, maybe we just issue a hard + // error instead of softly ignoring the attempt to rewrite. + // sys.error(s"Could not fix version [$revision] into maven style version") + revision + } + } + + def hasMavenVersionRange(version: String): Boolean = + if (version.length <= 1) false + else startSym(version(0)) && stopSym(version(version.length - 1)) + + private[this] val startSym = Set(']', '[', '(') + private[this] val stopSym = Set(']', '[', ')') + private[this] val MavenVersionSetPattern = + """([\]\[\(])([\w\.\-]+)?(,)?([\w\.\-]+)?([\]\[\)])(,.+)?""".r +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/VersionSchemes.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/VersionSchemes.scala new file mode 100644 index 000000000..112420028 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/VersionSchemes.scala @@ -0,0 +1,57 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt +package internal +package librarymanagement + +import sbt.internal.librarymanagement.mavenint.SbtPomExtraProperties +import sbt.librarymanagement.{ EvictionWarningOptions, ModuleID, ScalaModuleInfo } + +// See APIMappings.scala +private[sbt] object VersionSchemes { + final val EarlySemVer = "early-semver" + final val SemVerSpec = "semver-spec" + final val PackVer = "pvp" + final val Strict = "strict" + final val Always = "always" + + def validateScheme(value: String): Unit = + value match { + case EarlySemVer | SemVerSpec | PackVer | Strict | Always => () + case "semver" => + sys.error( + s"""'semver' is ambiguous. + |Based on the Semantic Versioning 2.0.0, 0.y.z updates are all initial development and thus + |0.6.0 and 0.6.1 would NOT maintain any compatibility, but in Scala ecosystem it is + |common to start adopting binary compatibility even in 0.y.z releases. + | + |Specify 'early-semver' for the early variant. + |Specify 'semver-spec' for the spec-correct SemVer.""".stripMargin + ) + case x => sys.error(s"unknown version scheme: $x") + } + + /** + * info.versionScheme property will be included into POM after sbt 1.4.0. 
+ */ + def extractFromId(mid: ModuleID): Option[String] = extractFromExtraAttributes(mid.extraAttributes) + + def extractFromExtraAttributes(extraAttributes: Map[String, String]): Option[String] = + extraAttributes.get(SbtPomExtraProperties.VERSION_SCHEME_KEY) + + def evalFunc( + scheme: String + ): Function1[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = + scheme match { + case EarlySemVer => EvictionWarningOptions.guessEarlySemVer + case SemVerSpec => EvictionWarningOptions.guessSemVer + case PackVer => EvictionWarningOptions.evalPvp + case Strict => EvictionWarningOptions.guessStrict + case Always => EvictionWarningOptions.guessTrue + } +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/cross/CrossVersionUtil.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/cross/CrossVersionUtil.scala new file mode 100644 index 000000000..f2d467133 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/cross/CrossVersionUtil.scala @@ -0,0 +1,142 @@ +package sbt.internal.librarymanagement +package cross + +import sbt.librarymanagement.ScalaArtifacts + +object CrossVersionUtil { + val trueString = "true" + val falseString = "false" + val fullString = "full" + val noneString = "none" + val disabledString = "disabled" + val binaryString = "binary" + val TransitionScalaVersion = "2.10" // ...but scalac doesn't until Scala 2.10 + val TransitionSbtVersion = "0.12" + + def isFull(s: String): Boolean = (s == trueString) || (s == fullString) + + def isDisabled(s: String): Boolean = + (s == falseString) || (s == noneString) || (s == disabledString) + + def isBinary(s: String): Boolean = (s == binaryString) + + private val longPattern = """\d{1,19}""" + private val basicVersion = raw"""($longPattern)\.($longPattern)\.($longPattern)""" + private val tagPattern = raw"""(?:\w+(?:\.\w+)*)""" + private val ReleaseV = raw"""$basicVersion(-\d+)?""".r + private[sbt] val BinCompatV = raw"""$basicVersion(-$tagPattern)?-bin(-.*)?""".r + private val CandidateV = raw"""$basicVersion(-RC\d+)""".r + private val MilestonV = raw"""$basicVersion(-M$tagPattern)""".r + private val NonReleaseV_n = + raw"""$basicVersion((?:-$tagPattern)*)""".r // 0-n word suffixes, with leading dashes + private val NonReleaseV_1 = raw"""$basicVersion(-$tagPattern)""".r // 1 word suffix, after a dash + private[sbt] val PartialVersion = raw"""($longPattern)\.($longPattern)(?:\..+)?""".r + + private[sbt] def isSbtApiCompatible(v: String): Boolean = sbtApiVersion(v).isDefined + + /** + * Returns sbt binary interface x.y API compatible with the given version string v. + * RCs for x.y.0 are considered API compatible. + * Compatible versions include 0.12.0-1 and 0.12.0-RC1 for Some(0, 12). + */ + private[sbt] def sbtApiVersion(v: String): Option[(Long, Long)] = v match { + case ReleaseV(x, y, _, _) => Some(sbtApiVersion(x.toLong, y.toLong)) + case CandidateV(x, y, _, _) => Some(sbtApiVersion(x.toLong, y.toLong)) + case NonReleaseV_n(x, y, z, _) if x.toLong == 0 && z.toLong > 0 => + Some(sbtApiVersion(x.toLong, y.toLong)) + case NonReleaseV_n(x, y, z, _) if x.toLong > 0 && (y.toLong > 0 || z.toLong > 0) => + Some(sbtApiVersion(x.toLong, y.toLong)) + case _ => None + } + + private def sbtApiVersion(x: Long, y: Long) = { + // Prior to sbt 1 the "sbt api version" was the X.Y in the X.Y.Z version. + // For example for sbt 0.13.x releases, the sbt api version is 0.13 + // As of sbt 1 it is now X.0. 
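// (Illustrative annotation: sbtApiVersion("0.13.16") == Some((0L, 13L)) and
//  sbtApiVersion("1.4.9") == Some((1L, 0L)), so binarySbtVersion renders them as "0.13" and "1.0".)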
+ // This means, for example, that all versions of sbt 1.x have sbt api version 1.0 + if (x > 0) (x, 0L) else (x, y) + } + + private[sbt] def isScalaApiCompatible(v: String): Boolean = scalaApiVersion(v).isDefined + + /** + * Returns Scala binary interface x.y API compatible with the given version string v. + * Compatible versions include 2.10.0-1 and 2.10.1-M1 for Some(2, 10), but not 2.10.0-RC1. + */ + private[sbt] def scalaApiVersion(v: String): Option[(Long, Long)] = v match { + case ReleaseV(x, y, _, _) => Some((x.toLong, y.toLong)) + case BinCompatV(x, y, _, _, _) => Some((x.toLong, y.toLong)) + case NonReleaseV_1(x, y, z, _) if z.toLong > 0 => Some((x.toLong, y.toLong)) + case _ => None + } + + private[sbt] def partialVersion(s: String): Option[(Long, Long)] = + s match { + case PartialVersion(major, minor) => Some((major.toLong, minor.toLong)) + case _ => None + } + + private[sbt] def binaryScala3Version(full: String): String = full match { + case ReleaseV(maj, _, _, _) => maj + case NonReleaseV_n(maj, min, patch, _) if min.toLong > 0 || patch.toLong > 0 => maj + case BinCompatV(maj, min, patch, stageOrNull, _) => + val stage = if (stageOrNull != null) stageOrNull else "" + binaryScala3Version(s"$maj.$min.$patch$stage") + case _ => full + } + + // Uses the following rules: + // + // - Forwards and backwards compatibility is guaranteed for Scala 2.N.x (https://docs.scala-lang.org/overviews/core/binary-compatibility-of-scala-releases.html) + // + // - A Scala compiler in version 3.x1.y1 is able to read TASTy files produced by another compiler in version 3.x2.y2 if x1 >= x2 (https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html) + // + // - For non-stable Scala 3 versions, compiler versions can read TASTy in an older stable format but their TASTY versions are not compatible between each other even if the compilers have the same minor version (https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html) + // + private[sbt] def isScalaBinaryCompatibleWith(newVersion: String, origVersion: String): Boolean = { + (newVersion, origVersion) match { + case (NonReleaseV_n("2", _, _, _), NonReleaseV_n("2", _, _, _)) => + val api1 = scalaApiVersion(newVersion) + val api2 = scalaApiVersion(origVersion) + (api1.isDefined && api1 == api2) || (newVersion == origVersion) + case (ReleaseV(nMaj, nMin, _, _), ReleaseV(oMaj, oMin, _, _)) + if nMaj == oMaj && nMaj.toLong >= 3 => + nMin.toInt >= oMin.toInt + case (NonReleaseV_1(nMaj, nMin, _, _), ReleaseV(oMaj, oMin, _, _)) + if nMaj == oMaj && nMaj.toLong >= 3 => + nMin.toInt > oMin.toInt + case _ => + newVersion == origVersion + } + } + + def binaryScalaVersion(full: String): String = { + if (ScalaArtifacts.isScala3(full)) binaryScala3Version(full) + else + binaryVersionWithApi(full, TransitionScalaVersion)(scalaApiVersion) // Scala 2 binary version + } + + def binarySbtVersion(full: String): String = + sbtApiVersion(full) match { + case Some((0, minor)) if minor < 12 => full + case Some((0, minor)) => s"0.$minor" + case Some((1, minor)) => s"1.$minor" + case Some((major, _)) => major.toString + case _ => full + } + + private[this] def isNewer(major: Long, minor: Long, minMajor: Long, minMinor: Long): Boolean = + major > minMajor || (major == minMajor && minor >= minMinor) + + private[this] def binaryVersionWithApi(full: String, cutoff: String)( + apiVersion: String => Option[(Long, Long)] + ): String = { + (apiVersion(full), partialVersion(cutoff)) match { + case (Some((major, minor)), 
None) => s"$major.$minor" + case (Some((major, minor)), Some((minMajor, minMinor))) + if isNewer(major, minor, minMajor, minMinor) => + s"$major.$minor" + case _ => full + } + } +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/GlobalLockFormat.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/GlobalLockFormat.scala new file mode 100644 index 000000000..2e4ced132 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/GlobalLockFormat.scala @@ -0,0 +1,25 @@ +package sbt.internal.librarymanagement.formats + +import sjsonnew._ +import xsbti._ +import java.io.File +import java.util.concurrent.Callable + +/** + * A fake JsonFormat for xsbti.GlobalLock. + * This is mostly for making IvyConfiguration serializable to JSON. + */ +trait GlobalLockFormat { self: BasicJsonProtocol => + import GlobalLockFormats._ + + implicit lazy val globalLockIsoString: IsoString[GlobalLock] = + IsoString.iso(_ => "", _ => NoGlobalLock) + + implicit lazy val GlobalLockFormat: JsonFormat[GlobalLock] = isoStringFormat(globalLockIsoString) +} + +private[sbt] object GlobalLockFormats { + object NoGlobalLock extends GlobalLock { + def apply[T](lockFile: File, run: Callable[T]) = run.call() + } +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/LoggerFormat.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/LoggerFormat.scala new file mode 100644 index 000000000..f10fdb03f --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/LoggerFormat.scala @@ -0,0 +1,16 @@ +package sbt.internal.librarymanagement.formats + +import sjsonnew._ +import xsbti._ +import sbt.util.Logger.Null + +/** + * A fake JsonFormat for xsbti.Logger. + * This is mostly for making IvyConfiguration serializable to JSON. 
+ */ +trait LoggerFormat { self: BasicJsonProtocol => + implicit lazy val xsbtiLoggerIsoString: IsoString[Logger] = + IsoString.iso(_ => "", _ => Null) + + implicit lazy val LoggerFormat: JsonFormat[Logger] = isoStringFormat(implicitly) +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/LogicalClockFormats.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/LogicalClockFormats.scala new file mode 100644 index 000000000..9daca4a93 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/LogicalClockFormats.scala @@ -0,0 +1,13 @@ +package sbt.internal.librarymanagement.formats + +import sjsonnew._ + +import sbt.librarymanagement.LogicalClock + +trait LogicalClockFormats { self: BasicJsonProtocol => + implicit lazy val LogicalClockFormat: JsonFormat[LogicalClock] = + projectFormat[LogicalClock, String]( + cl => cl.toString, + str => LogicalClock(str) + ) +} diff --git a/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/NodeSeqFormat.scala b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/NodeSeqFormat.scala new file mode 100644 index 000000000..45704dfa6 --- /dev/null +++ b/lm-core/src/main/scala/sbt/internal/librarymanagement/formats/NodeSeqFormat.scala @@ -0,0 +1,11 @@ +package sbt.internal.librarymanagement.formats + +import sjsonnew._ +import scala.xml._ + +trait NodeSeqFormat { self: BasicJsonProtocol => + implicit lazy val NodeSeqFormat: JsonFormat[NodeSeq] = projectFormat[NodeSeq, String]( + xml => {xml}.toString, + str => XML.loadString(str).child + ) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ArtifactExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/ArtifactExtra.scala new file mode 100644 index 000000000..411f486aa --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ArtifactExtra.scala @@ -0,0 +1,135 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.librarymanagement + +import java.io.File +import java.net.URI + +private[librarymanagement] abstract class ArtifactExtra { + def extraAttributes: Map[String, String] + + def withExtraAttributes(extraAttributes: Map[String, String]): Artifact + + def extra(attributes: (String, String)*) = + withExtraAttributes(extraAttributes ++ ModuleID.checkE(attributes)) +} + +import Configurations.{ Optional, Pom, Test } + +private[librarymanagement] abstract class ArtifactFunctions { + def apply(name: String, extra: Map[String, String]): Artifact = + Artifact(name, DefaultType, DefaultExtension, None, Vector.empty, None, extra, None) + def apply(name: String, classifier: String): Artifact = + Artifact(name, DefaultType, DefaultExtension, Some(classifier), Vector.empty, None) + def apply(name: String, `type`: String, extension: String): Artifact = + Artifact(name, `type`, extension, None, Vector.empty, None) + def apply(name: String, `type`: String, extension: String, classifier: String): Artifact = + Artifact(name, `type`, extension, Some(classifier), Vector.empty, None) + def apply(name: String, uri: URI): Artifact = Artifact(name, uri, false) + def apply(name: String, uri: URI, allowInsecureProtocol: Boolean): Artifact = + Artifact( + name, + extract(uri, DefaultType), + extract(uri, DefaultExtension), + None, + Vector.empty, + Some(uri), + Map.empty, + None, + allowInsecureProtocol + ) + + private final val empty = Map.empty[String, String] + def apply( + name: String, + `type`: String, + extension: String, + classifier: Option[String], + configurations: 
Vector[ConfigRef], + uri: Option[URI] + ): Artifact = + Artifact(name, `type`, extension, classifier, configurations, uri, empty, None) + + val DefaultExtension = "jar" + val DefaultType = "jar" + + def sources(name: String) = classified(name, SourceClassifier) + def javadoc(name: String) = classified(name, DocClassifier) + def pom(name: String) = Artifact(name, PomType, PomType, None, Vector(Pom), None) + + // Possible ivy artifact types such that sbt will treat those artifacts at sources / docs + val DefaultSourceTypes: Set[String] = Set("src", "source", "sources") + val DefaultDocTypes: Set[String] = Set("doc", "docs", "javadoc", "javadocs") + val specialArtifactTypes: Set[String] = DefaultSourceTypes union DefaultDocTypes + val defaultArtifactTypeFilter: ArtifactTypeFilter = + ArtifactTypeFilter.forbid(specialArtifactTypes) + + val DocClassifier = "javadoc" + val SourceClassifier = "sources" + + val TestsClassifier = "tests" + // Artifact types used when: + // * artifacts are explicitly created for Maven dependency resolution (see updateClassifiers) + // * declaring artifacts as part of creating Ivy files. + val DocType = "doc" + val SourceType = "src" + val PomType = "pom" + + assert(DefaultDocTypes contains DocType) + assert(DefaultSourceTypes contains SourceType) + + def extract(uri: URI, default: String): String = extract(uri.toString, default) + def extract(name: String, default: String): String = { + val i = name.lastIndexOf('.') + if (i >= 0) + name.substring(i + 1) + else + default + } + def defaultArtifact(file: File) = { + val name = file.getName + val i = name.lastIndexOf('.') + val base = if (i >= 0) name.substring(0, i) else name + Artifact( + base, + extract(name, DefaultType), + extract(name, DefaultExtension), + None, + Vector.empty, + Some(file.toURI) + ) + } + def artifactName(scalaVersion: ScalaVersion, module: ModuleID, artifact: Artifact): String = { + import artifact._ + val classifierStr = classifier match { case None => ""; case Some(c) => "-" + c } + val cross = CrossVersion(module.crossVersion, scalaVersion.full, scalaVersion.binary) + val base = CrossVersion.applyCross(artifact.name, cross) + base + "-" + module.revision + classifierStr + "." + artifact.extension + } + + val classifierTypeMap = Map(SourceClassifier -> SourceType, DocClassifier -> DocType) + @deprecated("Configuration should not be decided from the classifier.", "1.0") + def classifierConf(classifier: String): Configuration = + if (classifier.startsWith(TestsClassifier)) + Test + else + Optional + def classifierType(classifier: String): String = + classifierTypeMap.getOrElse(classifier.stripPrefix(TestsClassifier + "-"), DefaultType) + + /** + * Create a classified explicit artifact, to be used when trying to resolve sources|javadocs from Maven. This is + * necessary because those artifacts are not published in the Ivy generated from the Pom of the module in question. + * The artifact is created under the default configuration. + */ + def classified(name: String, classifier: String): Artifact = + Artifact( + name, + classifierType(classifier), + DefaultExtension, + Some(classifier), + Vector.empty, + None + ) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ConfigRef.scala b/lm-core/src/main/scala/sbt/librarymanagement/ConfigRef.scala new file mode 100644 index 000000000..6143642b6 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ConfigRef.scala @@ -0,0 +1,75 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ +package sbt.librarymanagement + +import scala.collection.concurrent.TrieMap + +/** + * A reference to Configuration. + * @param name The name of the configuration that eventually get used by Maven. + */ +final class ConfigRef private (val name: String) extends Serializable { + + override def equals(o: Any): Boolean = + this.eq(o.asInstanceOf[AnyRef]) + + override val hashCode: Int = { + 37 * (37 * (17 + "sbt.librarymanagement.ConfigRef".##) + name.##) + } + + override def toString: String = { + name + } + + private[this] def copy(name: String = name): ConfigRef = { + ConfigRef(name) + } + + def withName(name: String): ConfigRef = { + copy(name = name) + } +} + +object ConfigRef extends sbt.librarymanagement.ConfigRefFunctions { + // cache the reference to ConfigRefs + private val cache = new TrieMap[String, ConfigRef] + private lazy val Default = new ConfigRef("default") + private lazy val Compile = new ConfigRef("compile") + private lazy val IntegrationTest = new ConfigRef("it") + private lazy val Provided = new ConfigRef("provided") + private lazy val Runtime = new ConfigRef("runtime") + private lazy val Test = new ConfigRef("test") + private lazy val System = new ConfigRef("system") + private lazy val Optional = new ConfigRef("optional") + private lazy val Pom = new ConfigRef("pom") + private lazy val ScalaTool = new ConfigRef("scala-tool") + private lazy val ScalaDocTool = new ConfigRef("scala-doc-tool") + private lazy val CompilerPlugin = new ConfigRef("plugin") + private lazy val Component = new ConfigRef("component") + private lazy val RuntimeInternal = new ConfigRef("runtime-internal") + private lazy val TestInternal = new ConfigRef("test-internal") + private lazy val IntegrationTestInternal = new ConfigRef("it-internal") + private lazy val CompileInternal = new ConfigRef("compile-internal") + + def apply(name: String): ConfigRef = name match { + case "default" => Default + case "compile" => Compile + case "it" => IntegrationTest + case "provided" => Provided + case "runtime" => Runtime + case "test" => Test + case "system" => System + case "optional" => Optional + case "pom" => Pom + case "scala-tool" => ScalaTool + case "scala-doc-tool" => ScalaDocTool + case "plugin" => CompilerPlugin + case "component" => Component + case "runtime-internal" => RuntimeInternal + case "test-internal" => TestInternal + case "it-internal" => IntegrationTestInternal + case "compile-internal" => CompileInternal + case _ => cache.getOrElseUpdate(name, new ConfigRef(name)) + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ConfigRefFormats.scala b/lm-core/src/main/scala/sbt/librarymanagement/ConfigRefFormats.scala new file mode 100644 index 000000000..b7d8d0b05 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ConfigRefFormats.scala @@ -0,0 +1,31 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ +package sbt.librarymanagement + +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } + +trait ConfigRefFormats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val ConfigRefFormat: JsonFormat[sbt.librarymanagement.ConfigRef] = + new JsonFormat[sbt.librarymanagement.ConfigRef] { + override def read[J]( + __jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.ConfigRef = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val name = unbuilder.readField[String]("name") + unbuilder.endObject() + sbt.librarymanagement.ConfigRef(name) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ConfigRef, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("name", obj.name) + builder.endObject() + } + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/Configuration.scala b/lm-core/src/main/scala/sbt/librarymanagement/Configuration.scala new file mode 100644 index 000000000..53c409839 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/Configuration.scala @@ -0,0 +1,91 @@ +package sbt +package librarymanagement + +/** Represents an Ivy configuration. */ +final class Configuration private[sbt] ( + val id: String, + val name: String, + val description: String, + val isPublic: Boolean, + val extendsConfigs: Vector[Configuration], + val transitive: Boolean +) extends ConfigurationExtra + with Serializable { + + require(name != null, "name cannot be null") + require(name.nonEmpty, "name cannot be empty") + require(id != null, "id cannot be null") + require(id.nonEmpty, "id cannot be empty") + require(id.head.isUpper, s"id must be capitalized: $id") + + override def equals(o: Any): Boolean = o match { + case x: Configuration => + (this.id == x.id) && + (this.name == x.name) && + (this.description == x.description) && + (this.isPublic == x.isPublic) && + (this.extendsConfigs == x.extendsConfigs) && + (this.transitive == x.transitive) + case _ => false + } + + override val hashCode: Int = + 37 * (37 * (37 * (37 * (37 * (37 * (17 + + id.##) + name.##) + description.##) + isPublic.##) + extendsConfigs.##) + transitive.##) + + override def toString: String = name + + private[this] def copy( + id: String = id, + name: String = name, + description: String = description, + isPublic: Boolean = isPublic, + extendsConfigs: Vector[Configuration] = extendsConfigs, + transitive: Boolean = transitive + ): Configuration = + new Configuration(id, name, description, isPublic, extendsConfigs, transitive) + + def withDescription(description: String): Configuration = copy(description = description) + + def withIsPublic(isPublic: Boolean): Configuration = copy(isPublic = isPublic) + + def withExtendsConfigs(extendsConfigs: Vector[Configuration]): Configuration = + copy(extendsConfigs = extendsConfigs) + + def withTransitive(transitive: Boolean): Configuration = + copy(transitive = transitive) + + def toConfigRef: ConfigRef = ConfigRef(name) +} + +object Configuration { + // Don't call this directly. It's intended to be used from config macro. 
+ def of(id: String, name: String): Configuration = + new Configuration(id, name, "", true, Vector.empty, true) + + def of( + id: String, + name: String, + description: String, + isPublic: Boolean, + extendsConfigs: Vector[Configuration], + transitive: Boolean + ): Configuration = + new Configuration(id, name, description, isPublic, extendsConfigs, transitive) +} + +/* +Configuration isn't defined with Contraband. + +The reasons that block it from being made a Contraband type (AFAIK): + * its hashCode is a val, not a def + * it doesn't have companion apply methods, but "of" instead + +Other reasons that I don't think are blocking: + * it's constructor is private[sbt] (doesn't matter, Configuration.of is public) + +Reasons that are definitely not blocking: + * it has a custom toString, supported by Contraband + * it has init requirements, supported by Contraband via parent class + * it has other methods, supported by Contraband via parent class + */ diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ConfigurationExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/ConfigurationExtra.scala new file mode 100644 index 000000000..27143a9a4 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ConfigurationExtra.scala @@ -0,0 +1,136 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.librarymanagement + +import scala.annotation.nowarn +import scala.quoted.* + +object Configurations { + inline def config(name: String): Configuration = ${ + ConfigurationMacro.configMacroImpl('{ name }) + } + def default: Vector[Configuration] = defaultMavenConfigurations + def defaultMavenConfigurations: Vector[Configuration] = + Vector(Compile, Runtime, Test, Provided, Optional) + def defaultInternal: Vector[Configuration] = + Vector(CompileInternal, RuntimeInternal, TestInternal) + def auxiliary: Vector[Configuration] = Vector(Pom) + def names(cs: Vector[Configuration]): Vector[String] = cs.map(_.name) + def refs(cs: Vector[Configuration]): Vector[ConfigRef] = cs.map(_.toConfigRef) + + lazy val RuntimeInternal = optionalInternal(Runtime) + lazy val TestInternal = fullInternal(Test) + @nowarn + lazy val IntegrationTestInternal = fullInternal(IntegrationTest) + lazy val CompileInternal = fullInternal(Compile) + + @nowarn + def internalMap(c: Configuration) = c match { + case Compile => CompileInternal + case Test => TestInternal + case Runtime => RuntimeInternal + case IntegrationTest => IntegrationTestInternal + case _ => c + } + + private[sbt] def internal(base: Configuration, ext: Configuration*) = + Configuration.of(base.id + "Internal", base.name + "-internal").extend(ext: _*).hide + private[sbt] def fullInternal(base: Configuration): Configuration = + internal(base, base, Optional, Provided) + private[sbt] def optionalInternal(base: Configuration): Configuration = + internal(base, base, Optional) + + lazy val Default = Configuration.of("Default", "default") + lazy val Compile = Configuration.of("Compile", "compile") + @deprecated("Create a separate subproject for testing instead", "1.9.0") + lazy val IntegrationTest = Configuration.of("IntegrationTest", "it") extend (Runtime) + lazy val Provided = Configuration.of("Provided", "provided") + lazy val Runtime = Configuration.of("Runtime", "runtime") extend (Compile) + lazy val Test = Configuration.of("Test", "test") extend (Runtime) + lazy val System = Configuration.of("System", "system") + lazy val Optional = Configuration.of("Optional", "optional") + lazy val Pom = Configuration.of("Pom", "pom") 
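+
+  // Illustrative sketch (the names Tooling and Bench are hypothetical): the `config` macro above
+  // must be assigned directly to a capitalized val, and the result can be refined with the
+  // ConfigurationExtra methods defined below, e.g.
+  //   val Tooling = config("tooling") extend Compile
+  //   val Bench = config("bench").hide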
+ + lazy val ScalaTool = Configuration.of("ScalaTool", "scala-tool").hide + lazy val ScalaDocTool = Configuration.of("ScalaDocTool", "scala-doc-tool").hide + lazy val CompilerPlugin = Configuration.of("CompilerPlugin", "plugin").hide + lazy val Component = Configuration.of("Component", "component").hide + + private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true) + private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false) + private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = + if (mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration + private[sbt] def defaultConfiguration(mavenStyle: Boolean) = + if (mavenStyle) Configurations.Compile else Configurations.Default + private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = + Set( + scala.collection.mutable + .Map(configs.map(config => (config.name, config)).toSeq: _*) + .values + .toList: _* + ) + + /** Returns true if the configuration should be under the influence of scalaVersion. */ + @nowarn + private[sbt] def underScalaVersion(c: Configuration): Boolean = + c match { + case Default | Compile | IntegrationTest | Provided | Runtime | Test | Optional | + CompilerPlugin | CompileInternal | RuntimeInternal | TestInternal => + true + case config => + config.extendsConfigs exists underScalaVersion + } +} + +private[librarymanagement] abstract class ConfigurationExtra { + def id: String + def name: String + def description: String + def isPublic: Boolean + def extendsConfigs: Vector[Configuration] + def transitive: Boolean + + require(name != null && !name.isEmpty) + require(description != null) + + def describedAs(newDescription: String) = + Configuration.of(id, name, newDescription, isPublic, extendsConfigs, transitive) + def extend(configs: Configuration*) = + Configuration.of( + id, + name, + description, + isPublic, + configs.toVector ++ extendsConfigs, + transitive + ) + def notTransitive = intransitive + def intransitive = Configuration.of(id, name, description, isPublic, extendsConfigs, false) + def hide = Configuration.of(id, name, description, false, extendsConfigs, transitive) +} + +private[sbt] object ConfigurationMacro: + def configMacroImpl(name: Expr[String])(using Quotes): Expr[Configuration] = + import quotes.reflect.* + def enclosingTerm(sym: Symbol): Symbol = + sym match + case sym if sym.flags is Flags.Macro => enclosingTerm(sym.owner) + case sym if !sym.isTerm => enclosingTerm(sym.owner) + case _ => sym + val term = enclosingTerm(Symbol.spliceOwner) + if !term.isValDef then + report.error( + """config must be directly assigned to a val, such as `val Tooling = config("tooling")`.""" + ) + + val enclosingValName = term.name + if enclosingValName.head.isLower then report.error("configuration id must be capitalized") + val id = Expr(enclosingValName) + '{ Configuration.of($id, $name) } +end ConfigurationMacro + +private[librarymanagement] abstract class ConfigRefFunctions { + implicit def configToConfigRef(c: Configuration): ConfigRef = + c.toConfigRef +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ConfigurationFormats.scala b/lm-core/src/main/scala/sbt/librarymanagement/ConfigurationFormats.scala new file mode 100644 index 000000000..66bea9aae --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ConfigurationFormats.scala @@ -0,0 +1,51 @@ +/** + * This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ +package sbt +package librarymanagement + +import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder } + +trait ConfigurationFormats { + self: sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol => + implicit lazy val ConfigurationFormat: JsonFormat[sbt.librarymanagement.Configuration] = + new JsonFormat[sbt.librarymanagement.Configuration] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Configuration = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + val id = unbuilder.readField[String]("id") + val name = unbuilder.readField[String]("name") + val description = unbuilder.readField[String]("description") + val isPublic = unbuilder.readField[Boolean]("isPublic") + val extendsConfigs = + unbuilder.readField[Vector[sbt.librarymanagement.Configuration]]("extendsConfigs") + val transitive = unbuilder.readField[Boolean]("transitive") + unbuilder.endObject() + new sbt.librarymanagement.Configuration( + id, + name, + description, + isPublic, + extendsConfigs, + transitive + ) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Configuration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("id", obj.id) + builder.addField("name", obj.name) + builder.addField("description", obj.description) + builder.addField("isPublic", obj.isPublic) + builder.addField("extendsConfigs", obj.extendsConfigs) + builder.addField("transitive", obj.transitive) + builder.endObject() + } + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ConflictWarning.scala b/lm-core/src/main/scala/sbt/librarymanagement/ConflictWarning.scala new file mode 100644 index 000000000..5122b0144 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ConflictWarning.scala @@ -0,0 +1,72 @@ +package sbt.librarymanagement + +import sbt.util.{ Logger, Level } + +/** + * Provide warnings for cross version conflicts. + * A library foo_2.10 and foo_2.11 can potentially be both included on the + * library dependency graph by mistake, but it won't be caught by eviction. + */ +final case class ConflictWarning(label: String, level: Level.Value, failOnConflict: Boolean) {} +object ConflictWarning { + def disable: ConflictWarning = ConflictWarning("", Level.Debug, false) + + private[this] def idString(org: String, name: String) = s"$org:$name" + + def default(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true) + + def apply(config: ConflictWarning, report: UpdateReport, log: Logger): Unit = { + processCrossVersioned(config, report, log) + } + private[this] def processCrossVersioned( + config: ConflictWarning, + report: UpdateReport, + log: Logger + ): Unit = { + val crossMismatches = crossVersionMismatches(report) + if (crossMismatches.nonEmpty) { + val pre = + s"Modules were resolved with conflicting cross-version suffixes in ${config.label}:\n " + val conflictMsgs = + for (((org, rawName), fullNames) <- crossMismatches) yield { + val suffixes = fullNames.map(getCrossSuffix).mkString(", ") + s"${idString(org, rawName)} $suffixes" + } + log.log(config.level, conflictMsgs.mkString(pre, "\n ", "")) + if (config.failOnConflict) { + val summary = + crossMismatches.map { case ((org, raw), _) => idString(org, raw) }.mkString(", ") + sys.error("Conflicting cross-version suffixes in: " + summary) + } + } + } + + /** Map from (organization, rawName) to set of multiple full names. 
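+ * For example (illustrative coordinates): a report containing both `foo_2.12` and `foo_2.13`
+ * under organization `org` would yield `Map(("org", "foo") -> Set("foo_2.12", "foo_2.13"))`.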
*/ + def crossVersionMismatches(report: UpdateReport): Map[(String, String), Set[String]] = { + val mismatches = report.configurations.flatMap { confReport => + groupByRawName(confReport.allModules).view.mapValues { modules => + val differentFullNames = modules.map(_.name).toSet + if (differentFullNames.size > 1) differentFullNames else Set.empty[String] + } + } + mismatches.foldLeft(Map.empty[(String, String), Set[String]])(merge) + } + private[this] def merge[A, B](m: Map[A, Set[B]], b: (A, Set[B])): Map[A, Set[B]] = + if (b._2.isEmpty) m + else + m.updated(b._1, m.getOrElse(b._1, Set.empty) ++ b._2) + + private[this] def groupByRawName(ms: Seq[ModuleID]): Map[(String, String), Seq[ModuleID]] = + ms.groupBy(m => (m.organization, dropCrossSuffix(m.name))) + + private[this] val CrossSuffixPattern = """(.+)_(\d+(?:\.\d+)?(?:\.\d+)?(?:-.+)?)""".r + private[this] def dropCrossSuffix(s: String): String = s match { + case CrossSuffixPattern(raw, _) => raw + case _ => s + } + private[this] def getCrossSuffix(s: String): String = s match { + case CrossSuffixPattern(_, v) => "_" + v + case _ => "" + } + +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/CrossVersion.scala b/lm-core/src/main/scala/sbt/librarymanagement/CrossVersion.scala new file mode 100644 index 000000000..e447cb6a7 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/CrossVersion.scala @@ -0,0 +1,494 @@ +/** + * This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ +package sbt.librarymanagement + +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } + +/** Configures how a module will be cross-versioned. */ +abstract class CrossVersion() extends Serializable { + override def equals(o: Any): Boolean = o match { + case _: CrossVersion => true + case _ => false + } + override def hashCode: Int = { + 37 * (17 + "sbt.librarymanagement.CrossVersion".##) + } + override def toString: String = { + "CrossVersion()" + } +} +object CrossVersion extends sbt.librarymanagement.CrossVersionFunctions {} + +/** Disables cross versioning for a module. */ +sealed class Disabled private () extends sbt.librarymanagement.CrossVersion() with Serializable { + + override def equals(o: Any): Boolean = o match { + case _: Disabled => true + case _ => false + } + override def hashCode: Int = { + 37 * (17 + "sbt.librarymanagement.Disabled".##) + } + override def toString: String = { + "Disabled()" + } + private[this] def copy(): Disabled = { + new Disabled() + } + +} +object Disabled extends sbt.librarymanagement.Disabled { + def apply(): Disabled = Disabled +} + +/** + * Cross-versions a module using the result of + * prepending `prefix` and appending `suffix` to the binary version. + * For example, if `prefix = "foo_"` and `suffix = "_bar"` and the binary version is "2.10", + * the module is cross-versioned with "foo_2.10_bar". 
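+ * An illustrative sketch, reusing the prefix and suffix from the example above:
+ * {{{
+ * // the two values below configure the same cross-versioning behaviour
+ * val cv: CrossVersion = Binary("foo_", "_bar")
+ * val cv2: CrossVersion = CrossVersion.binaryWith("foo_", "_bar")
+ * }}}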
+ */ +final class Binary private (val prefix: String, val suffix: String) + extends sbt.librarymanagement.CrossVersion() + with Serializable { + + private def this() = this("", "") + + override def equals(o: Any): Boolean = o match { + case x: Binary => (this.prefix == x.prefix) && (this.suffix == x.suffix) + case _ => false + } + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.Binary".##) + prefix.##) + suffix.##) + } + override def toString: String = { + "Binary(" + prefix + ", " + suffix + ")" + } + private[this] def copy(prefix: String = prefix, suffix: String = suffix): Binary = { + new Binary(prefix, suffix) + } + def withPrefix(prefix: String): Binary = { + copy(prefix = prefix) + } + def withSuffix(suffix: String): Binary = { + copy(suffix = suffix) + } +} +object Binary { + + def apply(): Binary = new Binary() + def apply(prefix: String, suffix: String): Binary = new Binary(prefix, suffix) +} + +/** Cross-versions a module using the string `value`. */ +final class Constant private (val value: String) + extends sbt.librarymanagement.CrossVersion() + with Serializable { + + private def this() = this("") + + override def equals(o: Any): Boolean = o match { + case x: Constant => (this.value == x.value) + case _ => false + } + override def hashCode: Int = { + 37 * (37 * (17 + "sbt.librarymanagement.Constant".##) + value.##) + } + override def toString: String = { + "Constant(" + value + ")" + } + private[this] def copy(value: String = value): Constant = { + new Constant(value) + } + def withValue(value: String): Constant = { + copy(value = value) + } +} +object Constant { + def apply(): Constant = new Constant() + def apply(value: String): Constant = new Constant(value) +} + +/** + * Cross-versions a module by stripping off -bin-suffix. + * This is intended for patch-version compatible alternative replacements. + */ +final class Patch private () extends sbt.librarymanagement.CrossVersion() with Serializable { + + override def equals(o: Any): Boolean = o match { + case _: Patch => true + case _ => false + } + override def hashCode: Int = { + 37 * (17 + "sbt.librarymanagement.Patch".##) + } + override def toString: String = { + "Patch()" + } + private[this] def copy(): Patch = { + new Patch() + } +} + +object Patch { + def apply(): Patch = new Patch() +} + +/** + * Cross-versions a module with the result of + * prepending `prefix` and appending `suffix` to the full version. + * For example, if `prefix = "foo_"` and `suffix = "_bar"` and the full version is "2.12.1", + * the module is cross-versioned with "foo_2.12.1_bar". 
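+ * Typically used (e.g. for compiler plugins) when an artifact is published against the full
+ * Scala version; a hedged sketch with hypothetical coordinates:
+ * {{{
+ * ("org.example" % "example-plugin" % "1.0.0").cross(CrossVersion.full)
+ * }}}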
+ */ +final class Full private (val prefix: String, val suffix: String) + extends sbt.librarymanagement.CrossVersion() + with Serializable { + + private def this() = this("", "") + + override def equals(o: Any): Boolean = o match { + case x: Full => (this.prefix == x.prefix) && (this.suffix == x.suffix) + case _ => false + } + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.Full".##) + prefix.##) + suffix.##) + } + override def toString: String = { + "Full(" + prefix + ", " + suffix + ")" + } + private[this] def copy(prefix: String = prefix, suffix: String = suffix): Full = { + new Full(prefix, suffix) + } + def withPrefix(prefix: String): Full = { + copy(prefix = prefix) + } + def withSuffix(suffix: String): Full = { + copy(suffix = suffix) + } +} + +object Full { + def apply(): Full = new Full() + def apply(prefix: String, suffix: String): Full = new Full(prefix, suffix) +} + +/** + * Similar to Binary except that if the binary version is 3 + * (or if it is of the form 3.0.0-x) it uses 2.13 instead. + * For example, if `prefix = "foo_"` and `suffix = "_bar"` and the binary version is "3", + * the module is cross-versioned with "foo_2.13_bar". + */ +final class For3Use2_13 private (val prefix: String, val suffix: String) + extends sbt.librarymanagement.CrossVersion() + with Serializable { + + private def this() = this("", "") + + override def equals(o: Any): Boolean = o match { + case x: For3Use2_13 => (this.prefix == x.prefix) && (this.suffix == x.suffix) + case _ => false + } + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.For3Use2_13".##) + prefix.##) + suffix.##) + } + override def toString: String = { + "For3Use2_13(" + prefix + ", " + suffix + ")" + } + private[this] def copy(prefix: String = prefix, suffix: String = suffix): For3Use2_13 = { + new For3Use2_13(prefix, suffix) + } + def withPrefix(prefix: String): For3Use2_13 = { + copy(prefix = prefix) + } + def withSuffix(suffix: String): For3Use2_13 = { + copy(suffix = suffix) + } +} +object For3Use2_13 { + + def apply(): For3Use2_13 = new For3Use2_13() + def apply(prefix: String, suffix: String): For3Use2_13 = new For3Use2_13(prefix, suffix) +} + +/** + * Similar to Binary except that if the binary version is 2.13 + * it uses 3 instead. + * For example, if `prefix = "foo_"` and `suffix = "_bar"` and the binary version is "2.13", + * the module is cross-versioned with "foo_3_bar". 
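+ * A hedged usage sketch (coordinates are hypothetical):
+ * {{{
+ * ("org.example" %% "shared-lib" % "1.0.0").cross(CrossVersion.for2_13Use3)
+ * }}}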
+ */ +final class For2_13Use3 private (val prefix: String, val suffix: String) + extends sbt.librarymanagement.CrossVersion() + with Serializable { + + private def this() = this("", "") + + override def equals(o: Any): Boolean = o match { + case x: For2_13Use3 => (this.prefix == x.prefix) && (this.suffix == x.suffix) + case _ => false + } + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.For3Use2_13".##) + prefix.##) + suffix.##) + } + override def toString: String = { + "For3Use2_13(" + prefix + ", " + suffix + ")" + } + private[this] def copy(prefix: String = prefix, suffix: String = suffix): For2_13Use3 = { + new For2_13Use3(prefix, suffix) + } + def withPrefix(prefix: String): For2_13Use3 = { + copy(prefix = prefix) + } + def withSuffix(suffix: String): For2_13Use3 = { + copy(suffix = suffix) + } +} +object For2_13Use3 { + + def apply(): For2_13Use3 = new For2_13Use3() + def apply(prefix: String, suffix: String): For2_13Use3 = new For2_13Use3(prefix, suffix) +} + +trait DisabledFormats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val DisabledFormat: JsonFormat[sbt.librarymanagement.Disabled] = + new JsonFormat[sbt.librarymanagement.Disabled] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Disabled = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + + unbuilder.endObject() + sbt.librarymanagement.Disabled() + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Disabled, builder: Builder[J]): Unit = { + builder.beginObject() + + builder.endObject() + } + } + + implicit lazy val DisabledObjectFormat: JsonFormat[sbt.librarymanagement.Disabled.type] = + new JsonFormat[sbt.librarymanagement.Disabled.type] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Disabled.type = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + + unbuilder.endObject() + sbt.librarymanagement.Disabled + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Disabled.type, builder: Builder[J]): Unit = { + builder.beginObject() + + builder.endObject() + } + } +} + +trait BinaryFormats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val BinaryFormat: JsonFormat[sbt.librarymanagement.Binary] = + new JsonFormat[sbt.librarymanagement.Binary] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Binary = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + val prefix = unbuilder.readField[String]("prefix") + val suffix = unbuilder.readField[String]("suffix") + unbuilder.endObject() + sbt.librarymanagement.Binary(prefix, suffix) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Binary, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("prefix", obj.prefix) + builder.addField("suffix", obj.suffix) + builder.endObject() + } + } +} + +trait ConstantFormats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val ConstantFormat: JsonFormat[sbt.librarymanagement.Constant] = + new JsonFormat[sbt.librarymanagement.Constant] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Constant = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + val value = 
unbuilder.readField[String]("value") + unbuilder.endObject() + sbt.librarymanagement.Constant(value) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Constant, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("value", obj.value) + builder.endObject() + } + } +} + +trait PatchFormats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val PatchFormat: JsonFormat[sbt.librarymanagement.Patch] = + new JsonFormat[sbt.librarymanagement.Patch] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Patch = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + + unbuilder.endObject() + sbt.librarymanagement.Patch() + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Patch, builder: Builder[J]): Unit = { + builder.beginObject() + + builder.endObject() + } + } +} + +trait FullFormats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val FullFormat: JsonFormat[sbt.librarymanagement.Full] = + new JsonFormat[sbt.librarymanagement.Full] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.Full = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + val prefix = unbuilder.readField[String]("prefix") + val suffix = unbuilder.readField[String]("suffix") + unbuilder.endObject() + sbt.librarymanagement.Full(prefix, suffix) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.Full, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("prefix", obj.prefix) + builder.addField("suffix", obj.suffix) + builder.endObject() + } + } +} + +trait For3Use2_13Formats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val For3Use2_13Format: JsonFormat[sbt.librarymanagement.For3Use2_13] = + new JsonFormat[sbt.librarymanagement.For3Use2_13] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.For3Use2_13 = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + val prefix = unbuilder.readField[String]("prefix") + val suffix = unbuilder.readField[String]("suffix") + unbuilder.endObject() + sbt.librarymanagement.For3Use2_13(prefix, suffix) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.For3Use2_13, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("prefix", obj.prefix) + builder.addField("suffix", obj.suffix) + builder.endObject() + } + } +} + +trait For2_13Use3Formats { self: sjsonnew.BasicJsonProtocol => + implicit lazy val For2_13Use3Format: JsonFormat[sbt.librarymanagement.For2_13Use3] = + new JsonFormat[sbt.librarymanagement.For2_13Use3] { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.librarymanagement.For2_13Use3 = { + jsOpt match { + case Some(js) => + unbuilder.beginObject(js) + val prefix = unbuilder.readField[String]("prefix") + val suffix = unbuilder.readField[String]("suffix") + unbuilder.endObject() + sbt.librarymanagement.For2_13Use3(prefix, suffix) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.For2_13Use3, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("prefix", obj.prefix) + 
builder.addField("suffix", obj.suffix) + builder.endObject() + } + } +} + +trait CrossVersionFormats { + self: sjsonnew.BasicJsonProtocol + with sbt.librarymanagement.DisabledFormats + with sbt.librarymanagement.BinaryFormats + with sbt.librarymanagement.ConstantFormats + with sbt.librarymanagement.PatchFormats + with sbt.librarymanagement.FullFormats + with sbt.librarymanagement.For3Use2_13Formats + with sbt.librarymanagement.For2_13Use3Formats => + implicit lazy val CrossVersionFormat: JsonFormat[CrossVersion] = { + val format = flatUnionFormat8[ + CrossVersion, + Disabled, + Disabled.type, + Binary, + Constant, + Patch, + Full, + For3Use2_13, + For2_13Use3 + ]("type") + // This is a hand-crafted formatter to avoid Disabled$ showing up in JSON + new JsonFormat[CrossVersion] { + override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): CrossVersion = + format.read(jsOpt, unbuilder) + override def write[J](obj: CrossVersion, builder: Builder[J]): Unit = { + if (obj == Disabled) { + builder.beginPreObject() + builder.addFieldName("type") + builder.writeString("Disabled") + builder.endPreObject() + builder.beginObject() + builder.endObject() + } else format.write(obj, builder) + } + } + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/CrossVersionExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/CrossVersionExtra.scala new file mode 100644 index 000000000..546089928 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/CrossVersionExtra.scala @@ -0,0 +1,244 @@ +package sbt.librarymanagement + +import sbt.internal.librarymanagement.cross.CrossVersionUtil +import sbt.librarymanagement.syntax._ + +final case class ScalaVersion(full: String, binary: String) + +private[librarymanagement] abstract class CrossVersionFunctions { + + /** Compatibility with 0.13 */ + @deprecated( + "use CrossVersion.disabled instead. prior to sbt 1.3.0, Disabled did not work without apply(). sbt/sbt#4977", + "1.3.0" + ) + final val Disabled = sbt.librarymanagement.Disabled + final val Binary = sbt.librarymanagement.Binary + final val Constant = sbt.librarymanagement.Constant + final val Full = sbt.librarymanagement.Full + final val Patch = sbt.librarymanagement.Patch + type Binary = sbt.librarymanagement.Binary + type Constant = sbt.librarymanagement.Constant + type Full = sbt.librarymanagement.Full + type Patch = sbt.librarymanagement.Patch + + /** The first `major.minor` Scala version that the Scala binary version should be used for cross-versioning instead of the full version. */ + val TransitionScalaVersion = CrossVersionUtil.TransitionScalaVersion + + /** The first `major.minor` sbt version that the sbt binary version should be used for cross-versioning instead of the full version. */ + val TransitionSbtVersion = CrossVersionUtil.TransitionSbtVersion + + /** Cross-versions a module with the full version (typically the full Scala version). */ + def full: CrossVersion = Full() + + /** + * Cross-versions a module with the result of prepending `prefix` and appending `suffix` to the full version. + * (typically the full Scala version). See also [[sbt.librarymanagement.Full]] + */ + def fullWith(prefix: String, suffix: String): CrossVersion = Full(prefix, suffix) + + /** Cross-versions a module with the binary version (typically the binary Scala version). */ + def binary: CrossVersion = Binary() + + /** Disables cross versioning for a module. 
*/ + def disabled: CrossVersion = sbt.librarymanagement.Disabled + + /** Cross-versions a module with a constant string (typically the binary Scala version). */ + def constant(value: String): CrossVersion = Constant(value) + + /** + * Cross-versions a module with the result of prepending `prefix` and appending `suffix` to the binary version + * (typically the binary Scala version). See also [[sbt.librarymanagement.Binary]]. + */ + def binaryWith(prefix: String, suffix: String): CrossVersion = Binary(prefix, suffix) + + /** + * Cross-versions a module with the full Scala version excluding any `-bin` suffix. + */ + def patch: CrossVersion = Patch() + + /** + * Cross-versions a module with the binary version but + * if the binary version is 3 (or of the form 3.0.0-x), cross-versions it with 2.13 instead + */ + def for3Use2_13: CrossVersion = For3Use2_13() + + /** + * Cross-versions a module with the binary version but + * if the binary version is 3 (or of the form 3.0.0-x), cross-versions it with 2.13 instead + * Always prepend `prefix` and append `suffix` + */ + def for3Use2_13With(prefix: String, suffix: String): CrossVersion = For3Use2_13(prefix, suffix) + + /** + * Cross-versions a module with the binary version but + * if the binary version is 2.13 cross-versions it with 3 instead + */ + def for2_13Use3: CrossVersion = For2_13Use3() + + /** + * Cross-versions a module with the binary version but + * if the binary version is 2.13 cross-versions it with 3 instead + * Always prepend `prefix` and append `suffix` + */ + def for2_13Use3With(prefix: String, suffix: String): CrossVersion = For2_13Use3(prefix, suffix) + + private[sbt] def getPrefixSuffix(value: CrossVersion): (String, String) = + value match { + case (_: Disabled | _: Constant | _: Patch) => ("", "") + case b: Binary => (b.prefix, b.suffix) + case f: Full => (f.prefix, f.suffix) + case c: For3Use2_13 => (c.prefix, c.suffix) + case c: For2_13Use3 => (c.prefix, c.suffix) + } + + private[sbt] def setPrefixSuffix(value: CrossVersion, p: String, s: String): CrossVersion = + value match { + case (_: Disabled | _: Constant | _: Patch) => value + case b: Binary => b.withPrefix(p).withSuffix(s) + case f: Full => f.withPrefix(p).withSuffix(s) + case c: For3Use2_13 => c.withPrefix(p).withSuffix(s) + case c: For2_13Use3 => c.withPrefix(p).withSuffix(s) + } + + private[sbt] def patchFun(fullVersion: String): String = { + import sbt.internal.librarymanagement.cross.CrossVersionUtil.BinCompatV + fullVersion match { + case BinCompatV(x, y, z, w, _) => s"""$x.$y.$z${if (w == null) "" else w}""" + case other => other + } + } + + private[sbt] def append(s: String): Option[String => String] = Some(x => crossName(x, s)) + + /** + * Construct a cross-versioning function given cross-versioning configuration `cross`, + * full version `fullVersion` and binary version `binaryVersion`. The behavior of the + * constructed function is as documented for the [[sbt.librarymanagement.CrossVersion]] datatypes. 
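+ * A rough sketch of the expected behaviour (names and versions are illustrative):
+ * {{{
+ * CrossVersion(Binary(), "2.13.12", "2.13").map(_("cats-core"))   // Some("cats-core_2.13")
+ * CrossVersion(Disabled(), "2.13.12", "2.13")                     // None
+ * }}}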
+ */ + def apply( + cross: CrossVersion, + fullVersion: String, + binaryVersion: String + ): Option[String => String] = + cross match { + case _: Disabled => None + case b: Binary => append(b.prefix + binaryVersion + b.suffix) + case c: Constant => append(c.value) + case _: Patch => append(patchFun(fullVersion)) + case f: Full => append(f.prefix + fullVersion + f.suffix) + case c: For3Use2_13 => + val compat = + if (binaryVersion == "3" || binaryVersion.startsWith("3.0.0")) "2.13" + else binaryVersion + append(c.prefix + compat + c.suffix) + case c: For2_13Use3 => + val compat = + if (binaryVersion == "2.13") "3" + else binaryVersion + append(c.prefix + compat + c.suffix) + } + + /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */ + def apply(module: ModuleID, is: ScalaModuleInfo): Option[String => String] = + CrossVersion(module.crossVersion, is.scalaFullVersion, is.scalaBinaryVersion) + + /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */ + def apply(module: ModuleID, is: Option[ScalaModuleInfo]): Option[String => String] = + is.flatMap(i => apply(module, i)) + + /** Cross-version each `Artifact` in `artifacts` according to cross-version function `cross`. */ + def substituteCross( + artifacts: Vector[Artifact], + cross: Option[String => String] + ): Vector[Artifact] = + cross match { + case None => artifacts + case Some(_) => substituteCrossA(artifacts, cross) + } + + private[sbt] def applyCross(s: String, fopt: Option[String => String]): String = + fopt match { + case None => s + case Some(fopt) => fopt(s) + } + + private[sbt] def crossName(name: String, cross: String): String = + name + "_" + cross + + /** Cross-versions `exclude` according to its `crossVersion`. */ + private[sbt] def substituteCross( + exclude: ExclusionRule, + is: Option[ScalaModuleInfo] + ): ExclusionRule = { + val fopt: Option[String => String] = + is flatMap { i => + CrossVersion(exclude.crossVersion, i.scalaFullVersion, i.scalaBinaryVersion) + } + exclude.withName(applyCross(exclude.name, fopt)) + } + + /** Cross-versions `a` according to cross-version function `cross`. */ + def substituteCross(a: Artifact, cross: Option[String => String]): Artifact = + a.withName(applyCross(a.name, cross)) + + private[sbt] def substituteCrossA( + as: Vector[Artifact], + cross: Option[String => String] + ): Vector[Artifact] = as.map(art => substituteCross(art, cross)) + + /** + * Constructs a function that will cross-version a ModuleID + * for the given full and binary Scala versions `scalaFullVersion` and `scalaBinaryVersion` + * according to the ModuleID's cross-versioning setting. + */ + def apply(scalaFullVersion: String, scalaBinaryVersion: String): ModuleID => ModuleID = m => { + val cross = apply(m.crossVersion, scalaFullVersion, scalaBinaryVersion) + if (cross.isDefined) + m.withName(applyCross(m.name, cross)) + .withExplicitArtifacts(substituteCrossA(m.explicitArtifacts, cross)) + else + m + } + + def isSbtApiCompatible(v: String): Boolean = CrossVersionUtil.isSbtApiCompatible(v) + + /** + * Returns sbt binary interface x.y API compatible with the given version string v. + * RCs for x.y.0 are considered API compatible. + * Compatible versions include 0.12.0-1 and 0.12.0-RC1 for Some(0, 12). 
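+ * An illustrative sketch:
+ * {{{
+ * CrossVersion.sbtApiVersion("0.13.18")   // Some((0L, 13L))
+ * CrossVersion.sbtApiVersion("1.9.8")     // Some((1L, 0L)); all sbt 1.x map to API 1.0
+ * }}}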
+ */ + def sbtApiVersion(v: String): Option[(Long, Long)] = CrossVersionUtil.sbtApiVersion(v) + + def isScalaApiCompatible(v: String): Boolean = CrossVersionUtil.isScalaApiCompatible(v) + + /** + * Returns Scala binary interface x.y API compatible with the given version string v. + * Compatible versions include 2.10.0-1 and 2.10.1-M1 for Some(2, 10), but not 2.10.0-RC1. + */ + def scalaApiVersion(v: String): Option[(Long, Long)] = CrossVersionUtil.scalaApiVersion(v) + + /** Regular expression that extracts the major and minor components of a version into matched groups 1 and 2. */ + val PartialVersion = CrossVersionUtil.PartialVersion + + /** Extracts the major and minor components of a version string `s` or returns `None` if the version is improperly formatted. */ + def partialVersion(s: String): Option[(Long, Long)] = CrossVersionUtil.partialVersion(s) + + /** + * Computes the binary Scala version from the `full` version. + * Full Scala versions earlier than [[sbt.librarymanagement.CrossVersion.TransitionScalaVersion]] are returned as is. + */ + def binaryScalaVersion(full: String): String = CrossVersionUtil.binaryScalaVersion(full) + + /** + * Computes the binary sbt version from the `full` version. + * Full sbt versions earlier than [[sbt.librarymanagement.CrossVersion.TransitionSbtVersion]] are returned as is. + */ + def binarySbtVersion(full: String): String = CrossVersionUtil.binarySbtVersion(full) + + /** + * Returns `true` if a project targeting version `origVersion` can run with version `newVersion`. + */ + def isScalaBinaryCompatibleWith(newVersion: String, origVersion: String): Boolean = + CrossVersionUtil.isScalaBinaryCompatibleWith(newVersion, origVersion) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/DependencyBuilders.scala b/lm-core/src/main/scala/sbt/librarymanagement/DependencyBuilders.scala new file mode 100755 index 000000000..99e64106d --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/DependencyBuilders.scala @@ -0,0 +1,75 @@ +/* sbt -- Simple Build Tool + * Copyright 2009,2010 Mark Harrah + */ +package sbt.librarymanagement + +import sbt.internal.librarymanagement.StringUtilities.nonEmpty + +/** + * DependencyBuilders implements the implicits for % and %% DSL. 
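+ * An illustrative sketch (coordinates are hypothetical):
+ * {{{
+ * "org.example" % "plain-java-lib" % "1.0.0"        // not cross-versioned (Disabled)
+ * "org.example" %% "scala-lib" % "1.0.0"            // binary cross-versioned, e.g. scala-lib_2.13
+ * "org.example" %% "scala-lib" % "1.0.0" % Test     // additionally scoped to the Test configuration
+ * }}}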
+ */ +trait DependencyBuilders { + // See http://www.scala-lang.org/news/2.12.0#traits-compile-to-interfaces + // Avoid defining fields (val or var, but a constant is ok – final val without result type) + // Avoid calling super + // Avoid initializer statements in the body + + import DependencyBuilders._ + + implicit def stringToOrganization(organization: String): Organization = { + nonEmpty(organization, "Organization") + new Organization(organization) + } + + implicit def toRepositoryName(name: String): RepositoryName = { + nonEmpty(name, "Repository name") + new RepositoryName(name) + } + + implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable = { + require(m.configurations.isEmpty, "Configurations already specified for module " + m) + new ModuleIDConfigurable(m) + } +} + +object DependencyBuilders { + final class Organization private[sbt] (private[sbt] val organization: String) { + def %(name: String) = organizationArtifact(name, Disabled()) + def %%(name: String): OrganizationArtifactName = + organizationArtifact(name, CrossVersion.binary) + + private def organizationArtifact(name: String, cross: CrossVersion) = { + nonEmpty(name, "Name") + new OrganizationArtifactName(organization, name, cross) + } + } + + final class OrganizationArtifactName private[sbt] ( + private[sbt] val organization: String, + private[sbt] val name: String, + private[sbt] val crossVersion: CrossVersion + ) { + def %(revision: String): ModuleID = { + nonEmpty(revision, "Revision") + ModuleID(organization, name, revision).cross(crossVersion) + } + } + + final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID) { + def %(configuration: Configuration): ModuleID = %(configuration.name) + def %(configuration: ConfigRef): ModuleID = %(configuration.name) + + def %(configurations: String): ModuleID = { + nonEmpty(configurations, "Configurations") + val c = configurations + moduleID.withConfigurations(configurations = Some(c)) + } + } + + final class RepositoryName private[sbt] (name: String) { + def at(location: String): MavenRepository = { + nonEmpty(location, "Repository location") + MavenRepository(name, location) + } + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/DependencyFilter.scala b/lm-core/src/main/scala/sbt/librarymanagement/DependencyFilter.scala new file mode 100644 index 000000000..3aa96f30d --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/DependencyFilter.scala @@ -0,0 +1,109 @@ +/* sbt -- Simple Build Tool + * Copyright 2011 Mark Harrah + */ +package sbt.librarymanagement + +import sbt.io.{ AllPassFilter, NameFilter } + +trait DependencyFilterExtra { + // See http://www.scala-lang.org/news/2.12.0#traits-compile-to-interfaces + // Avoid defining fields (val or var, but a constant is ok – final val without result type) + // Avoid calling super + // Avoid initializer statements in the body + + def moduleFilter( + organization: NameFilter = AllPassFilter, + name: NameFilter = AllPassFilter, + revision: NameFilter = AllPassFilter + ): ModuleFilter = + new ModuleFilter { + def apply(m: ModuleID): Boolean = + organization.accept(m.organization) && name.accept(m.name) && revision.accept(m.revision) + } + + def artifactFilter( + name: NameFilter = AllPassFilter, + `type`: NameFilter = AllPassFilter, + extension: NameFilter = AllPassFilter, + classifier: NameFilter = AllPassFilter + ): ArtifactFilter = + new ArtifactFilter { + def apply(a: Artifact): Boolean = + name.accept(a.name) && `type`.accept(a.`type`) && extension.accept( + a.extension + ) && 
classifier + .accept(a.classifier getOrElse "") + } + + def configurationFilter(name: NameFilter = AllPassFilter): ConfigurationFilter = + new ConfigurationFilter { + def apply(c: ConfigRef): Boolean = name.accept(c.name) + } +} + +object DependencyFilter extends DependencyFilterExtra { + def make( + configuration: ConfigurationFilter = configurationFilter(), + module: ModuleFilter = moduleFilter(), + artifact: ArtifactFilter = artifactFilter() + ): DependencyFilter = + new DependencyFilter { + def apply(c: ConfigRef, m: ModuleID, a: Artifact): Boolean = + configuration(c) && module(m) && artifact(a) + } + def apply( + x: DependencyFilter, + y: DependencyFilter, + combine: (Boolean, Boolean) => Boolean + ): DependencyFilter = + new DependencyFilter { + def apply(c: ConfigRef, m: ModuleID, a: Artifact): Boolean = combine(x(c, m, a), y(c, m, a)) + } + def allPass: DependencyFilter = configurationFilter() + implicit def fnToModuleFilter(f: ModuleID => Boolean): ModuleFilter = new ModuleFilter { + def apply(m: ModuleID) = f(m) + } + implicit def fnToArtifactFilter(f: Artifact => Boolean): ArtifactFilter = new ArtifactFilter { + def apply(m: Artifact) = f(m) + } + implicit def fnToConfigurationFilter(f: ConfigRef => Boolean): ConfigurationFilter = + new ConfigurationFilter { def apply(c: ConfigRef) = f(c) } + implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, _]): Arg => Boolean = f apply _ +} +trait DependencyFilter { + def apply(configuration: ConfigRef, module: ModuleID, artifact: Artifact): Boolean + final def &&(o: DependencyFilter) = DependencyFilter(this, o, _ && _) + final def ||(o: DependencyFilter) = DependencyFilter(this, o, _ || _) + final def --(o: DependencyFilter) = DependencyFilter(this, o, _ && !_) +} +sealed trait SubDepFilter[Arg, Self <: SubDepFilter[Arg, Self]] extends DependencyFilter { + self: Self => + def apply(a: Arg): Boolean + protected def make(f: Arg => Boolean): Self + final def &(o: Self): Self = combine(o, _ && _) + final def |(o: Self): Self = combine(o, _ || _) + final def -(o: Self): Self = combine(o, _ && !_) + private[this] def combine(o: Self, f: (Boolean, Boolean) => Boolean): Self = + make((m: Arg) => f(this(m), o(m))) +} +trait ModuleFilter extends SubDepFilter[ModuleID, ModuleFilter] { + protected final def make(f: ModuleID => Boolean) = new ModuleFilter { + def apply(m: ModuleID) = f(m) + } + final def apply(configuration: ConfigRef, module: ModuleID, artifact: Artifact): Boolean = + apply(module) +} +trait ArtifactFilter extends SubDepFilter[Artifact, ArtifactFilter] { + protected final def make(f: Artifact => Boolean) = new ArtifactFilter { + def apply(m: Artifact) = f(m) + } + final def apply(configuration: ConfigRef, module: ModuleID, artifact: Artifact): Boolean = + apply(artifact) +} +trait ConfigurationFilter extends SubDepFilter[ConfigRef, ConfigurationFilter] { + protected final def make(f: ConfigRef => Boolean) = new ConfigurationFilter { + def apply(m: ConfigRef) = f(m) + } + final def apply(configuration: ConfigRef, module: ModuleID, artifact: Artifact): Boolean = + apply(configuration) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/DependencyResolution.scala b/lm-core/src/main/scala/sbt/librarymanagement/DependencyResolution.scala new file mode 100644 index 000000000..8a0ccd996 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/DependencyResolution.scala @@ -0,0 +1,210 @@ +package sbt.librarymanagement + +import java.io.File +import sbt.util.Logger +import sbt.io.Hash +import sbt.librarymanagement.syntax._ 
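+
+// Illustrative use of the DependencyFilter DSL defined above (filter arguments are hypothetical):
+//   import sbt.io.GlobFilter
+//   val onlyCompileJars: DependencyFilter =
+//     DependencyFilter.configurationFilter(GlobFilter("compile")) &&
+//       DependencyFilter.artifactFilter(`type` = GlobFilter("jar"))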
+ +/** + * Library management API to resolve dependencies. + */ +class DependencyResolution private[sbt] (lmEngine: DependencyResolutionInterface) { + import sbt.internal.librarymanagement.InternalDefaults._ + import sbt.internal.librarymanagement.UpdateClassifiersUtil._ + + /** + * Builds a ModuleDescriptor that describes a subproject with dependencies. + * + * @param moduleSetting It contains the information about the module including the dependencies. + * @return A `ModuleDescriptor` describing a subproject and its dependencies. + */ + def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor = + lmEngine.moduleDescriptor(moduleSetting) + + /** + * Build a ModuleDescriptor that describes a subproject with dependencies. + * + * @param moduleId The root module for which to create a `ModuleDescriptor`. + * @param directDependencies The direct dependencies of the module. + * @param scalaModuleInfo The information about the Scala version used, if any. + * @param configurations The configurations that this module has. + * @return A `ModuleDescriptor` describing a subproject and its dependencies. + */ + def moduleDescriptor( + moduleId: ModuleID, + directDependencies: Vector[ModuleID], + scalaModuleInfo: Option[ScalaModuleInfo] + ): ModuleDescriptor = { + val moduleSetting = ModuleDescriptorConfiguration(moduleId, ModuleInfo(moduleId.name)) + .withScalaModuleInfo(scalaModuleInfo) + .withDependencies(directDependencies) + moduleDescriptor(moduleSetting) + } + + /** + * Resolves the given module's dependencies performing a retrieval. + * + * @param module The module to be resolved. + * @param configuration The update configuration. + * @param uwconfig The configuration to handle unresolved warnings. + * @param log The logger. + * @return The result, either an unresolved warning or an update report. Note that this + * update report will or will not be successful depending on the `missingOk` option. + */ + def update( + module: ModuleDescriptor, + configuration: UpdateConfiguration, + uwconfig: UnresolvedWarningConfiguration, + log: Logger + ): Either[UnresolvedWarning, UpdateReport] = + lmEngine.update(module, configuration, uwconfig, log) + + /** + * Returns a `ModuleDescriptor` that depends on `dependencyId`. + * + * @param dependencyId The module to depend on. + * @return A `ModuleDescriptor` that depends on `dependencyId`. + */ + def wrapDependencyInModule(dependencyId: ModuleID): ModuleDescriptor = + wrapDependencyInModule(dependencyId, None) + + /** + * Returns a `ModuleDescriptor` that depends on `dependencyId`. + * + * @param dependencyId The module to depend on. + * @param scalaModuleInfo The information about the Scala verson used, if any. + * @return A `ModuleDescriptor` that depends on `dependencyId`. + */ + def wrapDependencyInModule( + dependencyId: ModuleID, + scalaModuleInfo: Option[ScalaModuleInfo] + ): ModuleDescriptor = { + val sha1 = Hash.toHex(Hash(dependencyId.name)) + val dummyID = ModuleID(sbtOrgTemp, modulePrefixTemp + sha1, dependencyId.revision) + .withConfigurations(dependencyId.configurations) + moduleDescriptor(dummyID, Vector(dependencyId), scalaModuleInfo) + } + + /** + * Resolves the given dependency, and retrieves the artifacts to a directory. + * + * @param dependencyId The dependency to be resolved. + * @param scalaModuleInfo The module info about Scala. + * @param retrieveDirectory The directory to retrieve the files. + * @param log The logger. + * @return The result, either an unresolved warning or a sequence of files. 
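// --- Editorial sketch, not part of the patch -------------------------------
// The typical flow through the API documented above: wrap a single dependency
// in a synthetic module, then resolve it. The engine, logger and module values
// are assumptions; any DependencyResolutionInterface implementation can be
// wrapped this way. `retrieve`, defined just below, does the same and copies
// the resolved files into a directory.
object DependencyResolutionExample {
  import sbt.librarymanagement._
  import sbt.util.Logger

  def resolveOne(lm: DependencyResolution, log: Logger): Either[UnresolvedWarning, UpdateReport] = {
    val dep    = ModuleID("com.example", "lib_2.13", "1.0.0") // illustrative coordinates
    val module = lm.wrapDependencyInModule(dep)
    lm.update(module, UpdateConfiguration(), UnresolvedWarningConfiguration(), log)
  }
}
// ----------------------------------------------------------------------------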
+ */ + def retrieve( + dependencyId: ModuleID, + scalaModuleInfo: Option[ScalaModuleInfo], + retrieveDirectory: File, + log: Logger + ): Either[UnresolvedWarning, Vector[File]] = + retrieve(wrapDependencyInModule(dependencyId, scalaModuleInfo), retrieveDirectory, log) + + /** + * Resolves the given module's dependencies, and retrieves the artifacts to a directory. + * + * @param module The module to be resolved. + * @param retrieveDirectory The directory to retrieve the files. + * @param log The logger. + * @return The result, either an unresolved warning or a sequence of files. + */ + def retrieve( + module: ModuleDescriptor, + retrieveDirectory: File, + log: Logger + ): Either[UnresolvedWarning, Vector[File]] = { + // Using the default artifact type filter here, so sources and docs are excluded. + val retrieveConfiguration = RetrieveConfiguration() + .withRetrieveDirectory(retrieveDirectory) + val updateConfiguration = UpdateConfiguration() + .withRetrieveManaged(retrieveConfiguration) + // .withMissingOk(true) + log.debug(s"Attempting to fetch ${directDependenciesNames(module)}. This operation may fail.") + update( + module, + updateConfiguration, + UnresolvedWarningConfiguration(), + log + ) match { + case Left(unresolvedWarning) => Left(unresolvedWarning) + case Right(updateReport) => + val allFiles = + for { + conf <- updateReport.configurations + m <- conf.modules + (_, f) <- m.artifacts + } yield f + log.debug(s"Files retrieved for ${directDependenciesNames(module)}:") + log.debug(allFiles mkString ", ") + // allFiles filter predicate match { + // case Seq() => None + // case files => Some(files) + // } + Right(allFiles) + } + } + + /** + * Creates explicit artifacts for each classifier in `config.module`, and then attempts to resolve them directly. This + * is for Maven compatibility, where these artifacts are not "published" in the POM, so they don't end up in the Ivy + * that sbt generates for them either.
+ * The resulting artifacts can be obtained by calling `toSeq` on the returned `UpdateReport`.
+ * In addition, retrieves specific Ivy artifacts if they have one of the requested `config.configuration.types`. + * @param config important to set `config.configuration.types` to only allow artifact types that can correspond to + * "classified" artifacts (sources and javadocs). + */ + def updateClassifiers( + config: GetClassifiersConfiguration, + uwconfig: UnresolvedWarningConfiguration, + artifacts: Vector[(String, ModuleID, Artifact, File)], + log: Logger + ): Either[UnresolvedWarning, UpdateReport] = { + import config.module._ + val artifactFilter = getArtifactTypeFilter(config.updateConfiguration.artifactFilter) + assert(classifiers.nonEmpty, "classifiers cannot be empty") + assert(artifactFilter.types.nonEmpty, "UpdateConfiguration must filter on some types") + val baseModules = dependencies map { m => + restrictedCopy(m, true) + } + // Adding list of explicit artifacts here. + val exls = Map(config.excludes map { case (k, v) => (k, v.toSet) }: _*) + val deps = baseModules.distinct flatMap classifiedArtifacts(classifiers, exls, artifacts) + val base = restrictedCopy(id, true).withName(id.name + classifiers.mkString("$", "_", "")) + val moduleSetting = ModuleDescriptorConfiguration(base, ModuleInfo(base.name)) + .withScalaModuleInfo(scalaModuleInfo) + .withDependencies(deps) + .withConfigurations(configurations) + val module = moduleDescriptor(moduleSetting) + + // c.copy ensures c.types is preserved too + val upConf = config.updateConfiguration.withMissingOk(true) + update(module, upConf, uwconfig, log) match { + case Right(r) => + // The artifacts that came from Ivy don't have their classifier set, let's set it according to + // FIXME: this is only done because IDE plugins depend on `classifier` to determine type. They + val typeClassifierMap: Map[String, String] = + ((config.sourceArtifactTypes.toSeq map (_ -> Artifact.SourceClassifier)) + :: (config.docArtifactTypes.toSeq map (_ -> Artifact.DocClassifier)) :: Nil).flatten.toMap + Right(r.substitute { (conf, mid, artFileSeq) => + artFileSeq map { case (art, f) => + // Deduce the classifier from the type if no classifier is present already + art.withClassifier(art.classifier orElse typeClassifierMap.get(art.`type`)) -> f + } + }) + case Left(w) => Left(w) + } + } + + protected def directDependenciesNames(module: ModuleDescriptor): String = + (module.directDependencies map { case mID: ModuleID => + import mID._ + s"$organization % $name % $revision" + }).mkString(", ") +} + +object DependencyResolution { + def apply(lmEngine: DependencyResolutionInterface): DependencyResolution = + new DependencyResolution(lmEngine) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/EvictionError.scala b/lm-core/src/main/scala/sbt/librarymanagement/EvictionError.scala new file mode 100644 index 000000000..da0e03033 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/EvictionError.scala @@ -0,0 +1,183 @@ +package sbt +package librarymanagement + +import scala.collection.mutable +import sbt.internal.librarymanagement.VersionSchemes +import sbt.util.{ Level, ShowLines } +import EvictionWarningOptions.isNameScalaSuffixed + +object EvictionError { + def apply( + report: UpdateReport, + module: ModuleDescriptor, + schemes: Seq[ModuleID], + ): EvictionError = { + apply(report, module, schemes, "always", "always", Level.Debug) + } + + def apply( + report: UpdateReport, + module: ModuleDescriptor, + schemes: Seq[ModuleID], + assumedVersionScheme: String, + assumedVersionSchemeJava: String, + assumedEvictionErrorLevel: Level.Value, + 
): EvictionError = { + val options = EvictionWarningOptions.full + val evictions = EvictionWarning.buildEvictions(options, report) + processEvictions( + module, + options, + evictions, + schemes, + assumedVersionScheme, + assumedVersionSchemeJava, + assumedEvictionErrorLevel, + ) + } + + private[sbt] def processEvictions( + module: ModuleDescriptor, + options: EvictionWarningOptions, + reports: Seq[OrganizationArtifactReport], + schemes: Seq[ModuleID], + assumedVersionScheme: String, + assumedVersionSchemeJava: String, + assumedEvictionErrorLevel: Level.Value, + ): EvictionError = { + val directDependencies = module.directDependencies + val pairs = reports map { detail => + val evicteds = detail.modules filter { _.evicted } + val winner = (detail.modules filterNot { _.evicted }).headOption + new EvictionPair( + detail.organization, + detail.name, + winner, + evicteds, + true, + options.showCallers + ) + } + val incompatibleEvictions: mutable.ListBuffer[(EvictionPair, String)] = mutable.ListBuffer() + val assumedIncompatEvictions: mutable.ListBuffer[(EvictionPair, String)] = mutable.ListBuffer() + val sbvOpt = module.scalaModuleInfo.map(_.scalaBinaryVersion) + val userDefinedSchemes: Map[(String, String), String] = Map(schemes flatMap { s => + val organization = s.organization + VersionSchemes.validateScheme(s.revision) + val versionScheme = s.revision + (s.crossVersion, sbvOpt) match { + case (_: Binary, Some("2.13")) => + List( + (s.organization, s"${s.name}_2.13") -> versionScheme, + (s.organization, s"${s.name}_3") -> versionScheme + ) + case (_: Binary, Some(sbv)) if sbv.startsWith("3.0") || sbv == "3" => + List( + (s.organization, s"${s.name}_$sbv") -> versionScheme, + (s.organization, s"${s.name}_2.13") -> versionScheme + ) + case (_: Binary, Some(sbv)) => + List((s.organization, s"${s.name}_$sbv") -> versionScheme) + case _ => + List((s.organization, s.name) -> versionScheme) + } + }: _*) + + pairs foreach { + // don't report on a transitive eviction that does not have a winner + // https://github.com/sbt/sbt/issues/4946 + case p if p.winner.isDefined => + val winner = p.winner.get + + def hasIncompatibleVersionForScheme(scheme: String) = { + val isCompat = VersionSchemes.evalFunc(scheme) + p.evicteds.exists { r => + !isCompat((r.module, Some(winner.module), module.scalaModuleInfo)) + } + } + + // from libraryDependencyScheme or defined in the pom using the `info.versionScheme` attribute + val userDefinedSchemeOrFromPom = { + def fromLibraryDependencySchemes(org: String = "*", mod: String = "*") = + userDefinedSchemes.get((org, mod)) + def fromWinnerPom = VersionSchemes.extractFromExtraAttributes( + winner.extraAttributes.toMap ++ winner.module.extraAttributes + ) + + fromLibraryDependencySchemes(p.organization, p.name) // by org and name + .orElse(fromLibraryDependencySchemes(p.organization)) // for whole org + .orElse(fromWinnerPom) // from pom + .orElse(fromLibraryDependencySchemes()) // global + } + + // We want the user to be able to suppress eviction errors for a specific library, + // which would result in an incompatible eviction based on the assumed version scheme. + // So, only fall back to the assumed scheme if there is no given scheme by the user or the pom. 
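// --- Editorial sketch, not part of the patch -------------------------------
// Shows how the user-supplied `schemes` parameter above is typically shaped:
// each ModuleID carries a version-scheme name in its revision slot, and
// crossVersion decides whether the _2.13/_3 suffixed names are also covered.
// The scheme strings ("always", "pvp", "early-semver") mirror the
// VersionSchemes constants and are assumptions here, as are the report and
// module values.
object EvictionErrorExample {
  import sbt.librarymanagement._
  import sbt.util.Level

  def check(report: UpdateReport, module: ModuleDescriptor): Unit = {
    val schemes = Seq(
      ModuleID("org.scala-lang.modules", "scala-xml", "always").cross(CrossVersion.binary),
      ModuleID("com.example", "legacy-lib", "pvp")
    )
    val err = EvictionError(report, module, schemes, "early-semver", "early-semver", Level.Warn)
    err.run() // fails via sys.error only when a non-assumed incompatible eviction exists
  }
}
// ----------------------------------------------------------------------------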
+ userDefinedSchemeOrFromPom match { + case Some(givenScheme) => + if (hasIncompatibleVersionForScheme(givenScheme)) + incompatibleEvictions += (p -> givenScheme) + case None => + val assumedScheme = + if (isNameScalaSuffixed(p.name)) assumedVersionScheme + else assumedVersionSchemeJava + + if (hasIncompatibleVersionForScheme(assumedScheme)) + assumedIncompatEvictions += (p -> assumedScheme) + } + + case _ => () + } + + new EvictionError( + incompatibleEvictions.toList, + assumedIncompatEvictions.toList, + ) + } + + given evictionErrorLines: ShowLines[EvictionError] = ShowLines { (a: EvictionError) => + a.toLines + } +} + +final class EvictionError private[sbt] ( + val incompatibleEvictions: Seq[(EvictionPair, String)], + val assumedIncompatibleEvictions: Seq[(EvictionPair, String)], +) { + def run(): Unit = + if (incompatibleEvictions.nonEmpty) { + sys.error(toLines.mkString("\n")) + } + + def toLines: List[String] = toLines(incompatibleEvictions, false) + + def toAssumedLines: List[String] = toLines(assumedIncompatibleEvictions, true) + + def toLines(evictions: Seq[(EvictionPair, String)], assumed: Boolean): List[String] = { + val out: mutable.ListBuffer[String] = mutable.ListBuffer() + out += "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:" + out += "" + evictions.foreach({ case (a, scheme) => + val seen: mutable.Set[ModuleID] = mutable.Set() + val callers: List[String] = (a.evicteds.toList ::: a.winner.toList) flatMap { r => + val rev = r.module.revision + r.callers.toList flatMap { caller => + if (seen(caller.caller)) Nil + else { + seen += caller.caller + List(f"\t +- ${caller}%-50s (depends on $rev)") + } + } + } + val que = if (assumed) "?" else "" + val winnerRev = a.winner match { + case Some(r) => s":${r.module.revision} ($scheme$que) is selected over ${a.evictedRevs}" + case _ => " is evicted for all versions" + } + val title = s"\t* ${a.organization}:${a.name}$winnerRev" + val lines = title :: (if (a.showCallers) callers.reverse else Nil) ::: List("") + out ++= lines + }) + out.toList + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/EvictionWarning.scala b/lm-core/src/main/scala/sbt/librarymanagement/EvictionWarning.scala new file mode 100644 index 000000000..b3ac16fb0 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/EvictionWarning.scala @@ -0,0 +1,441 @@ +package sbt.librarymanagement + +import collection.mutable +import Configurations.Compile +import ScalaArtifacts.{ LibraryID, CompilerID } +import sbt.internal.librarymanagement.VersionSchemes +import sbt.util.Logger +import sbt.util.ShowLines + +final class EvictionWarningOptions private[sbt] ( + val configurations: Seq[ConfigRef], + val warnScalaVersionEviction: Boolean, + val warnDirectEvictions: Boolean, + val warnTransitiveEvictions: Boolean, + val warnEvictionSummary: Boolean, + val infoAllEvictions: Boolean, + val showCallers: Boolean, + val guessCompatible: Function1[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] +) { + def withConfigurations(configurations: Seq[ConfigRef]): EvictionWarningOptions = + copy(configurations = configurations) + def withWarnScalaVersionEviction(warnScalaVersionEviction: Boolean): EvictionWarningOptions = + copy(warnScalaVersionEviction = warnScalaVersionEviction) + def withWarnDirectEvictions(warnDirectEvictions: Boolean): EvictionWarningOptions = + copy(warnDirectEvictions = warnDirectEvictions) + def withWarnTransitiveEvictions(warnTransitiveEvictions: Boolean): EvictionWarningOptions = + 
copy(warnTransitiveEvictions = warnTransitiveEvictions) + def withWarnEvictionSummary(warnEvictionSummary: Boolean): EvictionWarningOptions = + copy(warnEvictionSummary = warnEvictionSummary) + def withInfoAllEvictions(infoAllEvictions: Boolean): EvictionWarningOptions = + copy(infoAllEvictions = infoAllEvictions) + def withShowCallers(showCallers: Boolean): EvictionWarningOptions = + copy(showCallers = showCallers) + def withGuessCompatible( + guessCompatible: Function1[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] + ): EvictionWarningOptions = + copy(guessCompatible = guessCompatible) + + private[sbt] def copy( + configurations: Seq[ConfigRef] = configurations, + warnScalaVersionEviction: Boolean = warnScalaVersionEviction, + warnDirectEvictions: Boolean = warnDirectEvictions, + warnTransitiveEvictions: Boolean = warnTransitiveEvictions, + warnEvictionSummary: Boolean = warnEvictionSummary, + infoAllEvictions: Boolean = infoAllEvictions, + showCallers: Boolean = showCallers, + guessCompatible: Function1[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = + guessCompatible + ): EvictionWarningOptions = + new EvictionWarningOptions( + configurations = configurations, + warnScalaVersionEviction = warnScalaVersionEviction, + warnDirectEvictions = warnDirectEvictions, + warnTransitiveEvictions = warnTransitiveEvictions, + warnEvictionSummary = warnEvictionSummary, + infoAllEvictions = infoAllEvictions, + showCallers = showCallers, + guessCompatible = guessCompatible + ) +} + +object EvictionWarningOptions { + def empty: EvictionWarningOptions = + new EvictionWarningOptions( + Vector(), + warnScalaVersionEviction = false, + warnDirectEvictions = false, + warnTransitiveEvictions = false, + warnEvictionSummary = false, + infoAllEvictions = false, + showCallers = false, + defaultGuess + ) + def default: EvictionWarningOptions = summary + def full: EvictionWarningOptions = + new EvictionWarningOptions( + Vector(Compile), + warnScalaVersionEviction = true, + warnDirectEvictions = true, + warnTransitiveEvictions = true, + warnEvictionSummary = false, + infoAllEvictions = true, + showCallers = true, + defaultGuess + ) + def summary: EvictionWarningOptions = + new EvictionWarningOptions( + Vector(Compile), + warnScalaVersionEviction = false, + warnDirectEvictions = false, + warnTransitiveEvictions = false, + warnEvictionSummary = true, + infoAllEvictions = false, + showCallers = false, + defaultGuess + ) + + lazy val defaultGuess: Function1[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = + guessSbtOne orElse guessSecondSegment orElse guessSemVer orElse guessFalse + + private[sbt] def isNameScalaSuffixed(name: String): Boolean = + name.contains("_2.") || name.contains("_3") || name.contains("_4") + + /** A partial function that checks if given m2 is suffixed, and use pvp to evaluate. */ + lazy val guessSecondSegment + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (m1, Some(m2), Some(_)) if isNameScalaSuffixed(m2.name) => + (m1.revision, m2.revision) match { + case (VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) => + VersionNumber.SecondSegment + .isCompatible(VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) + case _ => false + } + } + + /** A partial function that checks two versions match pvp. 
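// --- Editorial sketch, not part of the patch -------------------------------
// Tuning the presets defined above: start from `default` (the summary preset)
// and opt into the detailed warnings, swapping in one of the guess functions.
object EvictionWarningOptionsExample {
  import sbt.librarymanagement._

  val verbose: EvictionWarningOptions =
    EvictionWarningOptions.default
      .withWarnDirectEvictions(true)
      .withWarnTransitiveEvictions(true)
      .withShowCallers(true)
      .withGuessCompatible(
        EvictionWarningOptions.guessSemVer orElse EvictionWarningOptions.guessFalse
      )
}
// ----------------------------------------------------------------------------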
*/ + private[sbt] lazy val evalPvp + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (m1, Some(m2), _) => + (m1.revision, m2.revision) match { + case (VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) => + VersionNumber.SecondSegment + .isCompatible(VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) + case _ => false + } + } + + lazy val guessSbtOne + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (m1, Some(m2), Some(scalaModuleInfo)) + if (m2.organization == "org.scala-sbt") && + (m2.name.endsWith("_" + scalaModuleInfo.scalaFullVersion) || + m2.name.endsWith("_" + scalaModuleInfo.scalaBinaryVersion)) => + (m1.revision, m2.revision) match { + case (VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) => + VersionNumber.SemVer + .isCompatible(VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) + case _ => false + } + } + + lazy val guessSemVer + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (m1, Some(m2), _) => + (m1.revision, m2.revision) match { + case (VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) => + VersionNumber.SemVer + .isCompatible(VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) + case _ => false + } + } + + lazy val guessEarlySemVer + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (m1, Some(m2), _) => + (m1.revision, m2.revision) match { + case (VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) => + VersionNumber.EarlySemVer + .isCompatible(VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) + case _ => false + } + } + + lazy val guessStrict + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (m1, Some(m2), _) => + (m1.revision, m2.revision) match { + case (VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) => + VersionNumber.Strict + .isCompatible(VersionNumber(ns1, ts1, es1), VersionNumber(ns2, ts2, es2)) + case _ => false + } + } + + lazy val guessFalse + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (_, _, _) => false + } + + lazy val guessTrue + : PartialFunction[(ModuleID, Option[ModuleID], Option[ScalaModuleInfo]), Boolean] = { + case (_, _, _) => true + } +} + +final class EvictionPair private[sbt] ( + val organization: String, + val name: String, + val winner: Option[ModuleReport], + val evicteds: Vector[ModuleReport], + val includesDirect: Boolean, + val showCallers: Boolean +) { + val evictedRevs: String = { + val revs = evicteds map { _.module.revision } + if (revs.size <= 1) revs.mkString else revs.distinct.mkString("{", ", ", "}") + } + + override def toString: String = + EvictionPair.evictionPairLines.showLines(this).mkString + override def equals(o: Any): Boolean = o match { + case o: EvictionPair => + (this.organization == o.organization) && + (this.name == o.name) + case _ => false + } + override def hashCode: Int = { + var hash = 1 + hash = hash * 31 + this.organization.## + hash = hash * 31 + this.name.## + hash + } +} + +object EvictionPair { + implicit val evictionPairLines: ShowLines[EvictionPair] = ShowLines { (a: EvictionPair) => + val seen: mutable.Set[ModuleID] = mutable.Set() + val callers: List[String] = (a.evicteds.toList ::: a.winner.toList) flatMap { r => + val rev = r.module.revision + r.callers.toList flatMap { caller => + if (seen(caller.caller)) Nil + else { + seen += caller.caller + 
List(f"\t +- ${caller}%-50s (depends on $rev)") + } + } + } + val winnerRev = a.winner match { + case Some(r) => s":${r.module.revision} is selected over ${a.evictedRevs}" + case _ => " is evicted for all versions" + } + val title = s"\t* ${a.organization}:${a.name}$winnerRev" + title :: (if (a.showCallers) callers.reverse else Nil) ::: List("") + } +} + +final class EvictionWarning private[sbt] ( + val options: EvictionWarningOptions, + val scalaEvictions: Seq[EvictionPair], + val directEvictions: Seq[EvictionPair], + val transitiveEvictions: Seq[EvictionPair], + val allEvictions: Seq[EvictionPair], + val binaryIncompatibleEvictionExists: Boolean +) { + private[sbt] def this( + options: EvictionWarningOptions, + scalaEvictions: Seq[EvictionPair], + directEvictions: Seq[EvictionPair], + transitiveEvictions: Seq[EvictionPair], + allEvictions: Seq[EvictionPair] + ) = this(options, scalaEvictions, directEvictions, transitiveEvictions, allEvictions, false) + def reportedEvictions: Seq[EvictionPair] = + scalaEvictions ++ directEvictions ++ transitiveEvictions + private[sbt] def infoAllTheThings: List[String] = EvictionWarning.infoAllTheThings(this) +} + +object EvictionWarning { + @deprecated("Use variant that doesn't take an unused logger", "1.2.0") + def apply( + module: ModuleDescriptor, + options: EvictionWarningOptions, + report: UpdateReport, + log: Logger + ): EvictionWarning = apply(module, options, report) + + def apply( + module: ModuleDescriptor, + options: EvictionWarningOptions, + report: UpdateReport + ): EvictionWarning = { + val evictions = buildEvictions(options, report) + processEvictions(module, options, evictions) + } + + private[sbt] def buildEvictions( + options: EvictionWarningOptions, + report: UpdateReport + ): Seq[OrganizationArtifactReport] = { + val buffer: mutable.ListBuffer[OrganizationArtifactReport] = mutable.ListBuffer() + val confs = report.configurations filter { x => + options.configurations.contains[ConfigRef](x.configuration) + } + confs flatMap { confReport => + confReport.details map { detail => + if ( + (detail.modules exists { _.evicted }) && + !(buffer exists { x => + (x.organization == detail.organization) && (x.name == detail.name) + }) + ) { + buffer += detail + } + } + } + buffer.toList.toVector + } + + private[sbt] def isScalaArtifact( + module: ModuleDescriptor, + organization: String, + name: String + ): Boolean = + module.scalaModuleInfo match { + case Some(s) => + organization == s.scalaOrganization && + (name == LibraryID) || (name == CompilerID) + case _ => false + } + + private[sbt] def processEvictions( + module: ModuleDescriptor, + options: EvictionWarningOptions, + reports: Seq[OrganizationArtifactReport] + ): EvictionWarning = { + val directDependencies = module.directDependencies + val pairs = reports map { detail => + val evicteds = detail.modules filter { _.evicted } + val winner = (detail.modules filterNot { _.evicted }).headOption + val includesDirect: Boolean = + options.warnDirectEvictions && + (directDependencies exists { dep => + (detail.organization == dep.organization) && (detail.name == dep.name) + }) + new EvictionPair( + detail.organization, + detail.name, + winner, + evicteds, + includesDirect, + options.showCallers + ) + } + val scalaEvictions: mutable.ListBuffer[EvictionPair] = mutable.ListBuffer() + val directEvictions: mutable.ListBuffer[EvictionPair] = mutable.ListBuffer() + val transitiveEvictions: mutable.ListBuffer[EvictionPair] = mutable.ListBuffer() + var binaryIncompatibleEvictionExists = false + def 
guessCompatible(p: EvictionPair): Boolean = + p.evicteds forall { r => + val winnerOpt = p.winner map { _.module } + val extraAttributes = ((p.winner match { + case Some(r) => r.extraAttributes + case _ => Map.empty + }): collection.immutable.Map[String, String]) ++ (winnerOpt match { + case Some(w) => w.extraAttributes + case _ => Map.empty + }) + val schemeOpt = VersionSchemes.extractFromExtraAttributes(extraAttributes) + val f = (winnerOpt, schemeOpt) match { + case (Some(_), Some(VersionSchemes.Always)) => + EvictionWarningOptions.guessTrue + case (Some(_), Some(VersionSchemes.Strict)) => + EvictionWarningOptions.guessStrict + case (Some(_), Some(VersionSchemes.EarlySemVer)) => + EvictionWarningOptions.guessEarlySemVer + case (Some(_), Some(VersionSchemes.SemVerSpec)) => + EvictionWarningOptions.guessSemVer + case (Some(_), Some(VersionSchemes.PackVer)) => + EvictionWarningOptions.evalPvp + case _ => options.guessCompatible(_) + } + f((r.module, winnerOpt, module.scalaModuleInfo)) + } + pairs foreach { + case p if isScalaArtifact(module, p.organization, p.name) => + (module.scalaModuleInfo, p.winner) match { + case (Some(s), Some(winner)) if (s.scalaFullVersion != winner.module.revision) => + if (options.warnScalaVersionEviction) + scalaEvictions += p + if (options.warnEvictionSummary) + binaryIncompatibleEvictionExists = true + case _ => + } + case p if p.includesDirect => + if (!guessCompatible(p)) { + if (options.warnDirectEvictions) + directEvictions += p + if (options.warnEvictionSummary) + binaryIncompatibleEvictionExists = true + } + case p => + // don't report on a transitive eviction that does not have a winner + // https://github.com/sbt/sbt/issues/4946 + if (!guessCompatible(p) && p.winner.isDefined) { + if (options.warnTransitiveEvictions) + transitiveEvictions += p + if (options.warnEvictionSummary) + binaryIncompatibleEvictionExists = true + } + } + new EvictionWarning( + options, + scalaEvictions.toList, + directEvictions.toList, + transitiveEvictions.toList, + pairs, + binaryIncompatibleEvictionExists + ) + } + + given evictionWarningLines: ShowLines[EvictionWarning] = ShowLines { (a: EvictionWarning) => + import ShowLines._ + val out: mutable.ListBuffer[String] = mutable.ListBuffer() + if (a.options.warnEvictionSummary && a.binaryIncompatibleEvictionExists) { + out += "There may be incompatibilities among your library dependencies; run 'evicted' to see detailed eviction warnings." 
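// --- Editorial sketch, not part of the patch -------------------------------
// Producing and printing an eviction warning with the pieces defined above.
// The logger, module and report values are assumptions; the `lines` extension
// comes from sbt.util.ShowLines together with the evictionWarningLines
// instance defined in this object.
object EvictionWarningExample {
  import sbt.librarymanagement._
  import sbt.util.{ Logger, ShowLines }
  import ShowLines._

  def report(module: ModuleDescriptor, updateReport: UpdateReport, log: Logger): Unit = {
    val warning = EvictionWarning(module, EvictionWarningOptions.full, updateReport)
    warning.lines foreach { line => log.warn(line) }
  }
}
// ----------------------------------------------------------------------------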
+ } + + if (a.scalaEvictions.nonEmpty) { + out += "Scala version was updated by one of library dependencies:" + out ++= (a.scalaEvictions flatMap { _.lines }) + out += "To force scalaVersion, add the following:" + out += "\tscalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(true)))" + } + + if (a.directEvictions.nonEmpty || a.transitiveEvictions.nonEmpty) { + out += "Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:" + out += "" + out ++= (a.directEvictions flatMap { _.lines }) + out ++= (a.transitiveEvictions flatMap { _.lines }) + } + + out.toList + } + + private[sbt] def infoAllTheThings(a: EvictionWarning): List[String] = + if (a.options.infoAllEvictions) { + import ShowLines._ + val evo = a.options + val out: mutable.ListBuffer[String] = mutable.ListBuffer() + a.allEvictions foreach { ev => + if ((a.scalaEvictions.contains[EvictionPair](ev)) && evo.warnScalaVersionEviction) () + else if ((a.directEvictions.contains[EvictionPair](ev)) && evo.warnDirectEvictions) () + else if ((a.transitiveEvictions.contains[EvictionPair](ev)) && evo.warnTransitiveEvictions) + () + else { + out ++= ev.lines + } + } + if (out.isEmpty) Nil + else List("Here are other dependency conflicts that were resolved:", "") ::: out.toList + } else Nil +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/Extra.scala b/lm-core/src/main/scala/sbt/librarymanagement/Extra.scala new file mode 100644 index 000000000..421a40505 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/Extra.scala @@ -0,0 +1,57 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.librarymanagement + +import sbt.librarymanagement.DependencyBuilders.{ Organization, OrganizationArtifactName } + +private[librarymanagement] abstract class InclExclRuleFunctions { + def everything = InclExclRule("*", "*", "*", Vector.empty, Disabled()) + + def apply(organization: String, name: String): InclExclRule = + InclExclRule(organization, name, "*", Vector.empty, Disabled()) + + def apply(organization: String): InclExclRule = apply(organization, "*") + + implicit def organizationToExclusionRule(organization: Organization): InclExclRule = + apply(organization.organization) + implicit def stringToExclusionRule(organization: String): InclExclRule = apply(organization) + + implicit def organizationArtifactNameToExclusionRule(oa: OrganizationArtifactName): InclExclRule = + InclExclRule(oa.organization, oa.name, "*", Vector.empty, oa.crossVersion) + + implicit def moduleIDToExclusionRule(moduleID: ModuleID): InclExclRule = { + val org = moduleID.organization + val name = moduleID.name + val version = moduleID.revision + val crossVersion = moduleID.crossVersion + InclExclRule(org, name, version, Vector.empty, crossVersion) + } +} + +private[librarymanagement] abstract class ArtifactTypeFilterExtra { + def inverted: Boolean + + def withInverted(inverted: Boolean): ArtifactTypeFilter + + def invert = withInverted(!inverted) +} + +private[librarymanagement] abstract class ArtifactTypeFilterFunctions { + def allow(types: Set[String]) = ArtifactTypeFilter(types, false) + def forbid(types: Set[String]) = ArtifactTypeFilter(types, true) +} + +private[librarymanagement] abstract class ConflictManagerFunctions { + // To avoid NPE (or making the val's below lazy) + // For case classes refchecks rewrites apply calls to constructor calls, we have to do it manually + def apply(name: String, organization: String = "*", module: String = "*"): ConflictManager + def 
ConflictManager(name: String) = apply(name) + + val all = ConflictManager("all") + val latestTime = ConflictManager("latest-time") + val latestRevision = ConflictManager("latest-revision") + val latestCompatible = ConflictManager("latest-compatible") + val strict = ConflictManager("strict") + val default = latestRevision +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/Http.scala b/lm-core/src/main/scala/sbt/librarymanagement/Http.scala new file mode 100644 index 000000000..5edf9a746 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/Http.scala @@ -0,0 +1,8 @@ +package sbt.librarymanagement + +import gigahorse._, support.apachehttp.Gigahorse +import scala.concurrent.duration.DurationInt + +object Http { + lazy val http: HttpClient = Gigahorse.http(gigahorse.Config().withReadTimeout(60.minutes)) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/LibraryManagementInterface.scala b/lm-core/src/main/scala/sbt/librarymanagement/LibraryManagementInterface.scala new file mode 100644 index 000000000..cdca4a8dc --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/LibraryManagementInterface.scala @@ -0,0 +1,96 @@ +package sbt.librarymanagement + +import java.io.File +import sbt.util.Logger + +/** + * Interface for dependency resolution intended for engine authors. + */ +trait DependencyResolutionInterface { + + /** + * Builds a ModuleDescriptor that describes a subproject with dependencies. + * + * @param moduleSetting It contains the information about the module including the dependencies. + * @return A `ModuleDescriptor` describing a subproject and its dependencies. + */ + def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor + + /** + * Resolves the given module's dependencies performing a retrieval. + * + * @param module The module to be resolved. + * @param configuration The update configuration. + * @param uwconfig The configuration to handle unresolved warnings. + * @param log The logger. + * @return The result, either an unresolved warning or an update report. Note that this + * update report will or will not be successful depending on the `missingOk` option. + */ + def update( + module: ModuleDescriptor, + configuration: UpdateConfiguration, + uwconfig: UnresolvedWarningConfiguration, + log: Logger + ): Either[UnresolvedWarning, UpdateReport] +} + +/** + * Interface for publishing modules. + */ +trait PublisherInterface { + + /** + * Builds a ModuleDescriptor that describes a subproject with dependencies. + * + * @param moduleSetting It contains the information about the module including the dependencies. + * @return A `ModuleDescriptor` describing a subproject and its dependencies. + */ + def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor + + /** + * Publishes the given module. + * + * @param module The module to be published. + * @param configuration The publish configuration. + * @param log The logger. + */ + def publish(module: ModuleDescriptor, configuration: PublishConfiguration, log: Logger): Unit + + /** + * Makes the `pom.xml` file for the given module. + * + * @param module The module for which a `.pom` file is to be created. + * @param configuration The makePomFile configuration. + * @param log The logger. + * @return The `File` containing the POM descriptor. 
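// --- Editorial sketch, not part of the patch -------------------------------
// An engine implementing DependencyResolutionInterface above is normally
// consumed through the DependencyResolution wrapper defined earlier in this
// change. The engine, directory and logger values are assumptions.
object EngineWiringExample {
  import sbt.librarymanagement._
  import sbt.util.Logger

  def retrieveOne(engine: DependencyResolutionInterface, dir: java.io.File, log: Logger): Unit = {
    val lm = DependencyResolution(engine)
    lm.retrieve(ModuleID("com.example", "lib", "1.0.0"), None, dir, log) match {
      case Right(files) => log.info(s"retrieved ${files.size} file(s)")
      case Left(_)      => log.warn("resolution failed; see the unresolved warning")
    }
  }
}
// ----------------------------------------------------------------------------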
+ */ + def makePomFile(module: ModuleDescriptor, configuration: MakePomConfiguration, log: Logger): File +} + +/** + * Decribes the representation of a module, including its dependencies + * and the version of Scala it uses, if any. + */ +trait ModuleDescriptor { + + /** + * The direct dependencies of this module. + */ + def directDependencies: Vector[ModuleID] + + /** + * The information and module about the scala version that this module requires, + * if any. + */ + def scalaModuleInfo: Option[ScalaModuleInfo] + + /** + * The input parameters used to construct the `ModuleSettings`. + */ + def moduleSettings: ModuleSettings + + /** + * Hash for extra parameter that were not captured as `moduleSettings`. + */ + def extraInputHash: Long +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/LibraryManagementSyntax.scala b/lm-core/src/main/scala/sbt/librarymanagement/LibraryManagementSyntax.scala new file mode 100644 index 000000000..1ab5629d7 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/LibraryManagementSyntax.scala @@ -0,0 +1,40 @@ +package sbt.librarymanagement + +trait LibraryManagementSyntax0 { + // See http://www.scala-lang.org/news/2.12.0#traits-compile-to-interfaces + // Avoid defining fields (val or var, but a constant is ok – final val without result type) + // Avoid calling super + // Avoid initializer statements in the body + + implicit def richUpdateReport(ur: UpdateReport): RichUpdateReport = new RichUpdateReport(ur) +} + +trait LibraryManagementSyntax + extends LibraryManagementSyntax0 + with DependencyBuilders + with DependencyFilterExtra { + // See http://www.scala-lang.org/news/2.12.0#traits-compile-to-interfaces + // Avoid defining fields (val or var, but a constant is ok – final val without result type) + // Avoid calling super + // Avoid initializer statements in the body + + type ExclusionRule = InclExclRule + final val ExclusionRule = InclExclRule + + type InclusionRule = InclExclRule + final val InclusionRule = InclExclRule + + import sbt.librarymanagement.{ Configurations => C } + final val Compile = C.Compile + final val Test = C.Test + final val Runtime = C.Runtime + @deprecated("Create a separate subproject for testing instead", "1.9.0") + final val IntegrationTest = C.IntegrationTest + final val Default = C.Default + final val Provided = C.Provided + // java.lang.System is more important, so don't alias this one + // final val System = C.System + final val Optional = C.Optional +} + +object syntax extends LibraryManagementSyntax diff --git a/lm-core/src/main/scala/sbt/librarymanagement/License.scala b/lm-core/src/main/scala/sbt/librarymanagement/License.scala new file mode 100644 index 000000000..a8ac31088 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/License.scala @@ -0,0 +1,25 @@ +package sbt.librarymanagement + +import java.net.URL +import java.net.URI + +/** + * Commonly used software licenses + * Names are SPDX ids: + * https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json + */ +object License { + lazy val Apache2: (String, URL) = + ("Apache-2.0", new URI("https://www.apache.org/licenses/LICENSE-2.0.txt").toURL) + + lazy val MIT: (String, URL) = + ("MIT", new URI("https://opensource.org/licenses/MIT").toURL) + + lazy val CC0: (String, URL) = + ("CC0-1.0", new URI("https://creativecommons.org/publicdomain/zero/1.0/legalcode").toURL) + + def PublicDomain: (String, URL) = CC0 + + lazy val GPL3_or_later: (String, URL) = + ("GPL-3.0-or-later", new 
URI("https://spdx.org/licenses/GPL-3.0-or-later.html").toURL) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/LogicalClock.scala b/lm-core/src/main/scala/sbt/librarymanagement/LogicalClock.scala new file mode 100644 index 000000000..f55a61b82 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/LogicalClock.scala @@ -0,0 +1,21 @@ +package sbt.librarymanagement + +/** + * Represents a logical time point for dependency resolution. + * This is used to cache dependencies across subproject resolution which may change over time. + */ +trait LogicalClock { + def toString: String +} + +object LogicalClock { + def apply(hashCode: Int): LogicalClock = { + def intToByteArray(x: Int): Array[Byte] = + Array((x >>> 24).toByte, (x >> 16 & 0xff).toByte, (x >> 8 & 0xff).toByte, (x & 0xff).toByte) + apply(sbt.io.Hash.toHex(intToByteArray(hashCode))) + } + def apply(x: String): LogicalClock = new LogicalClock { + override def toString: String = x + } + def unknown: LogicalClock = apply("unknown") +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/MakePomConfigurationExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/MakePomConfigurationExtra.scala new file mode 100644 index 000000000..1b5edbb9c --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/MakePomConfigurationExtra.scala @@ -0,0 +1,17 @@ +package sbt.librarymanagement + +private[librarymanagement] abstract class MakePomConfigurationFunctions { + private[sbt] lazy val constTrue: MavenRepository => Boolean = _ => true + + def apply(): MakePomConfiguration = + MakePomConfiguration( + None, + None, + None, + None, + identity(_: scala.xml.Node), + constTrue, + true, + Set(Artifact.DefaultType, Artifact.PomType) + ) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ModuleIDExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/ModuleIDExtra.scala new file mode 100644 index 000000000..f8eb4fe6c --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ModuleIDExtra.scala @@ -0,0 +1,219 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.librarymanagement + +import java.net.URI + +import sbt.internal.librarymanagement.mavenint.SbtPomExtraProperties +import scala.collection.mutable.ListBuffer +import sbt.librarymanagement.syntax._ +import sbt.util.Logger + +private[librarymanagement] abstract class ModuleIDExtra { + def organization: String + def name: String + def revision: String + def configurations: Option[String] + def isChanging: Boolean + def isTransitive: Boolean + def isForce: Boolean + def explicitArtifacts: Vector[Artifact] + def inclusions: Vector[InclusionRule] + def exclusions: Vector[ExclusionRule] + def extraAttributes: Map[String, String] + def crossVersion: CrossVersion + def branchName: Option[String] + + def withIsChanging(isChanging: Boolean): ModuleID + def withIsTransitive(isTransitive: Boolean): ModuleID + def withIsForce(isForce: Boolean): ModuleID + def withExplicitArtifacts(explicitArtifacts: Vector[Artifact]): ModuleID + def withExclusions(exclusions: Vector[InclExclRule]): ModuleID + def withExtraAttributes(extraAttributes: Map[String, String]): ModuleID + def withCrossVersion(crossVersion: CrossVersion): ModuleID + def withBranchName(branchName: Option[String]): ModuleID + def withPlatformOpt(platformOpt: Option[String]): ModuleID + + protected def toStringImpl: String = + s"""$organization:$name:$revision""" + + (configurations match { case Some(s) => ":" + s; case None => "" }) + { + val attr = attributeString + 
if (attr == "") "" + else " " + attr + } + + (if (extraAttributes.isEmpty) "" else " " + extraString) + + protected def attributeString: String = { + val buffer = ListBuffer.empty[String] + if (isChanging) { + buffer += "changing" + } + if (!isTransitive) { + buffer += "intransitive" + } + if (isForce) { + buffer += "force" + } + buffer.toList.mkString(";") + } + + /** String representation of the extra attributes, excluding any information only attributes. */ + def extraString: String = + extraDependencyAttributes.map { case (k, v) => k + "=" + v }.mkString("(", ", ", ")") + + /** Returns the extra attributes except for ones marked as information only (ones that typically would not be used for dependency resolution). */ + def extraDependencyAttributes: Map[String, String] = + extraAttributes.view.filterKeys(!_.startsWith(SbtPomExtraProperties.POM_INFO_KEY_PREFIX)).toMap + + @deprecated( + "Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", + "0.12.0" + ) + def cross(v: Boolean): ModuleID = cross(if (v) CrossVersion.binary else Disabled()) + + /** + * Specifies the cross-version behavior for this module. See [CrossVersion] for details. + * Unlike `withCrossVersion(...)`, `cross(...)` will preserve the prefix and suffix + * values from the existing `crossVersion` value. + * + * {{{ + * ModuleID("com.example", "foo", "1.0") + * .cross(CrossVersion.binaryWith("sjs1_", "")) + * .cross(CrossVersion.for3Use2_13) + * }}} + * + * This allows `.cross(...)` to play well with `%%%` operator provided by sbt-platform-deps. + */ + def cross(v: CrossVersion): ModuleID = + withCrossVersion(CrossVersion.getPrefixSuffix(this.crossVersion) match { + case ("", "") => v + case (prefix, suffix) => + CrossVersion.getPrefixSuffix(v) match { + case ("", "") => CrossVersion.setPrefixSuffix(v, prefix, suffix) + case _ => v + } + }) + + // () required for chaining + /** Do not follow dependencies of this module. Synonym for `intransitive`. */ + def notTransitive(): ModuleID = intransitive() + + /** Do not follow dependencies of this module. Synonym for `notTransitive`. */ + def intransitive(): ModuleID = withIsTransitive(false) + + /** + * Marks this dependency as "changing". Ivy will always check if the metadata has changed and then if the artifact has changed, + * redownload it. sbt configures all -SNAPSHOT dependencies to be changing. + * + * See the "Changes in artifacts" section of https://ant.apache.org/ivy/history/trunk/concept.html for full details. + */ + def changing(): ModuleID = withIsChanging(true) + + /** + * Indicates that conflict resolution should only select this module's revision. + * This prevents a newer revision from being pulled in by a transitive dependency, for example. + */ + def force(): ModuleID = withIsForce(true) + + private[sbt] def validateProtocol(logger: Logger): Unit = { + explicitArtifacts foreach { _.validateProtocol(logger) } + } + + /** + * Specifies a URL from which the main artifact for this dependency can be downloaded. + * This value is only consulted if the module is not found in a repository. + * It is not included in published metadata. + */ + def from(url: String): ModuleID = from(url, false) + + /** + * Specifies a URL from which the main artifact for this dependency can be downloaded. + * This value is only consulted if the module is not found in a repository. + * It is not included in published metadata. 
+ */ + def from(url: String, allowInsecureProtocol: Boolean): ModuleID = + artifacts(Artifact(name, new URI(url), allowInsecureProtocol)) + + /** Adds a dependency on the artifact for this module with classifier `c`. */ + def classifier(c: String): ModuleID = artifacts(Artifact(name, c)) + + /** + * Declares the explicit artifacts for this module. If this ModuleID represents a dependency, + * these artifact definitions override the information in the dependency's published metadata. + */ + def artifacts(newArtifacts: Artifact*): ModuleID = + withExplicitArtifacts(newArtifacts.toVector ++ explicitArtifacts) + + /** + * Applies the provided exclusions to dependencies of this module. Note that only exclusions that specify + * both the exact organization and name and nothing else will be included in a pom.xml. + */ + def excludeAll(rules: ExclusionRule*): ModuleID = withExclusions(exclusions ++ rules) + + /** Excludes the dependency with organization `org` and `name` from being introduced by this dependency during resolution. */ + def exclude(org: String, name: String): ModuleID = + excludeAll(ExclusionRule().withOrganization(org).withName(name)) + + /** + * Adds extra attributes for this module. All keys are prefixed with `e:` if they are not already so prefixed. + * This information will only be published in an ivy.xml and not in a pom.xml. + */ + def extra(attributes: (String, String)*): ModuleID = + withExtraAttributes(extraAttributes ++ ModuleID.checkE(attributes)) + + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "sources" + * classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withSources()` instead. + */ + def sources(): ModuleID = artifacts(Artifact.sources(name)) + + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc" + * classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withJavadoc()` instead. + */ + def javadoc(): ModuleID = artifacts(Artifact.javadoc(name)) + + def pomOnly(): ModuleID = artifacts(Artifact.pom(name)) + + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "sources" + * classifier. If there is not already an explicit dependency on the main artifact, this adds one. + */ + def withSources(): ModuleID = jarIfEmpty.sources() + + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc" + * classifier. If there is not already an explicit dependency on the main artifact, this adds one. + */ + def withJavadoc(): ModuleID = jarIfEmpty.javadoc() + + private def jarIfEmpty = if (explicitArtifacts.isEmpty) jar() else this + + /** + * Declares a dependency on the main artifact. This is implied by default unless artifacts are explicitly declared, such + * as when adding a dependency on an artifact with a classifier. + */ + def jar(): ModuleID = artifacts(Artifact(name)) + + /** + * Sets the Ivy branch of this module. 
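// --- Editorial sketch, not part of the patch -------------------------------
// The ModuleID enrichments defined above in use; the coordinates are
// illustrative only, and the cross(...) call stands in for what %% normally does.
object ModuleIDExample {
  import sbt.librarymanagement._

  val dep: ModuleID =
    ModuleID("com.example", "client", "1.4.0")
      .cross(CrossVersion.binary)          // append the Scala binary-version suffix
      .withConfigurations(Some("compile"))
      .exclude("commons-logging", "commons-logging")
      .classifier("tests")
      .intransitive()
      .changing()
}
// ----------------------------------------------------------------------------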
+ */ + def branch(branchName: String): ModuleID = withBranchName(Some(branchName)) + + def branch(branchName: Option[String]): ModuleID = withBranchName(branchName) + + def platform(platform: String): ModuleID = withPlatformOpt(Some(platform)) +} + +private[librarymanagement] abstract class ModuleIDFunctions { + + /** Prefixes all keys with `e:` if they are not already so prefixed. */ + def checkE(attributes: Seq[(String, String)]) = + for ((key, value) <- attributes) + yield if (key.startsWith("e:")) (key, value) else ("e:" + key, value) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/Platform.scala b/lm-core/src/main/scala/sbt/librarymanagement/Platform.scala new file mode 100644 index 000000000..3a9555686 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/Platform.scala @@ -0,0 +1,7 @@ +package sbt.librarymanagement + +object Platform: + val sjs1: String = "sjs1" + val jvm: String = "jvm" + val native0_4: String = "native0.4" +end Platform diff --git a/lm-core/src/main/scala/sbt/librarymanagement/Publisher.scala b/lm-core/src/main/scala/sbt/librarymanagement/Publisher.scala new file mode 100644 index 000000000..c071240f9 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/Publisher.scala @@ -0,0 +1,49 @@ +package sbt.librarymanagement + +import java.io.File +import sbt.util.Logger + +/** + * Library management API to publish artifacts. + */ +class Publisher private[sbt] (publisherEngine: PublisherInterface) { + + /** + * Builds a ModuleDescriptor that describes a subproject with dependencies. + * + * @param moduleSetting It contains the information about the module including the dependencies. + * @return A `ModuleDescriptor` describing a subproject and its dependencies. + */ + def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor = + publisherEngine.moduleDescriptor(moduleSetting) + + /** + * Publishes the given module. + * + * @param module The module to be published. + * @param configuration The publish configuration. + * @param log The logger. + */ + def publish(module: ModuleDescriptor, configuration: PublishConfiguration, log: Logger): Unit = + publisherEngine.publish(module, configuration, log) + + /** + * Makes the `pom.xml` file for the given module. + * + * @param module The module for which a `.pom` file is to be created. + * @param configuration The makePomFile configuration. + * @param log The logger. + * @return The `File` containing the POM descriptor. 
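// --- Editorial sketch, not part of the patch -------------------------------
// Publishing through the Publisher wrapper defined above; the engine, module
// descriptor, publish configuration and logger values are assumptions.
object PublisherExample {
  import sbt.librarymanagement._
  import sbt.util.Logger

  def publishIt(
      engine: PublisherInterface,
      module: ModuleDescriptor,
      conf: PublishConfiguration,
      log: Logger
  ): Unit = {
    val publisher = Publisher(engine)
    publisher.publish(module, conf, log)
  }
}
// ----------------------------------------------------------------------------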
+ */ + def makePomFile( + module: ModuleDescriptor, + configuration: MakePomConfiguration, + log: Logger + ): File = + publisherEngine.makePomFile(module, configuration, log) +} + +object Publisher { + def apply(publisherEngine: PublisherInterface): Publisher = + new Publisher(publisherEngine) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ResolverExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/ResolverExtra.scala new file mode 100644 index 000000000..58833429f --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ResolverExtra.scala @@ -0,0 +1,530 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.librarymanagement + +import java.io.{ File, IOException } +import java.net.{ URI, URL } +import scala.annotation.nowarn +import scala.xml.XML +import org.xml.sax.SAXParseException +import sbt.util.Logger + +import scala.util.matching.Regex + +final class RawRepository(val resolver: AnyRef, name: String) extends Resolver(name) { + override def toString = "Raw(" + resolver.toString + ")" + + override def equals(o: Any): Boolean = o match { + case o: RawRepository => + this.name == o.name + case _ => false + } + + override def hashCode: Int = { + var hash = 1 + hash = hash * 31 + this.name.## + hash + } +} + +private[librarymanagement] abstract class MavenRepositoryFunctions { + def apply(name: String, root: String, localIfFile: Boolean = true): MavenRepository = + MavenRepo(name, root, localIfFile) +} + +private[librarymanagement] abstract class PatternsFunctions { + implicit def defaultPatterns: Patterns = Resolver.defaultPatterns + + def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns: _*) + def apply(isMavenCompatible: Boolean, artifactPatterns: String*): Patterns = { + val patterns = artifactPatterns.toVector + Patterns() + .withIvyPatterns(patterns) + .withArtifactPatterns(patterns) + .withIsMavenCompatible(isMavenCompatible) + } +} + +private[librarymanagement] trait SshBasedRepositoryExtra { + + /** The object representing the configured ssh connection for this repository. */ + def connection: SshConnection + + type RepositoryType <: SshBasedRepository + protected def copy(connection: SshConnection): RepositoryType + private def copy(authentication: SshAuthentication): RepositoryType = + copy(connection withAuthentication authentication) + + /** Configures this to use the specified user name and password when connecting to the remote repository. */ + def as(user: String, password: String): RepositoryType = as(user, Some(password)) + def as(user: String): RepositoryType = as(user, None) + def as(user: String, password: Option[String]): RepositoryType = + copy(PasswordAuthentication(user, password)) + + /** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. 
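// --- Editorial sketch, not part of the patch -------------------------------
// Declaring repositories with the factories defined above. The host name and
// the Ivy-style pattern string are illustrative only.
object RepositoryExample {
  import sbt.librarymanagement._

  val companyReleases: MavenRepository =
    MavenRepository("company-releases", "https://repo.example.com/maven/releases")

  val companyIvyLayout: Patterns =
    Patterns(
      false, // not Maven compatible
      "https://repo.example.com/ivy/[organisation]/[module]/[revision]/[artifact]-[revision].[ext]"
    )
}
// ----------------------------------------------------------------------------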
*/ + def as(user: String, keyfile: File): RepositoryType = as(user, keyfile, None) + def as(user: String, keyfile: File, password: String): RepositoryType = + as(user, keyfile, Some(password)) + + def as(user: String, keyfile: File, password: Option[String]): RepositoryType = + copy(KeyFileAuthentication(user, keyfile, password)) +} + +private[librarymanagement] trait SshRepositoryExtra extends SshBasedRepositoryExtra { + def name: String + def patterns: sbt.librarymanagement.Patterns + def publishPermissions: Option[String] + + type RepositoryType = SshRepository + + protected def copy(connection: SshConnection): SshRepository = + SshRepository(name, connection, patterns, publishPermissions) +} + +private[librarymanagement] trait SftpRepositoryExtra extends SshBasedRepositoryExtra { + def name: String + def patterns: sbt.librarymanagement.Patterns + + type RepositoryType = SftpRepository + + protected def copy(connection: SshConnection): SftpRepository = + SftpRepository(name, connection, patterns) +} + +/** A repository that conforms to sbt launcher's interface */ +private[sbt] class FakeRepository(resolver: AnyRef, name: String) extends xsbti.Repository { + def rawRepository = new RawRepository(resolver, name) +} + +private[librarymanagement] abstract class ResolverFunctions { + import sbt.internal.librarymanagement.LMSysProp.useSecureResolvers + + val TypesafeRepositoryRoot = typesafeRepositoryRoot(useSecureResolvers) + val SbtRepositoryRoot = sbtRepositoryRoot(useSecureResolvers) + @deprecated("Renamed to SbtRepositoryRoot.", "1.0.0") + val SbtPluginRepositoryRoot = SbtRepositoryRoot + val SonatypeRepositoryRoot = "https://oss.sonatype.org/content/repositories" + val SonatypeS01RepositoryRoot = "https://s01.oss.sonatype.org/content/repositories" + val SonatypeReleasesRepository = + "https://oss.sonatype.org/service/local/repositories/releases/content/" + val JavaNet2RepositoryName = "java.net Maven2 Repository" + val JavaNet2RepositoryRoot = javanet2RepositoryRoot(useSecureResolvers) + val JCenterRepositoryName = "jcenter" + val JCenterRepositoryRoot = "https://jcenter.bintray.com/" + val DefaultMavenRepositoryRoot = "https://repo1.maven.org/maven2/" + val DefaultMavenRepository = + MavenRepository("public", centralRepositoryRoot(useSecureResolvers)) + val JavaNet2Repository = MavenRepository(JavaNet2RepositoryName, JavaNet2RepositoryRoot) + val JCenterRepository = MavenRepository(JCenterRepositoryName, JCenterRepositoryRoot) + + def mavenCentral: Resolver = DefaultMavenRepository + def defaults: Vector[Resolver] = Vector(mavenCentral) + + // TODO: This switch is only kept for backward compatibility. Hardcode to HTTPS in the future. + private[sbt] def centralRepositoryRoot(secure: Boolean) = + (if (secure) "https" else "http") + "://repo1.maven.org/maven2/" + // TODO: This switch is only kept for backward compatibility. Hardcode to HTTPS in the future. + private[sbt] def javanet2RepositoryRoot(secure: Boolean) = + if (secure) "https://maven.java.net/content/repositories/public/" + else "http://download.java.net/maven/2" + // TODO: This switch is only kept for backward compatibility. Hardcode to HTTPS in the future. + private[sbt] def typesafeRepositoryRoot(secure: Boolean) = + (if (secure) "https" else "http") + "://repo.typesafe.com/typesafe" + // TODO: This switch is only kept for backward compatibility. Hardcode to HTTPS in the future. 
+ private[sbt] def sbtRepositoryRoot(secure: Boolean) = + (if (secure) "https" else "http") + "://repo.scala-sbt.org/scalasbt" + + // obsolete: kept only for launcher compatibility + private[sbt] val ScalaToolsReleasesName = "Sonatype OSS Releases" + private[sbt] val ScalaToolsSnapshotsName = "Sonatype OSS Snapshots" + private[sbt] val ScalaToolsReleasesRoot = SonatypeReleasesRepository + private[sbt] val ScalaToolsSnapshotsRoot = SonatypeRepositoryRoot + "/snapshots" + private[sbt] val ScalaToolsReleases = + MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot) + private[sbt] val ScalaToolsSnapshots = + MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot) + + def typesafeRepo(status: String) = + MavenRepository("typesafe-" + status, TypesafeRepositoryRoot + "/" + status) + def typesafeIvyRepo(status: String) = + url("typesafe-ivy-" + status, new URI(TypesafeRepositoryRoot + "/ivy-" + status + "/").toURL)( + ivyStylePatterns + ) + def sbtIvyRepo(status: String) = + url(s"sbt-ivy-$status", new URI(s"$SbtRepositoryRoot/ivy-$status/").toURL)(ivyStylePatterns) + def sbtPluginRepo(status: String) = + url("sbt-plugin-" + status, new URI(SbtRepositoryRoot + "/sbt-plugin-" + status + "/").toURL)( + ivyStylePatterns + ) + @deprecated( + """Use sonatypeOssRepos instead e.g. `resolvers ++= Resolver.sonatypeOssRepos("snapshots")`""", + "1.7.0" + ) + def sonatypeRepo(status: String) = + MavenRepository( + "sonatype-" + status, + if (status == "releases") SonatypeReleasesRepository + else SonatypeRepositoryRoot + "/" + status + ) + private def sonatypeS01Repo(status: String) = + MavenRepository( + "sonatype-s01-" + status, + SonatypeS01RepositoryRoot + "/" + status + ) + def sonatypeOssRepos(status: String) = + Vector(sonatypeRepo(status): @nowarn("cat=deprecation"), sonatypeS01Repo(status)) + def bintrayRepo(owner: String, repo: String) = + MavenRepository(s"bintray-$owner-$repo", s"https://dl.bintray.com/$owner/$repo/") + def bintrayIvyRepo(owner: String, repo: String) = + url(s"bintray-$owner-$repo", new URI(s"https://dl.bintray.com/$owner/$repo/").toURL)( + Resolver.ivyStylePatterns + ) + def jcenterRepo = JCenterRepository + + val ApacheMavenSnapshotsRepo = MavenRepository( + "apache-snapshots", + "https://repository.apache.org/content/repositories/snapshots/" + ) + + val ApacheMavenStagingRepo = MavenRepository( + "apache-staging", + "https://repository.apache.org/content/groups/staging/" + ) + + /** Add the local and Maven Central repositories to the user repositories. */ + def combineDefaultResolvers(userResolvers: Vector[Resolver]): Vector[Resolver] = + combineDefaultResolvers(userResolvers, mavenCentral = true) + + /** + * Add the local Ivy repository to the user repositories. + * If `mavenCentral` is true, add the Maven Central repository. + */ + def combineDefaultResolvers( + userResolvers: Vector[Resolver], + mavenCentral: Boolean + ): Vector[Resolver] = + combineDefaultResolvers(userResolvers, jcenter = false, mavenCentral) + + /** + * Add the local Ivy repository to the user repositories. + * If `jcenter` is true, add the JCenter. + * If `mavenCentral` is true, add the Maven Central repository. 
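 *
 * For example (illustrative; `myRepo` stands for any user-defined resolver):
 * {{{
 * // local Ivy repository first, then the user resolvers, then Maven Central (JCenter skipped)
 * Resolver.combineDefaultResolvers(Vector(myRepo), jcenter = false, mavenCentral = true)
 * }}}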
+ */ + def combineDefaultResolvers( + userResolvers: Vector[Resolver], + jcenter: Boolean, + mavenCentral: Boolean + ): Vector[Resolver] = + Vector(Resolver.defaultLocal) ++ + userResolvers ++ + single(JCenterRepository, jcenter) ++ + single(DefaultMavenRepository, mavenCentral) + + /** + * Reorganize the built-in resolvers that is configured for this application by the sbt launcher. + * If `jcenter` is true, add the JCenter. + * If `mavenCentral` is true, add the Maven Central repository. + */ + private[sbt] def reorganizeAppResolvers( + appResolvers: Vector[Resolver], + jcenter: Boolean, + mavenCentral: Boolean + ): Vector[Resolver] = + appResolvers.partition(_ == Resolver.defaultLocal) match { + case (locals, xs) => + locals ++ + (xs.partition(_ == JCenterRepository) match { + case (_, xs) => + single(JCenterRepository, jcenter) ++ + (xs.partition(_ == DefaultMavenRepository) match { + case (_, xs) => + single( + DefaultMavenRepository, + mavenCentral + ) ++ xs // TODO - Do we need to filter out duplicates? + }) + }) + } + + private def single[T](value: T, nonEmpty: Boolean): Vector[T] = + if (nonEmpty) Vector(value) else Vector.empty + + /** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */ + sealed abstract class Define[RepositoryType <: SshBasedRepository] { + + /** Subclasses should implement this method to */ + protected def construct( + name: String, + connection: SshConnection, + patterns: Patterns + ): RepositoryType + + /** + * Constructs this repository type with the given `name`. `basePatterns` are the initial patterns to use. A ManagedProject + * has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String)(implicit basePatterns: Patterns): RepositoryType = + apply(name, None, None, None) + + /** + * Constructs this repository type with the given `name` and `hostname`. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType = + apply(name, Some(hostname), None, None) + + /** + * Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial + * patterns will be resolved. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: String, basePath: String)(implicit + basePatterns: Patterns + ): RepositoryType = + apply(name, Some(hostname), None, Some(basePath)) + + /** + * Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: String, port: Int)(implicit + basePatterns: Patterns + ): RepositoryType = + apply(name, Some(hostname), Some(port), None) + + /** + * Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial + * patterns will be resolved. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. 
+ */ + def apply(name: String, hostname: String, port: Int, basePath: String)(implicit + basePatterns: Patterns + ): RepositoryType = + apply(name, Some(hostname), Some(port), Some(basePath)) + + /** + * Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial + * patterns will be resolved. `basePatterns` are the initial patterns to use. All but the `name` are optional (use None). + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])( + implicit basePatterns: Patterns + ): RepositoryType = + construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns)) + } + + /** A factory to construct an interface to an Ivy SSH resolver. */ + object ssh extends Define[SshRepository] { + protected def construct(name: String, connection: SshConnection, patterns: Patterns) = + SshRepository(name, connection, patterns, None) + } + + /** A factory to construct an interface to an Ivy SFTP resolver. */ + object sftp extends Define[SftpRepository] { + protected def construct(name: String, connection: SshConnection, patterns: Patterns) = + SftpRepository(name, connection, patterns) + } + + /** A factory to construct an interface to an Ivy filesystem resolver. */ + object file { + + /** + * Constructs a file resolver with the given name. The patterns to use must be explicitly specified + * using the `withPatterns` method on the constructed resolver object. + */ + def apply(name: String): FileRepository = + FileRepository(name, defaultFileConfiguration, Patterns(false)) + + /** Constructs a file resolver with the given name and base directory. */ + def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository = + baseRepository( + new File(toUri(baseDirectory).normalize.getSchemeSpecificPart).getAbsolutePath + )( + FileRepository(name, defaultFileConfiguration, _) + ) + + private def toUri(dir: File): URI = dir.toPath.toUri + } + object url { + + /** + * Constructs a URL resolver with the given name. The patterns to use must be explicitly specified + * using the `withPatterns` method on the constructed resolver object. + */ + def apply(name: String): URLRepository = URLRepository(name, Patterns(false)) + + /** Constructs a file resolver with the given name and base directory. */ + def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository = + baseRepository(baseURL.toURI.normalize.toString)(URLRepository(name, _)) + } + private def baseRepository[T](base: String)(construct: Patterns => T)(implicit + basePatterns: Patterns + ): T = + construct(resolvePatterns(base, basePatterns)) + + /** + * If `base` is None, `patterns` is returned unchanged. + * Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base. + */ + private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns = + base match { + case Some(path) => resolvePatterns(path, patterns) + case None => patterns + } + + /** Resolves the ivy file and artifact patterns in `patterns` against the given base. 
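 *
 * For instance (the base URL here is a made-up example):
 * {{{
 * resolvePattern("https://repo.example.com/ivy", Resolver.mavenStyleBasePattern)
 * // "https://repo.example.com/ivy/[organisation]/[module](_[scalaVersion])(_[sbtVersion])/[revision]/[artifact]-[revision](-[classifier]).[ext]"
 * }}}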
*/ + private def resolvePatterns(base: String, basePatterns: Patterns): Patterns = { + def resolveAll(patterns: Vector[String]) = patterns.map(p => resolvePattern(base, p)) + Patterns( + resolveAll(basePatterns.ivyPatterns), + resolveAll(basePatterns.artifactPatterns), + basePatterns.isMavenCompatible, + basePatterns.descriptorOptional, + basePatterns.skipConsistencyCheck + ) + } + private[sbt] def resolvePattern(base: String, pattern: String): String = { + val normBase = base.replace('\\', '/') + if (normBase.endsWith("/") || pattern.startsWith("/")) normBase + pattern + else normBase + "/" + pattern + } + def defaultFileConfiguration = FileConfiguration(true, None) + def mavenStylePatterns = Patterns().withArtifactPatterns(Vector(mavenStyleBasePattern)) + def ivyStylePatterns = defaultIvyPatterns // Patterns(Nil, Nil, false) + + def defaultPatterns = mavenStylePatterns + def mavenStyleBasePattern = + "[organisation]/[module](_[scalaVersion])(_[sbtVersion])/[revision]/[artifact]-[revision](-[classifier]).[ext]" + def localBasePattern = + "[organisation]/[module]/" + PluginPattern + "([branch]/)[revision]/[type]s/[artifact](-[classifier]).[ext]" + def defaultRetrievePattern = + "[type]s/[organisation]/[module]/" + PluginPattern + "[artifact](-[revision])(-[classifier]).[ext]" + final val PluginPattern = "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)" + + private[librarymanagement] def expandMavenSettings( + str: String, + envVars: Map[String, String] = sys.env, + props: Map[String, String] = sys.props.toMap + ): String = { + // Aren't regular expressions beautifully clear and concise. + // This means "find all ${...}" blocks, with the first group of each being the text between curly brackets. + val findQuoted = "\\$\\{([^}]*)}".r + val env = "env\\.(.*)".r + + findQuoted.replaceAllIn( + str, + regexMatch => + Regex.quoteReplacement { + regexMatch.group(1) match { + case env(variable) => envVars.getOrElse(variable, "") + case property => props.getOrElse(property, "") + } + } + ) + } + + private[this] def mavenLocalDir: File = { + def loadHomeFromSettings(f: () => File): Option[File] = + try { + val file = f() + if (!file.exists) None + else + ((XML.loadFile(file) \ "localRepository").text match { + case "" => None + case e @ _ => Some(new File(expandMavenSettings(e))) + }) + } catch { + // Occurs inside File constructor when property or environment variable does not exist + case _: NullPointerException => None + // Occurs when File does not exist + case _: IOException => None + case e: SAXParseException => + System.err.println(s"WARNING: Problem parsing ${f().getAbsolutePath}, ${e.getMessage}"); + None + } + sys.props.get("maven.repo.local").map(new File(_)) orElse + loadHomeFromSettings(() => new File(sbt.io.Path.userHome, ".m2/settings.xml")) orElse + loadHomeFromSettings(() => + new File(new File(System.getenv("M2_HOME")), "conf/settings.xml") + ) getOrElse + new File(sbt.io.Path.userHome, ".m2/repository") + } + // TODO - should this just be the *exact* same as mavenLocal? probably... 
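  // Both caches below share the directory computed by mavenLocalDir above: an explicit
  // -Dmaven.repo.local=<dir> wins, then a <localRepository> entry in settings.xml,
  // and otherwise ~/.m2/repository.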
+ def publishMavenLocal: MavenCache = new MavenCache("publish-m2-local", mavenLocalDir) + def mavenLocal: MavenRepository = new MavenCache("Maven2 Local", mavenLocalDir) + def defaultLocal = defaultUserFileRepository("local") + def defaultShared = defaultUserFileRepository("shared") + def defaultUserFileRepository(id: String) = { + val pList = Vector(s"$${ivy.home}/$id/$localBasePattern") + FileRepository( + id, + defaultFileConfiguration, + Patterns().withIvyPatterns(pList).withArtifactPatterns(pList).withIsMavenCompatible(false) + ) + } + def defaultIvyPatterns = { + val pList = Vector(localBasePattern) + Patterns().withIvyPatterns(pList).withArtifactPatterns(pList).withIsMavenCompatible(false) + } + + // to display all error messages at once, just log here don't throw + private[sbt] def warnHttp(value: String, suggestion: String, logger: Logger): Unit = { + logger.error(s"insecure HTTP request is unsupported '$value'; switch to HTTPS$suggestion") + } + private[sbt] def isInsecureUrl(str: String): Boolean = { + // don't try to parse str as URL because it could contain $variable from Ivy pattern + str.startsWith("http:") && + !(str.startsWith("http://localhost/") + || str.startsWith("http://localhost:") + || str.startsWith("http://127.0.0.1/") + || str.startsWith("http://127.0.0.1:")) + } + private[sbt] def validateURLRepository(repo: URLRepository, logger: Logger): Boolean = { + if (repo.allowInsecureProtocol) false + else { + val patterns = repo.patterns + val ivy = patterns.ivyPatterns.headOption match { + case Some(x) => isInsecureUrl(x) + case _ => false + } + val art = patterns.artifactPatterns.headOption match { + case Some(x) => isInsecureUrl(x) + case _ => false + } + if (ivy || art) { + warnHttp( + patterns.toString, + s""" or opt-in as Resolver.url("${repo.name}", url(...)).withAllowInsecureProtocol(true), or by using allowInsecureProtocol in repositories file""", + logger + ) + true + } else false + } + } + + private[sbt] def validateMavenRepo(repo: MavenRepo, logger: Logger): Boolean = + if (repo.allowInsecureProtocol) false + else if (isInsecureUrl(repo.root)) { + warnHttp( + repo.root, + s""" or opt-in as ("${repo.name}" at "${repo.root}").withAllowInsecureProtocol(true), or by using allowInsecureProtocol in repositories file""", + logger + ) + true + } else false + + private[sbt] def validateArtifact(art: Artifact, logger: Logger): Boolean = + if (art.allowInsecureProtocol) false + else + art.url match { + case Some(url) if isInsecureUrl(url.toString) => + warnHttp( + art.toString, + " or opt-in using from(url(...), allowInsecureProtocol = true) on ModuleID or .withAllowInsecureProtocol(true) on Artifact", + logger + ) + true + case _ => false + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/RichUpdateReport.scala b/lm-core/src/main/scala/sbt/librarymanagement/RichUpdateReport.scala new file mode 100644 index 000000000..321790def --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/RichUpdateReport.scala @@ -0,0 +1,124 @@ +package sbt +package librarymanagement + +import java.io.File +import sbt.io.IO + +/** + * Provides extra methods for filtering the contents of an `UpdateReport` + * and for obtaining references to a selected subset of the underlying files. 
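 *
 * A minimal usage sketch (assumes an `UpdateReport` value named `report` is in scope):
 * {{{
 * import sbt.io.ExactFilter
 * import sbt.librarymanagement.DependencyFilter._
 *
 * val rich        = new RichUpdateReport(report)
 * val allJars     = rich.allFiles                                      // every successfully retrieved file
 * val compileJars = rich.select(configurationFilter(new ExactFilter("compile")))
 * }}}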
+ */ +final class RichUpdateReport(report: UpdateReport) { + private[sbt] def recomputeStamps(): UpdateReport = { + val files = report.cachedDescriptor +: allFiles + val stamps = files + .map(f => + ( + f.toString, + // TODO: The list of files may also contain some odd files that do not actually exist like: + // "./target/ivyhome/resolution-cache/com.example/foo/0.4.0/resolved.xml.xml". + // IO.getModifiedTimeOrZero() will just return zero, but the list of files should not contain such + // files to begin with, in principle. + IO.getModifiedTimeOrZero(f) + ) + ) + .toMap + UpdateReport(report.cachedDescriptor, report.configurations, report.stats, stamps) + } + + import DependencyFilter._ + + /** Obtains all successfully retrieved files in all configurations and modules. */ + def allFiles: Vector[File] = matching(DependencyFilter.allPass) + + /** Obtains all successfully retrieved files in configurations, modules, and artifacts matching the specified filter. */ + def matching(f: DependencyFilter): Vector[File] = select0(f).distinct + + /** Obtains all successfully retrieved files matching all provided filters. */ + def select( + configuration: ConfigurationFilter, + module: ModuleFilter, + artifact: ArtifactFilter + ): Vector[File] = + matching(DependencyFilter.make(configuration, module, artifact)) + + def select(configuration: ConfigurationFilter): Vector[File] = + select(configuration, moduleFilter(), artifactFilter()) + def select(module: ModuleFilter): Vector[File] = + select(configurationFilter(), module, artifactFilter()) + def select(artifact: ArtifactFilter): Vector[File] = + select(configurationFilter(), moduleFilter(), artifact) + + private[this] def select0(f: DependencyFilter): Vector[File] = + for { + cReport <- report.configurations + mReport <- cReport.modules + (artifact, file) <- mReport.artifacts + if f(cReport.configuration, mReport.module, artifact) + } yield { + if (file == null) { + sys.error( + s"Null file: conf=${cReport.configuration}, module=${mReport.module}, art: $artifact" + ) + } + file + } + + /** Constructs a new report that only contains files matching the specified filter. 
*/ + def filter(f: DependencyFilter): UpdateReport = + moduleReportMap { (configuration, modReport) => + modReport + .withArtifacts( + modReport.artifacts filter { case (art, _) => + f(configuration, modReport.module, art) + } + ) + .withMissingArtifacts( + modReport.missingArtifacts filter { art => + f(configuration, modReport.module, art) + } + ) + } + + private[sbt] def substitute( + f: (ConfigRef, ModuleID, Vector[(Artifact, File)]) => Vector[(Artifact, File)] + ): UpdateReport = + moduleReportMap { (configuration, modReport) => + val newArtifacts = f(configuration, modReport.module, modReport.artifacts) + modReport + .withArtifacts(newArtifacts) + .withMissingArtifacts(modReport.missingArtifacts) + } + + def toSeq: Seq[(ConfigRef, ModuleID, Artifact, File)] = toVector + def toVector: Vector[(ConfigRef, ModuleID, Artifact, File)] = + for { + confReport <- report.configurations + modReport <- confReport.modules + (artifact, file) <- modReport.artifacts + } yield (confReport.configuration, modReport.module, artifact, file) + + def allMissing: Vector[(ConfigRef, ModuleID, Artifact)] = + for { + confReport <- report.configurations + modReport <- confReport.modules + artifact <- modReport.missingArtifacts + } yield (confReport.configuration, modReport.module, artifact) + + private[sbt] def addMissing(f: ModuleID => Seq[Artifact]): UpdateReport = + moduleReportMap { (configuration, modReport) => + modReport + .withMissingArtifacts((modReport.missingArtifacts ++ f(modReport.module)).distinct) + } + + private[sbt] def moduleReportMap(f: (ConfigRef, ModuleReport) => ModuleReport): UpdateReport = { + val newConfigurations = report.configurations.map { confReport => + import confReport._ + val newModules = modules map { modReport => + f(configuration, modReport) + } + ConfigurationReport(configuration, newModules, details) + } + UpdateReport(report.cachedDescriptor, newConfigurations, report.stats, report.stamps) + } +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/ScalaArtifacts.scala b/lm-core/src/main/scala/sbt/librarymanagement/ScalaArtifacts.scala new file mode 100644 index 000000000..3dc2b71c4 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/ScalaArtifacts.scala @@ -0,0 +1,113 @@ +package sbt.librarymanagement + +object ScalaArtifacts { + final val Organization = "org.scala-lang" + final val LibraryID = "scala-library" + final val CompilerID = "scala-compiler" + final val ReflectID = "scala-reflect" + final val ActorsID = "scala-actors" + final val ScalapID = "scalap" + final val Artifacts = Vector(LibraryID, CompilerID, ReflectID, ActorsID, ScalapID) + + final val Scala3LibraryID = "scala3-library" + final val Scala3CompilerID = "scala3-compiler" + final val Scala3InterfacesID = "scala3-interfaces" + final val TastyCoreID = "tasty-core" + final val ScaladocID = "scaladoc" + final val Scala3DocID = "scala3doc" + final val Scala3TastyInspectorID = "scala3-tasty-inspector" + + private[sbt] final val Scala3LibraryPrefix = Scala3LibraryID + "_" + private[sbt] final val Scala3CompilerPrefix = Scala3CompilerID + "_" + private[sbt] final val TastyCorePrefix = TastyCoreID + "_" + private[sbt] final val ScaladocPrefix = ScaladocID + "_" + private[sbt] final val Scala3DocPrefix = Scala3DocID + "_" + private[sbt] final val Scala3TastyInspectorPrefix = Scala3TastyInspectorID + "_" + + def isScala2Artifact(name: String): Boolean = { + name == LibraryID || name == CompilerID || name == ReflectID || name == ActorsID || name == ScalapID + } + def isScala3Artifact(name: String): 
Boolean = { + name.startsWith(Scala3LibraryPrefix) || + name.startsWith(Scala3CompilerPrefix) || + name.startsWith(TastyCorePrefix) || + name == Scala3InterfacesID || + name.startsWith(ScaladocPrefix) || + name.startsWith(Scala3DocPrefix) || + name.startsWith(Scala3TastyInspectorPrefix) + } + + def isScala3(scalaVersion: String): Boolean = scalaVersion.startsWith("3.") + + private[sbt] def isScala3M123(scalaVersion: String): Boolean = + (scalaVersion == "3.0.0-M1") || + (scalaVersion == "3.0.0-M2") || + (scalaVersion == "3.0.0-M3") + + def libraryIds(version: String): Array[String] = { + if (isScala3(version)) + Array(Scala3LibraryID, LibraryID) + else Array(LibraryID) + } + + def compilerId(version: String): String = { + if (isScala3(version)) Scala3CompilerID + else CompilerID + } + + def libraryDependency(version: String): ModuleID = libraryDependency(Organization, version) + + def libraryDependency(org: String, version: String): ModuleID = + if isScala3(version) then + ModuleID(org, Scala3LibraryID, version) + .withCrossVersion(CrossVersion.binary) + .platform(Platform.jvm) + else ModuleID(org, LibraryID, version).platform(Platform.jvm) + + private[sbt] def docToolDependencies( + org: String, + version: String + ): Seq[ModuleID] = + if (isScala3M123(version)) + Seq( + ModuleID(org, Scala3DocID, version) + .withConfigurations(Some(Configurations.ScalaDocTool.name + "->default(compile)")) + .withCrossVersion(CrossVersion.binary) + ) + else if (isScala3(version)) + Seq( + ModuleID(org, ScaladocID, version) + .withConfigurations(Some(Configurations.ScalaDocTool.name + "->default(compile)")) + .withCrossVersion(CrossVersion.binary) + .platform(Platform.jvm) + ) + else Seq.empty + + private[sbt] def toolDependencies( + org: String, + version: String + ): Seq[ModuleID] = + if (isScala3(version)) + Seq( + ModuleID(org, Scala3CompilerID, version) + .withConfigurations(Some(Configurations.ScalaTool.name + "->default(compile)")) + .withCrossVersion(CrossVersion.binary) + .platform(Platform.jvm) + ) + else + Seq( + scala2ToolDependency(org, CompilerID, version), + scala2ToolDependency(org, LibraryID, version) + ) + + private[this] def scala2ToolDependency(org: String, id: String, version: String): ModuleID = + ModuleID(org, id, version) + .withConfigurations( + Some(Configurations.ScalaTool.name + "->default,optional(default)") + ) + .platform(Platform.jvm) +} + +object SbtArtifacts { + val Organization = "org.scala-sbt" +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/TrackLevel.scala b/lm-core/src/main/scala/sbt/librarymanagement/TrackLevel.scala new file mode 100644 index 000000000..e92b37d68 --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/TrackLevel.scala @@ -0,0 +1,33 @@ +package sbt.librarymanagement + +/** + * An enumeration defining the tracking of dependencies. A level includes all of the levels + * with id larger than its own id. For example, Warn (id=3) includes Error (id=4). 
+ */ +object TrackLevel { + case object NoTracking extends TrackLevel { + override def id: Int = 0 + } + case object TrackIfMissing extends TrackLevel { + override def id: Int = 1 + } + case object TrackAlways extends TrackLevel { + override def id: Int = 10 + } + + private[sbt] def apply(x: Int): TrackLevel = + x match { + case 0 => NoTracking + case 1 => TrackIfMissing + case 10 => TrackAlways + } + + def intersection(a: TrackLevel, b: TrackLevel): TrackLevel = + if (a.id < b.id) a + else b + def intersectionAll(vs: List[TrackLevel]): TrackLevel = vs reduceLeft intersection +} + +sealed trait TrackLevel { + def id: Int +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/UnresolvedWarning.scala b/lm-core/src/main/scala/sbt/librarymanagement/UnresolvedWarning.scala new file mode 100644 index 000000000..6f4aa832d --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/UnresolvedWarning.scala @@ -0,0 +1,90 @@ +package sbt.librarymanagement + +import collection.mutable +import sbt.util.ShowLines +import sbt.internal.util.{ SourcePosition, LinePosition, RangePosition, LineRange } + +final class ResolveException( + val messages: Seq[String], + val failed: Seq[ModuleID], + val failedPaths: Map[ModuleID, Seq[ModuleID]] +) extends RuntimeException(messages.mkString("\n")) { + def this(messages: Seq[String], failed: Seq[ModuleID]) = + this( + messages, + failed, + Map(failed map { m => + m -> Nil + }: _*) + ) +} + +/** + * Represents unresolved dependency warning, which displays reconstructed dependency tree + * along with source position of each node. + */ +final class UnresolvedWarning( + val resolveException: ResolveException, + val failedPaths: Seq[Seq[(ModuleID, Option[SourcePosition])]] +) + +object UnresolvedWarning { + def apply( + err: ResolveException, + config: UnresolvedWarningConfiguration + ): UnresolvedWarning = { + def modulePosition(m0: ModuleID): Option[SourcePosition] = + config.modulePositions.find { case (m, _) => + (m.organization == m0.organization) && + (m0.name startsWith m.name) && + (m.revision == m0.revision) + } map { case (_, p) => + p + } + val failedPaths = err.failed map { (x: ModuleID) => + err.failedPaths(x).toList.reverse map { id => + (id, modulePosition(id)) + } + } + new UnresolvedWarning(err, failedPaths) + } + + private[sbt] def sourcePosStr(posOpt: Option[SourcePosition]): String = + posOpt match { + case Some(LinePosition(path, start)) => s" ($path#L$start)" + case Some(RangePosition(path, LineRange(start, end))) => s" ($path#L$start-$end)" + case _ => "" + } + implicit val unresolvedWarningLines: ShowLines[UnresolvedWarning] = ShowLines { a => + val withExtra = a.resolveException.failed.filter(_.extraDependencyAttributes.nonEmpty) + val buffer = mutable.ListBuffer[String]() + if (withExtra.nonEmpty) { + buffer += "\n\tNote: Some unresolved dependencies have extra attributes. Check that these dependencies exist with the requested attributes." 
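      // One indented line per failed module; the rendered ModuleID carries the extra
      // attributes (for an sbt plugin these are typically the requested sbt and Scala versions).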
+ withExtra foreach { id => + buffer += "\t\t" + id + } + } + if (a.failedPaths.nonEmpty) { + buffer += "\n\tNote: Unresolved dependencies path:" + a.failedPaths foreach { path => + if (path.nonEmpty) { + val head = path.head + buffer += "\t\t" + head._1.toString + sourcePosStr(head._2) + path.tail foreach { case (m, pos) => + buffer += "\t\t +- " + m.toString + sourcePosStr(pos) + } + } + } + } + buffer.toList + } +} + +final class UnresolvedWarningConfiguration private[sbt] ( + val modulePositions: Map[ModuleID, SourcePosition] +) +object UnresolvedWarningConfiguration { + def apply(): UnresolvedWarningConfiguration = apply(Map()) + def apply(modulePositions: Map[ModuleID, SourcePosition]): UnresolvedWarningConfiguration = + new UnresolvedWarningConfiguration(modulePositions) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/UpdateReportExtra.scala b/lm-core/src/main/scala/sbt/librarymanagement/UpdateReportExtra.scala new file mode 100644 index 000000000..c8b3ac69c --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/UpdateReportExtra.scala @@ -0,0 +1,168 @@ +/* sbt -- Simple Build Tool + * Copyright 2011 Mark Harrah + */ +package sbt.librarymanagement + +import java.io.File +import java.{ util => ju } + +private[librarymanagement] abstract class ConfigurationReportExtra { + def configuration: ConfigRef + def modules: Vector[ModuleReport] + def details: Vector[OrganizationArtifactReport] + + /** a sequence of evicted modules */ + def evicted: Seq[ModuleID] = + details flatMap (_.modules) filter (_.evicted) map (_.module) + + /** + * All resolved modules for this configuration. + * For a given organization and module name, there is only one revision/`ModuleID` in this sequence. + */ + def allModules: Seq[ModuleID] = modules map addConfiguration + private[this] def addConfiguration(mr: ModuleReport): ModuleID = { + val module = mr.module + if (module.configurations.isEmpty) { + val conf = mr.configurations map (c => s"$configuration->$c") mkString ";" + module.withConfigurations(Some(conf)) + } else module + } + + def retrieve(f: (ConfigRef, ModuleID, Artifact, File) => File): ConfigurationReport = + ConfigurationReport( + configuration, + modules map { + _.retrieve((mid, art, file) => f(configuration, mid, art, file)) + }, + details + ) +} + +private[librarymanagement] abstract class ModuleReportExtra { + def module: ModuleID + def artifacts: Vector[(Artifact, File)] + def missingArtifacts: Vector[Artifact] + def status: Option[String] + def publicationDate: Option[ju.Calendar] + def resolver: Option[String] + def artifactResolver: Option[String] + def evicted: Boolean + def evictedData: Option[String] + def evictedReason: Option[String] + def problem: Option[String] + def homepage: Option[String] + def extraAttributes: Map[String, String] + def isDefault: Option[Boolean] + def branch: Option[String] + def configurations: Vector[ConfigRef] + def licenses: Vector[(String, Option[String])] + def callers: Vector[Caller] + + def withArtifacts(artifacts: Vector[(Artifact, File)]): ModuleReport + + protected[this] def arts: Vector[String] = + artifacts.map(_.toString) ++ missingArtifacts.map(art => "(MISSING) " + art) + + def detailReport: String = + s"\t\t- ${module.revision}\n" + + (if (arts.size <= 1) "" else arts.mkString("\t\t\t", "\n\t\t\t", "\n")) + + reportStr("status", status) + + reportStr("publicationDate", publicationDate map calendarToString) + + reportStr("resolver", resolver) + + reportStr("artifactResolver", artifactResolver) + + reportStr("evicted", 
Some(evicted.toString)) + + reportStr("evictedData", evictedData) + + reportStr("evictedReason", evictedReason) + + reportStr("problem", problem) + + reportStr("homepage", homepage) + + reportStr( + "extraAttributes", + if (extraAttributes.isEmpty) None + else { + Some(extraAttributes.toString) + } + ) + + reportStr("isDefault", isDefault map { _.toString }) + + reportStr("branch", branch) + + reportStr( + "configurations", + if (configurations.isEmpty) None + else { + Some(configurations.mkString(", ")) + } + ) + + reportStr( + "licenses", + if (licenses.isEmpty) None + else { + Some(licenses.mkString(", ")) + } + ) + + reportStr( + "callers", + if (callers.isEmpty) None + else { + Some(callers.mkString(", ")) + } + ) + private[sbt] def reportStr(key: String, value: Option[String]): String = + value map { x => + s"\t\t\t$key: $x\n" + } getOrElse "" + + private[this] def calendarToString(c: ju.Calendar): String = { + import sjsonnew._, BasicJsonProtocol._ + implicitly[IsoString[ju.Calendar]] to c + } + + def retrieve(f: (ModuleID, Artifact, File) => File): ModuleReport = + withArtifacts(artifacts.map { case (art, file) => (art, f(module, art, file)) }) +} + +private[librarymanagement] abstract class UpdateReportExtra { + def cachedDescriptor: File + def configurations: Vector[ConfigurationReport] + def stats: UpdateStats + private[sbt] def stamps: Map[String, Long] + + private[sbt] def moduleKey(m: ModuleID) = (m.organization, m.name, m.revision) + + /** All resolved modules in all configurations. */ + def allModules: Vector[ModuleID] = { + configurations.flatMap(_.allModules).groupBy(moduleKey).toVector map { case (_, v) => + v reduceLeft { (agg, x) => + agg.withConfigurations( + (agg.configurations, x.configurations) match { + case (None, _) => x.configurations + case (Some(ac), None) => Some(ac) + case (Some(ac), Some(xc)) => Some(s"$ac;$xc") + } + ) + } + } + } + + def allModuleReports: Vector[ModuleReport] = { + configurations.flatMap(_.modules).groupBy(mR => moduleKey(mR.module)).toVector map { + case (_, v) => + v reduceLeft { (agg, x) => + agg.withConfigurations( + (agg.configurations, x.configurations) match { + case (v, _) if v.isEmpty => x.configurations + case (ac, v) if v.isEmpty => ac + case (ac, xc) => ac ++ xc + } + ) + } + } + } + + def retrieve(f: (ConfigRef, ModuleID, Artifact, File) => File): UpdateReport = + UpdateReport(cachedDescriptor, configurations map { _ retrieve f }, stats, stamps) + + /** Gets the report for the given configuration, or `None` if the configuration was not resolved. */ + def configuration(s: ConfigRef) = configurations.find(_.configuration == s) + + /** Gets the names of all resolved configurations. This `UpdateReport` contains one `ConfigurationReport` for each configuration in this list. 
*/ + def allConfigurations: Vector[ConfigRef] = configurations.map(_.configuration) +} diff --git a/lm-core/src/main/scala/sbt/librarymanagement/VersionNumber.scala b/lm-core/src/main/scala/sbt/librarymanagement/VersionNumber.scala new file mode 100644 index 000000000..1da91342e --- /dev/null +++ b/lm-core/src/main/scala/sbt/librarymanagement/VersionNumber.scala @@ -0,0 +1,266 @@ +package sbt.librarymanagement + +final class VersionNumber private[sbt] ( + val numbers: Seq[Long], + val tags: Seq[String], + val extras: Seq[String] +) { + + def _1: Option[Long] = get(0) + def _2: Option[Long] = get(1) + def _3: Option[Long] = get(2) + def _4: Option[Long] = get(3) + def get(idx: Int): Option[Long] = numbers lift idx + def size: Int = numbers.size + + /** The vector of version numbers from more to less specific from this version number. */ + lazy val cascadingVersions: Vector[VersionNumber] = + (Vector(this) ++ (numbers.inits filter (_.size >= 2) map (VersionNumber(_, Nil, Nil)))).distinct + + override val toString: String = + numbers.mkString(".") + mkString1(tags, "-", "-", "") + extras.mkString("") + + override def hashCode: Int = numbers.## * 41 * 41 + tags.## * 41 + extras.## + + override def equals(that: Any): Boolean = that match { + case v: VersionNumber => (numbers == v.numbers) && (tags == v.tags) && (extras == v.extras) + case _ => false + } + + def matchesSemVer(selsem: SemanticSelector): Boolean = { + selsem.matches(this) + } + + /** A variant of mkString that returns the empty string if the sequence is empty. */ + private[this] def mkString1[A](xs: Seq[A], start: String, sep: String, end: String): String = + if (xs.isEmpty) "" else xs.mkString(start, sep, end) +} + +object VersionNumber { + + /** + * @param numbers numbers delimited by a dot. + * @param tags string prefixed by a dash. + * @param extras strings at the end. + */ + def apply(numbers: Seq[Long], tags: Seq[String], extras: Seq[String]): VersionNumber = + new VersionNumber(numbers, tags, extras) + + def apply(s: String): VersionNumber = + unapply(s) match { + case Some((ns, ts, es)) => VersionNumber(ns, ts, es) + case _ => throw new IllegalArgumentException(s"Invalid version number: $s") + } + + def unapply(v: VersionNumber): Option[(Seq[Long], Seq[String], Seq[String])] = + Some((v.numbers, v.tags, v.extras)) + + def unapply(s: String): Option[(Seq[Long], Seq[String], Seq[String])] = { + + // null safe, empty string safe + def splitOn[A](s: String, sep: Char): Vector[String] = + if (s eq null) Vector() + else s.split(sep).filterNot(_ == "").toVector + + def splitDot(s: String) = splitOn(s, '.') map (_.toLong) + def splitDash(s: String) = splitOn(s, '-') + def splitPlus(s: String) = splitOn(s, '+') map ("+" + _) + + val TaggedVersion = """(\d{1,14})([\.\d{1,14}]*)((?:-\w+(?:\.\w+)*)*)((?:\+.+)*)""".r + val NonSpaceString = """(\S+)""".r + + s match { + case TaggedVersion(m, ns, ts, es) => + val numbers = Vector(m.toLong) ++ splitDot(ns) + val tags = splitDash(ts) + val extras = splitPlus(es) + Some((numbers, tags, extras)) + case "" => None + case NonSpaceString(s) => Some((Vector.empty, Vector.empty, Vector(s))) + case _ => None + } + } + + /** Strict. Checks everything. */ + object Strict extends VersionNumberCompatibility { + def name: String = "Strict" + def isCompatible(v1: VersionNumber, v2: VersionNumber): Boolean = v1 == v2 + } + + /** Semantic Versioning. 
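 * A couple of illustrative checks against the rules quoted below:
 * {{{
 * SemVer.isCompatible(VersionNumber("1.1.0"), VersionNumber("1.2.3")) // true: same major version, major > 0
 * SemVer.isCompatible(VersionNumber("0.1.0"), VersionNumber("0.1.1")) // false: 0.y.z versions must match exactly
 * }}}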
See http://semver.org/spec/v2.0.0.html */ + object SemVer extends VersionNumberCompatibility { + def name: String = "Semantic Versioning" + + /* Quotes of parts of the rules in the SemVer Spec relevant to compatibility checking: + * + * Rule 2: + * > A normal version number MUST take the form X.Y.Z + * + * Rule 4: + * > Major version zero (0.y.z) is for initial development. Anything may change at any time. + * + * Rule 6: + * > Patch version Z (x.y.Z | x > 0) MUST be incremented if only backwards compatible bug fixes are introduced. + * + * Rule 7: + * > Minor version Y (x.Y.z | x > 0) MUST be incremented if new, backwards compatible functionality is introduced. + * + * Rule 8: + * > Major version X (X.y.z | X > 0) MUST be incremented if any backwards incompatible changes are introduced. + * + * Rule 9: + * > A pre-release version MAY be denoted by appending a hyphen and a series of + * > dot separated identifiers immediately following the patch version. + * > Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + * > Identifiers MUST NOT be empty. + * > Numeric identifiers MUST NOT include leading zeroes. + * > Pre-release versions have a lower precedence than the associated normal version. + * > A pre-release version indicates that the version is unstable and might not satisfy the + * > intended compatibility requirements as denoted by its associated normal version. + * > Examples: 1.0.0-alpha, 1.0.0-alpha.1, 1.0.0-0.3.7, 1.0.0-x.7.z.92. + * + * Rule 10: + * > Build metadata MAY be denoted by appending a plus sign and a series of + * > dot separated identifiers immediately following the patch or pre-release version. + * > Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + * > Identifiers MUST NOT be empty. + * > Build metadata SHOULD be ignored when determining version precedence. + * > Thus two versions that differ only in the build metadata, have the same precedence. + * > Examples: 1.0.0-alpha+001, 1.0.0+20130313144700, 1.0.0-beta+exp.sha.5114f85. + * + * Rule 10 means that build metadata is never considered for compatibility + * we'll enforce this immediately by dropping them from both versions + * Rule 2 we enforce with custom extractors. + * Rule 4 we enforce by matching x = 0 & fully equals checking the two versions + * Rule 6, 7 & 8 means version compatibility is determined by comparing the two X values + * Rule 9.. + * Dale thinks means pre-release versions are fully equals checked.. + * Eugene thinks means pre-releases before 1.0.0 are not compatible, if not they are.. + */ + def isCompatible(v1: VersionNumber, v2: VersionNumber): Boolean = + doIsCompat(dropBuildMetadata(v1), dropBuildMetadata(v2)) + + private[this] def doIsCompat(v1: VersionNumber, v2: VersionNumber): Boolean = + (v1, v2) match { + case (NormalVersion(0, _, _), NormalVersion(0, _, _)) => v1 == v2 // R4 + case (NormalVersion(_, 0, 0), NormalVersion(_, 0, 0)) => v1 == v2 // R9 maybe? + case (NormalVersion(x1, _, _), NormalVersion(x2, _, _)) => x1 == x2 // R6, R7 & R8 + case _ => false + } + + // SemVer Spec Rule 10 (above) + private[VersionNumber] def dropBuildMetadata(v: VersionNumber) = + if (v.extras.isEmpty) v else VersionNumber(v.numbers, v.tags, Nil) + + // An extractor for SemVer's "normal version number" - SemVer Spec Rule 2 & Rule 9 (above) + private[VersionNumber] object NormalVersion { + def unapply(v: VersionNumber): Option[(Long, Long, Long)] = + PartialFunction.condOpt(v.numbers) { + // NOTE! 
We allow the z to be missing, because of legacy like commons-io 1.3 + case Seq(x, y, _*) => (x, y, v._3 getOrElse 0) + } + } + } + + /** + * A variant of SemVar that seems to be common among the Scala libraries. + * The second segment (y in x.y.z) increments breaks the binary compatibility even when x > 0. + * Also API compatibility is expected even when the first segment is zero. + */ + object SecondSegment extends VersionNumberCompatibility { + def name: String = "Second Segment Variant" + def isCompatible(v1: VersionNumber, v2: VersionNumber): Boolean = + PackVer.isCompatible(v1, v2) + } + + /** + * A variant of SemVar that seems to be common among the Scala libraries. + * The second segment (y in x.y.z) increments breaks the binary compatibility even when x > 0. + * Also API compatibility is expected even when the first segment is zero. + */ + object PackVer extends VersionNumberCompatibility { + import SemVer._ + + def name: String = "Package Versioning Policy" + + def isCompatible(v1: VersionNumber, v2: VersionNumber): Boolean = + doIsCompat(dropBuildMetadata(v1), dropBuildMetadata(v2)) + + private[this] def doIsCompat(v1: VersionNumber, v2: VersionNumber): Boolean = { + (v1, v2) match { + case (NormalVersion(_, _, 0), NormalVersion(_, _, 0)) => v1 == v2 // R9 maybe? + case (NormalVersion(x1, y1, _), NormalVersion(x2, y2, _)) => (x1 == x2) && (y1 == y2) + case _ => false + } + } + } + + /** + * A variant of SemVar that enforces API compatibility when the first segment is zero. + */ + object EarlySemVer extends VersionNumberCompatibility { + import SemVer._ + + def name: String = "Early Semantic Versioning" + + /* Quotes of parts of the rules in the SemVer Spec relevant to compatibility checking: + * + * Rule 2: + * > A normal version number MUST take the form X.Y.Z + * + * Rule 6: + * > Patch version Z (x.y.Z | x > 0) MUST be incremented if only backwards compatible bug fixes are introduced. + * + * Rule 7: + * > Minor version Y (x.Y.z | x > 0) MUST be incremented if new, backwards compatible functionality is introduced. + * + * Rule 8: + * > Major version X (X.y.z | X > 0) MUST be incremented if any backwards incompatible changes are introduced. + * + * Rule 9: + * > A pre-release version MAY be denoted by appending a hyphen and a series of + * > dot separated identifiers immediately following the patch version. + * > Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + * > Identifiers MUST NOT be empty. + * > Numeric identifiers MUST NOT include leading zeroes. + * > Pre-release versions have a lower precedence than the associated normal version. + * > A pre-release version indicates that the version is unstable and might not satisfy the + * > intended compatibility requirements as denoted by its associated normal version. + * > Examples: 1.0.0-alpha, 1.0.0-alpha.1, 1.0.0-0.3.7, 1.0.0-x.7.z.92. + * + * Rule 10: + * > Build metadata MAY be denoted by appending a plus sign and a series of + * > dot separated identifiers immediately following the patch or pre-release version. + * > Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + * > Identifiers MUST NOT be empty. + * > Build metadata SHOULD be ignored when determining version precedence. + * > Thus two versions that differ only in the build metadata, have the same precedence. + * > Examples: 1.0.0-alpha+001, 1.0.0+20130313144700, 1.0.0-beta+exp.sha.5114f85. 
+ * + * Rule 10 means that build metadata is never considered for compatibility + * we'll enforce this immediately by dropping them from both versions + * Rule 2 we enforce with custom extractors. + * Rule 6, 7 & 8 means version compatibility is determined by comparing the two X values + * Rule 9.. + * Dale thinks means pre-release versions are fully equals checked.. + * Eugene thinks means pre-releases before 1.0.0 are not compatible, if not they are.. + * Rule 4 is modified in this variant. + */ + def isCompatible(v1: VersionNumber, v2: VersionNumber): Boolean = + doIsCompat(dropBuildMetadata(v1), dropBuildMetadata(v2)) + + private[this] def doIsCompat(v1: VersionNumber, v2: VersionNumber): Boolean = + (v1, v2) match { + case (NormalVersion(0, _, 0), NormalVersion(0, _, 0)) => v1 == v2 + case (NormalVersion(0, y1, _), NormalVersion(0, y2, _)) => y1 == y2 + case (NormalVersion(_, 0, 0), NormalVersion(_, 0, 0)) => v1 == v2 // R9 maybe? + case (NormalVersion(x1, _, _), NormalVersion(x2, _, _)) => x1 == x2 // R6, R7 & R8 + case _ => false + } + } +} + +trait VersionNumberCompatibility { + def name: String + def isCompatible(v1: VersionNumber, v2: VersionNumber): Boolean +} diff --git a/lm-core/src/test/scala/ConfigMacroSpec.scala b/lm-core/src/test/scala/ConfigMacroSpec.scala new file mode 100644 index 000000000..ea409efa7 --- /dev/null +++ b/lm-core/src/test/scala/ConfigMacroSpec.scala @@ -0,0 +1,61 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement.Configuration +import sbt.librarymanagement.Configurations.config +import scala.util.control.NonFatal +import org.scalacheck._ +import Prop._ + +class ConfigDefs { + lazy val Kompile = config("kompile") + val X = config("x") + val Z = config("z").hide + val A: Configuration = config("a") + lazy val Aa: Configuration = config("aa") +} + +object ConfigMacroSpec extends Properties("ConfigMacroSpec") { + lazy val cd = new ConfigDefs + import cd._ + + def secure(f: => Prop): Prop = + try { + Prop.secure(f) + } catch { + case NonFatal(e) => + e.printStackTrace + throw e + } + + property("Explicit type on lazy val supported") = secure { + check(Aa, "Aa", "aa", true) + } + + property("Explicit type on val supported") = secure { + check(A, "A", "a", true) + } + + property("lazy vals supported") = secure { + check(Kompile, "Kompile", "kompile", true) + } + + property("plain vals supported") = secure { + check(X, "X", "x", true) + } + + property("Directory overridable") = secure { + check(Z, "Z", "z", false) + } + + def check(c: Configuration, id: String, name: String, isPublic: Boolean): Prop = { + s"Expected id: $id" |: + s"Expected name: $name" |: + s"Expected isPublic: $isPublic" |: + s"Actual id: ${c.id}" |: + s"Actual name: ${c.name}" |: + s"Actual isPublic: ${c.isPublic}" |: + (c.id == id) && + (c.name == name) && + (c.isPublic == isPublic) + } +} diff --git a/lm-core/src/test/scala/UpdateReportSpec.scala b/lm-core/src/test/scala/UpdateReportSpec.scala new file mode 100644 index 000000000..d12d76585 --- /dev/null +++ b/lm-core/src/test/scala/UpdateReportSpec.scala @@ -0,0 +1,51 @@ +package sbt.librarymanagement + +import java.io.File + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class UpdateReportSpec extends AnyFlatSpec with Matchers { + "UpdateReport.toString" should "have a nice toString" in { + assert(updateReport.toString === s""" + |Update report: + | Resolve time: 0 ms, Download time: 0 ms, Download size: 0 bytes + | compile: + | org:name + | - 1.0 + | publicationDate: 
1970-01-01T00:00:00Z + | evicted: false + | + |""".stripMargin.drop(1)) + } + + lazy val updateReport = + UpdateReport( + new File("cachedDescriptor.data"), + Vector(configurationReport), + UpdateStats(0, 0, 0, false), + Map.empty + ) + + lazy val configurationReport = + ConfigurationReport( + ConfigRef("compile"), + Vector(moduleReport), + Vector(organizationArtifactReport) + ) + + lazy val moduleReport = ( + ModuleReport(ModuleID("org", "name", "1.0"), Vector.empty, Vector.empty) + withPublicationDate Some(epochCalendar) + ) + + lazy val organizationArtifactReport = + OrganizationArtifactReport("org", "name", Vector(moduleReport)) + + val epochCalendar: java.util.Calendar = { + val utc = java.util.TimeZone getTimeZone "UTC" + val c = new java.util.GregorianCalendar(utc, java.util.Locale.ENGLISH) + c setTimeInMillis 0L + c + } +} diff --git a/lm-core/src/test/scala/example/tests/CrossVersionCompatTest.scala b/lm-core/src/test/scala/example/tests/CrossVersionCompatTest.scala new file mode 100644 index 000000000..ab5f47633 --- /dev/null +++ b/lm-core/src/test/scala/example/tests/CrossVersionCompatTest.scala @@ -0,0 +1,72 @@ +package example.tests + +import sbt.librarymanagement.{ CrossVersion, Disabled } +import verify.BasicTestSuite +import scala.annotation.nowarn + +@nowarn +object CrossVersionCompatTest extends BasicTestSuite { + test("CrossVersion.Disabled is typed to be Disabled") { + assert(CrossVersion.Disabled match { + case _: Disabled => true + case _ => false + }) + } + + test("CrossVersion.Disabled functions as disabled") { + assert(CrossVersion(CrossVersion.disabled, "1.0.0", "1.0") == None) + assert(CrossVersion(CrossVersion.Disabled, "1.0.0", "1.0") == None) + } + + test("CrossVersion.Disabled() is typed to be Disabled") { + assert(CrossVersion.Disabled() match { + case _: Disabled => true + case _ => false + }) + } + + test("CrossVersion.Disabled() functions as disabled") { + assert(CrossVersion(CrossVersion.disabled, "1.0.0", "1.0") == None) + assert(CrossVersion(CrossVersion.Disabled(), "1.0.0", "1.0") == None) + } + + test("CrossVersion.Disabled is stable") { + assert(CrossVersion.Disabled match { + case CrossVersion.Disabled => true + case _ => false + }) + } + + test("sbt.librarymanagement.Disabled is typed to be Disabled") { + assert(Disabled match { + case _: Disabled => true + case _ => false + }) + } + + test("sbt.librarymanagement.Disabled is stable") { + assert(Disabled match { + case Disabled => true + case _ => false + }) + } + + test("sbt.librarymanagement.Disabled() is typed to be Disabled") { + assert(Disabled() match { + case _: Disabled => true + case _ => false + }) + } + + test("CrossVersion.disabled is sbt.librarymanagement.Disabled") { + assert(CrossVersion.disabled == Disabled) + } + + test("CrossVersion.Disabled is sbt.librarymanagement.Disabled") { + assert(CrossVersion.Disabled == Disabled) + } + + test("CrossVersion.Disabled() is sbt.librarymanagement.Disabled") { + assert(CrossVersion.Disabled() == Disabled) + } +} diff --git a/lm-core/src/test/scala/sbt/internal/librarymanagement/UnitSpec.scala b/lm-core/src/test/scala/sbt/internal/librarymanagement/UnitSpec.scala new file mode 100644 index 000000000..c656812d0 --- /dev/null +++ b/lm-core/src/test/scala/sbt/internal/librarymanagement/UnitSpec.scala @@ -0,0 +1,8 @@ +package sbt +package internal +package librarymanagement + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +abstract class UnitSpec extends AnyFlatSpec with Matchers diff --git 
a/lm-core/src/test/scala/sbt/librarymanagement/ConfigMacroTest.scala b/lm-core/src/test/scala/sbt/librarymanagement/ConfigMacroTest.scala new file mode 100644 index 000000000..27103395a --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/ConfigMacroTest.scala @@ -0,0 +1,13 @@ +package sbt.librarymanagement + +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers + +class ConfigMacroTest extends AnyFunSpec with Matchers { + describe("Configurations.config") { + it("should validate the ID in compile time") { + """val A = Configurations.config("a")""" should compile + """val b = Configurations.config("b")""" shouldNot compile + } + } +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/CrossVersionTest.scala b/lm-core/src/test/scala/sbt/librarymanagement/CrossVersionTest.scala new file mode 100644 index 000000000..c7cac1919 --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/CrossVersionTest.scala @@ -0,0 +1,378 @@ +package sbt.librarymanagement + +import sbt.internal.librarymanagement.UnitSpec +import CrossVersion._ +import scala.annotation.nowarn + +class CrossVersionTest extends UnitSpec { + "sbtApiVersion" should "for xyz return None" in { + sbtApiVersion("xyz") shouldBe None + } + it should "for 0.12 return None" in { + sbtApiVersion("0.12") shouldBe None + } + it should "for 0.12.0-SNAPSHOT return None" in { + sbtApiVersion("0.12.0-SNAPSHOT") shouldBe None + } + it should "for 0.12.0-RC1 return Some((0, 12))" in { + sbtApiVersion("0.12.0-RC1") shouldBe Some((0, 12)) + } + it should "for 0.12.0 return Some((0, 12))" in { + sbtApiVersion("0.12.0") shouldBe Some((0, 12)) + } + it should "for 0.12.1-SNAPSHOT return Some((0, 12))" in { + sbtApiVersion("0.12.1-SNAPSHOT") shouldBe Some((0, 12)) + } + it should "for 0.12.1-RC1 return Some((0, 12))" in { + sbtApiVersion("0.12.1-RC1") shouldBe Some((0, 12)) + } + it should "for 0.12.1 return Some((0, 12))" in { + sbtApiVersion("0.12.1") shouldBe Some((0, 12)) + } + it should "for 1.0.0-M6 return None" in { + sbtApiVersion("1.0.0-M6") shouldBe None + } + it should "for 1.0.0-RC1 return Some((1, 0))" in { + sbtApiVersion("1.0.0-RC1") shouldBe Some((1, 0)) + } + it should "for 1.0.0 return Some((1, 0))" in { + sbtApiVersion("1.0.0") shouldBe Some((1, 0)) + } + it should "for 1.0.2-M1 return Some((1, 0))" in { + sbtApiVersion("1.0.2-M1") shouldBe Some((1, 0)) + } + it should "for 1.0.2-RC1 return Some((1, 0))" in { + sbtApiVersion("1.0.2-RC1") shouldBe Some((1, 0)) + } + it should "for 1.0.2 return Some((1, 0))" in { + sbtApiVersion("1.0.2") shouldBe Some((1, 0)) + } + it should "for 1.3.0 return Some((1, 0))" in { + sbtApiVersion("1.3.0") shouldBe Some((1, 0)) + } + it should "for 1.10.0 return Some((1, 0))" in { + sbtApiVersion("1.10.0") shouldBe Some((1, 0)) + } + it should "for 2.0.0 return Some((2, 0))" in { + sbtApiVersion("2.0.0") shouldBe Some((2, 0)) + } + + "isSbtApiCompatible" should "for 0.12.0-M1 return false" in { + isSbtApiCompatible("0.12.0-M1") shouldBe false + } + it should "for 0.12.0-RC1 return true" in { + isSbtApiCompatible("0.12.0-RC1") shouldBe true + } + it should "for 0.12.1-RC1 return true" in { + isSbtApiCompatible("0.12.1-RC1") shouldBe true + } + it should "for 1.0.0-M6 return false" in { + isSbtApiCompatible("1.0.0-M6") shouldBe false + } + it should "for 1.0.0-RC1 return true" in { + isSbtApiCompatible("1.0.0-RC1") shouldBe true + } + it should "for 1.0.0 return true" in { + isSbtApiCompatible("1.0.0") shouldBe true + } + it should "for 1.0.2-M1 return 
true" in { + isSbtApiCompatible("1.0.2-M1") shouldBe true + } + it should "for 1.0.2-RC1 return true" in { + isSbtApiCompatible("1.0.2-RC1") shouldBe true + } + it should "for 1.0.2 return true" in { + isSbtApiCompatible("1.0.2") shouldBe true + } + it should "for 1.3.0 return true" in { + isSbtApiCompatible("1.3.0") shouldBe true + } + it should "for 1.10.0 return true" in { + isSbtApiCompatible("1.10.0") shouldBe true + } + it should "for 2.0.0 return true" in { + isSbtApiCompatible("2.0.0") shouldBe true + } + + "binarySbtVersion" should "for 0.11.3 return 0.11.3" in { + binarySbtVersion("0.11.3") shouldBe "0.11.3" + } + it should "for 2.0.0 return 2" in { + binarySbtVersion("2.0.0") shouldBe "2" + } + it should "for 2.0.0-M1 return 2.0.0-M1" in { + binarySbtVersion("2.0.0-M1") shouldBe "2.0.0-M1" + } + it should "for 2.0.0-RC1 return 2" in { + binarySbtVersion("2.0.0-RC1") shouldBe "2" + } + it should "for 2.1.0-M1 return 2" in { + binarySbtVersion("2.1.0-M1") shouldBe "2" + } + it should "for 2.1.0 return 2" in { + binarySbtVersion("2.1.0") shouldBe "2" + } + it should "for 0.13.1 return 0.13" in { + binarySbtVersion("0.13.1") shouldBe "0.13" + } + it should "for 1.0.0-M6 return 1.0.0-M6" in { + binarySbtVersion("1.0.0-M6") shouldBe "1.0.0-M6" + } + it should "for 1.0.0-RC1 return 1.0" in { + binarySbtVersion("1.0.0-RC1") shouldBe "1.0" + } + it should "for 1.0.0 return 1.0" in { + binarySbtVersion("1.0.0") shouldBe "1.0" + } + it should "for 1.0.2-M1 return 1.0" in { + binarySbtVersion("1.0.2-M1") shouldBe "1.0" + } + it should "for 1.0.2-RC1 return 1.0" in { + binarySbtVersion("1.0.2-RC1") shouldBe "1.0" + } + it should "for 1.0.2 return 1.0" in { + binarySbtVersion("1.0.2") shouldBe "1.0" + } + it should "for 1.3.0 return 1.0" in { + binarySbtVersion("1.3.0") shouldBe "1.0" + } + it should "for 1.3.0-SNAPSHOT return 1.0" in { + binarySbtVersion("1.3.0-SNAPSHOT") shouldBe "1.0" + } + it should "for 1.3.0-A1-B1.1 return 1.0" in { + binarySbtVersion("1.3.0-A1-B1.1") shouldBe "1.0" + } + it should "for 1.10.0 return 1.0" in { + binarySbtVersion("1.10.0") shouldBe "1.0" + } + + "scalaApiVersion" should "for xyz return None" in { + scalaApiVersion("xyz") shouldBe None + } + it should "for 2.10 return None" in { + scalaApiVersion("2.10") shouldBe None + } + it should "for 2.10.0-SNAPSHOT return None" in { + scalaApiVersion("2.10.0-SNAPSHOT") shouldBe None + } + it should "for 2.10.0-RC1 return None" in { + scalaApiVersion("2.10.0-RC1") shouldBe None + } + it should "for 2.10.0 return Some((2, 10))" in { + scalaApiVersion("2.10.0") shouldBe Some((2, 10)) + } + it should "for 2.10.0-1 return Some((2, 10))" in { + scalaApiVersion("2.10.0-1") shouldBe Some((2, 10)) + } + it should "for 2.10.1-SNAPSHOT return Some((2, 10))" in { + scalaApiVersion("2.10.1-SNAPSHOT") shouldBe Some((2, 10)) + } + it should "for 2.10.1-RC1 return Some((2, 10))" in { + scalaApiVersion("2.10.1-RC1") shouldBe Some((2, 10)) + } + it should "for 2.10.1 return Some((2, 10))" in { + scalaApiVersion("2.10.1") shouldBe Some((2, 10)) + } + + "isScalaApiCompatible" should "for 2.10.0-M1 return false" in { + isScalaApiCompatible("2.10.0-M1") shouldBe false + } + it should "for 2.10.0-RC1 return false" in { + isScalaApiCompatible("2.10.0-RC1") shouldBe false + } + it should "for 2.10.1-RC1 return false" in { + isScalaApiCompatible("2.10.1-RC1") shouldBe true + } + + "binaryScalaVersion" should "for 2.9.2 return 2.9.2" in { + binaryScalaVersion("2.9.2") shouldBe "2.9.2" + } + it should "for 2.10.0-M1 return 2.10.0-M1" in { + 
binaryScalaVersion("2.10.0-M1") shouldBe "2.10.0-M1" + } + it should "for 2.10.0-RC1 return 2.10.0-RC1" in { + binaryScalaVersion("2.10.0-RC1") shouldBe "2.10.0-RC1" + } + it should "for 2.10.0 return 2.10" in { + binaryScalaVersion("2.10.0") shouldBe "2.10" + } + it should "for 2.10.1-M1 return 2.10" in { + binaryScalaVersion("2.10.1-M1") shouldBe "2.10" + } + it should "for 2.10.1-RC1 return 2.10" in { + binaryScalaVersion("2.10.1-RC1") shouldBe "2.10" + } + it should "for 2.10.1 return 2.10" in { + binaryScalaVersion("2.10.1") shouldBe "2.10" + } + it should "for 2.20170314093845.0-87654321 return 2.20170314093845" in { + binaryScalaVersion("2.20170314093845.0-87654321") shouldBe "2.20170314093845" + } + it should "for 3.0.0-M2 return 3.0.0-M2" in { + binaryScalaVersion("3.0.0-M2") shouldBe "3.0.0-M2" + } + it should "for 3.0.0-M3-bin-SNAPSHOT return 3.0.0-M3" in { + binaryScalaVersion("3.0.0-M3-bin-SNAPSHOT") shouldBe "3.0.0-M3" + } + it should "for 3.0.0-M3-bin-20201215-cbe50b3-NIGHTLY return 3.0.0-M3" in { + binaryScalaVersion("3.0.0-M3-bin-20201215-cbe50b3-NIGHTLY") shouldBe "3.0.0-M3" + } + it should "for 3.0.0-M3.5-bin-20201215-cbe50b3-NIGHTLY return 3.0.0-M3" in { + binaryScalaVersion("3.0.0-M3.5-bin-20201215-cbe50b3-NIGHTLY") shouldBe "3.0.0-M3.5" + } + it should "for 3.0.0-RC1 return 3.0.0-RC1" in { + binaryScalaVersion("3.0.0-RC1") shouldBe "3.0.0-RC1" + } + + // Not set in stone but 3 is the favorite candidate so far + // (see https://github.com/lampepfl/dotty/issues/10244) + it should "for 3.0.0 return 3" in { + binaryScalaVersion("3.0.0") shouldBe "3" + } + it should "for 3.1.0-M1 return 3" in { + binaryScalaVersion("3.1.0-M1") shouldBe "3" + } + it should "for 3.1.0-RC1-bin-SNAPSHOT return 3" in { + binaryScalaVersion("3.1.0-RC1-bin-SNAPSHOT") shouldBe "3" + } + it should "for 3.1.0-RC1 return 3" in { + binaryScalaVersion("3.1.0-RC1") shouldBe "3" + } + it should "for 3.1.0 return 3" in { + binaryScalaVersion("3.1.0") shouldBe "3" + } + it should "for 3.0.1-RC1 return 3" in { + binaryScalaVersion("3.0.1-RC1") shouldBe "3" + } + it should "for 3.0.1-M1 return 3" in { + binaryScalaVersion("3.0.1-M1") shouldBe "3" + } + it should "for 3.0.1-RC1-bin-SNAPSHOT return 3" in { + binaryScalaVersion("3.0.1-RC1-bin-SNAPSHOT") shouldBe "3" + } + it should "for 3.0.1-bin-nonbootstrapped return 3" in { + binaryScalaVersion("3.0.1-bin-SNAPSHOT") shouldBe "3" + } + it should "for 3.0.1-SNAPSHOT return 3" in { + binaryScalaVersion("3.0.1-SNAPSHOT") shouldBe "3" + } + + private def patchVersion(fullVersion: String) = + CrossVersion(CrossVersion.patch, fullVersion, "dummy") map (fn => fn("artefact")) + + "CrossVersion.patch" should "for 2.11.8 return 2.11.8" in { + patchVersion("2.11.8") shouldBe Some("artefact_2.11.8") + } + it should "for 2.11.8-M1 return 2.11.8-M1" in { + patchVersion("2.11.8-M1") shouldBe Some("artefact_2.11.8-M1") + } + it should "for 2.11.8-RC1 return 2.11.8-RC1" in { + patchVersion("2.11.8-RC1") shouldBe Some("artefact_2.11.8-RC1") + } + it should "for 2.11.8-bin-extra return 2.11.8" in { + patchVersion("2.11.8-bin-extra") shouldBe Some("artefact_2.11.8") + } + it should "for 2.11.8-M1-bin-extra return 2.11.8-M1" in { + patchVersion("2.11.8-M1-bin-extra") shouldBe Some("artefact_2.11.8-M1") + } + it should "for 2.11.8-RC1-bin-extra return 2.11.8-RC1" in { + patchVersion("2.11.8-RC1-bin-extra") shouldBe Some("artefact_2.11.8-RC1") + } + it should "for 2.11.8-X1.5-bin-extra return 2.11.8-X1.5" in { + patchVersion("2.11.8-X1.5-bin-extra") shouldBe 
Some("artefact_2.11.8-X1.5") + } + + "isScalaBinaryCompatibleWith" should "for (2.10.4, 2.10.5) return true" in { + isScalaBinaryCompatibleWith("2.10.4", "2.10.5") shouldBe true + } + it should "for (2.10.6, 2.10.5) return true" in { + isScalaBinaryCompatibleWith("2.10.6", "2.10.5") shouldBe true + } + it should "for (2.11.0, 2.10.5) return false" in { + isScalaBinaryCompatibleWith("2.11.0", "2.10.5") shouldBe false + } + it should "for (3.0.0, 2.10.5) return false" in { + isScalaBinaryCompatibleWith("3.0.0", "2.10.5") shouldBe false + } + it should "for (3.0.0, 3.1.0) return false" in { + isScalaBinaryCompatibleWith("3.0.0", "3.1.0") shouldBe false + } + it should "for (3.1.0, 3.0.0) return true" in { + isScalaBinaryCompatibleWith("3.1.0", "3.0.0") shouldBe true + } + it should "for (3.1.0, 3.1.1) return true" in { + isScalaBinaryCompatibleWith("3.1.0", "3.1.1") shouldBe true + } + it should "for (3.1.1, 3.1.0) return true" in { + isScalaBinaryCompatibleWith("3.1.1", "3.1.0") shouldBe true + } + it should "for (2.10.0-M1, 2.10.5) return false" in { + isScalaBinaryCompatibleWith("2.10.0-M1", "2.10.5") shouldBe false + } + it should "for (2.10.5, 2.10.0-M1) return false" in { + isScalaBinaryCompatibleWith("2.10.5", "2.10.0-M1") shouldBe false + } + it should "for (2.10.0-M1, 2.10.0-M2) return false" in { + isScalaBinaryCompatibleWith("2.10.0-M1", "2.10.0-M2") shouldBe false + } + it should "for (2.10.0-M1, 2.11.0-M1) return false" in { + isScalaBinaryCompatibleWith("2.10.0-M1", "2.11.0-M1") shouldBe false + } + it should "for (3.1.0-M1, 3.0.0) return true" in { + isScalaBinaryCompatibleWith("3.1.0-M1", "3.0.0") shouldBe true + } + it should "for (3.1.0-M1, 3.1.0) return false" in { + isScalaBinaryCompatibleWith("3.1.0-M1", "3.1.0") shouldBe false + } + it should "for (3.1.0-M1, 3.1.0-M2) return false" in { + isScalaBinaryCompatibleWith("3.1.0-M1", "3.1.0-M2") shouldBe false + } + it should "for (3.1.0-M2, 3.1.0-M1) return false" in { + isScalaBinaryCompatibleWith("3.1.0-M2", "3.1.0-M1") shouldBe false + } + + private def constantVersion(value: String) = + CrossVersion(CrossVersion.constant(value), "dummy1", "dummy2") map (fn => fn("artefact")) + + "CrossVersion.constant" should "add a constant to the version" in { + constantVersion("duck") shouldBe Some("artefact_duck") + } + + "Disabled" should "have structural equality" in { + Disabled() shouldBe Disabled() + }: @nowarn + "CrossVersion.full" should "have structural equality" in { + CrossVersion.full shouldBe CrossVersion.full + } + "CrossVersion.binary" should "have structural equality" in { + CrossVersion.binary shouldBe CrossVersion.binary + } + "CrossVersion.constant" should "have structural equality" in { + CrossVersion.constant("duck") shouldBe CrossVersion.constant("duck") + } + + "CrossVersion.for3Use2_13" should "have structural equality" in { + CrossVersion.for3Use2_13 shouldBe CrossVersion.for3Use2_13 + CrossVersion.for3Use2_13With("_sjs1", "") shouldBe CrossVersion.for3Use2_13With("_sjs1", "") + } + it should "use the cross version 2.13 instead of 3" in { + CrossVersion(CrossVersion.for3Use2_13, "3.0.0", "3").map(_("artefact")) shouldBe Some( + "artefact_2.13" + ) + } + it should "use the cross version 2.13 instead of 3.0.0-M3" in { + CrossVersion(CrossVersion.for3Use2_13, "3.0.0-M3", "3.0.0-M3").map(_("artefact")) shouldBe Some( + "artefact_2.13" + ) + } + + "CrossVersion.for2_13Use3" should "have structural equality" in { + CrossVersion.for2_13Use3 shouldBe CrossVersion.for2_13Use3 + CrossVersion.for2_13Use3With("_sjs1", "") 
shouldBe CrossVersion.for2_13Use3With("_sjs1", "") + } + it should "use the cross version 3 instead of 2.13" in { + CrossVersion(CrossVersion.for2_13Use3, "2.13.4", "2.13").map(_("artefact")) shouldBe Some( + "artefact_3" + ) + } +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/ModuleIdTest.scala b/lm-core/src/test/scala/sbt/librarymanagement/ModuleIdTest.scala new file mode 100644 index 000000000..1c962aac2 --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/ModuleIdTest.scala @@ -0,0 +1,49 @@ +package sbt.librarymanagement + +import sjsonnew.support.scalajson.unsafe.{ Converter, CompactPrinter, Parser } + +object ModuleIdTest extends verify.BasicTestSuite { + test("Module Id should return cross-disabled module id as equal to a copy") { + assert(ModuleID("com.acme", "foo", "1") == ModuleID("com.acme", "foo", "1")) + } + + test("it should return cross-full module id as equal to a copy") { + assert( + ModuleID("com.acme", "foo", "1").cross(CrossVersion.full) == + ModuleID("com.acme", "foo", "1").cross(CrossVersion.full) + ) + } + + test("it should return cross-binary module id as equal to a copy") { + assert( + ModuleID("com.acme", "foo", "1").cross(CrossVersion.binary) == + ModuleID("com.acme", "foo", "1").cross(CrossVersion.binary) + ) + } + + test("it should format itself into JSON") { + import LibraryManagementCodec._ + val json = Converter.toJson(ModuleID("com.acme", "foo", "1")).get + assert(CompactPrinter(json) == expectedJson) + } + + test("it should thaw back from JSON") { + import LibraryManagementCodec._ + val json = Parser.parseUnsafe(expectedJson) + val m = Converter.fromJsonUnsafe[ModuleID](json) + assert(m == ModuleID("com.acme", "foo", "1")) + } + + test("cross(...) should compose prefix with the existing value") { + assert( + ModuleID("com.acme", "foo", "1") + .cross(CrossVersion.binaryWith("sjs1_", "")) + .cross(CrossVersion.for3Use2_13) + == + ModuleID("com.acme", "foo", "1").cross(CrossVersion.for3Use2_13With("sjs1_", "")) + ) + } + + def expectedJson = + """{"organization":"com.acme","name":"foo","revision":"1","isChanging":false,"isTransitive":true,"isForce":false,"explicitArtifacts":[],"inclusions":[],"exclusions":[],"extraAttributes":{},"crossVersion":{"type":"Disabled"}}""" +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/ResolverExtraTest.scala b/lm-core/src/test/scala/sbt/librarymanagement/ResolverExtraTest.scala new file mode 100644 index 000000000..57b407c8a --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/ResolverExtraTest.scala @@ -0,0 +1,50 @@ +package sbt.librarymanagement + +import verify.BasicTestSuite +import scala.annotation.nowarn + +@nowarn // Necessary because our test cases look like interpolated strings. 
+object ResolverExtraTest extends BasicTestSuite { + test("expandMavenSettings should expand existing environment variables") { + assertExpansion( + input = "User home: ${env.HOME}", + expected = s"User home: ${env("HOME")}" + ) + } + + test("expandMavenSettings should expand existing system properties") { + assertExpansion( + input = "User dir: ${user.dir}", + expected = s"User dir: ${prop("user.dir")}" + ) + } + + test("expandMavenSettings should expand unknown system properties to the empty string") { + assertExpansion( + input = "Unknown system property: ${IF_THIS_EXISTS_WE_NEED_TO_HAVE_A_CHAT}", + expected = s"Unknown system property: " + ) + } + + test("expandMavenSettings should expand unknown environment variables to the empty string") { + assertExpansion( + input = "Unknown environment variable: ${env.IF_THIS_EXISTS_I_WORRY_ABOUT_YOU}", + expected = s"Unknown environment variable: " + ) + } + + test("expandMavenSettings should preserve backslashes in environment variable values") { + val path = """C:\foo\bar\baz""" + val env = Map("SOME_PATH" -> path) + + assert(Resolver.expandMavenSettings("${env.SOME_PATH}", env) == path) + } + + // - Helper functions ---------------------------------------------------------------------------- + // ----------------------------------------------------------------------------------------------- + def assertExpansion(input: String, expected: String) = + Predef.assert(Resolver.expandMavenSettings(input) == s"$expected") + + def env(name: String) = sys.env.getOrElse(name, "") + def prop(name: String) = sys.props.getOrElse(name, "") +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/ResolverTest.scala b/lm-core/src/test/scala/sbt/librarymanagement/ResolverTest.scala new file mode 100644 index 000000000..400162766 --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/ResolverTest.scala @@ -0,0 +1,30 @@ +package sbt.librarymanagement + +import java.net.URI + +import sbt.internal.librarymanagement.UnitSpec + +object ResolverTest extends UnitSpec { + + "Resolver url" should "propagate pattern descriptorOptional and skipConsistencyCheck."
in { + val pats = Vector("[orgPath]") + val patsExpected = Vector("http://foo.com/test/[orgPath]") + val patterns = Resolver + .url("test", new URI("http://foo.com/test").toURL)( + Patterns( + pats, + pats, + isMavenCompatible = false, + descriptorOptional = true, + skipConsistencyCheck = true + ) + ) + .patterns + + patterns.ivyPatterns shouldBe patsExpected + patterns.artifactPatterns shouldBe patsExpected + patterns.isMavenCompatible shouldBe false + assert(patterns.skipConsistencyCheck) + assert(patterns.descriptorOptional) + } +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/SemanticSelectorSpec.scala b/lm-core/src/test/scala/sbt/librarymanagement/SemanticSelectorSpec.scala new file mode 100644 index 000000000..cce9b4ff1 --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/SemanticSelectorSpec.scala @@ -0,0 +1,399 @@ +package sbt.librarymanagement + +import org.scalatest.freespec.AnyFreeSpec +import org.scalatest.matchers.should.Matchers + +class SemanticSelectorSpec extends AnyFreeSpec with Matchers { + semsel("<=1.2.3") { sel => + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.2-beta") + assertMatches(sel, "1.2.3-beta") + assertMatches(sel, "1.2") + assertMatches(sel, "1") + assertNotMatches(sel, "1.2.4-alpha") + assertNotMatches(sel, "1.2.4") + assertNotMatches(sel, "1.3") + assertNotMatches(sel, "1.3.0") + assertNotMatches(sel, "2") + } + + semsel("<=1.2") { sel => + assertMatches(sel, "1.2.345-beta") + assertMatches(sel, "1.2.3-beta") + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.2") + assertMatches(sel, "1") + assertNotMatches(sel, "1.3.0") + assertNotMatches(sel, "1.3.0-alpha") + } + + semsel("<=1") { sel => + assertMatches(sel, "1.234.567-alpha") + assertMatches(sel, "1.234.567") + assertMatches(sel, "1.234") + assertMatches(sel, "1.0.0-alpha") + assertMatches(sel, "1.0.0") + assertMatches(sel, "1.0") + assertMatches(sel, "1") + assertNotMatches(sel, "2.0.0") + assertNotMatches(sel, "2.0.0-alpha") + } + + semsel("<1.2.3") { sel => + assertMatches(sel, "1.2.3-alpha") + assertMatches(sel, "1.2.2") + assertMatches(sel, "1.2") + assertMatches(sel, "1") + assertNotMatches(sel, "1.2.4-beta") + assertNotMatches(sel, "1.2.3") + assertNotMatches(sel, "1.3") + assertNotMatches(sel, "2") + } + + semsel("<1.2") { sel => + assertMatches(sel, "1.2.0-alpha") + assertMatches(sel, "1.1.23") + assertMatches(sel, "1.1") + assertMatches(sel, "1") + assertNotMatches(sel, "1.3-beta") + assertNotMatches(sel, "1.2.0") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "2") + } + + semsel("<1") { sel => + assertMatches(sel, "1.0.0-beta") + assertMatches(sel, "0.9.9-beta") + assertMatches(sel, "0.9.12") + assertMatches(sel, "0.8") + assertMatches(sel, "0") + assertNotMatches(sel, "1.0.1-beta") + assertNotMatches(sel, "1") + assertNotMatches(sel, "1.0") + assertNotMatches(sel, "1.0.0") + } + + semsel(">=1.2.3") { sel => + assertMatches(sel, "1.2.4-beta") + assertMatches(sel, "1.2.4-beta.1") + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.3") + assertMatches(sel, "2") + assertNotMatches(sel, "1.2.3-beta") + assertNotMatches(sel, "1.2.2") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "1") + } + + semsel(">=1.2") { sel => + assertMatches(sel, "1.2.1-beta") + assertMatches(sel, "1.2.0") + assertMatches(sel, "1.2") + assertMatches(sel, "2") + assertNotMatches(sel, "1.2.0-beta") + assertNotMatches(sel, "1.1.23") + assertNotMatches(sel, "1.1") + assertNotMatches(sel, "1") + } + + semsel(">=1") { sel => + assertMatches(sel, "1.0.1-beta") + 
assertMatches(sel, "1.0.0") + assertMatches(sel, "1.0") + assertMatches(sel, "1") + assertNotMatches(sel, "1.0.0-beta") + assertNotMatches(sel, "0.9.9") + assertNotMatches(sel, "0.1") + assertNotMatches(sel, "0") + } + + semsel(">1.2.3") { sel => + assertMatches(sel, "1.2.4") + assertMatches(sel, "1.2.4-alpha") + assertMatches(sel, "1.3") + assertMatches(sel, "2") + assertNotMatches(sel, "1.2.3-alpha") + assertNotMatches(sel, "1.2.3") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "1") + } + + semsel(">1.2") { sel => + assertMatches(sel, "1.3.0") + assertMatches(sel, "1.3.0-alpha") + assertMatches(sel, "1.3") + assertMatches(sel, "2") + assertNotMatches(sel, "1.2.0-alpha") + assertNotMatches(sel, "1.2.9") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "1") + } + + semsel(">1") { sel => + assertMatches(sel, "2.0.0-alpha") + assertMatches(sel, "2.0.0") + assertMatches(sel, "2.0") + assertMatches(sel, "2") + assertNotMatches(sel, "1.2.3-alpha") + assertNotMatches(sel, "1.2.3") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "1") + } + + semsel("1.2.3") { sel => + assertMatches(sel, "1.2.3") + assertNotMatches(sel, "1.2.3-alpha") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "1.2.4") + } + + Seq(".x", ".X", ".*", ".x.x", "").foreach { xrange => + semsel(s"1$xrange") { sel => + assertMatches(sel, "1.2.3-alpha") + assertMatches(sel, "1.0.0") + assertMatches(sel, "1.0.1") + assertMatches(sel, "1.1.1") + assertNotMatches(sel, "1.0.0-alpha") + assertNotMatches(sel, "2.0.0-alpha") + assertNotMatches(sel, "2.0.0") + assertNotMatches(sel, "0.1.0") + } + } + + Seq(".x", ".X", ".*", "").foreach { xrange => + semsel(s"1.2$xrange") { sel => + assertMatches(sel, "1.2.0") + assertMatches(sel, "1.2.3") + assertNotMatches(sel, "1.2.0-alpha") + assertNotMatches(sel, "1.2.0-beta") + assertNotMatches(sel, "1.3.0-beta") + assertNotMatches(sel, "1.3.0") + assertNotMatches(sel, "1.1.1") + } + } + + semsel("=1.2.3") { sel => + assertMatches(sel, "1.2.3") + assertNotMatches(sel, "1.2.3-alpha") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "1.2.4") + } + semsel("=1.2") { sel => + assertMatches(sel, "1.2.0") + assertMatches(sel, "1.2") + assertMatches(sel, "1.2.1") + assertMatches(sel, "1.2.4") + assertNotMatches(sel, "1.1.0") + assertNotMatches(sel, "1.3.0") + assertNotMatches(sel, "1.2.0-alpha") + assertNotMatches(sel, "1.3.0-alpha") + } + semsel("=1") { sel => + assertMatches(sel, "1.0.0") + assertMatches(sel, "1.0") + assertMatches(sel, "1.0.1") + assertMatches(sel, "1.2.3") + assertNotMatches(sel, "1.0.0-alpha") + assertNotMatches(sel, "2.0.0") + } + semsel("1.2.3 || 2.0.0") { sel => + assertMatches(sel, "1.2.3") + assertMatches(sel, "2.0.0") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "2.0.1") + } + semsel("<=1.2.3 || >=2.0.0 || 1.3.x") { sel => + assertMatches(sel, "1.0") + assertMatches(sel, "1.2.3") + assertMatches(sel, "2.0.0") + assertMatches(sel, "2.0") + assertMatches(sel, "1.3.0") + assertMatches(sel, "1.3.3") + assertNotMatches(sel, "1.2.4") + assertNotMatches(sel, "1.4.0") + } + + semsel(">=1.2.3 <2.0.0") { sel => + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.9.9") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "2.0.0") + } + + semsel(">=1.2.3 <2.0.0 || >3.0.0 <=3.2.0") { sel => + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.9.9") + assertMatches(sel, "3.0.1") + assertMatches(sel, "3.2.0") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "2.0.0") + assertNotMatches(sel, "3.0.0") + assertNotMatches(sel, "3.2.1") + } + + 
semsel("1.2.3 - 2.0.0") { sel => + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.9.9") + assertMatches(sel, "2.0.0") + assertNotMatches(sel, "1.2") + assertNotMatches(sel, "2.0.1") + } + semsel("1.2 - 2") { sel => + assertMatches(sel, "1.2.0") + assertMatches(sel, "1.9.9") + assertMatches(sel, "2.0.0") + assertMatches(sel, "2.0.1") + assertNotMatches(sel, "1.1") + assertNotMatches(sel, "3.0.0") + } + semsel("1.2.3 - 2.0.0 1.5.0 - 2.4.0") { sel => + assertMatches(sel, "1.5.0") + assertMatches(sel, "1.9.9") + assertMatches(sel, "2.0.0") + assertNotMatches(sel, "1.2.3") + assertNotMatches(sel, "1.4") + assertNotMatches(sel, "2.0.1") + assertNotMatches(sel, "2.4.0") + } + semsel("1.2.3 - 2.0 || 2.4.0 - 3") { sel => + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.5.0") + assertMatches(sel, "2.0.0") + assertMatches(sel, "2.4.0") + assertMatches(sel, "2.9") + assertMatches(sel, "3.0.0") + assertMatches(sel, "2.0.1") + assertMatches(sel, "3.0.1") + assertMatches(sel, "3.1.0") + assertNotMatches(sel, "2.1") + assertNotMatches(sel, "2.3.9") + assertNotMatches(sel, "4.0.0") + } + + semsel(">=1.x") { sel => + assertMatches(sel, "1.0.0") + assertMatches(sel, "1.0") + assertMatches(sel, "1") + assertNotMatches(sel, "1.0.0-beta") + assertNotMatches(sel, "0.9.9") + assertNotMatches(sel, "0.1") + assertNotMatches(sel, "0") + } + + semsel(">=1.2.3-beta") { sel => + assertMatches(sel, "1.3-alpha") + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.2.3-beta") + assertMatches(sel, "1.2.3-beta-2") + assertMatches(sel, "1.2.3-beta-gamma") + assertMatches(sel, "1.2.4") + assertMatches(sel, "1.3") + assertNotMatches(sel, "1.2.3-alpha") + assertNotMatches(sel, "1.2.2") + } + + semsel(">=1.2.3-beta-2") { sel => + assertMatches(sel, "1.3-alpha") + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.2.3-beta-2") + assertMatches(sel, "1.2.3-beta-2-3") + assertMatches(sel, "1.2.3-beta-3") + assertMatches(sel, "1.2.3-beta-gamma") + assertMatches(sel, "1.2.4") + assertMatches(sel, "1.3") + assertNotMatches(sel, "1.2.3-alpha-3") + assertNotMatches(sel, "1.2.3-beta-1") + assertNotMatches(sel, "1.2.3-beta") + assertNotMatches(sel, "1.2.2") + } + + semsel(">=1.2.3-beta.5") { sel => + assertMatches(sel, "1.3-alpha") + assertMatches(sel, "1.2.3") + assertMatches(sel, "1.2.3-beta.5") + assertMatches(sel, "1.2.3-beta.6-3") + assertMatches(sel, "1.2.3-beta.7") + assertMatches(sel, "1.2.3-beta.gamma") + assertMatches(sel, "1.2.4") + assertMatches(sel, "1.3") + assertNotMatches(sel, "1.2.3-alpha-3") + assertNotMatches(sel, "1.2.3-beta-1") + assertNotMatches(sel, "1.2.3-beta") + assertNotMatches(sel, "1.2.2") + } + + Seq( + // invalid operator + "~1.2.3", + "<~1.2.3", + "+1.2.3", + "!1.0.0", + ">~1.2.3", + // too much version fields + "1.2.3.4", + "1.2.3.4.5", + "1.2.3.x", + // invalid version specifier + "string.!?", + "1.y", + "1.2x", + "1.1.c", + "-1", + "x", + "", + // || without spaces + "1.2.3|| 2.3.4", + "1.2.3 ||2.3.4", + "1.2.3||2.3.4", + // invalid - operator + "- 1.1.1", + "2.0.0 -", + "1.0.0 - 2.0.0 - 3.0.0", + ">=1.0.0 - 2.0.0", + "1.0.0 - =3.0.0", + "=1.0.0 - =3.0.0", + "1.0.0 - 2.0.0 || - 2.0.0", + "1.0.0- 2.0.0", + "1.0.0 -2.0.0", + "-", + // minor and patch versions are required for pre-release version + "1.2-alpha-beta", + "1-beta", + "<=1.2-beta", + "<=1-beta", + "1.2-beta - 1.3-alpha", + "1.2.x-beta", + "1.x.*-beta", + // cannot specify metadata + "1.2.3+meta" + ).foreach { selectorStr => + semsel(selectorStr) { sel => + assertParsesToError(sel) + } + } + + private[this] final class 
SemanticSelectorString(val value: String) + private[this] def semsel(s: String)(f: SemanticSelectorString => Unit): Unit = + s"""SemanticSelector "$s"""" - { + f(new SemanticSelectorString(s)) + } + + private[this] def assertMatches( + s: SemanticSelectorString, + v: String + ): Unit = s"""should match "$v"""" in { + SemanticSelector(s.value).matches(VersionNumber(v)) shouldBe true + } + + private[this] def assertNotMatches( + s: SemanticSelectorString, + v: String + ): Unit = s"""should not match "$v"""" in { + SemanticSelector(s.value).matches(VersionNumber(v)) shouldBe false + } + + private[this] def assertParsesToError(s: SemanticSelectorString): Unit = + s"""should parse as an error""" in { + an[IllegalArgumentException] should be thrownBy SemanticSelector(s.value) + } +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/VersionNumberSpec.scala b/lm-core/src/test/scala/sbt/librarymanagement/VersionNumberSpec.scala new file mode 100644 index 000000000..608be5f03 --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/VersionNumberSpec.scala @@ -0,0 +1,220 @@ +package sbt.librarymanagement + +import org.scalatest.Inside +import org.scalatest.freespec.AnyFreeSpec +import org.scalatest.matchers.should.Matchers + +// This is a specification to check VersionNumber and VersionNumberCompatibility. +class VersionNumberSpec extends AnyFreeSpec with Matchers with Inside { + import VersionNumber.{ EarlySemVer, SemVer, PackVer } + + version("1") { v => + assertParsesTo(v, Seq(1), Seq(), Seq()) + assertBreaksDownTo(v, Some(1)) + assertCascadesTo(v, Seq("1")) + } + + version("1.0") { v => + assertParsesTo(v, Seq(1, 0), Seq(), Seq()) + assertBreaksDownTo(v, Some(1), Some(0)) + assertCascadesTo(v, Seq("1.0")) + } + + version("1.0.0") { v => + assertParsesTo(v, Seq(1, 0, 0), Seq(), Seq()) + assertBreaksDownTo(v, Some(1), Some(0), Some(0)) + assertCascadesTo(v, Seq("1.0.0", "1.0")) + + assertIsCompatibleWith(v, "1.0.1", SemVer) + assertIsCompatibleWith(v, "1.1.1", SemVer) + assertIsNotCompatibleWith(v, "2.0.0", SemVer) + assertIsNotCompatibleWith(v, "1.0.0-M1", SemVer) + + assertIsCompatibleWith(v, "1.0.1", EarlySemVer) + assertIsCompatibleWith(v, "1.1.1", EarlySemVer) + assertIsNotCompatibleWith(v, "2.0.0", EarlySemVer) + assertIsNotCompatibleWith(v, "1.0.0-M1", EarlySemVer) + + assertIsCompatibleWith(v, "1.0.1", PackVer) + assertIsNotCompatibleWith(v, "1.1.1", PackVer) + assertIsNotCompatibleWith(v, "2.0.0", PackVer) + assertIsNotCompatibleWith(v, "1.0.0-M1", PackVer) + } + + version("1.0.0.0") { v => + assertParsesTo(v, Seq(1, 0, 0, 0), Seq(), Seq()) + assertBreaksDownTo(v, Some(1), Some(0), Some(0), Some(0)) + assertCascadesTo(v, Seq("1.0.0.0", "1.0.0", "1.0")) + } + + version("0.12.0") { v => + assertParsesTo(v, Seq(0, 12, 0), Seq(), Seq()) + assertBreaksDownTo(v, Some(0), Some(12), Some(0)) + assertCascadesTo(v, Seq("0.12.0", "0.12")) + + assertIsNotCompatibleWith(v, "0.12.0-RC1", SemVer) + assertIsNotCompatibleWith(v, "0.12.1", SemVer) + assertIsNotCompatibleWith(v, "0.12.1-M1", SemVer) + + assertIsNotCompatibleWith(v, "0.12.0-RC1", EarlySemVer) + assertIsCompatibleWith(v, "0.12.1", EarlySemVer) + assertIsCompatibleWith(v, "0.12.1-M1", EarlySemVer) + + assertIsNotCompatibleWith(v, "0.12.0-RC1", PackVer) + assertIsCompatibleWith(v, "0.12.1", PackVer) + assertIsCompatibleWith(v, "0.12.1-M1", PackVer) + } + + version("0.1.0-SNAPSHOT") { v => + assertParsesTo(v, Seq(0, 1, 0), Seq("SNAPSHOT"), Seq()) + assertCascadesTo(v, Seq("0.1.0-SNAPSHOT", "0.1.0", "0.1")) + + 
assertIsCompatibleWith(v, "0.1.0-SNAPSHOT", SemVer) + assertIsNotCompatibleWith(v, "0.1.0", SemVer) + assertIsCompatibleWith(v, "0.1.0-SNAPSHOT+001", SemVer) + + assertIsCompatibleWith(v, "0.1.0-SNAPSHOT", EarlySemVer) + assertIsNotCompatibleWith(v, "0.1.0", EarlySemVer) + assertIsCompatibleWith(v, "0.1.0-SNAPSHOT+001", EarlySemVer) + + assertIsCompatibleWith(v, "0.1.0-SNAPSHOT", PackVer) + assertIsNotCompatibleWith(v, "0.1.0", PackVer) + assertIsCompatibleWith(v, "0.1.0-SNAPSHOT+001", PackVer) + } + + version("0.1.0-M1") { v => + assertParsesTo(v, Seq(0, 1, 0), Seq("M1"), Seq()) + assertCascadesTo(v, Seq("0.1.0-M1", "0.1.0", "0.1")) + } + + version("0.1.0-RC1") { v => + assertParsesTo(v, Seq(0, 1, 0), Seq("RC1"), Seq()) + assertCascadesTo(v, Seq("0.1.0-RC1", "0.1.0", "0.1")) + } + + version("0.1.0-MSERVER-1") { v => + assertParsesTo(v, Seq(0, 1, 0), Seq("MSERVER", "1"), Seq()) + assertCascadesTo(v, Seq("0.1.0-MSERVER-1", "0.1.0", "0.1")) + } + + version("1.1.0-DLP-7923-presigned-download-url.5") { v => + assertParsesTo(v, Seq(1, 1, 0), Seq("DLP", "7923", "presigned", "download", "url.5"), Seq()) + assertCascadesTo(v, Seq("1.1.0-DLP-7923-presigned-download-url.5", "1.1.0", "1.1")) + assertIsCompatibleWith(v, "1.0.7", EarlySemVer) + assertIsNotCompatibleWith(v, "1.0.7", PackVer) + } + + version("2.10.4-20140115-000117-b3a-sources") { v => + assertParsesTo(v, Seq(2, 10, 4), Seq("20140115", "000117", "b3a", "sources"), Seq()) + assertCascadesTo(v, Seq("2.10.4-20140115-000117-b3a-sources", "2.10.4", "2.10")) + assertIsCompatibleWith(v, "2.0.0", SemVer) + assertIsNotCompatibleWith(v, "2.0.0", PackVer) + } + + version("20140115000117-b3a-sources") { v => + assertParsesTo(v, Seq(20140115000117L), Seq("b3a", "sources"), Seq()) + assertCascadesTo(v, Seq("20140115000117-b3a-sources")) + } + + version("1.0.0-alpha+001+002") { v => + assertParsesTo(v, Seq(1, 0, 0), Seq("alpha"), Seq("+001", "+002")) + assertCascadesTo(v, Seq("1.0.0-alpha+001+002", "1.0.0", "1.0")) + } + + version("non.space.!?string") { v => + assertParsesTo(v, Seq(), Seq(), Seq("non.space.!?string")) + assertCascadesTo(v, Seq("non.space.!?string")) + } + + version("space !?string") { v => + assertParsesToError(v) + } + version("") { v => + assertParsesToError(v) + } + + // // + + private[this] final class VersionString(val value: String) + + private[this] def version(s: String)(f: VersionString => Unit) = + s"""Version "$s"""" - { + f(new VersionString(s)) + } + + private[this] def assertParsesTo( + v: VersionString, + ns: Seq[Long], + ts: Seq[String], + es: Seq[String] + ): Unit = + s"should parse to ($ns, $ts, $es)" in inside(v.value) { case VersionNumber(ns1, ts1, es1) => + (ns1 shouldBe ns) + (ts1 shouldBe ts) + (es1 shouldBe es) + (VersionNumber(ns, ts, es).toString shouldBe v.value) + (VersionNumber(ns, ts, es) shouldBe VersionNumber(ns, ts, es)) + } + + private[this] def assertParsesToError(v: VersionString): Unit = + "should parse as an error" in { + v.value should not matchPattern { + case s: String if VersionNumber.unapply(s).isDefined => // because of unapply overloading + } + } + + private[this] def assertBreaksDownTo( + v: VersionString, + major: Option[Long], + minor: Option[Long] = None, + patch: Option[Long] = None, + buildNumber: Option[Long] = None + ): Unit = + s"should breakdown to ($major, $minor, $patch, $buildNumber)" in inside(v.value) { + case VersionNumber(ns, ts, es) => + val v = VersionNumber(ns, ts, es) + (v._1 shouldBe major) + (v._2 shouldBe minor) + (v._3 shouldBe patch) + (v._4 shouldBe buildNumber) + 
} + + private[this] def assertCascadesTo(v: VersionString, ns: Seq[String]): Unit = { + s"should cascade to $ns" in { + val versionNumbers = ns.toVector map VersionNumber.apply + VersionNumber(v.value).cascadingVersions shouldBe versionNumbers + } + } + + private[this] def assertIsCompatibleWith( + v1: VersionString, + v2: String, + vnc: VersionNumberCompatibility + ): Unit = + checkCompat(true, vnc, v1, v2) + + private[this] def assertIsNotCompatibleWith( + v1: VersionString, + v2: String, + vnc: VersionNumberCompatibility + ): Unit = + checkCompat(false, vnc, v1, v2) + + private[this] def checkCompat( + expectOutcome: Boolean, + vnc: VersionNumberCompatibility, + v1: VersionString, + v2: String + ) = { + val prefix = if (expectOutcome) "should" else "should NOT" + val compatibilityStrategy = vnc match { + case SemVer => "SemVer" + case PackVer => "PackVer" + case EarlySemVer => "EarlySemVer" + case _ => val s = vnc.name; if (s contains " ") s""""$s"""" else s + } + s"$prefix be $compatibilityStrategy compatible with $v2" in { + vnc.isCompatible(VersionNumber(v1.value), VersionNumber(v2)) shouldBe expectOutcome + } + } +} diff --git a/lm-core/src/test/scala/sbt/librarymanagement/VersionRangeSpec.scala b/lm-core/src/test/scala/sbt/librarymanagement/VersionRangeSpec.scala new file mode 100644 index 000000000..e8bd99028 --- /dev/null +++ b/lm-core/src/test/scala/sbt/librarymanagement/VersionRangeSpec.scala @@ -0,0 +1,19 @@ +package sbt +package internal +package librarymanagement + +class VersionRangeSpec extends UnitSpec { + "Version range" should "strip 1.0 to None" in stripTo("1.0", None) + it should "strip (,1.0] to 1.0" in stripTo("(,1.0]", Some("1.0")) + it should "strip (,1.0) to None" in stripTo("(,1.0)", None) + it should "strip [1.0] to 1.0" in stripTo("[1.0]", Some("1.0")) + it should "strip [1.0,) to 1.0" in stripTo("[1.0,)", Some("1.0")) + it should "strip (1.0,) to 1.0" in stripTo("(1.0,)", Some("1.0")) + it should "strip (1.0,2.0) to None" in stripTo("(1.0,2.0)", None) + it should "strip [1.0,2.0] to None" in stripTo("[1.0,2.0]", None) + it should "strip (,1.0],[1.2,) to 1.0" in stripTo("(,1.0],[1.2,)", Some("1.0")) + it should "strip (,1.1),(1.1,) to None" in stripTo("(,1.1),(1.1,)", None) + + def stripTo(s: String, expected: Option[String]) = + assert(VersionRange.stripMavenVersionRange(s) == expected) +} diff --git a/lm-coursier/NOTICE b/lm-coursier/NOTICE new file mode 100644 index 000000000..0f79e2619 --- /dev/null +++ b/lm-coursier/NOTICE @@ -0,0 +1,13 @@ +Coursier-based implementation of sbt-librarymanagement +Copyright 2015-2024 Alexandre Archambault + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
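Illustrative note (not part of the patch): the specs above pin down the public API of VersionNumber, its SemVer/EarlySemVer/PackVer compatibility schemes, and SemanticSelector. A minimal usage sketch follows, assuming only the calls already exercised in those tests; the VersionCheckExample wrapper is a hypothetical name used for illustration only.

    import sbt.librarymanagement.{ SemanticSelector, VersionNumber }

    // Hypothetical wrapper, for illustration only.
    object VersionCheckExample {
      def main(args: Array[String]): Unit = {
        // Parse a version string; _1.._4 expose major/minor/patch/build as Options.
        val v = VersionNumber("1.2.3")
        println((v._1, v._2, v._3)) // (Some(1),Some(2),Some(3))

        // Match a version against a semantic selector, as SemanticSelectorSpec does.
        val range = SemanticSelector(">=1.2.3 <2.0.0")
        println(range.matches(VersionNumber("1.5.0"))) // true
        println(range.matches(VersionNumber("2.0.0"))) // false

        // Compare two versions under a compatibility scheme, as VersionNumberSpec does.
        println(VersionNumber.SemVer.isCompatible(VersionNumber("1.0.0"), VersionNumber("1.1.1"))) // true
      }
    }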
diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/CoursierConfiguration.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/CoursierConfiguration.scala new file mode 100644 index 000000000..0d2173a20 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/CoursierConfiguration.scala @@ -0,0 +1,72 @@ +package lmcoursier + +import java.io.File +import dataclass.{ data, since } +import coursier.cache.CacheDefaults +import lmcoursier.credentials.Credentials +import lmcoursier.definitions.{ + Authentication, + CacheLogger, + CachePolicy, + FromCoursier, + Module, + ModuleMatchers, + Project, + Reconciliation, + Strict +} +import sbt.librarymanagement.{ InclExclRule, ModuleID, Resolver } +import xsbti.Logger + +import scala.concurrent.duration.{ Duration, FiniteDuration } + +@data class CoursierConfiguration( + log: Option[Logger] = None, + resolvers: Vector[Resolver] = Resolver.defaults, + parallelDownloads: Int = 6, + maxIterations: Int = 100, + sbtScalaOrganization: Option[String] = None, + sbtScalaVersion: Option[String] = None, + sbtScalaJars: Vector[File] = Vector.empty, + interProjectDependencies: Vector[Project] = Vector.empty, + excludeDependencies: Vector[(String, String)] = Vector.empty, + fallbackDependencies: Vector[FallbackDependency] = Vector.empty, + autoScalaLibrary: Boolean = true, + hasClassifiers: Boolean = false, + classifiers: Vector[String] = Vector.empty, + mavenProfiles: Vector[String] = Vector.empty, + scalaOrganization: Option[String] = None, + scalaVersion: Option[String] = None, + authenticationByRepositoryId: Vector[(String, Authentication)] = Vector.empty, + credentials: Seq[Credentials] = Vector.empty, + logger: Option[CacheLogger] = None, + cache: Option[File] = None, + @since + ivyHome: Option[File] = None, + @since + followHttpToHttpsRedirections: Option[Boolean] = None, + @since + strict: Option[Strict] = None, + extraProjects: Vector[Project] = Vector.empty, + forceVersions: Vector[(Module, String)] = Vector.empty, + @since + reconciliation: Vector[(ModuleMatchers, Reconciliation)] = Vector.empty, + @since + classpathOrder: Boolean = true, + @since + verbosityLevel: Int = 0, + ttl: Option[Duration] = CacheDefaults.ttl, + checksums: Vector[Option[String]] = CacheDefaults.checksums.toVector, + cachePolicies: Vector[CachePolicy] = + CacheDefaults.cachePolicies.toVector.map(FromCoursier.cachePolicy), + @since + missingOk: Boolean = false, + @since + sbtClassifiers: Boolean = false, + @since + providedInCompile: Boolean = false, // unused, kept for binary compatibility + @since + protocolHandlerDependencies: Seq[ModuleID] = Vector.empty, + retry: Option[(FiniteDuration, Int)] = None, + sameVersions: Seq[Set[InclExclRule]] = Nil, +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/FallbackDependency.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/FallbackDependency.scala new file mode 100644 index 000000000..0669a8baa --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/FallbackDependency.scala @@ -0,0 +1,13 @@ +package lmcoursier + +import java.net.URL + +import dataclass.data +import lmcoursier.definitions.Module +//FIXME use URI instead of URL +@data class FallbackDependency( + module: Module, + version: String, + url: URL, + changing: Boolean +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/Credentials.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/Credentials.scala new file mode 120000 index 000000000..b38bea81d --- /dev/null +++ 
b/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/Credentials.scala @@ -0,0 +1 @@ +../../../../../../src/main/scala/lmcoursier/credentials/Credentials.scala \ No newline at end of file diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/DirectCredentials.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/DirectCredentials.scala new file mode 100644 index 000000000..3dacc4832 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/DirectCredentials.scala @@ -0,0 +1,20 @@ +package lmcoursier.credentials + +import dataclass._ + +@data class DirectCredentials( + host: String = "", + username: String = "", + password: String = "", + @since("1.0") + realm: Option[String] = None, + @since("1.1") + optional: Boolean = true, + @since("1.2") + matchHost: Boolean = false, + @since("1.3") + httpsOnly: Boolean = true +) extends Credentials { + + override def toString(): String = s"DirectCredentials(host=$host, username=$username)" +} diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/FileCredentials.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/FileCredentials.scala new file mode 100644 index 000000000..bac8b53d4 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/credentials/FileCredentials.scala @@ -0,0 +1,8 @@ +package lmcoursier.credentials + +import dataclass.data + +@data class FileCredentials( + path: String, + optional: Boolean = true +) extends Credentials diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Attributes.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Attributes.scala new file mode 100644 index 000000000..e9e76760e --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Attributes.scala @@ -0,0 +1,8 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class Attributes( + `type`: Type, + classifier: Classifier +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Authentication.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Authentication.scala new file mode 100644 index 000000000..e02f99b2f --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Authentication.scala @@ -0,0 +1,19 @@ +package lmcoursier.definitions + +import dataclass._ + +@data class Authentication( + user: String, + password: String, + optional: Boolean = false, + realmOpt: Option[String] = None, + @since("1.0") + headers: Seq[(String, String)] = Nil, + @since("1.1") + httpsOnly: Boolean = true, + @since("1.2") + passOnRedirect: Boolean = false +) { + override def toString(): String = + s"Authentication(user=$user)" +} diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/CacheLogger.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/CacheLogger.scala new file mode 120000 index 000000000..02587c293 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/CacheLogger.scala @@ -0,0 +1 @@ +../../../../../../src/main/scala/lmcoursier/definitions/CacheLogger.scala \ No newline at end of file diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/CachePolicy.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/CachePolicy.scala new file mode 120000 index 000000000..7a9d3c092 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/CachePolicy.scala @@ -0,0 +1 @@ 
+../../../../../../src/main/scala/lmcoursier/definitions/CachePolicy.scala \ No newline at end of file diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/DateTime.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/DateTime.scala new file mode 100644 index 000000000..33c2d23ae --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/DateTime.scala @@ -0,0 +1,12 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class DateTime( + year: Int, + month: Int, + day: Int, + hour: Int, + minute: Int, + second: Int +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Definitions.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Definitions.scala new file mode 120000 index 000000000..49e909006 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Definitions.scala @@ -0,0 +1 @@ +../../../../../../src/main/scala/lmcoursier/definitions/Definitions.scala \ No newline at end of file diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Dependency.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Dependency.scala new file mode 100644 index 000000000..b4d43b4b3 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Dependency.scala @@ -0,0 +1,13 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class Dependency( + module: Module, + version: String, + configuration: Configuration, + exclusions: Set[(Organization, ModuleName)], + publication: Publication, + optional: Boolean, + transitive: Boolean +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Developer.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Developer.scala new file mode 100644 index 000000000..3fb4a5e7b --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Developer.scala @@ -0,0 +1,9 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class Developer( + id: String, + name: String, + url: String +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/FromCoursier.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/FromCoursier.scala new file mode 120000 index 000000000..98543357d --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/FromCoursier.scala @@ -0,0 +1 @@ +../../../../../../src/main/scala/lmcoursier/definitions/FromCoursier.scala \ No newline at end of file diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Info.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Info.scala new file mode 100644 index 000000000..579bbd0eb --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Info.scala @@ -0,0 +1,11 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class Info( + description: String, + homePage: String, + licenses: Seq[(String, Option[String])], + developers: Seq[Developer], + publication: Option[DateTime] +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Module.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Module.scala new file mode 100644 index 000000000..fd1c173fe --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Module.scala @@ -0,0 +1,8 @@ +package lmcoursier.definitions +import dataclass.data + +@data class Module( + organization: Organization, + name: 
ModuleName, + attributes: Map[String, String] +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/ModuleMatchers.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/ModuleMatchers.scala new file mode 100644 index 000000000..cc6a3100e --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/ModuleMatchers.scala @@ -0,0 +1,13 @@ +package lmcoursier.definitions + +import dataclass.data + +/** + * @param exclude Use "*" in either organization or name to match any. + * @param include Use "*" in either organization or name to match any. + */ +@data class ModuleMatchers( + exclude: Set[Module], + include: Set[Module], + includeByDefault: Boolean = true +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Project.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Project.scala new file mode 100644 index 000000000..97b5e7af4 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Project.scala @@ -0,0 +1,14 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class Project( + module: Module, + version: String, + dependencies: Seq[(Configuration, Dependency)], + configurations: Map[Configuration, Seq[Configuration]], + properties: Seq[(String, String)], + packagingOpt: Option[Type], + publications: Seq[(Configuration, Publication)], + info: Info +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Publication.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Publication.scala new file mode 100644 index 000000000..9323104b9 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Publication.scala @@ -0,0 +1,10 @@ +package lmcoursier.definitions + +import dataclass.data + +@data class Publication( + name: String, + `type`: Type, + ext: Extension, + classifier: Classifier +) diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Reconciliation.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Reconciliation.scala new file mode 120000 index 000000000..b9e12205d --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Reconciliation.scala @@ -0,0 +1 @@ +../../../../../../src/main/scala/lmcoursier/definitions/Reconciliation.scala \ No newline at end of file diff --git a/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Strict.scala b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Strict.scala new file mode 100644 index 000000000..22b7c56c7 --- /dev/null +++ b/lm-coursier/definitions/src/main/scala/lmcoursier/definitions/Strict.scala @@ -0,0 +1,12 @@ +package lmcoursier.definitions + +import dataclass._ + +@data class Strict( + include: Set[(String, String)] = Set(("*", "*")), + exclude: Set[(String, String)] = Set.empty, + ignoreIfForcedVersion: Boolean = true, + @since + includeByDefault: Boolean = false, + semVer: Boolean = false +) diff --git a/lm-coursier/metadata b/lm-coursier/metadata new file mode 160000 index 000000000..95874ca5b --- /dev/null +++ b/lm-coursier/metadata @@ -0,0 +1 @@ +Subproject commit 95874ca5bd90277c302f5a4d5c9b8119d91730af diff --git a/lm-coursier/src/main/scala/lmcoursier/CoursierDependencyResolution.scala b/lm-coursier/src/main/scala/lmcoursier/CoursierDependencyResolution.scala new file mode 100644 index 000000000..3d0d67b4f --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/CoursierDependencyResolution.scala @@ -0,0 +1,389 @@ +package lmcoursier + +import 
java.io.File +import java.net.{ URL, URLClassLoader } + +import coursier.{ Organization, Resolution } +import coursier.core.{ Classifier, Configuration } +import coursier.cache.CacheDefaults +import coursier.util.Artifact +import coursier.internal.Typelevel +import lmcoursier.definitions.ToCoursier +import lmcoursier.internal.{ + ArtifactsParams, + ArtifactsRun, + CoursierModuleDescriptor, + InterProjectRepository, + ResolutionParams, + ResolutionRun, + Resolvers, + SbtBootJars, + UpdateParams, + UpdateRun +} +import lmcoursier.syntax._ +import sbt.internal.librarymanagement.IvySbt +import sbt.librarymanagement._ +import sbt.util.Logger +import coursier.core.Dependency +import coursier.core.Publication + +import scala.util.{ Try, Failure } + +class CoursierDependencyResolution( + conf: CoursierConfiguration, + protocolHandlerConfiguration: Option[CoursierConfiguration], + bootstrappingProtocolHandler: Boolean +) extends DependencyResolutionInterface { + + def this(conf: CoursierConfiguration) = + this( + conf, + protocolHandlerConfiguration = None, + bootstrappingProtocolHandler = true + ) + + private var protocolHandlerClassLoader: Option[ClassLoader] = None + private val protocolHandlerClassLoaderLock = new Object + + private def fetchProtocolHandlerClassLoader( + configuration: UpdateConfiguration, + uwconfig: UnresolvedWarningConfiguration, + log: Logger + ): ClassLoader = { + + val conf0 = protocolHandlerConfiguration.getOrElse(conf) + + def isUnknownProtocol(rawURL: String): Boolean = { + Try(new URL(rawURL)) match { + case Failure(ex) if ex.getMessage.startsWith("unknown protocol: ") => true + case _ => false + } + } + + val confWithoutUnknownProtocol = + conf0.withResolvers( + conf0.resolvers.filter { + case maven: MavenRepository => + !isUnknownProtocol(maven.root) + case _ => + true + } + ) + + val resolution = new CoursierDependencyResolution( + conf = confWithoutUnknownProtocol, + protocolHandlerConfiguration = None, + bootstrappingProtocolHandler = false + ) + + val fakeModule = + ModuleDescriptorConfiguration( + ModuleID("lmcoursier", "lmcoursier", "0.1.0"), + ModuleInfo("protocol-handler") + ) + .withDependencies(conf0.protocolHandlerDependencies.toVector) + + val reportOrUnresolved = + resolution.update(moduleDescriptor(fakeModule), configuration, uwconfig, log) + + val report = reportOrUnresolved match { + case Right(report0) => + report0 + + case Left(unresolvedWarning) => + import sbt.util.ShowLines._ + unresolvedWarning.lines.foreach(log.warn(_)) + throw unresolvedWarning.resolveException + } + + val jars = + for { + reportConfiguration <- report.configurations.filter(_.configuration.name == "runtime") + module <- reportConfiguration.modules + (_, jar) <- module.artifacts + } yield jar + + new URLClassLoader(jars.map(_.toURI().toURL()).toArray) + } + + /* + * Based on earlier implementations by @leonardehrenfried (https://github.com/sbt/librarymanagement/pull/190) + * and @andreaTP (https://github.com/sbt/librarymanagement/pull/270), then adapted to the code from the former + * sbt-coursier, that was moved to this module. 
+ */ + + def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor = + CoursierModuleDescriptor(moduleSetting, conf) + + def update( + module: ModuleDescriptor, + configuration: UpdateConfiguration, + uwconfig: UnresolvedWarningConfiguration, + log: Logger + ): Either[UnresolvedWarning, UpdateReport] = { + + if (bootstrappingProtocolHandler && protocolHandlerClassLoader.isEmpty) + protocolHandlerClassLoaderLock.synchronized { + if (bootstrappingProtocolHandler && protocolHandlerClassLoader.isEmpty) { + val classLoader = fetchProtocolHandlerClassLoader(configuration, uwconfig, log) + protocolHandlerClassLoader = Some(classLoader) + } + } + + val conf = this.conf.withUpdateConfiguration(configuration) + + // TODO Take stuff in configuration into account? uwconfig too? + + val module0 = module match { + case c: CoursierModuleDescriptor => + // seems not to happen, not sure what DependencyResolutionInterface.moduleDescriptor is for + c.descriptor + case i: IvySbt#Module => + i.moduleSettings match { + case d: ModuleDescriptorConfiguration => d + case other => sys.error(s"unrecognized module settings: $other") + } + case _ => + sys.error(s"unrecognized ModuleDescriptor type: $module") + } + + val so = conf.scalaOrganization + .map(Organization(_)) + .orElse(module0.scalaModuleInfo.map(m => Organization(m.scalaOrganization))) + .getOrElse(Organization("org.scala-lang")) + val sv = conf.scalaVersion + .orElse(module0.scalaModuleInfo.map(_.scalaFullVersion)) + // FIXME Manage to do stuff below without a scala version? + .getOrElse(scala.util.Properties.versionNumberString) + + val sbv = module0.scalaModuleInfo.map(_.scalaBinaryVersion).getOrElse { + sv.split('.').take(2).mkString(".") + } + val projectPlatform = module0.scalaModuleInfo.flatMap(_.platform) + val (mod, ver) = FromSbt.moduleVersion( + module0.module, + sv, + sbv, + optionalCrossVer = true, + projectPlatform = projectPlatform + ) + val interProjectDependencies = { + val needed = conf.interProjectDependencies.exists { p => + p.module == mod && p.version == ver + } + + if (needed) + conf.interProjectDependencies.map(ToCoursier.project) + else + Vector.empty[coursier.core.Project] + } + + val extraProjects = conf.extraProjects.map(ToCoursier.project) + + val verbosityLevel = conf.verbosityLevel + + val ttl = conf.ttl + val loggerOpt = conf.logger.map(ToCoursier.cacheLogger) + val cache = conf.cache.getOrElse(CacheDefaults.location) + val cachePolicies = conf.cachePolicies.map(ToCoursier.cachePolicy) + val checksums = conf.checksums + val projectName = module0.module.name + + val ivyProperties = ResolutionParams.defaultIvyProperties(conf.ivyHome) + + val classifiers = + if (conf.hasClassifiers) + Some(conf.classifiers.map(Classifier(_))) + else + None + + val authenticationByRepositoryId = conf.authenticationByRepositoryId.toMap + + val mainRepositories = conf.resolvers + .flatMap { resolver => + Resolvers.repository( + resolver, + ivyProperties, + log, + authenticationByRepositoryId.get(resolver.name).map(ToCoursier.authentication), + protocolHandlerClassLoader.toSeq, + ) + } + + val interProjectRepo = InterProjectRepository(interProjectDependencies) + val extraProjectsRepo = InterProjectRepository(extraProjects) + + val dependencies = module0.dependencies + .flatMap { d => + // crossVersion sometimes already taken into account (when called via the update task), sometimes not + // (e.g. 
sbt-dotty 0.13.0-RC1) + FromSbt.dependencies(d, sv, sbv, optionalCrossVer = true) + } + .map { case (config, dep) => + (ToCoursier.configuration(config), ToCoursier.dependency(dep)) + } + + val orderedConfigs = Inputs + .orderedConfigurations(Inputs.configExtendsSeq(module0.configurations)) + .map { case (config, extends0) => + (ToCoursier.configuration(config), extends0.map(ToCoursier.configuration)) + } + + val typelevel = so == Typelevel.typelevelOrg + + val cache0 = coursier.cache + .FileCache() + .withLocation(cache) + .withCachePolicies(cachePolicies) + .withTtl(ttl) + .withChecksums(checksums) + .withCredentials(conf.credentials.map(ToCoursier.credentials)) + .withFollowHttpToHttpsRedirections(conf.followHttpToHttpsRedirections.getOrElse(true)) + + val excludeDependencies = conf.excludeDependencies.map { case (strOrg, strName) => + (coursier.Organization(strOrg), coursier.ModuleName(strName)) + }.toSet + + val resolutionParams = ResolutionParams( + dependencies = dependencies, + fallbackDependencies = conf.fallbackDependencies, + orderedConfigs = orderedConfigs, + autoScalaLibOpt = if (conf.autoScalaLibrary) Some((so, sv)) else None, + mainRepositories = mainRepositories, + parentProjectCache = Map.empty, + interProjectDependencies = interProjectDependencies, + internalRepositories = Seq(interProjectRepo, extraProjectsRepo), + sbtClassifiers = conf.sbtClassifiers, + projectName = projectName, + loggerOpt = loggerOpt, + cache = cache0, + parallel = conf.parallelDownloads, + params = coursier.params + .ResolutionParams() + .withMaxIterations(conf.maxIterations) + .withProfiles(conf.mavenProfiles.toSet) + .withForceVersion(conf.forceVersions.map { case (k, v) => (ToCoursier.module(k), v) }.toMap) + .withTypelevel(typelevel) + .withReconciliation(ToCoursier.reconciliation(conf.reconciliation)) + .withExclusions(excludeDependencies) + .withRules(ToCoursier.sameVersions(conf.sameVersions)), + strictOpt = conf.strict.map(ToCoursier.strict), + missingOk = conf.missingOk, + retry = conf.retry.getOrElse(ResolutionParams.defaultRetry), + ) + + def artifactsParams(resolutions: Map[Configuration, Resolution]): ArtifactsParams = + ArtifactsParams( + classifiers = classifiers, + resolutions = resolutions.values.toSeq.distinct, + includeSignatures = false, + loggerOpt = loggerOpt, + projectName = projectName, + sbtClassifiers = conf.sbtClassifiers, + cache = cache0, + parallel = conf.parallelDownloads, + classpathOrder = conf.classpathOrder, + missingOk = conf.missingOk + ) + + val sbtBootJarOverrides = SbtBootJars( + conf.sbtScalaOrganization.fold(Organization("org.scala-lang"))(Organization(_)), + conf.sbtScalaVersion.getOrElse(sv), + conf.sbtScalaJars + ) + + val configs = Inputs.coursierConfigurationsMap(module0.configurations).map { case (k, l) => + ToCoursier.configuration(k) -> l.map(ToCoursier.configuration) + } + + def updateParams( + resolutions: Map[Configuration, Resolution], + artifacts: Seq[(Dependency, Publication, Artifact, Option[File])] + ) = + UpdateParams( + thisModule = (ToCoursier.module(mod), ver), + artifacts = artifacts.collect { case (d, p, a, Some(f)) => a -> f }.toMap, + fullArtifacts = Some(artifacts.map { case (d, p, a, f) => (d, p, a) -> f }.toMap), + classifiers = classifiers, + configs = configs, + dependencies = dependencies, + forceVersions = conf.forceVersions.map { case (m, v) => (ToCoursier.module(m), v) }.toMap, + interProjectDependencies = interProjectDependencies, + res = resolutions, + includeSignatures = false, + sbtBootJarOverrides = sbtBootJarOverrides, 
+ classpathOrder = conf.classpathOrder, + missingOk = conf.missingOk, + classLoaders = protocolHandlerClassLoader.toSeq, + ) + + val e = for { + resolutions <- ResolutionRun.resolutions(resolutionParams, verbosityLevel, log) + artifactsParams0 = artifactsParams(resolutions) + artifacts <- ArtifactsRun(artifactsParams0, verbosityLevel, log) + } yield { + val updateParams0 = updateParams(resolutions, artifacts.fullDetailedArtifacts) + UpdateRun.update(updateParams0, verbosityLevel, log) + } + e.left.map(unresolvedWarningOrThrow(uwconfig, _)) + } + + private def unresolvedWarningOrThrow( + uwconfig: UnresolvedWarningConfiguration, + ex: coursier.error.CoursierError + ): UnresolvedWarning = { + + // TODO Take coursier.error.FetchError.DownloadingArtifacts into account + + val downloadErrors = ex match { + case ex0: coursier.error.ResolutionError => + ex0.errors.collect { case err: coursier.error.ResolutionError.CantDownloadModule => + err + } + case _ => + Nil + } + val otherErrors = ex match { + case ex0: coursier.error.ResolutionError => + ex0.errors.flatMap { + case _: coursier.error.ResolutionError.CantDownloadModule => None + case err => Some(err) + } + case _ => + Seq(ex) + } + + if (otherErrors.isEmpty) { + val r = new ResolveException( + downloadErrors.map(_.getMessage), + downloadErrors.map { err => + ModuleID(err.module.organization.value, err.module.name.value, err.version) + .withExtraAttributes(err.module.attributes) + } + ) + UnresolvedWarning(r, uwconfig) + } else + throw ex + } +} + +object CoursierDependencyResolution { + def apply(configuration: CoursierConfiguration): DependencyResolution = + DependencyResolution(new CoursierDependencyResolution(configuration)) + + def apply( + configuration: CoursierConfiguration, + protocolHandlerConfiguration: Option[CoursierConfiguration] + ): DependencyResolution = + DependencyResolution( + new CoursierDependencyResolution( + configuration, + protocolHandlerConfiguration, + bootstrappingProtocolHandler = true + ) + ) + + def defaultCacheLocation: File = + CacheDefaults.location +} diff --git a/lm-coursier/src/main/scala/lmcoursier/FromSbt.scala b/lm-coursier/src/main/scala/lmcoursier/FromSbt.scala new file mode 100644 index 000000000..3f1b28f1e --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/FromSbt.scala @@ -0,0 +1,219 @@ +package lmcoursier + +import coursier.ivy.IvyXml.{ mappings => ivyXmlMappings } +import lmcoursier.definitions.{ + Classifier, + Configuration, + Dependency, + Extension, + Info, + Module, + ModuleName, + Organization, + Project, + Publication, + Type +} +import sbt.internal.librarymanagement.mavenint.SbtPomExtraProperties +import sbt.librarymanagement.{ Configuration => _, MavenRepository => _, _ } + +object FromSbt { + + private def sbtModuleIdName( + moduleId: ModuleID, + scalaVersion: => String, + scalaBinaryVersion: => String, + optionalCrossVer: Boolean = false, + projectPlatform: Option[String], + ): String = { + val name0 = moduleId.name + val name1 = + moduleId.crossVersion match + case _: Disabled => name0 + case _ => addPlatformSuffix(name0, moduleId.platformOpt, projectPlatform) + val updatedName = CrossVersion(moduleId.crossVersion, scalaVersion, scalaBinaryVersion) + .fold(name1)(_(name1)) + if (!optionalCrossVer || updatedName.length <= name0.length) + updatedName + else { + val suffix = updatedName.substring(name0.length) + if (name0.endsWith(suffix)) + name0 + else + updatedName + } + } + + private def addPlatformSuffix( + name: String, + platformOpt: Option[String], + projectPlatform: 
Option[String] + ): String = { + def addSuffix(platformName: String): String = + platformName match { + case "" | "jvm" => name + case _ => s"${name}_$platformName" + } + (platformOpt, projectPlatform) match { + case (Some(p), None) => addSuffix(p) + case (_, Some(p)) => addSuffix(p) + case _ => name + } + } + + private def attributes(attr: Map[String, String]): Map[String, String] = + attr + .map { case (k, v) => + k.stripPrefix("e:") -> v + } + .filter { case (k, _) => + !k.startsWith(SbtPomExtraProperties.POM_INFO_KEY_PREFIX) + } + + def moduleVersion( + module: ModuleID, + scalaVersion: String, + scalaBinaryVersion: String, + optionalCrossVer: Boolean, + projectPlatform: Option[String], + ): (Module, String) = { + + val fullName = + sbtModuleIdName(module, scalaVersion, scalaBinaryVersion, optionalCrossVer, projectPlatform) + + val module0 = Module( + Organization(module.organization), + ModuleName(fullName), + attributes(module.extraDependencyAttributes) + ) + val version = module.revision + + (module0, version) + } + + def moduleVersion( + module: ModuleID, + scalaVersion: String, + scalaBinaryVersion: String + ): (Module, String) = + moduleVersion( + module, + scalaVersion, + scalaBinaryVersion, + optionalCrossVer = false, + projectPlatform = None + ) + + def dependencies( + module: ModuleID, + scalaVersion: String, + scalaBinaryVersion: String, + optionalCrossVer: Boolean = false, + projectPlatform: Option[String] = None, + ): Seq[(Configuration, Dependency)] = { + + // TODO Warn about unsupported properties in `module` + + val (module0, version) = + moduleVersion(module, scalaVersion, scalaBinaryVersion, optionalCrossVer, projectPlatform) + + val dep = Dependency( + module0, + version, + Configuration(""), + exclusions = module.exclusions.map { rule => + // FIXME Other `rule` fields are ignored here + (Organization(rule.organization), ModuleName(rule.name)) + }.toSet, + Publication("", Type(""), Extension(""), Classifier("")), + optional = false, + transitive = module.isTransitive + ) + + val mapping = module.configurations.getOrElse("compile") + val allMappings = ivyXmlMappings(mapping).map { case (from, to) => + (Configuration(from.value), Configuration(to.value)) + } + + val publications = + if (module.explicitArtifacts.isEmpty) + Seq(Publication("", Type(""), Extension(""), Classifier(""))) + else + module.explicitArtifacts + .map { a => + Publication( + name = a.name, + `type` = Type(a.`type`), + ext = Extension(a.extension), + classifier = a.classifier.fold(Classifier(""))(Classifier(_)) + ) + } + + for { + (from, to) <- allMappings.distinct + pub <- publications.distinct + } yield { + val dep0 = dep + .withConfiguration(to) + .withPublication(pub) + from -> dep0 + } + } + + def fallbackDependencies( + allDependencies: Seq[ModuleID], + scalaVersion: String, + scalaBinaryVersion: String + ): Seq[FallbackDependency] = + for { + module <- allDependencies + artifact <- module.explicitArtifacts + url <- artifact.url.toSeq + } yield { + val (module0, version) = moduleVersion(module, scalaVersion, scalaBinaryVersion) + FallbackDependency(module0, version, url.toURL, module.isChanging) + } + + def project( + projectID: ModuleID, + allDependencies: Seq[ModuleID], + ivyConfigurations: Map[Configuration, Seq[Configuration]], + scalaVersion: String, + scalaBinaryVersion: String, + projectPlatform: Option[String], + ): Project = { + + val deps = allDependencies.flatMap( + dependencies(_, scalaVersion, scalaBinaryVersion, projectPlatform = projectPlatform) + ) + + val prefix = "e:" + 
SbtPomExtraProperties.POM_INFO_KEY_PREFIX + val properties = projectID.extraAttributes.view + .filterKeys(_.startsWith(prefix)) + .toSeq + .map { case (k, v) => (k.stripPrefix("e:"), v) } + .sortBy(_._1) + + Project( + Module( + Organization(projectID.organization), + ModuleName( + sbtModuleIdName( + projectID, + scalaVersion, + scalaBinaryVersion, + projectPlatform = projectPlatform + ) + ), + attributes(projectID.extraDependencyAttributes) + ), + projectID.revision, + deps, + ivyConfigurations, + properties, + None, + Nil, + Info("", "", Nil, Nil, None) + ) + } +} diff --git a/lm-coursier/src/main/scala/lmcoursier/Inputs.scala b/lm-coursier/src/main/scala/lmcoursier/Inputs.scala new file mode 100644 index 000000000..01272ddcb --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/Inputs.scala @@ -0,0 +1,165 @@ +package lmcoursier + +import coursier.ivy.IvyXml.{ mappings => initialIvyXmlMappings } +import lmcoursier.definitions.{ Configuration, Module, ModuleName, Organization } +import sbt.librarymanagement.{ CrossVersion, InclExclRule, ModuleID } +import sbt.util.Logger + +import scala.collection.mutable + +object Inputs { + + def ivyXmlMappings(mapping: String): Seq[(Configuration, Configuration)] = + initialIvyXmlMappings(mapping).map { case (from, to) => + Configuration(from.value) -> Configuration(to.value) + } + + def configExtendsSeq( + configurations: Seq[sbt.librarymanagement.Configuration] + ): Seq[(Configuration, Seq[Configuration])] = + configurations + .map(cfg => Configuration(cfg.name) -> cfg.extendsConfigs.map(c => Configuration(c.name))) + + @deprecated("Now unused internally, to be removed in the future", "2.0.0-RC6-5") + def configExtends( + configurations: Seq[sbt.librarymanagement.Configuration] + ): Map[Configuration, Seq[Configuration]] = + configurations + .map(cfg => Configuration(cfg.name) -> cfg.extendsConfigs.map(c => Configuration(c.name))) + .toMap + + @deprecated("Use coursierConfigurationsMap instead", "2.0.0-RC6-5") + def coursierConfigurations( + configurations: Seq[sbt.librarymanagement.Configuration], + shadedConfigOpt: Option[String] = None + ): Map[Configuration, Set[Configuration]] = + coursierConfigurationsMap(configurations) + + def coursierConfigurationsMap( + configurations: Seq[sbt.librarymanagement.Configuration] + ): Map[Configuration, Set[Configuration]] = { + + val configs0 = configExtendsSeq(configurations).toMap + + def allExtends(c: Configuration) = { + // possibly bad complexity + def helper(current: Set[Configuration]): Set[Configuration] = { + val newSet = current ++ current.flatMap(configs0.getOrElse(_, Nil)) + if ((newSet -- current).nonEmpty) + helper(newSet) + else + newSet + } + + helper(Set(c)) + } + + configs0.map { case (config, _) => + config -> allExtends(config) + } + } + + def orderedConfigurations( + configurations: Seq[(Configuration, Seq[Configuration])] + ): Seq[(Configuration, Seq[Configuration])] = { + + val map = configurations.toMap + + def helper( + done: Set[Configuration], + toAdd: List[Configuration] + ): LazyList[(Configuration, Seq[Configuration])] = + toAdd match { + case Nil => LazyList.empty + case config :: rest => + val extends0 = map.getOrElse(config, Nil) + val missingExtends = extends0.filterNot(done) + if (missingExtends.isEmpty) + (config, extends0) #:: helper(done + config, rest) + else + helper(done, missingExtends.toList ::: toAdd) + } + + helper(Set.empty, configurations.map(_._1).toList).toVector + } + + @deprecated("Now unused internally, to be removed in the future", "2.0.0-RC6-5") + def 
ivyGraphs(configurations: Map[Configuration, Seq[Configuration]]): Seq[Set[Configuration]] = {
+
+    // probably bad complexity, but that shouldn't matter given the size of the graphs involved...
+
+    final class Wrapper(val set: mutable.HashSet[Configuration]) {
+      def ++=(other: Wrapper): this.type = {
+        set ++= other.set
+        this
+      }
+    }
+
+    val sets =
+      new mutable.HashMap[Configuration, Wrapper] ++= configurations.map { case (k, l) =>
+        val s = new mutable.HashSet[Configuration]
+        s ++= l
+        s += k
+        k -> new Wrapper(s)
+      }
+
+    for (k <- configurations.keys) {
+      val s = sets(k)
+
+      var foundNew = true
+      while (foundNew) {
+        foundNew = false
+        for (other <- s.set.toVector) {
+          val otherS = sets(other)
+          if (!otherS.eq(s)) {
+            s ++= otherS
+            sets += other -> s
+            foundNew = true
+          }
+        }
+      }
+    }
+
+    sets.values.toVector.distinct.map(_.set.toSet)
+  }
+
+  def exclusionsSeq(
+    excludeDeps: Seq[InclExclRule],
+    sv: String,
+    sbv: String,
+    log: Logger
+  ): Seq[(Organization, ModuleName)] = {
+
+    var anyNonSupportedExclusionRule = false
+
+    val res = excludeDeps
+      .flatMap { rule =>
+        if (rule.artifact != "*" || rule.configurations.nonEmpty) {
+          log.warn(s"Unsupported exclusion rule $rule")
+          anyNonSupportedExclusionRule = true
+          Nil
+        } else {
+          val name = CrossVersion(rule.crossVersion, sv, sbv)
+            .fold(rule.name)(_(rule.name))
+          Seq((Organization(rule.organization), ModuleName(name)))
+        }
+      }
+
+    if (anyNonSupportedExclusionRule)
+      log.warn("Only supported exclusion rule fields: organization, name")
+
+    res
+  }
+
+  def exclusions(
+    excludeDeps: Seq[InclExclRule],
+    sv: String,
+    sbv: String,
+    log: Logger
+  ): Set[(Organization, ModuleName)] =
+    exclusionsSeq(excludeDeps, sv, sbv, log).toSet
+
+  def forceVersions(depOverrides: Seq[ModuleID], sv: String, sbv: String): Seq[(Module, String)] =
+    depOverrides.map(FromSbt.moduleVersion(_, sv, sbv))
+
+}
diff --git a/lm-coursier/src/main/scala/lmcoursier/IvyXml.scala b/lm-coursier/src/main/scala/lmcoursier/IvyXml.scala
new file mode 100644
index 000000000..2984a8f31
--- /dev/null
+++ b/lm-coursier/src/main/scala/lmcoursier/IvyXml.scala
@@ -0,0 +1,124 @@
+package lmcoursier
+
+import lmcoursier.definitions.{ Configuration, Project }
+
+import scala.xml.{ Node, PrefixedAttribute }
+
+object IvyXml {
+
+  @deprecated("Use the override accepting 3 arguments", "2.0.0-RC6-6")
+  def apply(
+    currentProject: Project,
+    exclusions: Seq[(String, String)]
+  ): String =
+    apply(currentProject, exclusions, Nil)
+
+  def apply(
+    currentProject: Project,
+    exclusions: Seq[(String, String)],
+    overrides: Seq[(String, String, String)]
+  ): String = {
+
+    // Important: width = Int.MaxValue, so that no tag gets truncated.
+    // In particular, that prevents things like <foo /> to be split to
+    // <foo>
+    // </foo>
+    // by the pretty-printer.
+    // See https://github.com/sbt/sbt/issues/3412.
+    val printer = new scala.xml.PrettyPrinter(Int.MaxValue, 2)
+
+    """<?xml version="1.0" encoding="UTF-8"?>""" + '\n' +
+      printer.format(content(currentProject, exclusions, overrides))
+  }
+
+  // These are required for publish to be fine, later on.
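+  // Roughly, the document produced below has the following shape (simplified and
+  // illustrative only; the actual element names, attributes and extra "e:" properties
+  // come from the Project passed in):
+  //
+  //   <ivy-module version="2.0" xmlns:e="http://ant.apache.org/ivy/extra">
+  //     <info organisation="..." module="..." revision="...">...</info>
+  //     <configurations>...</configurations>
+  //     <publications>...</publications>
+  //     <dependencies>...</dependencies>
+  //   </ivy-module>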
+  private def content(
+    project: Project,
+    exclusions: Seq[(String, String)],
+    overrides: Seq[(String, String, String)]
+  ): Node = {
+
+    val props = project.module.attributes.toSeq ++ project.properties
+    val infoAttrs = props.foldLeft[xml.MetaData](xml.Null) { case (acc, (k, v)) =>
+      new PrefixedAttribute("e", k, v, acc)
+    }
+
+    val licenseElems = project.info.licenses.map { case (name, urlOpt) =>
+      val n = <license name={name} />
+
+      urlOpt.fold(n) { url =>
+        n % <x url={url} />.attributes
+      }
+    }
+
+    val infoElem = {
+      <info organisation={project.module.organization.value} module={project.module.name.value} revision={project.version}>
+        {licenseElems}
+        <description>{project.info.description}</description>
+      </info>
+    } % infoAttrs
+
+    val confElems = project.configurations.toVector.collect { case (name, extends0) =>
+      val n = <conf name={name.value} visibility="public" description="" />
+      if (extends0.nonEmpty)
+        n % <x extends={extends0.map(_.value).mkString(",")} />.attributes
+      else
+        n
+    }
+
+    val publications = project.publications
+      .groupMap((_, p) => p)((cfg, _) => cfg)
+
+    val publicationElems = publications.map { case (pub, configs) =>
+      val n = <artifact name={pub.name} type={pub.`type`.value} ext={pub.ext.value} conf={configs.map(_.value).mkString(",")} />
+
+      if (pub.classifier.value.nonEmpty)
+        n % <x e:classifier={pub.classifier.value} />.attributes
+      else
+        n
+    }
+
+    val dependencyElems = project.dependencies.toVector.map { case (conf, dep) =>
+      val excludes = dep.exclusions.toSeq.map { case (org, name) =>
+        <exclude org={org.value} module={name.value} name="*" type="*" ext="*" conf="" matcher="exact"/>
+      }
+
+      val n = <dependency org={dep.module.organization.value} name={dep.module.name.value} rev={dep.version} conf={s"${conf.value}->${dep.configuration.value}"}>
+        {excludes}
+      </dependency>
+
+      val moduleAttrs = dep.module.attributes.foldLeft[xml.MetaData](xml.Null) {
+        case (acc, (k, v)) =>
+          new PrefixedAttribute("e", k, v, acc)
+      }
+
+      n % moduleAttrs
+    }
+
+    val excludeElems = exclusions.toVector.map { case (org, name) =>
+      <exclude org={org} module={name} artifact="*" type="*" ext="*" matcher="exact"/>
+    }
+
+    val overrideElems = overrides.toVector.map { case (org, name, ver) =>
+      <override org={org} module={name} rev={ver} matcher="exact"/>
+    }
+
+    <ivy-module version="2.0" xmlns:e="http://ant.apache.org/ivy/extra">
+      {infoElem}
+      <configurations>{confElems}</configurations>
+      <publications>{publicationElems}</publications>
+      <dependencies>{dependencyElems}{excludeElems}{overrideElems}</dependencies>
+    </ivy-module>
+  }
+
+}
diff --git a/lm-coursier/src/main/scala/lmcoursier/credentials/Credentials.scala b/lm-coursier/src/main/scala/lmcoursier/credentials/Credentials.scala
new file mode 100644
index 000000000..72ba24d05
--- /dev/null
+++ b/lm-coursier/src/main/scala/lmcoursier/credentials/Credentials.scala
@@ -0,0 +1,5 @@
+package lmcoursier.credentials
+
+abstract class Credentials extends Serializable
+
+object Credentials
diff --git a/lm-coursier/src/main/scala/lmcoursier/definitions/CacheLogger.scala b/lm-coursier/src/main/scala/lmcoursier/definitions/CacheLogger.scala
new file mode 100644
index 000000000..e64c9ae07
--- /dev/null
+++ b/lm-coursier/src/main/scala/lmcoursier/definitions/CacheLogger.scala
@@ -0,0 +1,38 @@
+package lmcoursier.definitions
+
+abstract class CacheLogger {
+  def foundLocally(url: String): Unit = {}
+
+  def downloadingArtifact(url: String): Unit = {}
+
+  def downloadProgress(url: String, downloaded: Long): Unit = {}
+
+  def downloadedArtifact(url: String, success: Boolean): Unit = {}
+  def checkingUpdates(url: String, currentTimeOpt: Option[Long]): Unit = {}
+  def checkingUpdatesResult(
+    url: String,
+    currentTimeOpt: Option[Long],
+    remoteTimeOpt: Option[Long]
+  ): Unit = {}
+
+  def downloadLength(
+    url: String,
+    totalLength: Long,
+    alreadyDownloaded: Long,
+    watching: Boolean
+  ): Unit = {}
+
+  def gettingLength(url: String): Unit = {}
+  def gettingLengthResult(url: String, length: Option[Long]): Unit = {}
+
+  def removedCorruptFile(url: String, reason: Option[String]): Unit = {}
+
+  // sizeHint: estimated # of artifacts to be downloaded (doesn't include side stuff like checksums)
+  def init(sizeHint: Option[Int] = None): Unit = {}
+  def stop(): Unit = {}
+}
+
+object CacheLogger {
+  def nop: CacheLogger =
+    new CacheLogger {}
+}
diff --git a/lm-coursier/src/main/scala/lmcoursier/definitions/CachePolicy.scala b/lm-coursier/src/main/scala/lmcoursier/definitions/CachePolicy.scala new file mode 100644 index
000000000..482c323a8 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/definitions/CachePolicy.scala @@ -0,0 +1,72 @@ +package lmcoursier.definitions + +sealed abstract class CachePolicy extends Serializable + +object CachePolicy { + /* NOTE: the following comments are copied from coursier.cache.CachePolicy for the benefit of users within an IDE + that reads the javadocs. Please keep in sync from the original ADT. + */ + + /** Only pick local files, possibly from the cache. Don't try to download anything. */ + case object LocalOnly extends CachePolicy + + /** Only pick local files, possibly from the cache. Don't return changing artifacts (whose last check is) older than TTL */ + case object LocalOnlyIfValid extends CachePolicy + + /** + * Only pick local files. If one of these local files corresponds to a changing artifact, check + * for updates, and download these if needed. + * + * If no local file is found, *don't* try download it. Updates are only checked for files already + * in cache. + * + * Follows the TTL parameter (assumes no update is needed if the last one is recent enough). + */ + case object LocalUpdateChanging extends CachePolicy + + /** + * Only pick local files, check if any update is available for them, and download these if needed. + * + * If no local file is found, *don't* try download it. Updates are only checked for files already + * in cache. + * + * Follows the TTL parameter (assumes no update is needed if the last one is recent enough). + * + * Unlike `LocalUpdateChanging`, all found local files are checked for updates, not just the + * changing ones. + */ + case object LocalUpdate extends CachePolicy + + /** + * Pick local files, and download the missing ones. + * + * For changing ones, check for updates, and download those if any. + * + * Follows the TTL parameter (assumes no update is needed if the last one is recent enough). + */ + case object UpdateChanging extends CachePolicy + + /** + * Pick local files, download the missing ones, check for updates and download those if any. + * + * Follows the TTL parameter (assumes no update is needed if the last one is recent enough). + * + * Unlike `UpdateChanging`, all found local files are checked for updates, not just the changing + * ones. + */ + case object Update extends CachePolicy + + /** + * Pick local files, download the missing ones. + * + * No updates are checked for files already downloaded. + */ + case object FetchMissing extends CachePolicy + + /** + * (Re-)download all files. + * + * Erases files already in cache. 
+ */ + case object ForceDownload extends CachePolicy +} diff --git a/lm-coursier/src/main/scala/lmcoursier/definitions/Definitions.scala b/lm-coursier/src/main/scala/lmcoursier/definitions/Definitions.scala new file mode 100644 index 000000000..f60cbe505 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/definitions/Definitions.scala @@ -0,0 +1,13 @@ +package lmcoursier.definitions + +final case class Classifier(value: String) extends AnyVal + +final case class Configuration(value: String) extends AnyVal + +final case class Extension(value: String) extends AnyVal + +final case class ModuleName(value: String) extends AnyVal + +final case class Organization(value: String) extends AnyVal + +final case class Type(value: String) extends AnyVal diff --git a/lm-coursier/src/main/scala/lmcoursier/definitions/FromCoursier.scala b/lm-coursier/src/main/scala/lmcoursier/definitions/FromCoursier.scala new file mode 100644 index 000000000..fb92ac7d2 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/definitions/FromCoursier.scala @@ -0,0 +1,18 @@ +package lmcoursier.definitions + +// TODO Make private[lmcoursier] +// private[coursier] +object FromCoursier { + + def cachePolicy(r: coursier.cache.CachePolicy): CachePolicy = + (r: @unchecked) match { + case coursier.cache.CachePolicy.LocalOnly => CachePolicy.LocalOnly + case coursier.cache.CachePolicy.LocalOnlyIfValid => CachePolicy.LocalOnlyIfValid + case coursier.cache.CachePolicy.LocalUpdateChanging => CachePolicy.LocalUpdateChanging + case coursier.cache.CachePolicy.LocalUpdate => CachePolicy.LocalUpdate + case coursier.cache.CachePolicy.UpdateChanging => CachePolicy.UpdateChanging + case coursier.cache.CachePolicy.Update => CachePolicy.Update + case coursier.cache.CachePolicy.FetchMissing => CachePolicy.FetchMissing + case coursier.cache.CachePolicy.ForceDownload => CachePolicy.ForceDownload + } +} diff --git a/lm-coursier/src/main/scala/lmcoursier/definitions/Reconciliation.scala b/lm-coursier/src/main/scala/lmcoursier/definitions/Reconciliation.scala new file mode 100644 index 000000000..31ff20a43 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/definitions/Reconciliation.scala @@ -0,0 +1,19 @@ +package lmcoursier.definitions + +sealed abstract class Reconciliation extends Serializable + +object Reconciliation { + case object Default extends Reconciliation + case object Relaxed extends Reconciliation + case object Strict extends Reconciliation + case object SemVer extends Reconciliation + + def apply(input: String): Option[Reconciliation] = + input match { + case "default" => Some(Default) + case "relaxed" => Some(Relaxed) + case "strict" => Some(Strict) + case "semver" => Some(SemVer) + case _ => None + } +} diff --git a/lm-coursier/src/main/scala/lmcoursier/definitions/ToCoursier.scala b/lm-coursier/src/main/scala/lmcoursier/definitions/ToCoursier.scala new file mode 100644 index 000000000..3f786d1da --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/definitions/ToCoursier.scala @@ -0,0 +1,226 @@ +package lmcoursier.definitions + +import lmcoursier.credentials.{ Credentials, DirectCredentials, FileCredentials } +import sbt.librarymanagement.InclExclRule + +// TODO Make private[lmcoursier] +// private[coursier] +object ToCoursier { + + def configuration(configuration: Configuration): coursier.core.Configuration = + coursier.core.Configuration(configuration.value) + + private def attributes(attributes: Attributes): coursier.core.Attributes = + coursier.core.Attributes( + coursier.core.Type(attributes.`type`.value), + 
coursier.core.Classifier(attributes.classifier.value) + ) + + def publication(publication: Publication): coursier.core.Publication = + coursier.core.Publication( + publication.name, + coursier.core.Type(publication.`type`.value), + coursier.core.Extension(publication.ext.value), + coursier.core.Classifier(publication.classifier.value) + ) + + def authentication(authentication: Authentication): coursier.core.Authentication = + coursier.core + .Authentication(authentication.user, authentication.password) + .withOptional(authentication.optional) + .withRealmOpt(authentication.realmOpt) + .withHttpHeaders(authentication.headers) + .withHttpsOnly(authentication.httpsOnly) + .withPassOnRedirect(authentication.passOnRedirect) + + def module(mod: Module): coursier.core.Module = + module(mod.organization.value, mod.name.value, mod.attributes) + + def module( + organization: String, + name: String, + attributes: Map[String, String] = Map.empty + ): coursier.core.Module = + coursier.core.Module( + coursier.core.Organization(organization), + coursier.core.ModuleName(name), + attributes + ) + + def moduleMatchers(matcher: ModuleMatchers): coursier.util.ModuleMatchers = + coursier.util.ModuleMatchers( + exclude = matcher.exclude map { x => + coursier.util.ModuleMatcher(module(x)) + }, + include = matcher.include map { x => + coursier.util.ModuleMatcher(module(x)) + }, + includeByDefault = matcher.includeByDefault + ) + + def reconciliation(r: Reconciliation): coursier.core.Reconciliation = + r match { + case Reconciliation.Default => coursier.core.Reconciliation.Default + case Reconciliation.Relaxed => coursier.core.Reconciliation.Relaxed + case Reconciliation.Strict => coursier.core.Reconciliation.Strict + case Reconciliation.SemVer => coursier.core.Reconciliation.SemVer + } + + def reconciliation( + rs: Vector[(ModuleMatchers, Reconciliation)] + ): Vector[(coursier.util.ModuleMatchers, coursier.core.Reconciliation)] = + rs map { case (m, r) => (moduleMatchers(m), reconciliation(r)) } + + def sameVersions( + sv: Seq[Set[InclExclRule]] + ): Seq[(coursier.params.rule.SameVersion, coursier.params.rule.RuleResolution)] = + sv.map { libs => + val matchers = + libs.map(rule => coursier.util.ModuleMatcher(module(rule.organization, rule.name))) + coursier.params.rule.SameVersion(matchers) -> coursier.params.rule.RuleResolution.TryResolve + } + + def dependency(dependency: Dependency): coursier.core.Dependency = + coursier.core.Dependency( + module(dependency.module), + dependency.version, + configuration(dependency.configuration), + dependency.exclusions.map { case (org, name) => + (coursier.core.Organization(org.value), coursier.core.ModuleName(name.value)) + }, + publication(dependency.publication), + dependency.optional, + dependency.transitive + ) + + def project(project: Project): coursier.core.Project = + coursier.core.Project( + module(project.module), + project.version, + project.dependencies.map { case (conf, dep) => + configuration(conf) -> dependency(dep) + }, + project.configurations.map { case (k, l) => + configuration(k) -> l.map(configuration) + }, + None, + Nil, + project.properties, + Nil, + None, + None, + project.packagingOpt.map(t => coursier.core.Type(t.value)), + relocated = false, + None, + project.publications.map { case (conf, pub) => + configuration(conf) -> publication(pub) + }, + coursier.core.Info( + project.info.description, + project.info.homePage, + project.info.licenses, + project.info.developers.map { dev => + coursier.core.Info.Developer( + dev.id, + dev.name, + dev.url + ) + 
}, + project.info.publication.map { dt => + coursier.core.Versions.DateTime( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second + ) + }, + None // TODO Add scm field in lmcoursier.definitions.Info? + ) + ) + + def credentials(credentials: Credentials): coursier.credentials.Credentials = + credentials match { + case d: DirectCredentials => + coursier.credentials + .DirectCredentials() + .withHost(d.host) + .withUsername(d.username) + .withPassword(d.password) + .withRealm(d.realm) + .withOptional(d.optional) + .withMatchHost(d.matchHost) + .withHttpsOnly(d.httpsOnly) + case f: FileCredentials => + coursier.credentials + .FileCredentials(f.path) + .withOptional(f.optional) + } + + def cacheLogger(logger: CacheLogger): coursier.cache.CacheLogger = + new coursier.cache.CacheLogger { + override def foundLocally(url: String): Unit = + logger.foundLocally(url) + override def downloadingArtifact(url: String): Unit = + logger.downloadingArtifact(url) + override def downloadProgress(url: String, downloaded: Long): Unit = + logger.downloadProgress(url, downloaded) + override def downloadedArtifact(url: String, success: Boolean): Unit = + logger.downloadedArtifact(url, success) + override def checkingUpdates(url: String, currentTimeOpt: Option[Long]): Unit = + logger.checkingUpdates(url, currentTimeOpt) + override def checkingUpdatesResult( + url: String, + currentTimeOpt: Option[Long], + remoteTimeOpt: Option[Long] + ): Unit = + logger.checkingUpdatesResult(url, currentTimeOpt, remoteTimeOpt) + override def downloadLength( + url: String, + totalLength: Long, + alreadyDownloaded: Long, + watching: Boolean + ): Unit = + logger.downloadLength(url, totalLength, alreadyDownloaded, watching) + override def gettingLength(url: String): Unit = + logger.gettingLength(url) + override def gettingLengthResult(url: String, length: Option[Long]): Unit = + logger.gettingLengthResult(url, length) + override def removedCorruptFile(url: String, reason: Option[String]): Unit = + logger.removedCorruptFile(url, reason) + override def init(sizeHint: Option[Int] = None): Unit = + logger.init(sizeHint) + override def stop(): Unit = + logger.stop() + } + + def strict(strict: Strict): coursier.params.rule.Strict = + coursier.params.rule + .Strict() + .withInclude(strict.include.map { case (o, n) => + coursier.util.ModuleMatcher( + coursier.Module(coursier.Organization(o), coursier.ModuleName(n)) + ) + }) + .withExclude(strict.exclude.map { case (o, n) => + coursier.util.ModuleMatcher( + coursier.Module(coursier.Organization(o), coursier.ModuleName(n)) + ) + }) + .withIncludeByDefault(strict.includeByDefault) + .withIgnoreIfForcedVersion(strict.ignoreIfForcedVersion) + .withSemVer(strict.semVer) + + def cachePolicy(r: CachePolicy): coursier.cache.CachePolicy = + r match { + case CachePolicy.LocalOnly => coursier.cache.CachePolicy.LocalOnly + case CachePolicy.LocalOnlyIfValid => coursier.cache.CachePolicy.LocalOnlyIfValid + case CachePolicy.LocalUpdateChanging => coursier.cache.CachePolicy.LocalUpdateChanging + case CachePolicy.LocalUpdate => coursier.cache.CachePolicy.LocalUpdate + case CachePolicy.UpdateChanging => coursier.cache.CachePolicy.UpdateChanging + case CachePolicy.Update => coursier.cache.CachePolicy.Update + case CachePolicy.FetchMissing => coursier.cache.CachePolicy.FetchMissing + case CachePolicy.ForceDownload => coursier.cache.CachePolicy.ForceDownload + } +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/ArtifactsParams.scala 
b/lm-coursier/src/main/scala/lmcoursier/internal/ArtifactsParams.scala new file mode 100644 index 000000000..c85daebca --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/ArtifactsParams.scala @@ -0,0 +1,19 @@ +package lmcoursier.internal + +import coursier.cache.{ CacheLogger, FileCache } +import coursier.core.{ Classifier, Resolution } +import coursier.util.Task + +// private[coursier] +final case class ArtifactsParams( + classifiers: Option[Seq[Classifier]], + resolutions: Seq[Resolution], + includeSignatures: Boolean, + loggerOpt: Option[CacheLogger], + projectName: String, + sbtClassifiers: Boolean, + cache: FileCache[Task], + parallel: Int, + classpathOrder: Boolean, + missingOk: Boolean +) diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/ArtifactsRun.scala b/lm-coursier/src/main/scala/lmcoursier/internal/ArtifactsRun.scala new file mode 100644 index 000000000..b304dbbc1 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/ArtifactsRun.scala @@ -0,0 +1,77 @@ +package lmcoursier.internal + +import coursier.Artifacts +import coursier.cache.CacheLogger +import coursier.cache.loggers.{ FallbackRefreshDisplay, ProgressBarRefreshDisplay, RefreshLogger } +import coursier.core.Type +import sbt.util.Logger + +// private[lmcoursier] +object ArtifactsRun { + + def apply( + params: ArtifactsParams, + verbosityLevel: Int, + log: Logger + ): Either[coursier.error.FetchError, Artifacts.Result] = { + + val printOptionalMessage = verbosityLevel >= 0 && verbosityLevel <= 1 + + val artifactInitialMessage = + if (verbosityLevel >= 0) + s"Fetching artifacts of ${params.projectName}" + + (if (params.sbtClassifiers) " (sbt classifiers)" else "") + else + "" + + val coursierLogger = params.loggerOpt.getOrElse { + RefreshLogger.create( + if (RefreshLogger.defaultFallbackMode) + new FallbackRefreshDisplay() + else + ProgressBarRefreshDisplay.create( + if (printOptionalMessage) log.info(artifactInitialMessage), + if (printOptionalMessage || verbosityLevel >= 2) + log.info( + s"Fetched artifacts of ${params.projectName}" + + (if (params.sbtClassifiers) " (sbt classifiers)" else "") + ) + ) + ) + } + + Lock.maybeSynchronized(needsLock = + params.loggerOpt.nonEmpty || !RefreshLogger.defaultFallbackMode + ) { + result(params, coursierLogger) + } + } + + private def result( + params: ArtifactsParams, + coursierLogger: CacheLogger + ): Either[coursier.error.FetchError, Artifacts.Result] = + coursier + .Artifacts() + .withResolutions(params.resolutions) + .withArtifactTypes(Set(Type.all)) + .withClassifiers(params.classifiers.getOrElse(Nil).toSet) + .withClasspathOrder(params.classpathOrder) + .addExtraArtifacts { l => + if (params.includeSignatures) + l.flatMap(_._3.extra.get("sig").toSeq) + else + Nil + } + .addTransformArtifacts { artifacts => + if (params.missingOk) + artifacts.map { case (dependency, publication, artifact) => + (dependency, publication, artifact.withOptional(true)) + } + else + artifacts + } + .withCache(params.cache.withLogger(coursierLogger)) + .eitherResult() + +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/CoursierModuleDescriptor.scala b/lm-coursier/src/main/scala/lmcoursier/internal/CoursierModuleDescriptor.scala new file mode 100644 index 000000000..006ec2441 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/CoursierModuleDescriptor.scala @@ -0,0 +1,22 @@ +package lmcoursier.internal + +import lmcoursier.CoursierConfiguration +import sbt.librarymanagement._ + +private[lmcoursier] final case class 
CoursierModuleDescriptor( + descriptor: ModuleDescriptorConfiguration, + conf: CoursierConfiguration +) extends ModuleDescriptor { + + def directDependencies: Vector[ModuleID] = + descriptor.dependencies + + def scalaModuleInfo: Option[ScalaModuleInfo] = + descriptor.scalaModuleInfo + + def moduleSettings: CoursierModuleSettings = + CoursierModuleSettings() + + lazy val extraInputHash: Long = + conf.## +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/CoursierModuleSettings.scala b/lm-coursier/src/main/scala/lmcoursier/internal/CoursierModuleSettings.scala new file mode 100644 index 000000000..badad3183 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/CoursierModuleSettings.scala @@ -0,0 +1,5 @@ +package lmcoursier.internal + +import sbt.librarymanagement.ModuleSettings + +private[lmcoursier] case class CoursierModuleSettings() extends ModuleSettings diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/InterProjectRepository.scala b/lm-coursier/src/main/scala/lmcoursier/internal/InterProjectRepository.scala new file mode 100644 index 000000000..5e6c584ac --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/InterProjectRepository.scala @@ -0,0 +1,35 @@ +package lmcoursier.internal + +import coursier.core._ +import coursier.util.{ EitherT, Monad } + +// private[coursier] +final case class InterProjectRepository(projects: Seq[Project]) extends Repository { + + private val map = projects + .map(proj => proj.moduleVersion -> proj) + .toMap + + def find[F[_]]( + module: Module, + version: String, + fetch: Repository.Fetch[F] + )(implicit + F: Monad[F] + ): EitherT[F, String, (ArtifactSource, Project)] = { + + val res = map + .get((module, version)) + .map((this, _)) + .toRight("Not found") + + EitherT(F.point(res)) + } + + override def artifacts( + dependency: Dependency, + project: Project, + overrideClassifiers: Option[Seq[Classifier]] + ) = + Nil +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/Lock.scala b/lm-coursier/src/main/scala/lmcoursier/internal/Lock.scala new file mode 100644 index 000000000..33a5cf7c8 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/Lock.scala @@ -0,0 +1,10 @@ +package lmcoursier.internal + +private[lmcoursier] object Lock { + private val lock = new Object + + /* Progress bars require us to only work on one module at the time. 
Without those we can go faster */ + def maybeSynchronized[T](needsLock: Boolean)(f: => T): T = + if (needsLock) lock.synchronized(f) + else f +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/ResolutionParams.scala b/lm-coursier/src/main/scala/lmcoursier/internal/ResolutionParams.scala new file mode 100644 index 000000000..2dc9e7bae --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/ResolutionParams.scala @@ -0,0 +1,132 @@ +package lmcoursier.internal + +import java.io.File + +import coursier.cache.{ CacheLogger, FileCache } +import coursier.ProjectCache +import coursier.core._ +import coursier.params.rule.Strict +import lmcoursier.FallbackDependency +import lmcoursier.definitions.ToCoursier +import coursier.util.Task + +import scala.collection.mutable +import scala.concurrent.duration.{ DurationInt, FiniteDuration } + +// private[coursier] +final case class ResolutionParams( + dependencies: Seq[(Configuration, Dependency)], + fallbackDependencies: Seq[FallbackDependency], + orderedConfigs: Seq[(Configuration, Seq[Configuration])], + autoScalaLibOpt: Option[(Organization, String)], + mainRepositories: Seq[Repository], + parentProjectCache: ProjectCache, + interProjectDependencies: Seq[Project], + internalRepositories: Seq[Repository], + sbtClassifiers: Boolean, + projectName: String, + loggerOpt: Option[CacheLogger], + cache: coursier.cache.FileCache[Task], + parallel: Int, + params: coursier.params.ResolutionParams, + strictOpt: Option[Strict], + missingOk: Boolean, + retry: (FiniteDuration, Int) +) { + + lazy val allConfigExtends: Map[Configuration, Set[Configuration]] = { + val map = new mutable.HashMap[Configuration, Set[Configuration]] + for ((config, extends0) <- orderedConfigs) { + val allExtends = extends0.iterator + // the else of the getOrElse shouldn't be hit (because of the ordering of the configurations) + .foldLeft(Set(config))((acc, ext) => acc ++ map.getOrElse(ext, Set(ext))) + map += config -> allExtends + } + map.toMap + } + + val fallbackDependenciesRepositories = + if (fallbackDependencies.isEmpty) + Nil + else { + val map = fallbackDependencies.map { dep => + (ToCoursier.module(dep.module), dep.version) -> ((dep.url, dep.changing)) + }.toMap + + Seq( + TemporaryInMemoryRepository(map, cache) + ) + } + + lazy val resolutionKey = { + val cleanCache = cache + .withPool(null) + .withLogger(null) + .withSync(null) + SbtCoursierCache.ResolutionKey( + dependencies, + internalRepositories, + mainRepositories, + fallbackDependenciesRepositories, + copy( + parentProjectCache = Map.empty, + loggerOpt = None, + parallel = 0, + cache = cleanCache + ), + cleanCache, + missingOk + ) + } + + override lazy val hashCode = + this match { + case ResolutionParams( + a1, + a2, + a3, + a4, + a5, + a6, + a7, + a8, + a9, + a10, + a11, + a12, + a13, + a14, + a15, + a16, + a17 + ) => + (a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17).## + } + + // ResolutionParams.unapply(this).get.## + +} + +// private[coursier] +object ResolutionParams { + + def defaultIvyProperties(ivyHomeOpt: Option[File]): Map[String, String] = { + + val ivyHome = sys.props + .get("ivy.home") + .orElse(ivyHomeOpt.map(_.getAbsoluteFile.toURI.getPath)) + .getOrElse(new File(sys.props("user.home")).toURI.getPath + ".ivy2") + + val sbtIvyHome = sys.props.getOrElse( + "sbt.ivy.home", + ivyHome + ) + + Map( + "ivy.home" -> ivyHome, + "sbt.ivy.home" -> sbtIvyHome + ) ++ sys.props + } + + val defaultRetry: (FiniteDuration, Int) = (1.seconds, 3) +} diff --git 
a/lm-coursier/src/main/scala/lmcoursier/internal/ResolutionRun.scala b/lm-coursier/src/main/scala/lmcoursier/internal/ResolutionRun.scala new file mode 100644 index 000000000..d7ee089e4 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/ResolutionRun.scala @@ -0,0 +1,221 @@ +package lmcoursier.internal + +import coursier.{ Resolution, Resolve } +import coursier.cache.internal.ThreadUtil +import coursier.cache.loggers.{ FallbackRefreshDisplay, ProgressBarRefreshDisplay, RefreshLogger } +import coursier.core._ +import coursier.error.ResolutionError +import coursier.error.ResolutionError.CantDownloadModule +import coursier.ivy.IvyRepository +import coursier.maven.MavenRepositoryLike +import coursier.params.rule.RuleResolution +import coursier.util.Task +import sbt.util.Logger + +import scala.concurrent.duration.FiniteDuration +import scala.collection.mutable + +// private[coursier] +object ResolutionRun { + + private def resolution( + params: ResolutionParams, + verbosityLevel: Int, + log: Logger, + configs: Set[Configuration], + startingResolutionOpt: Option[Resolution] + ): Either[coursier.error.ResolutionError, Resolution] = { + + val isScalaToolConfig = configs(Configuration("scala-tool")) + // Ref coursier/coursier#1340 coursier/coursier#1442 + // This treats ScalaTool as a sandbox configuration isolated from other subprojects. + // Likely this behavior is needed only for ScalaTool configuration where the scala-xml + // build's ScalaTool configuration transitively loops back to scala-xml's Compile artifacts. + // In most other cases, it's desirable to allow "x->compile" relationship. + def isSandboxConfig: Boolean = isScalaToolConfig + + val repositories = + params.internalRepositories.drop(if (isSandboxConfig) 1 else 0) ++ + params.mainRepositories ++ + params.fallbackDependenciesRepositories + + val rules = + params.params.rules ++ params.strictOpt.map(s => Seq((s, RuleResolution.Fail))).getOrElse(Nil) + + val printOptionalMessage = verbosityLevel >= 0 && verbosityLevel <= 1 + + def depsRepr(deps: Seq[(Configuration, Dependency)]) = + deps + .map { case (config, dep) => + s"${dep.module}:${dep.version}:${config.value}->${dep.configuration.value}" + } + .sorted + .distinct + + val initialMessage = + Seq( + if (verbosityLevel >= 0) + Seq( + s"Updating ${params.projectName}" + (if (params.sbtClassifiers) " (sbt classifiers)" + else "") + ) + else + Nil, + if (verbosityLevel >= 2) + depsRepr(params.dependencies).map(depRepr => s" $depRepr") + else + Nil + ).flatten.mkString("\n") + + if (verbosityLevel >= 2) { + val repoReprs = repositories.map { + case r: IvyRepository => + s"ivy:${r.pattern}" + case _: InterProjectRepository => + "inter-project" + case r: MavenRepositoryLike => + r.root + case r => + // should not happen + r.toString + } + + log.info( + "Repositories:\n" + + repoReprs.map(" " + _).mkString("\n") + ) + } + + if (verbosityLevel >= 2) + log.info(initialMessage) + + val resolveTask: Resolve[Task] = { + Resolve() + // re-using various caches from a resolution of a configuration we extend + .withInitialResolution(startingResolutionOpt) + .withDependencies( + params.dependencies.collect { + case (config, dep) if configs(config) => + dep + } + ) + .withRepositories(repositories) + .withResolutionParams( + params.params + .addForceVersion( + (if (isSandboxConfig) Nil + else params.interProjectDependencies.map(_.moduleVersion)): _* + ) + .withForceScalaVersion(params.autoScalaLibOpt.nonEmpty) + .withScalaVersionOpt(params.autoScalaLibOpt.map(_._2)) + 
.withTypelevel(params.params.typelevel) + .withRules(rules) + ) + .withCache( + params.cache + .withLogger( + params.loggerOpt.getOrElse { + RefreshLogger.create( + if (RefreshLogger.defaultFallbackMode) + new FallbackRefreshDisplay() + else + ProgressBarRefreshDisplay.create( + if (printOptionalMessage) log.info(initialMessage), + if (printOptionalMessage || verbosityLevel >= 2) + log.info(s"Resolved ${params.projectName} dependencies") + ) + ) + } + ) + ) + } + + val (period, maxAttempts) = params.retry + val finalResult: Either[ResolutionError, Resolution] = { + + def retry( + attempt: Int, + waitOnError: FiniteDuration + ): Task[Either[ResolutionError, Resolution]] = + resolveTask.io.attempt + .flatMap { + case Left(e: ResolutionError) => + val hasConnectionTimeouts = e.errors.exists { + case err: CantDownloadModule => + err.perRepositoryErrors.exists(_.contains("Connection timed out")) + case _ => false + } + if (hasConnectionTimeouts) + if (attempt + 1 >= maxAttempts) { + log.error(s"Failed, maximum iterations ($maxAttempts) reached") + Task.point(Left(e)) + } else { + log.warn(s"Attempt ${attempt + 1} failed: $e") + Task.completeAfter(retryScheduler, waitOnError).flatMap { _ => + retry(attempt + 1, waitOnError * 2) + } + } + else + Task.point(Left(e)) + case Left(ex) => + Task.fail(ex) + case Right(value) => + Task.point(Right(value)) + } + + retry(0, period).unsafeRun()(resolveTask.cache.ec) + } + + finalResult match { + case Left(err) if params.missingOk => Right(err.resolution) + case others => others + } + } + + def resolutions( + params: ResolutionParams, + verbosityLevel: Int, + log: Logger + ): Either[coursier.error.ResolutionError, Map[Configuration, Resolution]] = { + + // TODO Warn about possible duplicated modules from source repositories? 
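+
+    // Configurations are resolved one at a time, in the order produced by
+    // Inputs.orderedConfigurations: a configuration is only processed once the
+    // configurations it extends have been resolved, so its resolution can be seeded
+    // from one of theirs (initRes below) and reuse their caches. The per-configuration
+    // results are then memoized in SbtCoursierCache, keyed by params.resolutionKey.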
+ + if (verbosityLevel >= 2) { + log.info("InterProjectRepository") + for (p <- params.interProjectDependencies) + log.info(s" ${p.module}:${p.version}") + } + + SbtCoursierCache.default.resolutionOpt(params.resolutionKey).map(Right(_)).getOrElse { + val resOrError = + Lock.maybeSynchronized(needsLock = + params.loggerOpt.nonEmpty || !RefreshLogger.defaultFallbackMode + ) { + val map = new mutable.HashMap[Configuration, Resolution] + val either = params.orderedConfigs.foldLeft[Either[coursier.error.ResolutionError, Unit]]( + Right(()) + ) { case (acc, (config, extends0)) => + for { + _ <- acc + initRes = { + val it = extends0.iterator.flatMap(map.get(_).iterator) + if (it.hasNext) Some(it.next()) + else None + } + allExtends = params.allConfigExtends.getOrElse(config, Set.empty) + res <- resolution(params, verbosityLevel, log, allExtends, initRes) + } yield { + map += config -> res + () + } + } + either.map(_ => map.toMap) + } + for (res <- resOrError) + SbtCoursierCache.default.putResolution(params.resolutionKey, res) + resOrError + } + } + + private lazy val retryScheduler = ThreadUtil.fixedScheduledThreadPool(1) +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/Resolvers.scala b/lm-coursier/src/main/scala/lmcoursier/internal/Resolvers.scala new file mode 100644 index 000000000..56fba5b77 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/Resolvers.scala @@ -0,0 +1,201 @@ +package lmcoursier.internal + +import java.net.MalformedURLException +import java.nio.file.Paths + +import coursier.cache.CacheUrl +import coursier.core.{ Authentication, Repository } +import coursier.ivy.IvyRepository +import coursier.maven.SbtMavenRepository +import org.apache.ivy.plugins.resolver.IBiblioResolver +import sbt.librarymanagement.{ Configuration => _, MavenRepository => _, _ } +import sbt.util.Logger + +import scala.jdk.CollectionConverters._ + +object Resolvers { + + private def mavenCompatibleBaseOpt(patterns: Patterns): Option[String] = + if (patterns.isMavenCompatible) { + // input : /Users/user/custom/repo/[organisation]/[module](_[scalaVersion])(_[sbtVersion])/[revision]/[artifact]-[revision](-[classifier]).[ext] + // output : /Users/user/custom/repo/ + def basePattern(pattern: String): String = pattern.takeWhile(c => c != '[' && c != '(') + + val baseIvyPattern = basePattern(patterns.ivyPatterns.head) + val baseArtifactPattern = basePattern(patterns.artifactPatterns.head) + + if (baseIvyPattern == baseArtifactPattern) + Some(baseIvyPattern) + else + None + } else + None + + private def mavenRepositoryOpt( + root: String, + log: Logger, + authentication: Option[Authentication], + classLoaders: Seq[ClassLoader] + ): Option[SbtMavenRepository] = + try { + CacheUrl.url(root, classLoaders) // ensure root is a URL whose protocol can be handled here + val root0 = if (root.endsWith("/")) root else root + "/" + Some( + SbtMavenRepository( + root0, + authentication = authentication + ) + ) + } catch { + case e: MalformedURLException => + log.warn( + "Error parsing Maven repository base " + + root + + Option(e.getMessage).fold("")(" (" + _ + ")") + + ", ignoring it" + ) + + None + } + + // this handles whitespace in path + private def pathToUriString(path: String): String = { + val stopAtIdx = path.indexWhere(c => c == '[' || c == '$' || c == '(') + if (stopAtIdx > 0) { + val (pathPart, patternPart) = path.splitAt(stopAtIdx) + Paths.get(pathPart).toUri.toASCIIString + patternPart + } else if (stopAtIdx == 0) + "file://" + path + else + Paths.get(path).toUri.toASCIIString + } + + 
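+  // Maps an sbt Resolver to the corresponding coursier Repository, when supported:
+  // Maven repositories become SbtMavenRepository instances, single-pattern file and URL
+  // repositories become either a Maven-compatible repository or an IvyRepository, and
+  // IBiblio-backed RawRepositories are handled like Maven-compatible pattern repositories.
+  // The special "inter-project" repository and unrecognized resolvers yield None,
+  // with a warning logged for the latter.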
def repository( + resolver: Resolver, + ivyProperties: Map[String, String], + log: Logger, + authentication: Option[Authentication], + classLoaders: Seq[ClassLoader] + ): Option[Repository] = + resolver match { + case r: sbt.librarymanagement.MavenRepository => + mavenRepositoryOpt(r.root, log, authentication, classLoaders) + + case r: FileRepository + if r.patterns.ivyPatterns.lengthCompare(1) == 0 && + r.patterns.artifactPatterns.lengthCompare(1) == 0 => + val mavenCompatibleBaseOpt0 = mavenCompatibleBaseOpt(r.patterns) + + mavenCompatibleBaseOpt0 match { + case None => + val repo = IvyRepository.parse( + pathToUriString(r.patterns.artifactPatterns.head), + metadataPatternOpt = Some(pathToUriString(r.patterns.ivyPatterns.head)), + changing = Some(true), + properties = ivyProperties, + dropInfoAttributes = true, + authentication = authentication + ) match { + case Left(err) => + sys.error( + s"Cannot parse Ivy patterns ${r.patterns.artifactPatterns.head} and ${r.patterns.ivyPatterns.head}: $err" + ) + case Right(repo) => + repo + } + + Some(repo) + + case Some(mavenCompatibleBase) => + mavenRepositoryOpt( + pathToUriString(mavenCompatibleBase), + log, + authentication, + classLoaders + ) + } + + case r: URLRepository if patternMatchGuard(r.patterns) => + parseMavenCompatResolver(log, ivyProperties, authentication, r.patterns, classLoaders) + + case raw: RawRepository + if raw.name == "inter-project" => // sbt.RawRepository.equals just compares names anyway + None + + // Pattern Match resolver-type-specific RawRepositories + case IBiblioRepository(p) => + parseMavenCompatResolver(log, ivyProperties, authentication, p, classLoaders) + + case other => + log.warn(s"Unrecognized repository ${other.name}, ignoring it") + None + } + + private object IBiblioRepository { + + private def stringVector(v: java.util.List[_]): Vector[String] = + Option(v).map(_.asScala.toVector).getOrElse(Vector.empty).collect { case s: String => + s + } + + private def patterns(resolver: IBiblioResolver): Patterns = Patterns( + ivyPatterns = stringVector(resolver.getIvyPatterns), + artifactPatterns = stringVector(resolver.getArtifactPatterns), + isMavenCompatible = resolver.isM2compatible, + descriptorOptional = !resolver.isUseMavenMetadata, + skipConsistencyCheck = !resolver.isCheckconsistency + ) + + def unapply(r: Resolver): Option[Patterns] = + r match { + case raw: RawRepository => + raw.resolver match { + case b: IBiblioResolver => + Some(patterns(b)) + .filter(patternMatchGuard) + case _ => + None + } + case _ => + None + } + } + + private def patternMatchGuard(patterns: Patterns): Boolean = + patterns.ivyPatterns.lengthCompare(1) == 0 && + patterns.artifactPatterns.lengthCompare(1) == 0 + + private def parseMavenCompatResolver( + log: Logger, + ivyProperties: Map[String, String], + authentication: Option[Authentication], + patterns: Patterns, + classLoaders: Seq[ClassLoader], + ): Option[Repository] = { + val mavenCompatibleBaseOpt0 = mavenCompatibleBaseOpt(patterns) + + mavenCompatibleBaseOpt0 match { + case None => + val repo = IvyRepository.parse( + patterns.artifactPatterns.head, + metadataPatternOpt = Some(patterns.ivyPatterns.head), + changing = None, + properties = ivyProperties, + dropInfoAttributes = true, + authentication = authentication + ) match { + case Left(err) => + sys.error( + s"Cannot parse Ivy patterns ${patterns.artifactPatterns.head} and ${patterns.ivyPatterns.head}: $err" + ) + case Right(repo) => + repo + } + + Some(repo) + + case Some(mavenCompatibleBase) => + 
mavenRepositoryOpt(mavenCompatibleBase, log, authentication, classLoaders) + } + } +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/SbtBootJars.scala b/lm-coursier/src/main/scala/lmcoursier/internal/SbtBootJars.scala new file mode 100644 index 000000000..d38c5f53e --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/SbtBootJars.scala @@ -0,0 +1,21 @@ +package lmcoursier.internal + +import java.io.File + +import coursier.core.{ Module, ModuleName, Organization } + +// private[coursier] +object SbtBootJars { + def apply( + scalaOrg: Organization, + scalaVersion: String, + jars: Seq[File] + ): Map[(Module, String), File] = + jars.collect { + case jar if jar.getName.endsWith(".jar") => + val name = ModuleName(jar.getName.stripSuffix(".jar")) + val mod = Module(scalaOrg, name, Map.empty) + + (mod, scalaVersion) -> jar + }.toMap +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/SbtCoursierCache.scala b/lm-coursier/src/main/scala/lmcoursier/internal/SbtCoursierCache.scala new file mode 100644 index 000000000..35dcdad86 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/SbtCoursierCache.scala @@ -0,0 +1,65 @@ +package lmcoursier.internal + +import java.util.concurrent.ConcurrentHashMap + +import coursier.core._ +import sbt.librarymanagement.UpdateReport +import coursier.cache.FileCache +import coursier.util.Task + +// private[coursier] +class SbtCoursierCache { + + import SbtCoursierCache._ + + private val resolutionsCache = + new ConcurrentHashMap[ResolutionKey, Map[Configuration, Resolution]] + // these may actually not need to be cached any more, now that the resolutions + // are cached + private val reportsCache = new ConcurrentHashMap[ReportKey, UpdateReport] + + def resolutionOpt(key: ResolutionKey): Option[Map[Configuration, Resolution]] = + Option(resolutionsCache.get(key)) + def putResolution(key: ResolutionKey, res: Map[Configuration, Resolution]): Unit = + resolutionsCache.put(key, res) + + def reportOpt(key: ReportKey): Option[UpdateReport] = + Option(reportsCache.get(key)) + def putReport(key: ReportKey, report: UpdateReport): Unit = + reportsCache.put(key, report) + + def clear(): Unit = { + resolutionsCache.clear() + reportsCache.clear() + } + + def isEmpty: Boolean = + resolutionsCache.isEmpty && reportsCache.isEmpty + +} + +// private[coursier] +object SbtCoursierCache { + + final case class ResolutionKey( + dependencies: Seq[(Configuration, Dependency)], + internalRepositories: Seq[Repository], + mainRepositories: Seq[Repository], + fallbackRepositories: Seq[Repository], + params: ResolutionParams, + cache: FileCache[Task], + sbtClassifiers: Boolean + ) + + final case class ReportKey( + dependencies: Seq[(Configuration, Dependency)], + resolution: Map[Configuration, Resolution], + withClassifiers: Boolean, + sbtClassifiers: Boolean, + includeSignatures: Boolean + ) + + // private[coursier] + val default = new SbtCoursierCache + +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/SbtUpdateReport.scala b/lm-coursier/src/main/scala/lmcoursier/internal/SbtUpdateReport.scala new file mode 100644 index 000000000..af766b719 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/SbtUpdateReport.scala @@ -0,0 +1,426 @@ +package lmcoursier.internal + +import java.io.File +import java.util.GregorianCalendar +import java.util.concurrent.ConcurrentHashMap +import coursier.cache.CacheUrl +import coursier.{ Attributes, Dependency, Module, Project, Resolution } +import coursier.core.{ Classifier, Configuration, 
Extension, Info, Publication, Type } +import coursier.maven.MavenAttributes +import coursier.util.Artifact +import sbt.librarymanagement.{ Artifact => _, Configuration => _, _ } +import sbt.util.Logger + +import scala.annotation.tailrec +import coursier.core.MinimizedExclusions + +private[internal] object SbtUpdateReport { + + private def caching[K, V](f: K => V): K => V = { + + val cache = new ConcurrentHashMap[K, V] + + key => + val previousValueOpt = Option(cache.get(key)) + + previousValueOpt.getOrElse { + val value = f(key) + val concurrentValueOpt = Option(cache.putIfAbsent(key, value)) + concurrentValueOpt.getOrElse(value) + } + } + + private def infoProperties(project: Project): Seq[(String, String)] = + project.properties.filter(_._1.startsWith("info.")) + + private val moduleId = caching[(Dependency, String, Map[String, String]), ModuleID] { + case (dependency, version, extraProperties) => + val mod = sbt.librarymanagement.ModuleID( + dependency.module.organization.value, + dependency.module.name.value, + version + ) + mod + .withConfigurations( + Some(dependency.configuration.value) + .filter(_.nonEmpty) // ??? + ) + .withExtraAttributes(dependency.module.attributes ++ extraProperties) + .withExclusions( + dependency.minimizedExclusions.toVector + .map { case (org, name) => + sbt.librarymanagement + .InclExclRule() + .withOrganization(org.value) + .withName(name.value) + } + ) + .withIsTransitive(dependency.transitive) + } + + private val artifact = caching[ + (Module, Map[String, String], Publication, Artifact, Seq[ClassLoader]), + sbt.librarymanagement.Artifact + ] { case (module, extraProperties, pub, artifact, classLoaders) => + sbt.librarymanagement + .Artifact(pub.name) + .withType(pub.`type`.value) + .withExtension(pub.ext.value) + .withClassifier( + Some(pub.classifier) + .filter(_.nonEmpty) + .orElse(MavenAttributes.typeDefaultClassifierOpt(pub.`type`)) + .map(_.value) + ) + .withUrl(Some(CacheUrl.url(artifact.url, classLoaders).toURI)) + .withExtraAttributes(module.attributes ++ extraProperties) + } + + private val moduleReport = caching[ + ( + Dependency, + Seq[(Dependency, ProjectInfo)], + Project, + Seq[(Publication, Artifact, Option[File])], + Seq[ClassLoader] + ), + ModuleReport + ] { case (dependency, dependees, project, artifacts, classLoaders) => + val sbtArtifacts = artifacts.collect { case (pub, artifact0, Some(file)) => + ( + artifact((dependency.module, infoProperties(project).toMap, pub, artifact0, classLoaders)), + file + ) + } + val sbtMissingArtifacts = artifacts.collect { case (pub, artifact0, None) => + artifact((dependency.module, infoProperties(project).toMap, pub, artifact0, classLoaders)) + } + + val publicationDate = project.info.publication.map { dt => + new GregorianCalendar(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) + } + + val callers = dependees.distinct.map { case (dependee, dependeeProj) => + Caller( + moduleId((dependee, dependeeProj.version, Map.empty)), + // FIXME Shouldn't we only keep the configurations pulling dependency? 
+ dependeeProj.configs, + dependee.module.attributes ++ dependeeProj.properties, + // FIXME Set better values here + isForceDependency = false, + isChangingDependency = false, + isTransitiveDependency = dependency.transitive, + isDirectlyForceDependency = false + ) + } + + val rep = ModuleReport( + moduleId((dependency, project.version, infoProperties(project).toMap)), + sbtArtifacts.toVector, + sbtMissingArtifacts.toVector + ) + + rep + // .withStatus(None) + .withPublicationDate(publicationDate) + // .withResolver(None) + // .withArtifactResolver(None) + // .withEvicted(false) + // .withEvictedData(None) + // .withEvictedReason(None) + // .withProblem(None) + .withHomepage(Some(project.info.homePage).filter(_.nonEmpty)) + .withLicenses(project.info.licenses.toVector) + .withExtraAttributes(dependency.module.attributes ++ infoProperties(project)) + // .withIsDefault(None) + // .withBranch(None) + .withConfigurations(project.configurations.keys.toVector.map(c => ConfigRef(c.value))) + .withLicenses(project.info.licenses.toVector) + .withCallers(callers.toVector) + } + + private def moduleReports( + thisModule: (Module, String), + res: Resolution, + interProjectDependencies: Seq[Project], + classifiersOpt: Option[Seq[Classifier]], + artifactFileOpt: (Module, String, Attributes, Artifact) => Option[File], + fullArtifactsOpt: Option[Map[(Dependency, Publication, Artifact), Option[File]]], + log: Logger, + includeSignatures: Boolean, + classpathOrder: Boolean, + missingOk: Boolean, + classLoaders: Seq[ClassLoader] + ): Vector[ModuleReport] = { + + val deps = classifiersOpt match { + case Some(classifiers) => + res.dependencyArtifacts(Some(classifiers.toSeq), classpathOrder) + case None => + res.dependencyArtifacts(None, classpathOrder) + } + + val depArtifacts1 = fullArtifactsOpt match { + case Some(map) => + deps.map { case (d, p, a) => + val d0 = d.withAttributes(d.attributes.withClassifier(p.classifier)) + val a0 = if (missingOk) a.withOptional(true) else a + val f = map.get((d0, p, a0)).flatten + (d, p, a0, f) // not d0 + } + case None => + deps.map { case (d, p, a) => + (d, p, a, None) + } + } + + val depArtifacts0 = depArtifacts1.filter { case (_, pub, _, _) => + pub.attributes != Attributes(Type.pom, Classifier.empty) + } + + val depArtifacts = + if (includeSignatures) { + + val notFound = depArtifacts0.filter(!_._3.extra.contains("sig")) + + if (notFound.isEmpty) + depArtifacts0.flatMap { case (dep, pub, a, f) => + val sigPub = pub + // not too sure about those + .withExt(Extension(pub.ext.value)) + .withType(Type(pub.`type`.value)) + Seq((dep, pub, a, f)) ++ + a.extra.get("sig").toSeq.map((dep, sigPub, _, None)) + } + else { + for ((_, _, a, _) <- notFound) + log.error(s"No signature found for ${a.url}") + sys.error(s"${notFound.length} signature(s) not found") + } + } else + depArtifacts0 + + val groupedDepArtifacts = { + val m = depArtifacts.groupBy(_._1) + val fromLib = depArtifacts.map(_._1).distinct.map { dep => + dep -> m.getOrElse(dep, Nil).map { case (_, pub, a, f) => (pub, a, f) } + } + val fromInterProj = interProjectDependencies + .filter(p => p.module != thisModule._1) + .map(p => Dependency(p.module, p.version) -> Nil) + fromLib ++ fromInterProj + } + + val versions = (Vector( + Dependency(thisModule._1, thisModule._2) + ) ++ res.dependencies.toVector ++ res.rootDependencies.toVector).map { dep => + dep.module -> dep.version + }.toMap + + def clean(dep: Dependency): Dependency = + dep + .withConfiguration(Configuration.empty) + 
.withMinimizedExclusions(MinimizedExclusions.zero) + .withOptional(false) + + def lookupProject(mv: coursier.core.Resolution.ModuleVersion): Option[Project] = + res.projectCache.get(mv) match { + case Some((_, p)) => Some(p) + case _ => + interProjectDependencies.find(p => mv == (p.module, p.version)) + } + + /** + * Assemble the project info, resolving inherited fields. Only implements resolving + * the fields that are relevant for moduleReport + * + * @see https://maven.apache.org/pom.html#Inheritance + * @see https://maven.apache.org/ref/3-LATEST/maven-model-builder/index.html#Inheritance_Assembly + */ + def assemble(project: Project): Project = { + @tailrec + def licenseInfo(project: Project): Seq[Info.License] = { + if (project.info.licenseInfo.nonEmpty || project.parent.isEmpty) + project.info.licenseInfo + else + licenseInfo(lookupProject(project.parent.get).get) + } + project.withInfo( + project.info.withLicenseInfo(licenseInfo(project)) + ) + } + + val m = Dependency(thisModule._1, "") + val directReverseDependencies = res.rootDependencies.toSet + .map(clean) + .map(_.withVersion("")) + .map(dep => dep -> Vector(m)) + .toMap + + val reverseDependencies = { + val transitiveReverseDependencies = res.reverseDependencies.toVector + .map { case (k, v) => clean(k) -> v.map(clean) } + .groupMapReduce(_._1)((_, deps) => deps)(_ ++ _) + + (transitiveReverseDependencies.toVector ++ directReverseDependencies.toVector) + .groupMapReduce(_._1)((_, deps) => deps)(_ ++ _) + } + + groupedDepArtifacts.toVector.map { case (dep, artifacts) => + val proj = lookupProject(dep.moduleVersion).get + val assembledProject = assemble(proj) + + // FIXME Likely flaky... + val dependees = reverseDependencies + .getOrElse(clean(dep.withVersion("")), Vector.empty) + .flatMap { dependee0 => + val version = versions(dependee0.module) + val dependee = dependee0.withVersion(version) + lookupProject(dependee.moduleVersion) match { + case Some(dependeeProj) => + Vector( + ( + dependee, + ProjectInfo( + dependeeProj.version, + dependeeProj.configurations.keys.toVector.map(c => ConfigRef(c.value)), + dependeeProj.properties + ) + ) + ) + case _ => + Vector.empty + } + } + val filesOpt = artifacts.map { case (pub, a, fileOpt) => + val fileOpt0 = fileOpt.orElse { + if (fullArtifactsOpt.isEmpty) + artifactFileOpt(proj.module, proj.version, pub.attributes, a) + else None + } + (pub, a, fileOpt0) + } + moduleReport( + ( + dep, + dependees, + assembledProject, + filesOpt, + classLoaders, + ) + ) + } + } + + def apply( + thisModule: (Module, String), + configDependencies: Map[Configuration, Seq[Dependency]], + resolutions: Seq[(Configuration, Resolution)], + interProjectDependencies: Vector[Project], + classifiersOpt: Option[Seq[Classifier]], + artifactFileOpt: (Module, String, Attributes, Artifact) => Option[File], + fullArtifactsOpt: Option[Map[(Dependency, Publication, Artifact), Option[File]]], + log: Logger, + includeSignatures: Boolean, + classpathOrder: Boolean, + missingOk: Boolean, + forceVersions: Map[Module, String], + classLoaders: Seq[ClassLoader], + ): UpdateReport = { + + val configReports = resolutions.map { case (config, subRes) => + val reports = moduleReports( + thisModule, + subRes, + interProjectDependencies, + classifiersOpt, + artifactFileOpt, + fullArtifactsOpt, + log, + includeSignatures = includeSignatures, + classpathOrder = classpathOrder, + missingOk = missingOk, + classLoaders = classLoaders, + ) + + val reports0 = subRes.rootDependencies match { + case Seq(dep) if 
subRes.projectCache.contains(dep.moduleVersion) => + // quick hack ensuring the module for the only root dependency + // appears first in the update report, see https://github.com/coursier/coursier/issues/650 + val (_, proj) = subRes.projectCache(dep.moduleVersion) + val mod = moduleId((dep, proj.version, infoProperties(proj).toMap)) + val (main, other) = reports.partition { r => + r.module.organization == mod.organization && + r.module.name == mod.name && + r.module.crossVersion == mod.crossVersion + } + main ++ other + case _ => reports + } + + val mainReportDetails = reports0.map { rep => + OrganizationArtifactReport(rep.module.organization, rep.module.name, Vector(rep)) + } + + val evicted = for { + c <- coursier.graph.Conflict(subRes) + // ideally, forceVersions should be taken into account by coursier.core.Resolution itself, when + // it computes transitive dependencies. It only handles forced versions at a global level for now, + // rather than handing them for each dependency (where each dependency could have its own forced + // versions, and apply and pass them to its transitive dependencies, just like for exclusions today). + if !forceVersions.contains(c.module) + projOpt = subRes.projectCache + .get((c.module, c.wantedVersion)) + .orElse(subRes.projectCache.get((c.module, c.version))) + (_, proj) <- projOpt.toSeq + } yield { + val dep = Dependency(c.module, c.wantedVersion) + val dependee = Dependency(c.dependeeModule, c.dependeeVersion) + val dependeeProj = subRes.projectCache.get((c.dependeeModule, c.dependeeVersion)) match { + case Some((_, p)) => + ProjectInfo( + p.version, + p.configurations.keys.toVector.map(c => ConfigRef(c.value)), + p.properties + ) + case None => + // should not happen + ProjectInfo(c.dependeeVersion, Vector.empty, Vector.empty) + } + val rep = moduleReport( + (dep, Seq((dependee, dependeeProj)), proj.withVersion(c.wantedVersion), Nil, classLoaders) + ) + .withEvicted(true) + .withEvictedData(Some("version selection")) // ??? put latest-revision like sbt/ivy here? + OrganizationArtifactReport(c.module.organization.value, c.module.name.value, Vector(rep)) + } + + val details = (mainReportDetails ++ evicted) + .groupBy(r => (r.organization, r.name)) + .toVector // order? 
+ .map { case ((org, name), l) => + val modules = l.flatMap(_.modules) + OrganizationArtifactReport(org, name, modules) + } + + ConfigurationReport( + ConfigRef(config.value), + reports0, + details + ) + } + + UpdateReport( + new File("."), // dummy value + configReports.toVector, + UpdateStats(-1L, -1L, -1L, cached = false), + Map.empty + ) + } + + private case class ProjectInfo( + version: String, + configs: Vector[ConfigRef], + properties: Seq[(String, String)] + ) +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/TemporaryInMemoryRepository.scala b/lm-coursier/src/main/scala/lmcoursier/internal/TemporaryInMemoryRepository.scala new file mode 100644 index 000000000..a537856d8 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/TemporaryInMemoryRepository.scala @@ -0,0 +1,207 @@ +package lmcoursier.internal + +import java.io.{ File, FileNotFoundException, IOException } +import java.net.{ HttpURLConnection, URL, URLConnection } + +import coursier.cache.{ ConnectionBuilder, FileCache } +import coursier.core._ +import coursier.util.{ Artifact, EitherT, Monad } + +import scala.util.Try + +object TemporaryInMemoryRepository { + + def closeConn(conn: URLConnection): Unit = { + Try(conn.getInputStream).toOption.filter(_ != null).foreach(_.close()) + conn match { + case conn0: HttpURLConnection => + Try(conn0.getErrorStream).toOption.filter(_ != null).foreach(_.close()) + conn0.disconnect() + case _ => + } + } + + def exists( + url: URL, + localArtifactsShouldBeCached: Boolean + ): Boolean = + exists(url, localArtifactsShouldBeCached, None) + + def exists( + url: URL, + localArtifactsShouldBeCached: Boolean, + cacheOpt: Option[FileCache[Nothing]] + ): Boolean = { + + // Sometimes HEAD attempts fail even though standard GETs are fine. + // E.g. https://github.com/NetLogo/NetLogo/releases/download/5.3.1/NetLogo.jar + // returning 403s. Hence the second attempt below. + + val protocolSpecificAttemptOpt = { + + def ifFile: Option[Boolean] = { + if (localArtifactsShouldBeCached && !new File(url.toURI).exists()) { + val cachePath = coursier.cache.CacheDefaults.location + // 'file' here stands for the protocol (e.g. it's https instead for https:// URLs) + Some(new File(cachePath, s"file/${url.getPath}").exists()) + } else { + Some(new File(url.toURI).exists()) // FIXME Escaping / de-escaping needed here? + } + } + + def ifHttp: Option[Boolean] = { + // HEAD request attempt, adapted from http://stackoverflow.com/questions/22541629/android-how-can-i-make-an-http-head-request/22545275#22545275 + + var conn: URLConnection = null + try { + conn = ConnectionBuilder(url.toURI.toASCIIString) + .withFollowHttpToHttpsRedirections( + cacheOpt.fold(false)(_.followHttpToHttpsRedirections) + ) + .withFollowHttpsToHttpRedirections( + cacheOpt.fold(false)(_.followHttpsToHttpRedirections) + ) + .withSslSocketFactoryOpt(cacheOpt.flatMap(_.sslSocketFactoryOpt)) + .withHostnameVerifierOpt(cacheOpt.flatMap(_.hostnameVerifierOpt)) + .withMethod("HEAD") + .withMaxRedirectionsOpt(cacheOpt.flatMap(_.maxRedirections)) + .connection() + // Even though the finally clause handles this too, this has to be run here, so that we return Some(true) + // iff this doesn't throw. 
+ conn.getInputStream.close() + Some(true) + } catch { + case _: FileNotFoundException => Some(false) + case _: IOException => None // error other than not found + } finally { + if (conn != null) + closeConn(conn) + } + } + + url.getProtocol match { + case "file" => ifFile + case "http" | "https" => ifHttp + case _ => None + } + } + + def genericAttempt: Boolean = { + var conn: URLConnection = null + try { + conn = url.openConnection() + // NOT setting request type to HEAD here. + conn.getInputStream.close() + true + } catch { + case _: IOException => false + } finally { + if (conn != null) + closeConn(conn) + } + } + + protocolSpecificAttemptOpt + .getOrElse(genericAttempt) + } + + def apply( + fallbacks: Map[(Module, String), (URL, Boolean)] + ): TemporaryInMemoryRepository = + new TemporaryInMemoryRepository(fallbacks, localArtifactsShouldBeCached = false, None) + + def apply( + fallbacks: Map[(Module, String), (URL, Boolean)], + localArtifactsShouldBeCached: Boolean + ): TemporaryInMemoryRepository = + new TemporaryInMemoryRepository(fallbacks, localArtifactsShouldBeCached, None) + + def apply[F[_]]( + fallbacks: Map[(Module, String), (URL, Boolean)], + cache: FileCache[F] + ): TemporaryInMemoryRepository = + new TemporaryInMemoryRepository( + fallbacks, + localArtifactsShouldBeCached = cache.localArtifactsShouldBeCached, + Some(cache.asInstanceOf[FileCache[Nothing]]) + ) + +} + +final class TemporaryInMemoryRepository private ( + val fallbacks: Map[(Module, String), (URL, Boolean)], + val localArtifactsShouldBeCached: Boolean, + val cacheOpt: Option[FileCache[Nothing]] +) extends Repository { + + def find[F[_]]( + module: Module, + version: String, + fetch: Repository.Fetch[F] + )(implicit + F: Monad[F] + ): EitherT[F, String, (ArtifactSource, Project)] = { + + def res = fallbacks + .get((module, version)) + .fold[Either[String, (ArtifactSource, Project)]](Left("No fallback URL found")) { + case (url, _) => + val urlStr = url.toExternalForm + val idx = urlStr.lastIndexOf('/') + + if (idx < 0 || urlStr.endsWith("/")) + Left(s"$url doesn't point to a file") + else { + val (dirUrlStr, fileName) = urlStr.splitAt(idx + 1) + + if (TemporaryInMemoryRepository.exists(url, localArtifactsShouldBeCached, cacheOpt)) { + val proj = Project( + module, + version, + Nil, + Map.empty, + None, + Nil, + Nil, + Nil, + None, + None, + None, + relocated = false, + None, + Nil, + Info.empty + ) + + Right((this, proj)) + } else + Left(s"$fileName not found under $dirUrlStr") + } + } + + // EitherT(F.bind(F.point(()))(_ => F.point(res))) + EitherT(F.map(F.point(()))(_ => res)) + } + + def artifacts( + dependency: Dependency, + project: Project, + overrideClassifiers: Option[Seq[Classifier]] + ): Seq[(Publication, Artifact)] = { + fallbacks + .get(dependency.moduleVersion) + .toSeq + .map { case (url, changing) => + val url0 = url.toString + val ext = url0.substring(url0.lastIndexOf('.') + 1) + val pub = Publication( + dependency.module.name.value, // ??? 
+ Type(ext), + Extension(ext), + Classifier.empty + ) + (pub, Artifact(url0, Map.empty, Map.empty, changing, optional = false, None)) + } + } + +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/UpdateParams.scala b/lm-coursier/src/main/scala/lmcoursier/internal/UpdateParams.scala new file mode 100644 index 000000000..bed75ebce --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/UpdateParams.scala @@ -0,0 +1,50 @@ +package lmcoursier.internal + +import java.io.File + +import coursier.core._ +import coursier.util.Artifact + +// private[coursier] +final case class UpdateParams( + thisModule: (Module, String), + artifacts: Map[Artifact, File], + fullArtifacts: Option[Map[(Dependency, Publication, Artifact), Option[File]]], + classifiers: Option[Seq[Classifier]], + configs: Map[Configuration, Set[Configuration]], + dependencies: Seq[(Configuration, Dependency)], + forceVersions: Map[Module, String], + interProjectDependencies: Seq[Project], + res: Map[Configuration, Resolution], + includeSignatures: Boolean, + sbtBootJarOverrides: Map[(Module, String), File], + classpathOrder: Boolean, + missingOk: Boolean, + classLoaders: Seq[ClassLoader] +) { + + def artifactFileOpt( + module: Module, + version: String, + attributes: Attributes, + artifact: Artifact + ): Option[File] = { + + // Under some conditions, SBT puts the scala JARs of its own classpath + // in the application classpath. Ensuring we return SBT's jars rather than + // JARs from the coursier cache, so that a same JAR doesn't land twice in the + // application classpath (once via SBT jars, once via coursier cache). + val fromBootJars = + if (attributes.classifier.isEmpty && attributes.`type` == Type.jar) + sbtBootJarOverrides.get((module, version)) + else + None + + val artifact0 = + if (missingOk) artifact.withOptional(true) + else artifact + + fromBootJars.orElse(artifacts.get(artifact0)) + } + +} diff --git a/lm-coursier/src/main/scala/lmcoursier/internal/UpdateRun.scala b/lm-coursier/src/main/scala/lmcoursier/internal/UpdateRun.scala new file mode 100644 index 000000000..13e7d77d5 --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/internal/UpdateRun.scala @@ -0,0 +1,92 @@ +package lmcoursier.internal + +import coursier.cache.loggers.RefreshLogger +import coursier.core.Resolution.ModuleVersion +import coursier.core._ +import coursier.util.Print +import sbt.librarymanagement.UpdateReport +import sbt.util.Logger + +// private[coursier] +object UpdateRun { + + // Move back to coursier.util (in core module) after 1.0? + private def allDependenciesByConfig( + res: Map[Configuration, Resolution], + depsByConfig: Map[Configuration, Seq[Dependency]], + configs: Map[Configuration, Set[Configuration]] + ): Map[Configuration, Set[Dependency]] = { + + val allDepsByConfig = depsByConfig.map { case (config, deps) => + config -> res(config).subset(deps).minDependencies + } + + val filteredAllDepsByConfig = allDepsByConfig.map { case (config, allDeps) => + val allExtendedConfigs = configs.getOrElse(config, Set.empty) - config + val inherited = allExtendedConfigs + .flatMap(allDepsByConfig.getOrElse(_, Set.empty)) + + config -> (allDeps -- inherited) + } + + filteredAllDepsByConfig + } + + // Move back to coursier.util (in core module) after 1.0? 
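// A minimal sketch (not part of this diff) of the subtraction performed by
// allDependenciesByConfig above, with plain strings standing in for
// Configuration and Dependency: dependencies inherited from an extended
// configuration are dropped from the extending one, so each dependency is
// reported only once, in its most specific configuration.
private def inheritedSubtractionSketch: Map[String, Set[String]] = {
  val allDepsByConfig = Map("compile" -> Set("a"), "test" -> Set("a", "b"))
  val configs = Map("compile" -> Set("compile"), "test" -> Set("test", "compile"))
  allDepsByConfig.map { case (config, deps) =>
    val inherited = (configs.getOrElse(config, Set.empty[String]) - config)
      .flatMap(allDepsByConfig.getOrElse(_, Set.empty[String]))
    config -> (deps -- inherited) // compile -> Set(a), test -> Set(b)
  }
}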
+ private def dependenciesWithConfig( + res: Map[Configuration, Resolution], + depsByConfig: Map[Configuration, Seq[Dependency]], + configs: Map[Configuration, Set[Configuration]] + ): Set[Dependency] = + allDependenciesByConfig(res, depsByConfig, configs) + .flatMap { case (config, deps) => + deps.map(dep => dep.withConfiguration(config --> dep.configuration)) + } + .groupBy(_.withConfiguration(Configuration.empty)) + .map { case (dep, l) => + dep.withConfiguration(Configuration.join(l.map(_.configuration).toSeq: _*)) + } + .toSet + + def update( + params: UpdateParams, + verbosityLevel: Int, + log: Logger + ): UpdateReport = Lock.maybeSynchronized(needsLock = !RefreshLogger.defaultFallbackMode) { + val depsByConfig = grouped(params.dependencies) + + if (verbosityLevel >= 2) { + val finalDeps = dependenciesWithConfig( + params.res, + depsByConfig, + params.configs + ) + + val projCache = params.res.values.foldLeft(Map.empty[ModuleVersion, Project])( + _ ++ _.projectCache.view.mapValues(_._2).toMap + ) + val repr = Print.dependenciesUnknownConfigs(finalDeps.toVector, projCache) + log.info(repr.split('\n').map(" " + _).mkString("\n")) + } + + SbtUpdateReport( + params.thisModule, + depsByConfig, + params.res.toVector.sortBy(_._1.value), // FIXME Order by config topologically? + params.interProjectDependencies.toVector, + params.classifiers, + params.artifactFileOpt, + params.fullArtifacts, + log, + includeSignatures = params.includeSignatures, + classpathOrder = params.classpathOrder, + missingOk = params.missingOk, + params.forceVersions, + params.classLoaders, + ) + } + + private def grouped[K, V](map: Seq[(K, V)]): Map[K, Seq[V]] = + map.groupMap(_._1)((_, values) => values) + +} diff --git a/lm-coursier/src/main/scala/lmcoursier/syntax/package.scala b/lm-coursier/src/main/scala/lmcoursier/syntax/package.scala new file mode 100644 index 000000000..676fc1eae --- /dev/null +++ b/lm-coursier/src/main/scala/lmcoursier/syntax/package.scala @@ -0,0 +1,261 @@ +package lmcoursier + +import coursier.cache.CacheDefaults +import lmcoursier.credentials._ +import lmcoursier.definitions._ +import sbt.librarymanagement.{ Resolver, UpdateConfiguration } +import xsbti.Logger + +import scala.concurrent.duration.{ Duration, FiniteDuration } +import java.io.File + +package object syntax { + implicit class CoursierConfigurationModule(value: CoursierConfiguration.type) { + @deprecated( + "Legacy cache location support was dropped, this method does nothing.", + "2.0.0-RC6-10" + ) + def checkLegacyCache(): Unit = () + + def apply( + log: Logger, + resolvers: Vector[Resolver], + parallelDownloads: Int, + maxIterations: Int, + sbtScalaOrganization: String, + sbtScalaVersion: String, + sbtScalaJars: Vector[File], + interProjectDependencies: Vector[Project], + excludeDependencies: Vector[(String, String)], + fallbackDependencies: Vector[FallbackDependency], + autoScalaLibrary: Boolean, + hasClassifiers: Boolean, + classifiers: Vector[String], + mavenProfiles: Vector[String], + scalaOrganization: String, + scalaVersion: String, + authenticationByRepositoryId: Vector[(String, Authentication)], + credentials: Seq[Credentials], + logger: CacheLogger, + cache: File + ): CoursierConfiguration = + CoursierConfiguration( + Option(log), + resolvers, + parallelDownloads, + maxIterations, + Option(sbtScalaOrganization), + Option(sbtScalaVersion), + sbtScalaJars, + interProjectDependencies, + excludeDependencies, + fallbackDependencies, + autoScalaLibrary, + hasClassifiers, + classifiers, + mavenProfiles, + 
Option(scalaOrganization), + Option(scalaVersion), + authenticationByRepositoryId, + credentials, + Option(logger), + Option(cache), + ivyHome = None, + followHttpToHttpsRedirections = None, + strict = None, + extraProjects = Vector.empty, + forceVersions = Vector.empty, + reconciliation = Vector.empty, + classpathOrder = true, + verbosityLevel = 0, + ttl = CacheDefaults.ttl, + checksums = CacheDefaults.checksums.toVector, + cachePolicies = CacheDefaults.cachePolicies.toVector.map(FromCoursier.cachePolicy), + missingOk = false, + sbtClassifiers = false, + providedInCompile = false, + protocolHandlerDependencies = Vector.empty, + retry = None, + sameVersions = Nil, + ) + } + + implicit class CoursierConfigurationOp(value: CoursierConfiguration) { + def withLog(log: Logger): CoursierConfiguration = + value.withLog(Option(log)) + def withSbtScalaOrganization(sbtScalaOrganization: String): CoursierConfiguration = + value.withSbtScalaOrganization(Option(sbtScalaOrganization)) + def withSbtScalaVersion(sbtScalaVersion: String): CoursierConfiguration = + value.withSbtScalaVersion(Option(sbtScalaVersion)) + def withScalaOrganization(scalaOrganization: String): CoursierConfiguration = + value.withScalaOrganization(Option(scalaOrganization)) + def withScalaVersion(scalaVersion: String): CoursierConfiguration = + value.withScalaVersion(Option(scalaVersion)) + def withLogger(logger: CacheLogger): CoursierConfiguration = + value.withLogger(Option(logger)) + def withCache(cache: File): CoursierConfiguration = + value.withCache(Option(cache)) + def withIvyHome(ivyHome: File): CoursierConfiguration = + value.withIvyHome(Option(ivyHome)) + def withFollowHttpToHttpsRedirections( + followHttpToHttpsRedirections: Boolean + ): CoursierConfiguration = + value.withFollowHttpToHttpsRedirections(Some(followHttpToHttpsRedirections)) + def withFollowHttpToHttpsRedirections(): CoursierConfiguration = + value.withFollowHttpToHttpsRedirections(Some(true)) + def withStrict(strict: Strict): CoursierConfiguration = + value.withStrict(Some(strict)) + def withTtl(ttl: Duration): CoursierConfiguration = + value.withTtl(Some(ttl)) + def addRepositoryAuthentication( + repositoryId: String, + authentication: Authentication + ): CoursierConfiguration = + value.withAuthenticationByRepositoryId( + value.authenticationByRepositoryId :+ (repositoryId, authentication) + ) + + def withUpdateConfiguration(conf: UpdateConfiguration): CoursierConfiguration = + value.withMissingOk(conf.missingOk) + + def withRetry(retry: (FiniteDuration, Int)): CoursierConfiguration = + value.withRetry(Some((retry._1, retry._2))) + } + + implicit class PublicationOp(value: Publication) { + def attributes: Attributes = + Attributes(value.`type`, value.classifier) + + def withAttributes(attributes: Attributes): Publication = + value + .withType(attributes.`type`) + .withClassifier(attributes.classifier) + } + + implicit class DependencyModule(value: Dependency.type) { + def apply( + module: Module, + version: String, + configuration: Configuration, + exclusions: Set[(Organization, ModuleName)], + attributes: Attributes, + optional: Boolean, + transitive: Boolean + ): Dependency = + Dependency( + module, + version, + configuration, + exclusions, + Publication("", attributes.`type`, Extension(""), attributes.classifier), + optional, + transitive + ) + } + + implicit class DependencyOp(value: Dependency) { + def attributes: Attributes = value.publication.attributes + + def withAttributes(attributes: Attributes): Dependency = + value.withPublication( + 
value.publication + .withType(attributes.`type`) + .withClassifier(attributes.classifier) + ) + } + + implicit class ModuleMatchersModule(value: ModuleMatchers.type) { + def all: ModuleMatchers = + ModuleMatchers(Set.empty, Set.empty) + def only(organization: String, moduleName: String): ModuleMatchers = + ModuleMatchers( + Set.empty, + Set(Module(Organization(organization), ModuleName(moduleName), Map())), + includeByDefault = false + ) + def only(mod: Module): ModuleMatchers = + ModuleMatchers(Set.empty, Set(mod), includeByDefault = false) + } + + implicit class StrictOp(value: Strict) { + def addInclude(include: (String, String)*): Strict = + value.withInclude(value.include ++ include) + def addExclude(exclude: (String, String)*): Strict = + value.withExclude(value.exclude ++ exclude) + } + + implicit class AuthenticationModule(value: Authentication.type) { + def apply(headers: Seq[(String, String)]): Authentication = + Authentication("", "").withHeaders(headers) + } + + implicit class DirectCredentialsModule(value: DirectCredentials.type) { + def apply(host: String, username: String, password: String, realm: String): DirectCredentials = + DirectCredentials(host, username, password, Option(realm)) + def apply( + host: String, + username: String, + password: String, + realm: String, + optional: Boolean + ): DirectCredentials = + DirectCredentials(host, username, password, Option(realm)) + } + + implicit class DirectCredentialsOp(value: DirectCredentials) { + def withRealm(realm: String): DirectCredentials = + value.withRealm(Option(realm)) + } + + implicit class CredentialsModule(value: Credentials.type) { + def apply(): DirectCredentials = DirectCredentials() + def apply(host: String, username: String, password: String): DirectCredentials = + DirectCredentials(host, username, password) + def apply( + host: String, + username: String, + password: String, + realm: Option[String] + ): DirectCredentials = + DirectCredentials(host, username, password, realm) + def apply(host: String, username: String, password: String, realm: String): DirectCredentials = + DirectCredentials(host, username, password, Option(realm)) + def apply( + host: String, + username: String, + password: String, + realm: Option[String], + optional: Boolean + ): DirectCredentials = + DirectCredentials( + host, + username, + password, + realm, + optional, + matchHost = false, + httpsOnly = true + ) + def apply( + host: String, + username: String, + password: String, + realm: String, + optional: Boolean + ): DirectCredentials = + DirectCredentials( + host, + username, + password, + Option(realm), + optional, + matchHost = false, + httpsOnly = true + ) + + def apply(f: File): FileCredentials = + FileCredentials(f.getAbsolutePath) + def apply(f: File, optional: Boolean): FileCredentials = + FileCredentials(f.getAbsolutePath, optional) + } +} diff --git a/lm-coursier/src/test/scala/lmcoursier/CoursierDependencyResolutionTests.scala b/lm-coursier/src/test/scala/lmcoursier/CoursierDependencyResolutionTests.scala new file mode 100644 index 000000000..462e434bb --- /dev/null +++ b/lm-coursier/src/test/scala/lmcoursier/CoursierDependencyResolutionTests.scala @@ -0,0 +1,53 @@ +package lmcoursier + +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec +import sbt.librarymanagement.ModuleID +import sbt.librarymanagement.UpdateConfiguration +import sbt.librarymanagement.UnresolvedWarningConfiguration +import sbt.util.Logger +import sbt.librarymanagement.ModuleInfo +import 
sbt.librarymanagement.ModuleDescriptorConfiguration +import sbt.librarymanagement.Configuration + +class CoursierDependencyResolutionTests extends AnyPropSpec with Matchers { + + property("missingOk from passed UpdateConfiguration") { + + val depRes = CoursierDependencyResolution(CoursierConfiguration().withAutoScalaLibrary(false)) + + val desc = ModuleDescriptorConfiguration(ModuleID("test", "foo", "1.0"), ModuleInfo("foo")) + .withDependencies( + Vector( + ModuleID("io.get-coursier", "coursier_2.13", "0.1.53") + .withConfigurations(Some("compile")), + ModuleID("org.scala-lang", "scala-library", "2.12.11").withConfigurations(Some("compile")) + ) + ) + .withConfigurations(Vector(Configuration.of("Compile", "compile"))) + val module = depRes.moduleDescriptor(desc) + + val logger: Logger = new Logger { + def log(level: sbt.util.Level.Value, message: => String): Unit = + System.err.println(s"${level.id} $message") + def success(message: => String): Unit = + System.err.println(message) + def trace(t: => Throwable): Unit = + System.err.println(s"trace $t") + } + + depRes + .update(module, UpdateConfiguration(), UnresolvedWarningConfiguration(), logger) + .fold(w => (), rep => sys.error(s"Expected resolution to fail, got report $rep")) + + val report = depRes + .update( + module, + UpdateConfiguration().withMissingOk(true), + UnresolvedWarningConfiguration(), + logger + ) + .fold(w => throw w.resolveException, identity) + } + +} diff --git a/lm-coursier/src/test/scala/lmcoursier/IvyXmlTests.scala b/lm-coursier/src/test/scala/lmcoursier/IvyXmlTests.scala new file mode 100644 index 000000000..1261a93e3 --- /dev/null +++ b/lm-coursier/src/test/scala/lmcoursier/IvyXmlTests.scala @@ -0,0 +1,30 @@ +package lmcoursier + +import lmcoursier.definitions.{ Configuration, Info, Module, ModuleName, Organization, Project } +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec + +class IvyXmlTests extends AnyPropSpec with Matchers { + + property("no truncation") { + val project = Project( + Module(Organization("org"), ModuleName("name"), Map()), + "ver", + Nil, + Map( + Configuration("foo") -> (1 to 80).map(n => + Configuration("bar" + n) + ) // long list of configurations -> no truncation any way + ), + Nil, + None, + Nil, + Info("", "", Nil, Nil, None) + ) + + val content = IvyXml(project, Nil, Nil) + + assert(!content.contains("")) + } + +} diff --git a/lm-coursier/src/test/scala/lmcoursier/ResolutionSpec.scala b/lm-coursier/src/test/scala/lmcoursier/ResolutionSpec.scala new file mode 100644 index 000000000..30182cb60 --- /dev/null +++ b/lm-coursier/src/test/scala/lmcoursier/ResolutionSpec.scala @@ -0,0 +1,256 @@ +package lmcoursier + +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec +import sbt.internal.librarymanagement.cross.CrossVersionUtil +import sbt.internal.util.ConsoleLogger +import sbt.librarymanagement._ +import sbt.librarymanagement.Configurations.Component +import sbt.librarymanagement.Resolver.{ + DefaultMavenRepository, + JCenterRepository, + JavaNet2Repository +} +import sbt.librarymanagement.{ Resolver, UnresolvedWarningConfiguration, UpdateConfiguration } +import sbt.librarymanagement.syntax._ + +final class ResolutionSpec extends AnyPropSpec with Matchers { + + lazy val log = ConsoleLogger() + + def configurations = Vector(Compile, Test, Runtime, Provided, Optional, Component) + def module( + lmEngine: DependencyResolution, + moduleId: ModuleID, + deps: Vector[ModuleID], + scalaFullVersion: Option[String], + 
overrideScalaVersion: Boolean = true + ): ModuleDescriptor = { + val scalaModuleInfo = scalaFullVersion map { fv => + ScalaModuleInfo( + scalaFullVersion = fv, + scalaBinaryVersion = CrossVersionUtil.binaryScalaVersion(fv), + configurations = configurations, + checkExplicit = true, + filterImplicit = false, + overrideScalaVersion = overrideScalaVersion + ) + } + + val moduleSetting = ModuleDescriptorConfiguration(moduleId, ModuleInfo("foo")) + .withDependencies(deps) + .withConfigurations(configurations) + .withScalaModuleInfo(scalaModuleInfo) + lmEngine.moduleDescriptor(moduleSetting) + } + + def resolvers = Vector( + DefaultMavenRepository, + JavaNet2Repository, + JCenterRepository, + Resolver.sbtPluginRepo("releases") + ) + + val lmEngine = CoursierDependencyResolution(CoursierConfiguration().withResolvers(resolvers)) + + private final val stubModule = "com.example" % "foo" % "0.1.0" % "compile" + + property("very simple module") { + val dependencies = Vector( + "com.typesafe.scala-logging" % "scala-logging_2.12" % "3.7.2" % "compile", + "org.scalatest" % "scalatest_2.12" % "3.0.4" % "test" + ).map(_.withIsTransitive(false)) + + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + val r = resolution.toOption.get + r.configurations.map(_.configuration) should have size configurations.length + + val compileConfig = r.configurations.find(_.configuration == Compile.toConfigRef).get + compileConfig.modules should have size 1 + + val runtimeConfig = r.configurations.find(_.configuration == Runtime.toConfigRef).get + runtimeConfig.modules should have size 1 + + val testConfig = r.configurations.find(_.configuration == Test.toConfigRef).get + testConfig.modules should have size 2 + } + + property("resolve compiler bridge") { + val dependencies = + Vector(("org.scala-sbt" % "compiler-interface" % "1.0.4" % "component").sources()) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + val r = resolution.toOption.get + + val componentConfig = r.configurations.find(_.configuration == Component.toConfigRef).get + componentConfig.modules should have size 2 + componentConfig.modules.head.artifacts should have size 1 + componentConfig.modules.head.artifacts.head._1.classifier should contain("sources") + } + + property("resolve sbt jars") { + val dependencies = + Vector("org.scala-sbt" % "sbt" % "1.1.0" % "provided") + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + val r = resolution.toOption.get + + val modules = r.configurations.flatMap(_.modules) + modules.map(_.module.name) should contain("main_2.12") + } + + property("resolve with default resolvers") { + val dependencies = + Vector(("org.scala-sbt" % "compiler-interface" % "1.0.4" % "component").sources()) + val lmEngine = + CoursierDependencyResolution( + CoursierConfiguration() + .withResolvers(Resolver.combineDefaultResolvers(Vector.empty)) + ) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + assert(resolution.isRight) + } + + /* + property("resolve with 
resolvers using a custom protocols") { + val sbtModule = "org.scala-sbt" % "sbt" % "1.1.0" + val dependencies = Vector(sbtModule) + + val protocolHandlerDependencies = Vector( + "org.example" %% "customprotocol-handler" % "0.1.0" + ) + + val resolvers = Vector( + "custom" at "customprotocol://host" + ) + + val configuration = + CoursierConfiguration() + .withResolvers(resolvers) + + val protocolHandlerConfiguration = + Some( + CoursierConfiguration() + .withProtocolHandlerDependencies(protocolHandlerDependencies) + .withResolvers(Resolver.combineDefaultResolvers(Vector.empty)) + ) + + val lmEngine = + CoursierDependencyResolution( + configuration = configuration, + protocolHandlerConfiguration = protocolHandlerConfiguration + ) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.13")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + + val report = resolution.toOption.get + + val modules = report.configurations.flatMap(_.modules) + modules.map(_.module).map(module => (module.organization, module.name, module.revision)) should contain( + (sbtModule.organization, sbtModule.name, sbtModule.revision) + ) + } + + property("resolve with resolvers using a custom protocols written in java") { + val sbtModule = "org.scala-sbt" % "sbt" % "1.1.0" + val dependencies = Vector(sbtModule) + + val protocolHandlerDependencies = Vector( + "org.example" % "customprotocoljava-handler" % "0.1.0" + ) + + val resolvers = Vector( + "custom" at "customprotocoljava://host" + ) + + val configuration = + CoursierConfiguration() + .withResolvers(resolvers) + + val protocolHandlerConfiguration = + Some( + CoursierConfiguration() + .withProtocolHandlerDependencies(protocolHandlerDependencies) + .withResolvers(Resolver.combineDefaultResolvers(Vector.empty)) + ) + + val lmEngine = + CoursierDependencyResolution( + configuration = configuration, + protocolHandlerConfiguration = protocolHandlerConfiguration + ) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.13")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + + val report = resolution.toOption.get + + val modules = report.configurations.flatMap(_.modules) + modules.map(_.module).map(module => (module.organization, module.name, module.revision)) should contain( + (sbtModule.organization, sbtModule.name, sbtModule.revision) + ) + } + */ + + property("resolve plugin") { + val pluginAttributes = Map("scalaVersion" -> "2.12", "sbtVersion" -> "1.0") + val dependencies = + Vector(("org.xerial.sbt" % "sbt-sonatype" % "2.0").withExtraAttributes(pluginAttributes)) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + val r = resolution.toOption.get + + val componentConfig = r.configurations.find(_.configuration == Compile.toConfigRef).get + componentConfig.modules.map(_.module.name) should have size 5 + } + + property("strip e: prefix from plugin attributes") { + val pluginAttributes = Map("e:scalaVersion" -> "2.12", "e:sbtVersion" -> "1.0") + val dependencies = + Vector(("org.xerial.sbt" % "sbt-sonatype" % "2.0").withExtraAttributes(pluginAttributes)) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + 
assert(resolution.isRight) + } + + property("resolve plugins hosted on repo.typesafe.com") { + val pluginAttributes = Map("e:scalaVersion" -> "2.12", "e:sbtVersion" -> "1.0") + val dependencies = + Vector(("com.typesafe.sbt" % "sbt-git" % "0.9.3").withExtraAttributes(pluginAttributes)) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + assert(resolution.isRight) + } + + property("resolve licenses from parent poms") { + val dependencies = + Vector(("org.apache.commons" % "commons-compress" % "1.26.2")) + val coursierModule = module(lmEngine, stubModule, dependencies, Some("2.12.4")) + val resolution = + lmEngine.update(coursierModule, UpdateConfiguration(), UnresolvedWarningConfiguration(), log) + + assert(resolution.isRight) + val componentConfig = + resolution.toOption.get.configurations.find(_.configuration == Compile.toConfigRef).get + val compress = componentConfig.modules.find(_.module.name == "commons-compress").get + compress.licenses should have size 1 + } +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/ExternalIvyConfiguration.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/ExternalIvyConfiguration.scala new file mode 100644 index 000000000..d99050794 --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/ExternalIvyConfiguration.scala @@ -0,0 +1,66 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +final class ExternalIvyConfiguration private ( + lock: Option[xsbti.GlobalLock], + log: Option[xsbti.Logger], + updateOptions: sbt.librarymanagement.ivy.UpdateOptions, + val baseDirectory: Option[java.io.File], + val uri: Option[java.net.URI], + val extraResolvers: Vector[sbt.librarymanagement.Resolver]) extends sbt.librarymanagement.ivy.IvyConfiguration(lock, log, updateOptions) with Serializable { + + private def this() = this(None, None, sbt.librarymanagement.ivy.UpdateOptions(), None, None, Vector()) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: ExternalIvyConfiguration => (this.lock == x.lock) && (this.log == x.log) && (this.updateOptions == x.updateOptions) && (this.baseDirectory == x.baseDirectory) && (this.uri == x.uri) && (this.extraResolvers == x.extraResolvers) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ivy.ExternalIvyConfiguration".##) + lock.##) + log.##) + updateOptions.##) + baseDirectory.##) + uri.##) + extraResolvers.##) + } + override def toString: String = { + "ExternalIvyConfiguration(" + lock + ", " + log + ", " + updateOptions + ", " + baseDirectory + ", " + uri + ", " + extraResolvers + ")" + } + private[this] def copy(lock: Option[xsbti.GlobalLock] = lock, log: Option[xsbti.Logger] = log, updateOptions: sbt.librarymanagement.ivy.UpdateOptions = updateOptions, baseDirectory: Option[java.io.File] = baseDirectory, uri: Option[java.net.URI] = uri, extraResolvers: Vector[sbt.librarymanagement.Resolver] = extraResolvers): ExternalIvyConfiguration = { + new ExternalIvyConfiguration(lock, log, updateOptions, baseDirectory, uri, extraResolvers) + } + def withLock(lock: Option[xsbti.GlobalLock]): ExternalIvyConfiguration = { + copy(lock = lock) + } + def withLock(lock: xsbti.GlobalLock): 
ExternalIvyConfiguration = { + copy(lock = Option(lock)) + } + def withLog(log: Option[xsbti.Logger]): ExternalIvyConfiguration = { + copy(log = log) + } + def withLog(log: xsbti.Logger): ExternalIvyConfiguration = { + copy(log = Option(log)) + } + def withUpdateOptions(updateOptions: sbt.librarymanagement.ivy.UpdateOptions): ExternalIvyConfiguration = { + copy(updateOptions = updateOptions) + } + def withBaseDirectory(baseDirectory: Option[java.io.File]): ExternalIvyConfiguration = { + copy(baseDirectory = baseDirectory) + } + def withBaseDirectory(baseDirectory: java.io.File): ExternalIvyConfiguration = { + copy(baseDirectory = Option(baseDirectory)) + } + def withUri(uri: Option[java.net.URI]): ExternalIvyConfiguration = { + copy(uri = uri) + } + def withUri(uri: java.net.URI): ExternalIvyConfiguration = { + copy(uri = Option(uri)) + } + def withExtraResolvers(extraResolvers: Vector[sbt.librarymanagement.Resolver]): ExternalIvyConfiguration = { + copy(extraResolvers = extraResolvers) + } +} +object ExternalIvyConfiguration { + + def apply(): ExternalIvyConfiguration = new ExternalIvyConfiguration() + def apply(lock: Option[xsbti.GlobalLock], log: Option[xsbti.Logger], updateOptions: sbt.librarymanagement.ivy.UpdateOptions, baseDirectory: Option[java.io.File], uri: Option[java.net.URI], extraResolvers: Vector[sbt.librarymanagement.Resolver]): ExternalIvyConfiguration = new ExternalIvyConfiguration(lock, log, updateOptions, baseDirectory, uri, extraResolvers) + def apply(lock: xsbti.GlobalLock, log: xsbti.Logger, updateOptions: sbt.librarymanagement.ivy.UpdateOptions, baseDirectory: java.io.File, uri: java.net.URI, extraResolvers: Vector[sbt.librarymanagement.Resolver]): ExternalIvyConfiguration = new ExternalIvyConfiguration(Option(lock), Option(log), updateOptions, Option(baseDirectory), Option(uri), extraResolvers) +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/ExternalIvyConfigurationFormats.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/ExternalIvyConfigurationFormats.scala new file mode 100644 index 000000000..553943e65 --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/ExternalIvyConfigurationFormats.scala @@ -0,0 +1,67 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait ExternalIvyConfigurationFormats { self: sbt.internal.librarymanagement.formats.GlobalLockFormat + with sbt.internal.librarymanagement.formats.LoggerFormat + with sbt.librarymanagement.ivy.formats.UpdateOptionsFormat + with sbt.librarymanagement.ModuleIDFormats + with sbt.librarymanagement.ResolverFormats + with sjsonnew.BasicJsonProtocol + with sbt.librarymanagement.ArtifactFormats + with sbt.librarymanagement.ConfigRefFormats + with sbt.librarymanagement.ChecksumFormats + with sbt.librarymanagement.InclExclRuleFormats + with sbt.librarymanagement.CrossVersionFormats + with sbt.librarymanagement.DisabledFormats + with sbt.librarymanagement.BinaryFormats + with sbt.librarymanagement.ConstantFormats + with sbt.librarymanagement.PatchFormats + with sbt.librarymanagement.FullFormats + with sbt.librarymanagement.For3Use2_13Formats + with sbt.librarymanagement.For2_13Use3Formats + with sbt.librarymanagement.ChainedResolverFormats + with sbt.librarymanagement.MavenRepoFormats + with sbt.librarymanagement.MavenCacheFormats + with sbt.librarymanagement.PatternsFormats + with sbt.librarymanagement.FileConfigurationFormats + with sbt.librarymanagement.FileRepositoryFormats + with sbt.librarymanagement.URLRepositoryFormats + with sbt.librarymanagement.SshConnectionFormats + with sbt.librarymanagement.SshAuthenticationFormats + with sbt.librarymanagement.SshRepositoryFormats + with sbt.librarymanagement.SftpRepositoryFormats + with sbt.librarymanagement.PasswordAuthenticationFormats + with sbt.librarymanagement.KeyFileAuthenticationFormats => + implicit lazy val ExternalIvyConfigurationFormat: JsonFormat[sbt.librarymanagement.ivy.ExternalIvyConfiguration] = new JsonFormat[sbt.librarymanagement.ivy.ExternalIvyConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ivy.ExternalIvyConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val lock = unbuilder.readField[Option[xsbti.GlobalLock]]("lock") + val log = unbuilder.readField[Option[xsbti.Logger]]("log") + val updateOptions = unbuilder.readField[sbt.librarymanagement.ivy.UpdateOptions]("updateOptions") + val baseDirectory = unbuilder.readField[Option[java.io.File]]("baseDirectory") + val uri = unbuilder.readField[Option[java.net.URI]]("uri") + val extraResolvers = unbuilder.readField[Vector[sbt.librarymanagement.Resolver]]("extraResolvers") + unbuilder.endObject() + sbt.librarymanagement.ivy.ExternalIvyConfiguration(lock, log, updateOptions, baseDirectory, uri, extraResolvers) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ivy.ExternalIvyConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("lock", obj.lock) + builder.addField("log", obj.log) + builder.addField("updateOptions", obj.updateOptions) + builder.addField("baseDirectory", obj.baseDirectory) + builder.addField("uri", obj.uri) + builder.addField("extraResolvers", obj.extraResolvers) + builder.endObject() + } + } +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/InlineIvyConfiguration.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/InlineIvyConfiguration.scala new file mode 100644 index 000000000..5455978f7 --- /dev/null +++ 
b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/InlineIvyConfiguration.scala @@ -0,0 +1,105 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +final class InlineIvyConfiguration private ( + lock: Option[xsbti.GlobalLock], + log: Option[xsbti.Logger], + updateOptions: sbt.librarymanagement.ivy.UpdateOptions, + val paths: Option[sbt.librarymanagement.ivy.IvyPaths], + val resolvers: Vector[sbt.librarymanagement.Resolver], + val otherResolvers: Vector[sbt.librarymanagement.Resolver], + val moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration], + val checksums: Vector[String], + val managedChecksums: Boolean, + val resolutionCacheDir: Option[java.io.File]) extends sbt.librarymanagement.ivy.IvyConfiguration(lock, log, updateOptions) with Serializable { + + private def this() = this(None, None, sbt.librarymanagement.ivy.UpdateOptions(), None, sbt.librarymanagement.Resolver.defaults, Vector.empty, Vector.empty, sbt.librarymanagement.ivy.IvyDefaults.defaultChecksums, false, None) + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: InlineIvyConfiguration => (this.lock == x.lock) && (this.log == x.log) && (this.updateOptions == x.updateOptions) && (this.paths == x.paths) && (this.resolvers == x.resolvers) && (this.otherResolvers == x.otherResolvers) && (this.moduleConfigurations == x.moduleConfigurations) && (this.checksums == x.checksums) && (this.managedChecksums == x.managedChecksums) && (this.resolutionCacheDir == x.resolutionCacheDir) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ivy.InlineIvyConfiguration".##) + lock.##) + log.##) + updateOptions.##) + paths.##) + resolvers.##) + otherResolvers.##) + moduleConfigurations.##) + checksums.##) + managedChecksums.##) + resolutionCacheDir.##) + } + override def toString: String = { + "InlineIvyConfiguration(" + lock + ", " + log + ", " + updateOptions + ", " + paths + ", " + resolvers + ", " + otherResolvers + ", " + moduleConfigurations + ", " + checksums + ", " + managedChecksums + ", " + resolutionCacheDir + ")" + } + private[this] def copy(lock: Option[xsbti.GlobalLock] = lock, log: Option[xsbti.Logger] = log, updateOptions: sbt.librarymanagement.ivy.UpdateOptions = updateOptions, paths: Option[sbt.librarymanagement.ivy.IvyPaths] = paths, resolvers: Vector[sbt.librarymanagement.Resolver] = resolvers, otherResolvers: Vector[sbt.librarymanagement.Resolver] = otherResolvers, moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration] = moduleConfigurations, checksums: Vector[String] = checksums, managedChecksums: Boolean = managedChecksums, resolutionCacheDir: Option[java.io.File] = resolutionCacheDir): InlineIvyConfiguration = { + new InlineIvyConfiguration(lock, log, updateOptions, paths, resolvers, otherResolvers, moduleConfigurations, checksums, managedChecksums, resolutionCacheDir) + } + def withLock(lock: Option[xsbti.GlobalLock]): InlineIvyConfiguration = { + copy(lock = lock) + } + def withLock(lock: xsbti.GlobalLock): InlineIvyConfiguration = { + copy(lock = Option(lock)) + } + def withLog(log: Option[xsbti.Logger]): InlineIvyConfiguration = { + copy(log = log) + } + def withLog(log: xsbti.Logger): InlineIvyConfiguration = { + copy(log = Option(log)) + } + def withUpdateOptions(updateOptions: sbt.librarymanagement.ivy.UpdateOptions): 
InlineIvyConfiguration = { + copy(updateOptions = updateOptions) + } + def withPaths(paths: Option[sbt.librarymanagement.ivy.IvyPaths]): InlineIvyConfiguration = { + copy(paths = paths) + } + def withPaths(paths: sbt.librarymanagement.ivy.IvyPaths): InlineIvyConfiguration = { + copy(paths = Option(paths)) + } + def withResolvers(resolvers: Vector[sbt.librarymanagement.Resolver]): InlineIvyConfiguration = { + copy(resolvers = resolvers) + } + def withOtherResolvers(otherResolvers: Vector[sbt.librarymanagement.Resolver]): InlineIvyConfiguration = { + copy(otherResolvers = otherResolvers) + } + def withModuleConfigurations(moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration]): InlineIvyConfiguration = { + copy(moduleConfigurations = moduleConfigurations) + } + def withChecksums(checksums: Vector[String]): InlineIvyConfiguration = { + copy(checksums = checksums) + } + def withManagedChecksums(managedChecksums: Boolean): InlineIvyConfiguration = { + copy(managedChecksums = managedChecksums) + } + def withResolutionCacheDir(resolutionCacheDir: Option[java.io.File]): InlineIvyConfiguration = { + copy(resolutionCacheDir = resolutionCacheDir) + } + def withResolutionCacheDir(resolutionCacheDir: java.io.File): InlineIvyConfiguration = { + copy(resolutionCacheDir = Option(resolutionCacheDir)) + } +} +object InlineIvyConfiguration { + /** Provided for backward compatibility. */ + @deprecated("Use an alternative apply", "1.2.0") + def apply( + paths: sbt.librarymanagement.ivy.IvyPaths, + resolvers: Vector[sbt.librarymanagement.Resolver], + otherResolvers: Vector[sbt.librarymanagement.Resolver], + moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration], + lock: Option[xsbti.GlobalLock], + checksums: Vector[String], + managedChecksums: Boolean, + resolutionCacheDir: Option[java.io.File], + updateOptions: sbt.librarymanagement.ivy.UpdateOptions, + log: xsbti.Logger + ): InlineIvyConfiguration = { + apply() + .withLock(lock) + .withResolvers(resolvers) + .withOtherResolvers(otherResolvers) + .withModuleConfigurations(moduleConfigurations) + .withChecksums(checksums) + .withManagedChecksums(managedChecksums) + .withResolutionCacheDir(resolutionCacheDir) + .withLog(log) + } + def apply(): InlineIvyConfiguration = new InlineIvyConfiguration() + def apply(lock: Option[xsbti.GlobalLock], log: Option[xsbti.Logger], updateOptions: sbt.librarymanagement.ivy.UpdateOptions, paths: Option[sbt.librarymanagement.ivy.IvyPaths], resolvers: Vector[sbt.librarymanagement.Resolver], otherResolvers: Vector[sbt.librarymanagement.Resolver], moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration], checksums: Vector[String], managedChecksums: Boolean, resolutionCacheDir: Option[java.io.File]): InlineIvyConfiguration = new InlineIvyConfiguration(lock, log, updateOptions, paths, resolvers, otherResolvers, moduleConfigurations, checksums, managedChecksums, resolutionCacheDir) + def apply(lock: xsbti.GlobalLock, log: xsbti.Logger, updateOptions: sbt.librarymanagement.ivy.UpdateOptions, paths: sbt.librarymanagement.ivy.IvyPaths, resolvers: Vector[sbt.librarymanagement.Resolver], otherResolvers: Vector[sbt.librarymanagement.Resolver], moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration], checksums: Vector[String], managedChecksums: Boolean, resolutionCacheDir: java.io.File): InlineIvyConfiguration = new InlineIvyConfiguration(Option(lock), Option(log), updateOptions, Option(paths), resolvers, otherResolvers, moduleConfigurations, checksums, managedChecksums, 
Option(resolutionCacheDir)) +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/InlineIvyConfigurationFormats.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/InlineIvyConfigurationFormats.scala new file mode 100644 index 000000000..22f92e099 --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/InlineIvyConfigurationFormats.scala @@ -0,0 +1,77 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait InlineIvyConfigurationFormats { self: sbt.internal.librarymanagement.formats.GlobalLockFormat + with sbt.internal.librarymanagement.formats.LoggerFormat + with sbt.librarymanagement.ivy.formats.UpdateOptionsFormat + with sbt.librarymanagement.ivy.IvyPathsFormats + with sjsonnew.BasicJsonProtocol + with sbt.librarymanagement.ModuleIDFormats + with sbt.librarymanagement.ResolverFormats + with sbt.librarymanagement.ModuleConfigurationFormats + with sbt.librarymanagement.ArtifactFormats + with sbt.librarymanagement.ConfigRefFormats + with sbt.librarymanagement.ChecksumFormats + with sbt.librarymanagement.InclExclRuleFormats + with sbt.librarymanagement.CrossVersionFormats + with sbt.librarymanagement.DisabledFormats + with sbt.librarymanagement.BinaryFormats + with sbt.librarymanagement.ConstantFormats + with sbt.librarymanagement.PatchFormats + with sbt.librarymanagement.FullFormats + with sbt.librarymanagement.For3Use2_13Formats + with sbt.librarymanagement.For2_13Use3Formats + with sbt.librarymanagement.ChainedResolverFormats + with sbt.librarymanagement.MavenRepoFormats + with sbt.librarymanagement.MavenCacheFormats + with sbt.librarymanagement.PatternsFormats + with sbt.librarymanagement.FileConfigurationFormats + with sbt.librarymanagement.FileRepositoryFormats + with sbt.librarymanagement.URLRepositoryFormats + with sbt.librarymanagement.SshConnectionFormats + with sbt.librarymanagement.SshAuthenticationFormats + with sbt.librarymanagement.SshRepositoryFormats + with sbt.librarymanagement.SftpRepositoryFormats + with sbt.librarymanagement.PasswordAuthenticationFormats + with sbt.librarymanagement.KeyFileAuthenticationFormats => + implicit lazy val InlineIvyConfigurationFormat: JsonFormat[sbt.librarymanagement.ivy.InlineIvyConfiguration] = new JsonFormat[sbt.librarymanagement.ivy.InlineIvyConfiguration] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ivy.InlineIvyConfiguration = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val lock = unbuilder.readField[Option[xsbti.GlobalLock]]("lock") + val log = unbuilder.readField[Option[xsbti.Logger]]("log") + val updateOptions = unbuilder.readField[sbt.librarymanagement.ivy.UpdateOptions]("updateOptions") + val paths = unbuilder.readField[Option[sbt.librarymanagement.ivy.IvyPaths]]("paths") + val resolvers = unbuilder.readField[Vector[sbt.librarymanagement.Resolver]]("resolvers") + val otherResolvers = unbuilder.readField[Vector[sbt.librarymanagement.Resolver]]("otherResolvers") + val moduleConfigurations = unbuilder.readField[Vector[sbt.librarymanagement.ModuleConfiguration]]("moduleConfigurations") + val checksums = unbuilder.readField[Vector[String]]("checksums") + val managedChecksums = unbuilder.readField[Boolean]("managedChecksums") + val resolutionCacheDir = 
unbuilder.readField[Option[java.io.File]]("resolutionCacheDir") + unbuilder.endObject() + sbt.librarymanagement.ivy.InlineIvyConfiguration(lock, log, updateOptions, paths, resolvers, otherResolvers, moduleConfigurations, checksums, managedChecksums, resolutionCacheDir) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ivy.InlineIvyConfiguration, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("lock", obj.lock) + builder.addField("log", obj.log) + builder.addField("updateOptions", obj.updateOptions) + builder.addField("paths", obj.paths) + builder.addField("resolvers", obj.resolvers) + builder.addField("otherResolvers", obj.otherResolvers) + builder.addField("moduleConfigurations", obj.moduleConfigurations) + builder.addField("checksums", obj.checksums) + builder.addField("managedChecksums", obj.managedChecksums) + builder.addField("resolutionCacheDir", obj.resolutionCacheDir) + builder.endObject() + } + } +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyConfiguration.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyConfiguration.scala new file mode 100644 index 000000000..c515b3747 --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyConfiguration.scala @@ -0,0 +1,28 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +abstract class IvyConfiguration( + val lock: Option[xsbti.GlobalLock], + val log: Option[xsbti.Logger], + val updateOptions: sbt.librarymanagement.ivy.UpdateOptions) extends Serializable { + + def this() = this(None, None, sbt.librarymanagement.ivy.UpdateOptions()) + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: IvyConfiguration => (this.lock == x.lock) && (this.log == x.log) && (this.updateOptions == x.updateOptions) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (37 * (17 + "sbt.librarymanagement.ivy.IvyConfiguration".##) + lock.##) + log.##) + updateOptions.##) + } + override def toString: String = { + "IvyConfiguration(" + lock + ", " + log + ", " + updateOptions + ")" + } +} +object IvyConfiguration { + +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyConfigurationFormats.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyConfigurationFormats.scala new file mode 100644 index 000000000..324710d4f --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyConfigurationFormats.scala @@ -0,0 +1,45 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy + +import _root_.sjsonnew.JsonFormat +trait IvyConfigurationFormats { self: sbt.internal.librarymanagement.formats.GlobalLockFormat + with sbt.internal.librarymanagement.formats.LoggerFormat + with sbt.librarymanagement.ivy.formats.UpdateOptionsFormat + with sbt.librarymanagement.ivy.IvyPathsFormats + with sbt.librarymanagement.ModuleIDFormats + with sjsonnew.BasicJsonProtocol + with sbt.librarymanagement.ResolverFormats + with sbt.librarymanagement.ModuleConfigurationFormats + with sbt.librarymanagement.ivy.InlineIvyConfigurationFormats + with sbt.librarymanagement.ivy.ExternalIvyConfigurationFormats + with sbt.librarymanagement.ArtifactFormats + with sbt.librarymanagement.ConfigRefFormats + with sbt.librarymanagement.ChecksumFormats + with sbt.librarymanagement.InclExclRuleFormats + with sbt.librarymanagement.CrossVersionFormats + with sbt.librarymanagement.DisabledFormats + with sbt.librarymanagement.BinaryFormats + with sbt.librarymanagement.ConstantFormats + with sbt.librarymanagement.PatchFormats + with sbt.librarymanagement.FullFormats + with sbt.librarymanagement.For3Use2_13Formats + with sbt.librarymanagement.For2_13Use3Formats + with sbt.librarymanagement.ChainedResolverFormats + with sbt.librarymanagement.MavenRepoFormats + with sbt.librarymanagement.MavenCacheFormats + with sbt.librarymanagement.PatternsFormats + with sbt.librarymanagement.FileConfigurationFormats + with sbt.librarymanagement.FileRepositoryFormats + with sbt.librarymanagement.URLRepositoryFormats + with sbt.librarymanagement.SshConnectionFormats + with sbt.librarymanagement.SshAuthenticationFormats + with sbt.librarymanagement.SshRepositoryFormats + with sbt.librarymanagement.SftpRepositoryFormats + with sbt.librarymanagement.PasswordAuthenticationFormats + with sbt.librarymanagement.KeyFileAuthenticationFormats => + implicit lazy val IvyConfigurationFormat: JsonFormat[sbt.librarymanagement.ivy.IvyConfiguration] = flatUnionFormat2[sbt.librarymanagement.ivy.IvyConfiguration, sbt.librarymanagement.ivy.InlineIvyConfiguration, sbt.librarymanagement.ivy.ExternalIvyConfiguration]("type") +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyPaths.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyPaths.scala new file mode 100644 index 000000000..74268a9d3 --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyPaths.scala @@ -0,0 +1,40 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. 
+ */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +final class IvyPaths private ( + val baseDirectory: String, + val ivyHome: Option[String]) extends Serializable { + + + + override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match { + case x: IvyPaths => (this.baseDirectory == x.baseDirectory) && (this.ivyHome == x.ivyHome) + case _ => false + }) + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.librarymanagement.ivy.IvyPaths".##) + baseDirectory.##) + ivyHome.##) + } + override def toString: String = { + "IvyPaths(" + baseDirectory + ", " + ivyHome + ")" + } + private[this] def copy(baseDirectory: String = baseDirectory, ivyHome: Option[String] = ivyHome): IvyPaths = { + new IvyPaths(baseDirectory, ivyHome) + } + def withBaseDirectory(baseDirectory: String): IvyPaths = { + copy(baseDirectory = baseDirectory) + } + def withIvyHome(ivyHome: Option[String]): IvyPaths = { + copy(ivyHome = ivyHome) + } + def withIvyHome(ivyHome: String): IvyPaths = { + copy(ivyHome = Option(ivyHome)) + } +} +object IvyPaths { + + def apply(baseDirectory: String, ivyHome: Option[String]): IvyPaths = new IvyPaths(baseDirectory, ivyHome) + def apply(baseDirectory: String, ivyHome: String): IvyPaths = new IvyPaths(baseDirectory, Option(ivyHome)) +} diff --git a/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyPathsFormats.scala b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyPathsFormats.scala new file mode 100644 index 000000000..1483f1d91 --- /dev/null +++ b/lm-ivy/src/main/contraband-scala/sbt/librarymanagement/ivy/IvyPathsFormats.scala @@ -0,0 +1,29 @@ +/** + * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt.librarymanagement.ivy +import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +trait IvyPathsFormats { self: sjsonnew.BasicJsonProtocol => +implicit lazy val IvyPathsFormat: JsonFormat[sbt.librarymanagement.ivy.IvyPaths] = new JsonFormat[sbt.librarymanagement.ivy.IvyPaths] { + override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ivy.IvyPaths = { + __jsOpt match { + case Some(__js) => + unbuilder.beginObject(__js) + val baseDirectory = unbuilder.readField[String]("baseDirectory") + val ivyHome = unbuilder.readField[Option[String]]("ivyHome") + unbuilder.endObject() + sbt.librarymanagement.ivy.IvyPaths(baseDirectory, ivyHome) + case None => + deserializationError("Expected JsObject but found None") + } + } + override def write[J](obj: sbt.librarymanagement.ivy.IvyPaths, builder: Builder[J]): Unit = { + builder.beginObject() + builder.addField("baseDirectory", obj.baseDirectory) + builder.addField("ivyHome", obj.ivyHome) + builder.endObject() + } +} +} diff --git a/lm-ivy/src/main/contraband/lm-ivy.json b/lm-ivy/src/main/contraband/lm-ivy.json new file mode 100644 index 000000000..d6e90dc76 --- /dev/null +++ b/lm-ivy/src/main/contraband/lm-ivy.json @@ -0,0 +1,145 @@ +{ + "codecNamespace": "sbt.librarymanagement.ivy", + "types": [ + { + "name": "IvyConfiguration", + "namespace": "sbt.librarymanagement.ivy", + "target": "Scala", + "type": "interface", + "fields": [ + { + "name": "lock", + "type": "xsbti.GlobalLock?", + "default": "None", + "since": "0.0.1" + }, + { + "name": "log", + "type": "xsbti.Logger?", + "default": "None", + "since": "0.0.1" + }, + { + "name": "updateOptions", + "type": "sbt.librarymanagement.ivy.UpdateOptions", + "default": 
"sbt.librarymanagement.ivy.UpdateOptions()", + "since": "0.0.1" + } + ], + "types": [ + { + "name": "InlineIvyConfiguration", + "namespace": "sbt.librarymanagement.ivy", + "target": "Scala", + "type": "record", + "fields": [ + { + "name": "paths", + "type": "sbt.librarymanagement.ivy.IvyPaths?", + "default": "None", + "since": "0.0.1" + }, + { + "name": "resolvers", + "type": "sbt.librarymanagement.Resolver*", + "default": "sbt.librarymanagement.Resolver.defaults", + "since": "0.0.1" + }, + { + "name": "otherResolvers", + "type": "sbt.librarymanagement.Resolver*", + "default": "Vector.empty", + "since": "0.0.1" + }, + { + "name": "moduleConfigurations", + "type": "sbt.librarymanagement.ModuleConfiguration*", + "default": "Vector.empty", + "since": "0.0.1" + }, + { + "name": "checksums", + "type": "String*", + "default": "sbt.librarymanagement.ivy.IvyDefaults.defaultChecksums", + "since": "0.0.1" + }, + { + "name": "managedChecksums", + "type": "Boolean", + "default": "false", + "since": "0.0.1" + }, + { + "name": "resolutionCacheDir", + "type": "java.io.File?", + "default": "None", + "since": "0.0.1" + } + ], + "extraCompanion": [ + "/** Provided for backward compatibility. */", + "@deprecated(\"Use an alternative apply\", \"1.2.0\")", + "def apply(", + " paths: sbt.librarymanagement.ivy.IvyPaths,", + " resolvers: Vector[sbt.librarymanagement.Resolver],", + " otherResolvers: Vector[sbt.librarymanagement.Resolver],", + " moduleConfigurations: Vector[sbt.librarymanagement.ModuleConfiguration],", + " lock: Option[xsbti.GlobalLock],", + " checksums: Vector[String],", + " managedChecksums: Boolean,", + " resolutionCacheDir: Option[java.io.File],", + " updateOptions: sbt.librarymanagement.ivy.UpdateOptions,", + " log: xsbti.Logger", + "): InlineIvyConfiguration = {", + " apply()", + " .withLock(lock)", + " .withResolvers(resolvers)", + " .withOtherResolvers(otherResolvers)", + " .withModuleConfigurations(moduleConfigurations)", + " .withChecksums(checksums)", + " .withManagedChecksums(managedChecksums)", + " .withResolutionCacheDir(resolutionCacheDir)", + " .withLog(log)", + "}" + ] + }, + { + "name": "ExternalIvyConfiguration", + "namespace": "sbt.librarymanagement.ivy", + "target": "Scala", + "type": "record", + "fields": [ + { + "name": "baseDirectory", + "type": "java.io.File?", + "default": "None", + "since": "0.0.1" + }, + { + "name": "uri", + "type": "java.net.URI?", + "default": "None", + "since": "0.0.1" + }, + { + "name": "extraResolvers", + "type": "sbt.librarymanagement.Resolver*", + "default": "Vector()", + "since": "0.0.1" + } + ] + } + ] + }, + { + "name": "IvyPaths", + "namespace": "sbt.librarymanagement.ivy", + "target": "Scala", + "type": "record", + "fields": [ + { "name": "baseDirectory", "type": "String" }, + { "name": "ivyHome", "type": "String?" 
} + ] + } + ] +} diff --git a/lm-ivy/src/main/java/internal/librarymanagement/ResolverAdapter.java b/lm-ivy/src/main/java/internal/librarymanagement/ResolverAdapter.java new file mode 100644 index 000000000..d983a6149 --- /dev/null +++ b/lm-ivy/src/main/java/internal/librarymanagement/ResolverAdapter.java @@ -0,0 +1,16 @@ +package sbt.internal.librarymanagement; + +import java.util.Map; +import org.apache.ivy.plugins.resolver.DependencyResolver; + +// implements the methods with raw types +@SuppressWarnings("rawtypes") +public abstract class ResolverAdapter implements DependencyResolver { + public String[] listTokenValues(String token, Map otherTokenValues) { + return new String[0]; + } + + public Map[] listTokenValues(String[] tokens, Map criteria) { + return new Map[0]; + } +} diff --git a/lm-ivy/src/main/scala/org/apache/ivy/plugins/parser/m2/ReplaceMavenConfigurationMappings.scala b/lm-ivy/src/main/scala/org/apache/ivy/plugins/parser/m2/ReplaceMavenConfigurationMappings.scala new file mode 100644 index 000000000..fb0b889d6 --- /dev/null +++ b/lm-ivy/src/main/scala/org/apache/ivy/plugins/parser/m2/ReplaceMavenConfigurationMappings.scala @@ -0,0 +1,135 @@ +package org.apache.ivy.plugins.parser.m2 + +import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor; + +/** + * It turns out there was a very subtle, and evil, issue sitting in the Ivy/maven configuration, and it + * related to dependency mapping. A mapping of `foo->bar(*)` means that the local configuration + * `foo` depends on the remote configuration `bar`, if it exists, or *ALL CONFIGURATIONS* if `bar` + * does not exist. Since the default Ivy configuration mapping was using the random `master` + * configuration, which AFAICT is NEVER specified, just an assumed default, this would cause leaks + * between maven + ivy projects. + * + * i.e. if a maven POM depends on a module denoted by an ivy.xml file, then you'd wind up accidentally + * bleeding ALL the ivy module's configurations into the maven module's configurations. + * + * This fix works around the issue by assuming that if there is no `master` configuration, then the + * maven default of `compile` is intended. As sbt forces generated `ivy.xml` files to abide by + * maven conventions, this works in all of our test cases. The only scenario where it wouldn't work + * is those who have custom ivy.xml files *and* have pom.xml files which rely on those custom ivy.xml files, + * a very unlikely situation where the workaround is: "define a master configuration". + * + * Also see: http://ant.apache.org/ivy/history/2.3.0/ivyfile/dependency.html + * and: http://svn.apache.org/repos/asf/ant/ivy/core/tags/2.3.0/src/java/org/apache/ivy/plugins/parser/m2/PomModuleDescriptorBuilder.java + */ +object ReplaceMavenConfigurationMappings { + + def addMappings(dd: DefaultDependencyDescriptor, scope: String, isOptional: Boolean) = { + val mapping = ReplaceMavenConfigurationMappings.REPLACEMENT_MAVEN_MAPPINGS.get(scope) + mapping.addMappingConfs(dd, isOptional) + } + + val REPLACEMENT_MAVEN_MAPPINGS = { + // Here we copy paste from Ivy + val REPLACEMENT_MAPPINGS = new java.util.HashMap[String, PomModuleDescriptorBuilder.ConfMapper] + + // NOTE - This code is copied from org.apache.ivy.plugins.parser.m2.PomModuleDescriptorBuilder + // except with altered default configurations...
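The replacement mappings that follow are easier to read with a concrete case. Here is a minimal sketch (not part of the patch; the dependency descriptor and module coordinates are made up) of what the remapped `compile` scope produces for a plain, non-optional Maven dependency:

import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor
import org.apache.ivy.core.module.id.ModuleRevisionId
import org.apache.ivy.plugins.parser.m2.ReplaceMavenConfigurationMappings

// Hypothetical dependency descriptor, used only to show the resulting configuration mappings.
val dd = new DefaultDependencyDescriptor(ModuleRevisionId.newInstance("com.example", "lib", "1.0"), false)
ReplaceMavenConfigurationMappings.addMappings(dd, "compile", isOptional = false)
// dd now carries compile->compile(*), compile->master(compile) and runtime->runtime(*),
// so a module whose ivy.xml has no "master" configuration falls back to its "compile"
// configuration instead of leaking all of its configurations.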
+ REPLACEMENT_MAPPINGS.put( + "compile", + new PomModuleDescriptorBuilder.ConfMapper { + def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = { + if (isOptional) { + dd.addDependencyConfiguration("optional", "compile(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("optional", "master(compile)") + } else { + dd.addDependencyConfiguration("compile", "compile(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("compile", "master(compile)") + dd.addDependencyConfiguration("runtime", "runtime(*)") + } + } + } + ) + REPLACEMENT_MAPPINGS.put( + "provided", + new PomModuleDescriptorBuilder.ConfMapper { + def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = { + if (isOptional) { + dd.addDependencyConfiguration("optional", "compile(*)") + dd.addDependencyConfiguration("optional", "provided(*)") + dd.addDependencyConfiguration("optional", "runtime(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("optional", "master(compile)") + } else { + dd.addDependencyConfiguration("provided", "compile(*)") + dd.addDependencyConfiguration("provided", "provided(*)") + dd.addDependencyConfiguration("provided", "runtime(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("provided", "master(compile)") + } + } + } + ) + + REPLACEMENT_MAPPINGS.put( + "runtime", + new PomModuleDescriptorBuilder.ConfMapper { + def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = { + if (isOptional) { + dd.addDependencyConfiguration("optional", "compile(*)") + dd.addDependencyConfiguration("optional", "provided(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("optional", "master(compile)") + } else { + dd.addDependencyConfiguration("runtime", "compile(*)") + dd.addDependencyConfiguration("runtime", "runtime(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("runtime", "master(compile)") + } + } + } + ) + + REPLACEMENT_MAPPINGS.put( + "test", + new PomModuleDescriptorBuilder.ConfMapper { + def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = { + dd.addDependencyConfiguration("test", "runtime(*)") + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("test", "master(compile)") + } + } + ) + + REPLACEMENT_MAPPINGS.put( + "system", + new PomModuleDescriptorBuilder.ConfMapper { + def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = { + // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there. + dd.addDependencyConfiguration("system", "master(compile)") + } + } + ) + + REPLACEMENT_MAPPINGS + } + + def init(): Unit = { + // Here we mutate a static final field, because we have to AND because it's evil.
+ try { + val map = PomModuleDescriptorBuilder.MAVEN2_CONF_MAPPING + .asInstanceOf[java.util.Map[String, PomModuleDescriptorBuilder.ConfMapper]] + map.clear() + map.putAll(REPLACEMENT_MAVEN_MAPPINGS) + } catch { + case e: Exception => + // TODO - Log that Ivy may not be configured correctly and you could have maven/ivy issues. + throw new RuntimeException( + "FAILURE to install Ivy maven hooks. Your ivy-maven interaction may suffer resolution errors", + e + ) + } + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ComponentManager.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ComponentManager.scala new file mode 100644 index 000000000..7f84c3bf8 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ComponentManager.scala @@ -0,0 +1,113 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.File +import java.util.concurrent.Callable +import sbt.util.Logger +import sbt.librarymanagement._ + +/** + * A component manager provides access to the pieces of xsbt that are distributed as components. + * There are two types of components. The first type is compiled subproject jars with their dependencies. + * The second type is a subproject distributed as a source jar so that it can be compiled against a specific + * version of Scala. + * + * The component manager provides services to install and retrieve components to the local repository. + * This is used for compiled source jars so that the compilation need not be repeated for other projects on the same + * machine. + */ +class ComponentManager( + globalLock: xsbti.GlobalLock, + provider: xsbti.ComponentProvider, + ivyHome: Option[File], + val log: Logger +) { + private[this] val ivyCache = new IvyCache(ivyHome) + + /** Get all of the files for component 'id', throwing an exception if no files exist for the component. */ + def files(id: String)(ifMissing: IfMissing): Iterable[File] = { + def fromGlobal = + lockGlobalCache { + try { + update(id); getOrElse(createAndCache) + } catch { + case _: NotInCache => createAndCache + } + } + def getOrElse(orElse: => Iterable[File]): Iterable[File] = { + val existing = provider.component(id) + if (existing.isEmpty) orElse else existing + } + def notFound = invalid("Could not find required component '" + id + "'") + def createAndCache = + ifMissing match { + case IfMissing.Fail => notFound + case d: IfMissing.Define => + d() + if (d.cache) cache(id) + getOrElse(notFound) + } + + lockLocalCache { getOrElse(fromGlobal) } + } + + /** This is used to lock the local cache in project/boot/. By checking the local cache first, we can avoid grabbing a global lock. */ + private def lockLocalCache[T](action: => T): T = lock(provider.lockFile)(action) + + /** This is used to ensure atomic access to components in the global Ivy cache. */ + private def lockGlobalCache[T](action: => T): T = lock(ivyCache.lockFile)(action) + private def lock[T](file: File)(action: => T): T = + globalLock(file, new Callable[T] { def call = action }) + + /** Get the file for component 'id', throwing an exception if no files or multiple files exist for the component. 
*/ + def file(id: String)(ifMissing: IfMissing): File = + files(id)(ifMissing).toList match { + case x :: Nil => x + case xs => + invalid("Expected single file for component '" + id + "', found: " + xs.mkString(", ")) + } + private def invalid(msg: String) = throw new InvalidComponent(msg) + + def define(id: String, files: Iterable[File]) = lockLocalCache { + provider.defineComponent(id, files.toSeq.toArray) + } + + /** Retrieve the file for component 'id' from the local repository. */ + private def update(id: String): Unit = + ivyCache.withCachedJar(sbtModuleID(id), Some(globalLock), log)(jar => define(id, Seq(jar))) + + private def sbtModuleID(id: String) = + ModuleID(SbtArtifacts.Organization, id, ComponentManager.stampedVersion) + + /** Install the files for component 'id' to the local repository. This is usually used after writing files to the directory returned by 'location'. */ + def cache(id: String): Unit = + ivyCache.cacheJar(sbtModuleID(id), file(id)(IfMissing.Fail), Some(globalLock), log) + def clearCache(id: String): Unit = lockGlobalCache { + ivyCache.clearCachedJar(sbtModuleID(id), Some(globalLock), log) + } +} +class InvalidComponent(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { + def this(msg: String) = this(msg, null) +} +sealed trait IfMissing +object IfMissing { + object Fail extends IfMissing + final class Define(val cache: Boolean, define: => Unit) extends IfMissing { + def apply() = define + } +} +object ComponentManager { + lazy val (version, timestamp) = { + val properties = new java.util.Properties + val propertiesStream = getClass.getResourceAsStream("/xsbt.version.properties") + try { + properties.load(propertiesStream) + } finally { + propertiesStream.close() + } + (properties.getProperty("version"), properties.getProperty("timestamp")) + } + lazy val stampedVersion = version + "_" + timestamp +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ConvertResolver.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ConvertResolver.scala new file mode 100644 index 000000000..4a8a96e13 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ConvertResolver.scala @@ -0,0 +1,475 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.net.URI +import java.util.Collections + +import org.apache.ivy.core.module.descriptor.DependencyDescriptor +import org.apache.ivy.core.resolve.{ DownloadOptions, ResolveData } +import org.apache.ivy.core.settings.IvySettings +import org.apache.ivy.plugins.repository.{ RepositoryCopyProgressListener, Resource, TransferEvent } +import org.apache.ivy.plugins.resolver.{ + BasicResolver, + DependencyResolver, + IBiblioResolver, + RepositoryResolver +} +import org.apache.ivy.plugins.resolver.{ + AbstractPatternsBasedResolver, + AbstractSshBasedResolver, + FileSystemResolver, + SFTPResolver, + SshResolver, + URLResolver +} +import org.apache.ivy.plugins.repository.url.{ URLRepository => URLRepo } +import org.apache.ivy.plugins.repository.file.{ FileResource, FileRepository => FileRepo } +import java.io.{ File, IOException } +import java.util.Date + +import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact } +import org.apache.ivy.core.module.id.ModuleRevisionId +import org.apache.ivy.core.module.descriptor.DefaultArtifact +import org.apache.ivy.core.report.DownloadReport +import org.apache.ivy.plugins.resolver.util.{ ResolvedResource, ResourceMDParser } +import org.apache.ivy.util.{ 
ChecksumHelper, FileUtil, Message } +import scala.jdk.CollectionConverters._ +import sbt.internal.librarymanagement.mavenint.PomExtraDependencyAttributes +import sbt.io.IO +import sbt.util.Logger +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.UpdateOptions + +private[sbt] object ConvertResolver { + import UpdateOptions.ResolverConverter + + /** + * This class contains all the reflective lookups used in the + * checksum-friendly URL publishing shim. + */ + private object ChecksumFriendlyURLResolver { + import java.lang.reflect.AccessibleObject + private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] = + try { + val cls = classOf[RepositoryResolver] + val thing = f(cls) + thing.setAccessible(true) + Some(thing) + } catch { + case (_: java.lang.NoSuchFieldException) | (_: java.lang.SecurityException) | + (_: java.lang.NoSuchMethodException) => + None + } + private val signerNameField: Option[java.lang.reflect.Field] = + reflectiveLookup(_.getDeclaredField("signerName")) + private val putChecksumMethod: Option[java.lang.reflect.Method] = + reflectiveLookup( + _.getDeclaredMethod( + "putChecksum", + classOf[IArtifact], + classOf[File], + classOf[String], + classOf[Boolean], + classOf[String] + ) + ) + private val putSignatureMethod: Option[java.lang.reflect.Method] = + reflectiveLookup( + _.getDeclaredMethod( + "putSignature", + classOf[IArtifact], + classOf[File], + classOf[String], + classOf[Boolean] + ) + ) + } + + /** + * The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories + * will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore + * if we succeed in publishing an artifact, we need to just blast the checksums in place. + * This acts as a "shim" on RepositoryResolvers so that we can hook our methods into + * both the IBiblioResolver + URLResolver without having to duplicate the code in two + * places. However, this does mean our use of reflection is awesome. + * + * TODO - See about contributing back to ivy. + */ + private trait ChecksumFriendlyURLResolver extends RepositoryResolver { + import ChecksumFriendlyURLResolver._ + private def signerName: String = signerNameField match { + case Some(field) => field.get(this).asInstanceOf[String] + case None => null + } + override protected def put( + artifact: IArtifact, + src: File, + dest: String, + overwrite: Boolean + ): Unit = { + // verify the checksum algorithms before uploading artifacts! + val checksums = getChecksumAlgorithms() + val repository = getRepository() + for { + checksum <- checksums + if !ChecksumHelper.isKnownAlgorithm(checksum) + } throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum) + repository.put(artifact, src, dest, overwrite); + // Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so + // we need to overwrite what it has. + for (checksum <- checksums) { + putChecksumMethod match { + case Some(method) => + method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum) + case None => // TODO - issue warning? + } + } + if (signerName != null) { + putSignatureMethod match { + case None => () + case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean); () + } + } + } + } + + /** Converts the given sbt resolver into an Ivy resolver. 
*/ + @deprecated("Use the variant with updateOptions", "0.13.8") + def apply(r: Resolver, settings: IvySettings, log: Logger): DependencyResolver = + apply(r, settings, UpdateOptions(), log) + + private[librarymanagement] val ManagedChecksums = "sbt.managedChecksums" + + /** Converts the given sbt resolver into an Ivy resolver. */ + def apply( + r: Resolver, + settings: IvySettings, + updateOptions: UpdateOptions, + log: Logger + ): DependencyResolver = + (updateOptions.resolverConverter orElse defaultConvert)((r, settings, log)) + + /** The default implementation of converter. */ + lazy val defaultConvert: ResolverConverter = { case (r, settings, log) => + val managedChecksums = Option(settings.getVariable(ManagedChecksums)) match { + case Some(x) => x.toBoolean + case _ => false + } + r match { + case repo: MavenRepository => { + val pattern = Collections.singletonList( + Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern) + ) + final class PluginCapableResolver + extends IBiblioResolver + with ChecksumFriendlyURLResolver + with DescriptorRequired { + override val managedChecksumsEnabled: Boolean = managedChecksums + override def getResource(resource: Resource, dest: File): Long = get(resource, dest) + def setPatterns(): Unit = { + // done this way for access to protected methods. + setArtifactPatterns(pattern) + setIvyPatterns(pattern) + } + override protected def findResourceUsingPattern( + mrid: ModuleRevisionId, + pattern: String, + artifact: IArtifact, + rmdparser: ResourceMDParser, + date: Date + ): ResolvedResource = { + val extraAttributes = + mrid.getExtraAttributes.asScala.toMap.asInstanceOf[Map[String, String]] + getSbtPluginCrossVersion(extraAttributes) match { + case Some(sbtCrossVersion) => + // if the module is an sbt plugin + // we first try to resolve the artifact with the sbt cross version suffix + // and we fallback to the one without the suffix + val newArtifact = DefaultArtifact.cloneWithAnotherName( + artifact, + artifact.getName + sbtCrossVersion + ) + val resolved = + super.findResourceUsingPattern(mrid, pattern, newArtifact, rmdparser, date) + if (resolved != null) resolved + else super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date) + case None => + super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date) + } + } + } + val resolver = new PluginCapableResolver + if (repo.localIfFile) resolver.setRepository(new LocalIfFileRepo) + initializeMavenStyle(resolver, repo.name, repo.root) + resolver + .setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns + resolver + } + case repo: SshRepository => { + val resolver = new SshResolver with DescriptorRequired with ThreadSafeSshBasedResolver { + override val managedChecksumsEnabled: Boolean = managedChecksums + override def getResource(resource: Resource, dest: File): Long = get(resource, dest) + } + initializeSSHResolver(resolver, repo, settings) + repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm)) + resolver + } + case repo: SftpRepository => { + val resolver = new SFTPResolver with ThreadSafeSshBasedResolver + initializeSSHResolver(resolver, repo, settings) + resolver + } + case repo: FileRepository => { + val resolver = new FileSystemResolver with DescriptorRequired { + // Workaround for #1156 + // Temporarily in sbt 0.13.x we deprecate overwriting + // in local files for non-changing revisions. + // This will be fully enforced in sbt 1.0. 
+ setRepository(new WarnOnOverwriteFileRepo()) + override val managedChecksumsEnabled: Boolean = managedChecksums + override def getResource(resource: Resource, dest: File): Long = get(resource, dest) + } + resolver.setName(repo.name) + initializePatterns(resolver, repo.patterns, settings) + import repo.configuration.{ isLocal, isTransactional } + resolver.setLocal(isLocal) + isTransactional.foreach(value => resolver.setTransactional(value.toString)) + resolver + } + case repo: URLRepository => { + val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired { + override val managedChecksumsEnabled: Boolean = managedChecksums + override def getResource(resource: Resource, dest: File): Long = get(resource, dest) + } + resolver.setName(repo.name) + initializePatterns(resolver, repo.patterns, settings) + resolver + } + case repo: ChainedResolver => + IvySbt.resolverChain(repo.name, repo.resolvers, settings, log) + case repo: RawRepository => + repo.resolver match { + case r: DependencyResolver => r + } + } + } + + private def getSbtPluginCrossVersion(extraAttributes: Map[String, String]): Option[String] = { + for { + sbtVersion <- extraAttributes.get(PomExtraDependencyAttributes.SbtVersionKey) + scalaVersion <- extraAttributes.get(PomExtraDependencyAttributes.ScalaVersionKey) + } yield s"_${scalaVersion}_$sbtVersion" + } + + private sealed trait DescriptorRequired extends BasicResolver { + // Works around implementation restriction to access protected method `get` + def getResource(resource: Resource, dest: File): Long + + /** + * Defines an option to tell ivy to disable checksums when downloading and + * let the user handle verifying these checksums. + * + * This means that the checksums are stored in the ivy cache directory. This + * is good for reproducibility from outside ivy. Sbt can check that jars are + * not corrupted, ever, independently of trusting whatever it's there in the + * local directory. 
+ */ + def managedChecksumsEnabled: Boolean + + import sbt.io.syntax._ + private def downloadChecksum( + resource: Resource, + targetChecksumFile: File, + algorithm: String + ): Boolean = { + if (!ChecksumHelper.isKnownAlgorithm(algorithm)) + throw new IllegalArgumentException(s"Unknown checksum algorithm: $algorithm") + + val checksumResource = resource.clone(s"${resource.getName}.$algorithm") + if (!checksumResource.exists) false + else { + Message.debug(s"$algorithm file found for $resource: downloading...") + // Resource must be cleaned up outside of this function if it's invalid + getResource(checksumResource, targetChecksumFile) + true + } + } + + private final val PartEnd = ".part" + private final val JarEnd = ".jar" + private final val TemporaryJar = JarEnd + PartEnd + + override def getAndCheck(resource: Resource, target: File): Long = { + val targetPath = target.getAbsolutePath + if (!managedChecksumsEnabled || !targetPath.endsWith(TemporaryJar)) { + super.getAndCheck(resource, target) + } else { + // +ivy deviation + val size = getResource(resource, target) + val checksumAlgorithms = getChecksumAlgorithms + checksumAlgorithms.foldLeft(false) { (checked, algorithm) => + // Continue checking until we hit a failure + val checksumFile = new File(targetPath.stripSuffix(PartEnd) + s".$algorithm") + if (checked) checked + else downloadChecksum(resource, checksumFile, algorithm) + } + // -ivy deviation + size + } + } + + override def getDependency(dd: DependencyDescriptor, data: ResolveData) = { + val prev = descriptorString(isAllownomd) + setDescriptor(descriptorString(hasExplicitURL(dd))) + val t = + try super.getDependency(dd, data) + finally setDescriptor(prev) + t + } + def descriptorString(optional: Boolean) = + if (optional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED + def hasExplicitURL(dd: DependencyDescriptor): Boolean = + dd.getAllDependencyArtifacts.exists(_.getUrl != null) + } + private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String): Unit = { + resolver.setName(name) + resolver.setM2compatible(true) + resolver.setRoot(root) + } + private def initializeSSHResolver( + resolver: AbstractSshBasedResolver, + repo: SshBasedRepository, + settings: IvySettings + ): Unit = { + resolver.setName(repo.name) + resolver.setPassfile(null) + initializePatterns(resolver, repo.patterns, settings) + initializeConnection(resolver, repo.connection) + } + private def initializeConnection( + resolver: AbstractSshBasedResolver, + connection: SshConnection + ): Unit = { + import resolver._ + import connection._ + hostname.foreach(setHost) + port.foreach(setPort) + authentication foreach { + case pa: PasswordAuthentication => + setUser(pa.user) + pa.password.foreach(setUserPassword) + case kfa: KeyFileAuthentication => + setKeyFile(kfa.keyfile) + kfa.password.foreach(setKeyFilePassword) + setUser(kfa.user) + } + } + private def initializePatterns( + resolver: AbstractPatternsBasedResolver, + patterns: Patterns, + settings: IvySettings + ): Unit = { + resolver.setM2compatible(patterns.isMavenCompatible) + resolver.setDescriptor( + if (patterns.descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL + else BasicResolver.DESCRIPTOR_REQUIRED + ) + resolver.setCheckconsistency(!patterns.skipConsistencyCheck) + patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings substitute p)) + patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings substitute p)) + } + + /** + * A custom Ivy URLRepository that returns 
FileResources for file URLs. + * This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache. + */ + private[this] final class LocalIfFileRepo extends URLRepo { + private[this] val repo = new WarnOnOverwriteFileRepo() + private[this] val progress = new RepositoryCopyProgressListener(this); + override def getResource(source: String) = { + val uri = new URI(source) + if (uri.getScheme == IO.FileScheme) + new FileResource(repo, IO.toFile(uri)) + else + super.getResource(source) + } + + override def put(source: File, destination: String, overwrite: Boolean): Unit = { + val uri = new URI(destination) + try { + if (uri.getScheme != IO.FileScheme) super.put(source, destination, overwrite) + else { + // Here we duplicate the put method for files so we don't just bail on trying ot use Http handler + val resource = getResource(destination) + if (!overwrite && resource.exists()) { + throw new IOException(s"destination file exists and overwrite == false"); + } + fireTransferInitiated(resource, TransferEvent.REQUEST_PUT); + try { + val totalLength = source.length + if (totalLength > 0) { + progress.setTotalLength(totalLength); + } + FileUtil.copy(source, new java.io.File(uri), progress, overwrite) + () + } catch { + case ex: IOException => + fireTransferError(ex) + throw ex + case ex: RuntimeException => + fireTransferError(ex) + throw ex + } finally { + progress.setTotalLength(null); + } + } + } catch { + // This error could be thrown either by super.put or the above + case ex: IOException if ex.getMessage.contains("destination file exists") => + throw new IOException( + s"""PUT operation failed because the destination file exists and overwriting is disabled: + | source : $source + | destination: $destination + |If you have a staging repository that has failed, drop it and start over. + |Otherwise fix the double publishing, or relax the setting as follows: + | publishConfiguration := publishConfiguration.value.withOverwrite(true) + | publishLocalConfiguration := publishLocalConfiguration.value.withOverwrite(true) + | + |If you have a remote cache repository, you can enable overwriting as follows: + | pushRemoteCacheConfiguration := pushRemoteCacheConfiguration.value.withOverwrite(true) + |""".stripMargin, + ex + ) + } + } + } + + private[this] final class WarnOnOverwriteFileRepo extends FileRepo() { + override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = { + try super.put(source, destination, overwrite) + catch { + case e: java.io.IOException if e.getMessage.contains("destination already exists") => + val overwriteWarning = + if (destination contains "-SNAPSHOT") s"Attempting to overwrite $destination" + else + s"Attempting to overwrite $destination (non-SNAPSHOT)\n\tYou need to remove it from the cache manually to take effect." 
+ import org.apache.ivy.util.Message + Message.warn(overwriteWarning) + super.put(source, destination, true) + } + } + } + + private sealed trait ThreadSafeSshBasedResolver + extends org.apache.ivy.plugins.resolver.AbstractSshBasedResolver { +//uncomment to test non-threadsafe behavior +// private def lock = new Object + private val lock = org.apache.ivy.plugins.repository.ssh.SshCache.getInstance + override def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport = + lock.synchronized { + super.download(artifacts, options) + } + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/CustomPomParser.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/CustomPomParser.scala new file mode 100644 index 000000000..8f7170d75 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/CustomPomParser.scala @@ -0,0 +1,347 @@ +package sbt.internal.librarymanagement + +import org.apache.ivy.core.module.id.ModuleRevisionId +import org.apache.ivy.core.module.descriptor.{ + DefaultArtifact, + DefaultExtendsDescriptor, + DefaultModuleDescriptor, + ModuleDescriptor +} +import org.apache.ivy.core.module.descriptor.{ DefaultDependencyDescriptor, DependencyDescriptor } +import org.apache.ivy.plugins.parser.{ + ModuleDescriptorParser, + ModuleDescriptorParserRegistry, + ParserSettings +} +import org.apache.ivy.plugins.parser.m2.{ + ReplaceMavenConfigurationMappings, + PomModuleDescriptorBuilder, + PomModuleDescriptorParser +} +import org.apache.ivy.plugins.repository.Resource +import org.apache.ivy.plugins.namespace.NamespaceTransformer +import org.apache.ivy.util.extendable.ExtendableItem + +import java.io.{ File, InputStream } +import java.net.URL +import sbt.internal.librarymanagement.mavenint.{ + PomExtraDependencyAttributes, + SbtPomExtraProperties +} +import sbt.io.Hash +import scala.collection.immutable.ArraySeq + +// @deprecated("We now use an Aether-based pom parser.", "0.13.8") +final class CustomPomParser( + delegate: ModuleDescriptorParser, + transform: (ModuleDescriptorParser, ModuleDescriptor) => ModuleDescriptor +) extends ModuleDescriptorParser { + override def parseDescriptor( + ivySettings: ParserSettings, + descriptorURL: URL, + validate: Boolean + ) = + transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, validate)) + + override def parseDescriptor( + ivySettings: ParserSettings, + descriptorURL: URL, + res: Resource, + validate: Boolean + ) = + transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, res, validate)) + + override def toIvyFile(is: InputStream, res: Resource, destFile: File, md: ModuleDescriptor) = + delegate.toIvyFile(is, res, destFile, md) + + override def accept(res: Resource) = delegate.accept(res) + override def getType() = delegate.getType() + override def getMetadataArtifact(mrid: ModuleRevisionId, res: Resource) = + delegate.getMetadataArtifact(mrid, res) +} +// @deprecated("We now use an Aether-based pom parser.", "0.13.8") +object CustomPomParser { + + // Evil hackery to override the default maven pom mappings. + ReplaceMavenConfigurationMappings.init() + + /** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution. 
*/ + val InfoKeyPrefix = SbtPomExtraProperties.POM_INFO_KEY_PREFIX + val ApiURLKey = SbtPomExtraProperties.POM_API_KEY + val VersionSchemeKey = SbtPomExtraProperties.VERSION_SCHEME_KEY + + val SbtVersionKey = PomExtraDependencyAttributes.SbtVersionKey + val ScalaVersionKey = PomExtraDependencyAttributes.ScalaVersionKey + val ExtraAttributesKey = PomExtraDependencyAttributes.ExtraAttributesKey + private[this] val unqualifiedKeys = + Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey, VersionSchemeKey) + + /** + * In the new POM format of sbt plugins, the dependency to an sbt plugin + * contains the sbt cross-version _2.12_1.0. The reason is we want Maven to be able + * to resolve the dependency using the pattern: + * /_2.12_1.0//_2.12_1.0-.pom + * In sbt 1.x we use extra-attributes to resolve sbt plugins, so here we must remove + * the sbt cross-version and keep the extra-attributes. + * Parsing a dependency found in the new POM format produces the same module as + * if it is found in the old POM format. It used not to contain the sbt cross-version + * suffix, but that was invalid. + * Hence we can resolve conflicts between new and old POM formats. + * + * To compare the two formats you can look at the POMs in: + * https://repo1.maven.org/maven2/ch/epfl/scala/sbt-plugin-example-diamond_2.12_1.0/0.5.0/ + */ + private def removeSbtCrossVersion( + properties: Map[String, String], + moduleName: String + ): String = { + val sbtCrossVersion = for { + sbtVersion <- properties.get(s"e:$SbtVersionKey") + scalaVersion <- properties.get(s"e:$ScalaVersionKey") + } yield s"_${scalaVersion}_$sbtVersion" + sbtCrossVersion.map(moduleName.stripSuffix).getOrElse(moduleName) + } + + // packagings that should be jars, but that Ivy doesn't handle as jars + // TODO - move this elsewhere. + val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit", "scala-jar") + val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform) + + private[this] val TransformedHashKey = "e:sbtTransformHash" + // A hash of the parameters transformation is based on. + // If a descriptor has a different hash, we need to retransform it. + private[this] def makeCoords(mrid: ModuleRevisionId): String = + s"${mrid.getOrganisation}:${mrid.getName}:${mrid.getRevision}" + + // We now include the ModuleID in a hash, to ensure that parent-pom transformations don't corrupt child poms. + private[this] def MakeTransformHash(md: ModuleDescriptor): String = { + val coords: String = makeCoords(md.getModuleRevisionId) + + hash((unqualifiedKeys ++ JarPackagings ++ Set(coords)).toSeq.sorted) + } + + private[this] def hash(ss: Seq[String]): String = + Hash.toHex(Hash(ss.flatMap(_ getBytes "UTF-8").toArray)) + + // Unfortunately, ModuleDescriptorParserRegistry is add-only and is a singleton instance. 
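As a concrete illustration of `removeSbtCrossVersion` above (a private helper, so this is only a conceptual, self-contained sketch with example values, assuming the conventional `e:scalaVersion`/`e:sbtVersion` attribute names and the example module from the scaladoc):

val properties = Map("e:sbtVersion" -> "1.0", "e:scalaVersion" -> "2.12")
// Mirrors the helper: the suffix is only computed when both attributes are present.
val sbtCrossVersion = for {
  sbtVersion   <- properties.get("e:sbtVersion")
  scalaVersion <- properties.get("e:scalaVersion")
} yield s"_${scalaVersion}_$sbtVersion"                            // Some("_2.12_1.0")
val moduleName = "sbt-plugin-example-diamond_2.12_1.0"
sbtCrossVersion.map(moduleName.stripSuffix).getOrElse(moduleName)  // "sbt-plugin-example-diamond"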
+ lazy val registerDefault: Unit = ModuleDescriptorParserRegistry.getInstance.addParser(default) + + def defaultTransform(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = + if (transformedByThisVersion(md)) md + else defaultTransformImpl(parser, md) + + private[this] def transformedByThisVersion(md: ModuleDescriptor): Boolean = { + val oldTransformedHashKey = "sbtTransformHash" + val extraInfo = md.getExtraInfo + val MyHash = MakeTransformHash(md) + // sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both + Option(extraInfo).isDefined && + ((Option(extraInfo get TransformedHashKey) orElse Option( + extraInfo get oldTransformedHashKey + )) match { + case Some(MyHash) => true + case _ => false + }) + } + + private[this] def defaultTransformImpl( + parser: ModuleDescriptorParser, + md: ModuleDescriptor + ): ModuleDescriptor = { + val properties = getPomProperties(md) + + // Extracts extra attributes (currently, sbt and Scala versions) stored in the <properties> element of the pom. + // These are attached to the module itself. + val filtered = shouldBeUnqualified(properties) + + // Extracts extra attributes for the dependencies. + // Because the <dependency> tag in pom.xml cannot include additional metadata, + // sbt includes extra attributes in an 'extraDependencyAttributes' property. + // This is read/written from/to a pure string (no element structure) because Ivy only + // parses the immediate text nodes of the property. + val extraDepAttributes = getDependencyExtra(filtered) + + val unqualify = toUnqualify(filtered) + + // Here we always add extra attributes. There's a scenario where parent-pom information corrupts child-poms with "e:" namespaced xml elements + // and we have to force every generated xml file to have the appropriate xml namespace + addExtra(unqualify, extraDepAttributes, parser, md) + } + // The <properties> element of the pom is used to store additional metadata, such as for sbt plugins or for the base URL for API docs. + // This is done because the pom XSD does not appear to allow extra metadata anywhere else. + // The extra sbt plugin metadata in pom.xml does not need to be readable by maven, but the other information may be. + // However, the pom.xml needs to be valid in all cases because other tools like repository managers may read the pom.xml.
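For example, a whitelisted pom property ends up as an `e:`-prefixed Ivy extra attribute via the `toUnqualify` step below. A minimal, self-contained sketch (the `info.apiURL` key is illustrative; only the 'extraDependencyAttributes' property name comes from the comments above):

val filtered = Map(
  "info.apiURL"               -> "https://example.invalid/api/",
  "extraDependencyAttributes" -> "<serialized dependency extras>"
)
// Mirrors toUnqualify: drop the dependency-extras property, re-namespace the rest for Ivy.
val unqualify = (filtered - "extraDependencyAttributes").map { case (k, v) => ("e:" + k, v) }
// unqualify == Map("e:info.apiURL" -> "https://example.invalid/api/")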
+ private[sbt] def getPomProperties(md: ModuleDescriptor): Map[String, String] = { + import scala.jdk.CollectionConverters._ + PomModuleDescriptorBuilder + .extractPomProperties(md.getExtraInfo) + .asInstanceOf[java.util.Map[String, String]] + .asScala + .toMap + } + private[sbt] def toUnqualify(propertyAttributes: Map[String, String]): Map[String, String] = + (propertyAttributes - ExtraAttributesKey) map { case (k, v) => ("e:" + k, v) } + + private[this] def shouldBeUnqualified(m: Map[String, String]): Map[String, String] = + m.view.filterKeys(unqualifiedKeys).toMap + + private[this] def addExtra( + properties: Map[String, String], + id: ModuleRevisionId + ): ModuleRevisionId = { + import scala.jdk.CollectionConverters._ + val oldExtra = qualifiedExtra(id) + val newExtra = (oldExtra ++ properties).asJava + // remove the sbt plugin cross version from the resolved ModuleRevisionId + // sbt-plugin-example_2.12_1.0 => sbt-plugin-example + val nameWithoutCrossVersion = removeSbtCrossVersion(properties, id.getName) + ModuleRevisionId.newInstance( + id.getOrganisation, + nameWithoutCrossVersion, + id.getBranch, + id.getRevision, + newExtra + ) + } + + private[this] def getDependencyExtra( + m: Map[String, String] + ): Map[ModuleRevisionId, Map[String, String]] = + PomExtraDependencyAttributes.getDependencyExtra(m) + + def qualifiedExtra(item: ExtendableItem): Map[String, String] = + PomExtraDependencyAttributes.qualifiedExtra(item) + def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String, String] = + qualifiedExtra(item).view.filterKeys { k => + qualifiedIsExtra(k) == include + }.toMap + + def writeDependencyExtra(s: Seq[DependencyDescriptor]): Seq[String] = + PomExtraDependencyAttributes.writeDependencyExtra(s) + + // parses the sequence of dependencies with extra attribute information, with one dependency per line + def readDependencyExtra(s: String): Seq[ModuleRevisionId] = + PomExtraDependencyAttributes.readDependencyExtra(s) + + def qualifiedIsExtra(k: String): Boolean = PomExtraDependencyAttributes.qualifiedIsExtra(k) + + // Reduces the id to exclude custom extra attributes + // This makes the id suitable as a key to associate a dependency parsed from a element + // with the extra attributes from the section + def simplify(id: ModuleRevisionId): ModuleRevisionId = PomExtraDependencyAttributes.simplify(id) + + private[this] def addExtra( + dep: DependencyDescriptor, + extra: Map[ModuleRevisionId, Map[String, String]] + ): DependencyDescriptor = { + val extras = if (extra.isEmpty) None else extra get simplify(dep.getDependencyRevisionId) + extras match { + case None => dep + case Some(extraAttrs) => transform(dep, revId => addExtra(extraAttrs, revId)) + } + } + private[this] def transform( + dep: DependencyDescriptor, + f: ModuleRevisionId => ModuleRevisionId + ): DependencyDescriptor = + DefaultDependencyDescriptor.transformInstance( + dep, + namespaceTransformer(dep.getDependencyRevisionId, f), + false + ) + + private[this] def namespaceTransformer( + txId: ModuleRevisionId, + f: ModuleRevisionId => ModuleRevisionId + ): NamespaceTransformer = + new NamespaceTransformer { + def transform(revId: ModuleRevisionId): ModuleRevisionId = + if (revId == txId) f(revId) else revId + def isIdentity = false + } + + // TODO: It would be better if we can make dd.isForce to `false` when VersionRange.isVersionRange is `true`. 
+ private[this] def stripVersionRange(dd: DependencyDescriptor): DependencyDescriptor = + VersionRange.stripMavenVersionRange(dd.getDependencyRevisionId.getRevision) match { + case Some(newVersion) => + val id = dd.getDependencyRevisionId + val newId = ModuleRevisionId.newInstance( + id.getOrganisation, + id.getName, + id.getBranch, + newVersion, + id.getExtraAttributes + ) + transform(dd, _ => newId) + case None => dd + } + + import scala.jdk.CollectionConverters._ + def addExtra( + properties: Map[String, String], + dependencyExtra: Map[ModuleRevisionId, Map[String, String]], + parser: ModuleDescriptorParser, + md: ModuleDescriptor + ): ModuleDescriptor = { + val dmd = new DefaultModuleDescriptor(parser, md.getResource) + + val mrid = addExtra(properties, md.getModuleRevisionId) + val resolvedMrid = addExtra(properties, md.getResolvedModuleRevisionId) + dmd.setModuleRevisionId(mrid) + dmd.setResolvedModuleRevisionId(resolvedMrid) + + dmd.setDefault(md.isDefault) + dmd.setHomePage(md.getHomePage) + dmd.setDescription(md.getDescription) + dmd.setLastModified(md.getLastModified) + dmd.setStatus(md.getStatus()) + dmd.setPublicationDate(md.getPublicationDate()) + dmd.setResolvedPublicationDate(md.getResolvedPublicationDate()) + + for (l <- md.getLicenses) dmd.addLicense(l) + for ((key, value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String, String]].asScala) + dmd.addExtraInfo(key, value) + dmd.addExtraInfo( + TransformedHashKey, + MakeTransformHash(md) + ) // mark as transformed by this version, so we don't need to do it again + for ( + (key, value) <- md.getExtraAttributesNamespaces + .asInstanceOf[java.util.Map[String, String]] + .asScala + ) dmd.addExtraAttributeNamespace(key, value) + IvySbt.addExtraNamespace(dmd) + + val withExtra = ArraySeq.unsafeWrapArray(md.getDependencies) map { dd => + addExtra(dd, dependencyExtra) + } + val withVersionRangeMod: Seq[DependencyDescriptor] = + if (LMSysProp.modifyVersionRange) withExtra map { stripVersionRange } + else withExtra + val unique = IvySbt.mergeDuplicateDefinitions(withVersionRangeMod) + unique foreach dmd.addDependency + + for (ed <- md.getInheritedDescriptors) + dmd.addInheritedDescriptor( + new DefaultExtendsDescriptor(md, ed.getLocation, ed.getExtendsTypes) + ) + for (conf <- md.getConfigurations) { + dmd.addConfiguration(conf) + for (art <- md.getArtifacts(conf.getName)) { + val ext = art.getExt + val newExt = if (JarPackagings(ext)) "jar" else ext + val nart = new DefaultArtifact( + mrid, + art.getPublicationDate, + art.getName, + art.getType, + newExt, + art.getUrl, + art.getQualifiedExtraAttributes + ) + dmd.addArtifact(conf.getName, nart) + } + } + dmd + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/CustomXmlParser.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/CustomXmlParser.scala new file mode 100644 index 000000000..a72a65339 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/CustomXmlParser.scala @@ -0,0 +1,39 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.ByteArrayInputStream +import java.net.URL + +import org.apache.ivy.core.module.descriptor.{ + DefaultDependencyDescriptor, + DefaultModuleDescriptor +} +import org.apache.ivy.core.settings.IvySettings +import org.apache.ivy.plugins.parser.xml.XmlModuleDescriptorParser +import org.apache.ivy.plugins.repository.Resource +import org.apache.ivy.plugins.repository.url.URLResource + +/** Subclasses the default Ivy file parser 
in order to provide access to protected methods. */ +private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser { + import XmlModuleDescriptorParser.Parser + class CustomParser(settings: IvySettings, defaultConfig: Option[String]) + extends Parser(CustomXmlParser, settings) { + def setSource(url: URL) = { + super.setResource(new URLResource(url)) + super.setInput(url) + } + def setInput(bytes: Array[Byte]): Unit = setInput(new ByteArrayInputStream(bytes)) + + /** Overridden because the super implementation overwrites the module descriptor. */ + override def setResource(res: Resource): Unit = () + override def setMd(md: DefaultModuleDescriptor) = { + super.setMd(md) + if (defaultConfig.isDefined) setDefaultConfMapping("*->default(compile)") + } + override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = + super.parseDepsConfs(confs, dd) + override def getDefaultConf = defaultConfig.getOrElse(super.getDefaultConf) + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/FakeResolver.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/FakeResolver.scala new file mode 100644 index 000000000..00c0275ae --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/FakeResolver.scala @@ -0,0 +1,217 @@ +package sbt + +import java.io.File +import java.net.URI + +import org.apache.ivy.core.cache.ArtifactOrigin +import org.apache.ivy.core.cache.{ DefaultRepositoryCacheManager, RepositoryCacheManager } +import org.apache.ivy.core.module.descriptor.{ + Artifact => IvyArtifact, + DefaultArtifact, + DefaultDependencyArtifactDescriptor, + DefaultModuleDescriptor, + DependencyArtifactDescriptor, + DependencyDescriptor +} +import org.apache.ivy.core.module.id.ModuleRevisionId +import org.apache.ivy.core.report.ArtifactDownloadReport +import org.apache.ivy.core.report.{ DownloadReport, DownloadStatus } +import org.apache.ivy.core.report.MetadataArtifactDownloadReport +import org.apache.ivy.core.resolve.{ DownloadOptions, ResolveData, ResolvedModuleRevision } +import org.apache.ivy.core.search.{ ModuleEntry, OrganisationEntry, RevisionEntry } +import org.apache.ivy.core.settings.IvySettings +import org.apache.ivy.plugins.namespace.Namespace +import org.apache.ivy.plugins.resolver.{ DependencyResolver, ResolverSettings } +import org.apache.ivy.plugins.resolver.util.ResolvedResource + +import FakeResolver._ + +/** + * A fake `DependencyResolver` that statically serves predefined artifacts. 
+ */ +private[sbt] class FakeResolver(private var name: String, cacheDir: File, modules: ModulesMap) + extends DependencyResolver { + + private object Artifact { + def unapply(art: IvyArtifact): Some[(String, String, String)] = { + val revisionID = art.getModuleRevisionId() + val organisation = revisionID.getOrganisation + val name = revisionID.getName + val revision = revisionID.getRevision + Some((organisation, name, revision)) + } + + def unapply(dd: DependencyDescriptor): Some[(String, String, String)] = { + val module = dd.getDependencyId() + val organisation = module.getOrganisation + val name = module.getName + val mrid = dd.getDependencyRevisionId() + val revision = mrid.getRevision() + Some((organisation, name, revision)) + } + } + + override def publish(artifact: IvyArtifact, src: File, overwrite: Boolean): Unit = + throw new UnsupportedOperationException("This resolver doesn't support publishing.") + + override def abortPublishTransaction(): Unit = + throw new UnsupportedOperationException("This resolver doesn't support publishing.") + + override def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean): Unit = + throw new UnsupportedOperationException("This resolver doesn't support publishing.") + + override def commitPublishTransaction(): Unit = + throw new UnsupportedOperationException("This resolver doesn't support publishing.") + + override def download( + artifact: ArtifactOrigin, + options: DownloadOptions + ): ArtifactDownloadReport = { + + val report = new ArtifactDownloadReport(artifact.getArtifact) + val path = new URI(artifact.getLocation).getPath + val localFile = new File(path) + + if (path.nonEmpty && localFile.exists) { + report.setLocalFile(localFile) + report.setDownloadStatus(DownloadStatus.SUCCESSFUL) + report.setSize(localFile.length) + } else { + report.setDownloadStatus(DownloadStatus.FAILED) + } + + report + } + + override def download(artifacts: Array[IvyArtifact], options: DownloadOptions): DownloadReport = { + val report = new DownloadReport + + artifacts foreach { art => + Option(locate(art)) foreach (o => report.addArtifactReport(download(o, options))) + } + + report + } + + override def dumpSettings(): Unit = () + + override def exists(artifact: IvyArtifact): Boolean = { + val Artifact(organisation, name, revision) = artifact + modules.get((organisation, name, revision)).isDefined + } + + // This is a fake resolver and we don't have Ivy files. Ivy's spec says we can return `null` if + // we can't find the module descriptor. 
+ override def findIvyFileRef(dd: DependencyDescriptor, data: ResolveData): ResolvedResource = null + + override def getDependency( + dd: DependencyDescriptor, + data: ResolveData + ): ResolvedModuleRevision = { + + val Artifact(organisation, name, revision) = dd + val mrid = dd.getDependencyRevisionId() + + val artifact = modules get ((organisation, name, revision)) map { arts => + val artifacts: Array[DependencyArtifactDescriptor] = arts.toArray map (_ artifactOf dd) + val moduleDescriptor = DefaultModuleDescriptor.newDefaultInstance(mrid, artifacts) + val defaultArtifact = arts.headOption match { + case Some(FakeArtifact(name, tpe, ext, _)) => + new DefaultArtifact(mrid, new java.util.Date, name, tpe, ext) + case None => null + } + val metadataReport = new MetadataArtifactDownloadReport(defaultArtifact) + metadataReport.setDownloadStatus(DownloadStatus.SUCCESSFUL) + + new ResolvedModuleRevision(this, this, moduleDescriptor, metadataReport) + } + + artifact.orNull + + } + + override def getName(): String = name + + override val getNamespace: Namespace = { + val ns = new Namespace() + ns.setName(name) + ns + } + + override val getRepositoryCacheManager: RepositoryCacheManager = { + val cacheName = name + "-cache" + val ivySettings = new IvySettings() + val baseDir = cacheDir + new DefaultRepositoryCacheManager(cacheName, ivySettings, baseDir) + } + + override def listModules(organisation: OrganisationEntry): Array[ModuleEntry] = + modules.keys.collect { + case (o, m, _) if o == organisation.getOrganisation => + val organisationEntry = new OrganisationEntry(this, o) + new ModuleEntry(organisationEntry, m) + }.toArray + + override def listOrganisations(): Array[OrganisationEntry] = + modules.keys.map { case (o, _, _) => new OrganisationEntry(this, o) }.toArray + + override def listRevisions(module: ModuleEntry): Array[RevisionEntry] = + modules.keys.collect { + case (o, m, v) if o == module.getOrganisation && m == module.getModule => + new RevisionEntry(module, v) + }.toArray + + override def listTokenValues( + tokens: Array[String], + criteria: java.util.Map[_, _] + ): Array[java.util.Map[_, _]] = + Array.empty + + override def listTokenValues( + token: String, + otherTokenValues: java.util.Map[_, _] + ): Array[String] = + Array.empty + + override def locate(art: IvyArtifact): ArtifactOrigin = { + val Artifact(moduleOrganisation, moduleName, moduleRevision) = art + val artifact = + for { + artifacts <- modules get ((moduleOrganisation, moduleName, moduleRevision)) + artifact <- artifacts find (a => + a.name == art.getName && a.tpe == art.getType && a.ext == art.getExt + ) + } yield new ArtifactOrigin(art, /* isLocal = */ true, artifact.file.toURI.toURL.toString) + + artifact.orNull + + } + + override def reportFailure(art: IvyArtifact): Unit = () + override def reportFailure(): Unit = () + + override def setName(name: String): Unit = { + this.name = name + getNamespace.setName(name) + } + + override def setSettings(settings: ResolverSettings): Unit = () + +} + +private[sbt] object FakeResolver { + + type ModulesMap = Map[(String, String, String), Seq[FakeArtifact]] + + final case class FakeArtifact(name: String, tpe: String, ext: String, file: File) { + def artifactOf(dd: DependencyDescriptor): DependencyArtifactDescriptor = + new DefaultDependencyArtifactDescriptor( + dd, + name, + tpe, + ext, + file.toURI.toURL, + new java.util.HashMap + ) + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/Ivy.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/Ivy.scala 
new file mode 100644 index 000000000..074e7c380 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/Ivy.scala @@ -0,0 +1,1136 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.File +import java.net.URI +import java.util.concurrent.Callable + +import org.apache.ivy.Ivy +import org.apache.ivy.core.IvyPatternHelper +import org.apache.ivy.core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager } +import org.apache.ivy.core.event.EventManager +import org.apache.ivy.core.module.descriptor.{ + DefaultArtifact, + DefaultDependencyArtifactDescriptor, + MDArtifact, + Artifact => IArtifact +} +import org.apache.ivy.core.module.descriptor.{ + DefaultDependencyDescriptor, + DefaultModuleDescriptor, + DependencyDescriptor, + License, + ModuleDescriptor +} +import org.apache.ivy.core.module.descriptor.OverrideDependencyDescriptorMediator +import org.apache.ivy.core.module.id.{ ModuleId, ModuleRevisionId } +import org.apache.ivy.core.resolve._ +import org.apache.ivy.core.settings.IvySettings +import org.apache.ivy.core.sort.SortEngine +import org.apache.ivy.plugins.matcher.PatternMatcher +import org.apache.ivy.plugins.resolver.DependencyResolver +import org.apache.ivy.util.{ Message, MessageLogger } +import org.apache.ivy.util.extendable.ExtendableItem +import org.apache.ivy.util.url._ +import scala.xml.NodeSeq +import scala.collection.mutable +import scala.collection.immutable.ArraySeq +import scala.util.{ Success, Failure } +import sbt.util._ +import sbt.librarymanagement.{ ModuleDescriptorConfiguration => InlineConfiguration, _ } +import sbt.librarymanagement.Platform +import sbt.librarymanagement.ivy._ +import sbt.librarymanagement.syntax._ + +import IvyInternalDefaults._ +import Resolver.PluginPattern +import ivyint.{ + CachedResolutionResolveCache, + CachedResolutionResolveEngine, + ParallelResolveEngine, + SbtDefaultDependencyDescriptor, +} +import sjsonnew.JsonFormat +import sjsonnew.support.murmurhash.Hasher +import sbt.librarymanagement.ModuleSettings + +final class IvySbt( + val configuration: IvyConfiguration, +) { self => + /* + * ========== Configuration/Setup ============ + * This part configures the Ivy instance by first creating the logger interface to ivy, then IvySettings, and then the Ivy instance. + * These are lazy so that they are loaded within the right context. This is important so that no Ivy XML configuration needs to be loaded, + * saving some time. This is necessary because Ivy has global state (IvyContext, Message, DocumentBuilder, ...). + */ + + private def withDefaultLogger[T](logger: MessageLogger)(f: => T): T = { + def action() = + IvySbt.synchronized { + val originalLogger = Message.getDefaultLogger + Message.setDefaultLogger(logger) + try { + f + } finally { + Message.setDefaultLogger(originalLogger) + } + } + // Ivy is not thread-safe nor can the cache be used concurrently. + // If provided a GlobalLock, we can use that to ensure safe access to the cache. + // Otherwise, we can at least synchronize within the JVM. + // For thread-safety in particular, Ivy uses a static DocumentBuilder, which is not thread-safe. 
+ configuration.lock match { + case Some(lock) => lock(ivyLockFile, new Callable[T] { def call = action() }) + case None => action() + } + } + + private lazy val basicUrlHandler: URLHandler = new BasicURLHandler + + private lazy val settings: IvySettings = { + val dispatcher: URLHandlerDispatcher = URLHandlerRegistry.getDefault match { + // If the default is already a URLHandlerDispatcher then just use that + case disp: URLHandlerDispatcher => disp + + // Otherwise wrap the existing URLHandler in a URLHandlerDispatcher + // while retaining the existing URLHandler as the default. + case default => + val disp: URLHandlerDispatcher = new URLHandlerDispatcher() + disp.setDefault(default) + URLHandlerRegistry.setDefault(disp) + disp + } + + // Ignore configuration.updateOptions.gigahorse due to sbt/sbt#6912 + val urlHandler: URLHandler = basicUrlHandler + + // Only set the urlHandler for the http/https protocols so we do not conflict with any other plugins + // that might register other protocol handlers. + // For example https://github.com/frugalmechanic/fm-sbt-s3-resolver registers "s3" + dispatcher.setDownloader("http", urlHandler) + dispatcher.setDownloader("https", urlHandler) + + val is = new IvySettings + is.setCircularDependencyStrategy( + configuration.updateOptions.circularDependencyLevel.ivyStrategy + ) + CustomPomParser.registerDefault + val log = getLog(configuration.log) + + configuration match { + case e: ExternalIvyConfiguration => + val baseDirectory = getBaseDirectory(e.baseDirectory) + is.setBaseDir(baseDirectory) + IvySbt.addResolvers(e.extraResolvers, is, log) + IvySbt.loadURI(is, e.uri.getOrElse(sys.error("uri must be specified!"))) + case i: InlineIvyConfiguration => + val paths = getIvyPaths(i.paths) + is.setBaseDir(new File(paths.baseDirectory)) + is.setVariable("ivy.checksums", i.checksums mkString ",") + is.setVariable(ConvertResolver.ManagedChecksums, i.managedChecksums.toString) + paths.ivyHome.foreach { (h) => is.setDefaultIvyUserDir(new File(h)) } + IvySbt.configureCache(is, i.resolutionCacheDir) + IvySbt.setResolvers(is, i.resolvers, i.otherResolvers, configuration.updateOptions, log) + IvySbt.setModuleConfigurations(is, i.moduleConfigurations, log) + } + is + } + + /** + * Defines a parallel [[CachedResolutionResolveEngine]]. + * + * This is defined here because it needs access to [[mkIvy]]. + */ + private class ParallelCachedResolutionResolveEngine( + settings: IvySettings, + eventManager: EventManager, + sortEngine: SortEngine + ) extends ParallelResolveEngine(settings, eventManager, sortEngine) + with CachedResolutionResolveEngine { + def makeInstance: Ivy = mkIvy + val cachedResolutionResolveCache: CachedResolutionResolveCache = + IvySbt.cachedResolutionResolveCache + val projectResolver: Option[ProjectResolver] = { + val res = settings.getResolver(ProjectResolver.InterProject) + Option(res.asInstanceOf[ProjectResolver]) + } + } + + /** + * Provides a default ivy implementation that decides which resolution + * engine to use depending on the passed ivy configuration options. + */ + private class IvyImplementation extends Ivy { + private val loggerEngine = new SbtMessageLoggerEngine + override def getLoggerEngine: SbtMessageLoggerEngine = loggerEngine + override def bind(): Unit = { + val settings = getSettings + val eventManager = new EventManager() + val sortEngine = new SortEngine(settings) + + // We inject the deps we need before we can hook our resolve engine. 
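+      // (The event and sort engines are registered first on purpose: the resolve engine
+      // constructed below receives both of them through its constructor.)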
+ setSortEngine(sortEngine) + setEventManager(eventManager) + + val resolveEngine = { + // Decide to use cached resolution if user enabled it + if (configuration.updateOptions.cachedResolution) + new ParallelCachedResolutionResolveEngine(settings, eventManager, sortEngine) + else new ParallelResolveEngine(settings, eventManager, sortEngine) + } + + setResolveEngine(resolveEngine) + super.bind() + } + } + + private[sbt] def mkIvy: Ivy = { + val ivy = new IvyImplementation() + ivy.setSettings(settings) + ivy.bind() + val logger = new IvyLoggerInterface(getLog(configuration.log)) + ivy.getLoggerEngine.pushLogger(logger) + ivy + } + + private lazy val ivy: Ivy = mkIvy + // Must be the same file as is used in Update in the launcher + private lazy val ivyLockFile = new File(settings.getDefaultIvyUserDir, ".sbt.ivy.lock") + + // ========== End Configuration/Setup ============ + + /** Uses the configured Ivy instance within a safe context. */ + def withIvy[T](log: Logger)(f: Ivy => T): T = + withIvy(new IvyLoggerInterface(log))(f) + + def withIvy[T](log: MessageLogger)(f: Ivy => T): T = + withDefaultLogger(log) { + // See #429 - We always insert a helper authenticator here which lets us get more useful authentication errors. + ivyint.ErrorMessageAuthenticator.install() + ivy.pushContext() + ivy.getLoggerEngine.pushLogger(log) + try { + f(ivy) + } finally { + ivy.getLoggerEngine.popLogger() + ivy.popContext() + } + } + + /** Cleans cached resolution cache. */ + private[sbt] def cleanCachedResolutionCache(): Unit = { + if (!configuration.updateOptions.cachedResolution) () + else IvySbt.cachedResolutionResolveCache.clean() + } + + /** + * In the new POM format of sbt plugins, we append the sbt-cross version _2.12_1.0 to + * the module artifactId, and the artifactIds of its dependencies that are sbt plugins. 
+ * + * The goal is to produce a valid Maven POM, a POM that Maven can resolve: + * Maven will try and succeed to resolve the POM of pattern: + * /_2.12_1.0//_2.12_1.0-.pom + */ + final class Module(rawModuleSettings: ModuleSettings, appendSbtCrossVersion: Boolean) + extends sbt.librarymanagement.ModuleDescriptor { self => + + def this(rawModuleSettings: ModuleSettings) = + this(rawModuleSettings, appendSbtCrossVersion = false) + + val moduleSettings: ModuleSettings = + rawModuleSettings match { + case ic: InlineConfiguration => + val icWithCross: ModuleSettings = IvySbt.substituteCross(ic) + if appendSbtCrossVersion then IvySbt.appendSbtCrossVersion(icWithCross) + else icWithCross + case m => m + } + + def directDependencies: Vector[ModuleID] = + moduleSettings match { + case x: InlineConfiguration => x.dependencies + case _ => Vector() + } + + def configurations = + moduleSettings match { + case ic: InlineConfiguration => ic.configurations + case _: PomConfiguration => Configurations.default ++ Configurations.defaultInternal + case _: IvyFileConfiguration => + Configurations.default ++ Configurations.defaultInternal + } + + def scalaModuleInfo: Option[ScalaModuleInfo] = moduleSettings.scalaModuleInfo + + def owner = IvySbt.this + def withModule[T](log: Logger)(f: (Ivy, DefaultModuleDescriptor, String) => T): T = + withIvy[T](log) { ivy => + f(ivy, moduleDescriptor0, defaultConfig0) + } + + def moduleDescriptor(log: Logger): DefaultModuleDescriptor = withModule(log)((_, md, _) => md) + def dependencyMapping(log: Logger): (ModuleRevisionId, ModuleDescriptor) = { + val md = moduleDescriptor(log) + (md.getModuleRevisionId, md) + } + def defaultConfig(log: Logger): String = withModule(log)((_, _, dc) => dc) + // these should only be referenced by withModule because lazy vals synchronize on this object + // withIvy explicitly locks the IvySbt object, so they have to be done in the right order to avoid deadlock + private[this] lazy val (moduleDescriptor0: DefaultModuleDescriptor, defaultConfig0: String) = { + val (baseModule, baseConfiguration) = + moduleSettings match { + case ic: InlineConfiguration => configureInline(ic, getLog(configuration.log)) + case pc: PomConfiguration => configurePom(pc) + case ifc: IvyFileConfiguration => configureIvyFile(ifc) + } + + val configs = configurations + moduleSettings.scalaModuleInfo foreach { is => + val svc = configs filter Configurations.underScalaVersion map { _.name } + IvyScalaUtil.checkModule(baseModule, svc, getLog(configuration.log))(is) + } + IvySbt.addExtraNamespace(baseModule) + (baseModule, baseConfiguration) + } + private def configureInline(ic: InlineConfiguration, log: Logger) = { + import ic._ + val moduleID = newConfiguredModuleID(module, moduleInfo, ic.configurations) + IvySbt.setConflictManager(moduleID, conflictManager, ivy.getSettings) + val defaultConf = defaultConfiguration getOrElse Configuration.of( + "Default", + ModuleDescriptor.DEFAULT_CONFIGURATION + ) + log.debug( + s"Using inline dependencies specified in Scala${(if (ivyXML.isEmpty) "" else " and XML")}." 
+ ) + + val parser = IvySbt.parseIvyXML( + ivy.getSettings, + IvySbt.wrapped(module, ivyXML), + moduleID, + defaultConf.name, + ic.validate + ) + IvySbt.addMainArtifact(moduleID) + IvySbt.addOverrides(moduleID, overrides, ivy.getSettings.getMatcher(PatternMatcher.EXACT)) + IvySbt.addExcludes(moduleID, excludes, ic.scalaModuleInfo) + val transformedDeps = IvySbt.overrideDirect(dependencies, overrides) + IvySbt.addDependencies(moduleID, transformedDeps, parser) + (moduleID, parser.getDefaultConf) + } + private def newConfiguredModuleID( + module: ModuleID, + moduleInfo: ModuleInfo, + configurations: Iterable[Configuration] + ) = { + val mod = new DefaultModuleDescriptor(IvySbt.toID(module), "release", null, false) + mod.setLastModified(System.currentTimeMillis) + mod.setDescription(moduleInfo.description) + moduleInfo.homepage foreach { h => + mod.setHomePage(h.toString) + } + moduleInfo.licenses foreach { l => + mod.addLicense(new License(l._1, l._2.toString)) + } + IvySbt.addConfigurations(mod, configurations) + IvySbt.addArtifacts(mod, module.explicitArtifacts) + mod + } + + /** Parses the Maven pom 'pomFile' from the given `PomConfiguration`. */ + private def configurePom(pc: PomConfiguration) = { + val md = CustomPomParser.default.parseDescriptor(settings, toURL(pc.file), pc.validate) + val dmd = IvySbt.toDefaultModuleDescriptor(md) + IvySbt.addConfigurations(dmd, Configurations.defaultInternal) + val defaultConf = Configurations.DefaultMavenConfiguration.name + for (is <- pc.scalaModuleInfo) if (pc.autoScalaTools) { + val confParser = new CustomXmlParser.CustomParser(settings, Some(defaultConf)) + confParser.setMd(dmd) + addScalaToolDependencies(dmd, confParser, is) + } + (dmd, defaultConf) + } + + /** Parses the Ivy file 'ivyFile' from the given `IvyFileConfiguration`. */ + private def configureIvyFile(ifc: IvyFileConfiguration) = { + val parser = new CustomXmlParser.CustomParser(settings, None) + parser.setValidate(ifc.validate) + parser.setSource(toURL(ifc.file)) + parser.parse() + val dmd = IvySbt.toDefaultModuleDescriptor(parser.getModuleDescriptor()) + for (is <- ifc.scalaModuleInfo) + if (ifc.autoScalaTools) + addScalaToolDependencies(dmd, parser, is) + (dmd, parser.getDefaultConf) + } + private def addScalaToolDependencies( + dmd: DefaultModuleDescriptor, + parser: CustomXmlParser.CustomParser, + is: ScalaModuleInfo + ): Unit = { + IvySbt.addConfigurations(dmd, Configurations.ScalaTool :: Nil) + IvySbt.addDependencies( + dmd, + ScalaArtifacts.toolDependencies(is.scalaOrganization, is.scalaFullVersion), + parser + ) + } + private def toURL(file: File) = file.toURI.toURL + + // Todo: We just need writing side of this codec. We can clean up the reads. 
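+    // This codec exists so that `extraInputHash` below can hash the owner's IvyConfiguration with
+    // sjson-new's murmurhash Hasher; the overridden formats keep only the serialisable fields of
+    // each configuration (loggers, locks and update options are deliberately left out).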
+ private[sbt] object AltLibraryManagementCodec extends IvyLibraryManagementCodec { + import sbt.io.Hash + type InlineIvyHL = ( + Option[IvyPaths], + Vector[Resolver], + Vector[Resolver], + Vector[ModuleConfiguration], + Vector[String], + Boolean + ) + def inlineIvyToHL(i: InlineIvyConfiguration): InlineIvyHL = + ( + i.paths, + i.resolvers, + i.otherResolvers, + i.moduleConfigurations, + i.checksums, + i.managedChecksums + ) + + type ExternalIvyHL = (Option[PlainFileInfo], Array[Byte]) + def externalIvyToHL(e: ExternalIvyConfiguration): ExternalIvyHL = + ( + e.baseDirectory.map(FileInfo.exists.apply), + e.uri.map(Hash.contentsIfLocal).getOrElse(Array.empty) + ) + + // Redefine to use a subset of properties, that are serialisable + override implicit lazy val InlineIvyConfigurationFormat + : JsonFormat[InlineIvyConfiguration] = { + def hlToInlineIvy(i: InlineIvyHL): InlineIvyConfiguration = { + val ( + paths, + resolvers, + otherResolvers, + moduleConfigurations, + checksums, + managedChecksums + ) = i + InlineIvyConfiguration() + .withPaths(paths) + .withResolvers(resolvers) + .withOtherResolvers(otherResolvers) + .withModuleConfigurations(moduleConfigurations) + .withManagedChecksums(managedChecksums) + .withChecksums(checksums) + } + projectFormat[InlineIvyConfiguration, InlineIvyHL](inlineIvyToHL, hlToInlineIvy) + } + + // Redefine to use a subset of properties, that are serialisable + override implicit lazy val ExternalIvyConfigurationFormat + : JsonFormat[ExternalIvyConfiguration] = { + def hlToExternalIvy(e: ExternalIvyHL): ExternalIvyConfiguration = { + val (baseDirectory, _) = e + ExternalIvyConfiguration( + None, + Some(NullLogger), + UpdateOptions(), + baseDirectory.map(_.file), + None /* the original uri is destroyed.. */, + Vector.empty + ) + } + projectFormat[ExternalIvyConfiguration, ExternalIvyHL](externalIvyToHL, hlToExternalIvy) + } + + // Redefine to switch to unionFormat + override implicit lazy val IvyConfigurationFormat: JsonFormat[IvyConfiguration] = + unionFormat2[IvyConfiguration, InlineIvyConfiguration, ExternalIvyConfiguration] + + object NullLogger extends sbt.internal.util.BasicLogger { + override def control(event: sbt.util.ControlEvent.Value, message: => String): Unit = () + override def log(level: Level.Value, message: => String): Unit = () + override def logAll(events: Seq[sbt.util.LogEvent]): Unit = () + override def success(message: => String): Unit = () + override def trace(t: => Throwable): Unit = () + } + } + + def extraInputHash: Long = { + import AltLibraryManagementCodec._ + Hasher.hash(owner.configuration) match { + case Success(keyHash) => keyHash.toLong + case Failure(_) => 0L + } + } + } +} + +private[sbt] object IvySbt { + val DefaultIvyConfigFilename = "ivysettings.xml" + val DefaultIvyFilename = "ivy.xml" + val DefaultMavenFilename = "pom.xml" + val DefaultChecksums = IvyDefaults.defaultChecksums + private[sbt] def cachedResolutionResolveCache: CachedResolutionResolveCache = + new CachedResolutionResolveCache + + def defaultIvyFile(project: File) = new File(project, DefaultIvyFilename) + def defaultIvyConfiguration(project: File) = new File(project, DefaultIvyConfigFilename) + def defaultPOM(project: File) = new File(project, DefaultMavenFilename) + + def loadURI(is: IvySettings, uri: URI): Unit = { + if (uri.getScheme == "file") + is.load(new File(uri)) // IVY-1114 + else + is.load(uri.toURL) + } + + /** + * Sets the resolvers for 'settings' to 'resolvers'. This is done by creating a new chain and making it the default. 
+ * 'other' is for resolvers that should be in a different chain. These are typically used for publishing or other actions. + */ + private def setResolvers( + settings: IvySettings, + resolvers: Seq[Resolver], + other: Seq[Resolver], + updateOptions: UpdateOptions, + log: Logger + ): Unit = { + def makeChain(label: String, name: String, rs: Seq[Resolver]) = { + log.debug(label + " repositories:") + val chain = resolverChain(name, rs, settings, updateOptions, log) + settings.addResolver(chain) + chain + } + makeChain("Other", "sbt-other", other) + val mainChain = makeChain("Default", "sbt-chain", resolvers) + settings.setDefaultResolver(mainChain.getName) + } + + // TODO: Expose the changing semantics to the caller so that users can specify a regex + private[sbt] def isChanging(dd: DependencyDescriptor): Boolean = + dd.isChanging || isChanging(dd.getDependencyRevisionId) + private[sbt] def isChanging(module: ModuleID): Boolean = + module.revision endsWith "-SNAPSHOT" + private[sbt] def isChanging(mrid: ModuleRevisionId): Boolean = + mrid.getRevision endsWith "-SNAPSHOT" + + def resolverChain( + name: String, + resolvers: Seq[Resolver], + settings: IvySettings, + log: Logger + ): DependencyResolver = resolverChain(name, resolvers, settings, UpdateOptions(), log) + + def resolverChain( + name: String, + resolvers: Seq[Resolver], + settings: IvySettings, + updateOptions: UpdateOptions, + log: Logger + ): DependencyResolver = { + val ivyResolvers = resolvers.map(r => ConvertResolver(r, settings, updateOptions, log)) + val (projectResolvers, rest) = + ivyResolvers.partition(_.getName == ProjectResolver.InterProject) + if (projectResolvers.isEmpty) ivyint.SbtChainResolver(name, rest, settings, updateOptions, log) + else { + // Force that we always look at the project resolver first by wrapping the chain resolver + val delegatedName = s"$name-delegate" + val delegate = ivyint.SbtChainResolver(delegatedName, rest, settings, updateOptions, log) + val initialResolvers = projectResolvers :+ delegate + val freshOptions = UpdateOptions() + .withLatestSnapshots(false) + .withModuleResolvers(updateOptions.moduleResolvers) + ivyint.SbtChainResolver(name, initialResolvers, settings, freshOptions, log) + } + } + + def addResolvers(resolvers: Seq[Resolver], settings: IvySettings, log: Logger): Unit = { + for (r <- resolvers) { + log.debug("\t" + r) + settings.addResolver(ConvertResolver(r, settings, UpdateOptions(), log)) + } + } + + /** + * A hack to detect if the given artifact is an automatically generated request for a classifier, + * as opposed to a user-initiated declaration. It relies on Ivy prefixing classifier with m:, while sbt uses e:. + * Clearly, it would be better to have an explicit option in Ivy to control this. 
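+   * For example (illustrative), a classifier request generated by Ivy carries the extra
+   * attribute "m:classifier" -> "sources", while a user-declared classifier arrives as
+   * "e:classifier" -> "sources" and is therefore not treated as implicit here.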
+ */ + def hasImplicitClassifier(artifact: IArtifact): Boolean = { + import scala.jdk.CollectionConverters._ + artifact.getQualifiedExtraAttributes.asScala.keys + .exists(_.asInstanceOf[String] startsWith "m:") + } + private def setModuleConfigurations( + settings: IvySettings, + moduleConfigurations: Seq[ModuleConfiguration], + log: Logger + ): Unit = { + val existing = settings.getResolverNames + for (moduleConf <- moduleConfigurations) { + import moduleConf._ + import IvyPatternHelper._ + import PatternMatcher._ + if (!existing.contains(resolver.name)) + settings.addResolver(ConvertResolver(resolver, settings, UpdateOptions(), log)) + val attributes = javaMap( + Map(MODULE_KEY -> name, ORGANISATION_KEY -> organization, REVISION_KEY -> revision) + ) + settings.addModuleConfiguration( + attributes, + settings.getMatcher(EXACT_OR_REGEXP), + resolver.name, + null, + null, + null + ) + } + } + + private def configureCache(settings: IvySettings, resCacheDir: Option[File]): Unit = { + configureResolutionCache(settings, resCacheDir) + configureRepositoryCache(settings) + } + private[this] def configureResolutionCache(settings: IvySettings, resCacheDir: Option[File]) = { + val base = resCacheDir getOrElse settings.getDefaultResolutionCacheBasedir + settings.setResolutionCacheManager(new ResolutionCache(base, settings)) + } + // set the artifact resolver to be the main resolver. + // this is because sometimes the artifact resolver saved in the cache is not correct + // the common case is for resolved.getArtifactResolver to be inter-project from a different project's publish-local + // if there are problems with this, a less aggressive fix might be to only reset the artifact resolver when it is a ProjectResolver + // a possible problem is that fetching artifacts is slower, due to the full chain being the artifact resolver instead of the specific resolver + // This also fixes #760, which occurs when metadata exists in a repository, but the artifact doesn't. + private[sbt] def resetArtifactResolver( + resolved: ResolvedModuleRevision + ): ResolvedModuleRevision = + if (resolved eq null) null + else { + val desc = resolved.getDescriptor + val updatedDescriptor = CustomPomParser.defaultTransform(desc.getParser, desc) + new ResolvedModuleRevision( + resolved.getResolver, + resolved.getResolver, + updatedDescriptor, + resolved.getReport, + resolved.isForce + ) + } + + private[this] def configureRepositoryCache(settings: IvySettings): Unit = { + val cacheDir = settings.getDefaultRepositoryCacheBasedir() + val manager = new DefaultRepositoryCacheManager("default-cache", settings, cacheDir) { + override def findModuleInCache( + dd: DependencyDescriptor, + revId: ModuleRevisionId, + options: CacheMetadataOptions, + r: String + ) = { + // ignore and reset the resolver- not ideal, but avoids thrashing. 
+ val resolved = resetArtifactResolver(super.findModuleInCache(dd, revId, options, null)) + // invalidate the cache if the artifact was removed from the local repository + if (resolved == null) null + else if (isProjectResolver(resolved.getResolver)) { + resolved.getReport.getLocalFile.delete() + null + } else { + val origin = resolved.getReport.getArtifactOrigin + if (!origin.isLocal) resolved + else { + val file = new File(origin.getLocation) + if (file == null || file.exists) resolved + else { + resolved.getReport.getLocalFile.delete() + null + } + } + } + } + private[this] def isProjectResolver(r: DependencyResolver): Boolean = r match { + case _: ProjectResolver => true + case _ => false + } + // ignore the original resolver wherever possible to avoid issues like #704 + override def saveResolvers( + descriptor: ModuleDescriptor, + metadataResolverName: String, + artifactResolverName: String + ): Unit = () + } + manager.setArtifactPattern(PluginPattern + manager.getArtifactPattern) + manager.setDataFilePattern(PluginPattern + manager.getDataFilePattern) + manager.setIvyPattern(PluginPattern + manager.getIvyPattern) + manager.setUseOrigin(true) + manager.setChangingMatcher(PatternMatcher.REGEXP) + manager.setChangingPattern(".*-SNAPSHOT") + settings.addRepositoryCacheManager(manager) + settings.setDefaultRepositoryCacheManager(manager) + } + def toIvyConfiguration(configuration: Configuration) = { + import org.apache.ivy.core.module.descriptor.{ Configuration => IvyConfig } + import IvyConfig.Visibility._ + import configuration._ + new IvyConfig( + name, + if (isPublic) PUBLIC else PRIVATE, + description, + extendsConfigs.map(_.name).toArray, + transitive, + null + ) + } + def addExtraNamespace(dmd: DefaultModuleDescriptor): Unit = + dmd.addExtraAttributeNamespace("e", "http://ant.apache.org/ivy/extra") + + /** Adds the ivy.xml main artifact. */ + private def addMainArtifact(moduleID: DefaultModuleDescriptor): Unit = { + val artifact = DefaultArtifact.newIvyArtifact( + moduleID.getResolvedModuleRevisionId, + moduleID.getPublicationDate + ) + moduleID.setModuleArtifact(artifact) + moduleID.check() + } + private def setConflictManager( + moduleID: DefaultModuleDescriptor, + conflict: ConflictManager, + is: IvySettings + ): Unit = { + val mid = ModuleId.newInstance(conflict.organization, conflict.module) + val matcher = is.getMatcher(PatternMatcher.EXACT_OR_REGEXP) + val manager = is.getConflictManager(conflict.name) + moduleID.addConflictManager(mid, matcher, manager) + } + + /** Converts the given sbt module id into an Ivy ModuleRevisionId. 
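For example (illustrative), `toID(ModuleID("org.scala-lang", "scala-library", "2.13.14"))` corresponds to the Ivy id `org.scala-lang#scala-library;2.13.14`.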
*/ + def toID(m: ModuleID) = { + import m._ + ModuleRevisionId.newInstance( + organization, + name, + branchName.orNull, + revision, + javaMap(extraAttributes) + ) + } + + private def substituteCross(m: ModuleSettings): ModuleSettings = + m.scalaModuleInfo match { + case None => m + case Some(is) => substituteCross(m, is.scalaFullVersion, is.scalaBinaryVersion, is.platform) + } + + private def substituteCross( + m: ModuleSettings, + scalaFullVersion: String, + scalaBinaryVersion: String, + platform: Option[String] + ): ModuleSettings = { + m match + case ic: InlineConfiguration => + val applyPlatform: ModuleID => ModuleID = substitutePlatform(platform) + val applyCross = CrossVersion(scalaFullVersion, scalaBinaryVersion) + val transform: ModuleID => ModuleID = (m: ModuleID) => applyCross(applyPlatform(m)) + def propagateCrossVersion(moduleID: ModuleID): ModuleID = { + val crossExclusions: Vector[ExclusionRule] = + moduleID.exclusions.map(CrossVersion.substituteCross(_, ic.scalaModuleInfo)) + transform(moduleID) + .withExclusions(crossExclusions) + } + ic.withModule(transform(ic.module)) + .withDependencies(ic.dependencies.map(propagateCrossVersion)) + .withOverrides(ic.overrides map transform) + case m => m + } + + private def substitutePlatform(platform: Option[String]): ModuleID => ModuleID = { + def addSuffix(m: ModuleID, platformName: String): ModuleID = + platformName match + case "" | Platform.jvm => m + case _ => m.withName(s"${m.name}_$platformName") + (m: ModuleID) => + m.crossVersion match + case _: Disabled => m + case _ => + (platform, m.platformOpt) match + case (Some(p), None) => addSuffix(m, p) + case (_, Some(p)) => addSuffix(m, p) + case _ => m + } + + private def appendSbtCrossVersion(m: ModuleSettings): ModuleSettings = + m match + case ic: InlineConfiguration => + ic.withModule(appendSbtCrossVersion(ic.module)) + .withDependencies(ic.dependencies.map(appendSbtCrossVersion)) + .withOverrides(ic.overrides.map(appendSbtCrossVersion)) + case m => m + + private def appendSbtCrossVersion(mid: ModuleID): ModuleID = { + val crossVersion = for { + scalaVersion <- mid.extraAttributes.get("e:scalaVersion") + sbtVersion <- mid.extraAttributes.get("e:sbtVersion") + } yield s"_${scalaVersion}_$sbtVersion" + crossVersion + .filter(!mid.name.endsWith(_)) + .map(cv => mid.withName(mid.name + cv)) + .getOrElse(mid) + } + + private def toIvyArtifact( + moduleID: ModuleDescriptor, + a: Artifact, + allConfigurations: Vector[ConfigRef] + ): MDArtifact = { + val artifact = new MDArtifact(moduleID, a.name, a.`type`, a.extension, null, extra(a, false)) + copyConfigurations( + a, + (ref: ConfigRef) => { artifact.addConfiguration(ref.name) }, + allConfigurations + ) + artifact + } + def getExtraAttributes(revID: ExtendableItem): Map[String, String] = { + import scala.jdk.CollectionConverters._ + revID.getExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap + } + private[sbt] def extra( + artifact: Artifact, + unqualify: Boolean = false + ): java.util.Map[String, String] = { + val ea = artifact.classifier match { + case Some(c) => artifact.extra("e:classifier" -> c); case None => artifact + } + javaMap(ea.extraAttributes, unqualify) + } + private[sbt] def javaMap(m: Map[String, String], unqualify: Boolean = false) = { + import scala.jdk.CollectionConverters._ + val map = if (unqualify) m map { case (k, v) => (k.stripPrefix("e:"), v) } + else m + if (map.isEmpty) null else map.asJava + } + + /** Creates a full ivy file for 'module' using the 'dependencies' XML as the part after 
the <info>...</info> section. */
+  private def wrapped(module: ModuleID, dependencies: NodeSeq) = {
+    <ivy-module version="2.0" xmlns:e="http://ant.apache.org/ivy/extra">
+      {
+        if (hasInfo(module, dependencies))
+          NodeSeq.Empty
+        else
+          addExtraAttributes(defaultInfo(module), module.extraAttributes)
+      }
+      {dependencies}
+      {
+        // this is because Ivy adds a default artifact if none are specified.
+        if ((dependencies \\ "publications").isEmpty) <publications /> else NodeSeq.Empty
+      }
+    </ivy-module>
+  }
+  private[this] def defaultInfo(module: ModuleID): scala.xml.Elem = {
+    import module._
+    val base = <info organisation={organization} module={name} revision={revision}/>
+    branchName.fold(base) { br =>
+      base % new scala.xml.UnprefixedAttribute("branch", br, scala.xml.Null)
+    }
+  }
+  private[this] def addExtraAttributes(
+      elem: scala.xml.Elem,
+      extra: Map[String, String]
+  ): scala.xml.Elem =
+    extra.foldLeft(elem) { case (e, (key, value)) =>
+      e % new scala.xml.UnprefixedAttribute(key, value, scala.xml.Null)
+    }
+  private def hasInfo(module: ModuleID, x: scala.xml.NodeSeq) = {
+    val info = <g>{x}</g> \ "info"
+    if (info.nonEmpty) {
+      def check(found: NodeSeq, expected: String, label: String) =
+        if (found.isEmpty) sys.error("Missing " + label + " in inline Ivy XML.")
+        else {
+          val str = found.text
+          if (str != expected)
+            sys.error(
+              "Inconsistent " + label + " in inline Ivy XML. Expected '" + expected + "', got '" + str + "'"
+            )
+        }
+      check(info \ "@organisation", module.organization, "organisation")
+      check(info \ "@module", module.name, "name")
+      check(info \ "@revision", module.revision, "version")
+    }
+    info.nonEmpty
+  }
+
+  /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */
+  private def parseIvyXML(
+      settings: IvySettings,
+      xml: scala.xml.NodeSeq,
+      moduleID: DefaultModuleDescriptor,
+      defaultConfiguration: String,
+      validate: Boolean
+  ): CustomXmlParser.CustomParser =
+    parseIvyXML(settings, xml.toString, moduleID, defaultConfiguration, validate)
+
+  /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */
+  private def parseIvyXML(
+      settings: IvySettings,
+      xml: String,
+      moduleID: DefaultModuleDescriptor,
+      defaultConfiguration: String,
+      validate: Boolean
+  ): CustomXmlParser.CustomParser = {
+    val parser = new CustomXmlParser.CustomParser(settings, Some(defaultConfiguration))
+    parser.setMd(moduleID)
+    parser.setValidate(validate)
+    parser.setInput(xml.getBytes)
+    parser.parse()
+    parser
+  }
+
+  def inconsistentDuplicateWarning(moduleID: DefaultModuleDescriptor): List[String] = {
+    import IvyRetrieve.toModuleID
+    val dds = ArraySeq.unsafeWrapArray(moduleID.getDependencies)
+    val deps = dds flatMap { dd =>
+      val module = toModuleID(dd.getDependencyRevisionId)
+      dd.getModuleConfigurations map (c => module.withConfigurations(Some(c)))
+    }
+    inconsistentDuplicateWarning(deps)
+  }
+
+  def inconsistentDuplicateWarning(dependencies: Seq[ModuleID]): List[String] = {
+    val warningHeader =
+      "Multiple dependencies with the same organization/name but different versions. 
To avoid conflict, pick one version:" + val out: mutable.ListBuffer[String] = mutable.ListBuffer() + (dependencies groupBy { dep => + (dep.organization, dep.name, dep.configurations) + }) foreach { + case (_, vs) if vs.size > 1 => + val v0 = vs.head + (vs find { _.revision != v0.revision }) foreach { _ => + out += s" * ${v0.organization}:${v0.name}:(" + (vs map { _.revision }) + .mkString(", ") + ")" + } + case _ => () + } + if (out.isEmpty) Nil + else warningHeader :: out.toList + } + + /** This method is used to add inline dependencies to the provided module. */ + def addDependencies( + moduleID: DefaultModuleDescriptor, + dependencies: Seq[ModuleID], + parser: CustomXmlParser.CustomParser + ): Unit = { + val converted = dependencies map { dependency => + convertDependency(moduleID, dependency, parser) + } + val unique = + if (hasDuplicateDependencies(converted)) mergeDuplicateDefinitions(converted) else converted + unique foreach moduleID.addDependency + } + + /** Determines if there are multiple dependency definitions for the same dependency ID. */ + def hasDuplicateDependencies(dependencies: Seq[DependencyDescriptor]): Boolean = { + val ids = dependencies.map(_.getDependencyRevisionId) + ids.toSet.size != ids.size + } + + /** + * Combines the artifacts, includes, and excludes of duplicate dependency definitions. + * This is somewhat fragile and is only intended to workaround Ivy (or sbt's use of Ivy) not handling this case properly. + * In particular, Ivy will create multiple dependency entries when converting a pom with a dependency on a classified artifact and a non-classified artifact: + * https://github.com/sbt/sbt/issues/468 + * It will also allow users to declare dependencies on classified modules in different configurations: + * https://groups.google.com/d/topic/simple-build-tool/H2MdAARz6e0/discussion + * as well as basic multi-classifier handling: #285, #419, #480. + * Multiple dependency definitions should otherwise be avoided as much as possible. + */ + def mergeDuplicateDefinitions( + dependencies: Seq[DependencyDescriptor] + ): Seq[DependencyDescriptor] = { + // need to preserve basic order of dependencies: can't use dependencies.groupBy + val deps = new java.util.LinkedHashMap[ModuleRevisionId, List[DependencyDescriptor]] + for (dd <- dependencies) { + val id = dd.getDependencyRevisionId + val updated = deps get id match { + case null => dd :: Nil + case v => dd :: v + } + deps.put(id, updated) + } + + import scala.jdk.CollectionConverters._ + deps.values.asScala.toSeq.flatMap { dds => + val mergeable = dds.lazyZip(dds.tail).forall(ivyint.MergeDescriptors.mergeable _) + if (mergeable) dds.reverse.reduceLeft(ivyint.MergeDescriptors.apply _) :: Nil else dds + } + } + + /** Transforms an sbt ModuleID into an Ivy DefaultDependencyDescriptor. 
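When the dependency defines `configurations`, it is an Ivy mapping string such as "test->default" (illustrative); when it is `None` the parser's default configuration mapping is applied instead.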
*/ + def convertDependency( + moduleID: DefaultModuleDescriptor, + dependency: ModuleID, + parser: CustomXmlParser.CustomParser + ): DefaultDependencyDescriptor = { + val dependencyDescriptor = new DefaultDependencyDescriptor( + moduleID, + toID(dependency), + dependency.isForce, + dependency.isChanging, + dependency.isTransitive + ) with SbtDefaultDependencyDescriptor { + def dependencyModuleId = dependency + } + dependency.configurations match { + case None => // The configuration for this dependency was not explicitly specified, so use the default + parser.parseDepsConfs(parser.getDefaultConf, dependencyDescriptor) + case Some( + confs + ) => // The configuration mapping (looks like: test->default) was specified for this dependency + parser.parseDepsConfs(confs, dependencyDescriptor) + } + for (artifact <- dependency.explicitArtifacts) { + import artifact.{ name, `type`, extension, url } + val extraMap = extra(artifact) + val ivyArtifact = new DefaultDependencyArtifactDescriptor( + dependencyDescriptor, + name, + `type`, + extension, + url.map(_.toURL).orNull, + extraMap + ) + copyConfigurations(artifact, (ref: ConfigRef) => { ivyArtifact.addConfiguration(ref.name) }) + for (conf <- dependencyDescriptor.getModuleConfigurations) + dependencyDescriptor.addDependencyArtifact(conf, ivyArtifact) + } + for (excls <- dependency.exclusions) { + for (conf <- dependencyDescriptor.getModuleConfigurations) { + dependencyDescriptor.addExcludeRule( + conf, + IvyScalaUtil.excludeRule( + excls.organization, + excls.name, + excls.configurations map { _.name }, + excls.artifact + ) + ) + } + } + for (incls <- dependency.inclusions) { + for (conf <- dependencyDescriptor.getModuleConfigurations) { + dependencyDescriptor.addIncludeRule( + conf, + IvyScalaUtil.includeRule( + incls.organization, + incls.name, + incls.configurations map { _.name }, + incls.artifact + ) + ) + } + } + + dependencyDescriptor + } + def copyConfigurations(artifact: Artifact, addConfiguration: ConfigRef => Unit): Unit = + copyConfigurations(artifact, addConfiguration, Vector(ConfigRef("*"))) + + private[this] def copyConfigurations( + artifact: Artifact, + addConfiguration: ConfigRef => Unit, + allConfigurations: Vector[ConfigRef] + ): Unit = { + val confs = + if (artifact.configurations.isEmpty) allConfigurations + else artifact.configurations + confs foreach addConfiguration + } + + def addExcludes( + moduleID: DefaultModuleDescriptor, + excludes: Seq[ExclusionRule], + scalaModuleInfo: Option[ScalaModuleInfo] + ): Unit = excludes.foreach(exclude => addExclude(moduleID, scalaModuleInfo)(exclude)) + + def addExclude(moduleID: DefaultModuleDescriptor, scalaModuleInfo: Option[ScalaModuleInfo])( + exclude0: ExclusionRule + ): Unit = { + // this adds _2.11 postfix + val exclude = CrossVersion.substituteCross(exclude0, scalaModuleInfo) + val confs = + if (exclude.configurations.isEmpty) moduleID.getConfigurationsNames.toList + else exclude.configurations map { _.name } + val excludeRule = + IvyScalaUtil.excludeRule(exclude.organization, exclude.name, confs, exclude.artifact) + moduleID.addExcludeRule(excludeRule) + } + + def addOverrides( + moduleID: DefaultModuleDescriptor, + overrides: Vector[ModuleID], + matcher: PatternMatcher + ): Unit = + overrides foreach addOverride(moduleID, matcher) + def addOverride(moduleID: DefaultModuleDescriptor, matcher: PatternMatcher)( + overrideDef: ModuleID + ): Unit = { + val overrideID = new ModuleId(overrideDef.organization, overrideDef.name) + val overrideWith = new 
OverrideDependencyDescriptorMediator(null, overrideDef.revision) + moduleID.addDependencyDescriptorMediator(overrideID, matcher, overrideWith) + } + + /** + * It is necessary to explicitly modify direct dependencies because Ivy gives + * "IllegalStateException: impossible to get artifacts when data has not been loaded." + * when a direct dependency is overridden with a newer version." + */ + def overrideDirect(dependencies: Seq[ModuleID], overrides: Vector[ModuleID]): Seq[ModuleID] = { + def key(id: ModuleID) = (id.organization, id.name) + val overridden = overrides.map(id => (key(id), id.revision)).toMap + dependencies map { dep => + overridden get key(dep) match { + case Some(rev) => dep.withRevision(rev) + case None => dep + } + } + } + + /** This method is used to add inline artifacts to the provided module. */ + def addArtifacts(moduleID: DefaultModuleDescriptor, artifacts: Iterable[Artifact]): Unit = + for (art <- mapArtifacts(moduleID, artifacts.toSeq); c <- art.getConfigurations) + moduleID.addArtifact(c, art) + + def addConfigurations( + mod: DefaultModuleDescriptor, + configurations: Iterable[Configuration] + ): Unit = + configurations.foreach(config => mod.addConfiguration(toIvyConfiguration(config))) + + def mapArtifacts(moduleID: ModuleDescriptor, artifacts: Seq[Artifact]): Seq[IArtifact] = { + lazy val allConfigurations = moduleID.getPublicConfigurationsNames.toVector map ConfigRef.apply + for (artifact <- artifacts) yield toIvyArtifact(moduleID, artifact, allConfigurations) + } + + /** + * This code converts the given ModuleDescriptor to a DefaultModuleDescriptor by casting or generating an error. + * Ivy 2.0.0 always produces a DefaultModuleDescriptor. + */ + private def toDefaultModuleDescriptor(md: ModuleDescriptor) = + md match { + case dmd: DefaultModuleDescriptor => dmd + case _ => sys.error("Unknown ModuleDescriptor type.") + } + def getConfigurations( + module: ModuleDescriptor, + configurations: Option[Iterable[Configuration]] + ) = + configurations match { + case Some(confs) => confs.map(_.name).toList.toArray + case None => module.getPublicConfigurationsNames + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyActions.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyActions.scala new file mode 100644 index 000000000..a7a550bb6 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyActions.scala @@ -0,0 +1,525 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.File + +import ivyint.CachedResolutionResolveEngine +import org.apache.ivy.Ivy +import org.apache.ivy.core.{ IvyPatternHelper, LogOptions } +import org.apache.ivy.core.deliver.DeliverOptions +import org.apache.ivy.core.install.InstallOptions +import org.apache.ivy.core.module.descriptor.{ + DefaultModuleDescriptor, + MDArtifact, + ModuleDescriptor, + Artifact => IArtifact +} +import org.apache.ivy.core.resolve.ResolveOptions +import org.apache.ivy.plugins.resolver.{ BasicResolver, DependencyResolver } +import org.apache.ivy.util.filter.{ Filter => IvyFilter } +import sbt.io.{ IO, PathFinder } +import sbt.util.Logger +import sbt.librarymanagement.{ ModuleDescriptorConfiguration => InlineConfiguration, _ } +import syntax._ +import InternalDefaults._ +import UpdateClassifiersUtil._ +import sbt.internal.librarymanagement.IvyUtil.TransientNetworkException + +object IvyActions { + + /** Installs the dependencies of the given 'module' from the resolver named 'from' to the 
resolver named 'to'. */ + def install(module: IvySbt#Module, from: String, to: String, log: Logger): Unit = { + module.withModule(log) { (ivy, md, _) => + for (dependency <- md.getDependencies) { + log.info("Installing " + dependency) + val options = new InstallOptions + options.setValidate(module.moduleSettings.validate) + options.setTransitive(dependency.isTransitive) + ivy.install(dependency.getDependencyRevisionId, from, to, options) + } + } + } + + /** Clears the Ivy cache, as configured by 'config'. */ + def cleanCache(ivy: IvySbt, log: Logger) = ivy.withIvy(log) { iv => + iv.getSettings.getResolutionCacheManager.clean() + iv.getSettings.getRepositoryCacheManagers.foreach(_.clean()) + } + + /** + * Cleans the cached resolution cache, if any. + * This is called by clean. + */ + private[sbt] def cleanCachedResolutionCache(module: IvySbt#Module, log: Logger): Unit = + module.withModule(log) { (_, _, _) => + module.owner.cleanCachedResolutionCache() + } + + /** Creates a Maven pom from the given Ivy configuration */ + def makePomFile(module: IvySbt#Module, configuration: MakePomConfiguration, log: Logger): File = { + import configuration.{ + allRepositories, + configurations, + filterRepositories, + process, + includeTypes + } + val file = configuration.file.getOrElse(sys.error("file must be specified.")) + val moduleInfo = configuration.moduleInfo.getOrElse(sys.error("moduleInfo must be specified.")) + val extra = configuration.extra.getOrElse(scala.xml.NodeSeq.Empty) + module.withModule(log) { (ivy, md, _) => + (new MakePom(log)).write( + ivy, + md, + moduleInfo, + configurations, + includeTypes, + extra, + process, + filterRepositories, + allRepositories, + file + ) + log.info("Wrote " + file.getAbsolutePath) + file + } + } + + def deliver(module: IvySbt#Module, configuration: PublishConfiguration, log: Logger): File = { + val deliverIvyPattern = configuration.deliverIvyPattern + .getOrElse(sys.error("deliverIvyPattern must be specified.")) + val status = getDeliverStatus(configuration.status) + module.withModule(log) { case (ivy, md, _) => + val revID = md.getModuleRevisionId + val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status) + options.setConfs(getConfigurations(md, configuration.configurations)) + ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options) + deliveredFile(ivy, deliverIvyPattern, md) + } + } + + def getConfigurations( + module: ModuleDescriptor, + configurations: Option[Vector[ConfigRef]] + ): Array[String] = + configurations match { + case Some(confs) => (confs map { _.name }).toArray + case None => module.getPublicConfigurationsNames + } + + def deliveredFile(ivy: Ivy, pattern: String, md: ModuleDescriptor): File = + ivy.getSettings.resolveFile( + IvyPatternHelper.substitute(pattern, md.getResolvedModuleRevisionId) + ) + + def publish(module: IvySbt#Module, configuration: PublishConfiguration, log: Logger): Unit = { + val resolverName = configuration.resolverName match { + case Some(x) => x + case _ => sys.error("Resolver name is not specified") + } + + // Todo. 
Fix publish ordering https://github.com/sbt/sbt/issues/2088#issuecomment-246208872 + val ivyFile: Option[File] = + if (configuration.publishMavenStyle) None + else { + Option(deliver(module, configuration, log)) + } + + val artifacts = Map(configuration.artifacts: _*) + val checksums = configuration.checksums + module.withModule(log) { case (ivy, md, _) => + val resolver = ivy.getSettings.getResolver(resolverName) + if (resolver eq null) sys.error("Undefined resolver '" + resolverName + "'") + val ivyArtifact = ivyFile map { file => + (MDArtifact.newIvyArtifact(md), file) + } + val cross = crossVersionMap(module.moduleSettings) + val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toList + withChecksums(resolver, checksums) { + publish(md, as, resolver, overwrite = configuration.overwrite) + } + } + } + private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Vector[String])( + act: => T + ): T = + resolver match { case br: BasicResolver => withChecksums(br, checksums)(act); case _ => act } + private[this] def withChecksums[T](resolver: BasicResolver, checksums: Vector[String])( + act: => T + ): T = { + val previous = resolver.getChecksumAlgorithms + resolver.setChecksums(checksums mkString ",") + try { + act + } finally { + resolver.setChecksums(previous mkString ",") + } + } + private def crossVersionMap(moduleSettings: ModuleSettings): Option[String => String] = + moduleSettings match { + case i: InlineConfiguration => CrossVersion(i.module, i.scalaModuleInfo) + case _ => None + } + def mapArtifacts( + module: ModuleDescriptor, + cross: Option[String => String], + artifacts: Map[Artifact, File] + ): Vector[(IArtifact, File)] = { + val rawa = artifacts.keys.toVector + val seqa = CrossVersion.substituteCross(rawa, cross) + val zipped = rawa zip IvySbt.mapArtifacts(module, seqa) + zipped map { case (a, ivyA) => (ivyA, artifacts(a)) } + } + + /** + * Updates one module's dependencies performing a dependency resolution and retrieval. + * + * The following mechanism uses ivy under the hood. + * + * @param module The module to be resolved. + * @param configuration The update configuration. + * @param uwconfig The configuration to handle unresolved warnings. + * @param log The logger. + * @return The result, either an unresolved warning or an update report. Note that this + * update report will or will not be successful depending on the `missingOk` option. 
+ */ + private[sbt] def updateEither( + module: IvySbt#Module, + configuration: UpdateConfiguration, + uwconfig: UnresolvedWarningConfiguration, + log: Logger + ): Either[UnresolvedWarning, UpdateReport] = { + module.withModule(log) { case (ivy, moduleDescriptor, _) => + // Warn about duplicated and inconsistent dependencies + val iw = IvySbt.inconsistentDuplicateWarning(moduleDescriptor) + iw.foreach(log.warn(_)) + + val metadataDirectory = configuration.metadataDirectory + + // Create inputs, resolve and retrieve the module descriptor + val inputs = ResolutionInputs(ivy, moduleDescriptor, configuration, log) + val resolutionResult: Either[ResolveException, UpdateReport] = { + if ( + module.owner.configuration.updateOptions.cachedResolution && metadataDirectory.isDefined + ) { + val cache = + metadataDirectory.getOrElse(sys.error("Missing directory for cached resolution.")) + cachedResolveAndRetrieve(inputs, cache) + } else resolveAndRetrieve(inputs) + } + + // Convert to unresolved warning or retrieve update report + resolutionResult.fold( + exception => Left(UnresolvedWarning(exception, uwconfig)), + ur0 => { + val ur = configuration.retrieveManaged match { + case Some(retrieveConf) => retrieve(log, ivy, ur0, retrieveConf) + case _ => ur0 + } + Right(ur) + } + ) + } + } + + def groupedConflicts[T](moduleFilter: ModuleFilter, grouping: ModuleID => T)( + report: UpdateReport + ): Map[T, Set[String]] = + report.configurations.flatMap { confReport => + val evicted = confReport.evicted.filter(moduleFilter) + val evictedSet = evicted.map(m => (m.organization, m.name)).toSet + val conflicted = + confReport.allModules.filter(mod => evictedSet((mod.organization, mod.name))) + grouped(grouping)(conflicted ++ evicted) + }.toMap + + def grouped[T](grouping: ModuleID => T)(mods: Seq[ModuleID]): Map[T, Set[String]] = + mods.groupBy(grouping).view.mapValues(_.map(_.revision).toSet).toMap + + def addExcluded( + report: UpdateReport, + classifiers: Vector[String], + exclude: Map[ModuleID, Set[String]] + ): UpdateReport = + report.addMissing { id => + classifiedArtifacts(id.name, classifiers filter getExcluded(id, exclude)) + } + + private[this] def getExcluded(id: ModuleID, exclude: Map[ModuleID, Set[String]]): Set[String] = + exclude.getOrElse(restrictedCopy(id, false), Set.empty[String]) + + def extractExcludes(report: UpdateReport): Map[ModuleID, Set[String]] = + report.allMissing flatMap { case (_, mod, art) => + art.classifier.map { c => + (restrictedCopy(mod, false), c) + } + } groupBy (_._1) map { case (mod, pairs) => (mod, pairs.map(_._2).toSet) } + + /** + * Represents the inputs to pass in to [[resolveAndRetrieve]] and [[cachedResolveAndRetrieve]]. + * + * @param ivy The ivy instance to resolve and retrieve dependencies. + * @param module The module descriptor to be resolved. + * @param updateConfiguration The update configuration for [[ResolveOptions]]. + * @param log The logger. + */ + private case class ResolutionInputs( + ivy: Ivy, + module: DefaultModuleDescriptor, + updateConfiguration: UpdateConfiguration, + log: Logger + ) + + implicit def toIvyFilter(f: ArtifactTypeFilter): IvyFilter = new IvyFilter { + override def accept(o: Object): Boolean = Option(o) exists { case a: IArtifact => + applyFilter(a) + } + + def applyFilter(a: IArtifact): Boolean = + (f.types contains a.getType) ^ f.inverted + } + + /** + * Defines the internal entrypoint of module resolution and retrieval. 
+ * + * This method is the responsible of populating [[ResolveOptions]] and pass + * it in to the ivy instance to perform the module resolution. + * + * It returns an already resolved [[UpdateReport]] instead of a [[ResolveReport]] + * like its counterpart [[CachedResolutionResolveEngine.customResolve]]. + * + * @param inputs The resolution inputs. + * @return The result of the resolution. + */ + private[this] def resolveAndRetrieve( + inputs: ResolutionInputs + ): Either[ResolveException, UpdateReport] = { + // Populate resolve options from the passed arguments + val ivyInstance = inputs.ivy + val moduleDescriptor = inputs.module + val updateConfiguration = inputs.updateConfiguration + val resolveOptions = new ResolveOptions + val resolveId = ResolveOptions.getDefaultResolveId(moduleDescriptor) + val artifactFilter = getArtifactTypeFilter(updateConfiguration.artifactFilter) + import updateConfiguration._ + resolveOptions.setResolveId(resolveId) + resolveOptions.setArtifactFilter(artifactFilter) + resolveOptions.setUseCacheOnly(offline) + resolveOptions.setLog(ivyLogLevel(logging)) + if (frozen) { + resolveOptions.setTransitive(false) + resolveOptions.setCheckIfChanged(false) + } + ResolutionCache.cleanModule( + moduleDescriptor.getModuleRevisionId, + resolveId, + ivyInstance.getSettings.getResolutionCacheManager + ) + + val resolveReport = ivyInstance.resolve(moduleDescriptor, resolveOptions) + if (resolveReport.hasError && !missingOk) { + import scala.jdk.CollectionConverters._ + // If strict error, collect report information and generated UnresolvedWarning + val messages = resolveReport.getAllProblemMessages.asScala.toSeq.map(_.toString).distinct + val failedPaths = resolveReport.getUnresolvedDependencies.map { node => + val moduleID = IvyRetrieve.toModuleID(node.getId) + val path = IvyRetrieve + .findPath(node, moduleDescriptor.getModuleRevisionId) + .map(x => IvyRetrieve.toModuleID(x.getId)) + moduleID -> path + }.toMap + val failedModules = failedPaths.keys.toSeq + Left(new ResolveException(messages, failedModules, failedPaths)) + } else { + // If no strict error, we convert the resolve report into an update report + val cachedDescriptor = ivyInstance.getSettings.getResolutionCacheManager + .getResolvedIvyFileInCache(moduleDescriptor.getModuleRevisionId) + Right(IvyRetrieve.updateReport(resolveReport, cachedDescriptor)) + } + } + + /** + * Resolves and retrieves a module with a cache mechanism defined + * here. + * + * It's the cached version of [[resolveAndRetrieve]]. + * + * @param inputs The resolution inputs. + * @param logicalClock The clock to check if a file is outdated or not. + * @param cache The optional cache dependency. + * @return The result of the cached resolution. 
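For readers less familiar with Ivy's resolve API, this is roughly what the `offline` and `frozen` flags amount to on the Ivy side. Every setter below already appears in `resolveAndRetrieve` above; `moduleDescriptor` is an assumed in-scope value, and this is only a sketch, not an additional code path in the change.

    import org.apache.ivy.core.resolve.ResolveOptions

    val opts = new ResolveOptions
    opts.setResolveId(ResolveOptions.getDefaultResolveId(moduleDescriptor))
    opts.setUseCacheOnly(true)      // offline: resolve from the local Ivy cache only
    opts.setTransitive(false)       // frozen: do not re-walk transitive dependencies
    opts.setCheckIfChanged(false)   // frozen: trust cached metadata for changing/dynamic revisions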
+ */ + private[this] def cachedResolveAndRetrieve( + inputs: ResolutionInputs, + cache: File + ): Either[ResolveException, UpdateReport] = { + val log = inputs.log + val descriptor = inputs.module + val updateConfiguration = inputs.updateConfiguration + val resolver = inputs.ivy.getResolveEngine.asInstanceOf[CachedResolutionResolveEngine] + val resolveOptions = new ResolveOptions + val resolveId = ResolveOptions.getDefaultResolveId(descriptor) + val artifactFilter = getArtifactTypeFilter(updateConfiguration.artifactFilter) + import updateConfiguration._ + resolveOptions.setResolveId(resolveId) + resolveOptions.setArtifactFilter(artifactFilter) + resolveOptions.setUseCacheOnly(offline) + resolveOptions.setLog(ivyLogLevel(logging)) + if (frozen) { + resolveOptions.setTransitive(false) + resolveOptions.setCheckIfChanged(false) + } + resolver.customResolve( + descriptor, + missingOk, + updateConfiguration.logicalClock, + resolveOptions, + cache, + log + ) + } + + private def retrieve( + log: Logger, + ivy: Ivy, + report: UpdateReport, + config: RetrieveConfiguration + ): UpdateReport = { + val copyChecksums = + Option(ivy.getVariable(ConvertResolver.ManagedChecksums)) match { + case Some(x) => x.toBoolean + case _ => false + } + val toRetrieve: Option[Vector[ConfigRef]] = config.configurationsToRetrieve + val base = getRetrieveDirectory(config.retrieveDirectory) + val pattern = getRetrievePattern(config.outputPattern) + val existingFiles = PathFinder(base).allPaths.get() filterNot { _.isDirectory } + val toCopy = new collection.mutable.HashSet[(File, File)] + val retReport = report retrieve { (conf: ConfigRef, mid, art, cached) => + toRetrieve match { + case None => performRetrieve(conf, mid, art, base, pattern, cached, copyChecksums, toCopy) + case Some(refs) if refs.contains[ConfigRef](conf) => + performRetrieve(conf, mid, art, base, pattern, cached, copyChecksums, toCopy) + case _ => cached + } + } + IO.copy(toCopy) + val resolvedFiles = toCopy.map(_._2) + if (config.sync) { + val filesToDelete = existingFiles.filterNot(resolvedFiles.contains) + filesToDelete foreach { f => + log.info(s"Deleting old dependency: ${f.getAbsolutePath}") + f.delete() + } + } + + retReport + } + + private def performRetrieve( + conf: ConfigRef, + mid: ModuleID, + art: Artifact, + base: File, + pattern: String, + cached: File, + copyChecksums: Boolean, + toCopy: collection.mutable.HashSet[(File, File)] + ): File = { + val to = retrieveTarget(conf, mid, art, base, pattern) + toCopy += ((cached, to)) + + if (copyChecksums) { + // Copy over to the lib managed directory any checksum for a jar if it exists + // TODO(jvican): Support user-provided checksums + val cachePath = cached.getAbsolutePath + IvySbt.DefaultChecksums.foreach { checksum => + if (cachePath.endsWith(".jar")) { + val cacheChecksum = new File(s"$cachePath.$checksum") + if (cacheChecksum.exists()) { + val toChecksum = new File(s"${to.getAbsolutePath}.$checksum") + toCopy += ((cacheChecksum, toChecksum)) + } + } + } + } + + to + } + + private def retrieveTarget( + conf: ConfigRef, + mid: ModuleID, + art: Artifact, + base: File, + pattern: String + ): File = + new File(base, substitute(conf, mid, art, pattern)) + + private def substitute(conf: ConfigRef, mid: ModuleID, art: Artifact, pattern: String): String = { + val mextra = IvySbt.javaMap(mid.extraAttributes, true) + val aextra = IvySbt.extra(art, true) + IvyPatternHelper.substitute( + pattern, + mid.organization, + mid.name, + mid.branchName.orNull, + mid.revision, + art.name, + art.`type`, + 
art.extension, + conf.name, + null, + mextra, + aextra + ) + } + + import UpdateLogging.{ Quiet, Full, DownloadOnly, Default } + import LogOptions.{ LOG_QUIET, LOG_DEFAULT, LOG_DOWNLOAD_ONLY } + private def ivyLogLevel(level: UpdateLogging) = + level match { + case Quiet => LOG_QUIET + case DownloadOnly => LOG_DOWNLOAD_ONLY + case Full => LOG_DEFAULT + case Default => LOG_DOWNLOAD_ONLY + } + + def publish( + module: ModuleDescriptor, + artifacts: Seq[(IArtifact, File)], + resolver: DependencyResolver, + overwrite: Boolean + ): Unit = { + if (artifacts.nonEmpty) { + checkFilesPresent(artifacts) + try { + resolver.beginPublishTransaction(module.getModuleRevisionId(), overwrite); + artifacts.foreach { case (artifact, file) => + IvyUtil.retryWithBackoff( + resolver.publish(artifact, file, overwrite), + TransientNetworkException.apply, + maxAttempts = LMSysProp.maxPublishAttempts + ) + } + resolver.commitPublishTransaction() + } catch { + case e: Throwable => + try { + resolver.abortPublishTransaction() + } finally { + throw e + } + } + } + } + private[this] def checkFilesPresent(artifacts: Seq[(IArtifact, File)]): Unit = { + val missing = artifacts filter { case (_, file) => !file.exists } + if (missing.nonEmpty) + sys.error( + "Missing files for publishing:\n\t" + missing.map(_._2.getAbsolutePath).mkString("\n\t") + ) + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyCache.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyCache.scala new file mode 100644 index 000000000..1184af117 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyCache.scala @@ -0,0 +1,132 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.File + +import org.apache.ivy.core.cache.{ + ArtifactOrigin, + CacheDownloadOptions, + DefaultRepositoryCacheManager +} +import org.apache.ivy.core.module.descriptor.{ Artifact => IvyArtifact, DefaultArtifact } +import org.apache.ivy.plugins.repository.file.{ FileRepository => IvyFileRepository, FileResource } +import org.apache.ivy.plugins.repository.{ ArtifactResourceResolver, Resource, ResourceDownloader } +import org.apache.ivy.plugins.resolver.util.ResolvedResource +import org.apache.ivy.util.FileUtil +import sbt.io.Path +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyPaths } +import sbt.util.Logger + +class NotInCache(val id: ModuleID, cause: Throwable) + extends RuntimeException(NotInCache(id, cause), cause) { + def this(id: ModuleID) = this(id, null) +} +private object NotInCache { + def apply(id: ModuleID, cause: Throwable) = { + val postfix = if (cause == null) "" else (": " + cause.toString) + "File for " + id + " not in cache" + postfix + } +} + +/** Provides methods for working at the level of a single jar file with the default Ivy cache. */ +class IvyCache(val ivyHome: Option[File]) { + def lockFile = new File(ivyHome getOrElse Path.userHome, ".sbt.cache.lock") + + /** Caches the given 'file' with the given ID. It may be retrieved or cleared using this ID. 
*/ + def cacheJar( + moduleID: ModuleID, + file: File, + lock: Option[xsbti.GlobalLock], + log: Logger + ): Unit = { + val artifact = defaultArtifact(moduleID) + val resolved = + new ResolvedResource(new FileResource(new IvyFileRepository, file), moduleID.revision) + withDefaultCache(lock, log) { cache => + val resolver = new ArtifactResourceResolver { def resolve(artifact: IvyArtifact) = resolved } + cache.download(artifact, resolver, new FileDownloader, new CacheDownloadOptions) + () + } + } + + /** Clears the cache of the jar for the given ID. */ + def clearCachedJar(id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger): Unit = { + try { + withCachedJar(id, lock, log)(_.delete); () + } catch { + case e: Exception => log.debug("Error cleaning cached jar: " + e.toString) + } + } + + /** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown. */ + def retrieveCachedJar( + id: ModuleID, + toDirectory: File, + lock: Option[xsbti.GlobalLock], + log: Logger + ) = + withCachedJar(id, lock, log) { cachedFile => + val copyTo = new File(toDirectory, cachedFile.getName) + FileUtil.copy(cachedFile, copyTo, null) + copyTo + } + + /** Get the location of the cached jar for the given ID in the Ivy cache. If the jar is not in the cache, NotInCache is thrown . */ + def withCachedJar[T](id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)( + f: File => T + ): T = { + val cachedFile = + try { + withDefaultCache(lock, log) { cache => + val artifact = defaultArtifact(id) + cache.getArchiveFileInCache(artifact, unknownOrigin(artifact)) + } + } catch { case e: Exception => throw new NotInCache(id, e) } + + if (cachedFile.exists) f(cachedFile) else throw new NotInCache(id) + } + + /** Calls the given function with the default Ivy cache. */ + def withDefaultCache[T](lock: Option[xsbti.GlobalLock], log: Logger)( + f: DefaultRepositoryCacheManager => T + ): T = { + val (ivy, _) = basicLocalIvy(lock, log) + ivy.withIvy(log) { ivy => + val cache = ivy.getSettings.getDefaultRepositoryCacheManager + .asInstanceOf[DefaultRepositoryCacheManager] + cache.setUseOrigin(false) + f(cache) + } + } + private def unknownOrigin(artifact: IvyArtifact) = ArtifactOrigin.unkwnown(artifact) + + /** A minimal Ivy setup with only a local resolver and the current directory as the base directory. */ + private def basicLocalIvy(lock: Option[xsbti.GlobalLock], log: Logger) = { + val local = Resolver.defaultLocal + val paths = IvyPaths(".", ivyHome.map(_.toString)) + val conf = InlineIvyConfiguration() + .withPaths(paths) + .withResolvers(Vector(local)) + .withLock(lock) + .withLog(log) + (new IvySbt(conf), local) + } + + /** Creates a default jar artifact based on the given ID. */ + private def defaultArtifact(moduleID: ModuleID): IvyArtifact = + new DefaultArtifact(IvySbt.toID(moduleID), null, moduleID.name, "jar", "jar") +} + +/** Required by Ivy for copying to the cache. 
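A hedged usage sketch for the IvyCache API above: copy a previously cached jar into a target directory and fall back gracefully when it is absent. The module ID, target directory, and in-scope `log` are illustrative assumptions.

    val cache = new IvyCache(ivyHome = None)               // default Ivy home
    val id    = ModuleID("org.example", "demo", "1.0.0")   // hypothetical module
    try {
      val jar = cache.retrieveCachedJar(id, new java.io.File("target/cached"), lock = None, log)
      log.info("retrieved " + jar.getName)
    } catch {
      case _: NotInCache => log.warn("jar for " + id + " is not in the local Ivy cache")
    }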
*/ +private class FileDownloader extends ResourceDownloader { + def download(artifact: IvyArtifact, resource: Resource, dest: File): Unit = { + if (dest.exists()) dest.delete() + val part = new File(dest.getAbsolutePath + ".part") + FileUtil.copy(resource.openStream, part, null) + if (!part.renameTo(dest)) + sys.error("Could not move temporary file " + part + " to final location " + dest) + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyInternalDefaults.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyInternalDefaults.scala new file mode 100644 index 000000000..59f88ffc1 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyInternalDefaults.scala @@ -0,0 +1,28 @@ +package sbt +package internal.librarymanagement + +import java.io.File +import sbt.librarymanagement.ivy._ +import sbt.io.syntax._ +import xsbti.{ Logger => XLogger } +import sbt.util.Logger + +/** + * This is a list of functions with default values. + */ +object IvyInternalDefaults { + def defaultBaseDirectory: File = + (new File(".")).getAbsoluteFile / "lib_managed" + + def getBaseDirectory(opt: Option[File]): File = + opt.getOrElse(defaultBaseDirectory) + + def getLog(opt: Option[XLogger]): XLogger = + opt.getOrElse(Logger.Null) + + def defaultIvyPaths: IvyPaths = + IvyPaths(defaultBaseDirectory.toString, None) + + def getIvyPaths(opt: Option[IvyPaths]): IvyPaths = + opt.getOrElse(defaultIvyPaths) +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyLogger.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyLogger.scala new file mode 100644 index 000000000..dd0416399 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyLogger.scala @@ -0,0 +1,58 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import org.apache.ivy.util.{ Message, MessageLogger, MessageLoggerEngine } +import sbt.util.Logger + +/** Interface to Ivy logging. */ +private[sbt] final class IvyLoggerInterface(logger: Logger) extends MessageLogger { + def rawlog(msg: String, level: Int): Unit = log(msg, level) + def log(msg: String, level: Int): Unit = { + import Message.{ MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR } + level match { + case MSG_DEBUG => debug(msg) + case MSG_VERBOSE => verbose(msg) + case MSG_INFO => info(msg) + case MSG_WARN => warn(msg) + case MSG_ERR => error(msg) + } + } + // DEBUG level messages are very verbose and rarely useful to users. 
+ // TODO: provide access to this information some other way + def debug(msg: String): Unit = () + def verbose(msg: String): Unit = logger.verbose(msg) + def deprecated(msg: String): Unit = warn(msg) + def info(msg: String): Unit = if (SbtIvyLogger.acceptInfo(msg)) logger.info(msg) + def rawinfo(msg: String): Unit = info(msg) + def warn(msg: String): Unit = logger.warn(msg) + def error(msg: String): Unit = if (SbtIvyLogger.acceptError(msg)) logger.error(msg) + + private def emptyList = java.util.Collections.emptyList[String] + def getProblems = emptyList + def getWarns = emptyList + def getErrors = emptyList + + def clearProblems(): Unit = () + def sumupProblems(): Unit = clearProblems() + def progress(): Unit = () + def endProgress(): Unit = () + + def endProgress(msg: String): Unit = info(msg) + def isShowProgress = false + def setShowProgress(progress: Boolean): Unit = () +} +private[sbt] final class SbtMessageLoggerEngine extends MessageLoggerEngine { + + /** This is a hack to filter error messages about 'unknown resolver ...'. */ + override def error(msg: String): Unit = if (SbtIvyLogger.acceptError(msg)) super.error(msg) + override def sumupProblems(): Unit = clearProblems() +} +private[sbt] object SbtIvyLogger { + final val unknownResolver = "unknown resolver" + def acceptError(msg: String) = (msg ne null) && !msg.startsWith(unknownResolver) + + final val loadingSettings = ":: loading settings" + def acceptInfo(msg: String) = (msg ne null) && !msg.startsWith(loadingSettings) +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyRetrieve.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyRetrieve.scala new file mode 100644 index 000000000..836b3ede7 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyRetrieve.scala @@ -0,0 +1,276 @@ +/* sbt -- Simple Build Tool + * Copyright 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.File +import java.{ util => ju } +import collection.mutable +import collection.immutable.ArraySeq +import org.apache.ivy.core.{ module, report, resolve } +import module.descriptor.{ Artifact => IvyArtifact, License => IvyLicense } +import module.id.{ ModuleRevisionId, ModuleId => IvyModuleId } +import report.{ ArtifactDownloadReport, ConfigurationResolveReport, ResolveReport } +import resolve.{ IvyNode, IvyNodeCallers } +import IvyNodeCallers.{ Caller => IvyCaller } +import ivyint.SbtDefaultDependencyDescriptor +import sbt.librarymanagement._, syntax._ + +object IvyRetrieve { + def reports(report: ResolveReport): Vector[ConfigurationResolveReport] = + report.getConfigurations.toVector map report.getConfigurationReport + + def moduleReports(confReport: ConfigurationResolveReport): Vector[ModuleReport] = + for { + revId <- confReport.getModuleRevisionIds.toArray.toVector collect { + case revId: ModuleRevisionId => revId + } + } yield moduleRevisionDetail(confReport, confReport.getDependency(revId)) + + @deprecated("Internal only. 
No longer in use.", "0.13.6") + def artifactReports(mid: ModuleID, artReport: Seq[ArtifactDownloadReport]): ModuleReport = { + val (resolved, missing) = artifacts(artReport) + ModuleReport(mid, resolved, missing) + } + + private[sbt] def artifacts( + artReport: Seq[ArtifactDownloadReport] + ): (Vector[(Artifact, File)], Vector[Artifact]) = { + val missing = new mutable.ListBuffer[Artifact] + val resolved = new mutable.ListBuffer[(Artifact, File)] + for (r <- artReport) { + val fileOpt = Option(r.getLocalFile) + val art = toArtifact(r.getArtifact) + fileOpt match { + case Some(file) => resolved += ((art, file)) + case None => missing += art + } + } + (resolved.toVector, missing.toVector) + } + + // We need this because current module report used as part of UpdateReport/ConfigurationReport contains + // only the revolved modules. + // Sometimes the entire module can be excluded via rules etc. + private[sbt] def organizationArtifactReports( + confReport: ConfigurationResolveReport + ): Vector[OrganizationArtifactReport] = { + val moduleIds = confReport.getModuleIds.toArray.toVector collect { case mId: IvyModuleId => + mId + } + def organizationArtifact(mid: IvyModuleId): OrganizationArtifactReport = { + val deps = confReport.getNodes(mid).toArray.toVector collect { case node: IvyNode => node } + OrganizationArtifactReport( + mid.getOrganisation, + mid.getName, + deps map { + moduleRevisionDetail(confReport, _) + } + ) + } + moduleIds map { organizationArtifact } + } + + private[sbt] def nonEmptyString(s: String): Option[String] = + s match { + case null => None + case x if x.trim == "" => None + case x => Some(x.trim) + } + + private[sbt] def moduleRevisionDetail( + confReport: ConfigurationResolveReport, + dep: IvyNode + ): ModuleReport = { + def toExtraAttributes(ea: ju.Map[_, _]): Map[String, String] = + Map(ea.entrySet.toArray collect { + case entry: ju.Map.Entry[_, _] + if nonEmptyString(entry.getKey.toString).isDefined && nonEmptyString( + entry.getValue.toString + ).isDefined => + (entry.getKey.toString, entry.getValue.toString) + }: _*) + def toCaller(caller: IvyCaller): Caller = { + val m = toModuleID(caller.getModuleRevisionId) + val callerConfigurations = caller.getCallerConfigurations.toVector collect { + case x if nonEmptyString(x).isDefined => ConfigRef(x) + } + val ddOpt = Option(caller.getDependencyDescriptor) + val (extraAttributes, isForce, isChanging, isTransitive, isDirectlyForce) = ddOpt match { + case Some(dd: SbtDefaultDependencyDescriptor) => + val mod = dd.dependencyModuleId + ( + toExtraAttributes(dd.getExtraAttributes), + mod.isForce, + mod.isChanging, + mod.isTransitive, + mod.isForce + ) + case Some(dd) => + ( + toExtraAttributes(dd.getExtraAttributes), + dd.isForce, + dd.isChanging, + dd.isTransitive, + false + ) + case None => (Map.empty[String, String], false, false, true, false) + } + Caller( + m, + callerConfigurations, + extraAttributes, + isForce, + isChanging, + isTransitive, + isDirectlyForce + ) + } + val revId = dep.getResolvedId + val moduleId = toModuleID(revId) + val branch = nonEmptyString(revId.getBranch) + val (status, publicationDate, resolver, artifactResolver) = dep.isLoaded match { + case true => + val c = new ju.GregorianCalendar() + c.setTimeInMillis(dep.getPublication) + ( + nonEmptyString(dep.getDescriptor.getStatus), + Some(c), + nonEmptyString(dep.getModuleRevision.getResolver.getName), + nonEmptyString(dep.getModuleRevision.getArtifactResolver.getName) + ) + case _ => (None, None, None, None) + } + val (evicted, evictedData, 
evictedReason) = dep.isEvicted(confReport.getConfiguration) match { + case true => + val edOpt = Option(dep.getEvictedData(confReport.getConfiguration)) + edOpt match { + case Some(ed) => + ( + true, + nonEmptyString(Option(ed.getConflictManager) map { _.toString } getOrElse { + "transitive" + }), + nonEmptyString(ed.getDetail) + ) + case None => (true, None, None) + } + case _ => (false, None, None) + } + val problem = dep.hasProblem match { + case true => nonEmptyString(dep.getProblem.getMessage) + case _ => None + } + val mdOpt = for { + mr <- Option(dep.getModuleRevision) + md <- Option(mr.getDescriptor) + } yield md + val homepage = mdOpt match { + case Some(md) => + nonEmptyString(md.getHomePage) + case _ => None + } + val extraAttributes: Map[String, String] = toExtraAttributes(mdOpt match { + case Some(md) => md.getExtraAttributes + case _ => dep.getResolvedId.getExtraAttributes + }) + val isDefault = Option(dep.getDescriptor) map { _.isDefault } + val configurations = dep.getConfigurations(confReport.getConfiguration).toVector map { + ConfigRef(_) + } + val licenses: Vector[(String, Option[String])] = mdOpt match { + case Some(md) => + md.getLicenses.toVector collect { + case lic: IvyLicense if Option(lic.getName).isDefined => + val temporaryURL = "http://localhost" + (lic.getName, nonEmptyString(lic.getUrl) orElse { Some(temporaryURL) }) + } + case _ => Vector.empty + } + val callers = dep.getCallers(confReport.getConfiguration).toVector map { toCaller } + val (resolved, missing) = artifacts( + ArraySeq.unsafeWrapArray(confReport.getDownloadReports(revId)) + ) + + ModuleReport( + moduleId, + resolved, + missing, + status, + publicationDate, + resolver, + artifactResolver, + evicted, + evictedData, + evictedReason, + problem, + homepage, + extraAttributes, + isDefault, + branch, + configurations, + licenses, + callers + ) + } + + def evicted(confReport: ConfigurationResolveReport): Seq[ModuleID] = + ArraySeq.unsafeWrapArray(confReport.getEvictedNodes).map(node => toModuleID(node.getId)) + + def toModuleID(revID: ModuleRevisionId): ModuleID = + ModuleID(revID.getOrganisation, revID.getName, revID.getRevision) + .withExtraAttributes(IvySbt.getExtraAttributes(revID)) + .branch(nonEmptyString(revID.getBranch)) + + def toArtifact(art: IvyArtifact): Artifact = { + import art._ + Artifact( + getName, + getType, + getExt, + Option(getExtraAttribute("classifier")), + getConfigurations.toVector map { (c: String) => + ConfigRef(c) + }, + Option(getUrl).map(_.toURI) + ) + } + + def updateReport(report: ResolveReport, cachedDescriptor: File): UpdateReport = + UpdateReport( + cachedDescriptor, + reports(report) map configurationReport, + updateStats(report), + Map.empty + ).recomputeStamps() + def updateStats(report: ResolveReport): UpdateStats = + UpdateStats(report.getResolveTime, report.getDownloadTime, report.getDownloadSize, false) + def configurationReport(confReport: ConfigurationResolveReport): ConfigurationReport = + ConfigurationReport( + ConfigRef(confReport.getConfiguration), + moduleReports(confReport), + organizationArtifactReports(confReport) + ) + + /** + * Tries to find Ivy graph path the from node to target. + */ + def findPath(target: IvyNode, from: ModuleRevisionId): List[IvyNode] = { + def doFindPath(current: IvyNode, path: List[IvyNode]): List[IvyNode] = { + // Ivy actually returns mix of direct and non-direct callers here. + // that's why we have to calculate all possible paths below and pick the longest path. 
+ val callers = current.getAllRealCallers.toList + val callersRevId = (callers map { _.getModuleRevisionId }).distinct + val paths: List[List[IvyNode]] = ((callersRevId map { revId => + val node = current.findNode(revId) + if (revId == from) node :: path + else if (node == node.getRoot) Nil + else if (path.contains[IvyNode](node)) path + else doFindPath(node, node :: path) + }) sortBy { _.size }).reverse + paths.headOption getOrElse Nil + } + if (target.getId == from) List(target) + else doFindPath(target, List(target)) + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyScalaUtil.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyScalaUtil.scala new file mode 100644 index 000000000..a2c3ec452 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyScalaUtil.scala @@ -0,0 +1,234 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.util.Collections.emptyMap +import scala.collection.mutable.HashSet + +import org.apache.ivy.core.module.descriptor._ +import org.apache.ivy.core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId } +import org.apache.ivy.plugins.matcher.ExactPatternMatcher +import org.apache.ivy.plugins.namespace.NamespaceTransformer +import sbt.util.Logger +import sbt.librarymanagement.ScalaArtifacts._ +import sbt.librarymanagement.{ Configuration, CrossVersion, ScalaModuleInfo } + +object IvyScalaUtil { + + /** Performs checks/adds filters on Scala dependencies (if enabled in ScalaModuleInfo). */ + def checkModule( + module: DefaultModuleDescriptor, + scalaVersionConfigs: Vector[String], + log: Logger + )(check: ScalaModuleInfo): Unit = { + if (check.checkExplicit) + checkDependencies( + module, + check.scalaOrganization, + check.scalaArtifacts, + check.scalaBinaryVersion, + scalaVersionConfigs, + log + ) + if (check.filterImplicit) + excludeScalaJars(module, check.configurations) + if (check.overrideScalaVersion) + overrideScalaVersion( + module, + check.scalaOrganization, + check.scalaFullVersion, + scalaVersionConfigs + ) + } + + class OverrideScalaMediator( + scalaOrganization: String, + scalaVersion: String, + scalaVersionConfigs0: Vector[String] + ) extends DependencyDescriptorMediator { + private[this] val scalaVersionConfigs = scalaVersionConfigs0.toSet + private val binaryVersion = CrossVersion.binaryScalaVersion(scalaVersion) + def mediate(dd: DependencyDescriptor): DependencyDescriptor = { + // Mediate only for the dependencies in scalaVersion configurations. https://github.com/sbt/sbt/issues/2786 + def configQualifies: Boolean = + dd.getModuleConfigurations exists { scalaVersionConfigs } + // Do not rewrite the dependencies of Scala dependencies themselves, this prevents bootstrapping + // a Scala compiler using another Scala compiler. 
+ def dependeeQualifies: Boolean = + dd.getParentRevisionId == null || + !isScala2Artifact(dd.getParentRevisionId.getName) || + !isScala3Artifact(dd.getParentRevisionId.getName) + + def matchBinaryVersion(version: String): Boolean = + CrossVersion.binaryScalaVersion(version) == binaryVersion + + val transformer = + new NamespaceTransformer { + def transform(mrid: ModuleRevisionId): ModuleRevisionId = { + if (mrid == null) mrid + else if ( + (isScala2Artifact(mrid.getName) || isScala3Artifact(mrid.getName)) && + configQualifies && + dependeeQualifies + ) { + // do not override the binary incompatible Scala version because: + // - the artifacts compiled with Scala 3 depends on the Scala 2.13 scala-library + // - the Scala 2 TASTy reader can consume the Scala 3 artifacts + val newScalaVersion = + if (matchBinaryVersion(mrid.getRevision)) scalaVersion + else mrid.getRevision + + ModuleRevisionId.newInstance( + scalaOrganization, + mrid.getName, + mrid.getBranch, + newScalaVersion, + mrid.getQualifiedExtraAttributes + ) + } else mrid + } + + def isIdentity: Boolean = false + } + + DefaultDependencyDescriptor.transformInstance(dd, transformer, false) + } + } + + def overrideScalaVersion( + module: DefaultModuleDescriptor, + organization: String, + version: String, + scalaVersionConfigs: Vector[String] + ): Unit = { + val mediator = new OverrideScalaMediator(organization, version, scalaVersionConfigs) + module.addDependencyDescriptorMediator( + new ModuleId(Organization, "*"), + ExactPatternMatcher.INSTANCE, + mediator + ) + if (organization != Organization) + module.addDependencyDescriptorMediator( + new ModuleId(organization, "*"), + ExactPatternMatcher.INSTANCE, + mediator + ) + } + + def overrideVersion( + module: DefaultModuleDescriptor, + org: String, + name: String, + version: String + ): Unit = { + val id = new ModuleId(org, name) + val over = new OverrideDependencyDescriptorMediator(null, version) + module.addDependencyDescriptorMediator(id, ExactPatternMatcher.INSTANCE, over) + } + + /** + * Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the + * dependencies matches scalaVersion. + */ + private def checkDependencies( + module: ModuleDescriptor, + scalaOrganization: String, + scalaArtifacts: Vector[String], + scalaBinaryVersion: String, + scalaVersionConfigs0: Vector[String], + log: Logger + ): Unit = { + val scalaVersionConfigs: String => Boolean = + if (scalaVersionConfigs0.isEmpty) (_: String) => false else scalaVersionConfigs0.toSet + def binaryScalaWarning(dep: DependencyDescriptor): Option[String] = { + val id = dep.getDependencyRevisionId + val depBinaryVersion = CrossVersion.binaryScalaVersion(id.getRevision) + def isScalaLangOrg = id.getOrganisation == scalaOrganization + def isScalaArtifact = scalaArtifacts.contains[String](id.getName) + + def hasBinVerMismatch = + depBinaryVersion != scalaBinaryVersion && + // scala 2.13 is compatible with scala 3.x + !Seq(depBinaryVersion, scalaBinaryVersion) + .forall(bv => bv.startsWith("3") || bv.startsWith("2.13")) + + def matchesOneOfTheConfigs = dep.getModuleConfigurations exists { scalaVersionConfigs } + val mismatched = + isScalaLangOrg && isScalaArtifact && hasBinVerMismatch && matchesOneOfTheConfigs + if (mismatched) + Some( + "Binary version (" + depBinaryVersion + ") for dependency " + id + + "\n\tin " + module.getModuleRevisionId + + " differs from Scala binary version in project (" + scalaBinaryVersion + ")." 
+ ) + else + None + } + module.getDependencies.toList.flatMap(binaryScalaWarning).toSet foreach { (s: String) => + log.warn(s) + } + } + private def configurationSet(configurations: Iterable[Configuration]) = + configurations.map(_.toString).toSet + + /** + * Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is + * done because these jars are provided by the ScalaInstance of the project. The version of Scala to use + * is done by setting scalaVersion in the project definition. + */ + private def excludeScalaJars( + module: DefaultModuleDescriptor, + configurations: Iterable[Configuration] + ): Unit = { + val configurationNames = { + val names = module.getConfigurationsNames + if (configurations.isEmpty) names + else { + val configSet = configurationSet(configurations) + configSet.intersect(HashSet(names: _*)) + configSet.toArray + } + } + def excludeScalaJar(name: String): Unit = + module.addExcludeRule(excludeRule(Organization, name, configurationNames, "jar")) + excludeScalaJar(LibraryID) + excludeScalaJar(CompilerID) + } + + /** + * Creates an ExcludeRule that excludes artifacts with the given module organization and name for + * the given configurations. + */ + private[sbt] def excludeRule( + organization: String, + name: String, + configurationNames: Iterable[String], + excludeTypePattern: String + ): ExcludeRule = { + val artifact = + new ArtifactId(ModuleId.newInstance(organization, name), "*", excludeTypePattern, "*") + val rule = + new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, emptyMap[AnyRef, AnyRef]) + configurationNames.foreach(rule.addConfiguration) + rule + } + + /** + * Creates an IncludeRule that includes artifacts with the given module organization and name for + * the given configurations. 
+ */ + private[sbt] def includeRule( + organization: String, + name: String, + configurationNames: Iterable[String], + includeTypePattern: String + ): IncludeRule = { + val artifact = + new ArtifactId(ModuleId.newInstance(organization, name), "*", includeTypePattern, "*") + val rule = + new DefaultIncludeRule(artifact, ExactPatternMatcher.INSTANCE, emptyMap[AnyRef, AnyRef]) + configurationNames.foreach(rule.addConfiguration) + rule + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyUtil.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyUtil.scala new file mode 100644 index 000000000..04b31adab --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/IvyUtil.scala @@ -0,0 +1,56 @@ +package sbt.internal.librarymanagement + +import java.io.IOException +import java.net.{ SocketException, SocketTimeoutException } + +import scala.annotation.tailrec +import scala.util.{ Failure, Success, Try } + +private[sbt] object IvyUtil { + def separate[A, B](l: Seq[Either[A, B]]): (Seq[A], Seq[B]) = + (l.flatMap(_.left.toOption), l.flatMap(_.toOption)) + + @tailrec + final def retryWithBackoff[T]( + f: => T, + predicate: Throwable => Boolean, + maxAttempts: Int, + retry: Int = 0 + ): T = { + // Using Try helps in catching NonFatal exceptions only + Try { + f + } match { + case Success(value) => value + case Failure(e) if predicate(e) && retry < (maxAttempts - 1) => + // max 8s backoff + val backoff = math.min(math.pow(2d, retry.toDouble).toLong * 1000L, 8000L) + Thread.sleep(backoff) + retryWithBackoff(f, predicate, maxAttempts, retry + 1) + case Failure(e) => throw e + } + } + + /** + * Currently transient network errors are defined as: + * - a network timeout + * - all server errors (response code 5xx) + * - rate limiting (response code 429) + */ + object TransientNetworkException { + private val _r = """.*HTTP response code: (5\d{2}|408|429).*""".r + + @inline private def check(s: String): Boolean = { + if (s == null) return false + + _r.pattern.matcher(s).matches() + } + + def apply(t: Throwable): Boolean = t match { + case _: SocketException | _: SocketTimeoutException => true + case e: IOException if check(e.getMessage) => true + case _ => false + } + } + +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/MakePom.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/MakePom.scala new file mode 100644 index 000000000..58d05628e --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/MakePom.scala @@ -0,0 +1,545 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009, 2010 Mark Harrah + */ + +// based on Ivy's PomModuleDescriptorWriter, which is Apache Licensed, Version 2.0 +// http://www.apache.org/licenses/LICENSE-2.0 + +package sbt.internal.librarymanagement + +import java.io.File +import sbt.util.Logger +import sbt.librarymanagement._ +import Resolver._ +import mavenint.PomExtraDependencyAttributes + +import scala.collection.immutable.ArraySeq +// Node needs to be renamed to XNode because the task subproject contains a Node type that will shadow +// scala.xml.Node when generating aggregated API documentation +import scala.xml.{ Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute } +import Configurations.Optional + +import org.apache.ivy.Ivy +import org.apache.ivy.core.settings.IvySettings +import org.apache.ivy.core.module.descriptor.{ + DependencyArtifactDescriptor, + DependencyDescriptor, + License, + ModuleDescriptor, + ExcludeRule +} +import org.apache.ivy.plugins.resolver.{ ChainResolver, 
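A hedged sketch of the retry helper defined in IvyUtil above: attempts are separated by an exponential backoff of 1s, 2s, 4s, ... capped at 8s, and only exceptions matching the predicate are retried. The flaky download function and the attempt count are illustrative assumptions.

    def fetchRemoteDescriptor(): String =                    // hypothetical flaky network call
      scala.io.Source.fromURL("https://repo.example.org/ivy.xml").mkString

    val body: String = IvyUtil.retryWithBackoff(
      fetchRemoteDescriptor(),
      IvyUtil.TransientNetworkException.apply,               // retry only timeouts, 5xx, 408 and 429
      maxAttempts = 3
    )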
DependencyResolver, IBiblioResolver } +import ivyint.CustomRemoteMavenResolver +import sbt.io.IO + +object MakePom { + + /** True if the revision is an ivy-range, not a complete revision. */ + def isDependencyVersionRange(revision: String): Boolean = VersionRange.isVersionRange(revision) + + /** Converts Ivy revision ranges to that of Maven POM */ + def makeDependencyVersion(revision: String): String = + VersionRange.fromIvyToMavenVersion(revision) +} +class MakePom(val log: Logger) { + import MakePom._ + @deprecated( + "Use `write(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, XNode => XNode, MavenRepository => Boolean, Boolean, File)` instead", + "0.11.2" + ) + def write( + ivy: Ivy, + module: ModuleDescriptor, + moduleInfo: ModuleInfo, + configurations: Option[Iterable[Configuration]], + extra: NodeSeq, + process: XNode => XNode, + filterRepositories: MavenRepository => Boolean, + allRepositories: Boolean, + output: File + ): Unit = + write( + ivy, + module, + moduleInfo: ModuleInfo, + configurations: Option[Iterable[Configuration]], + Set(Artifact.DefaultType), + extra, + process, + filterRepositories, + allRepositories, + output + ) + def write( + ivy: Ivy, + module: ModuleDescriptor, + moduleInfo: ModuleInfo, + configurations: Option[Iterable[Configuration]], + includeTypes: Set[String], + extra: NodeSeq, + process: XNode => XNode, + filterRepositories: MavenRepository => Boolean, + allRepositories: Boolean, + output: File + ): Unit = + write( + process( + toPom( + ivy, + module, + moduleInfo, + configurations, + includeTypes, + extra, + filterRepositories, + allRepositories + ) + ), + output + ) + // use \n as newline because toString uses PrettyPrinter, which hard codes line endings to be \n + def write(node: XNode, output: File): Unit = write(toString(node), output, "\n") + def write(xmlString: String, output: File, newline: String): Unit = + IO.write(output, "" + newline + xmlString) + + def toString(node: XNode): String = new PrettyPrinter(1000, 4).format(node) + @deprecated( + "Use `toPom(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, MavenRepository => Boolean, Boolean)` instead", + "0.11.2" + ) + def toPom( + ivy: Ivy, + module: ModuleDescriptor, + moduleInfo: ModuleInfo, + configurations: Option[Iterable[Configuration]], + extra: NodeSeq, + filterRepositories: MavenRepository => Boolean, + allRepositories: Boolean + ): XNode = + toPom( + ivy, + module, + moduleInfo, + configurations, + Set(Artifact.DefaultType), + extra, + filterRepositories, + allRepositories + ) + def toPom( + ivy: Ivy, + module: ModuleDescriptor, + moduleInfo: ModuleInfo, + configurations: Option[Iterable[Configuration]], + includeTypes: Set[String], + extra: NodeSeq, + filterRepositories: MavenRepository => Boolean, + allRepositories: Boolean + ): XNode = + ( + 4.0.0 + {makeModuleID(module)} + {moduleInfo.nameFormal} + {makeStartYear(moduleInfo)} + {makeOrganization(moduleInfo)} + {makeScmInfo(moduleInfo)} + {makeDeveloperInfo(moduleInfo)} + {extra} + { + val deps = depsInConfs(module, configurations) + makeProperties(module, deps) ++ + makeDependencies(deps, includeTypes, ArraySeq.unsafeWrapArray(module.getAllExcludeRules)) + } + {makeRepositories(ivy.getSettings, allRepositories, filterRepositories)} + ) + + def makeModuleID(module: ModuleDescriptor): NodeSeq = { + val mrid = moduleDescriptor(module) + val a: NodeSeq = + ({mrid.getOrganisation} + {mrid.getName} + {packaging(module)}) + val b: NodeSeq = + 
((description(module.getDescription) ++ + homePage(module.getHomePage) ++ + revision(mrid.getRevision) ++ + licenses(module.getLicenses)): NodeSeq) + a ++ b + } + + def makeStartYear(moduleInfo: ModuleInfo): NodeSeq = + moduleInfo.startYear match { + case Some(y) => {y} + case _ => NodeSeq.Empty + } + def makeOrganization(moduleInfo: ModuleInfo): NodeSeq = { + + {moduleInfo.organizationName} + { + moduleInfo.organizationHomepage match { + case Some(h) => {h} + case _ => NodeSeq.Empty + } + } + + } + def makeScmInfo(moduleInfo: ModuleInfo): NodeSeq = { + moduleInfo.scmInfo match { + case Some(s) => + + {s.browseUrl} + {s.connection} + { + s.devConnection match { + case Some(d) => {d} + case _ => NodeSeq.Empty + } + } + + case _ => NodeSeq.Empty + } + } + def makeDeveloperInfo(moduleInfo: ModuleInfo): NodeSeq = { + if (moduleInfo.developers.nonEmpty) { + + { + moduleInfo.developers.map { (developer: Developer) => + + {developer.id} + {developer.name} + {developer.url} + { + developer.email match { + case "" | null => NodeSeq.Empty + case e => {e} + } + } + + } + } + + } else NodeSeq.Empty + } + def makeProperties(module: ModuleDescriptor, dependencies: Seq[DependencyDescriptor]): NodeSeq = { + val extra = IvySbt.getExtraAttributes(module) + val depExtra = PomExtraDependencyAttributes.writeDependencyExtra(dependencies).mkString("\n") + val allExtra = + if (depExtra.isEmpty) extra + else extra.updated(PomExtraDependencyAttributes.ExtraAttributesKey, depExtra) + if (allExtra.isEmpty) NodeSeq.Empty else makeProperties(allExtra) + } + def makeProperties(extra: Map[String, String]): NodeSeq = { + def _extraAttributes(k: String) = + if (k == PomExtraDependencyAttributes.ExtraAttributesKey) xmlSpacePreserve + else scala.xml.Null + { + for ((key, value) <- extra) + yield ({value}).copy(label = key, attributes = _extraAttributes(key)) + } + } + + /** + * Attribute tag that PrettyPrinter won't ignore, saying "don't mess with my spaces" + * Without this, PrettyPrinter will flatten multiple entries for ExtraDependencyAttributes and make them + * unparseable. (e.g. 
a plugin that depends on multiple plugins will fail) + */ + def xmlSpacePreserve = new PrefixedAttribute("xml", "space", "preserve", scala.xml.Null) + + def description(d: String) = + if ((d eq null) || d.isEmpty) NodeSeq.Empty + else + { + d + } + def licenses(ls: Array[License]) = + if (ls == null || ls.isEmpty) NodeSeq.Empty + else + { + ls.map(license) + } + def license(l: License) = + + {l.getName} + {l.getUrl} + repo + + def homePage(homePage: String) = + if (homePage eq null) NodeSeq.Empty + else + { + homePage + } + def revision(version: String) = + if (version ne null) { + version + } + else NodeSeq.Empty + def packaging(module: ModuleDescriptor) = + module.getAllArtifacts match { + case Array() => "pom" + case Array(x) => x.getType + case xs => + val types = xs.map(_.getType).toList.filterNot(IgnoreTypes) + types match { + case Nil => Artifact.PomType + case xs if xs.contains(Artifact.DefaultType) => Artifact.DefaultType + case x :: (xs @ _) => x + } + } + val IgnoreTypes: Set[String] = Set(Artifact.SourceType, Artifact.DocType, Artifact.PomType) + + @deprecated("Use `makeDependencies` variant which takes excludes", "0.13.9") + def makeDependencies( + dependencies: Seq[DependencyDescriptor], + includeTypes: Set[String] + ): NodeSeq = + makeDependencies(dependencies, includeTypes, Nil) + + def makeDependencies( + dependencies: Seq[DependencyDescriptor], + includeTypes: Set[String], + excludes: Seq[ExcludeRule] + ): NodeSeq = + if (dependencies.isEmpty) + NodeSeq.Empty + else + + { + dependencies.map(makeDependency(_, includeTypes, excludes)) + } + + + @deprecated("Use `makeDependency` variant which takes excludes", "0.13.9") + def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = + makeDependency(dependency, includeTypes, Nil) + + def makeDependency( + dependency: DependencyDescriptor, + includeTypes: Set[String], + excludes: Seq[ExcludeRule] + ): NodeSeq = { + val artifacts = dependency.getAllDependencyArtifacts + val includeArtifacts = artifacts.filter(d => includeTypes(d.getType)) + if (artifacts.isEmpty) { + val configs = dependency.getModuleConfigurations + if (configs.filterNot(Set("sources", "docs")).nonEmpty) { + val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations) + makeDependencyElem(dependency, scope, optional, None, None, excludes) + } else NodeSeq.Empty + } else if (includeArtifacts.isEmpty) + NodeSeq.Empty + else + NodeSeq.fromSeq(artifacts.flatMap(a => makeDependencyElem(dependency, a, excludes))) + } + + @deprecated("Use `makeDependencyElem` variant which takes excludes", "0.13.9") + def makeDependencyElem( + dependency: DependencyDescriptor, + artifact: DependencyArtifactDescriptor + ): Option[Elem] = + makeDependencyElem(dependency, artifact, Nil) + + def makeDependencyElem( + dependency: DependencyDescriptor, + artifact: DependencyArtifactDescriptor, + excludes: Seq[ExcludeRule] + ): Option[Elem] = { + val configs = artifact.getConfigurations.toList match { + case Nil | "*" :: Nil => dependency.getModuleConfigurations + case x => x.toArray + } + if (!configs.forall(Set("sources", "docs"))) { + val (scope, optional) = getScopeAndOptional(configs) + val classifier = artifactClassifier(artifact) + val baseType = artifactType(artifact) + val tpe = (classifier, baseType) match { + case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None + case _ => baseType + } + Some(makeDependencyElem(dependency, scope, optional, classifier, tpe, excludes)) + } else None + } + + @deprecated("Use 
`makeDependencyElem` variant which takes excludes", "0.13.9") + def makeDependencyElem( + dependency: DependencyDescriptor, + scope: Option[String], + optional: Boolean, + classifier: Option[String], + tpe: Option[String] + ): Elem = + makeDependencyElem(dependency, scope, optional, classifier, tpe, Nil) + + def makeDependencyElem( + dependency: DependencyDescriptor, + scope: Option[String], + optional: Boolean, + classifier: Option[String], + tpe: Option[String], + excludes: Seq[ExcludeRule] + ): Elem = { + val mrid = dependency.getDependencyRevisionId + + {mrid.getOrganisation} + {mrid.getName} + {makeDependencyVersion(mrid.getRevision)} + {scopeElem(scope)} + {optionalElem(optional)} + {classifierElem(classifier)} + {typeElem(tpe)} + {exclusions(dependency, excludes)} + + } + + @deprecated("No longer used and will be removed.", "0.12.1") + def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = { + val jarDep = dependency.getAllDependencyArtifacts.find(d => includeTypes(d.getType)) + jarDep match { + case Some(a) => classifierElem(artifactClassifier(a)) + case None => NodeSeq.Empty + } + } + def artifactType(artifact: DependencyArtifactDescriptor): Option[String] = + Option(artifact.getType).flatMap { tpe => + if (tpe == "jar") None else Some(tpe) + } + def typeElem(tpe: Option[String]): NodeSeq = + tpe match { + case Some(t) => {t} + case None => NodeSeq.Empty + } + + def artifactClassifier(artifact: DependencyArtifactDescriptor): Option[String] = + Option(artifact.getExtraAttribute("classifier")) + def classifierElem(classifier: Option[String]): NodeSeq = + classifier match { + case Some(c) => {c} + case None => NodeSeq.Empty + } + + @deprecated("No longer used and will be removed.", "0.12.1") + def scopeAndOptional(dependency: DependencyDescriptor): NodeSeq = { + val (scope, opt) = getScopeAndOptional(dependency.getModuleConfigurations) + scopeElem(scope) ++ optionalElem(opt) + } + def scopeElem(scope: Option[String]): NodeSeq = scope match { + case None | Some(Configurations.Compile.name) => NodeSeq.Empty + case Some(s) => {s} + } + def optionalElem(opt: Boolean) = if (opt) true else NodeSeq.Empty + def moduleDescriptor(module: ModuleDescriptor) = module.getModuleRevisionId + + def getScopeAndOptional(confs: Array[String]): (Option[String], Boolean) = { + val (opt, notOptional) = confs.partition(_ == Optional.name) + val defaultNotOptional = + Configurations.defaultMavenConfigurations.find({ (c: Configuration) => + notOptional contains c.name + }) + val scope = defaultNotOptional.map(_.name) + (scope, opt.nonEmpty) + } + + @deprecated("Use `exclusions` variant which takes excludes", "0.13.9") + def exclusions(dependency: DependencyDescriptor): NodeSeq = exclusions(dependency, Nil) + + def exclusions(dependency: DependencyDescriptor, excludes: Seq[ExcludeRule]): NodeSeq = { + val excl = ArraySeq.unsafeWrapArray( + dependency.getExcludeRules(dependency.getModuleConfigurations) + ) ++ excludes + val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion)) + if (warns.nonEmpty) log.warn(warns.mkString(IO.Newline)) + if (excls.nonEmpty) { + excls + } + else NodeSeq.Empty + } + def makeExclusion(exclRule: ExcludeRule): Either[String, NodeSeq] = { + val m = exclRule.getId.getModuleId + val (g, a) = (m.getOrganisation, m.getName) + if (g == null || g.isEmpty || a == null || a.isEmpty) + Left( + "Skipped generating '' for %s. Dependency exclusion should have both 'org' and 'module' to comply with Maven POM's schema." 
+ .format(m) + ) + else + Right( + + {g} + {a} + + ) + } + + def makeRepositories( + settings: IvySettings, + includeAll: Boolean, + filterRepositories: MavenRepository => Boolean + ) = { + val repositories = + if (includeAll) allResolvers(settings) else resolvers(settings.getDefaultResolver) + val mavenRepositories = + repositories.flatMap { + // TODO - Would it be ok if bintray were in the pom? We should avoid it for now. + case m: CustomRemoteMavenResolver if m.repo.root == JCenterRepository.root => Nil + case m: IBiblioResolver if m.isM2compatible && m.getRoot == JCenterRepository.root => Nil + case m: CustomRemoteMavenResolver if m.repo.root != DefaultMavenRepository.root => + MavenRepository(m.repo.name, m.repo.root) :: Nil + case m: IBiblioResolver if m.isM2compatible && m.getRoot != DefaultMavenRepository.root => + MavenRepository(m.getName, m.getRoot) :: Nil + case _ => Nil + } + val repositoryElements = mavenRepositories.filter(filterRepositories).map(mavenRepository) + if (repositoryElements.isEmpty) repositoryElements + else + { + repositoryElements + } + } + def allResolvers(settings: IvySettings): Seq[DependencyResolver] = + flatten(castResolvers(settings.getResolvers)).distinct + def flatten(rs: Seq[DependencyResolver]): Seq[DependencyResolver] = + if (rs eq null) Nil else rs.flatMap(resolvers) + def resolvers(r: DependencyResolver): Seq[DependencyResolver] = + r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil } + + // cast the contents of a pre-generics collection + private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] = { + import scala.jdk.CollectionConverters._ + s.asScala.toSeq.map(_.asInstanceOf[DependencyResolver]) + } + + def toID(name: String) = checkID(name.filter(isValidIDCharacter).mkString, name) + def isValidIDCharacter(c: Char) = !"""\/:"<>|?*""".contains(c) + private def checkID(id: String, name: String) = + if (id.isEmpty) sys.error("Could not convert '" + name + "' to an ID") else id + def mavenRepository(repo: MavenRepository): XNode = + mavenRepository(toID(repo.name), repo.name, repo.root) + def mavenRepository(id: String, name: String, root: String): XNode = + + {id} + {name} + {root} + {"default"} + + + /** + * Retain dependencies only with the configurations given, or all public configurations of `module` if `configurations` is None. 
+ * This currently only preserves the information required by makePom + */ + private def depsInConfs( + module: ModuleDescriptor, + configurations: Option[Iterable[Configuration]] + ): Seq[DependencyDescriptor] = { + val keepConfigurations = IvySbt.getConfigurations(module, configurations) + val keepSet: Set[String] = Set(keepConfigurations.toSeq: _*) + def translate(dependency: DependencyDescriptor) = { + val keep = dependency.getModuleConfigurations + .filter((conf: String) => keepSet.contains(conf)) + if (keep.isEmpty) + None + else // TODO: translate the dependency to contain only configurations to keep + Some(dependency) + } + ArraySeq.unsafeWrapArray(module.getDependencies) flatMap translate + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ProjectResolver.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ProjectResolver.scala new file mode 100644 index 000000000..912f77bd0 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ProjectResolver.scala @@ -0,0 +1,108 @@ +/* sbt -- Simple Build Tool + * Copyright 2011 Mark Harrah + */ +package sbt.internal.librarymanagement + +import java.io.File +import java.util.Date + +import org.apache.ivy.core.{ cache, module, report, resolve, search } +import cache.ArtifactOrigin +import search.{ ModuleEntry, OrganisationEntry, RevisionEntry } +import module.id.ModuleRevisionId +import module.descriptor.{ + Artifact => IArtifact, + DefaultArtifact, + DependencyDescriptor, + ModuleDescriptor +} +import org.apache.ivy.plugins.namespace.Namespace +import org.apache.ivy.plugins.resolver.ResolverSettings +import report.{ + ArtifactDownloadReport, + DownloadReport, + DownloadStatus, + MetadataArtifactDownloadReport +} +import resolve.{ DownloadOptions, ResolveData, ResolvedModuleRevision } + +/** + * A Resolver that uses a predefined mapping from module ids to in-memory descriptors. + * It does not handle artifacts. 
+ */ +class ProjectResolver(name: String, map: Map[ModuleRevisionId, ModuleDescriptor]) + extends ResolverAdapter { + def getName = name + def setName(name: String) = sys.error("Setting name not supported by ProjectResolver") + override def toString = "ProjectResolver(" + name + ", mapped: " + map.keys.mkString(", ") + ")" + + def getDependency(dd: DependencyDescriptor, data: ResolveData): ResolvedModuleRevision = + getDependency(dd.getDependencyRevisionId).orNull + + private[this] def getDependency(revisionId: ModuleRevisionId): Option[ResolvedModuleRevision] = { + def constructResult(descriptor: ModuleDescriptor) = + new ResolvedModuleRevision(this, this, descriptor, report(revisionId), true) + map get revisionId map constructResult + } + + private[sbt] def getModuleDescriptor(revisionId: ModuleRevisionId): Option[ModuleDescriptor] = + map.get(revisionId) + + def report(revisionId: ModuleRevisionId): MetadataArtifactDownloadReport = { + val artifact = DefaultArtifact.newIvyArtifact(revisionId, new Date) + val r = new MetadataArtifactDownloadReport(artifact) + r.setSearched(false) + r.setDownloadStatus(DownloadStatus.FAILED) + r + } + + // this resolver nevers locates artifacts, only resolves dependencies + def exists(artifact: IArtifact) = false + def locate(artifact: IArtifact) = null + def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport = { + val r = new DownloadReport + for (artifact <- artifacts) + if (getDependency(artifact.getModuleRevisionId).isEmpty) + r.addArtifactReport(notDownloaded(artifact)) + r + } + + def download(artifact: ArtifactOrigin, options: DownloadOptions): ArtifactDownloadReport = + notDownloaded(artifact.getArtifact) + def findIvyFileRef(dd: DependencyDescriptor, data: ResolveData) = null + + def notDownloaded(artifact: IArtifact): ArtifactDownloadReport = { + val r = new ArtifactDownloadReport(artifact) + r.setDownloadStatus(DownloadStatus.FAILED) + r + } + + // doesn't support publishing + def publish(artifact: IArtifact, src: File, overwrite: Boolean) = + sys.error("Publish not supported by ProjectResolver") + def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean): Unit = () + def abortPublishTransaction(): Unit = () + def commitPublishTransaction(): Unit = () + + def reportFailure(): Unit = () + def reportFailure(art: IArtifact): Unit = () + + def listOrganisations() = new Array[OrganisationEntry](0) + def listModules(org: OrganisationEntry) = new Array[ModuleEntry](0) + def listRevisions(module: ModuleEntry) = new Array[RevisionEntry](0) + + def getNamespace = Namespace.SYSTEM_NAMESPACE + + private[this] var settings: Option[ResolverSettings] = None + + def dumpSettings(): Unit = () + def setSettings(settings: ResolverSettings): Unit = { this.settings = Some(settings) } + def getRepositoryCacheManager = settings match { + case Some(s) => s.getDefaultRepositoryCacheManager; + case None => sys.error("No settings defined for ProjectResolver") + } +} + +object ProjectResolver { + private[sbt] val InterProject = "inter-project" +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ResolutionCache.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ResolutionCache.scala new file mode 100644 index 000000000..5c381913a --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ResolutionCache.scala @@ -0,0 +1,107 @@ +package sbt.internal.librarymanagement + +import java.io.File +import org.apache.ivy.core +import org.apache.ivy.plugins.parser +import core.IvyPatternHelper 
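A hedged sketch of how the inter-project resolver above is populated: subproject module descriptors are registered by their ModuleRevisionId so Ivy resolves them from memory instead of a repository. The `coreDescriptor` value is a hypothetical descriptor assumed to come from the enclosing build.

    val descriptors: Map[ModuleRevisionId, ModuleDescriptor] =
      Map(coreDescriptor.getModuleRevisionId -> coreDescriptor)   // hypothetical subproject descriptor

    val interProject = new ProjectResolver(ProjectResolver.InterProject, descriptors)
    // When installed ahead of the repository resolvers in the chain, in-memory
    // project descriptors win over published artifacts; anything not in the map
    // falls through to the regular repositories.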
+import core.settings.IvySettings +import core.cache.ResolutionCacheManager +import core.module.id.ModuleRevisionId +import core.module.descriptor.ModuleDescriptor +import ResolutionCache.{ Name, ReportDirectory, ResolvedName, ResolvedPattern } +import parser.xml.XmlModuleDescriptorParser +import sbt.io.IO +import sbt.librarymanagement._ + +/** + * Replaces the standard Ivy resolution cache in order to: + * 1. Separate cached resolved Ivy files from resolution reports, making the resolution reports easier to find. + * 2. Have them per-project for easier cleaning (possible with standard cache, but central to this custom one). + * 3. Cache location includes extra attributes so that cross builds of a plugin do not overwrite each other. + */ +private[sbt] final class ResolutionCache(base: File, settings: IvySettings) + extends ResolutionCacheManager { + private[this] def resolvedFileInCache(m: ModuleRevisionId, name: String, ext: String): File = { + val p = ResolvedPattern + val f = IvyPatternHelper.substitute( + p, + m.getOrganisation, + m.getName, + m.getBranch, + m.getRevision, + name, + name, + ext, + null, + null, + m.getAttributes, + null + ) + new File(base, f) + } + private[this] val reportBase: File = new File(base, ReportDirectory) + + def getResolutionCacheRoot: File = base + def clean(): Unit = IO.delete(base) + override def toString = Name + + def getResolvedIvyFileInCache(mrid: ModuleRevisionId): File = + resolvedFileInCache(mrid, ResolvedName, "xml") + def getResolvedIvyPropertiesInCache(mrid: ModuleRevisionId): File = + resolvedFileInCache(mrid, ResolvedName, "properties") + // name needs to be the same as Ivy's default because the ivy-report.xsl stylesheet assumes this + // when making links to reports for other configurations + def getConfigurationResolveReportInCache(resolveId: String, conf: String): File = + new File(reportBase, resolveId + "-" + conf + ".xml") + def getConfigurationResolveReportsInCache(resolveId: String): Array[File] = + IO.listFiles(reportBase).filter(_.getName.startsWith(resolveId + "-")) + + // XXX: this method is required by ResolutionCacheManager in Ivy 2.3.0 final, + // but it is apparently unused by Ivy as sbt uses Ivy. Therefore, it is + // unexercised in tests. Note that the implementation of this method in Ivy 2.3.0's + // DefaultResolutionCache also resolves parent properties for a given mrid + def getResolvedModuleDescriptor(mrid: ModuleRevisionId): ModuleDescriptor = { + val ivyFile = getResolvedIvyFileInCache(mrid) + if (!ivyFile.exists()) { + throw new IllegalStateException("Ivy file not found in cache for " + mrid + "!") + } + + XmlModuleDescriptorParser.getInstance().parseDescriptor(settings, ivyFile.toURI.toURL, false) + } + + def saveResolvedModuleDescriptor(md: ModuleDescriptor): Unit = { + val mrid = md.getResolvedModuleRevisionId + val cachedIvyFile = getResolvedIvyFileInCache(mrid) + md.toIvyFile(cachedIvyFile) + } +} +private[sbt] object ResolutionCache { + + /** + * Removes cached files from the resolution cache for the module with ID `mrid` + * and the resolveId (as set on `ResolveOptions`). 
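+   * Each of the three cache lookups is wrapped in `Option`, so entries that are missing from the
+   * cache are simply skipped rather than causing a failure.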
+ */ + private[sbt] def cleanModule( + mrid: ModuleRevisionId, + resolveId: String, + manager: ResolutionCacheManager + ): Unit = { + val files = + Option(manager.getResolvedIvyFileInCache(mrid)).toList ::: + Option(manager.getResolvedIvyPropertiesInCache(mrid)).toList ::: + Option(manager.getConfigurationResolveReportsInCache(resolveId)).toList.flatten + IO.delete(files) + } + + private val ReportDirectory = "reports" + + // base name (name except for extension) of resolution report file + private val ResolvedName = "resolved.xml" + + // Cache name + private val Name = "sbt-resolution-cache" + + // use sbt-specific extra attributes so that resolved xml files do not get overwritten when using different Scala/sbt versions + private val ResolvedPattern = + "[organisation]/[module]/" + Resolver.PluginPattern + "([branch]/)[revision]/[artifact].[ext]" +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/CachedResolutionResolveEngine.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/CachedResolutionResolveEngine.scala new file mode 100644 index 000000000..ed0a8a6b1 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/CachedResolutionResolveEngine.scala @@ -0,0 +1,1028 @@ +package sbt.internal.librarymanagement +package ivyint + +import java.util.Date +import java.io.File +import java.text.SimpleDateFormat +import collection.concurrent +import collection.mutable +import collection.immutable.ListMap +import org.apache.ivy.Ivy +import org.apache.ivy.core +import core.resolve._ +import core.module.id.{ ModuleRevisionId, ModuleId => IvyModuleId } +import core.report.ResolveReport +import core.module.descriptor.{ + DefaultModuleDescriptor, + ModuleDescriptor, + DefaultDependencyDescriptor, + DependencyDescriptor, + Configuration => IvyConfiguration, + ExcludeRule, + IncludeRule +} +import core.module.descriptor.{ OverrideDependencyDescriptorMediator, DependencyArtifactDescriptor } +import core.IvyPatternHelper +import org.apache.ivy.util.{ Message, MessageLogger } +import org.apache.ivy.plugins.latest.{ ArtifactInfo => IvyArtifactInfo } +import org.apache.ivy.plugins.matcher.{ MapMatcher, PatternMatcher } +import annotation.tailrec +import scala.concurrent.duration._ +import sbt.io.{ DirectoryFilter, Hash, IO } +import sbt.librarymanagement._, syntax._ +import sbt.util.Logger + +private[sbt] object CachedResolutionResolveCache { + def createID(organization: String, name: String, revision: String) = + ModuleRevisionId.newInstance(organization, name, revision) + def sbtOrgTemp = JsonUtil.sbtOrgTemp + def graphVersion = "0.13.9C" + val buildStartup: Long = System.currentTimeMillis + lazy val todayStr: String = toYyyymmdd(buildStartup) + lazy val tomorrowStr: String = toYyyymmdd(buildStartup + 1.day.toMillis) + lazy val yesterdayStr: String = toYyyymmdd(buildStartup - 1.day.toMillis) + def toYyyymmdd(timeSinceEpoch: Long): String = yyyymmdd.format(new Date(timeSinceEpoch)) + lazy val yyyymmdd: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd") +} + +private[sbt] class CachedResolutionResolveCache { + import CachedResolutionResolveCache._ + val updateReportCache: concurrent.Map[ModuleRevisionId, Either[ResolveException, UpdateReport]] = + concurrent.TrieMap() + // Used for subproject + val projectReportCache + : concurrent.Map[(ModuleRevisionId, LogicalClock), Either[ResolveException, UpdateReport]] = + concurrent.TrieMap() + val resolveReportCache: concurrent.Map[ModuleRevisionId, ResolveReport] = concurrent.TrieMap() + val 
resolvePropertiesCache: concurrent.Map[ModuleRevisionId, String] = concurrent.TrieMap() + val conflictCache + : concurrent.Map[(ModuleID, ModuleID), (Vector[ModuleID], Vector[ModuleID], String)] = + concurrent.TrieMap() + val maxConflictCacheSize: Int = 1024 + val maxUpdateReportCacheSize: Int = 1024 + + def clean(): Unit = updateReportCache.clear() + + def directDependencies(md0: ModuleDescriptor): Vector[DependencyDescriptor] = + md0.getDependencies.toVector + + // Returns a vector of (module descriptor, changing, dd) + def buildArtificialModuleDescriptors( + md0: ModuleDescriptor, + prOpt: Option[ProjectResolver], + log: Logger + ): Vector[(DefaultModuleDescriptor, Boolean, DependencyDescriptor)] = { + log.debug(s":: building artificial module descriptors from ${md0.getModuleRevisionId}") + // val expanded = expandInternalDependencies(md0, data, prOpt, log) + val rootModuleConfigs = md0.getConfigurations.toVector + directDependencies(md0) map { dd => + val arts = dd.getAllDependencyArtifacts.toVector map { x => + s"""${x.getName}:${x.getType}:${x.getExt}:${x.getExtraAttributes}""" + } + log.debug(s"::: dd: $dd (artifacts: ${arts.mkString(",")})") + buildArtificialModuleDescriptor(dd, rootModuleConfigs, md0, prOpt) + } + } + + def internalDependency( + dd: DependencyDescriptor, + prOpt: Option[ProjectResolver] + ): Option[ModuleDescriptor] = + prOpt match { + case Some(pr) => pr.getModuleDescriptor(dd.getDependencyRevisionId) + case _ => None + } + + def buildArtificialModuleDescriptor( + dd: DependencyDescriptor, + rootModuleConfigs: Vector[IvyConfiguration], + parent: ModuleDescriptor, + prOpt: Option[ProjectResolver] + ): (DefaultModuleDescriptor, Boolean, DependencyDescriptor) = { + def excludeRuleString(rule: ExcludeRule): String = + s"""Exclude(${rule.getId},${rule.getConfigurations.mkString(",")},${rule.getMatcher})""" + def includeRuleString(rule: IncludeRule): String = + s"""Include(${rule.getId},${rule.getConfigurations.mkString(",")},${rule.getMatcher})""" + def artifactString(dad: DependencyArtifactDescriptor): String = + s"""Artifact(${dad.getName},${dad.getType},${dad.getExt},${dad.getUrl},${dad.getConfigurations + .mkString(",")},${dad.getExtraAttributes})""" + val mrid = dd.getDependencyRevisionId + val confMap = (dd.getModuleConfigurations map { conf => + conf + "->(" + dd.getDependencyConfigurations(conf).mkString(",") + ")" + }) + val exclusions = (dd.getModuleConfigurations.toVector flatMap { conf => + dd.getExcludeRules(conf).toVector match { + case Vector() => None + case rules => Some(conf + "->(" + (rules map excludeRuleString).mkString(",") + ")") + } + }) + val inclusions = (dd.getModuleConfigurations.toVector flatMap { conf => + dd.getIncludeRules(conf).toVector match { + case Vector() => None + case rules => Some(conf + "->(" + (rules map includeRuleString).mkString(",") + ")") + } + }) + val explicitArtifacts = dd.getAllDependencyArtifacts.toVector map { artifactString } + val mes = parent.getAllExcludeRules.toVector + val mesStr = (mes map excludeRuleString).mkString(",") + val os = extractOverrides(parent) + val moduleLevel = s"""dependencyOverrides=${os.mkString(",")};moduleExclusions=$mesStr""" + val depsString = s"""$mrid;${confMap.mkString( + "," + )};isForce=${dd.isForce};isChanging=${dd.isChanging};isTransitive=${dd.isTransitive};""" + + s"""exclusions=${exclusions.mkString(",")};inclusions=${inclusions.mkString( + "," + )};explicitArtifacts=${explicitArtifacts + .mkString(",")};$moduleLevel;""" + val sha1 = Hash.toHex( + 
Hash(s"""graphVersion=${CachedResolutionResolveCache.graphVersion};$depsString""") + ) + val md1 = new DefaultModuleDescriptor( + createID(sbtOrgTemp, "temp-resolve-" + sha1, "1.0"), + "release", + null, + false + ) with ArtificialModuleDescriptor { + def targetModuleRevisionId: ModuleRevisionId = mrid + } + for { + conf <- rootModuleConfigs + } yield md1.addConfiguration(conf) + md1.addDependency(dd) + os foreach { ovr => + md1.addDependencyDescriptorMediator(ovr.moduleId, ovr.pm, ovr.ddm) + } + mes foreach { exclude => + md1.addExcludeRule(exclude) + } + (md1, IvySbt.isChanging(dd) || internalDependency(dd, prOpt).isDefined, dd) + } + def extractOverrides(md0: ModuleDescriptor): Vector[IvyOverride] = { + import scala.jdk.CollectionConverters._ + md0.getAllDependencyDescriptorMediators.getAllRules.asScala.toVector sortBy { case (k, _) => + k.toString + } collect { case (k: MapMatcher, v: OverrideDependencyDescriptorMediator) => + val attr: Map[Any, Any] = k.getAttributes.asScala.toMap + val module = IvyModuleId.newInstance( + attr(IvyPatternHelper.ORGANISATION_KEY).toString, + attr(IvyPatternHelper.MODULE_KEY).toString + ) + val pm = k.getPatternMatcher + IvyOverride(module, pm, v) + } + } + def getOrElseUpdateMiniGraph( + md: ModuleDescriptor, + changing0: Boolean, + logicalClock: LogicalClock, + miniGraphPath: File, + cachedDescriptor: File, + log: Logger + )( + f: => Either[ResolveException, UpdateReport] + ): Either[ResolveException, UpdateReport] = { + import sbt.io.syntax._ + val mrid = md.getResolvedModuleRevisionId + def extraPath(id: ModuleRevisionId, key: String, pattern: String): String = + Option(id.getExtraAttribute(key)).fold(".")(pattern.format(_)) // "." has no affect on paths + def scalaVersion(id: ModuleRevisionId): String = extraPath(id, "e:scalaVersion", "scala_%s") + def sbtVersion(id: ModuleRevisionId): String = extraPath(id, "e:sbtVersion", "sbt_%s") + val (pathOrg, pathName, pathRevision, pathScalaVersion, pathSbtVersion) = md match { + case x: ArtificialModuleDescriptor => + val tmrid = x.targetModuleRevisionId + ( + tmrid.getOrganisation, + tmrid.getName, + tmrid.getRevision + "_" + mrid.getName, + scalaVersion(tmrid), + sbtVersion(tmrid) + ) + case _ => + (mrid.getOrganisation, mrid.getName, mrid.getRevision, scalaVersion(mrid), sbtVersion(mrid)) + } + val staticGraphDirectory = miniGraphPath / "static" + val dynamicGraphDirectory = miniGraphPath / "dynamic" + val staticGraphPath = + staticGraphDirectory / pathScalaVersion / pathSbtVersion / pathOrg / pathName / pathRevision / "graphs" / "graph.json" + val dynamicGraphPath = + dynamicGraphDirectory / todayStr / logicalClock.toString / pathScalaVersion / pathSbtVersion / pathOrg / pathName / pathRevision / "graphs" / "graph.json" + def cleanDynamicGraph(): Unit = { + val list = IO.listFiles(dynamicGraphDirectory, DirectoryFilter).toList + list filterNot { d => + (d.getName == todayStr) || (d.getName == tomorrowStr) || (d.getName == yesterdayStr) + } foreach { d => + log.debug(s"deleting old graphs $d...") + IO.delete(d) + } + } + def loadMiniGraphFromFile: Option[Either[ResolveException, UpdateReport]] = + (if (staticGraphPath.exists) Some(staticGraphPath) + else if (dynamicGraphPath.exists) Some(dynamicGraphPath) + else None) match { + case Some(path) => + log.debug(s"parsing ${path.getAbsolutePath.toString}") + val ur = JsonUtil.parseUpdateReport(path, cachedDescriptor, log) + if (ur.allFiles forall { _.exists }) { + updateReportCache(md.getModuleRevisionId) = Right(ur) + Some(Right(ur)) + } else { + 
log.debug(s"some files are missing from the cache, so invalidating the minigraph") + IO.delete(path) + None + } + case _ => None + } + (updateReportCache.get(mrid) orElse loadMiniGraphFromFile) match { + case Some(result) => + result match { + case Right(ur) => Right(ur.withStats(ur.stats.withCached(true))) + case x => x + } + case None => + f match { + case Right(ur) => + val changing = changing0 || (ur.configurations exists { cr => + cr.details exists { oar => + oar.modules exists { mr => + IvySbt.isChanging(mr.module) || (mr.callers exists { _.isChangingDependency }) + } + } + }) + IO.createDirectory(miniGraphPath) + val gp = + if (changing) dynamicGraphPath + else staticGraphPath + log.debug(s"saving minigraph to $gp") + if (changing) { + cleanDynamicGraph() + } + JsonUtil.writeUpdateReport(ur, gp) + // limit the update cache size + if (updateReportCache.size > maxUpdateReportCacheSize) { + updateReportCache.remove(updateReportCache.head._1) + } + // don't cache dynamic graphs in memory. + if (!changing) { + updateReportCache(md.getModuleRevisionId) = Right(ur) + } + Right(ur) + case Left(re) => + if (!changing0) { + updateReportCache(md.getModuleRevisionId) = Left(re) + } + Left(re) + } + } + } + + def getOrElseUpdateConflict(cf0: ModuleID, cf1: ModuleID, conflicts: Vector[ModuleReport])( + f: => (Vector[ModuleReport], Vector[ModuleReport], String) + ): (Vector[ModuleReport], Vector[ModuleReport]) = { + def reconstructReports( + surviving: Vector[ModuleID], + evicted: Vector[ModuleID], + mgr: String + ): (Vector[ModuleReport], Vector[ModuleReport]) = { + val moduleIdMap = Map(conflicts map { x => + x.module -> x + }: _*) + ( + surviving map moduleIdMap, + evicted map moduleIdMap map { + _.withEvicted(true).withEvictedReason(Some(mgr.toString)) + } + ) + } + (conflictCache get ((cf0, cf1))) match { + case Some((surviving, evicted, mgr)) => reconstructReports(surviving, evicted, mgr) + case _ => + (conflictCache get ((cf1, cf0))) match { + case Some((surviving, evicted, mgr)) => reconstructReports(surviving, evicted, mgr) + case _ => + val (surviving, evicted, mgr) = f + if (conflictCache.size > maxConflictCacheSize) { + conflictCache.remove(conflictCache.head._1) + } + conflictCache((cf0, cf1)) = (surviving map { _.module }, evicted map { _.module }, mgr) + (surviving, evicted) + } + } + } + def getOrElseUpdateProjectReport(mrid: ModuleRevisionId, logicalClock: LogicalClock)( + f: => Either[ResolveException, UpdateReport] + ): Either[ResolveException, UpdateReport] = + if (projectReportCache contains (mrid -> logicalClock)) projectReportCache((mrid, logicalClock)) + else { + val oldKeys = projectReportCache.keys filter { case (_, clk) => clk != logicalClock } + projectReportCache --= oldKeys + projectReportCache.getOrElseUpdate((mrid, logicalClock), f) + } +} + +private[sbt] trait ArtificialModuleDescriptor { self: DefaultModuleDescriptor => + def targetModuleRevisionId: ModuleRevisionId +} + +private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine { + private[sbt] def cachedResolutionResolveCache: CachedResolutionResolveCache + private[sbt] def projectResolver: Option[ProjectResolver] + private[sbt] def makeInstance: Ivy + private[sbt] val ignoreTransitiveForce: Boolean = true + + def withIvy[A](log: Logger)(f: Ivy => A): A = + withIvy(new IvyLoggerInterface(log))(f) + def withIvy[A](log: MessageLogger)(f: Ivy => A): A = + withDefaultLogger(log) { + val ivy = makeInstance + ivy.pushContext() + ivy.getLoggerEngine.pushLogger(log) + try { + f(ivy) + } finally { + 
ivy.getLoggerEngine.popLogger() + ivy.popContext() + } + } + def withDefaultLogger[A](log: MessageLogger)(f: => A): A = { + val originalLogger = Message.getDefaultLogger + Message.setDefaultLogger(log) + try { + f + } finally { + Message.setDefaultLogger(originalLogger) + } + } + + /** + * This returns sbt's UpdateReport structure. + * missingOk allows sbt to call this with classifiers that may or may not exist, and grab the JARs. + */ + def customResolve( + md0: ModuleDescriptor, + missingOk: Boolean, + logicalClock: LogicalClock, + options0: ResolveOptions, + depDir: File, + log: Logger + ): Either[ResolveException, UpdateReport] = + cachedResolutionResolveCache.getOrElseUpdateProjectReport( + md0.getModuleRevisionId, + logicalClock + ) { + import sbt.io.syntax._ + val start = System.currentTimeMillis + val miniGraphPath = depDir / "module" + val cachedDescriptor = + getSettings.getResolutionCacheManager.getResolvedIvyFileInCache(md0.getModuleRevisionId) + val cache = cachedResolutionResolveCache + val os = cache.extractOverrides(md0) + val options1 = new ResolveOptions(options0) + val data = new ResolveData(this, options1) + val mds = cache.buildArtificialModuleDescriptors(md0, projectResolver, log) + + def doWork( + md: ModuleDescriptor, + dd: DependencyDescriptor + ): Either[ResolveException, UpdateReport] = + cache.internalDependency(dd, projectResolver) match { + case Some(md1) => + log.debug(s":: call customResolve recursively: $dd") + customResolve(md1, missingOk, logicalClock, options0, depDir, log) match { + case Right(ur) => + Right(remapInternalProject(new IvyNode(data, md1), ur, md0, dd, os, log)) + case Left(e) => Left(e) + } + case None => + log.debug(s":: call ivy resolution: $dd") + doWorkUsingIvy(md) + } + def doWorkUsingIvy(md: ModuleDescriptor): Either[ResolveException, UpdateReport] = { + import scala.jdk.CollectionConverters._ + val options1 = new ResolveOptions(options0) + val rr = withIvy(log) { ivy => + ivy.resolve(md, options1) + } + if (!rr.hasError || missingOk) Right(IvyRetrieve.updateReport(rr, cachedDescriptor)) + else { + val messages = rr.getAllProblemMessages.asScala.toSeq.map(_.toString).distinct + val failedPaths = ListMap(rr.getUnresolvedDependencies map { node => + val m = IvyRetrieve.toModuleID(node.getId) + val path = IvyRetrieve.findPath(node, md.getModuleRevisionId) map { x => + IvyRetrieve.toModuleID(x.getId) + } + log.debug("- Unresolved path " + path.toString) + m -> path + }: _*) + val failed = failedPaths.keys.toSeq + Left(new ResolveException(messages, failed, failedPaths)) + } + } + val (internal, external) = mds.partition { case (_, _, dd) => + cache.internalDependency(dd, projectResolver).isDefined + } + val internalResults = internal map { case (md, changing, dd) => + cache.getOrElseUpdateMiniGraph( + md, + changing, + logicalClock, + miniGraphPath, + cachedDescriptor, + log + ) { + doWork(md, dd) + } + } + val externalResults = external map { case (md0, changing, dd) => + val configurationsInInternal = internalResults flatMap { + case Right(ur) => + ur.allModules.flatMap { case md => + val sameName = md.name == dd.getDependencyId.getName + val sameOrg = md.organization == dd.getDependencyId.getOrganisation + if (sameName && sameOrg) md.configurations + else None + } + case _ => Nil + } + + dd match { + case d: DefaultDependencyDescriptor => + configurationsInInternal foreach { c => + val configurations = c.split(";").map(_.split("->")) + configurations foreach { conf => + try d.addDependencyConfiguration(conf(0), conf(1)) + catch { + 
case _: Throwable => () + } // An exception will be thrown if `conf(0)` doesn't exist. + } + } + + case _ => () + } + + cache.getOrElseUpdateMiniGraph( + md0, + changing, + logicalClock, + miniGraphPath, + cachedDescriptor, + log + ) { + doWork(md0, dd) + } + } + val results = internalResults ++ externalResults + val uReport = + mergeResults(md0, results, missingOk, System.currentTimeMillis - start, os, log) + val cacheManager = getSettings.getResolutionCacheManager + cacheManager.saveResolvedModuleDescriptor(md0) + val prop0 = "" + val ivyPropertiesInCache0 = + cacheManager.getResolvedIvyPropertiesInCache(md0.getResolvedModuleRevisionId) + IO.write(ivyPropertiesInCache0, prop0) + uReport + } + + def mergeResults( + md0: ModuleDescriptor, + results: Vector[Either[ResolveException, UpdateReport]], + missingOk: Boolean, + resolveTime: Long, + os: Vector[IvyOverride], + log: Logger + ): Either[ResolveException, UpdateReport] = + if (!missingOk && (results exists { _.isLeft })) + Left(mergeErrors(md0, results collect { case Left(re) => re })) + else Right(mergeReports(md0, results collect { case Right(ur) => ur }, resolveTime, os, log)) + + def mergeErrors(md0: ModuleDescriptor, errors: Vector[ResolveException]): ResolveException = { + val messages = errors flatMap { _.messages } + val failed = errors flatMap { _.failed } + val failedPaths = errors flatMap { + _.failedPaths.toList map { case (failed, paths) => + if (paths.isEmpty) (failed, paths) + else + ( + failed, + List(IvyRetrieve.toModuleID(md0.getResolvedModuleRevisionId)) ::: paths.toList.tail + ) + } + } + new ResolveException(messages, failed, ListMap(failedPaths: _*)) + } + + def mergeReports( + md0: ModuleDescriptor, + reports: Vector[UpdateReport], + resolveTime: Long, + os: Vector[IvyOverride], + log: Logger + ): UpdateReport = { + log.debug(s":: merging update reports") + val cachedDescriptor = + getSettings.getResolutionCacheManager.getResolvedIvyFileInCache(md0.getModuleRevisionId) + val rootModuleConfigs = md0.getConfigurations.toVector + val cachedReports = reports filter { !_.stats.cached } + val stats = UpdateStats( + resolveTime, + (cachedReports map { _.stats.downloadTime }).sum, + (cachedReports map { _.stats.downloadSize }).sum, + false + ) + val configReports = rootModuleConfigs map { conf => + log.debug("::: -----------") + val crs = reports flatMap { + _.configurations filter { _.configuration.name == conf.getName } + } + mergeConfigurationReports(ConfigRef(conf.getName), crs, os, log) + } + UpdateReport(cachedDescriptor, configReports, stats, Map.empty) + } + + // memory usage 62%, of which 58% is in mergeOrganizationArtifactReports + def mergeConfigurationReports( + rootModuleConf: ConfigRef, + reports: Vector[ConfigurationReport], + os: Vector[IvyOverride], + log: Logger + ): ConfigurationReport = { + // get the details right, and the rest could be derived + val details = + mergeOrganizationArtifactReports(rootModuleConf, reports flatMap { _.details }, os, log) + val modules = details flatMap { + _.modules filter { mr => + !mr.evicted && mr.problem.isEmpty + } + } + ConfigurationReport(rootModuleConf, modules, details) + } + + /** + * Returns a tuple of (merged org + name combo, newly evicted modules) + */ + def mergeOrganizationArtifactReports( + rootModuleConf: ConfigRef, + reports0: Vector[OrganizationArtifactReport], + os: Vector[IvyOverride], + log: Logger + ): Vector[OrganizationArtifactReport] = { + // filter out evicted modules from further logic + def filterReports(report0: OrganizationArtifactReport): 
Option[OrganizationArtifactReport] = + report0.modules.toVector flatMap { mr => + if (mr.evicted || mr.problem.nonEmpty) None + else + // https://github.com/sbt/sbt/issues/1763 + Some(mr.withCallers(JsonUtil.filterOutArtificialCallers(mr.callers))) + } match { + case Vector() => None + case ms => Some(OrganizationArtifactReport(report0.organization, report0.name, ms)) + } + + // group by takes up too much memory. trading space with time. + val orgNamePairs: Vector[(String, String)] = (reports0 map { oar => + (oar.organization, oar.name) + }).distinct + // this might take up some memory, but it's limited to a single + val reports1 = reports0 flatMap { filterReports } + val allModules0: Map[(String, String), Vector[OrganizationArtifactReport]] = + Map(orgNamePairs map { case (organization, name) => + val xs = reports1 filter { oar => + oar.organization == organization && oar.name == name + } + ((organization, name), xs) + }: _*) + // this returns a List of Lists of (org, name). should be deterministic + def detectLoops( + allModules: Map[(String, String), Vector[OrganizationArtifactReport]] + ): List[List[(String, String)]] = { + val loopSets: mutable.Set[Set[(String, String)]] = mutable.Set.empty + val loopLists: mutable.ListBuffer[List[(String, String)]] = mutable.ListBuffer.empty + def testLoop( + m: (String, String), + current: (String, String), + history: List[(String, String)] + ): Unit = { + val callers = + (for { + oar <- allModules.getOrElse(current, Vector()) + mr <- oar.modules + c <- mr.callers + } yield (c.caller.organization, c.caller.name)).distinct + callers foreach { c => + if (history.contains[(String, String)](c)) { + val loop = (c :: history.takeWhile(_ != c)) ::: List(c) + if (!loopSets(loop.toSet)) { + loopSets += loop.toSet + loopLists += loop + val loopStr = (loop map { case (o, n) => s"$o:$n" }).mkString("->") + log.warn(s"""avoid circular dependency while using cached resolution: $loopStr""") + } + } else testLoop(m, c, c :: history) + } + } + orgNamePairs map { orgname => + testLoop(orgname, orgname, List(orgname)) + } + loopLists.toList + } + val allModules2: mutable.Map[(String, String), Vector[OrganizationArtifactReport]] = + mutable.Map(allModules0.toSeq: _*) + @tailrec def breakLoops(loops: List[List[(String, String)]]): Unit = + loops match { + case Nil => () + case loop :: rest => + loop match { + case Nil => + breakLoops(rest) + case loop => + val sortedLoop = loop sortBy { x => + (for { + oar <- allModules0(x) + mr <- oar.modules + c <- mr.callers + } yield c).size + } + val moduleWithMostCallers = sortedLoop.reverse.head + val next: (String, String) = loop(loop.indexOf(moduleWithMostCallers) + 1) + // remove the module with most callers as the caller of next. + // so, A -> C, B -> C, and C -> A. C has the most callers, and C -> A will be removed. 
+              allModules2 foreach {
+                case (k: (String, String), oars0) if k == next =>
+                  val oars: Vector[OrganizationArtifactReport] = oars0 map { oar =>
+                    val mrs = oar.modules map { mr =>
+                      val callers0 = mr.callers
+                      val callers = callers0 filterNot { c =>
+                        (c.caller.organization, c.caller.name) == moduleWithMostCallers
+                      }
+                      if (callers.size == callers0.size) mr
+                      else {
+                        log.debug(
+                          s":: $rootModuleConf: removing caller $moduleWithMostCallers -> $next for sorting"
+                        )
+                        mr.withCallers(callers)
+                      }
+                    }
+                    OrganizationArtifactReport(oar.organization, oar.name, mrs)
+                  }
+                  allModules2(k) = oars
+                case (_, _) => // do nothing
+              }
+
+              breakLoops(rest)
+          }
+      }
+    val loop = detectLoops(allModules0)
+    log.debug(s":: $rootModuleConf: loop: $loop")
+    breakLoops(loop)
+
+    // sort all the modules so that less-called modules come earlier
+    @tailrec
+    def sortModules(
+        cs: Vector[(String, String)],
+        acc: Vector[(String, String)],
+        extra: Vector[(String, String)],
+        n: Int,
+        guard: Int
+    ): Vector[(String, String)] = {
+      // println(s"sortModules: $n / $guard")
+      val keys = cs.toSet
+      val (called, notCalled) = cs partition { k =>
+        val reports = allModules2(k)
+        reports exists {
+          _.modules.exists {
+            _.callers exists { caller =>
+              val m = caller.caller
+              keys((m.organization, m.name))
+            }
+          }
+        }
+      }
+      lazy val result0 = acc ++ notCalled ++ called ++ extra
+      def warnCircular(): Unit = {
+        log.warn(
+          s"""unexpected circular dependency while using cached resolution: ${cs.mkString(",")}"""
+        )
+      }
+      (if (n > guard) {
+         warnCircular()
+         result0
+       } else if (called.isEmpty) result0
+       else if (notCalled.isEmpty) {
+         warnCircular()
+         sortModules(cs.tail, acc, extra :+ cs.head, n + 1, guard)
+       } else sortModules(called, acc ++ notCalled, extra, 0, called.size * called.size + 1))
+    }
+    def resolveConflicts(
+        cs: List[(String, String)],
+        allModules: Map[(String, String), Vector[OrganizationArtifactReport]]
+    ): List[OrganizationArtifactReport] =
+      cs match {
+        case Nil => Nil
+        case (organization, name) :: rest =>
+          val reports = allModules((organization, name))
+          reports match {
+            case Vector() => resolveConflicts(rest, allModules)
+            case Vector(oa) if (oa.modules.isEmpty) => resolveConflicts(rest, allModules)
+            case Vector(oa) if (oa.modules.size == 1 && !oa.modules.head.evicted) =>
+              log.debug(s":: no conflict $rootModuleConf: ${oa.organization}:${oa.name}")
+              oa :: resolveConflicts(rest, allModules)
+            case oas =>
+              (mergeModuleReports(rootModuleConf, oas flatMap { _.modules }, os, log) match {
+                case (survivor, newlyEvicted) =>
+                  val evicted = (survivor ++ newlyEvicted) filter { m =>
+                    m.evicted
+                  }
+                  val notEvicted = (survivor ++ newlyEvicted) filter { m =>
+                    !m.evicted
+                  }
+                  log.debug("::: adds " + (notEvicted map { _.module }).mkString(", "))
+                  log.debug("::: evicted " + (evicted map { _.module }).mkString(", "))
+                  val x = OrganizationArtifactReport(organization, name, survivor ++ newlyEvicted)
+                  val nextModules =
+                    transitivelyEvict(rootModuleConf, rest, allModules, evicted, log)
+                  x :: resolveConflicts(rest, nextModules)
+              })
+          }
+      }
+    val guard0 = (orgNamePairs.size * orgNamePairs.size) + 1
+    val sorted: Vector[(String, String)] = sortModules(orgNamePairs, Vector(), Vector(), 0, guard0)
+    val sortedStr = (sorted map { case (o, n) => s"$o:$n" }).mkString(", ")
+    log.debug(s":: sort result: $sortedStr")
+    val result = resolveConflicts(sorted.toList, allModules0)
+    result.toVector
+  }
+
+  /**
+   * Merges ModuleReports, which represent organization, name, and version.
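+   * Reports that share the same organization, name, and revision are collapsed into a single
+   * report first; only the remaining distinct candidates are passed on to conflict resolution.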
+   * Returns a tuple of (surviving modules ++ non-conflicting modules, newly evicted modules).
+   */
+  def mergeModuleReports(
+      rootModuleConf: ConfigRef,
+      modules: Vector[ModuleReport],
+      os: Vector[IvyOverride],
+      log: Logger
+  ): (Vector[ModuleReport], Vector[ModuleReport]) = {
+    if (modules.nonEmpty) {
+      log.debug(
+        s":: merging module reports for $rootModuleConf: ${modules.head.module.organization}:${modules.head.module.name}"
+      )
+    }
+    def mergeModuleReports(xs: Vector[ModuleReport]): ModuleReport = {
+      val completelyEvicted = xs forall { _.evicted }
+      val allCallers = xs flatMap { _.callers }
+      // Caller info is often repeated across the subprojects. We only need ModuleID info for later, so xs.head is ok.
+      val distinctByModuleId = allCallers.groupBy({ _.caller }).toVector map { case (_, xs) =>
+        xs.head
+      }
+      val allArtifacts = (xs flatMap { _.artifacts }).distinct
+      xs.head
+        .withArtifacts(allArtifacts)
+        .withEvicted(completelyEvicted)
+        .withCallers(distinctByModuleId)
+    }
+    val merged = (modules groupBy { m =>
+      (m.module.organization, m.module.name, m.module.revision)
+    }).toVector flatMap { case (_, xs) =>
+      if (xs.size < 2) xs
+      else Vector(mergeModuleReports(xs))
+    }
+    val conflicts = merged filter { m =>
+      !m.evicted && m.problem.isEmpty
+    }
+    if (conflicts.size < 2) (merged, Vector())
+    else
+      resolveConflict(rootModuleConf, conflicts, os, log) match {
+        case (survivor, evicted) =>
+          (
+            survivor ++ (merged filter { m =>
+              m.evicted || m.problem.isDefined
+            }),
+            evicted
+          )
+      }
+  }
+
+  /**
+   * This transitively evicts any non-evicted modules whose only callers are newly evicted.
+   */
+  def transitivelyEvict(
+      rootModuleConf: ConfigRef,
+      pairs: List[(String, String)],
+      reports0: Map[(String, String), Vector[OrganizationArtifactReport]],
+      evicted0: Vector[ModuleReport],
+      log: Logger
+  ): Map[(String, String), Vector[OrganizationArtifactReport]] = {
+    val em = (evicted0 map { _.module }).toSet
+    def isTransitivelyEvicted(mr: ModuleReport): Boolean =
+      mr.callers forall { c =>
+        em(c.caller)
+      }
+    val reports: Seq[((String, String), Vector[OrganizationArtifactReport])] =
+      reports0.toSeq flatMap {
+        case (k, _) if !(pairs.contains[(String, String)](k)) => Seq()
+        case ((organization, name), oars0) =>
+          val oars = oars0 map { oar =>
+            val (affected, unaffected) = oar.modules partition { mr =>
+              val x = !mr.evicted && mr.problem.isEmpty && isTransitivelyEvicted(mr)
+              if (x) {
+                log.debug(s""":::: transitively evicted $rootModuleConf: ${mr.module}""")
+              }
+              x
+            }
+            val newlyEvicted = affected map {
+              _.withEvicted(true).withEvictedReason(Some("transitive-evict"))
+            }
+            if (affected.isEmpty) oar
+            else OrganizationArtifactReport(organization, name, unaffected ++ newlyEvicted)
+          }
+          Seq(((organization, name), oars))
+      }
+    Map(reports: _*)
+  }
+
+  /**
+   * Resolves dependency resolution conflicts in which multiple candidates are found for organization+name combos.
+   * The main input is conflicts, which is a Vector of ModuleReport, which contains full info on the module revision, including its callers.
+   * Conflict resolution could be expensive, so the result is first cached to `cachedResolutionResolveCache` if the conflict is between 2 modules.
+   * Otherwise, the default "latest" resolution takes the following precedence:
+   *   1. overrides passed in to `os`.
+   *   2. directly forced dependency within the artificial module.
+   *   3. latest revision.
+   * Note transitively forced dependencies are not respected.
This seems to be the case for stock Ivy's behavior as well, + * which may be because Ivy makes all Maven dependencies as forced="true". + */ + def resolveConflict( + rootModuleConf: ConfigRef, + conflicts: Vector[ModuleReport], + os: Vector[IvyOverride], + log: Logger + ): (Vector[ModuleReport], Vector[ModuleReport]) = { + import org.apache.ivy.plugins.conflict.{ + NoConflictManager, + StrictConflictManager, + LatestConflictManager + } + val head = conflicts.head + val organization = head.module.organization + val name = head.module.name + log.debug(s"::: resolving conflict in $rootModuleConf:$organization:$name " + (conflicts map { + _.module + }).mkString("(", ", ", ")")) + def useLatest( + lcm: LatestConflictManager + ): (Vector[ModuleReport], Vector[ModuleReport], String) = + (conflicts find { m => + m.callers.exists { _.isDirectlyForceDependency } + }) match { + case Some(m) => + log.debug(s"- directly forced dependency: $m ${m.callers}") + ( + Vector(m), + conflicts filterNot { _ == m } map { + _.withEvicted(true).withEvictedReason(Some("direct-force")) + }, + "direct-force" + ) + case None => + (conflicts find { m => + m.callers.exists { _.isForceDependency } + }) match { + // Ivy translates pom.xml dependencies to forced="true", so transitive force is broken. + case Some(m) if !ignoreTransitiveForce => + log.debug(s"- transitively forced dependency: $m ${m.callers}") + ( + Vector(m), + conflicts filterNot { _ == m } map { + _.withEvicted(true).withEvictedReason(Some("transitive-force")) + }, + "transitive-force" + ) + case _ => + val strategy = lcm.getStrategy + val infos = conflicts map { ModuleReportArtifactInfo(_) } + log.debug(s"- Using $strategy with $infos") + Option(strategy.findLatest(infos.toArray, None.orNull)) match { + case Some(ModuleReportArtifactInfo(m)) => + ( + Vector(m), + conflicts filterNot { _ == m } map { + _.withEvicted(true).withEvictedReason(Some(lcm.toString)) + }, + lcm.toString + ) + case _ => (conflicts, Vector(), lcm.toString) + } + } + } + def doResolveConflict: (Vector[ModuleReport], Vector[ModuleReport], String) = + os find { ovr => + ovr.moduleId.getOrganisation == organization && ovr.moduleId.getName == name + } match { + case Some(ovr) if Option(ovr.ddm.getVersion).isDefined => + val ovrVersion = ovr.ddm.getVersion + conflicts find { mr => + mr.module.revision == ovrVersion + } match { + case Some(m) => + ( + Vector(m), + conflicts filterNot { _ == m } map { + _.withEvicted(true).withEvictedReason(Some("override")) + }, + "override" + ) + case None => + sys.error( + s"override dependency specifies $ovrVersion but no candidates were found: " + (conflicts map { + _.module + }).mkString("(", ", ", ")") + ) + } + case _ => + getSettings.getConflictManager(IvyModuleId.newInstance(organization, name)) match { + case ncm: NoConflictManager => (conflicts, Vector(), ncm.toString) + case _: StrictConflictManager => + sys.error( + (s"conflict was found in $rootModuleConf:$organization:$name " + (conflicts map { + _.module + }).mkString("(", ", ", ")")) + ) + case lcm: LatestConflictManager => useLatest(lcm) + case conflictManager => sys.error(s"Unsupported conflict manager $conflictManager") + } + } + if (conflicts.size == 2 && os.isEmpty) { + val (cf0, cf1) = (conflicts(0).module, conflicts(1).module) + val cache = cachedResolutionResolveCache + val (surviving, evicted) = cache.getOrElseUpdateConflict(cf0, cf1, conflicts) { + doResolveConflict + } + (surviving, evicted) + } else { + val (surviving, evicted, _) = doResolveConflict + (surviving, evicted) + } 
+ } + def remapInternalProject( + node: IvyNode, + ur: UpdateReport, + md0: ModuleDescriptor, + dd: DependencyDescriptor, + os: Vector[IvyOverride], + log: Logger + ): UpdateReport = { + def parentConfigs(c: String): Vector[String] = + Option(md0.getConfiguration(c)) match { + case Some(config) => + config.getExtends.toVector ++ + (config.getExtends.toVector flatMap parentConfigs) + case None => Vector() + } + // These are the configurations from the original project we want to resolve. + val rootModuleConfs = md0.getConfigurations.toVector + val configurations0: Vector[ConfigurationReport] = ur.configurations.toVector + // This is how md looks from md0 via dd's mapping. + val remappedConfigs0: Map[String, Vector[String]] = Map(rootModuleConfs map { conf0 => + val remapped: Vector[String] = dd + .getDependencyConfigurations(conf0.getName) + .toVector flatMap { conf => + node.getRealConfs(conf).toVector + } + conf0.getName -> remapped + }: _*) + // This emulates test-internal extending test configuration etc. + val remappedConfigs: Map[String, Vector[String]] = + rootModuleConfs.foldLeft(remappedConfigs0) { (acc0, c) => + val ps = parentConfigs(c.getName) + ps.foldLeft(acc0) { (acc, parent) => + val vs0 = acc.getOrElse(c.getName, Vector()) + val vs = acc.getOrElse(parent, Vector()) + acc.updated(c.getName, (vs0 ++ vs).distinct) + } + } + log.debug(s"::: remapped configs $remappedConfigs") + val configurations = rootModuleConfs map { conf0 => + val remappedCRs: Vector[ConfigurationReport] = configurations0 filter { cr => + remappedConfigs(conf0.getName).contains[String](cr.configuration.name) + } + mergeConfigurationReports(ConfigRef(conf0.getName), remappedCRs, os, log) + } + UpdateReport(ur.cachedDescriptor, configurations, ur.stats, ur.stamps) + } +} + +private[sbt] case class ModuleReportArtifactInfo(moduleReport: ModuleReport) + extends IvyArtifactInfo { + override def getLastModified: Long = + moduleReport.publicationDate map { _.getTimeInMillis } getOrElse 0L + override def getRevision: String = moduleReport.module.revision + override def toString: String = + s"ModuleReportArtifactInfo(${moduleReport.module}, $getRevision, $getLastModified)" +} +private[sbt] case class IvyOverride( + moduleId: IvyModuleId, + pm: PatternMatcher, + ddm: OverrideDependencyDescriptorMediator +) { + override def toString: String = + s"""IvyOverride($moduleId,$pm,${ddm.getVersion},${ddm.getBranch})""" +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/CustomMavenResolver.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/CustomMavenResolver.scala new file mode 100644 index 000000000..788b8f1e8 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/CustomMavenResolver.scala @@ -0,0 +1,11 @@ +package sbt.internal.librarymanagement +package ivyint + +import org.apache.ivy.plugins.resolver.DependencyResolver +import sbt.librarymanagement._ + +// These are placeholder traits for sbt-aether-resolver +trait CustomMavenResolver extends DependencyResolver {} +trait CustomRemoteMavenResolver extends CustomMavenResolver { + def repo: MavenRepository +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/ErrorMessageAuthenticator.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/ErrorMessageAuthenticator.scala new file mode 100644 index 000000000..478462e8d --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/ErrorMessageAuthenticator.scala @@ -0,0 +1,165 @@ +package 
sbt.internal.librarymanagement
+package ivyint
+
+import java.lang.reflect.InvocationTargetException
+import java.net.{ Authenticator, PasswordAuthentication }
+
+import org.apache.ivy.util.Message
+import org.apache.ivy.util.url.IvyAuthenticator
+
+/**
+ * Helper to install an Authenticator that works with the IvyAuthenticator to provide better error messages when
+ * credentials don't line up.
+ */
+object ErrorMessageAuthenticator {
+  private var securityWarningLogged = false
+
+  private def originalAuthenticator: Option[Authenticator] = {
+    if (LMSysProp.isJavaVersion9Plus) getDefaultAuthenticator
+    else getTheAuthenticator
+  }
+
+  private[this] def getTheAuthenticator: Option[Authenticator] = {
+    withJavaReflectErrorHandling {
+      val field = classOf[Authenticator].getDeclaredField("theAuthenticator")
+      field.setAccessible(true)
+      Option(field.get(null).asInstanceOf[Authenticator])
+    }
+  }
+
+  private[this] def getDefaultAuthenticator: Option[Authenticator] =
+    withJavaReflectErrorHandling {
+      val method = classOf[Authenticator].getDeclaredMethod("getDefault")
+      Option(method.invoke(null).asInstanceOf[Authenticator])
+    }
+
+  private[this] def withJavaReflectErrorHandling[A](t: => Option[A]): Option[A] = {
+    try t
+    catch {
+      case e: ReflectiveOperationException => handleReflectionException(e)
+      case e: SecurityException => handleReflectionException(e)
+      case e: InvocationTargetException => handleReflectionException(e)
+      case e: ExceptionInInitializerError => handleReflectionException(e)
+      case e: IllegalArgumentException => handleReflectionException(e)
+      case e: NullPointerException => handleReflectionException(e)
+      case e: ClassCastException => handleReflectionException(e)
+    }
+  }
+
+  private[this] def handleReflectionException(t: Throwable) = {
+    Message.debug("Error occurred while getting the original authenticator: " + t.getMessage)
+    None
+  }
+
+  private lazy val ivyOriginalField = {
+    val field = classOf[IvyAuthenticator].getDeclaredField("original")
+    field.setAccessible(true)
+    field
+  }
+  // Attempts to get the original authenticator from the Ivy class or returns null.
+  private def installIntoIvy(ivy: IvyAuthenticator): Option[Authenticator] = {
+    // Here we install ourselves as the IvyAuthenticator's default so we get called AFTER Ivy has a chance to run.
+    def installIntoIvyImpl(original: Option[Authenticator]): Unit = {
+      val newOriginal = new ErrorMessageAuthenticator(original)
+      ivyOriginalField.set(ivy, newOriginal)
+    }
+
+    try
+      Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match {
+        case Some(
+              _: ErrorMessageAuthenticator
+            ) => // We're already installed, no need to do the work again.
+        case originalOpt => installIntoIvyImpl(originalOpt)
+      }
+    catch {
+      case t: Throwable =>
+        Message.debug(
+          "Error occurred while trying to install debug messages into Ivy Authentication: " + t.getMessage
+        )
+    }
+    Some(ivy)
+  }
+
+  /** Installs the error message authenticator so we have nicer error messages when using java's URL for downloading. */
+  def install(): Unit = {
+    // Actually installs the error message authenticator.
+    def doInstall(original: Option[Authenticator]): Unit =
+      try Authenticator.setDefault(new ErrorMessageAuthenticator(original))
+      catch {
+        case _: SecurityException if !securityWarningLogged =>
+          securityWarningLogged = true
+          Message.warn(
+            "Not enough permissions to set the ErrorMessageAuthenticator. " +
+              "Helpful debug messages disabled!"
+          )
+      }
+    // We will try to use the original authenticator as a backup authenticator.
+    // Since there is no getter available, we try reflection to obtain it.
+    // If that doesn't work, assume there is no original authenticator.
+    def doInstallIfIvy(original: Option[Authenticator]): Unit =
+      original match {
+        case Some(_: ErrorMessageAuthenticator) => // Ignore, we're already installed
+        case Some(ivy: IvyAuthenticator) =>
+          installIntoIvy(ivy); ()
+        case original => doInstall(original)
+      }
+    doInstallIfIvy(originalAuthenticator)
+  }
+}
+
+/**
+ * An authenticator which just delegates to a previous authenticator and issues *nice*
+ * error messages on failure to find credentials.
+ *
+ * Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to
+ * install this one at some point and eventually ivy will capture it and use it.
+ */
+private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticator])
+    extends Authenticator {
+
+  protected override def getPasswordAuthentication(): PasswordAuthentication = {
+    // We're guaranteed to only get here if Ivy's authentication fails
+    if (!isProxyAuthentication) {
+      val host = getRequestingHost
+      // TODO - levenshtein distance "did you mean" message.
+      Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].")
+      val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty)
+      if (configuredRealms.nonEmpty) {
+        Message.error(s" Is one of these realms misspelled for host [${host}]:")
+        configuredRealms foreach { realm =>
+          Message.error(s" * ${realm}")
+        }
+      }
+    }
+    // TODO - Maybe we should work on a helpful proxy message...
+
+    // TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it.
+    // or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be).
+
+    // Grabs the authentication that would have been provided had we not been installed...
+    def originalAuthentication: Option[PasswordAuthentication] = {
+      Authenticator.setDefault(original.orNull)
+      try
+        Option(
+          Authenticator.requestPasswordAuthentication(
+            getRequestingHost,
+            getRequestingSite,
+            getRequestingPort,
+            getRequestingProtocol,
+            getRequestingPrompt,
+            getRequestingScheme
+          )
+        )
+      finally Authenticator.setDefault(this)
+    }
+    originalAuthentication.orNull
+  }
+
+  /**
+   * Returns true if this authentication is for a proxy and not for an HTTP server.
+   * We want to display different error messages depending on the case.
+   */
+  private def isProxyAuthentication: Boolean =
+    getRequestorType == Authenticator.RequestorType.PROXY
+
+}
diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/IvyCredentialsLookup.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/IvyCredentialsLookup.scala
new file mode 100644
index 000000000..9630e3258
--- /dev/null
+++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/IvyCredentialsLookup.scala
@@ -0,0 +1,71 @@
+package sbt.internal.librarymanagement
+package ivyint
+
+import org.apache.ivy.util.url.CredentialsStore
+import scala.jdk.CollectionConverters._
+
+/** A key used to store credentials in the ivy credentials store. */
+private[sbt] sealed trait CredentialKey
+
+/** Represents a key in the ivy credentials store that is only specific to a host. */
+private[sbt] case class Host(name: String) extends CredentialKey
+
+/** Represents a key in the ivy credentials store that is keyed to both a host and a "realm".
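+ * For example, a store key of the form "My Realm@repo.example.org" (hypothetical values) would be
+ * read back as Realm("repo.example.org", "My Realm") by the lookup below.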
*/ +private[sbt] case class Realm(host: String, realm: String) extends CredentialKey + +/** + * Helper mechanism to improve credential related error messages. + * + * This evil class exposes to us the necessary information to warn on credential failure and offer + * spelling/typo suggestions. + */ +private[sbt] object IvyCredentialsLookup { + + /** Helper extractor for Ivy's key-value store of credentials. */ + private object KeySplit { + def unapply(key: String): Option[(String, String)] = { + key.indexOf('@') match { + case -1 => None + case n => Some(key.take(n) -> key.drop(n + 1)) + } + } + } + + /** + * Here we cheat runtime private so we can look in the credentials store. + * + * TODO - Don't bomb at class load time... + */ + private val credKeyringField = { + val tmp = classOf[CredentialsStore].getDeclaredField("KEYRING") + tmp.setAccessible(true) + tmp + } + + /** All the keys for credentials in the ivy configuration store. */ + def keyringKeys: Set[CredentialKey] = { + val map = credKeyringField.get(null).asInstanceOf[java.util.HashMap[String, Any]] + // make a clone of the set... + (map.keySet.asScala.map { + case KeySplit(realm, host) => (Realm(host, realm): CredentialKey) + case host => (Host(host): CredentialKey) + }).toSet + } + + /** + * A mapping of host -> realms in the ivy credentials store. + */ + def realmsForHost: Map[String, Set[String]] = + keyringKeys + .collect { case x: Realm => + x + } + .groupBy { realm => + realm.host + } + .view + .mapValues { realms => + realms map (_.realm) + } + .toMap +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/MergeDescriptors.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/MergeDescriptors.scala new file mode 100644 index 000000000..a86ed3689 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/MergeDescriptors.scala @@ -0,0 +1,194 @@ +package sbt.internal.librarymanagement +package ivyint + +import scala.collection.immutable.ArraySeq +import org.apache.ivy.core +import core.module.descriptor.{ DependencyArtifactDescriptor, DefaultDependencyArtifactDescriptor } +import core.module.descriptor.DependencyDescriptor +import core.module.id.{ ArtifactId, ModuleRevisionId } + +private[sbt] object MergeDescriptors { + def mergeable(a: DependencyDescriptor, b: DependencyDescriptor): Boolean = + a.isForce == b.isForce && + a.isChanging == b.isChanging && + a.isTransitive == b.isTransitive && + a.getParentRevisionId == b.getParentRevisionId && + a.getNamespace == b.getNamespace && { + val amrid = a.getDependencyRevisionId + val bmrid = b.getDependencyRevisionId + amrid == bmrid + } && { + val adyn = a.getDynamicConstraintDependencyRevisionId + val bdyn = b.getDynamicConstraintDependencyRevisionId + adyn == bdyn + } + + def apply(a: DependencyDescriptor, b: DependencyDescriptor): DependencyDescriptor = { + assert(mergeable(a, b)) + new MergedDescriptors(a, b) + } +} + +// combines the artifacts, configurations, includes, and excludes for DependencyDescriptors `a` and `b` +// that otherwise have equal IDs +private[sbt] final case class MergedDescriptors(a: DependencyDescriptor, b: DependencyDescriptor) + extends DependencyDescriptor { + def getDependencyId = a.getDependencyId + def isForce = a.isForce + def isChanging = a.isChanging + def isTransitive = a.isTransitive + def getNamespace = a.getNamespace + def getParentRevisionId = a.getParentRevisionId + def getDependencyRevisionId = a.getDependencyRevisionId + def getDynamicConstraintDependencyRevisionId = 
a.getDynamicConstraintDependencyRevisionId + + def getModuleConfigurations = concat(a.getModuleConfigurations, b.getModuleConfigurations) + + def getDependencyConfigurations(moduleConfiguration: String, requestedConfiguration: String) = + concat( + a.getDependencyConfigurations(moduleConfiguration, requestedConfiguration), + b.getDependencyConfigurations(moduleConfiguration) + ) + + def getDependencyConfigurations(moduleConfiguration: String) = + concat( + a.getDependencyConfigurations(moduleConfiguration), + b.getDependencyConfigurations(moduleConfiguration) + ) + + def getDependencyConfigurations(moduleConfigurations: Array[String]) = + concat( + a.getDependencyConfigurations(moduleConfigurations), + b.getDependencyConfigurations(moduleConfigurations) + ) + + def getAllDependencyArtifacts = + concatArtifacts(a, a.getAllDependencyArtifacts, b, b.getAllDependencyArtifacts) + + def getDependencyArtifacts(moduleConfigurations: String) = + concatArtifacts( + a, + a.getDependencyArtifacts(moduleConfigurations), + b, + b.getDependencyArtifacts(moduleConfigurations) + ) + + def getDependencyArtifacts(moduleConfigurations: Array[String]) = + concatArtifacts( + a, + a.getDependencyArtifacts(moduleConfigurations), + b, + b.getDependencyArtifacts(moduleConfigurations) + ) + + def getAllIncludeRules = concat(a.getAllIncludeRules, b.getAllIncludeRules) + + def getIncludeRules(moduleConfigurations: String) = + concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations)) + + def getIncludeRules(moduleConfigurations: Array[String]) = + concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations)) + + private[this] def concatArtifacts( + a: DependencyDescriptor, + as: Array[DependencyArtifactDescriptor], + b: DependencyDescriptor, + bs: Array[DependencyArtifactDescriptor] + ) = { + if (as.isEmpty) + if (bs.isEmpty) as + else defaultArtifact(a) ++ explicitConfigurations(b, bs) + else if (bs.isEmpty) explicitConfigurations(a, as) ++ defaultArtifact(b) + else concat(explicitConfigurations(a, as), explicitConfigurations(b, bs)) + } + private[this] def explicitConfigurations( + base: DependencyDescriptor, + arts: Array[DependencyArtifactDescriptor] + ): Array[DependencyArtifactDescriptor] = + arts map { art => + explicitConfigurations(base, art) + } + private[this] def explicitConfigurations( + base: DependencyDescriptor, + art: DependencyArtifactDescriptor + ): DependencyArtifactDescriptor = { + val aConfs = Option(art.getConfigurations) map { _.toList } + // In case configuration list is "*", we should still specify the module configuration of the DependencyDescriptor + // otherwise the explicit specified artifacts from one dd can leak over to the other. + // See gh-1500, gh-2002 + aConfs match { + case None | Some(Nil) | Some(List("*")) => + copyWithConfigurations(art, ArraySeq.unsafeWrapArray(base.getModuleConfigurations)) + case _ => art + } + } + private[this] def defaultArtifact( + a: DependencyDescriptor + ): Array[DependencyArtifactDescriptor] = { + val dd = new DefaultDependencyArtifactDescriptor( + a, + a.getDependencyRevisionId.getName, + "jar", + "jar", + null, + null + ) + addConfigurations(dd, ArraySeq.unsafeWrapArray(a.getModuleConfigurations)) + // If the dependency descriptor is empty, then it means that it has been created from a POM file. In this case, + // it is correct to create a seemingly non-existent dependency artifact. 
+ if (a.getAllDependencyArtifacts.isEmpty) Array(dd) + else a.getAllDependencyArtifacts filter (_ == dd) + } + private[this] def copyWithConfigurations( + dd: DependencyArtifactDescriptor, + confs: Seq[String] + ): DependencyArtifactDescriptor = { + val dextra = dd.getQualifiedExtraAttributes + val newd = new DefaultDependencyArtifactDescriptor( + dd.getDependencyDescriptor, + dd.getName, + dd.getType, + dd.getExt, + dd.getUrl, + dextra + ) + addConfigurations(newd, confs) + newd + } + private[this] def addConfigurations( + dd: DefaultDependencyArtifactDescriptor, + confs: Seq[String] + ): Unit = + confs foreach dd.addConfiguration + + private[this] def concat[T: reflect.ClassTag](a: Array[T], b: Array[T]): Array[T] = + (a ++ b).distinct + + def getAllExcludeRules = concat(a.getAllExcludeRules, b.getAllExcludeRules) + + def getExcludeRules(moduleConfigurations: String) = + concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations)) + + def getExcludeRules(moduleConfigurations: Array[String]) = + concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations)) + + def doesExclude(moduleConfigurations: Array[String], artifactId: ArtifactId) = + a.doesExclude(moduleConfigurations, artifactId) || b.doesExclude( + moduleConfigurations, + artifactId + ) + + def canExclude = a.canExclude || b.canExclude + + def asSystem = this + + def clone(revision: ModuleRevisionId) = + new MergedDescriptors(a.clone(revision), b.clone(revision)) + + def getAttribute(name: String): String = a.getAttribute(name) + def getAttributes = a.getAttributes + def getExtraAttribute(name: String) = a.getExtraAttribute(name) + def getExtraAttributes = a.getExtraAttributes + def getQualifiedExtraAttributes = a.getQualifiedExtraAttributes + def getSourceModule = a.getSourceModule +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/ParallelResolveEngine.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/ParallelResolveEngine.scala new file mode 100644 index 000000000..e683e5710 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/ParallelResolveEngine.scala @@ -0,0 +1,114 @@ +package sbt.internal.librarymanagement.ivyint + +import java.util.concurrent.Executors + +import org.apache.ivy.core.event.EventManager +import org.apache.ivy.core.event.download.PrepareDownloadEvent +import org.apache.ivy.core.module.descriptor.Artifact +import org.apache.ivy.core.report._ +import org.apache.ivy.core.resolve._ +import org.apache.ivy.core.sort.SortEngine +import org.apache.ivy.util.filter.Filter + +import scala.concurrent.duration.Duration +import scala.concurrent.{ Await, ExecutionContext, Future } + +private[ivyint] case class DownloadResult( + dep: IvyNode, + report: DownloadReport, + totalSizeDownloaded: Long +) + +object ParallelResolveEngine { + private lazy val resolveExecutionContext: ExecutionContext = { + // This throttles the connection number, especially when Gigahorse is not used. + val maxConnectionCount = 6 + val executor = Executors.newFixedThreadPool(maxConnectionCount) + ExecutionContext.fromExecutor(executor) + } +} + +/** Define an ivy [[ResolveEngine]] that resolves dependencies in parallel. 
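+ * Downloads for independent dependency nodes are dispatched concurrently on a fixed-size thread
+ * pool, and the resolve report is only updated once all of the downloads have completed.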
*/ +private[sbt] class ParallelResolveEngine( + settings: ResolveEngineSettings, + eventManager: EventManager, + sortEngine: SortEngine +) extends ResolveEngine(settings, eventManager, sortEngine) { + + override def downloadArtifacts( + report: ResolveReport, + artifactFilter: Filter, + options: DownloadOptions + ): Unit = { + import scala.jdk.CollectionConverters._ + val start = System.currentTimeMillis + report.getArtifacts match { + case typed: java.util.List[Artifact @unchecked] => + new PrepareDownloadEvent(typed.asScala.toArray) + } + // Farm out the dependencies for parallel download + implicit val ec = ParallelResolveEngine.resolveExecutionContext + val allDownloadsFuture = Future.traverse(report.getDependencies.asScala) { case dep: IvyNode => + Future { + if ( + !(dep.isCompletelyEvicted || dep.hasProblem) && + dep.getModuleRevision != null + ) { + Some(downloadNodeArtifacts(dep, artifactFilter, options)) + } else None + } + } + val allDownloads = Await.result(allDownloadsFuture, Duration.Inf) + // compute total downloaded size + val totalSize = allDownloads.foldLeft(0L) { + case (size, Some(download)) => + val dependency = download.dep + val moduleConfigurations = dependency.getRootModuleConfigurations + moduleConfigurations.foreach { configuration => + val configurationReport = report.getConfigurationReport(configuration) + + // Take into account artifacts required by the given configuration + if ( + dependency.isEvicted(configuration) || + dependency.isBlacklisted(configuration) + ) { + configurationReport.addDependency(dependency) + } else configurationReport.addDependency(dependency, download.report) + } + + size + download.totalSizeDownloaded + case (size, None) => size + } + + report.setDownloadTime(System.currentTimeMillis() - start) + report.setDownloadSize(totalSize) + } + + /** + * Download all the artifacts associated with an ivy node. + * + * Return the report and the total downloaded size. 
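+ *
+ * Only artifacts whose download status is `DownloadStatus.SUCCESSFUL` count towards the reported size.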
+ */ + private def downloadNodeArtifacts( + dependency: IvyNode, + artifactFilter: Filter, + options: DownloadOptions + ): DownloadResult = { + + val resolver = dependency.getModuleRevision.getArtifactResolver + val selectedArtifacts = dependency.getSelectedArtifacts(artifactFilter) + val downloadReport = resolver.download(selectedArtifacts, options) + val artifactReports = downloadReport.getArtifactsReports + + val totalSize = artifactReports.foldLeft(0L) { (size, artifactReport) => + // Check download status and report resolution failures + artifactReport.getDownloadStatus match { + case DownloadStatus.SUCCESSFUL => + size + artifactReport.getSize + case _ => size + } + } + + DownloadResult(dependency, downloadReport, totalSize) + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/SbtChainResolver.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/SbtChainResolver.scala new file mode 100644 index 000000000..494e72d91 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/SbtChainResolver.scala @@ -0,0 +1,506 @@ +package sbt.internal.librarymanagement +package ivyint + +import java.io.{ ByteArrayOutputStream, File, PrintWriter } +import java.text.ParseException +import java.util.Date + +import org.apache.ivy.core.cache.ArtifactOrigin +import org.apache.ivy.core.settings.IvySettings +import org.apache.ivy.core.{ IvyContext, LogOptions } +import org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor +import org.apache.ivy.core.module.descriptor.DependencyDescriptor +import org.apache.ivy.core.module.descriptor.ModuleDescriptor +import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact } +import org.apache.ivy.core.resolve.{ ResolveData, ResolvedModuleRevision } +import org.apache.ivy.plugins.latest.LatestStrategy +import org.apache.ivy.plugins.repository.file.{ FileResource, FileRepository => IFileRepository } +import org.apache.ivy.plugins.repository.url.URLResource +import org.apache.ivy.plugins.resolver._ +import org.apache.ivy.plugins.resolver.util.{ HasLatestStrategy, ResolvedResource } +import org.apache.ivy.util.{ Message, StringUtils => IvyStringUtils } +import sbt.util.Logger +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.UpdateOptions + +import scala.util.control.NonFatal + +private[sbt] case class SbtChainResolver( + name: String, + resolvers: Seq[DependencyResolver], + settings: IvySettings, + updateOptions: UpdateOptions, + log: Logger +) extends ChainResolver { + override def setCheckmodified(check: Boolean): Unit = super.setCheckmodified(check) + + override def equals(o: Any): Boolean = o match { + case o: SbtChainResolver => + this.name == o.name && + this.resolvers == o.resolvers && + this.settings == o.settings && + this.updateOptions == o.updateOptions + case _ => false + } + + override def hashCode: Int = { + var hash = 1 + hash = hash * 31 + this.name.## + hash = hash * 31 + this.resolvers.## + hash = hash * 31 + this.settings.## + hash = hash * 31 + this.updateOptions.## + hash + } + + // TODO - We need to special case the project resolver so it always "wins" when resolving with inter-project dependencies. + + def initializeChainResolver(): Unit = { + // Initialize ourselves. + setName(name) + setReturnFirst(true) + setCheckmodified(false) + + /* Append all the resolvers to the extended chain resolvers since we get its value later on */ + resolvers.foreach(add) + } + + initializeChainResolver() + + // Technically, this should be applied to module configurations. 
+ // That would require custom subclasses of all resolver types in ConvertResolver (a delegation approach does not work). + // It would be better to get proper support into Ivy. + // A workaround is to configure the ModuleConfiguration resolver to be a ChainResolver. + // + // This method is only used by the pom parsing code in Ivy to find artifacts it doesn't know about. + // In particular, a) it looks up source and javadoc classifiers b) it looks up a main artifact for packaging="pom" + // sbt now provides the update-classifiers or requires explicitly specifying classifiers explicitly + // Providing a main artifact for packaging="pom" does not seem to be correct and the lookup can be expensive. + // + // Ideally this could just skip the lookup, but unfortunately several artifacts in practice do not follow the + // correct behavior for packaging="pom" and so it is only skipped for source/javadoc classifiers. + override def locate(artifact: IArtifact): ArtifactOrigin = + if (IvySbt.hasImplicitClassifier(artifact)) null else super.locate(artifact) + + override def getDependency( + dd: DependencyDescriptor, + data: ResolveData + ): ResolvedModuleRevision = { + if (data.getOptions.getLog == LogOptions.LOG_DEFAULT) + Message.info("Resolving " + dd.getDependencyRevisionId + " ...") + val gd = CustomSbtResolution.getDependency(dd, data) + val mod = IvySbt.resetArtifactResolver(gd) + mod + } + + /** Implements the custom sbt chain resolution with support for snapshots and caching. */ + private object CustomSbtResolution { + def getCached( + dd: DependencyDescriptor, + data: ResolveData, + resolved0: Option[ResolvedModuleRevision] + ): Option[ResolvedModuleRevision] = { + resolved0.orElse { + val resolverName = getName + Message.verbose(s"$resolverName: Checking cache for: $dd") + Option(findModuleInCache(dd, data, true)).map { moduleRev => + Message.verbose(s"$resolverName: module revision found in cache: ${moduleRev.getId}") + forcedRevision(moduleRev) + } + } + } + + /* Copy pasted from `IvyStringUtils` to handle `Throwable` */ + private def getStackTrace(e: Throwable): String = { + if (e == null) return "" + val baos = new ByteArrayOutputStream() + val printWriter = new PrintWriter(baos) + e.printStackTrace(printWriter) + printWriter.flush() + val stackTrace = new String(baos.toByteArray) + printWriter.close() + stackTrace + } + + /** If None, module was not found. Otherwise, hit. */ + type TriedResolution = Option[(ResolvedModuleRevision, DependencyResolver)] + + /** + * Attempts to resolve the artifact from each of the resolvers in the chain. + * + * Contract: + * 1. It doesn't resolve anything when there is a resolved module, `isReturnFirst` is + * enabled and `useLatest` is false (meaning that resolution is pure, no SNAPSHOT). + * 2. Otherwise, we try to resolve it. + * + * @param resolved0 The perhaps already resolved module. + * @param useLatest Whether snapshot resolution should be enabled. + * @param data The resolve data to use. + * @param descriptor The dependency descriptor of the in-resolution module. 
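+ * @param resolvers The resolvers to try, in the order they are given.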
+ */ + def getResults( + resolved0: Option[ResolvedModuleRevision], + useLatest: Boolean, + data: ResolveData, + descriptor: DependencyDescriptor, + resolvers: Seq[DependencyResolver] + ): Seq[Either[Throwable, TriedResolution]] = { + var currentlyResolved = resolved0 + + def performResolution( + resolver: DependencyResolver + ): Option[(ResolvedModuleRevision, DependencyResolver)] = { + // Resolve all resolvers when the module is changing + val previouslyResolved = currentlyResolved + if (useLatest) data.setCurrentResolvedModuleRevision(null) + else data.setCurrentResolvedModuleRevision(currentlyResolved.orNull) + currentlyResolved = Option(resolver.getDependency(descriptor, data)) + if (currentlyResolved eq previouslyResolved) None + else if (useLatest) { + currentlyResolved.map(x => + (reparseModuleDescriptor(descriptor, data, resolver, x), resolver) + ) + } else currentlyResolved.map(x => (forcedRevision(x), resolver)) + } + + def reportError(throwable: Throwable, resolver: DependencyResolver): Unit = { + val trace = getStackTrace(throwable) + Message.verbose(s"problem occurred while resolving $descriptor with $resolver: $trace") + } + + resolvers.map { (resolver: DependencyResolver) => + // Return none when revision is cached and `isReturnFirst` is set + if (isReturnFirst && currentlyResolved.isDefined && !useLatest) Right(None) + else { + // We actually do resolution. + val oldLatest: Option[LatestStrategy] = + setLatestIfRequired(resolver, Option(getLatestStrategy)) + try Right(performResolution(resolver)) + catch { case NonFatal(t) => reportError(t, resolver); Left(t) } + finally { + oldLatest.foreach(_ => doSetLatestStrategy(resolver, oldLatest)) + checkInterrupted() + } + } + } + } + + private final val prefix = "Undefined resolution order" + def resolveLatest( + foundRevisions: Seq[(ResolvedModuleRevision, DependencyResolver)], + descriptor: DependencyDescriptor, + data: ResolveData + ): Option[ResolvedModuleRevision] = { + + val sortedRevisions = foundRevisions.sortBy { case (rmr, resolver) => + val publicationDate = rmr.getPublicationDate + val descriptorDate = rmr.getDescriptor.getPublicationDate + Message.warn(s"Sorting results from $rmr, using $publicationDate and $descriptorDate.") + // Just issue warning about issues with publication date, and fake one on it for now + val chosenPublicationDate = Option(publicationDate).orElse(Option(descriptorDate)) + chosenPublicationDate match { + case Some(date) => date.getTime + case None => + val id = rmr.getId + val resolvedResource = (resolver.findIvyFileRef(descriptor, data), rmr.getDescriptor) + resolvedResource match { + case (res: ResolvedResource, dmd: DefaultModuleDescriptor) => + val resolvedPublicationDate = new java.util.Date(res.getLastModified) + Message.debug(s"No publication date from resolver $resolver for $id.") + Message.debug(s"Setting publication date to: $resolvedPublicationDate.") + dmd.setPublicationDate(resolvedPublicationDate) + res.getLastModified + case (ivf, dmd) => + // The dependency is specified by a direct URL or some sort of non-ivy file + if (ivf == null && descriptor.isChanging) + Message.warn(s"$prefix: changing dependency $id with no ivy/pom file!") + if (dmd == null) + Message.warn(s"$prefix: no publication date from resolver $resolver for $id") + 0L + } + } + } + + val firstHit = sortedRevisions.reverse.headOption + firstHit.map { hit => + val (resolvedModule, resolver) = hit + + if (resolvedModule.getId.getRevision.contains("SNAPSHOT")) { + + Message.warn( + "Resolving a snapshot version. 
It's going to be slow unless you use `updateOptions := updateOptions.value.withLatestSnapshots(false)` options." + ) + val resolvers = sortedRevisions.map(_._2.getName) + sortedRevisions.foreach(h => { + val (module, resolver) = h + Message.info( + s"Out of ${sortedRevisions.size} candidates we found for ${module.getId} in ${resolvers + .mkString(" and ")}, we are choosing ${resolver}." + ) + }) + } else { + Message.warn(s"Choosing $resolver for ${resolvedModule.getId}") + } + + // Now that we know the real latest revision, let's force Ivy to use it + val resolvedDescriptor = resolvedModule.getDescriptor + val artifactOpt = findFirstArtifactRef(resolvedDescriptor, data, resolver) + // If `None` do nothing -- modules without artifacts. Otherwise cache. + artifactOpt.foreach { artifactRef => + val dep = toSystem(descriptor) + val first = toSystem(resolvedDescriptor).getAllArtifacts.head + val options = getCacheOptions(data) + val cacheManager = getRepositoryCacheManager + cacheManager.cacheModuleDescriptor(resolver, artifactRef, dep, first, null, options) + } + resolvedModule + } + } + + def resolveByAllMeans( + cachedModule: Option[ResolvedModuleRevision], + useLatest: Boolean, + interResolver: Option[DependencyResolver], + resolveModules: () => Seq[Either[Throwable, TriedResolution]], + dd: DependencyDescriptor, + data: ResolveData + ): Option[ResolvedModuleRevision] = { + val internallyResolved: Option[ResolvedModuleRevision] = { + if (!updateOptions.interProjectFirst) None + else interResolver.flatMap(resolver => Option(resolver.getDependency(dd, data))) + } + val internalOrExternal = internallyResolved.orElse { + val foundRevisions: Seq[(ResolvedModuleRevision, DependencyResolver)] = + resolveModules().collect { case Right(Some(x)) => x } + if (useLatest) resolveLatest(foundRevisions, dd, data) + else foundRevisions.reverse.headOption.map(_._1) // Resolvers are hit in reverse order + } + internalOrExternal.orElse(cachedModule) + } + + /** Cleans unnecessary module id information not provided by [[IvyRetrieve.toModuleID()]]. */ + private final val moduleResolvers = updateOptions.moduleResolvers.map { case (key, value) => + val cleanKey = ModuleID(key.organization, key.name, key.revision) + .withExtraAttributes(key.extraAttributes) + .withBranchName(key.branchName) + cleanKey -> value + } + + /** + * Gets the list of resolvers to use for resolving a given descriptor. + * + * NOTE: The ivy implementation guarantees that all resolvers implement dependency resolver. + * @param descriptor The descriptor to be resolved. 
+ */ + def getDependencyResolvers(descriptor: DependencyDescriptor): Vector[DependencyResolver] = { + val moduleRevisionId = descriptor.getDependencyRevisionId + val moduleID = IvyRetrieve.toModuleID(moduleRevisionId) + val resolverForModule = moduleResolvers.get(moduleID) + val ivyResolvers = getResolvers.toArray // Get resolvers from chain resolver directly + val allResolvers = ivyResolvers.collect { case r: DependencyResolver => r }.toVector + // Double check that dependency resolver will always be the super trait of a resolver + assert(ivyResolvers.size == allResolvers.size, "ALERT: Some ivy resolvers were filtered.") + val mappedResolver = resolverForModule.flatMap(r => allResolvers.find(_.getName == r.name)) + mappedResolver match { + case Some(uniqueResolver) => Vector(uniqueResolver) + case None => allResolvers + } + } + + def findInterProjectResolver(resolvers: Seq[DependencyResolver]): Option[DependencyResolver] = + resolvers.find(_.getName == ProjectResolver.InterProject) + + /** + * Gets the dependency for a given descriptor with the pertinent resolve data. + * + * This is a custom sbt chain operation that produces better error output and deals with + * cases that the conventional ivy resolver does not. It accumulates the resolution of + * several resolvers and returns the module which fits the provided resolution strategy. + * + * These are the differences with regard to the default ivy [[ChainResolver]]: + * 1. It skips resolution if "return first" is set to true. + * 2. It skips resolution if a previously resolved or cached resolution is found. + * 3. It always checks all the resolvers and compares timestamps for changing dependencies + * if and only if `latestSnapshots` is enabled in the update options, regardless of what + * the latest strategies are (http://ant.apache.org/ivy/history/2.3.0/settings/latest-strategies.html). + * See https://github.com/sbt/sbt/pull/1520 for more information on this topic. + * + * Note the tradeoff here in SNAPSHOTs: correctness vs slowness. + */ + def getDependency(dd: DependencyDescriptor, data0: ResolveData): ResolvedModuleRevision = { + val isDynamic = dd.isChanging || IvySbt.isChanging(dd.getDependencyRevisionId) + val useLatest = isDynamic && updateOptions.latestSnapshots + if (useLatest) Message.verbose(s"$getName is changing. Checking all resolvers on the chain.") + + /* Get the resolved module descriptor from: + * 1. An already resolved branch of the resolution tree. + * 2. The value from the cache. 
*/ + val data = new ResolveData(data0, doValidate(data0)) + val resolved0 = Option(data.getCurrentResolvedModuleRevision) + val resolvedOrCached = getCached(dd, data0, resolved0) + + val cached: Option[ResolvedModuleRevision] = if (useLatest) None else resolvedOrCached + val resolvers = getDependencyResolvers(dd) + val interResolver = findInterProjectResolver(resolvers) + // TODO: Please, change `Option` return types so that this goes away + lazy val results = getResults(cached, useLatest, data, dd, resolvers) + lazy val errors = results.collect { case Left(t) => t } + val runResolution = () => results + val resolved = resolveByAllMeans(cached, useLatest, interResolver, runResolution, dd, data) + + resolved match { + case None if errors.size == 1 => + errors.head match { + case e: RuntimeException => throw e + case e: ParseException => throw e + case e: Throwable => throw new RuntimeException(e.toString, e) + } + case None if errors.size > 1 => + val traces = errors.toList.map(e => IvyStringUtils.getErrorMessage(e)) + val msg = s"Resolution failed several times for $dd:" + throw new RuntimeException(s"$msg: ${traces.mkString("\n\t", "\n\t", "\n")}") + case _ => + // Can be either `None` with empty error or `Some` + if (resolved0 == resolved) resolved0.orNull + else resolved.map(resolvedRevision).orNull + } + } + } + + /* Ivy keeps module descriptors in memory, so we need to make sure that the + * resolved module revision is in fact the one found in the latest resolver. */ + private[this] def reparseModuleDescriptor( + dd: DependencyDescriptor, + data: ResolveData, + resolver: DependencyResolver, + previouslyResolved: ResolvedModuleRevision + ): ResolvedModuleRevision = { + // TODO: Figure out better alternative or directly attack the + // resolvers ivy uses to get correct behaviour for SNAPSHOT + Option(resolver.findIvyFileRef(dd, data)) flatMap { ivyFile => + ivyFile.getResource match { + case r: FileResource => + val urlDescriptor = r.getFile.toURI.toURL + try { + val parser = previouslyResolved.getDescriptor.getParser + val md = parser.parseDescriptor(settings, urlDescriptor, r, false) + val report = previouslyResolved.getReport + // Note that we always set force for SNAPSHOT resolution... + Some(new ResolvedModuleRevision(resolver, resolver, md, report, true)) + } catch { + case _: ParseException => + Message.warn(s"The descriptor in $urlDescriptor from $resolver could not be parsed.") + Some(previouslyResolved) + } + case unhandledResource => + val unhandledClassName = unhandledResource.getClass.getName + val tip = s"Returning previously resolved $previouslyResolved." + Message.debug(s"Latest snapshots option does not handle `$unhandledClassName`. $tip") + Some(previouslyResolved) + } + } getOrElse { + val previousRevision = dd.getDependencyRevisionId + val date = previouslyResolved.getPublicationDate + // Change from warn to debug -- see https://github.com/sbt/sbt/issues/2650. + Message.debug(s"Unable to find new descriptor for $previousRevision at $date in $resolver.") + previouslyResolved + } + } + + /** Ported from BasicResolver#findFirstAirfactRef. 
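+ * Finds the first artifact of `md` that `resolver` can locate, falling back to an artifact's
+ * explicit URL when one is set; returns `None` when no artifact reference can be resolved.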
*/ + private[this] def findFirstArtifactRef( + md: ModuleDescriptor, + data: ResolveData, + resolver: DependencyResolver + ): Option[ResolvedResource] = { + def artifactRef(artifact: IArtifact, date: Date): Option[ResolvedResource] = + resolver match { + case resolver: BasicResolver => + IvyContext.getContext.set(resolver.getName + ".artifact", artifact) + try { + Option(resolver.doFindArtifactRef(artifact, date)) orElse { + Option(artifact.getUrl) map { url => + Message.verbose("\tusing url for " + artifact + ": " + url) + val resource = + if ("file" == url.getProtocol) + new FileResource(new IFileRepository(), new File(url.getPath)) + else new URLResource(url) + new ResolvedResource(resource, artifact.getModuleRevisionId.getRevision) + } + } + } finally { + IvyContext.getContext.set(resolver.getName + ".artifact", null) + } + case _ => + None + } + val artifactRefs = md.getConfigurations.iterator flatMap { conf => + md.getArtifacts(conf.getName).iterator flatMap { af => + artifactRef(af, data.getDate).iterator + } + } + if (artifactRefs.hasNext) Some(artifactRefs.next()) + else None + } + + /** Ported from ChainResolver#forcedRevision. */ + private[this] def forcedRevision(rmr: ResolvedModuleRevision): ResolvedModuleRevision = + new ResolvedModuleRevision( + rmr.getResolver, + rmr.getArtifactResolver, + rmr.getDescriptor, + rmr.getReport, + true + ) + + /** Ported from ChainResolver#resolvedRevision. */ + private[this] def resolvedRevision(rmr: ResolvedModuleRevision): ResolvedModuleRevision = + if (isDual) + new ResolvedModuleRevision( + rmr.getResolver, + this, + rmr.getDescriptor, + rmr.getReport, + rmr.isForce + ) + else rmr + + /** Ported from ChainResolver#setLatestIfRequired. */ + private[this] def setLatestIfRequired( + resolver: DependencyResolver, + latest: Option[LatestStrategy] + ): Option[LatestStrategy] = + latestStrategyName(resolver) match { + case Some(latestName) if latestName != "default" => + val oldLatest = latestStrategy(resolver) + doSetLatestStrategy(resolver, latest) + oldLatest + case _ => None + } + + /** Ported from ChainResolver#getLatestStrategyName. */ + private[this] def latestStrategyName(resolver: DependencyResolver): Option[String] = + resolver match { + case r: HasLatestStrategy => Some(r.getLatest) + case _ => None + } + + /** Ported from ChainResolver#getLatest. */ + private[this] def latestStrategy(resolver: DependencyResolver): Option[LatestStrategy] = + resolver match { + case r: HasLatestStrategy => Some(r.getLatestStrategy) + case _ => None + } + + /** Ported from ChainResolver#setLatest. 
*/ + private[this] def doSetLatestStrategy( + resolver: DependencyResolver, + latest: Option[LatestStrategy] + ): Option[LatestStrategy] = + resolver match { + case r: HasLatestStrategy => + val oldLatest = latestStrategy(resolver) + r.setLatestStrategy(latest.orNull) + oldLatest + case _ => None + } +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/SbtDefaultDependencyDescriptor.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/SbtDefaultDependencyDescriptor.scala new file mode 100644 index 000000000..f4f281a50 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/ivyint/SbtDefaultDependencyDescriptor.scala @@ -0,0 +1,10 @@ +package sbt.internal.librarymanagement +package ivyint + +import org.apache.ivy.core +import core.module.descriptor.DefaultDependencyDescriptor +import sbt.librarymanagement._ + +trait SbtDefaultDependencyDescriptor { self: DefaultDependencyDescriptor => + def dependencyModuleId: ModuleID +} diff --git a/lm-ivy/src/main/scala/sbt/internal/librarymanagement/mavenint/PomExtraDependencyAttributes.scala b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/mavenint/PomExtraDependencyAttributes.scala new file mode 100644 index 000000000..29c3e2dec --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/internal/librarymanagement/mavenint/PomExtraDependencyAttributes.scala @@ -0,0 +1,133 @@ +package sbt.internal.librarymanagement +package mavenint + +import scala.collection.immutable.ArraySeq +import java.util.Properties +import java.util.regex.Pattern + +import org.apache.ivy.core.module.descriptor.DependencyDescriptor +import org.apache.ivy.core.module.id.ModuleRevisionId +import org.apache.ivy.util.extendable.ExtendableItem + +/** + * This class contains all the logic for dealing with the extra attributes in pom files relating to extra attributes + * on dependency declarations. + * + * Specifically, if we have a dependency on an sbt plugin, there are two properties that need to propagate: + * - `sbtVersion` + * - `scalaVersion` + * + * These need to exist on the *dependency declaration*. Maven/Aether has no way to inject these into + * the dependencies section of pom files, so we use Ivy's extra attribute hackery to inject a lookup table + * of extra attributes by dependency id into POM files and later we read these back. + */ +object PomExtraDependencyAttributes { + + val ExtraAttributesKey = "extraDependencyAttributes" + val SbtVersionKey = "sbtVersion" + val ScalaVersionKey = "scalaVersion" + + /** + * Reads the extra dependency attributes out of a maven property. + * @param props The properties from an Aether resolution. + * @return + * A map of module id to extra dependency attributes associated with dependencies on that module. + */ + def readFromAether( + props: java.util.Map[String, AnyRef] + ): Map[ModuleRevisionId, Map[String, String]] = { + import scala.jdk.CollectionConverters._ + (props.asScala get ExtraAttributesKey) match { + case None => Map.empty + case Some(str) => + def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true)) + (for { + (id, props) <- readDependencyExtra(str.toString).map(processDep) + } yield id -> props).toMap + } + } + + /** + * Mutates the `to` collection with the extra dependency attributes from the incoming pom properties list. + * + * @param from The properties directly off a maven POM file + * @param to The Aether properties where we can write whatever we want. + + * + * TODO - maybe we can just parse this directly here.
Note the `readFromAether` method uses + * whatever we set here. + */ + def transferDependencyExtraAttributes( + from: Properties, + to: java.util.Map[String, AnyRef] + ): Unit = + Option(from.getProperty(ExtraAttributesKey, null)) foreach (to.put(ExtraAttributesKey, _)) + + /** + * Reads the extra dependency information out of Ivy's notion of POM properties and returns + * the map of ID -> Extra Properties. + */ + def getDependencyExtra(m: Map[String, String]): Map[ModuleRevisionId, Map[String, String]] = + (m get ExtraAttributesKey) match { + case None => Map.empty + case Some(str) => + def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true)) + readDependencyExtra(str).map(processDep).toMap + } + + def qualifiedExtra(item: ExtendableItem): Map[String, String] = { + import scala.jdk.CollectionConverters._ + item.getQualifiedExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap + } + def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String, String] = + qualifiedExtra(item).view.filterKeys { k => qualifiedIsExtra(k) == include }.toMap + + def qualifiedIsExtra(k: String): Boolean = + k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey) + + // Reduces the id to exclude custom extra attributes + // This makes the id suitable as a key to associate a dependency parsed from a element + // with the extra attributes from the section + def simplify(id: ModuleRevisionId): ModuleRevisionId = { + import scala.jdk.CollectionConverters._ + ModuleRevisionId.newInstance( + id.getOrganisation, + id.getName, + id.getBranch, + id.getRevision, + filterCustomExtra(id, include = false).asJava + ) + } + + /** parses the sequence of dependencies with extra attribute information, with one dependency per line */ + def readDependencyExtra(s: String): Seq[ModuleRevisionId] = ArraySeq.unsafeWrapArray( + LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode) + ) + + private[this] val LinesP = Pattern.compile("(?m)^") + + /** + * Creates the "extra" property values for DependencyDescriptors that can be written into a maven pom + * so we don't lose the information. + * @param s + * @return + */ + def writeDependencyExtra(s: Seq[DependencyDescriptor]): Seq[String] = + s.flatMap { dd => + val revId = dd.getDependencyRevisionId + val filteredExtra = filterCustomExtra(revId, include = true) + if (filteredExtra.isEmpty) + Nil + else { + import scala.jdk.CollectionConverters._ + val revId0 = ModuleRevisionId.newInstance( + revId.getOrganisation, + revId.getName, + revId.getBranch, + revId.getRevision, + filteredExtra.asJava + ) + revId0.encodeToString :: Nil + } + } + +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/CircularDependencyLevel.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/CircularDependencyLevel.scala new file mode 100644 index 000000000..defba16b9 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/CircularDependencyLevel.scala @@ -0,0 +1,33 @@ +package sbt.librarymanagement +package ivy + +import org.apache.ivy.plugins.circular.{ + CircularDependencyStrategy, + WarnCircularDependencyStrategy, + IgnoreCircularDependencyStrategy, + ErrorCircularDependencyStrategy +} + +/** + * Wrapper around circular dependency strategy. 
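+ * For example, to fail resolution on circular dependencies (a minimal sketch, using the
+ * `UpdateOptions` defined in this module):
+ * {{{
+ * UpdateOptions().withCircularDependencyLevel(CircularDependencyLevel.Error)
+ * }}}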
+ */ +sealed trait CircularDependencyLevel { + private[sbt] def ivyStrategy: CircularDependencyStrategy + private[sbt] def name: String + override def toString: String = name +} + +object CircularDependencyLevel { + val Warn: CircularDependencyLevel = new CircularDependencyLevel { + def ivyStrategy: CircularDependencyStrategy = WarnCircularDependencyStrategy.getInstance + def name: String = "warn" + } + val Ignore: CircularDependencyLevel = new CircularDependencyLevel { + def ivyStrategy: CircularDependencyStrategy = IgnoreCircularDependencyStrategy.getInstance + def name: String = "ignore" + } + val Error: CircularDependencyLevel = new CircularDependencyLevel { + def ivyStrategy: CircularDependencyStrategy = ErrorCircularDependencyStrategy.getInstance + def name: String = "error" + } +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/Credentials.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/Credentials.scala new file mode 100644 index 000000000..ee29f9f60 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/Credentials.scala @@ -0,0 +1,95 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.librarymanagement +package ivy + +import java.io.File +import org.apache.ivy.util.url.CredentialsStore +import sbt.io.IO +import sbt.util.Logger +import sbt.internal.librarymanagement.IvyUtil + +object Credentials { + def apply(realm: String, host: String, userName: String, passwd: String): Credentials = + new DirectCredentials(realm, host, userName, passwd) + def apply(file: File): Credentials = + new FileCredentials(file) + + /** Add the provided credentials to Ivy's credentials cache. */ + def add(realm: String, host: String, userName: String, passwd: String): Unit = + CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd) + + /** Load credentials from the given file into Ivy's credentials cache. 
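+ *
+ * The file is a plain properties file; a minimal sketch (all values are placeholders):
+ * {{{
+ * realm=Some Realm
+ * host=repo.example.com
+ * user=deploy
+ * password=secret
+ * }}}
+ * `realm` may be omitted, and `hostname`, `user.name`/`username` and `pwd`/`pass`/`passwd` are
+ * accepted as aliases for `host`, `user` and `password` respectively.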
*/ + def add(path: File, log: Logger): Unit = + loadCredentials(path) match { + case Left(err) => log.warn(err) + case Right(dc) => add(dc.realm, dc.host, dc.userName, dc.passwd) + } + + def forHost(sc: Seq[Credentials], host: String) = allDirect(sc) find { _.host == host } + def allDirect(sc: Seq[Credentials]): Seq[DirectCredentials] = sc map toDirect + def toDirect(c: Credentials): DirectCredentials = c match { + case dc: DirectCredentials => dc + case fc: FileCredentials => + loadCredentials(fc.path) match { + case Left(err) => sys.error(err) + case Right(dc) => dc + } + } + + def loadCredentials(path: File): Either[String, DirectCredentials] = + if (path.exists) { + val properties = read(path) + def get(keys: List[String]): Either[String, String] = + keys + .flatMap(properties.get) + .headOption + .toRight(keys.head + " not specified in credentials file: " + path) + + IvyUtil.separate(List(HostKeys, UserKeys, PasswordKeys).map(get)) match + case (Nil, List(host: String, user: String, pass: String)) => + IvyUtil.separate(List(RealmKeys).map(get)) match + case (_, List(realm: String)) => Right(new DirectCredentials(realm, host, user, pass)) + case _ => Right(new DirectCredentials(null, host, user, pass)) + + case (errors, _) => Left(errors.mkString("\n")) + } else Left("Credentials file " + path + " does not exist") + + def register(cs: Seq[Credentials], log: Logger): Unit = + cs foreach { + case f: FileCredentials => add(f.path, log) + case d: DirectCredentials => add(d.realm, d.host, d.userName, d.passwd) + } + + private[this] val RealmKeys = List("realm") + private[this] val HostKeys = List("host", "hostname") + private[this] val UserKeys = List("user", "user.name", "username") + private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd") + + import scala.jdk.CollectionConverters._ + private[this] def read(from: File): Map[String, String] = { + val properties = new java.util.Properties + IO.load(properties, from) + properties.asScala.map { case (k, v) => (k.toString, v.toString.trim) }.toMap + } +} + +sealed trait Credentials +final class FileCredentials(val path: File) extends Credentials { + override def toString = s"""FileCredentials("$path")""" +} +final class DirectCredentials( + val realm: String, + val host: String, + val userName: String, + val passwd: String +) extends Credentials { + override def toString = { + val dq = '"' + val r = + if (realm == null) "null" + else s"$dq$realm$dq" + s"""DirectCredentials($r, "$host", "$userName", ****)""" + } +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyDefaults.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyDefaults.scala new file mode 100644 index 000000000..70028721f --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyDefaults.scala @@ -0,0 +1,9 @@ +package sbt +package librarymanagement.ivy + +/** + * This is a list of functions with default values. 
+ */ +object IvyDefaults { + val defaultChecksums: Vector[String] = Vector("sha1", "md5") +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyDependencyResolution.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyDependencyResolution.scala new file mode 100644 index 000000000..eaa4c0165 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyDependencyResolution.scala @@ -0,0 +1,31 @@ +package sbt +package librarymanagement +package ivy + +import sbt.internal.librarymanagement._ +import sbt.util.Logger + +class IvyDependencyResolution private[sbt] (val ivySbt: IvySbt) + extends DependencyResolutionInterface { + type Module = ivySbt.Module + + override def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor = { + new Module(moduleSetting) + } + + override def update( + module: ModuleDescriptor, + configuration: UpdateConfiguration, + uwconfig: UnresolvedWarningConfiguration, + log: Logger + ): Either[UnresolvedWarning, UpdateReport] = + IvyActions.updateEither(toModule(module), configuration, uwconfig, log) + + private[sbt] def toModule(module: ModuleDescriptor): Module = + module.asInstanceOf[Module] +} + +object IvyDependencyResolution { + def apply(ivyConfiguration: IvyConfiguration): DependencyResolution = + DependencyResolution(new IvyDependencyResolution(new IvySbt(ivyConfiguration))) +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyLibraryManagementCodec.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyLibraryManagementCodec.scala new file mode 100644 index 000000000..68bbee4d6 --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyLibraryManagementCodec.scala @@ -0,0 +1,17 @@ +package sbt.librarymanagement +package ivy + +trait IvyLibraryManagementCodec + extends sjsonnew.BasicJsonProtocol + with LibraryManagementCodec + with sbt.internal.librarymanagement.formats.GlobalLockFormat + with sbt.internal.librarymanagement.formats.LoggerFormat + with sbt.librarymanagement.ivy.formats.UpdateOptionsFormat + with IvyPathsFormats + with ResolverFormats + with ModuleConfigurationFormats + with InlineIvyConfigurationFormats + with ExternalIvyConfigurationFormats + with IvyConfigurationFormats + +object IvyLibraryManagementCodec extends IvyLibraryManagementCodec diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyPublisher.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyPublisher.scala new file mode 100644 index 000000000..ad49c218f --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/IvyPublisher.scala @@ -0,0 +1,37 @@ +package sbt +package librarymanagement +package ivy + +import sbt.internal.librarymanagement._ +import sbt.util.Logger +import java.io.File + +class IvyPublisher private[sbt] (val ivySbt: IvySbt) extends PublisherInterface { + type Module = ivySbt.Module + + override def moduleDescriptor(moduleSetting: ModuleDescriptorConfiguration): ModuleDescriptor = { + new Module(moduleSetting) + } + + override def makePomFile( + module: ModuleDescriptor, + configuration: MakePomConfiguration, + log: Logger + ): File = + IvyActions.makePomFile(toModule(module), configuration, log) + + override def publish( + module: ModuleDescriptor, + configuration: PublishConfiguration, + log: Logger + ): Unit = + IvyActions.publish(toModule(module), configuration, log) + + private[sbt] def toModule(module: ModuleDescriptor): Module = + module.asInstanceOf[Module] +} + +object IvyPublisher { + def apply(ivyConfiguration: IvyConfiguration): Publisher = + 
Publisher(new IvyPublisher(new IvySbt(ivyConfiguration))) +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/UpdateOptions.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/UpdateOptions.scala new file mode 100644 index 000000000..1fa856d0e --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/UpdateOptions.scala @@ -0,0 +1,117 @@ +package sbt.librarymanagement +package ivy + +import org.apache.ivy.plugins.resolver.DependencyResolver +import org.apache.ivy.core.settings.IvySettings +import sbt.util.Logger +import sbt.internal.librarymanagement.LMSysProp + +/** + * Represents configurable options for update task. + * While UpdateConfiguration is passed into update at runtime, + * UpdateOption is intended to be used while setting up the Ivy object. + * + * See also UpdateConfiguration in IvyActions.scala. + */ +final class UpdateOptions private[sbt] ( + // If set to CircularDependencyLevel.Error, halt the dependency resolution. + val circularDependencyLevel: CircularDependencyLevel, + // If set to true, prioritize inter-project resolver + val interProjectFirst: Boolean, + // If set to true, check all resolvers for snapshots. + val latestSnapshots: Boolean, + // If set to true, use cached resolution. + val cachedResolution: Boolean, + // If set to true, use Gigahorse + val gigahorse: Boolean, + // Extension point for an alternative resolver converter. + val resolverConverter: UpdateOptions.ResolverConverter, + // Map the unique resolver to be checked for the module ID + val moduleResolvers: Map[ModuleID, Resolver] +) { + def withCircularDependencyLevel( + circularDependencyLevel: CircularDependencyLevel + ): UpdateOptions = + copy(circularDependencyLevel = circularDependencyLevel) + def withInterProjectFirst(interProjectFirst: Boolean): UpdateOptions = + copy(interProjectFirst = interProjectFirst) + def withLatestSnapshots(latestSnapshots: Boolean): UpdateOptions = + copy(latestSnapshots = latestSnapshots) + def withCachedResolution(cachedResolution: Boolean): UpdateOptions = + copy(cachedResolution = cachedResolution) + + def withGigahorse(gigahorse: Boolean): UpdateOptions = + copy(gigahorse = gigahorse) + + /** Extention point for an alternative resolver converter. 
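+ * A converter is a `PartialFunction[(Resolver, IvySettings, Logger), DependencyResolver]`
+ * (see `UpdateOptions.ResolverConverter`); the default used by `UpdateOptions()` is
+ * `PartialFunction.empty`, which leaves the built-in conversion untouched.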
*/ + def withResolverConverter(resolverConverter: UpdateOptions.ResolverConverter): UpdateOptions = + copy(resolverConverter = resolverConverter) + + def withModuleResolvers(moduleResolvers: Map[ModuleID, Resolver]): UpdateOptions = + copy(moduleResolvers = moduleResolvers) + + private[sbt] def copy( + circularDependencyLevel: CircularDependencyLevel = this.circularDependencyLevel, + interProjectFirst: Boolean = this.interProjectFirst, + latestSnapshots: Boolean = this.latestSnapshots, + cachedResolution: Boolean = this.cachedResolution, + gigahorse: Boolean = this.gigahorse, + resolverConverter: UpdateOptions.ResolverConverter = this.resolverConverter, + moduleResolvers: Map[ModuleID, Resolver] = this.moduleResolvers + ): UpdateOptions = + new UpdateOptions( + circularDependencyLevel, + interProjectFirst, + latestSnapshots, + cachedResolution, + gigahorse, + resolverConverter, + moduleResolvers + ) + + override def toString(): String = + s"""UpdateOptions( + | circularDependencyLevel = $circularDependencyLevel, + | latestSnapshots = $latestSnapshots, + | cachedResolution = $cachedResolution + |)""".stripMargin + + override def equals(o: Any): Boolean = o match { + case o: UpdateOptions => + this.circularDependencyLevel == o.circularDependencyLevel && + this.interProjectFirst == o.interProjectFirst && + this.latestSnapshots == o.latestSnapshots && + this.cachedResolution == o.cachedResolution && + this.gigahorse == o.gigahorse && + this.resolverConverter == o.resolverConverter && + this.moduleResolvers == o.moduleResolvers + case _ => false + } + + override def hashCode: Int = { + var hash = 1 + hash = hash * 31 + this.circularDependencyLevel.## + hash = hash * 31 + this.interProjectFirst.## + hash = hash * 31 + this.latestSnapshots.## + hash = hash * 31 + this.cachedResolution.## + hash = hash * 31 + this.gigahorse.## + hash = hash * 31 + this.resolverConverter.## + hash = hash * 31 + this.moduleResolvers.## + hash + } +} + +object UpdateOptions { + type ResolverConverter = PartialFunction[(Resolver, IvySettings, Logger), DependencyResolver] + + def apply(): UpdateOptions = + new UpdateOptions( + circularDependencyLevel = CircularDependencyLevel.Warn, + interProjectFirst = true, + latestSnapshots = true, + cachedResolution = false, + gigahorse = LMSysProp.useGigahorse, + resolverConverter = PartialFunction.empty, + moduleResolvers = Map.empty + ) +} diff --git a/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/formats/UpdateOptionsFormat.scala b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/formats/UpdateOptionsFormat.scala new file mode 100644 index 000000000..6929546be --- /dev/null +++ b/lm-ivy/src/main/scala/sbt/librarymanagement/ivy/formats/UpdateOptionsFormat.scala @@ -0,0 +1,76 @@ +package sbt.librarymanagement.ivy +package formats + +import sjsonnew._ +import sbt.librarymanagement._ + +trait UpdateOptionsFormat { + self: BasicJsonProtocol + with ModuleIDFormats + with ResolverFormats + with sbt.librarymanagement.ArtifactFormats + with sbt.librarymanagement.ConfigRefFormats + with sbt.librarymanagement.ChecksumFormats + with sbt.librarymanagement.InclExclRuleFormats + with sbt.librarymanagement.CrossVersionFormats + with sbt.librarymanagement.DisabledFormats + with sbt.librarymanagement.BinaryFormats + with sbt.librarymanagement.ConstantFormats + with sbt.librarymanagement.PatchFormats + with sbt.librarymanagement.FullFormats + with sbt.librarymanagement.For3Use2_13Formats + with sbt.librarymanagement.For2_13Use3Formats + with sbt.librarymanagement.ChainedResolverFormats + with 
sbt.librarymanagement.MavenRepoFormats + with sbt.librarymanagement.MavenCacheFormats + with sbt.librarymanagement.PatternsFormats + with sbt.librarymanagement.FileConfigurationFormats + with sbt.librarymanagement.FileRepositoryFormats + with sbt.librarymanagement.URLRepositoryFormats + with sbt.librarymanagement.SshConnectionFormats + with sbt.librarymanagement.SshAuthenticationFormats + with sbt.librarymanagement.SshRepositoryFormats + with sbt.librarymanagement.SftpRepositoryFormats + with sbt.librarymanagement.PasswordAuthenticationFormats + with sbt.librarymanagement.KeyFileAuthenticationFormats => + /* This is necessary to serialize/deserialize `directResolvers`. */ + private implicit val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] = { + new sjsonnew.JsonKeyFormat[ModuleID] { + import sjsonnew.support.scalajson.unsafe._ + val moduleIdFormat: JsonFormat[ModuleID] = implicitly[JsonFormat[ModuleID]] + def write(key: ModuleID): String = + CompactPrinter(Converter.toJsonUnsafe(key)(moduleIdFormat)) + def read(key: String): ModuleID = + Converter.fromJsonUnsafe[ModuleID](Parser.parseUnsafe(key))(moduleIdFormat) + } + } + + implicit lazy val UpdateOptionsFormat: JsonFormat[UpdateOptions] = + projectFormat( + (uo: UpdateOptions) => + ( + uo.circularDependencyLevel.name, + uo.interProjectFirst, + uo.latestSnapshots, + uo.cachedResolution, + uo.gigahorse, + uo.moduleResolvers + ), + (xs: (String, Boolean, Boolean, Boolean, Boolean, Map[ModuleID, Resolver])) => + new UpdateOptions( + levels(xs._1), + xs._2, + xs._3, + xs._4, + xs._5, + PartialFunction.empty, + xs._6 + ) + ) + + private val levels: Map[String, CircularDependencyLevel] = Map( + "warn" -> CircularDependencyLevel.Warn, + "ignore" -> CircularDependencyLevel.Ignore, + "error" -> CircularDependencyLevel.Error + ) +} diff --git a/lm-ivy/src/test/resources/artifact1.jar b/lm-ivy/src/test/resources/artifact1.jar new file mode 100644 index 000000000..be043359e Binary files /dev/null and b/lm-ivy/src/test/resources/artifact1.jar differ diff --git a/lm-ivy/src/test/resources/artifact2.txt b/lm-ivy/src/test/resources/artifact2.txt new file mode 100644 index 000000000..e69de29bb diff --git a/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/ivys/ivy.xml b/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/ivys/ivy.xml new file mode 100755 index 000000000..ab045d5cb --- /dev/null +++ b/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/ivys/ivy.xml @@ -0,0 +1,23 @@ + + + + + Just a test module that publishes both a binary jar and a src jar in the 'compile' configuration. 
+ + + + + + + + + + + + + + + + + + diff --git a/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/jars/libmodule.jar b/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/jars/libmodule.jar new file mode 100644 index 000000000..b21d53c7b Binary files /dev/null and b/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/jars/libmodule.jar differ diff --git a/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/srcs/libmodule-source.jar b/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/srcs/libmodule-source.jar new file mode 100644 index 000000000..b21d53c7b Binary files /dev/null and b/lm-ivy/src/test/resources/test-ivy-repo/com.test/module-with-srcs/0.1.00/srcs/libmodule-source.jar differ diff --git a/lm-ivy/src/test/resources/test-maven-repo/com/test/test-artifact/1.0.0-SNAPSHOT/test-artifact-1.0.0-SNAPSHOT.jar b/lm-ivy/src/test/resources/test-maven-repo/com/test/test-artifact/1.0.0-SNAPSHOT/test-artifact-1.0.0-SNAPSHOT.jar new file mode 100644 index 000000000..e69de29bb diff --git a/lm-ivy/src/test/resources/test-maven-repo/com/test/test-artifact/1.0.0-SNAPSHOT/test-artifact-1.0.0-SNAPSHOT.pom b/lm-ivy/src/test/resources/test-maven-repo/com/test/test-artifact/1.0.0-SNAPSHOT/test-artifact-1.0.0-SNAPSHOT.pom new file mode 100644 index 000000000..7884c5684 --- /dev/null +++ b/lm-ivy/src/test/resources/test-maven-repo/com/test/test-artifact/1.0.0-SNAPSHOT/test-artifact-1.0.0-SNAPSHOT.pom @@ -0,0 +1,15 @@ + + + 4.0.0 + + com.test + test-artifact + 1.0.0-SNAPSHOT + scala-jar + + + UTF-8 + + + diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/AbstractEngineSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/AbstractEngineSpec.scala new file mode 100644 index 000000000..55089330d --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/AbstractEngineSpec.scala @@ -0,0 +1,24 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import verify.BasicTestSuite + +abstract class AbstractEngineSpec extends BasicTestSuite { + def cleanCache(): Unit + + def module( + moduleId: ModuleID, + deps: Vector[ModuleID], + scalaFullVersion: Option[String] + ): ModuleDescriptor + + def updateEither(module: ModuleDescriptor): Either[UnresolvedWarning, UpdateReport] + + def update(module: ModuleDescriptor) = + updateEither(module) match { + case Right(r) => r + case Left(w) => throw w.resolveException + } + + def cleanCachedResolutionCache(@deprecated("unused", "") module: ModuleDescriptor): Unit = () +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/BaseCachedResolutionSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/BaseCachedResolutionSpec.scala new file mode 100644 index 000000000..53ad0ba09 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/BaseCachedResolutionSpec.scala @@ -0,0 +1,16 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy._ + +trait BaseCachedResolutionSpec extends BaseIvySpecification { + override def module( + moduleId: ModuleID, + deps: Vector[ModuleID], + scalaFullVersion: Option[String] + ): ModuleDescriptor = { + val uo: UpdateOptions = UpdateOptions() + .withCachedResolution(true) + module(moduleId, deps, scalaFullVersion, uo, true) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/BaseIvySpecification.scala 
b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/BaseIvySpecification.scala new file mode 100644 index 000000000..b37fbe886 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/BaseIvySpecification.scala @@ -0,0 +1,136 @@ +package sbt.internal.librarymanagement + +import sbt.io.IO +import sbt.io.syntax._ +import java.io.File +import sbt.internal.util.ConsoleLogger +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy._ +import cross.CrossVersionUtil +import Configurations._ + +trait BaseIvySpecification extends AbstractEngineSpec { + def currentBase: File = new File(".") + def label: String = this.getClass.getSimpleName + def currentTarget: File = currentBase / "target" / label / "ivyhome" + def currentManaged: File = currentBase / "target" / label / "lib_managed" + def currentDependency: File = currentBase / "target" / label / "dependency" + def defaultModuleId: ModuleID = + ModuleID("com.example", "foo", "0.1.0").withConfigurations(Some("compile")) + + def scala2_13 = "2.13.10" + + lazy val log = ConsoleLogger() + def lmEngine(uo: UpdateOptions = UpdateOptions()): DependencyResolution = + IvyDependencyResolution(mkIvyConfiguration(uo)) + + def configurations = Vector(Compile, Test, Runtime) + + def module( + moduleId: ModuleID, + deps: Vector[ModuleID], + scalaFullVersion: Option[String] + ): ModuleDescriptor = { + module(moduleId, deps, scalaFullVersion, UpdateOptions(), true) + } + + def module( + moduleId: ModuleID, + deps: Vector[ModuleID], + scalaFullVersion: Option[String], + uo: UpdateOptions = UpdateOptions(), + overrideScalaVersion: Boolean = true, + appendSbtCrossVersion: Boolean = false, + platform: Option[String] = None, + ): IvySbt#Module = { + val scalaModuleInfo = scalaFullVersion map { fv => + ScalaModuleInfo( + scalaFullVersion = fv, + scalaBinaryVersion = CrossVersionUtil.binaryScalaVersion(fv), + configurations = Vector.empty, + checkExplicit = true, + filterImplicit = false, + overrideScalaVersion = overrideScalaVersion + ) + .withPlatform(platform) + } + + val moduleSetting: ModuleSettings = ModuleDescriptorConfiguration(moduleId, ModuleInfo("foo")) + .withDependencies(deps) + .withConfigurations(configurations) + .withScalaModuleInfo(scalaModuleInfo) + val ivySbt = new IvySbt(mkIvyConfiguration(uo)) + new ivySbt.Module(moduleSetting, appendSbtCrossVersion) + } + + def resolvers: Vector[Resolver] = Vector(Resolver.mavenCentral) + + def chainResolver = ChainedResolver("sbt-chain", resolvers) + + def mkIvyConfiguration(uo: UpdateOptions): IvyConfiguration = { + val moduleConfs = Vector(ModuleConfiguration("*", chainResolver)) + val resCacheDir = currentTarget / "resolution-cache" + InlineIvyConfiguration() + .withPaths(IvyPaths(currentBase.toString, Some(currentTarget.toString))) + .withResolvers(resolvers) + .withModuleConfigurations(moduleConfs) + .withChecksums(Vector.empty) + .withResolutionCacheDir(resCacheDir) + .withLog(log) + .withUpdateOptions(uo) + } + + def makeUpdateConfiguration( + offline: Boolean, + metadataDirectory: Option[File] + ): UpdateConfiguration = { + val retrieveConfig = RetrieveConfiguration() + .withRetrieveDirectory(currentManaged) + .withOutputPattern(Resolver.defaultRetrievePattern) + .withSync(false) + + UpdateConfiguration() + .withRetrieveManaged(retrieveConfig) + .withLogging(UpdateLogging.Full) + .withOffline(offline) + .withMetadataDirectory(metadataDirectory) + } + + def updateEither(module: ModuleDescriptor): Either[UnresolvedWarning, UpdateReport] = + ivyUpdateEither(module) + + def 
ivyUpdateEither(module: ModuleDescriptor): Either[UnresolvedWarning, UpdateReport] = { + module match { + case m: IvySbt#Module => + val config = makeUpdateConfiguration(false, Some(currentDependency)) + IvyActions.updateEither(m, config, UnresolvedWarningConfiguration(), log) + } + } + + def cleanCache(): Unit = cleanIvyCache() + def cleanIvyCache(): Unit = IO.delete(currentTarget / "cache") + + override def cleanCachedResolutionCache(module: ModuleDescriptor): Unit = { + module match { + case m: IvySbt#Module => IvyActions.cleanCachedResolutionCache(m, log) + } + } + + def ivyUpdate(module: ModuleDescriptor): UpdateReport = + update(module) + + def mkPublishConfiguration( + resolver: Resolver, + artifacts: Map[Artifact, File] + ): PublishConfiguration = { + PublishConfiguration() + .withResolverName(resolver.name) + .withArtifacts(artifacts.toVector) + .withChecksums(Vector.empty) + .withOverwrite(true) + } + + def ivyPublish(module: IvySbt#Module, config: PublishConfiguration) = { + IvyActions.publish(module, config, log) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CachedResolutionSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CachedResolutionSpec.scala new file mode 100644 index 000000000..dbb23fbe0 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CachedResolutionSpec.scala @@ -0,0 +1,10 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ + +class CachedResolutionSpec extends ResolutionSpec with BaseCachedResolutionSpec { + override val resolvers = Vector( + Resolver.mavenCentral, + Resolver.sbtPluginRepo("releases") + ) +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ComponentManagerTest.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ComponentManagerTest.scala new file mode 100644 index 000000000..5f85748e9 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ComponentManagerTest.scala @@ -0,0 +1,165 @@ +package sbt.internal.librarymanagement + +import verify.BasicTestSuite + +// TODO - We need to re-enable this test. Right now, we don't have a "stub" launcher for this. +// This is testing something which uses a launcher interface, but was grabbing the underlying class directly +// when it really should, instead, be stubbing out the underlying class.
+ +object ComponentManagerTest extends BasicTestSuite { + val TestID = "manager-test" + + /* + test( + "Component manager should throw an exception if 'file' is called for a non-existing component" + ) { + withManager { manager => + intercept[InvalidComponent] { + manager.file(TestID)(Fail) + () + } + } + } + + test("it should throw an exception if 'file' is called for an empty component") { + withManager { manager => + manager.define(TestID, Nil) + intercept[InvalidComponent] { + manager.file(TestID)(Fail) + () + } + } + } + + test("it should return the file for a single-file component") { + withManager { manager => + val hash = defineFile(manager, TestID, "a") + assert(checksum(manager.file(TestID)(Fail)) == hash) + } + } + + test("it should throw an exception if 'file' is called for multi-file component") { + withManager { manager => + defineFiles(manager, TestID, "a", "b") + intercept[InvalidComponent] { + manager.file(TestID)(Fail) + () + } + } + } + + test("it should return the files for a multi-file component") { + withManager { manager => + val hashes = defineFiles(manager, TestID, "a", "b") + assert(checksum(manager.files(TestID)(Fail)).toSet == hashes.toSet) + } + } + + test("it should return the files for a single-file component") { + withManager { manager => + val hashes = defineFiles(manager, TestID, "a") + assert(checksum(manager.files(TestID)(Fail)).toSet == hashes.toSet) + } + } + + test("it should throw an exception if 'files' is called for a non-existing component") { + withManager { manager => + intercept[InvalidComponent] { + manager.files(TestID)(Fail) + () + } + } + } + + test("it should properly cache a file and then retrieve it to an unresolved component") { + withTemporaryDirectory { ivyHome => + withManagerHome(ivyHome) { definingManager => + val hash = defineFile(definingManager, TestID, "a") + try { + definingManager.cache(TestID) + withManagerHome(ivyHome) { usingManager => + assert(checksum(usingManager.file(TestID)(Fail)) == hash) + } + } finally { + definingManager.clearCache(TestID) + } + } + } + } + + private def checksum(files: Iterable[File]): Seq[String] = files.map(checksum).toSeq + private def checksum(file: File): String = + if (file.exists) ChecksumHelper.computeAsString(file, "sha1") else "" + private def defineFile(manager: ComponentManager, id: String, name: String): String = + createFile(manager, id, name)(checksum) + private def defineFiles(manager: ComponentManager, id: String, names: String*): Seq[String] = + createFiles(manager, id, names: _*)(checksum) + private def createFile[T](manager: ComponentManager, id: String, name: String)(f: File => T): T = + createFiles(manager, id, name)(files => f(files.toList.head)) + private def createFiles[T](manager: ComponentManager, id: String, names: String*)( + f: Seq[File] => T + ): T = + withTemporaryDirectory { dir => + val files = names.map(name => new File(dir, name)) + files.foreach(writeRandomContent) + manager.define(id, files) + f(files) + } + private def writeRandomContent(file: File) = IO.write(file, randomString) + private def randomString = "asdf" + private def withManager[T](f: ComponentManager => T): T = + withTemporaryDirectory { ivyHome => + withManagerHome(ivyHome)(f) + } + + private def withManagerHome[T](ivyHome: File)(f: ComponentManager => T): T = + TestLogger { logger => + withTemporaryDirectory { temp => + // The actual classes we'll use at runtime. 
+ // val mgr = new ComponentManager(xsbt.boot.Locks, new xsbt.boot.ComponentProvider(temp, true), Some(ivyHome), logger) + + // A stub component manager + object provider extends ComponentProvider { + override def componentLocation(id: String): File = new File(temp, id) + override def lockFile(): File = { + IO.createDirectory(temp) + new java.io.File(temp, "sbt.components.lock") + } + override def defineComponent(id: String, files: Array[File]): Unit = { + val location = componentLocation(id) + if (location.exists) + throw new RuntimeException( + s"Cannot redefine component. ID: $id, files: ${files.mkString(",")}" + ) + else { + IO.copy(files.map { f => + f -> new java.io.File(location, f.getName) + }) + () + } + } + override def addToComponent(id: String, files: Array[File]): Boolean = { + val location = componentLocation(id) + IO.copy(files.map { f => + f -> new java.io.File(location, f.getName) + }) + true + } + override def component(id: String): Array[File] = + Option(componentLocation(id).listFiles()) + .map(_.filter(_.isFile)) + .getOrElse(Array.empty) + } + // A stubbed locking API. + object locks extends xsbti.GlobalLock { + override def apply[U](lockFile: File, run: Callable[U]): U = { + // TODO - do we need to lock? + run.call() + } + } + val mgr = new ComponentManager(locks, provider, Some(ivyHome), logger) + f(mgr) + } + } + */ +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ConflictWarningSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ConflictWarningSpec.scala new file mode 100644 index 000000000..bc3f05ebc --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ConflictWarningSpec.scala @@ -0,0 +1,40 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ + +object ConflictWarningSpec extends BaseIvySpecification { + + test("it should print out message about the cross-Scala conflict") { + var found = false + val deps = Vector( + `scala2.13.6`, + `cats-effect3.1.1`, + `cats-core2.6.1`.cross(CrossVersion.for3Use2_13), + ) + val m = module(defaultModuleId, deps, Some("3.0.1-RC2")) + val report = ivyUpdate(m) + val w = ConflictWarning.default("foo") + + try { + ConflictWarning(w, report, log) + } catch { + case e: Throwable => + found = true + assert( + e.getMessage.linesIterator.toList.head + .startsWith("Conflicting cross-version suffixes in") + ) + } + if (!found) { + sys.error("conflict warning was expected, but didn't happen sbt/sbt#6578") + } + } + + lazy val `scala2.13.6` = + ModuleID("org.scala-lang", "scala-library", "2.13.6").withConfigurations(Some("compile")) + lazy val `cats-effect3.1.1` = + ("org.typelevel" %% "cats-effect" % "3.1.1").withConfigurations(Some("compile")) + lazy val `cats-core2.6.1` = + ("org.typelevel" %% "cats-core" % "2.6.1").withConfigurations(Some("compile")) +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CredentialsSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CredentialsSpec.scala new file mode 100644 index 000000000..101991df4 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CredentialsSpec.scala @@ -0,0 +1,50 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement.ivy.Credentials + +import java.io.File +import java.nio.file.Files + +import org.scalatest.funsuite.AnyFunSuite + +class CredentialsSpec extends AnyFunSuite { + + test("load credential file without authentication") { + val credentialsFile = File.createTempFile("credentials", "tmp") + 
+ val content = + """|host=example.org + |user=username + |password=password""".stripMargin + + Files.write(credentialsFile.toPath(), content.getBytes()) + + val Right(credentials) = Credentials.loadCredentials(credentialsFile): @unchecked + + assert(credentials.realm == null) + + credentialsFile.delete() + } + + test("DirectCredentials.toString") { + assert( + Credentials( + realm = null, + host = "example.org", + userName = "username", + passwd = "password" + ).toString == + """DirectCredentials(null, "example.org", "username", ****)""" + ) + + assert( + Credentials( + realm = "realm", + host = "example.org", + userName = "username", + passwd = "password" + ).toString == + """DirectCredentials("realm", "example.org", "username", ****)""" + ) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CustomPomParserTest.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CustomPomParserTest.scala new file mode 100644 index 000000000..e9d5e8271 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/CustomPomParserTest.scala @@ -0,0 +1,42 @@ +package sbt.internal.librarymanagement + +import java.io.File +import org.apache.ivy.core.module.descriptor.{ Artifact => IvyArtifact } +import org.apache.ivy.core.module.id.ModuleRevisionId +import org.apache.ivy.core.resolve.ResolveOptions +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyPaths } +import sbt.io.IO.withTemporaryDirectory +import sbt.internal.util.ConsoleLogger +import verify.BasicTestSuite + +object CustomPomParserTest extends BasicTestSuite { + test( + "CustomPomParser should resolve an artifact with packaging 'scala-jar' as a regular jar file." + ) { + val log = ConsoleLogger() + withTemporaryDirectory { cacheDir => + val repoUrl = getClass.getResource("/test-maven-repo") + val local = MavenRepository("Test Repo", repoUrl.toExternalForm) + val paths = IvyPaths(new File(".").toString, Some(cacheDir.toString)) + val conf = InlineIvyConfiguration() + .withPaths(paths) + .withResolvers(Vector(local)) + .withLog(log) + val ivySbt = new IvySbt(conf) + val resolveOpts = new ResolveOptions().setConfs(Array("default")) + val mrid = ModuleRevisionId.newInstance("com.test", "test-artifact", "1.0.0-SNAPSHOT") + + val resolveReport = ivySbt.withIvy(log) { ivy => + ivy.resolve(mrid, resolveOpts, true) + } + + assert(!resolveReport.hasError) + assert(resolveReport.getArtifacts.size() == 1) + val artifact: IvyArtifact = + resolveReport.getArtifacts.asInstanceOf[java.util.List[IvyArtifact]].get(0) + assert(artifact.getModuleRevisionId == mrid) + assert(artifact.getExt == "jar") + } + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/DMSerializationSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/DMSerializationSpec.scala new file mode 100644 index 000000000..b380800fc --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/DMSerializationSpec.scala @@ -0,0 +1,110 @@ +package sbt.internal.librarymanagement + +import java.net.URI +import java.io.File + +import sbt.librarymanagement._ +import sjsonnew.shaded.scalajson.ast.unsafe._ +import sjsonnew._, support.scalajson.unsafe._ +import LibraryManagementCodec._ +import verify.BasicTestSuite + +object DMSerializationSpec extends BasicTestSuite { + test("CrossVersion.full should roundtrip") { + roundtripStr(CrossVersion.full: CrossVersion) + } + + test("CrossVersion.binary should roundtrip") { + roundtripStr(CrossVersion.binary: CrossVersion) + } + + 
test("CrossVersion.for3Use2_13 should roundtrip") { + roundtripStr(CrossVersion.for3Use2_13: CrossVersion) + } + + test("CrossVersion.for2_13Use3 with prefix should roundtrip") { + roundtripStr(CrossVersion.for2_13Use3With("_sjs1", ""): CrossVersion) + } + + test("CrossVersion.Disabled should roundtrip") { + roundtrip(Disabled(): CrossVersion) + } + + test("""Artifact("foo") should roundtrip""") { + roundtrip(Artifact("foo")) + } + + test("""Artifact("foo", "sources") should roundtrip""") { + roundtrip(Artifact("foo", "sources")) + } + + test("""Artifact.pom("foo") should roundtrip""") { + roundtrip(Artifact.pom("foo")) + } + + test("""Artifact("foo", url("http://example.com/")) should roundtrip""") { + roundtrip(Artifact("foo", new URI("http://example.com/"))) + } + + test("""Artifact("foo").extra(("key", "value")) should roundtrip""") { + roundtrip(Artifact("foo").extra(("key", "value"))) + } + + test("""ModuleID("org", "name", "1.0") should roundtrip""") { + roundtrip(ModuleID("org", "name", "1.0")) + } + + test("""ModuleReport(ModuleID("org", "name", "1.0"), Nil, Nil) should roundtrip""") { + roundtripStr(ModuleReport(ModuleID("org", "name", "1.0"), Vector.empty, Vector.empty)) + } + + test("Organization artifact report should roundtrip") { + roundtripStr(organizationArtifactReportExample) + } + + test("Configuration report should roundtrip") { + roundtripStr(configurationReportExample) + } + + test("Update report should roundtrip") { + roundtripStr(updateReportExample) + } + + lazy val updateReportExample = + UpdateReport( + new File("./foo"), + Vector(configurationReportExample), + UpdateStats(0, 0, 0, false), + Map("./foo" -> 0) + ) + lazy val configurationReportExample = + ConfigurationReport( + ConfigRef("compile"), + Vector(moduleReportExample), + Vector(organizationArtifactReportExample) + ) + lazy val organizationArtifactReportExample = + OrganizationArtifactReport("org", "name", Vector(moduleReportExample)) + lazy val moduleReportExample = + ModuleReport(ModuleID("org", "name", "1.0"), Vector.empty, Vector.empty) + + def roundtrip[A: JsonReader: JsonWriter](a: A): Unit = + roundtripBuilder(a) { (x1, x2) => + assert(x1 == x2) + } + + def roundtripStr[A: JsonReader: JsonWriter](a: A): Unit = + roundtripBuilder(a) { (x1, x2) => + assert(x1.toString == x2.toString) + } + + def roundtripBuilder[A: JsonReader: JsonWriter](a: A)(f: (A, A) => Unit): Unit = { + val json = isoString to (Converter toJsonUnsafe a) + println(json) + val obj = Converter.fromJsonUnsafe[A](isoString from json) + f(a, obj) + } + + implicit val isoString: IsoString[JValue] = + IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe) +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/EvictionErrorSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/EvictionErrorSpec.scala new file mode 100644 index 000000000..2dd357978 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/EvictionErrorSpec.scala @@ -0,0 +1,170 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.internal.librarymanagement.cross.CrossVersionUtil +import sbt.librarymanagement.syntax._ +import sbt.util.Level + +object EvictionErrorSpec extends BaseIvySpecification { + // This is a specification to check the eviction errors + + import TestShowLines.* + import EvictionError.given + + test("Eviction error should detect binary incompatible Scala libraries") { + val deps = Vector(`scala2.10.4`, `akkaActor2.1.4`, `akkaActor2.3.0`) + val m = module(defaultModuleId, deps, 
Some("2.10.4")) + val report = ivyUpdate(m) + assert(EvictionError(report, m, oldAkkaPvp).incompatibleEvictions.size == 1) + } + + test("it should print out message about the eviction") { + val deps = Vector(`scala2.10.4`, `akkaActor2.1.4`, `akkaActor2.3.0`) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionError(report, m, oldAkkaPvp).lines == + List( + "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* com.typesafe.akka:akka-actor_2.10:2.3.0 (pvp) is selected over 2.1.4", + "\t +- com.example:foo:0.1.0 (depends on 2.1.4)", + "" + ) + ) + } + + test("it should print out message including the transitive dependencies") { + val deps = Vector(`scala2.10.4`, `bananaSesame0.4`, `akkaRemote2.3.4`) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionError(report, m, oldAkkaPvp).lines == + List( + "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* com.typesafe.akka:akka-actor_2.10:2.3.4 (pvp) is selected over 2.1.4", + "\t +- com.typesafe.akka:akka-remote_2.10:2.3.4 (depends on 2.3.4)", + "\t +- org.w3:banana-rdf_2.10:0.4 (depends on 2.1.4)", + "\t +- org.w3:banana-sesame_2.10:0.4 (depends on 2.1.4)", + "" + ) + ) + } + + test("it should be able to emulate eviction warnings") { + val deps = Vector(`scala2.10.4`, `bananaSesame0.4`, `akkaRemote2.3.4`) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionError(report, m, Nil, "pvp", "early-semver", Level.Warn).toAssumedLines == + List( + "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* com.typesafe.akka:akka-actor_2.10:2.3.4 (pvp?) 
is selected over 2.1.4", + "\t +- com.typesafe.akka:akka-remote_2.10:2.3.4 (depends on 2.3.4)", + "\t +- org.w3:banana-rdf_2.10:0.4 (depends on 2.1.4)", + "\t +- org.w3:banana-sesame_2.10:0.4 (depends on 2.1.4)", + "" + ) + ) + } + + test("it should detect Semantic Versioning violations") { + val deps = Vector(`scala2.13.3`, `http4s0.21.11`, `cats-effect3.0.0-M4`) + val m = module(defaultModuleId, deps, Some("2.13.3")) + val report = ivyUpdate(m) + assert( + EvictionError(report, m, Nil).lines == + List( + "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* org.typelevel:cats-effect_2.13:3.0.0-M4 (early-semver) is selected over {2.0.0, 2.2.0}", + "\t +- com.example:foo:0.1.0 (depends on 3.0.0-M4)", + "\t +- co.fs2:fs2-core_2.13:2.4.5 (depends on 2.2.0)", + "\t +- org.http4s:http4s-core_2.13:0.21.11 (depends on 2.2.0)", + "\t +- io.chrisdavenport:vault_2.13:2.0.0 (depends on 2.0.0)", + "\t +- io.chrisdavenport:unique_2.13:2.0.0 (depends on 2.0.0)", + "" + ) + ) + } + + test("it should selectively allow opt-out from the error") { + val deps = Vector(`scala2.13.3`, `http4s0.21.11`, `cats-effect3.0.0-M4`) + val m = module(defaultModuleId, deps, Some("2.13.3")) + val report = ivyUpdate(m) + val overrideRules = List("org.typelevel" %% "cats-effect" % "always") + assert(EvictionError(report, m, overrideRules).incompatibleEvictions.isEmpty) + } + + test("it should selectively allow opt-out from the error despite assumed scheme") { + val deps = Vector(`scala2.12.17`, `akkaActor2.6.0`, `swagger-akka-http1.4.0`) + val m = module(defaultModuleId, deps, Some("2.12.17")) + val report = ivyUpdate(m) + val overrideRules = List("org.scala-lang.modules" %% "scala-java8-compat" % "always") + assert( + EvictionError( + report = report, + module = m, + schemes = overrideRules, + assumedVersionScheme = "early-semver", + assumedVersionSchemeJava = "always", + assumedEvictionErrorLevel = Level.Error, + ).assumedIncompatibleEvictions.isEmpty + ) + } + + // older Akka was on pvp + def oldAkkaPvp = List("com.typesafe.akka" % "*" % "pvp") + + lazy val `akkaActor2.1.4` = + ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + lazy val `akkaActor2.3.0` = + ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + lazy val `akkaActor2.6.0` = + ModuleID("com.typesafe.akka", "akka-actor", "2.6.0").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + lazy val `scala2.10.4` = + ModuleID("org.scala-lang", "scala-library", "2.10.4").withConfigurations(Some("compile")) + lazy val `scala2.12.17` = + ModuleID("org.scala-lang", "scala-library", "2.12.17").withConfigurations(Some("compile")) + lazy val `scala2.13.3` = + ModuleID("org.scala-lang", "scala-library", "2.13.3").withConfigurations(Some("compile")) + lazy val `bananaSesame0.4` = + ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary // uses akka-actor 2.1.4 + lazy val `akkaRemote2.3.4` = + ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary // uses akka-actor 2.3.4 + lazy val `http4s0.21.11` = + ("org.http4s" %% "http4s-blaze-server" % "0.21.11").withConfigurations(Some("compile")) + // https://repo1.maven.org/maven2/org/typelevel/cats-effect_2.13/3.0.0-M4/cats-effect_2.13-3.0.0-M4.pom + // is published with early-semver + lazy val 
`cats-effect3.0.0-M4` = + ("org.typelevel" %% "cats-effect" % "3.0.0-M4").withConfigurations(Some("compile")) + lazy val `cats-parse0.1.0` = + ("org.typelevel" %% "cats-parse" % "0.1.0").withConfigurations(Some("compile")) + lazy val `cats-parse0.2.0` = + ("org.typelevel" %% "cats-parse" % "0.2.0").withConfigurations(Some("compile")) + lazy val `swagger-akka-http1.4.0` = + ("com.github.swagger-akka-http" %% "swagger-akka-http" % "1.4.0") + .withConfigurations(Some("compile")) + + def dummyScalaModuleInfo(v: String): ScalaModuleInfo = + ScalaModuleInfo( + scalaFullVersion = v, + scalaBinaryVersion = CrossVersionUtil.binaryScalaVersion(v), + configurations = Vector.empty, + checkExplicit = true, + filterImplicit = false, + overrideScalaVersion = true + ) +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/EvictionWarningSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/EvictionWarningSpec.scala new file mode 100644 index 000000000..0dedc0d14 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/EvictionWarningSpec.scala @@ -0,0 +1,364 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.internal.librarymanagement.cross.CrossVersionUtil +import sbt.librarymanagement.syntax._ + +object EvictionWarningSpec extends BaseIvySpecification { + // This is a specification to check the eviction warnings + + import TestShowLines.* + + def scalaVersionDeps = Vector(scala2102, akkaActor230) + + test("Eviction of non-overridden scala-library whose scalaVersion should be detected") { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"), overrideScalaVersion = false) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).scalaEvictions.size == 1) + } + + test("it should not be detected if it's disabled") { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"), overrideScalaVersion = false) + val report = ivyUpdate(m) + assert( + EvictionWarning( + m, + fullOptions.withWarnScalaVersionEviction(false), + report + ).scalaEvictions.size == 0 + ) + } + + test("it should print out message about the eviction") { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"), overrideScalaVersion = false) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, fullOptions.withShowCallers(false), report).lines == + List( + "Scala version was updated by one of library dependencies:", + "\t* org.scala-lang:scala-library:2.10.3 is selected over 2.10.2", + "", + "To force scalaVersion, add the following:", + "\tscalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(true)))" + ) + ) + } + + test("it should print out message about the eviction with callers") { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"), overrideScalaVersion = false) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, fullOptions, report).lines == + List( + "Scala version was updated by one of library dependencies:", + "\t* org.scala-lang:scala-library:2.10.3 is selected over 2.10.2", + "\t +- com.typesafe.akka:akka-actor_2.10:2.3.0 (depends on 2.10.3)", + "\t +- com.example:foo:0.1.0 (depends on 2.10.2)", + "", + "To force scalaVersion, add the following:", + "\tscalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(true)))" + ) + ) + } + + test("it should print out summary about the eviction if warn eviction summary enabled") { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2"), overrideScalaVersion = false) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, 
EvictionWarningOptions.summary, report).lines == + List( + "There may be incompatibilities among your library dependencies; run 'evicted' to see detailed eviction warnings." + ) + ) + } + + test( + """Non-eviction of overridden scala-library whose scalaVersion should "not be detected if it's enabled"""" + ) { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).scalaEvictions.size == 0) + } + + test("it should not be detected if it's disabled") { + val m = module(defaultModuleId, scalaVersionDeps, Some("2.10.2")) + val report = ivyUpdate(m) + assert( + EvictionWarning( + m, + fullOptions.withWarnScalaVersionEviction(false), + report + ).scalaEvictions.size == 0 + ) + } + + test( + """Including two (suspect) binary incompatible Java libraries to direct dependencies should be detected as eviction""" + ) { + val m = module(defaultModuleId, javaLibDirectDeps, Some("2.10.3")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).reportedEvictions.size == 1) + } + + test("it should not be detected if it's disabled") { + val m = module(defaultModuleId, javaLibDirectDeps, Some("2.10.3")) + val report = ivyUpdate(m) + assert( + EvictionWarning( + m, + fullOptions + .withWarnDirectEvictions(false) + .withWarnTransitiveEvictions(false), + report + ).reportedEvictions.size == 0 + ) + } + + test("it should print out message about the eviction") { + val m = module(defaultModuleId, javaLibDirectDeps, Some("2.10.3")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, fullOptions, report).lines == + List( + "Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* commons-io:commons-io:2.4 is selected over 1.4", + "\t +- com.example:foo:0.1.0 (depends on 1.4)", + "" + ) + ) + } + + test("it should print out message about the eviction with callers") { + val m = module(defaultModuleId, javaLibDirectDeps, Some("2.10.3")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, fullOptions.withShowCallers(true), report).lines == + List( + "Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* commons-io:commons-io:2.4 is selected over 1.4", + "\t +- com.example:foo:0.1.0 (depends on 1.4)", + "" + ) + ) + } + + test("it should print out summary about the eviction if warn eviction summary enabled") { + val m = module(defaultModuleId, javaLibDirectDeps, Some("2.10.3")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, EvictionWarningOptions.summary, report).lines == + List( + "There may be incompatibilities among your library dependencies; run 'evicted' to see detailed eviction warnings." 
+ ) + ) + } + + test( + """Including two (suspect) binary compatible Java libraries to direct dependencies should not be detected as eviction""" + ) { + val deps = Vector(commonsIo14, commonsIo13) + val m = module(defaultModuleId, deps, Some("2.10.3")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).reportedEvictions.size == 0) + } + + test("it should not print out message about the eviction") { + val deps = Vector(commonsIo14, commonsIo13) + val m = module(defaultModuleId, deps, Some("2.10.3")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).lines == Nil) + } + + test( + """Including two (suspect) transitively binary incompatible Java libraries to direct dependencies should be detected as eviction""" + ) { + val m = module(defaultModuleId, javaLibTransitiveDeps, Some("2.10.3")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).reportedEvictions.size == 1) + } + + test( + """Including two (suspect) binary incompatible Scala libraries to direct dependencies should be detected as eviction""" + ) { + val deps = Vector(scala2104, akkaActor214, akkaActor234) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).reportedEvictions.size == 1) + } + + test("it should print out message about the eviction") { + val deps = Vector(scala2104, akkaActor214, akkaActor234) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, fullOptions, report).lines == + List( + "Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* com.typesafe.akka:akka-actor_2.10:2.3.4 is selected over 2.1.4", + "\t +- com.example:foo:0.1.0 (depends on 2.1.4)", + "" + ) + ) + } + + test("it should print out summary about the eviction if warn eviction summary enabled") { + val deps = Vector(scala2104, akkaActor214, akkaActor234) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, EvictionWarningOptions.summary, report).lines == + List( + "There may be incompatibilities among your library dependencies; run 'evicted' to see detailed eviction warnings." 
+ ) + ) + } + + test( + """Including two (suspect) binary compatible Scala libraries to direct dependencies should not be detected as eviction""" + ) { + val deps = Vector(scala2104, akkaActor230, akkaActor234) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).reportedEvictions.size == 0) + } + + test("it should not print out message about the eviction") { + val deps = Vector(scala2104, akkaActor230, akkaActor234) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).lines == Nil) + } + + test("it should not print out summary about the eviction even if warn eviction summary enabled") { + val deps = Vector(scala2104, akkaActor230, akkaActor234) + val m = module(defaultModuleId, deps, Some("2.10.4")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, EvictionWarningOptions.summary, report).lines == Nil) + } + + test( + """Including two (suspect) transitively binary incompatible Scala libraries to direct dependencies should be detected as eviction""" + ) { + val m = module(defaultModuleId, scalaLibTransitiveDeps, Some("2.10.4")) + val report = ivyUpdate(m) + assert(EvictionWarning(m, fullOptions, report).reportedEvictions.size == 1) + } + + test("it should print out message about the eviction if it's enabled") { + val m = module(defaultModuleId, scalaLibTransitiveDeps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, fullOptions, report).lines == + List( + "Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:", + "", + "\t* com.typesafe.akka:akka-actor_2.10:2.3.4 is selected over 2.1.4", + "\t +- com.typesafe.akka:akka-remote_2.10:2.3.4 (depends on 2.3.4)", + "\t +- org.w3:banana-rdf_2.10:0.4 (depends on 2.1.4)", + "\t +- org.w3:banana-sesame_2.10:0.4 (depends on 2.1.4)", + "" + ) + ) + } + + test("it should print out summary about the eviction if warn eviction summary enabled") { + val m = module(defaultModuleId, scalaLibTransitiveDeps, Some("2.10.4")) + val report = ivyUpdate(m) + assert( + EvictionWarning(m, EvictionWarningOptions.summary, report).lines == + List( + "There may be incompatibilities among your library dependencies; run 'evicted' to see detailed eviction warnings." 
+ ) + ) + } + + test("Comparing sbt 0.x should use Second Segment Variation semantics") { + val m1 = "org.scala-sbt" % "util-logging" % "0.13.16" + val m2 = "org.scala-sbt" % "util-logging" % "0.13.1" + assert( + !EvictionWarningOptions.defaultGuess((m1, Option(m2), Option(dummyScalaModuleInfo("2.10.6")))) + ) + } + + test("Comparing sbt 1.x should use Semantic Versioning semantics") { + val m1 = "org.scala-sbt" % "util-logging_2.12" % "1.0.0" + val m2 = "org.scala-sbt" % "util-logging_2.12" % "1.1.0" + assert( + EvictionWarningOptions + .defaultGuess((m1, Option(m2), Option(dummyScalaModuleInfo("2.12.4")))) + ) + } + + test("Comparing 2.13 libraries with pvp under Scala 3.0.0-M3 should work") { + val m1 = "org.scodec" % "scodec-bits_2.13" % "1.1.21" + val m2 = "org.scodec" % "scodec-bits_2.13" % "1.1.22" + assert( + EvictionWarningOptions + .evalPvp((m1, Option(m2), Option(dummyScalaModuleInfo("3.0.0-M3")))) + ) + } + + test("Comparing 2.13 libraries with guessSecondSegment under Scala 3.0.0-M3 should work") { + val m1 = "org.scodec" % "scodec-bits_2.13" % "1.1.21" + val m2 = "org.scodec" % "scodec-bits_2.13" % "1.1.22" + assert( + EvictionWarningOptions + .guessSecondSegment((m1, Option(m2), Option(dummyScalaModuleInfo("3.0.0-M3")))) + ) + } + + def akkaActor214 = + ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + def akkaActor230 = + ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + def akkaActor234 = + ModuleID("com.typesafe.akka", "akka-actor", "2.3.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + def scala2102 = + ModuleID("org.scala-lang", "scala-library", "2.10.2").withConfigurations(Some("compile")) + def scala2103 = + ModuleID("org.scala-lang", "scala-library", "2.10.3").withConfigurations(Some("compile")) + def scala2104 = + ModuleID("org.scala-lang", "scala-library", "2.10.4").withConfigurations(Some("compile")) + def commonsIo13 = ModuleID("commons-io", "commons-io", "1.3").withConfigurations(Some("compile")) + def commonsIo14 = ModuleID("commons-io", "commons-io", "1.4").withConfigurations(Some("compile")) + def commonsIo24 = ModuleID("commons-io", "commons-io", "2.4").withConfigurations(Some("compile")) + def bnfparser10 = + ModuleID("ca.gobits.bnf", "bnfparser", "1.0").withConfigurations( + Some("compile") + ) // uses commons-io 2.4 + def unfilteredUploads080 = + ModuleID("net.databinder", "unfiltered-uploads", "0.8.0").withConfigurations( + Some("compile") + ) cross CrossVersion.binary // uses commons-io 1.4 + def bananaSesame04 = + ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary // uses akka-actor 2.1.4 + def akkaRemote234 = + ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary // uses akka-actor 2.3.4 + + def fullOptions = EvictionWarningOptions.full + def javaLibDirectDeps = Vector(commonsIo14, commonsIo24) + def javaLibTransitiveDeps = Vector(unfilteredUploads080, bnfparser10) + def scalaLibTransitiveDeps = Vector(scala2104, bananaSesame04, akkaRemote234) + def dummyScalaModuleInfo(v: String): ScalaModuleInfo = + ScalaModuleInfo( + scalaFullVersion = v, + scalaBinaryVersion = CrossVersionUtil.binaryScalaVersion(v), + configurations = Vector.empty, + checkExplicit = true, + filterImplicit = false, + overrideScalaVersion = true + ) +} diff --git 
a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/FakeResolverSpecification.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/FakeResolverSpecification.scala new file mode 100644 index 000000000..6ed8f1d7a --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/FakeResolverSpecification.scala @@ -0,0 +1,89 @@ +package sbt +package internal +package librarymanagement + +import java.io.File + +import sbt.librarymanagement.{ ModuleID, RawRepository, Resolver, UpdateReport, ResolveException } + +object FakeResolverSpecification extends BaseIvySpecification { + import FakeResolver._ + + val myModule = + ModuleID("org.example", "my-module", "0.0.1-SNAPSHOT").withConfigurations(Some("compile")) + val example = ModuleID("com.example", "example", "1.0.0").withConfigurations(Some("compile")) + val anotherExample = + ModuleID("com.example", "another-example", "1.0.0").withConfigurations(Some("compile")) + val nonExisting = + ModuleID("com.example", "does-not-exist", "1.2.3").withConfigurations(Some("compile")) + + test("The FakeResolver should find modules with only one artifact") { + val m = getModule(myModule) + val report = ivyUpdate(m) + val allFiles = getAllFiles(report) + + assert(report.allModules.length == 1) + assert(report.allModuleReports.length == 1) + assert(report.configurations.length == 3) + assert(allFiles.toSet.size == 1) + assert(allFiles(1).getName == "artifact1-0.0.1-SNAPSHOT.jar") + } + + test("it should find modules with more than one artifact") { + val m = getModule(example) + val report = ivyUpdate(m) + val allFiles = getAllFiles(report).toSet + + assert(report.allModules.length == 1) + assert(report.allModuleReports.length == 1) + assert(report.configurations.length == 3) + assert(allFiles.toSet.size == 2) + assert(allFiles.map(_.getName) == Set("artifact1-1.0.0.jar", "artifact2-1.0.0.txt")) + } + + test("it should fail gracefully when asked for unknown modules") { + val m = getModule(nonExisting) + intercept[ResolveException] { + ivyUpdate(m) + () + } + } + + test("it should fail gracefully when some artifacts cannot be found") { + val m = getModule(anotherExample) + intercept[ResolveException] { + ivyUpdate(m) + () + } + } + + private def artifact1 = new File(getClass.getResource("/artifact1.jar").toURI.getPath) + private def artifact2 = new File(getClass.getResource("/artifact2.txt").toURI.getPath) + + private def modules = Map( + ("org.example", "my-module", "0.0.1-SNAPSHOT") -> List( + FakeArtifact("artifact1", "jar", "jar", artifact1) + ), + ("com.example", "example", "1.0.0") -> List( + FakeArtifact("artifact1", "jar", "jar", artifact1), + FakeArtifact("artifact2", "txt", "txt", artifact2) + ), + ("com.example", "another-example", "1.0.0") -> List( + FakeArtifact("artifact1", "jar", "jar", artifact1), + FakeArtifact("non-existing", "txt", "txt", new File("non-existing-file")) + ) + ) + + private def fakeResolver = new FakeResolver("FakeResolver", new File("tmp"), modules) + override def resolvers: Vector[Resolver] = + Vector(new RawRepository(fakeResolver, fakeResolver.getName)) + private def getModule(myModule: ModuleID): IvySbt#Module = + module(defaultModuleId, Vector(myModule), None) + private def getAllFiles(report: UpdateReport) = + for { + conf <- report.configurations + m <- conf.modules + (_, f) <- m.artifacts + } yield f + +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/FrozenModeSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/FrozenModeSpec.scala new file mode 100644 index 
000000000..7186fd68b --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/FrozenModeSpec.scala @@ -0,0 +1,77 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.UpdateOptions +import sbt.librarymanagement.syntax._ + +object FrozenModeSpec extends BaseIvySpecification { + private final val targetDir = Some(currentDependency) + private final val onlineConf = makeUpdateConfiguration(false, targetDir) + private final val frozenConf = makeUpdateConfiguration(false, targetDir).withFrozen(true) + private final val warningConf = UnresolvedWarningConfiguration() + private final val normalOptions = UpdateOptions() + + final val stoml = Vector("me.vican.jorge" % "stoml_2.12" % "0.4" % "compile") + + /* https://repo1.maven.org/maven2/me/vican/jorge/stoml_2.12/0.4/stoml_2.12-0.4.jar + * https://repo1.maven.org/maven2/org/scala-lang/scala-library/2.12.0/scala-library-2.12.0.jar + * https://repo1.maven.org/maven2/com/lihaoyi/fastparse_2.12/0.4.2/fastparse_2.12-0.4.2.jar + * https://repo1.maven.org/maven2/com/lihaoyi/fastparse-utils_2.12/0.4.2/fastparse-utils_2.12-0.4.2.jar + * https://repo1.maven.org/maven2/com/lihaoyi/sourcecode_2.12/0.1.3/sourcecode_2.12-0.1.3.jar */ + final val explicitStoml = Vector( + "me.vican.jorge" % "stoml_2.12" % "0.4" % "compile", + "org.scala-lang" % "scala-library" % "2.12.0" % "compile", + "com.lihaoyi" % "fastparse_2.12" % "0.4.2" % "compile", + "com.lihaoyi" % "fastparse-utils_2.12" % "0.4.2" % "compile", + "com.lihaoyi" % "sourcecode_2.12" % "0.1.3" % "compile" + ) + + test("fail when artifacts are missing in the cache") { + cleanIvyCache() + def update(module: IvySbt#Module, conf: UpdateConfiguration) = + IvyActions.updateEither(module, conf, warningConf, log) + + val toResolve = module(defaultModuleId, stoml, None, normalOptions) + val onlineResolution = update(toResolve, onlineConf) + assert(onlineResolution.isRight) + val numberResolved = + onlineResolution.fold(e => throw e.resolveException, identity).allModules.size + val numberReportsResolved = + onlineResolution.fold(e => throw e.resolveException, identity).allModuleReports.size + + cleanIvyCache() + val singleFrozenResolution = update(toResolve, frozenConf) + assert(singleFrozenResolution.isRight) + assert( + singleFrozenResolution.fold(e => throw e.resolveException, identity).allModules.size == 1, + s"The number of explicit modules in frozen mode should be 1" + ) + assert( + singleFrozenResolution + .fold(e => throw e.resolveException, identity) + .allModuleReports + .size == 1, + s"The number of explicit module reports in frozen mode should be 1" + ) + + cleanIvyCache() + // This relies on the fact that stoml has 5 transitive dependencies + val toExplicitResolve = module(defaultModuleId, explicitStoml, None, normalOptions) + val frozenResolution = update(toExplicitResolve, frozenConf) + assert(frozenResolution.isRight) + assert( + frozenResolution + .fold(e => throw e.resolveException, identity) + .allModules + .size == numberResolved, + s"The number of explicit modules in frozen mode should be equal to $numberResolved" + ) + assert( + frozenResolution + .fold(e => throw e.resolveException, identity) + .allModuleReports + .size == numberReportsResolved, + s"The number of explicit module reports in frozen mode should be equal to $numberReportsResolved" + ) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/InclExclSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/InclExclSpec.scala new file mode
100644 index 000000000..7305a2ddc --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/InclExclSpec.scala @@ -0,0 +1,98 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ +import DependencyBuilders.OrganizationArtifactName + +object InclExclSpec extends BaseIvySpecification { + val scala210 = Some("2.10.4") + test("it should exclude any version of lift-json via a new exclusion rule") { + val toExclude = ExclusionRule("net.liftweb", "lift-json_2.10") + val report = getIvyReport(createLiftDep(toExclude), scala210) + testLiftJsonIsMissing(report) + } + + test("it should exclude any version of lift-json with explicit Scala version") { + val excluded: OrganizationArtifactName = "net.liftweb" % "lift-json_2.10" + val report = getIvyReport(createLiftDep(excluded), scala210) + testLiftJsonIsMissing(report) + } + + test("it should exclude any version of cross-built lift-json") { + val excluded: OrganizationArtifactName = "net.liftweb" %% "lift-json" + val report = getIvyReport(createLiftDep(excluded), scala210) + testLiftJsonIsMissing(report) + } + + val scala2122 = Some("2.12.2") + test("it should exclude a concrete version of lift-json when it's full cross version") { + val excluded: ModuleID = ("org.scalameta" % "scalahost" % "1.7.0").cross(CrossVersion.full) + val report = getIvyReport(createMetaDep(excluded), scala2122) + testScalahostIsMissing(report) + } + + test("it should exclude any version of lift-json when it's full cross version") { + val excluded = new OrganizationArtifactName("net.liftweb", "lift-json", CrossVersion.full) + val report = getIvyReport(createMetaDep(excluded), scala2122) + testScalahostIsMissing(report) + } + + test("it should exclude any version of scala-library via * artifact id") { + val toExclude = ExclusionRule("org.scala-lang", "*") + val report = getIvyReport(createLiftDep(toExclude), scala210) + testScalaLibraryIsMissing(report) + } + + test("it should exclude any version of scala-library via * org id") { + val toExclude = ExclusionRule("*", "scala-library") + val report = getIvyReport(createLiftDep(toExclude), scala210) + testScalaLibraryIsMissing(report) + } + + def createLiftDep(toExclude: ExclusionRule): ModuleID = + ("net.liftweb" %% "lift-mapper" % "2.6-M4" % "compile").excludeAll(toExclude) + + def createMetaDep(toExclude: ExclusionRule): ModuleID = + ("org.scalameta" %% "paradise" % "3.0.0-M8" % "compile") + .cross(CrossVersion.full) + .excludeAll(toExclude) + + def getIvyReport(dep: ModuleID, scalaVersion: Option[String]): UpdateReport = { + cleanIvyCache() + val ivyModule = module(defaultModuleId, Vector(dep), scalaVersion) + ivyUpdate(ivyModule) + } + + def testLiftJsonIsMissing(report: UpdateReport): Unit = { + assert( + !report.allModules.exists(_.name.contains("lift-json")), + "lift-json has not been excluded." + ) + assert( + !report.allModuleReports.exists(_.module.name.contains("lift-json")), + "lift-json has not been excluded." + ) + } + + def testScalaLibraryIsMissing(report: UpdateReport): Unit = { + assert( + !report.allModules.exists(_.name.contains("scala-library")), + "scala-library has not been excluded." + ) + assert( + !report.allModuleReports.exists(_.module.name.contains("scala-library")), + "scala-library has not been excluded." + ) + } + + def testScalahostIsMissing(report: UpdateReport): Unit = { + assert( + !report.allModules.exists(_.name.contains("scalahost")), + "scalahost has not been excluded." 
+ ) + assert( + !report.allModuleReports.exists(_.module.name.contains("scalahost")), + "scalahost has not been excluded." + ) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/InconsistentDuplicateSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/InconsistentDuplicateSpec.scala new file mode 100644 index 000000000..46e830c04 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/InconsistentDuplicateSpec.scala @@ -0,0 +1,38 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import verify.BasicTestSuite + +// This is a specification to check the inconsistent duplicate warnings +object InconsistentDuplicateSpec extends BasicTestSuite { + test("Duplicate with different version should be warned") { + assert( + IvySbt.inconsistentDuplicateWarning(Seq(akkaActor214, akkaActor230)) == + List( + "Multiple dependencies with the same organization/name but different versions. To avoid conflict, pick one version:", + " * com.typesafe.akka:akka-actor:(2.1.4, 2.3.0)" + ) + ) + } + + test("it should not be warned if in different configurations") { + assert(IvySbt.inconsistentDuplicateWarning(Seq(akkaActor214, akkaActor230Test)) == Nil) + } + + test("Duplicate with same version should not be warned") { + assert(IvySbt.inconsistentDuplicateWarning(Seq(akkaActor230Test, akkaActor230)) == Nil) + } + + def akkaActor214 = + ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + def akkaActor230 = + ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations( + Some("compile") + ) cross CrossVersion.binary + def akkaActor230Test = + ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations( + Some("test") + ) cross CrossVersion.binary +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyModuleSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyModuleSpec.scala new file mode 100644 index 000000000..ac9474912 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyModuleSpec.scala @@ -0,0 +1,38 @@ +package sbt.internal.librarymanagement + +import sbt.internal.librarymanagement.mavenint.PomExtraDependencyAttributes.{ + SbtVersionKey, + ScalaVersionKey +} +import sbt.librarymanagement.{ CrossVersion, ModuleDescriptorConfiguration } + +object IvyModuleSpec extends BaseIvySpecification { + + test("The Scala binary version of a Scala module should be appended to its name") { + val m = module( + defaultModuleId.withCrossVersion(CrossVersion.Binary()), + Vector.empty, + Some("2.13.10") + ) + m.moduleSettings match { + case configuration: ModuleDescriptorConfiguration => + assert(configuration.module.name == "foo_2.13") + case _ => fail() + } + } + + test("The sbt cross-version should be appended to the name of an sbt plugin") { + val m = module( + defaultModuleId.extra(SbtVersionKey -> "1.0", ScalaVersionKey -> "2.12"), + Vector.empty, + Some("2.12.17"), + appendSbtCrossVersion = true + ) + m.moduleSettings match { + case configuration: ModuleDescriptorConfiguration => + assert(configuration.module.name == "foo_2.12_1.0") + case _ => fail() + } + } + +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyRepoSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyRepoSpec.scala new file mode 100644 index 000000000..e9902bc96 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyRepoSpec.scala @@ -0,0 +1,105 @@ +package 
sbt.internal.librarymanagement + +import org.scalatest.Inside +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ +import InternalDefaults._ + +object IvyRepoSpec extends BaseIvySpecification { + + val ourModuleID = ModuleID("com.example", "foo", "0.1.0").withConfigurations(Some("compile")) + + def makeModuleForDepWithSources = { + // By default a module seems to only have [compile, test, runtime], yet deps automatically map to + // default->compile(default) ... so I guess we have to explicitly use e.g. "compile" + val dep = "com.test" % "module-with-srcs" % "0.1.00" % "compile" + + module( + ourModuleID, + Vector(dep), + None // , UpdateOptions().withCachedResolution(true) + ) + } + + test( + "ivyUpdate from ivy repository should resolve only binary artifact from module which also contains a sources artifact under the same configuration." + ) { + cleanIvyCache() + + val m = makeModuleForDepWithSources + + val report = ivyUpdate(m) + + import Inside._ + inside(report.configuration(ConfigRef("compile")).map(_.modules)) { case Some(Seq(mr)) => + inside(mr.artifacts) { case Seq((ar, _)) => + assert(ar.`type` == "jar") + assert(ar.extension == "jar") + } + } + } + + test( + "it should resolve only sources artifact of an acceptable artifact type, \"src\", when calling updateClassifiers." + ) { + cleanIvyCache() + + val m = makeModuleForDepWithSources + + // the "default" configuration used in `update`. + val c = makeUpdateConfiguration(false, None) + + val scalaModuleInfo = m.moduleSettings.scalaModuleInfo + val srcTypes = Vector("src") + val docTypes = Vector("javadoc") + // These will be the default classifiers that SBT should try, in case a dependency is Maven. + // In this case though, they will be tried and should fail gracefully - only the + val attemptedClassifiers = Vector("sources", "javadoc") + + // The dep that we want to get the "classifiers" (i.e. sources / docs) for. + // We know it has only one source artifact in the "compile" configuration. 
+ val dep = "com.test" % "module-with-srcs" % "0.1.00" % "compile" + + val clMod = { + val externalModules = Vector(dep) + // Note: need to extract ourModuleID so we can plug it in here, can't fish it back out of the IvySbt#Module (`m`) + GetClassifiersModule( + ourModuleID, + scalaModuleInfo, + externalModules, + Vector(Configurations.Compile), + attemptedClassifiers + ) + } + + val artifactFilter = getArtifactTypeFilter(c.artifactFilter) + val gcm = GetClassifiersConfiguration( + clMod, + Vector.empty, + c.withArtifactFilter(artifactFilter.invert), + srcTypes, + docTypes + ) + + val report2 = + lmEngine() + .updateClassifiers(gcm, UnresolvedWarningConfiguration(), Vector(), log) + .fold(e => throw e.resolveException, identity) + + import Inside._ + inside(report2.configuration(ConfigRef("compile")).map(_.modules)) { case Some(Seq(mr)) => + inside(mr.artifacts) { case Seq((ar, _)) => + assert(ar.name == "libmodule-source") + assert(ar.`type` == "src") + assert(ar.extension == "jar") + } + } + } + + override lazy val resolvers: Vector[Resolver] = Vector(testIvy) + + lazy val testIvy = { + val repoUrl = getClass.getResource("/test-ivy-repo") + Resolver.url("Test Repo", repoUrl)(Resolver.ivyStylePatterns) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyResolutionSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyResolutionSpec.scala new file mode 100644 index 000000000..d94c276e3 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyResolutionSpec.scala @@ -0,0 +1,10 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ + +class IvyResolutionSpec extends ResolutionSpec with BaseIvySpecification { + override val resolvers = Vector( + Resolver.mavenCentral, + Resolver.sbtPluginRepo("releases") + ) +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyUtilSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyUtilSpec.scala new file mode 100644 index 000000000..7b000ae45 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/IvyUtilSpec.scala @@ -0,0 +1,66 @@ +package sbt.internal.librarymanagement + +import java.io.IOException + +import org.scalatest.funsuite.AnyFunSuite +import sbt.internal.librarymanagement.IvyUtil._ + +class IvyUtilSpec extends AnyFunSuite { + test("503 should be a TransientNetworkException") { + val statusCode503Exception = + new IOException("Server returned HTTP response code: 503 for URL:") + assert(TransientNetworkException(statusCode503Exception)) + } + + test("500 should be a TransientNetworkException") { + val statusCode500Exception = + new IOException("Server returned HTTP response code: 500 for URL:") + assert(TransientNetworkException(statusCode500Exception)) + } + + test("408 should be a TransientNetworkException") { + val statusCode408Exception = + new IOException("Server returned HTTP response code: 408 for URL:") + assert(TransientNetworkException(statusCode408Exception)) + } + + test("429 should be a TransientNetworkException") { + val statusCode429Exception = + new IOException(" Server returned HTTP response code: 429 for URL:") + assert(TransientNetworkException(statusCode429Exception)) + } + + test("404 should not be a TransientNetworkException") { + val statusCode404Exception = + new IOException("Server returned HTTP response code: 404 for URL:") + assert(!TransientNetworkException(statusCode404Exception)) + } + + test("IllegalArgumentException should not be a TransientNetworkException") { + val 
illegalArgumentException = new IllegalArgumentException() + assert(!TransientNetworkException(illegalArgumentException)) + } + + test("it should retry for 3 attempts") { + var i = 0 + def f: Int = { + i += 1 + if (i < 3) throw new RuntimeException() else i + } + // exception predicate retries on all exceptions for this test + val result = retryWithBackoff(f, _ => true, maxAttempts = 3) + assert(result == 3) + } + + test("it should fail after maxAttempts") { + var i = 0 + def f: Int = { + i += 1 + throw new RuntimeException() + } + intercept[RuntimeException] { + retryWithBackoff(f, _ => true, maxAttempts = 3) + } + assert(i == 3) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/MakePomSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/MakePomSpec.scala new file mode 100644 index 000000000..6b3aafd17 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/MakePomSpec.scala @@ -0,0 +1,100 @@ +package sbt.internal.librarymanagement + +import sbt.internal.util.ConsoleLogger +import sbt.librarymanagement.MavenRepository +import verify.BasicTestSuite + +// http://ant.apache.org/ivy/history/2.3.0/ivyfile/dependency.html +// http://maven.apache.org/enforcer/enforcer-rules/versionRanges.html +object MakePomSpec extends BasicTestSuite { + // This is a specification to check the Ivy revision number conversion to pom. + + test("1.0 should convert to 1.0") { + convertTo("1.0", "1.0") + } + + test("[1.0,2.0] should convert to [1.0,2.0]") { + convertTo("[1.0,2.0]", "[1.0,2.0]") + } + + test("[1.0,2.0[ should convert to [1.0,2.0)") { + convertTo("[1.0,2.0[", "[1.0,2.0)") + } + + test("]1.0,2.0] should convert to (1.0,2.0]") { + convertTo("]1.0,2.0]", "(1.0,2.0]") + } + + test("]1.0,2.0[ should convert to (1.0,2.0)") { + convertTo("]1.0,2.0[", "(1.0,2.0)") + } + + test("[1.0,) should convert to [1.0,)") { + convertTo("[1.0,)", "[1.0,)") + } + + test("]1.0,) should convert to (1.0,)") { + convertTo("]1.0,)", "(1.0,)") + } + + test("(,2.0] should convert to (,2.0]") { + convertTo("(,2.0]", "(,2.0]") + } + + test("(,2.0[ should convert to (,2.0)") { + convertTo("(,2.0[", "(,2.0)") + } + + test("1.+ should convert to [1,2)") { + convertTo("1.+", "[1,2)") + } + + test("1.2.3.4.+ should convert to [1.2.3.4,1.2.3.5)") { + convertTo("1.2.3.4.+", "[1.2.3.4,1.2.3.5)") + } + + test("12.31.42.+ should convert to [12.31.42,12.31.43)") { + convertTo("12.31.42.+", "[12.31.42,12.31.43)") + } + + test( + "1.1+ should convert to [1.1,1.2),[1.10,1.20),[1.100,1.200),[1.1000,1.2000),[1.10000,1.20000)" + ) { + convertTo("1.1+", "[1.1,1.2),[1.10,1.20),[1.100,1.200),[1.1000,1.2000),[1.10000,1.20000)") + } + + test("1+ should convert to [1,2),[10,20),[100,200),[1000,2000),[10000,20000)") { + convertTo("1+", "[1,2),[10,20),[100,200),[1000,2000),[10000,20000)") + } + + test("+ should convert to [0,)") { + convertTo("+", "[0,)") + } + + test("foo+ should convert to foo+") { + beParsedAsError("foo+") + } + + test("repository id should not contain maven illegal repo id characters") { + val repository = mp.mavenRepository( + MavenRepository( + """repository-id-\with-/illegal:"<-chars>|?*-others~!@#$%^&`';{}[]=+_,.""", + "uri" + ) + ) + assert( + (repository \ "id").text == "repository-id-with-illegal-chars-others~!@#$%^&`';{}[]=+_,." 
+ ) + } + + val mp = new MakePom(ConsoleLogger()) + def convertTo(s: String, expected: String): Unit = { + assert(MakePom.makeDependencyVersion(s) == expected) + } + def beParsedAsError(s: String): Unit = { + intercept[Throwable] { + MakePom.makeDependencyVersion(s) + () + } + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ManagedChecksumsSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ManagedChecksumsSpec.scala new file mode 100644 index 000000000..df8241a2c --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ManagedChecksumsSpec.scala @@ -0,0 +1,59 @@ +package sbt.internal.librarymanagement + +import org.apache.ivy.util.Message +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy._ +import sbt.io.IO + +object ManagedChecksumsSpec extends BaseIvySpecification { + private final def targetDir = Some(currentDependency) + private final def onlineConf = makeUpdateConfiguration(false, targetDir) + private final def warningConf = UnresolvedWarningConfiguration() + private final val Checksum = "sha1" + + def avro177 = ModuleID("org.apache.avro", "avro", "1.7.7") + def dataAvro1940 = ModuleID("com.linkedin.pegasus", "data-avro", "1.9.40") + def netty320 = ModuleID("org.jboss.netty", "netty", "3.2.0.Final") + final def dependencies: Vector[ModuleID] = + Vector(avro177, dataAvro1940, netty320).map(_.withConfigurations(Some("compile"))) + + import sbt.io.syntax._ + override def mkIvyConfiguration(uo: UpdateOptions): IvyConfiguration = { + val moduleConfs = Vector(ModuleConfiguration("*", chainResolver)) + val resCacheDir = currentTarget / "resolution-cache" + InlineIvyConfiguration() + .withPaths(IvyPaths(currentBase.toString, Some(currentTarget.toString))) + .withResolvers(resolvers) + .withModuleConfigurations(moduleConfs) + .withChecksums(Vector(Checksum)) + .withResolutionCacheDir(resCacheDir) + .withLog(log) + .withUpdateOptions(uo) + .withManagedChecksums(true) + } + + def cleanAll(): Unit = { + cleanIvyCache() + IO.delete(currentTarget) + IO.delete(currentManaged) + IO.delete(currentDependency) + } + + def assertChecksumExists(file: File) = { + val shaFile = new File(file.getAbsolutePath + s".$Checksum") + Message.info(s"Checking $shaFile exists...") + assert(shaFile.exists(), s"The checksum $Checksum for $file does not exist") + } + + test("Managed checksums should download the checksum files") { + cleanAll() + val updateOptions = UpdateOptions() + val toResolve = module(defaultModuleId, dependencies, None, updateOptions) + val res = IvyActions.updateEither(toResolve, onlineConf, warningConf, log) + assert(res.isRight, s"Resolution with managed checksums failed! 
$res") + val updateReport = res.fold(e => throw e.resolveException, identity) + val allModuleReports = updateReport.configurations.flatMap(_.modules) + val allArtifacts: Seq[File] = allModuleReports.flatMap(_.artifacts.map(_._2)) + allArtifacts.foreach(assertChecksumExists) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/MergeDescriptorSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/MergeDescriptorSpec.scala new file mode 100644 index 000000000..780613096 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/MergeDescriptorSpec.scala @@ -0,0 +1,40 @@ +package sbt.internal.librarymanagement + +import org.apache.ivy.core.module.descriptor.DependencyArtifactDescriptor +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.UpdateOptions +import sbt.internal.librarymanagement.ivyint._ + +object MergeDescriptorSpec extends BaseIvySpecification { + test("Merging duplicate dependencies should work") { + cleanIvyCache() + val m = module( + ModuleID("com.example", "foo", "0.1.0").withConfigurations(Some("compile")), + Vector(guavaTest, guavaTestTests), + None, + UpdateOptions() + ) + m.withModule(log) { case (_, md, _) => + val deps = md.getDependencies + assert(deps.size == 1) + deps.headOption.getOrElse(sys.error("Dependencies not found")) match { + case dd @ MergedDescriptors(_, _) => + val arts = dd.getAllDependencyArtifacts + val a0: DependencyArtifactDescriptor = arts.toList(0) + val a1: DependencyArtifactDescriptor = arts.toList(1) + val configs0 = a0.getConfigurations.toList + val configs1 = a1.getConfigurations.toList + assert(configs0 == List("compile")) + assert(configs1 == List("test")) + } + } + } + def guavaTest = + ModuleID("com.google.guava", "guava-tests", "18.0").withConfigurations(Option("compile")) + def guavaTestTests = + ModuleID("com.google.guava", "guava-tests", "18.0") + .withConfigurations(Option("test")) + .classifier("tests") + def defaultOptions = EvictionWarningOptions.default + +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ModuleResolversTest.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ModuleResolversTest.scala new file mode 100644 index 000000000..295fe9758 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ModuleResolversTest.scala @@ -0,0 +1,51 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ +import sbt.librarymanagement.ivy.UpdateOptions + +object ModuleResolversTest extends BaseIvySpecification { + override final val resolvers = Vector( + MavenRepository( + "JFrog OSS Releases", + "https://releases.jfrog.io/artifactory/oss-releases/" + ), + Resolver.sbtPluginRepo("releases") + ) + + private final val stubModule = "com.example" % "foo" % "0.1.0" % "compile" + val pluginAttributes = Map("sbtVersion" -> "0.13", "scalaVersion" -> "2.10") + private final val dependencies = Vector( + ("me.lessis" % "bintray-sbt" % "0.3.0" % "compile").withExtraAttributes(pluginAttributes), + "com.jfrog.bintray.client" % "bintray-client-java-api" % "0.9.2" % "compile" + ).map(_.withIsTransitive(false)) + + test("The direct resolvers in update options should skip the rest of resolvers") { + cleanIvyCache() + val updateOptions = UpdateOptions() + val ivyModule = module(stubModule, dependencies, None, updateOptions) + val normalResolution = ivyUpdateEither(ivyModule) + assert(normalResolution.isRight) + val normalResolutionTime = + normalResolution.fold(e => throw e.resolveException, 
identity).stats.resolveTime + + cleanIvyCache() + val moduleResolvers = Map( + dependencies.head -> resolvers.last, + dependencies.tail.head -> resolvers.init.last + ) + val customUpdateOptions = updateOptions.withModuleResolvers(moduleResolvers) + val ivyModule2 = module(stubModule, dependencies, None, customUpdateOptions) + val fasterResolution = ivyUpdateEither(ivyModule2) + assert(fasterResolution.isRight) + val fasterResolutionTime = + fasterResolution.fold(e => throw e.resolveException, identity).stats.resolveTime + + // This is left in on purpose so that, on spurious failures, we can see the resolution times + println(s"NORMAL RESOLUTION TIME $normalResolutionTime") + println(s"FASTER RESOLUTION TIME $fasterResolutionTime") + + // Check that resolution with direct module resolvers is faster than the normal resolution + assert(fasterResolutionTime < normalResolutionTime) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/OfflineModeSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/OfflineModeSpec.scala new file mode 100644 index 000000000..04484931e --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/OfflineModeSpec.scala @@ -0,0 +1,74 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement._ +import sbt.librarymanagement.ivy.UpdateOptions +import sbt.io.IO + +object OfflineModeSpec extends BaseIvySpecification { + private final def targetDir = Some(currentDependency) + private final def onlineConf = makeUpdateConfiguration(false, targetDir) + private final def offlineConf = makeUpdateConfiguration(true, targetDir) + private final def warningConf = UnresolvedWarningConfiguration() + private final def normalOptions = UpdateOptions() + private final def cachedOptions = UpdateOptions().withCachedResolution(true) + + def avro177 = ModuleID("org.apache.avro", "avro", "1.7.7") + def dataAvro1940 = ModuleID("com.linkedin.pegasus", "data-avro", "1.9.40") + def netty320 = ModuleID("org.jboss.netty", "netty", "3.2.0.Final") + final def dependencies: Vector[ModuleID] = + Vector(avro177, dataAvro1940, netty320).map(_.withConfigurations(Some("compile"))) + + def cleanAll(): Unit = { + cleanIvyCache() + IO.delete(currentTarget) + IO.delete(currentManaged) + IO.delete(currentDependency) + } + + def checkOnlineAndOfflineResolution(updateOptions: UpdateOptions): Unit = { + cleanAll() + val toResolve = module(defaultModuleId, dependencies, None, updateOptions) + if (updateOptions.cachedResolution) + cleanCachedResolutionCache(toResolve) + + val onlineResolution = + IvyActions.updateEither(toResolve, onlineConf, warningConf, log) + assert(onlineResolution.isRight) + assert(onlineResolution.toOption.exists(report => report.stats.resolveTime > 0)) + + val originalResolveTime = + onlineResolution.fold(e => throw e.resolveException, identity).stats.resolveTime + val offlineResolution = + IvyActions.updateEither(toResolve, offlineConf, warningConf, log) + assert(offlineResolution.isRight) + + val resolveTime = + offlineResolution.fold(e => throw e.resolveException, identity).stats.resolveTime + assert(originalResolveTime > resolveTime) + } + + test("Offline update configuration should reuse the caches when offline is enabled") { + checkOnlineAndOfflineResolution(normalOptions) + } + + test("it should reuse the caches when offline and cached resolution are enabled") { + checkOnlineAndOfflineResolution(cachedOptions) + } + + def checkFailingResolution(updateOptions: UpdateOptions): Unit = { + cleanAll() + val toResolve = module(defaultModuleId, dependencies, None, updateOptions) + if 
(updateOptions.cachedResolution) cleanCachedResolutionCache(toResolve) + val failedOfflineResolution = + IvyActions.updateEither(toResolve, offlineConf, warningConf, log) + assert(failedOfflineResolution.isLeft) + } + + test("it should fail when artifacts are missing in the cache") { + checkFailingResolution(normalOptions) + } + + test("it should fail when artifacts are missing in the cache for cached resolution") { + checkFailingResolution(cachedOptions) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/PlatformResolutionSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/PlatformResolutionSpec.scala new file mode 100644 index 000000000..f6ce21736 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/PlatformResolutionSpec.scala @@ -0,0 +1,82 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement.* +import sbt.librarymanagement.syntax.* +import sbt.librarymanagement.Platform.* + +object PlatformResolutionSpec extends BaseIvySpecification { + + test("None platform resolves %% as JVM") { + cleanCache() + val m = exampleAutoModule(platform = None) + assert( + update(m).configurations.head.modules.map(_.toString).mkString + contains "com.github.scopt:scopt_2.13:4.1.0" + ) + } + + test("sjs1 platform resolves %% as sjs1") { + cleanCache() + val m = exampleAutoModule(platform = Some("sjs1")) + assert( + update(m).configurations.head.modules.map(_.toString).mkString + contains "com.github.scopt:scopt_sjs1_2.13" + ) + } + + test("sjs1 platform resolves % as JVM") { + cleanCache() + val m = module( + exampleModuleId("0.6.0"), + deps = Vector(junit), + Some(scala2_13), + platform = Some(sjs1), + ) + assert( + update(m).configurations.head.modules.map(_.toString).mkString + contains "junit:junit:4.13.1" + ) + } + + test("None platform can specify .platform(sjs1) dependency") { + cleanCache() + val m = module( + exampleModuleId("0.6.0"), + deps = Vector(scopt.platform(sjs1)), + Some(scala2_13), + platform = None, + ) + assert( + update(m).configurations.head.modules.map(_.toString).mkString + contains "com.github.scopt:scopt_sjs1_2.13" + ) + } + + test("sjs1 platform can specify .platform(jvm) dependency") { + cleanCache() + val m = module( + exampleModuleId("0.6.0"), + deps = Vector(scopt.platform(jvm)), + Some(scala2_13), + platform = None, + ) + assert( + update(m).configurations.head.modules.map(_.toString).mkString + contains "com.github.scopt:scopt_2.13:4.1.0" + ) + } + + def exampleAutoModule(platform: Option[String]): ModuleDescriptor = module( + exampleModuleId("0.6.0"), + deps = Vector(scopt), + Some(scala2_13), + platform = platform, + ) + + def exampleModuleId(v: String): ModuleID = ("com.example" % "foo" % v % Compile) + def scopt = ("com.github.scopt" %% "scopt" % "4.1.0" % Compile) + def junit = ("junit" % "junit" % "4.13.1" % Compile) + override val resolvers = Vector( + Resolver.mavenCentral, + ) +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ResolutionSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ResolutionSpec.scala new file mode 100644 index 000000000..d85b4163a --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ResolutionSpec.scala @@ -0,0 +1,117 @@ +package sbt.internal.librarymanagement + +import sbt.util.ShowLines +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ + +abstract class ResolutionSpec extends AbstractEngineSpec { + + import TestShowLines.* + + test("Resolving the same module twice should work") { 
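+    // assumption: cleanCache() clears the local Ivy cache, so the first update below has to resolve from the configured resolvers rather than reuse earlier test state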
cleanCache() + val m = module( + exampleModuleId("0.1.0"), + Vector(commonsIo13), + Some("2.10.2") + ) + val report = update(m) + cleanCachedResolutionCache(m) + val _ = update(m) + // first resolution creates the minigraph + println(report) + // second resolution reads from the minigraph + println(report.configurations.head.modules.head.artifacts) + assert(report.configurations.size == 3) + } + + test("Resolving the unsolvable module should not work") { + // log.setLevel(Level.Debug) + val m = module( + exampleModuleId("0.2.0"), + Vector(mavenCayennePlugin302), + Some("2.10.2") + ) + updateEither(m) match { + case Right(_) => sys.error("this should've failed") + case Left(uw) => + println(uw.lines.mkString("\n")) + } + updateEither(m) match { + case Right(_) => sys.error("this should've failed 2") + case Left(uw) => + List( + "\n\tNote: Unresolved dependencies path:", + "\t\tfoundrylogic.vpp:vpp:2.2.1", + "\t\t +- org.apache.cayenne:cayenne-tools:3.0.2", + "\t\t +- org.apache.cayenne.plugins:maven-cayenne-plugin:3.0.2", + "\t\t +- com.example:foo:0.2.0" + ) foreach { line => + assert(uw.lines.contains[String](line)) + } + } + } + + // https://github.com/sbt/sbt/issues/2046 + // data-avro:1.9.40 depends on avro:1.4.0, which depends on netty:3.2.1.Final. + // avro:1.4.0 will be evicted by avro:1.7.7. + // #2046 says that netty:3.2.0.Final is incorrectly evicted by netty:3.2.1.Final + test("Resolving a module with a pseudo-conflict should work") { + // log.setLevel(Level.Debug) + cleanCache() + val m = module( + exampleModuleId("0.3.0"), + Vector(avro177, dataAvro1940, netty320), + Some("2.10.2") + ) + // first resolution creates the minigraph + val _ = update(m) + cleanCachedResolutionCache(m) + // second resolution reads from the minigraph + val report = update(m) + val modules: Seq[String] = report.configurations.head.modules map { _.toString } + assert(modules exists { (x: String) => + x contains """org.jboss.netty:netty:3.2.0.Final""" + }) + assert(!(modules exists { (x: String) => + x contains """org.jboss.netty:netty:3.2.1.Final""" + })) + } + + test("Resolving a module with sbt cross build should work") { + cleanCache() + val attributes013 = Map("e:sbtVersion" -> "0.13", "e:scalaVersion" -> "2.10") + val attributes10 = Map("e:sbtVersion" -> "1.0", "e:scalaVersion" -> "2.12") + val module013 = module( + exampleModuleId("0.4.0"), + Vector(sbtRelease.withExtraAttributes(attributes013)), + Some("2.10.6") + ) + val module10 = module( + exampleModuleId("0.4.1"), + Vector(sbtRelease.withExtraAttributes(attributes10)), + Some("2.12.3") + ) + assert( + update(module013).configurations.head.modules.map(_.toString) + contains "com.github.gseitz:sbt-release:1.0.6 (scalaVersion=2.10, sbtVersion=0.13)" + ) + assert( + update(module10).configurations.head.modules.map(_.toString) + contains "com.github.gseitz:sbt-release:1.0.6 (scalaVersion=2.12, sbtVersion=1.0)" + ) + } + + def exampleModuleId(v: String): ModuleID = ("com.example" % "foo" % v % Compile) + + def commonsIo13 = ("commons-io" % "commons-io" % "1.3" % Compile) + def mavenCayennePlugin302 = + ("org.apache.cayenne.plugins" % "maven-cayenne-plugin" % "3.0.2" % Compile) + def avro177 = ("org.apache.avro" % "avro" % "1.7.7" % Compile) + def dataAvro1940 = + ("com.linkedin.pegasus" % "data-avro" % "1.9.40" % Compile) + def netty320 = ("org.jboss.netty" % "netty" % "3.2.0.Final" % Compile) + def sbtRelease = ("com.github.gseitz" % "sbt-release" % "1.0.6" % Compile) + + def defaultOptions = EvictionWarningOptions.default +} diff --git 
a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ResolverSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ResolverSpec.scala new file mode 100644 index 000000000..6e7816ae9 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ResolverSpec.scala @@ -0,0 +1,18 @@ +package sbttest + +import java.net.URI +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ +import verify.BasicTestSuite + +class ResolverSpec extends BasicTestSuite { + test("Resolver.url") { + Resolver.url("Test Repo", new URI("http://example.com/").toURL)(Resolver.ivyStylePatterns) + () + } + + test("at") { + "something" at "http://example.com" + () + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ScalaOverrideTest.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ScalaOverrideTest.scala new file mode 100644 index 000000000..688f11baf --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/ScalaOverrideTest.scala @@ -0,0 +1,276 @@ +package sbt.internal.librarymanagement + +// import org.apache.ivy.core.module.id.ModuleRevisionId +// import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor + +// import sbt.internal.librarymanagement.IvyScalaUtil.OverrideScalaMediator +// import sbt.librarymanagement._ +// import sbt.librarymanagement.ScalaArtifacts._ +import verify.BasicTestSuite + +object ScalaOverrideTest extends BasicTestSuite { + /* + val OtherOrgID = "other.org" + + private val scalaConfigs = + Configurations.default.filter(Configurations.underScalaVersion).map(_.name) + + def checkOrgAndVersion( + org0: String, + version0: String + )(org1: String, name1: String, version1: String): Unit = { + val osm = new OverrideScalaMediator(org0, version0, scalaConfigs) + + val mrid = ModuleRevisionId.newInstance(org1, name1, version1) + val dd = new DefaultDependencyDescriptor(mrid, false) + dd.addDependencyConfiguration("compile", "compile") + + val res = osm.mediate(dd) + assert(res.getDependencyRevisionId == ModuleRevisionId.newInstance(org0, name1, version0)) + } + + def checkOnlyOrg( + org0: String, + version0: String + )(org1: String, name1: String, version1: String): Unit = { + val osm = new OverrideScalaMediator(org0, version0, scalaConfigs) + + val mrid = ModuleRevisionId.newInstance(org1, name1, version1) + val dd = new DefaultDependencyDescriptor(mrid, false) + dd.addDependencyConfiguration("compile", "compile") + + val res = osm.mediate(dd) + assert(res.getDependencyRevisionId == ModuleRevisionId.newInstance(org0, name1, version1)) + } + + def checkNoOverride( + org0: String, + version0: String + )(org1: String, name1: String, version1: String): Unit = { + val osm = new OverrideScalaMediator(org0, version0, scalaConfigs) + + val mrid = ModuleRevisionId.newInstance(org1, name1, version1) + val dd = new DefaultDependencyDescriptor(mrid, false) + dd.addDependencyConfiguration("compile", "compile") + + val res = osm.mediate(dd) + assert(res.getDependencyRevisionId == mrid) + } + + test("OverrideScalaMediator should override compiler version") { + checkOrgAndVersion(Organization, "2.11.8")( + Organization, + CompilerID, + "2.11.9" + ) + } + + test("it should override library version") { + checkOrgAndVersion(Organization, "2.11.8")( + Organization, + LibraryID, + "2.11.8" + ) + } + + test("it should override reflect version") { + checkOrgAndVersion(Organization, "2.11.8")( + Organization, + ReflectID, + "2.11.7" + ) + } + + test("it should override actors version") { + checkOrgAndVersion(Organization, 
"2.11.8")( + Organization, + ActorsID, + "2.11.6" + ) + } + + test("it should override scalap version") { + checkOrgAndVersion(Organization, "2.11.8")( + Organization, + ScalapID, + "2.11.5" + ) + } + + test("it should override default compiler organization") { + checkOrgAndVersion(OtherOrgID, "2.11.8")( + Organization, + CompilerID, + "2.11.9" + ) + } + + test("it should override default library organization") { + checkOrgAndVersion(OtherOrgID, "2.11.8")( + Organization, + LibraryID, + "2.11.8" + ) + } + + test("it should override default reflect organization") { + checkOrgAndVersion(OtherOrgID, "2.11.8")( + Organization, + ReflectID, + "2.11.7" + ) + } + + test("it should override default actors organization") { + checkOrgAndVersion(OtherOrgID, "2.11.8")( + Organization, + ActorsID, + "2.11.6" + ) + } + + test("it should override default scalap organization") { + checkOrgAndVersion(OtherOrgID, "2.11.8")( + Organization, + ScalapID, + "2.11.5" + ) + } + + test("it should override custom compiler organization") { + checkOrgAndVersion(Organization, "2.11.8")( + OtherOrgID, + CompilerID, + "2.11.9" + ) + } + + test("it should override custom library organization") { + checkOrgAndVersion(Organization, "2.11.8")( + OtherOrgID, + LibraryID, + "2.11.8" + ) + } + + test("it should override custom reflect organization") { + checkOrgAndVersion(Organization, "2.11.8")( + OtherOrgID, + ReflectID, + "2.11.7" + ) + } + + test("it should override custom actors organization") { + checkOrgAndVersion(Organization, "2.11.8")( + OtherOrgID, + ActorsID, + "2.11.6" + ) + } + + test("it should override custom scalap organization") { + checkOrgAndVersion(Organization, "2.11.8")( + OtherOrgID, + ScalapID, + "2.11.5" + ) + } + + test("it should override Scala 3 compiler version") { + checkOrgAndVersion(Organization, "3.1.0")( + Organization, + Scala3CompilerPrefix + "3", + "3.0.0" + ) + } + + test("it should override Scala 3 library version") { + checkOrgAndVersion(Organization, "3.1.0")( + Organization, + Scala3LibraryPrefix + "3", + "3.0.0" + ) + } + + test("it should override Scala 3 interfaces version") { + checkOrgAndVersion(Organization, "3.1.0")( + Organization, + Scala3InterfacesID, + "3.0.0" + ) + } + + test("it should override TASTy core version") { + checkOrgAndVersion(Organization, "3.1.0")( + Organization, + TastyCorePrefix + "3", + "3.0.0" + ) + } + + test("it should not override Scala 2 library version when using Scala 3") { + checkNoOverride(Organization, "3.1.0")( + Organization, + LibraryID, + "2.13.4" + ) + } + + test("it should not override TASTy core version when using Scala 2") { + checkNoOverride(Organization, "2.13.4")( + Organization, + TastyCorePrefix + "3", + "3.0.0" + ) + } + + test("it should override default Scala 3 compiler organization") { + checkOrgAndVersion(OtherOrgID, "3.1.0")( + Organization, + Scala3CompilerPrefix + "3", + "3.0.0" + ) + } + + test("it should override default Scala 3 library organization") { + checkOrgAndVersion(OtherOrgID, "3.1.0")( + Organization, + Scala3LibraryPrefix + "3", + "3.0.0" + ) + } + + test("it should override default Scala 3 interfaces organization") { + checkOrgAndVersion(OtherOrgID, "3.1.0")( + Organization, + Scala3InterfacesID, + "3.0.0" + ) + } + + test("it should override default Scala 3 TASTy core organization") { + checkOrgAndVersion(OtherOrgID, "3.1.0")( + Organization, + TastyCorePrefix + "3", + "3.0.0" + ) + } + + test("it should override default Scala 2 library organization when in Scala 3") { + checkOnlyOrg(OtherOrgID, "3.1.0")( + 
Organization, + LibraryID, + "2.13.4" + ) + } + + test("it should override default TASTy core organization when in Scala 2") { + checkOnlyOrg(OtherOrgID, "2.13.4")( + Organization, + TastyCorePrefix + "3", + "3.0.0" + ) + } + */ +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/SftpRepoSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/SftpRepoSpec.scala new file mode 100644 index 000000000..c131e2900 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/SftpRepoSpec.scala @@ -0,0 +1,42 @@ +package sbt.internal.librarymanagement + +import sbt.io._ +import sbt.io.syntax._ +import sbt.util.Level +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ +import java.nio.file.Paths + +//by default this test is ignored +//to run this you need to change "repo" to point to some sftp repository which contains a dependency referring a dependency in same repo +//it will then attempt to authenticate via key file and fetch the dependency specified via "org" and "module" +object SftpRepoSpec extends BaseIvySpecification { + val repo: Option[String] = None +// val repo: Option[String] = Some("some repo") + // a dependency which depends on another in the repo + def org(repo: String) = s"com.${repo}" + def module(org: String) = org % "some-lib" % "version" + + override def resolvers = { + implicit val patterns = Resolver.defaultIvyPatterns + repo.map { repo => + val privateKeyFile = Paths.get(sys.env("HOME"), ".ssh", s"id_${repo}").toFile + Resolver.sftp(repo, s"repo.${repo}.com", 2222).as(repo, privateKeyFile) + }.toVector ++ super.resolvers + } + + test("resolving multiple deps from sftp repo should not hang or fail") { + repo match { + case Some(repo) => + IO.delete(currentTarget / "cache" / org(repo)) + // log.setLevel(Level.Debug) + lmEngine().retrieve(module(org(repo)), scalaModuleInfo = None, currentTarget, log) match { + case Right(v) => log.debug(v.toString()) + case Left(e) => + log.log(Level.Error, e.failedPaths.toString()) + throw e.resolveException + } + case None => log.info(s"skipped ${getClass}") + } + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/TestLogger.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/TestLogger.scala new file mode 100644 index 000000000..e3cd17d3c --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/TestLogger.scala @@ -0,0 +1,14 @@ +package sbt +package internal +package librarymanagement + +import sbt.util._ +import sbt.internal.util._ + +object TestLogger { + def apply[T](f: Logger => T): T = { + val log = new BufferedLogger(ConsoleLogger()) + log.setLevel(Level.Debug) + log.bufferQuietly(f(log)) + } +} diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/TestShowLInes.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/TestShowLInes.scala new file mode 100644 index 000000000..8e318ff53 --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/TestShowLInes.scala @@ -0,0 +1,9 @@ +package sbt.internal.librarymanagement + +import sbt.util.ShowLines + +object TestShowLines: + extension [A: ShowLines](a: A) + inline def lines: Seq[String] = + implicitly[ShowLines[A]].showLines(a) +end TestShowLines diff --git a/lm-ivy/src/test/scala/sbt/internal/librarymanagement/UpdateOptionsSpec.scala b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/UpdateOptionsSpec.scala new file mode 100644 index 000000000..5729bd06e --- /dev/null +++ b/lm-ivy/src/test/scala/sbt/internal/librarymanagement/UpdateOptionsSpec.scala @@ 
-0,0 +1,26 @@ +package sbt.internal.librarymanagement + +import sbt.librarymanagement.ivy._ +import verify.BasicTestSuite + +class UpdateOptionsSpec extends BasicTestSuite { + test("UpdateOptions should have proper toString defined") { + assert(UpdateOptions().toString() == """|UpdateOptions( + | circularDependencyLevel = warn, + | latestSnapshots = true, + | cachedResolution = false + |)""".stripMargin) + + assert( + UpdateOptions() + .withCircularDependencyLevel(CircularDependencyLevel.Error) + .withCachedResolution(true) + .withLatestSnapshots(false) + .toString() == """|UpdateOptions( + | circularDependencyLevel = error, + | latestSnapshots = false, + | cachedResolution = true + |)""".stripMargin + ) + } +} diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index abad8e7f8..35dfd27f0 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -3305,11 +3305,11 @@ object Classpaths { projectInfo := ModuleInfo( name.value, description.value, - homepage.value, + homepage.value.map(_.toURI), startYear.value, - licenses.value.toVector, + licenses.value.map((name, url) => (name, url.toURI)).toVector, organizationName.value, - organizationHomepage.value, + organizationHomepage.value.map(_.toURI), scmInfo.value, developers.value.toVector ), diff --git a/project/Check.scala b/project/Check.scala new file mode 100644 index 000000000..b7d7201a4 --- /dev/null +++ b/project/Check.scala @@ -0,0 +1,26 @@ +import java.io.File +import java.util.zip.ZipFile + +import scala.collection.JavaConverters._ + +object Check { + + def onlyNamespace(ns: String, jar: File, ignoreFiles: Set[String] = Set.empty): Unit = { + val zf = new ZipFile(jar) + val unrecognized = zf + .entries() + .asScala + .map(_.getName) + .filter { n => + !n.startsWith("META-INF/") && !n.startsWith(ns + "/") && + n != "reflect.properties" && // scala-reflect adds that + !ignoreFiles(n) + } + .toVector + .sorted + for (u <- unrecognized) + System.err.println(s"Unrecognized: $u") + assert(unrecognized.isEmpty) + } + +} diff --git a/project/DatatypeConfig.scala b/project/DatatypeConfig.scala new file mode 100644 index 000000000..38807b394 --- /dev/null +++ b/project/DatatypeConfig.scala @@ -0,0 +1,89 @@ +import sbt.contraband.ast._ +import sbt.contraband.CodecCodeGen + +object DatatypeConfig { + + /** Extract the only type parameter from a TpeRef */ + def oneArg(tpe: Type): Type = { + val pat = s"""${tpe.removeTypeParameters.name}[<\\[](.+?)[>\\]]""".r + val pat(arg0) = tpe.name + NamedType(arg0 split '.' toList) + } + + /** Extract the two type parameters from a TpeRef */ + def twoArgs(tpe: Type): List[Type] = { + val pat = s"""${tpe.removeTypeParameters.name}[<\\[](.+?), (.+?)[>\\]]""".r + val pat(arg0, arg1) = tpe.name + NamedType(arg0 split '.' toList) :: NamedType(arg1 split '.' toList) :: Nil + } + + /** Codecs that were manually written. 
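+    * Each entry maps a type's fully-qualified name to the format(s) required to encode values of that type.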
*/ + val myCodecs: PartialFunction[String, Type => List[String]] = { + case "scala.xml.NodeSeq" => { _ => + "sbt.internal.librarymanagement.formats.NodeSeqFormat" :: Nil + } + + case "org.apache.ivy.plugins.resolver.DependencyResolver" => { _ => + "sbt.internal.librarymanagement.formats.DependencyResolverFormat" :: Nil + } + + case "xsbti.GlobalLock" => { _ => + "sbt.internal.librarymanagement.formats.GlobalLockFormat" :: Nil + } + case "xsbti.Logger" => { _ => + "sbt.internal.librarymanagement.formats.LoggerFormat" :: Nil + } + + case "sbt.librarymanagement.ivy.UpdateOptions" => { _ => + "sbt.librarymanagement.ivy.formats.UpdateOptionsFormat" :: Nil + } + + case "sbt.librarymanagement.LogicalClock" => { _ => + "sbt.internal.librarymanagement.formats.LogicalClockFormats" :: Nil + } + + case "sbt.librarymanagement.CrossVersion" => { _ => + "sbt.librarymanagement.CrossVersionFormats" :: + "sbt.librarymanagement.DisabledFormats" :: + "sbt.librarymanagement.BinaryFormats" :: + "sbt.librarymanagement.ConstantFormats" :: + "sbt.librarymanagement.PatchFormats" :: + "sbt.librarymanagement.FullFormats" :: + "sbt.librarymanagement.For3Use2_13Formats" :: + "sbt.librarymanagement.For2_13Use3Formats" :: + Nil + } + + case "sbt.librarymanagement.ConfigRef" => { _ => + "sbt.librarymanagement.ConfigRefFormats" :: Nil + } + + // TODO: These are handled by BasicJsonProtocol, and sbt-datatype should handle them by default, imo + case "Option" | "Set" | "scala.Vector" => { tpe => + getFormats(oneArg(tpe)) + } + case "Map" | "Tuple2" | "scala.Tuple2" => { tpe => + twoArgs(tpe).flatMap(getFormats) + } + case "Int" | "Long" => { _ => + Nil + } + } + + /** Types for which we don't include the format -- they're just aliases to InclExclRule */ + val excluded = Set("sbt.librarymanagement.InclusionRule", "sbt.librarymanagement.ExclusionRule") + + /** Returns the list of formats required to encode the given `TpeRef`. 
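+    * Falls back to CodecCodeGen.formatsForType for any type that has no hand-written codec in myCodecs above.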
*/ + val getFormats: Type => List[String] = + CodecCodeGen.extensibleFormatsForType { + case NamedType(List("sbt", "internal", "librarymanagement", "RetrieveConfiguration"), _) => + "sbt.librarymanagement.RetrieveConfigurationFormats" :: Nil + case tpe: Type if myCodecs isDefinedAt tpe.removeTypeParameters.name => + myCodecs(tpe.removeTypeParameters.name)(tpe) + case tpe: Type if excluded contains tpe.removeTypeParameters.name => + Nil + case other => + CodecCodeGen.formatsForType(other) + } + +} diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 570aeede1..364ce24c3 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -14,15 +14,10 @@ object Dependencies { // sbt modules private val ioVersion = nightlyVersion.getOrElse("1.10.0") - private val lmVersion = - sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("2.0.0-M2") val zincVersion = nightlyVersion.getOrElse("2.0.0-M1") private val sbtIO = "org.scala-sbt" %% "io" % ioVersion - private val libraryManagementCore = "org.scala-sbt" %% "librarymanagement-core" % lmVersion - private val libraryManagementIvy = "org.scala-sbt" %% "librarymanagement-ivy" % lmVersion - val launcherVersion = "1.4.3" val launcherInterface = "org.scala-sbt" % "launcher-interface" % launcherVersion val rawLauncher = "org.scala-sbt" % "launcher" % launcherVersion @@ -66,10 +61,6 @@ object Dependencies { def addSbtIO = addSbtModule(sbtIoPath, "io", sbtIO) - def addSbtLmCore = addSbtModule(sbtLmPath, "lmCore", libraryManagementCore) - def addSbtLmIvy = addSbtModule(sbtLmPath, "lmIvy", libraryManagementIvy) - def addSbtLmIvyTest = addSbtModule(sbtLmPath, "lmIvy", libraryManagementIvy, Some(Test)) - def addSbtCompilerInterface = addSbtModule(sbtZincPath, "compilerInterface", compilerInterface) def addSbtCompilerClasspath = addSbtModule(sbtZincPath, "zincClasspath", compilerClasspath) def addSbtCompilerApiInfo = addSbtModule(sbtZincPath, "zincApiInfo", compilerApiInfo) @@ -77,9 +68,6 @@ object Dependencies { def addSbtZinc = addSbtModule(sbtZincPath, "zinc", zinc) def addSbtZincCompileCore = addSbtModule(sbtZincPath, "zincCompileCore", zincCompileCore) - // val lmCoursierShaded = "io.get-coursier" %% "lm-coursier-shaded" % "2.0.10" - val lmCoursierShaded = "org.scala-sbt" %% "librarymanagement-coursier" % "2.0.0-alpha8" - lazy val sjsonNewVersion = "0.14.0-M1" def sjsonNew(n: String) = Def.setting( "com.eed3si9n" %% n % sjsonNewVersion @@ -148,4 +136,28 @@ object Dependencies { val disruptor = "com.lmax" % "disruptor" % "3.4.2" val kindProjector = ("org.typelevel" % "kind-projector" % "0.13.3").cross(CrossVersion.full) val zeroAllocationHashing = "net.openhft" % "zero-allocation-hashing" % "0.10.1" + val ivy = "org.scala-sbt.ivy" % "ivy" % "2.3.0-sbt-396a783bba347016e7fe30dacc60d355be607fe2" + + // lm dependencies + val jsch = "com.github.mwiede" % "jsch" % "0.2.17" intransitive () + val scalaTest = "org.scalatest" %% "scalatest" % "3.2.18" + val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.15.3" + val gigahorseApacheHttp = "com.eed3si9n" %% "gigahorse-apache-http" % "0.7.0" + + // lm-coursier dependencies + val dataclassScalafixVersion = "0.1.0" + val coursierVersion = "2.1.13" + + val coursier = ("io.get-coursier" %% "coursier" % coursierVersion) + .cross(CrossVersion.for3Use2_13) + .exclude("org.codehaus.plexus", "plexus-archiver") + .exclude("org.codehaus.plexus", "plexus-container-default") + + val coursierSbtMavenRepo = + ("io.get-coursier" %% "coursier-sbt-maven-repository" % coursierVersion) + 
.cross(CrossVersion.for3Use2_13) + + // FIXME Ideally, we should depend on the same version of io.get-coursier.jniutils:windows-jni-utils that + // io.get-coursier::coursier depends on. + val jniUtilsVersion = "0.3.3" } diff --git a/project/Mima.scala b/project/Mima.scala new file mode 100644 index 000000000..b6b68d643 --- /dev/null +++ b/project/Mima.scala @@ -0,0 +1,79 @@ +import com.typesafe.tools.mima.plugin.MimaPlugin +import com.typesafe.tools.mima.plugin.MimaKeys._ +import sbt._ +import sbt.Keys._ +import sys.process._ + +object Mima { + + private def stable(ver: String): Boolean = + ver.exists(c => c != '0' && c != '.') && + ver + .replace("-RC", "-") + .forall(c => c == '.' || c == '-' || c.isDigit) + + def binaryCompatibilityVersions: Set[String] = + Seq("git", "tag", "--merged", "HEAD^", "--contains", "v2.0.0-RC3-6").!!.linesIterator + .map(_.trim) + .filter(_.startsWith("v")) + .map(_.stripPrefix("v")) + .filter(stable) + .toSet + + def settings: Seq[Setting[_]] = Seq( + MimaPlugin.autoImport.mimaPreviousArtifacts := Set.empty, + // MimaPlugin.autoImport.mimaPreviousArtifacts := { + // binaryCompatibilityVersions.map { ver => + // (organization.value % moduleName.value % ver).cross(crossVersion.value) + // } + // } + ) + + lazy val lmCoursierFilters = { + mimaBinaryIssueFilters ++= { + import com.typesafe.tools.mima.core._ + + Seq( + // spurious errors on CI + ProblemFilters.exclude[IncompatibleSignatureProblem]("*"), + // Methods that shouldn't have been there + ProblemFilters.exclude[DirectMissingMethodProblem]( + "lmcoursier.credentials.FileCredentials.get" + ), + ProblemFilters.exclude[DirectMissingMethodProblem]( + "lmcoursier.credentials.DirectCredentials.matches" + ), + ProblemFilters.exclude[DirectMissingMethodProblem]( + "lmcoursier.credentials.DirectCredentials.get" + ), + ProblemFilters.exclude[DirectMissingMethodProblem]( + "lmcoursier.credentials.DirectCredentials.autoMatches" + ), + ProblemFilters.exclude[DirectMissingMethodProblem]( + "lmcoursier.credentials.Credentials.get" + ), + // Removed unused method, shouldn't have been there in the first place + ProblemFilters.exclude[DirectMissingMethodProblem]( + "lmcoursier.credentials.DirectCredentials.authentication" + ), + // ignore shaded and internal stuff related errors + (pb: Problem) => pb.matchName.forall(!_.startsWith("lmcoursier.internal.")) + ) + } + } + + lazy val lmCoursierShadedFilters = { + mimaBinaryIssueFilters ++= { + import com.typesafe.tools.mima.core._ + + Seq( + // spurious errors on CI + ProblemFilters.exclude[IncompatibleSignatureProblem]("*"), + // Should have been put under lmcoursier.internal? 
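+        // i.e. ignore compatibility problems reported for the ToCoursier / FromCoursier converters excluded below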
+ (pb: Problem) => pb.matchName.forall(!_.startsWith("lmcoursier.definitions.ToCoursier.")), + (pb: Problem) => pb.matchName.forall(!_.startsWith("lmcoursier.definitions.FromCoursier.")) + ) + } + } + +} diff --git a/project/SbtScriptedIT.scala b/project/SbtScriptedIT.scala new file mode 100644 index 000000000..d80d277b9 --- /dev/null +++ b/project/SbtScriptedIT.scala @@ -0,0 +1,108 @@ +// import sbt._ +// import Keys._ + +// import java.io.File +// import java.util.UUID.randomUUID + +// object SbtScriptedIT extends AutoPlugin { + +// object autoImport { +// val scriptedTestSbtRepo = settingKey[String]("SBT repository to be used in scripted tests") +// val scriptedTestSbtRef = settingKey[String]("SBT branch to be used in scripted tests") +// val scriptedTestLMImpl = settingKey[String]("Librarymanagement implementation to be used in scripted tests") +// val scriptedSbtVersion = settingKey[String]("SBT version to be published locally for IT tests") +// } + +// import autoImport._ +// override def requires = ScriptedPlugin + +// override def trigger = noTrigger + +// override lazy val globalSettings = Seq( +// scriptedTestSbtRepo := "https://github.com/sbt/sbt.git", +// scriptedTestSbtRef := "develop", +// scriptedTestLMImpl := "ivy", +// scriptedSbtVersion := s"""${sbtVersion.value}-LM-SNAPSHOT""" +// ) + +// private def cloneSbt(targetDir: File, repo: String, ref: String) = { +// import org.eclipse.jgit.api._ + +// if (!targetDir.exists) { +// IO.createDirectory(targetDir) + +// new CloneCommand() +// .setDirectory(targetDir) +// .setURI(repo) +// .call() + +// val git = Git.open(targetDir) + +// git.checkout().setName(ref).call() +// } +// } + +// private def publishLocalSbt( +// targetDir: File, +// lmVersion: String, +// lmGroupID: String, +// lmArtifactID: String, +// version: String) = { +// import sys.process._ +// Process( +// Seq( +// "sbt", +// "-J-Xms2048m", +// "-J-Xmx2048m", +// "-J-XX:ReservedCodeCacheSize=256m", +// "-J-XX:MaxMetaspaceSize=512m", +// s"""-Dsbt.build.lm.version=${lmVersion}""", +// s"""-Dsbt.build.lm.organization=${lmGroupID}""", +// s"""-Dsbt.build.lm.moduleName=${lmArtifactID}""", +// s"""set ThisBuild / version := "${version}"""", +// "clean", +// "publishLocal" +// ), +// Some(targetDir) +// ) ! 
+// } + +// private def setScriptedTestsSbtVersion(baseDir: File, version: String) = { +// IO.listFiles(baseDir).foreach { d => +// if (d.isDirectory) { +// IO.createDirectory(d / "project") +// IO.write( +// d / "project" / "build.properties", +// s"sbt.version=$version" +// ) +// } +// } +// } + +// import sbt.ScriptedPlugin.autoImport._ + +// override lazy val projectSettings = Seq( +// scriptedTests := { +// val targetDir = target.value / "sbt" + +// if (!targetDir.exists) { +// cloneSbt(targetDir, scriptedTestSbtRepo.value, scriptedTestSbtRef.value) + +// publishLocalSbt( +// targetDir, +// version.value, +// organization.value, +// s"librarymanagement-${scriptedTestLMImpl.value}", +// scriptedSbtVersion.value +// ) +// } + +// setScriptedTestsSbtVersion( +// sbtTestDirectory.value / thisProject.value.id, +// scriptedSbtVersion.value +// ) + +// scriptedTests.value +// } +// ) +// } diff --git a/project/Util.scala b/project/Utils.scala similarity index 98% rename from project/Util.scala rename to project/Utils.scala index 53d59d0c0..0a94e31b2 100644 --- a/project/Util.scala +++ b/project/Utils.scala @@ -4,7 +4,7 @@ import Keys._ import sbt.internal.inc.Analysis -object Util { +object Utils { val version2_13 = settingKey[String]("version number") val ExclusiveTest: Tags.Tag = Tags.Tag("exclusive-test") @@ -12,7 +12,10 @@ object Util { val scalaKeywords: TaskKey[Set[String]] = taskKey[Set[String]]("") val generateKeywords: TaskKey[File] = taskKey[File]("") - def noPublishSettings: Seq[Setting[_]] = Seq(publish := {}) + lazy val noPublish = Seq( + publish := {}, + publish / skip := true, + ) def crossBuild: Seq[Setting[_]] = Seq( diff --git a/project/plugins.sbt b/project/plugins.sbt index 9a526841b..e0f718b20 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,8 +5,11 @@ addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.1.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.5.3") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5") -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.2.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.3.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.4") addSbtPlugin("com.swoval" % "sbt-java-format" % "0.3.1") addSbtPlugin("org.scalameta" % "sbt-native-image" % "0.3.1") addDependencyTreePlugin +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.13.0") + +// libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value diff --git a/sbt-allsources.sh b/sbt-allsources.sh index c14239763..77d95d91c 100755 --- a/sbt-allsources.sh +++ b/sbt-allsources.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -sbt -Dsbtio.path=../io -Dsbtlm.path=../librarymanagement -Dsbtzinc.path=../zinc "$@" +sbt -Dsbtio.path=../io -Dsbtzinc.path=../zinc "$@" diff --git a/sbt-app/src/sbt-test/dependency-management/cache-update/build.sbt b/sbt-app/src/sbt-test/dependency-management/cache-update/build.sbt index 81e66bb93..de3f9e6cd 100644 --- a/sbt-app/src/sbt-test/dependency-management/cache-update/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cache-update/build.sbt @@ -12,14 +12,14 @@ lazy val root = (project in file(".")) organizationName := "eed3si9n", organizationHomepage := Some(url("http://example.com/")), homepage := Some(url("https://github.com/example/example")), - scmInfo := Some(ScmInfo(url("https://github.com/example/example"), "git@github.com:example/example.git")), + scmInfo := 
Some(ScmInfo(uri("https://github.com/example/example"), "git@github.com:example/example.git")), developers := List( - Developer("harrah", "Mark Harrah", "@harrah", url("https://github.com/harrah")), - Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")), - Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")), - Developer("dwijnand", "Dale Wijnand", "@dwijnand", url("https://github.com/dwijnand")), - Developer("gkossakowski", "Grzegorz Kossakowski", "@gkossakowski", url("https://github.com/gkossakowski")), - Developer("Duhemm", "Martin Duhem", "@Duhemm", url("https://github.com/Duhemm")) + Developer("harrah", "Mark Harrah", "@harrah", uri("https://github.com/harrah")), + Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", uri("https://github.com/eed3si9n")), + Developer("jsuereth", "Josh Suereth", "@jsuereth", uri("https://github.com/jsuereth")), + Developer("dwijnand", "Dale Wijnand", "@dwijnand", uri("https://github.com/dwijnand")), + Developer("gkossakowski", "Grzegorz Kossakowski", "@gkossakowski", uri("https://github.com/gkossakowski")), + Developer("Duhemm", "Martin Duhem", "@Duhemm", uri("https://github.com/Duhemm")) ), version := "0.3.1-SNAPSHOT", description := "An HTTP client for Scala with Async Http Client underneath.", diff --git a/sbt-app/src/sbt-test/dependency-management/credentials/build.sbt b/sbt-app/src/sbt-test/dependency-management/missing-credentials/build.sbt similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/credentials/build.sbt rename to sbt-app/src/sbt-test/dependency-management/missing-credentials/build.sbt diff --git a/sbt-app/src/sbt-test/dependency-management/credentials/test b/sbt-app/src/sbt-test/dependency-management/missing-credentials/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/credentials/test rename to sbt-app/src/sbt-test/dependency-management/missing-credentials/test diff --git a/sbt-app/src/sbt-test/dependency-management/mvn-local/build.sbt b/sbt-app/src/sbt-test/dependency-management/mvn-local/build.sbt index 914cd1837..b0a72691b 100644 --- a/sbt-app/src/sbt-test/dependency-management/mvn-local/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/mvn-local/build.sbt @@ -2,11 +2,11 @@ ThisBuild / csrCacheDirectory := (ThisBuild / baseDirectory).value / "coursier-c def commonSettings: Seq[Def.Setting[_]] = Seq( - ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")), + ivyPaths := IvyPaths( (ThisBuild / baseDirectory).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")), dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency", scalaVersion := "2.10.4", - organization in ThisBuild := "org.example", - version in ThisBuild := "1.0-SNAPSHOT", + ThisBuild / organization := "org.example", + ThisBuild / version := "1.0-SNAPSHOT", resolvers += Resolver.file("old-local", file(sys.props("user.home") + "/.ivy2/local"))(Resolver.ivyStylePatterns) ) diff --git a/sbt-app/src/sbt-test/lm-coursier/api-url/build.sbt b/sbt-app/src/sbt-test/lm-coursier/api-url/build.sbt new file mode 100644 index 000000000..fd71f923a --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/api-url/build.sbt @@ -0,0 +1,24 @@ + +lazy val b = project + .settings( + apiURL := Some(url(s"http://example.org/b")), + publishMavenStyle := false, + publishTo := Some(bResolver.value), + organization := "b", + version := "0.1.0-SNAPSHOT" + ) + +lazy val 
bResolver = Def.setting { + val dir = (ThisBuild / baseDirectory).value / "b-repo" + Resolver.file("b-resolver", dir)(Resolver.defaultIvyPatterns) +} + +lazy val check = taskKey[Unit]("") + +check := { + import java.nio.file._ + val f = (ThisBuild / baseDirectory).value / "b-repo/b/b_3/0.1.0-SNAPSHOT/ivys/ivy.xml" + assert(f.exists(), s"missing $f") + val content = new String(Files.readAllBytes(f.toPath), "UTF-8") + assert(content.contains("""e:info.apiURL="http://example.org/b"""")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/api-url/test b/sbt-app/src/sbt-test/lm-coursier/api-url/test new file mode 100644 index 000000000..56d65fd7e --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/api-url/test @@ -0,0 +1,3 @@ +$ delete b-repo +> b/publish +> check diff --git a/sbt-app/src/sbt-test/lm-coursier/caller/build.sbt b/sbt-app/src/sbt-test/lm-coursier/caller/build.sbt new file mode 100644 index 000000000..dd2e96a37 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/caller/build.sbt @@ -0,0 +1,39 @@ +lazy val check = taskKey[Unit]("") + +ThisBuild / scalaVersion := "2.12.8" +ThisBuild / organization := "com.example" + +lazy val app = (project in file("app")) + .dependsOn(util) + .settings( + name := "app", + libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.3", + check := { + val ur = updateFull.value + val cr = ur.configuration(Compile).get + // configuration report must include a module report for subproject dependency + val coreReport = cr.modules.find(m => + m.module.name == "core_2.12" + ).getOrElse(sys.error("report for core is missing")) + assert(coreReport.callers.exists(c => c.caller.name == "util_2.12"), + s"caller on core is missing util: ${coreReport.callers}") + + // configuration report must include a module report for library dependency + val shapelessReport = cr.modules.find(m => + m.module.name == "shapeless_2.12" + ).getOrElse(sys.error("report for shapeless is missing")) + assert(shapelessReport.callers.exists(c => c.caller.name == "app_2.12"), + s"caller on shapeless is missing self module (app): ${shapelessReport.callers}") + } + ) + +lazy val util = (project in file("util")) + .dependsOn(core) + .settings( + name := "util" + ) + +lazy val core = (project in file("core")) + .settings( + name := "core" + ) diff --git a/sbt-app/src/sbt-test/lm-coursier/caller/test b/sbt-app/src/sbt-test/lm-coursier/caller/test new file mode 100644 index 000000000..15675b169 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/caller/test @@ -0,0 +1 @@ +> check diff --git a/sbt-app/src/sbt-test/lm-coursier/classifiers/build.sbt b/sbt-app/src/sbt-test/lm-coursier/classifiers/build.sbt new file mode 100644 index 000000000..5948041f3 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/classifiers/build.sbt @@ -0,0 +1,2 @@ +scalaVersion := "2.12.8" +libraryDependencies += "org.jclouds.api" % "nova" % "1.5.9" classifier "tests" diff --git a/sbt-app/src/sbt-test/lm-coursier/classifiers/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/classifiers/src/main/scala/Main.scala new file mode 100644 index 000000000..b1a40797d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/classifiers/src/main/scala/Main.scala @@ -0,0 +1,18 @@ +import java.io.File +import java.nio.file.Files + +import scala.util.Try + +object Main extends App { + + def classFound(clsName: String) = Try( + Thread.currentThread() + .getContextClassLoader() + .loadClass(clsName) + ).toOption.nonEmpty + + val name = "org.jclouds.openstack.nova.functions.ParseServerFromJsonResponseTest" + val classifierTest = 
classFound(name) + + assert(classifierTest, s"Couldn't find $name") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/classifiers/test b/sbt-app/src/sbt-test/lm-coursier/classifiers/test new file mode 100644 index 000000000..e0f9f8b3c --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/classifiers/test @@ -0,0 +1 @@ +> runBlock diff --git a/sbt-app/src/sbt-test/lm-coursier/clean/build.sbt b/sbt-app/src/sbt-test/lm-coursier/clean/build.sbt new file mode 100644 index 000000000..2913d078c --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/clean/build.sbt @@ -0,0 +1,13 @@ +scalaVersion := "2.12.8" + +val checkEmpty = TaskKey[Unit]("checkEmpty") + +checkEmpty := { + assert(coursier.Helper.checkEmpty()) +} + +val checkNotEmpty = TaskKey[Unit]("checkNotEmpty") + +checkNotEmpty := { + assert(!coursier.Helper.checkEmpty()) +} \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/clean/pending b/sbt-app/src/sbt-test/lm-coursier/clean/pending new file mode 100644 index 000000000..c9d15d3a9 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/clean/pending @@ -0,0 +1,12 @@ +## TODO fix +> clean +> checkEmpty +> update +> checkNotEmpty +> clean +> checkEmpty + +> update +> checkNotEmpty +> reload +> checkEmpty \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/clean/project/Helper.scala b/sbt-app/src/sbt-test/lm-coursier/clean/project/Helper.scala new file mode 100644 index 000000000..308434a75 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/clean/project/Helper.scala @@ -0,0 +1,8 @@ +package coursier + +object Helper { + + def checkEmpty(): Boolean = + lmcoursier.internal.SbtCoursierCache.default.isEmpty + +} diff --git a/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/build.sbt b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/build.sbt new file mode 100644 index 000000000..a764c1f42 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/build.sbt @@ -0,0 +1,5 @@ +name := "config-deps-resolution" +libraryDependencies ++= Seq( + "org.slf4j" % "slf4j-api" % "1.7.2", + "ch.qos.logback" % "logback-classic" % "1.1.1" +) diff --git a/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/project/src/main/scala/sbt/MyPlugin.scala b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/project/src/main/scala/sbt/MyPlugin.scala new file mode 100644 index 000000000..816e06f4f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/project/src/main/scala/sbt/MyPlugin.scala @@ -0,0 +1,19 @@ +package sbt + +import sbt._ +import Keys._ + +object MyPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + val FooConfig = config("foo") + + override def projectSettings = Seq[Setting[_]]( + libraryDependencies ++= Seq( + "org.slf4j" % "slf4j-api" % "1.7.0", + "ch.qos.logback" % "logback-classic" % "1.1.7" + ).map(_ % FooConfig), + ivyConfigurations += FooConfig + ) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/src/main/scala/App.scala b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/src/main/scala/App.scala new file mode 100644 index 000000000..4949c6dac --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/src/main/scala/App.scala @@ -0,0 +1,6 @@ +import ch.qos.logback.classic.BasicConfigurator +import ch.qos.logback.classic.LoggerContext + +object GcMetricsApp extends App { + BasicConfigurator.configure(new LoggerContext()) +} diff --git 
a/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/test b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/test new file mode 100644 index 000000000..8a33e63a9 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/config-deps-resolution/test @@ -0,0 +1,2 @@ +> compile + diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/build.sbt b/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/build.sbt new file mode 100644 index 000000000..e467b3272 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/build.sbt @@ -0,0 +1,20 @@ +import java.nio.file.Files + +scalaVersion := "2.12.8" + +resolvers += "authenticated" at sys.env("TEST_REPOSITORY") + +csrExtraCredentials += { + val content = + s"""foo.host=${uri(sys.env("TEST_REPOSITORY")).getHost} + |foo.username=user + |foo.password=pass + |foo.auto=true + |foo.https-only=false + """.stripMargin + val dest = (ThisBuild / baseDirectory).value / "project" / "target" / "cred" + Files.write(dest.toPath, content.getBytes("UTF-8")) + lmcoursier.credentials.FileCredentials(dest.toString) +} + +libraryDependencies += "com.abc" % "test" % "0.1" diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/src/main/scala/Main.scala new file mode 100644 index 000000000..86ae9e9e3 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/src/main/scala/Main.scala @@ -0,0 +1 @@ +object Main extends App \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/test b/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/test new file mode 100644 index 000000000..103bd8d2f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-from-file/test @@ -0,0 +1 @@ +> update diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-global/build.sbt b/sbt-app/src/sbt-test/lm-coursier/credentials-global/build.sbt new file mode 100644 index 000000000..8036b14ae --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-global/build.sbt @@ -0,0 +1,5 @@ +scalaVersion := "2.12.8" + +resolvers += "authenticated" at sys.env("TEST_REPOSITORY") + +libraryDependencies += "com.abc" % "test" % "0.1" diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-global/global/build.sbt b/sbt-app/src/sbt-test/lm-coursier/credentials-global/global/build.sbt new file mode 100644 index 000000000..ab0dd6055 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-global/global/build.sbt @@ -0,0 +1,5 @@ +csrExtraCredentials += lmcoursier.credentials.DirectCredentials( + uri(sys.env("TEST_REPOSITORY")).getHost, + sys.env("TEST_REPOSITORY_USER"), + sys.env("TEST_REPOSITORY_PASSWORD") +).withHttpsOnly(false).withMatchHost(true) diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-global/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/credentials-global/src/main/scala/Main.scala new file mode 100644 index 000000000..86ae9e9e3 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-global/src/main/scala/Main.scala @@ -0,0 +1 @@ +object Main extends App \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-global/test b/sbt-app/src/sbt-test/lm-coursier/credentials-global/test new file mode 100644 index 000000000..103bd8d2f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-global/test @@ -0,0 +1 @@ +> update diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/build.sbt 
b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/build.sbt new file mode 100644 index 000000000..8036b14ae --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/build.sbt @@ -0,0 +1,5 @@ +scalaVersion := "2.12.8" + +resolvers += "authenticated" at sys.env("TEST_REPOSITORY") + +libraryDependencies += "com.abc" % "test" % "0.1" diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/global/build.sbt b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/global/build.sbt new file mode 100644 index 000000000..7c17ab948 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/global/build.sbt @@ -0,0 +1,6 @@ +credentials += Credentials( + "", + sys.env("TEST_REPOSITORY_HOST"), + sys.env("TEST_REPOSITORY_USER"), + sys.env("TEST_REPOSITORY_PASSWORD") +) diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/src/main/scala/Main.scala new file mode 100644 index 000000000..86ae9e9e3 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/src/main/scala/Main.scala @@ -0,0 +1 @@ +object Main extends App \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/test b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/test new file mode 100644 index 000000000..103bd8d2f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt-global/test @@ -0,0 +1 @@ +> update diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/build.sbt b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/build.sbt new file mode 100644 index 000000000..3781552ed --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/build.sbt @@ -0,0 +1,12 @@ +scalaVersion := "2.12.8" + +resolvers += "authenticated" at sys.env("TEST_REPOSITORY") + +credentials += Credentials( + "", + sys.env("TEST_REPOSITORY_HOST"), + sys.env("TEST_REPOSITORY_USER"), + sys.env("TEST_REPOSITORY_PASSWORD") +) + +libraryDependencies += "com.abc" % "test" % "0.1" diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/src/main/scala/Main.scala new file mode 100644 index 000000000..86ae9e9e3 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/src/main/scala/Main.scala @@ -0,0 +1 @@ +object Main extends App \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/test b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/test new file mode 100644 index 000000000..103bd8d2f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/credentials-sbt/test @@ -0,0 +1 @@ +> update diff --git a/sbt-app/src/sbt-test/lm-coursier/evicted/build.sbt b/sbt-app/src/sbt-test/lm-coursier/evicted/build.sbt new file mode 100644 index 000000000..3500ca36c --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/evicted/build.sbt @@ -0,0 +1,56 @@ + +// examples adapted from https://github.com/coursier/sbt-coursier/pull/75#issuecomment-497128870 + +lazy val a = project + .settings( + scalaVersion := "2.12.8", + libraryDependencies ++= Seq( + "org.typelevel" %% "cats-effect" % "1.3.1", + "org.typelevel" %% "cats-core" % "1.5.0" + ) +) + +lazy val b = project + .settings( + scalaVersion := "2.12.8", + libraryDependencies ++= Seq( + "org.slf4s" %% "slf4s-api" % "1.7.25", // depends on org.slf4j:slf4j-api:1.7.25 + "ch.qos.logback" % "logback-classic" % "1.1.2" // depends on org.slf4j:slf4j-api:1.7.6 + ) +) + +lazy val c = 
project + .settings( + scalaVersion := "2.12.8", + libraryDependencies ++= Seq( + "org.slf4s" %% "slf4s-api" % "1.7.25", // depends on org.slf4j:slf4j-api:1.7.25 + "ch.qos.logback" % "logback-classic" % "1.1.2" // depends on org.slf4j:slf4j-api:1.7.6 + ), + dependencyOverrides += "org.slf4j" % "slf4j-api" % "1.7.30" +) + +lazy val check = taskKey[Unit]("") + +Global / check := { + inline def doCheck(project: Project, evictionsExpected: Boolean = true): Unit = { + val report = (project / updateFull).value + val compileReport = report + .configurations + .find(_.configuration.name == "compile") + .getOrElse { + sys.error("compile report not found") + } + + val foundEvictions = compileReport.details.exists(_.modules.exists(_.evicted)) + if (foundEvictions != evictionsExpected) + compileReport.details.foreach(println) + assert( + foundEvictions == evictionsExpected, + if evictionsExpected then s"no evictions in ${project.id}" else s"evictions in ${project.id}" + ) + } + + doCheck(a) + doCheck(b) + doCheck(c, evictionsExpected = false) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/evicted/test b/sbt-app/src/sbt-test/lm-coursier/evicted/test new file mode 100644 index 000000000..454ddff24 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/evicted/test @@ -0,0 +1,2 @@ +> evicted +> check diff --git a/sbt-app/src/sbt-test/lm-coursier/from-no-head/build.sbt b/sbt-app/src/sbt-test/lm-coursier/from-no-head/build.sbt new file mode 100644 index 000000000..fd8b72726 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from-no-head/build.sbt @@ -0,0 +1,3 @@ +scalaVersion := "2.12.20" + +libraryDependencies += "ccl.northwestern.edu" % "netlogo" % "5.3.1" % "provided" from s"https://github.com/NetLogo/NetLogo/releases/download/5.3.1/NetLogo.jar" diff --git a/sbt-app/src/sbt-test/lm-coursier/from-no-head/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/from-no-head/src/main/scala/Main.scala new file mode 100644 index 000000000..d75f66eaf --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from-no-head/src/main/scala/Main.scala @@ -0,0 +1,11 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + // Not using directly the NetLogo 5.x lib, which seems to depend on Scala 2.9 + // Can't find a way to check that NetLogo.jar is in the classpath + // These don't work, even with fork := true: + // assert(Thread.currentThread.getContextClassLoader.getResource("org/nlogo/nvm/Task.class") != null) + // Thread.currentThread.getContextClassLoader.getResource("org/nlogo/nvm/Task.class") + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/from-no-head/test b/sbt-app/src/sbt-test/lm-coursier/from-no-head/test new file mode 100644 index 000000000..5efe58689 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from-no-head/test @@ -0,0 +1,3 @@ +$ delete output +> runBlock +$ exists output diff --git a/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/build.sbt b/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/build.sbt new file mode 100644 index 000000000..c00f02d93 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/build.sbt @@ -0,0 +1,7 @@ +scalaVersion := "2.12.20" + +// keeping the default cache policies here + +libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.2" from { + "https://repo1.maven.org/maven2/com/chuusai/shapeless_2.12/2.3.242/shapeless_2.12-2.3.242.jar" +} diff --git a/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/src/main/scala/Main.scala 
b/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/src/main/scala/Main.scala new file mode 100644 index 000000000..c9401ff3e --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/src/main/scala/Main.scala @@ -0,0 +1,13 @@ +import java.io.File +import java.nio.file.Files + +import shapeless._ + +object Main extends App { + case class CC(s: String) + val cc = CC("OK") + val l = Generic[CC].to(cc) + val msg = l.head + + Files.write(new File("output").toPath, msg.getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/test b/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/test new file mode 100644 index 000000000..5efe58689 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from-wrong-url/test @@ -0,0 +1,3 @@ +$ delete output +> runBlock +$ exists output diff --git a/sbt-app/src/sbt-test/lm-coursier/from/build.sbt b/sbt-app/src/sbt-test/lm-coursier/from/build.sbt new file mode 100644 index 000000000..8da4d81b0 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from/build.sbt @@ -0,0 +1,31 @@ +scalaVersion := "2.12.20" + +libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.3" from { + + val f = file("shapeless_2.12-2.3.3.jar") + + if (!f.exists()) { + val url0 = "https://repo1.maven.org/maven2/com/chuusai/shapeless_2.12/2.3.3/shapeless_2.12-2.3.3.jar" + + sLog.value.warn(s"Fetching $url0") + + val url = new java.net.URL(url0) + + val is = url.openStream() + val os = new java.io.FileOutputStream(f) + + var read = -1 + val b = Array.fill[Byte](16*1024)(0) + while ({ + read = is.read(b) + read >= 0 + }) { + os.write(b, 0, read) + } + + is.close() + os.close() + } + + f.toURI.toString +} diff --git a/sbt-app/src/sbt-test/lm-coursier/from/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/from/src/main/scala/Main.scala new file mode 100644 index 000000000..c9401ff3e --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from/src/main/scala/Main.scala @@ -0,0 +1,13 @@ +import java.io.File +import java.nio.file.Files + +import shapeless._ + +object Main extends App { + case class CC(s: String) + val cc = CC("OK") + val l = Generic[CC].to(cc) + val msg = l.head + + Files.write(new File("output").toPath, msg.getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/from/test b/sbt-app/src/sbt-test/lm-coursier/from/test new file mode 100644 index 000000000..ced2d6b19 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/from/test @@ -0,0 +1,4 @@ +$ delete output +> runBlock +$ exists output +$ delete shapeless_2.12-2.3.3.jar diff --git a/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/a/src/main/scala/A.scala b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/a/src/main/scala/A.scala new file mode 100644 index 000000000..954405774 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/a/src/main/scala/A.scala @@ -0,0 +1,6 @@ + +case class A(msg: String) + +object A { + def default = A("OK") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/b/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/b/src/main/scala/Main.scala new file mode 100644 index 000000000..5d23b77b8 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/b/src/main/scala/Main.scala @@ -0,0 +1,9 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + + // TODO Use some jvm-repr stuff as a test + + Files.write(new File("output").toPath, A.default.msg.getBytes("UTF-8")) +} diff --git
a/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/build.sbt b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/build.sbt new file mode 100644 index 000000000..eaa38b9b5 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/build.sbt @@ -0,0 +1,24 @@ + +lazy val a = project + .settings(sharedSettings) + .settings( + resolvers += "Jitpack Repo" at "https://jitpack.io" + ) + +lazy val b = project + .dependsOn(a) + .settings(sharedSettings) + .settings( + // resolver added in inter-project dependency only - should still be fine + libraryDependencies += "com.github.jupyter" % "jvm-repr" % "0.3.0" + ) + +lazy val root = project + .in(file(".")) + .aggregate(a, b) + .settings(sharedSettings) + + +lazy val sharedSettings = Seq( + scalaVersion := "2.12.8" +) diff --git a/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/test b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/test new file mode 100644 index 000000000..ea53e1abb --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-resolvers/test @@ -0,0 +1,3 @@ +$ delete output +> b/run +$ exists output diff --git a/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/build.sbt b/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/build.sbt new file mode 100644 index 000000000..a82f1b0fe --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/build.sbt @@ -0,0 +1,5 @@ +scalaVersion := "2.12.9" +crossScalaVersions := Seq("2.12.8", "2.12.9") // shouldn't be the same version as coursier - putting two versions here to be sure one of them isn't +organization := "org.scala-lang.modules" +name := "scala-xml" +version := "2.0.0-SNAPSHOT" diff --git a/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/src/main/scala/Main.scala new file mode 100644 index 000000000..8c165902f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/src/main/scala/Main.scala @@ -0,0 +1,13 @@ +import java.io.File +import java.nio.file.Files + + +/** + * Azertyuiopqsdfghjklmwxcvbn + * + * @author A + * @param may not be `'''null'''`!!! 
+ */ +object Main extends App { + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/test b/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/test new file mode 100644 index 000000000..d7c624ab5 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/inter-project-scala-tool/test @@ -0,0 +1 @@ +> +doc diff --git a/sbt-app/src/sbt-test/lm-coursier/logger/build.sbt b/sbt-app/src/sbt-test/lm-coursier/logger/build.sbt new file mode 100644 index 000000000..46eeca01f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/logger/build.sbt @@ -0,0 +1,73 @@ +val logFile = settingKey[File]("") + +// Arbitrary dependency with no transitive dependencies +libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.25" +// We want to control when the cache gets a hit +csrCacheDirectory := baseDirectory.value / "cache" +logFile := baseDirectory.value / "log" + +csrLogger := { + var logStream: java.io.PrintStream = null + def log(msg: String): Unit = { + println(msg) + logStream.println(msg) + } + val cacheFile = csrCacheDirectory.value + + val logger = new lmcoursier.definitions.CacheLogger { + override def init(sizeHint: Option[Int]): Unit = { + logStream = new java.io.PrintStream( + new java.io.FileOutputStream(logFile.value, true) + ) + log("init") + } + override def foundLocally(url: String): Unit = { + log(s"found $url") + } + override def downloadingArtifact(url: String): Unit = { + log(s"downloading $url") + } + override def downloadedArtifact(url: String, success: Boolean): Unit = { + log(s"downloaded $url: $success") + } + override def stop(): Unit = { + log("stop") + logStream.flush() + logStream.close() + logStream = null + } + } + + Some(logger) +} + +TaskKey[Unit]("checkDownloaded") := { + val log = IO.readLines(logFile.value) + if (log.head != "init") { + sys.error(s"log started with '${log.head}', not init") + } + if (log.last != "stop") { + sys.error(s"log ended with '${log.last}', not stop") + } + val url = "https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar" + val downloadedMsg = s"downloaded $url: true" + val downloadingMsgStart = s"downloading $url" + if (!log.contains(downloadedMsg)) + sys.error(s"log doesn't contain '$downloadedMsg'") + if (!log.exists(_.startsWith(downloadingMsgStart))) + sys.error(s"log doesn't contain line starting with '$downloadingMsgStart'") +} + +TaskKey[Unit]("checkFound") := { + val log = IO.readLines(logFile.value) + if (log.head != "init") { + sys.error(s"log started with '${log.head}', not init") + } + if (log.last != "stop") { + sys.error(s"log ended with '${log.last}', not stop") + } + val url = "https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar" + val msg = s"found $url" + if (!log.exists(_.startsWith(msg))) + sys.error(s"log doesn't contain line starting with '$msg'") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/logger/test b/sbt-app/src/sbt-test/lm-coursier/logger/test new file mode 100644 index 000000000..995210423 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/logger/test @@ -0,0 +1,7 @@ +$ delete cache +> update +> checkDownloaded +> reload +> clean +> update +> checkFound diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-compatible/build.sbt b/sbt-app/src/sbt-test/lm-coursier/maven-compatible/build.sbt new file mode 100644 index 000000000..6f6b423e6 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-compatible/build.sbt @@ -0,0 +1,18 @@ +scalaVersion := "2.12.8" + +resolvers += 
Resolver.url( + "jitpack", + new URL("https://jitpack.io") +)( + // patterns should be ignored - and the repo be considered a maven one - because + // isMavenCompatible is true + Patterns( + Resolver.ivyStylePatterns.ivyPatterns, + Resolver.ivyStylePatterns.artifactPatterns, + isMavenCompatible = true, + descriptorOptional = false, + skipConsistencyCheck = false + ) +) + +libraryDependencies += "com.github.jupyter" % "jvm-repr" % "0.3.0" diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-compatible/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/maven-compatible/src/main/scala/Main.scala new file mode 100644 index 000000000..eedd88b98 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-compatible/src/main/scala/Main.scala @@ -0,0 +1,9 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + + // TODO Use some jvm-repr stuff + + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-compatible/test b/sbt-app/src/sbt-test/lm-coursier/maven-compatible/test new file mode 100644 index 000000000..5efe58689 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-compatible/test @@ -0,0 +1,3 @@ +$ delete output +> runBlock +$ exists output diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/build.sbt b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/build.sbt new file mode 100644 index 000000000..e29840e97 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/build.sbt @@ -0,0 +1,18 @@ +scalaVersion := "2.12.8" +enablePlugins(JavaAppPackaging) + +lazy val check = taskKey[Unit]("") + +check := { + val cmd = "target/universal/stage/bin/maven-plugin-classpath-type" + val cmd0 = + if (sys.props("os.name").toLowerCase(java.util.Locale.ROOT).contains("windows")) + cmd + ".bat" + else + cmd + val b = new ProcessBuilder(cmd0) + b.inheritIO() + val p = b.start() + val retCode = p.waitFor() + assert(retCode == 0, s"Command $cmd returned code $retCode") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/pending b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/pending new file mode 100644 index 000000000..bb563a74f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/pending @@ -0,0 +1,5 @@ +$ delete output +> stage +> check +$ exists output +$ delete output diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/project/plugins.sbt b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/project/plugins.sbt new file mode 100644 index 000000000..1f7bae5f9 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.25") diff --git a/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/src/main/scala/Main.scala new file mode 100644 index 000000000..61295349d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/maven-plugin-classpath-type/src/main/scala/Main.scala @@ -0,0 +1,6 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/build.sbt b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/build.sbt new file mode 100644 index 000000000..6dcc058c2 --- /dev/null +++ 
b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/build.sbt @@ -0,0 +1,2 @@ +scalaVersion := "2.12.2" +enablePlugins(ScalafmtPlugin) diff --git a/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/pending b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/pending new file mode 100644 index 000000000..97b42a4f4 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/pending @@ -0,0 +1 @@ +> scalafmt diff --git a/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/project/plugins.sbt b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/project/plugins.sbt new file mode 100644 index 000000000..4aa3881f4 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.15") diff --git a/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/src/main/scala/Main.scala new file mode 100644 index 000000000..61295349d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/neo-sbt-scalafmt/src/main/scala/Main.scala @@ -0,0 +1,6 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/build.sbt b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/build.sbt new file mode 100644 index 000000000..463cd25bb --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/build.sbt @@ -0,0 +1,8 @@ +scalaVersion := "2.13.2" +libraryDependencies ++= Seq( + "io.get-coursier" %% "coursier-core" % "2.0.0-RC6", + // depends on coursier-core 2.0.0-RC6-16 + "io.get-coursier" %% "coursier" % "2.0.0-RC6-16" % Test +) +Compile / mainClass := Some("Main") +Test / mainClass := Some("Test") diff --git a/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/src/main/scala/Main.scala new file mode 100644 index 000000000..370b4e4bf --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/src/main/scala/Main.scala @@ -0,0 +1,7 @@ +object Main { + def main(args: Array[String]): Unit = { + val version = coursier.util.Properties.version + val expected = "2.0.0-RC6" + assert(version == expected, s"version: $version, expected: $expected") + } +} diff --git a/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/src/test/scala/Test.scala b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/src/test/scala/Test.scala new file mode 100644 index 000000000..9d91a9318 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/src/test/scala/Test.scala @@ -0,0 +1,7 @@ +object Test { + def main(args: Array[String]): Unit = { + val version = coursier.util.Properties.version + val expected = "2.0.0-RC6-16" + assert(version == expected, s"version: $version, expected: $expected") + } +} diff --git a/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/test b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/test new file mode 100644 index 000000000..4ffe54064 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/per-config-resolution/test @@ -0,0 +1,2 @@ +> runBlock +> Test/run diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/README b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/README new file mode 100644 index 000000000..b0e252877 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/README @@ -0,0 +1,3 @@ +Only run with 
sbt 0.13 here - requires sbt-pgp-coursier with sbt 1.0 + +A few things from this test were grabbed from https://github.com/sbt/sbt-pgp/tree/431c0a50fc5e91b881ebb154f22cc6a0b209be10/pgp-plugin/src/sbt-test/sbt-pgp/skip diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/build.sbt b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/build.sbt new file mode 100644 index 000000000..dda7affb7 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/build.sbt @@ -0,0 +1,9 @@ +scalaVersion := "2.12.3" + +// seen in https://github.com/sbt/sbt-pgp/blob/431c0a50fc5e91b881ebb154f22cc6a0b209be10/pgp-plugin/src/sbt-test/sbt-pgp/skip/build.sbt +credentials.in(GlobalScope) := Seq(Credentials("", "pgp", "", "test password")) +pgpSecretRing := baseDirectory.value / "secring.pgp" +pgpPublicRing := baseDirectory.value / "pubring.pgp" + +// Workaround for https://github.com/sbt/sbt-pgp/issues/148 +publishTo := Some("dummy" at java.nio.file.Paths.get("").toAbsolutePath.toUri.toASCIIString) diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/pending b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/pending new file mode 100644 index 000000000..97640db3a --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/pending @@ -0,0 +1 @@ +> publishLocalSigned diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/project/plugins.sbt b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/project/plugins.sbt new file mode 100644 index 000000000..15a80bc35 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2-1") diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/pubring.pgp b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/pubring.pgp new file mode 100644 index 000000000..ee55772cc --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/pubring.pgp @@ -0,0 +1,17 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: BCPG v1.51 + +mQENBFVcLH0BCACmERkKh73zr+0nFl7/6WWP3QcNa3msWDlArP6GzQEOmFBNZEtv +1BRSKz1fGl4aZzpNPDh2rNcsjVhJPzwVLUzsJuWgBa2EdYafnsUjQ37V998iFhtZ +ZgbXieJ2yMvaNPPrPjgYBAZFrnQmH0oQDx+GWDd5Y3lQBx0heIJA5YjPN3meDzqZ +FC0qxSVeCPFYaxAFoMnNDEohpvGV3iiUTAyTgSDofxJqVV20oeMCJzC89VskCjSV +KIlCnKIRFA+WtND1AsJuIBM/x4JQkRF8xc/2tS5xGXSQllgwxHXEnhDDXcT0o6pY +Ni2xYSG0gcmwaBGvx8N1RjWcVms/iJ4ViayxABEBAAG0C1Rlc3QgPFRlc3Q+iQEc +BBMBAgAGBQJVXCx9AAoJEKotvJKVuRt6fQkH/1XhdHxOB5m92TasMkxCqjuK0EtQ +0HEIkoRC+Z9gOUqlhK58y+8DK9ZAEx/e09CDK1N+x/R3xQUKh2KbUpS7yiNzzqZO +OejWtmWw6gYNHy1COP9WKT8Qgr+z4d7GBTrGthEhvZftHyyqlN6JZJZ2ZcvE/Odz +QkMbd4aeRXi+KgzHp4fa1hLaA2BT7TT1trYH2L6OI6VbcEebnz+up738liHq9p/R +JrJP4JjLzWtEkL83pE6FHhXowsdaG8WKYfWRstX7RixHq5P13PW/iAZEAmJb08ER +mmguiicjExRk7UaIVwKPa5P1DAOgeK/ejCLsxrUKspIgt6JLQFbprGZHRdE= +=/1s8 +-----END PGP PUBLIC KEY BLOCK----- \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/secring.pgp b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/secring.pgp new file mode 100644 index 000000000..3123d8c7d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/secring.pgp @@ -0,0 +1,32 @@ +-----BEGIN PGP PRIVATE KEY BLOCK----- +Version: BCPG v1.51 + +lQO+BFVcLH0BCACmERkKh73zr+0nFl7/6WWP3QcNa3msWDlArP6GzQEOmFBNZEtv +1BRSKz1fGl4aZzpNPDh2rNcsjVhJPzwVLUzsJuWgBa2EdYafnsUjQ37V998iFhtZ +ZgbXieJ2yMvaNPPrPjgYBAZFrnQmH0oQDx+GWDd5Y3lQBx0heIJA5YjPN3meDzqZ +FC0qxSVeCPFYaxAFoMnNDEohpvGV3iiUTAyTgSDofxJqVV20oeMCJzC89VskCjSV 
+KIlCnKIRFA+WtND1AsJuIBM/x4JQkRF8xc/2tS5xGXSQllgwxHXEnhDDXcT0o6pY +Ni2xYSG0gcmwaBGvx8N1RjWcVms/iJ4ViayxABEBAAH+AwMCQJCBiVce8z5gAZr+ +L8W6NOs8orCIgLbCjKAHuKan9mZXtvOaml/8EDiBjLvwekUQj0OI6S29y5QsQpvn +lo8vXAsJnA/Q0olrAim2aZPOXVjQOYsOwExo4SAu7zXBQ3w3+jYsM5kHYPWICr3g +3LzhVUmDTHjein/Xa9X7M8bzAY0Esoabl1aSKZ9K15P2ss7noHXrKZPxRk3jb0XP +jHE2h5zTLLkdyXnZ74ILVYYEOjqe7P/+eWBL5TIan73ekNCKkgRBKI+pWB8Bc8sA +ww2WtlkpsogVItjZ7spk03HVykSr/hS2TnVoR1vX+C8bPLjvwksNORWELf9z32vA +ASgqoqeKMc+qHLPnZOrNIoCwUgBHLq/XL09E4Sav1TnqQt14Ya3oTiWQARMY+6jC +FGpHhKhx4XjrBjUyGWm8EzC/IPWIXSs9oeOxfSag7QNCf3F1fOjUOYkt32bTpEiM +X5sxrovkW/bh0U7thnIsHeklDx99U89F3J0K5svN66f7XplN3wYgwBJA/Fjymgj0 +zYN0cibnL1rp6zyMRSu5iDQRq1A9rFzsOQb4+gyNMUvlSM1ajA/sFvHsk0xFXdPG +ADOx9Tn3f4JN58ylGnMygtBmcS4f+lDVS0Q96lRuyFRTze/+JuhVTPNk2kClNp8y +98dc4UMawEyZ55EbxhVKx10jZ74Sswk8N15NhSZN5IKBUwQ1JWAoMnn0UDFeniBW +gjmD/82a0QzosJRkOEKqaCUK02FUgFNcB/6Aauj5Pm6vDehWnk+4Kz4f2QrqdD+Y +DquqcQ88gPj7jJnRX/+lMwKd73PeK3GfpvWCkRcliUs47LQ585uc03lArxpG2j74 +hG9Lxt/B545sqsYt2ViJ0hNBRgwfUdGy6NVef4F6JxVxkD32eavDDUBfXC/a5zRu +m7QLVGVzdCA8VGVzdD6JARwEEwECAAYFAlVcLH0ACgkQqi28kpW5G3p9CQf/VeF0 +fE4Hmb3ZNqwyTEKqO4rQS1DQcQiShEL5n2A5SqWErnzL7wMr1kATH97T0IMrU37H +9HfFBQqHYptSlLvKI3POpk456Na2ZbDqBg0fLUI4/1YpPxCCv7Ph3sYFOsa2ESG9 +l+0fLKqU3olklnZly8T853NCQxt3hp5FeL4qDMenh9rWEtoDYFPtNPW2tgfYvo4j +pVtwR5ufP66nvfyWIer2n9Emsk/gmMvNa0SQvzekToUeFejCx1obxYph9ZGy1ftG +LEerk/Xc9b+IBkQCYlvTwRGaaC6KJyMTFGTtRohXAo9rk/UMA6B4r96MIuzGtQqy +kiC3oktAVumsZkdF0Q== +=wGX/ +-----END PGP PRIVATE KEY BLOCK----- \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/src/main/scala/Foo.scala b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/src/main/scala/Foo.scala new file mode 100644 index 000000000..d3c853cc0 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-signed/src/main/scala/Foo.scala @@ -0,0 +1 @@ +object Foo diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-sources-javadoc-conf/build.sbt b/sbt-app/src/sbt-test/lm-coursier/publish-local-sources-javadoc-conf/build.sbt new file mode 100644 index 000000000..1fb518f79 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-sources-javadoc-conf/build.sbt @@ -0,0 +1,82 @@ + +val org = "io.get-coursier.scriptedtest" +val ver = "0.1.0-SNAPSHOT" + +lazy val foo = project + .settings( + shared + ) + +lazy val bar = project + .settings( + shared, + libraryDependencies += org %% "foo" % ver + ) + + +lazy val shared = Seq( + organization := org, + version := ver, + scalaVersion := "2.12.8", + confCheck := { + + val log = streams.value.log + + val updateReport = update.value + val updateClassifiersReport = updateClassifiers.value + + def artifacts(classifier: Option[String], useClassifiersReport: Boolean = false) = { + + val configReport = (if (useClassifiersReport) updateClassifiersReport else updateReport) + .configuration(Compile) + .getOrElse { + throw new Exception( + "Compile configuration not found in update report" + ) + } + + val artifacts = configReport + .modules + .flatMap(_.artifacts) + .collect { + case (a, _) if a.classifier == classifier => + a + } + + log.info( + s"Found ${artifacts.length} artifacts for config Compile / classifier $classifier" + + (if (useClassifiersReport) " in classifiers report" else "") + ) + for (a <- artifacts) + log.info(" " + a) + + artifacts + } + + val compileSourceArtifacts = artifacts(Some("sources")) + val sourceArtifacts = artifacts(Some("sources"), useClassifiersReport = true) + + val compileDocArtifacts = artifacts(Some("javadoc")) 
+ val docArtifacts = artifacts(Some("javadoc"), useClassifiersReport = true) + + assert( + compileSourceArtifacts.isEmpty, + "Expected no source artifact in main update report" + ) + assert( + sourceArtifacts.length == 2, + "Expected 2 source artifacts in classifier report" + ) + assert( + compileDocArtifacts.isEmpty, + "Expected no doc artifact in main update report" + ) + assert( + docArtifacts.length == 2, + "Expected 2 doc artifacts in classifier report" + ) + } +) + + +lazy val confCheck = TaskKey[Unit]("confCheck") diff --git a/sbt-app/src/sbt-test/lm-coursier/publish-local-sources-javadoc-conf/test b/sbt-app/src/sbt-test/lm-coursier/publish-local-sources-javadoc-conf/test new file mode 100644 index 000000000..6df8db773 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/publish-local-sources-javadoc-conf/test @@ -0,0 +1,2 @@ +> foo/publishLocal +> bar/confCheck diff --git a/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/build.sbt b/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/build.sbt new file mode 100644 index 000000000..9ba11f7b2 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/build.sbt @@ -0,0 +1,30 @@ + +lazy val noJbossInterceptorCheck = TaskKey[Unit]("noJbossInterceptorCheck") + +noJbossInterceptorCheck := { + + val log = streams.value.log + + val configReport = updateSbtClassifiers.value + .configuration(Default) + .getOrElse { + throw new Exception( + "compile configuration not found in update report" + ) + } + + val artifacts = configReport + .modules + .flatMap(_.artifacts) + .map(_._1) + + val jbossInterceptorArtifacts = artifacts + .filter { a => + a.name.contains("jboss-interceptor") + } + + for (a <- jbossInterceptorArtifacts) + log.error(s"Found jboss-interceptor artifact $a") + + assert(jbossInterceptorArtifacts.isEmpty) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/pending b/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/pending new file mode 100644 index 000000000..b06452c35 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/pending @@ -0,0 +1 @@ +> noJbossInterceptorCheck diff --git a/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/project/plugins.sbt b/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/project/plugins.sbt new file mode 100644 index 000000000..652a3b93b --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/sbt-assembly/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6") diff --git a/sbt-app/src/sbt-test/lm-coursier/scala-jars/build.sbt b/sbt-app/src/sbt-test/lm-coursier/scala-jars/build.sbt new file mode 100644 index 000000000..ed222e272 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/scala-jars/build.sbt @@ -0,0 +1 @@ +scalaVersion := appConfiguration.value.provider.scalaProvider.version diff --git a/sbt-app/src/sbt-test/lm-coursier/scala-jars/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/scala-jars/src/main/scala/Main.scala new file mode 100644 index 000000000..dfa70329b --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/scala-jars/src/main/scala/Main.scala @@ -0,0 +1,66 @@ +import java.io.File +import java.nio.file.Files + +import scala.collection.JavaConverters._ + +object Main extends App { + + val cp = new collection.mutable.ArrayBuffer[File] + + def buildCp(loader: ClassLoader): Unit = + if (loader != null) { + loader match { + case u: java.net.URLClassLoader => + cp ++= u.getURLs + .map(_.toURI) + .map(new File(_)) + case _ => + } + + buildCp(loader.getParent) + } + + buildCp(Thread.currentThread().getContextClassLoader) + + 
System.err.println("Classpath:") + for (f <- cp) + System.err.println(s" $f") + System.err.println() + + val sbtBase = new File(sys.props.getOrElse( + "sbt.global.base", + sys.props("user.home") + "/.sbt" + )) + val prefixes = Seq(new File(sbtBase, "boot").getAbsolutePath) ++ + Seq("coursier.sbt-launcher.dirs.scala-jars", "coursier.sbt-launcher.dirs.base", "user.dir") + .flatMap(sys.props.get(_)) + .map(new File(_).getAbsolutePath) + val home = new File(sys.props("user.home")).getAbsolutePath + + def notFromCoursierCache(name: String): Unit = { + val jars = cp.filter(_.getName.startsWith(name)).distinct + assert(jars.nonEmpty, s"Found no JARs for $name") + + for (jar <- jars) + assert( + !jar.getAbsolutePath.startsWith(home) || + !jar.getAbsolutePath.toLowerCase(java.util.Locale.ROOT).contains("coursier") || + prefixes.exists(jar.getAbsolutePath.startsWith), + s"JAR for $name ($jar) under $home and not under any of ${prefixes.mkString(", ")}" + ) + } + + val props = Thread.currentThread() + .getContextClassLoader + .getResources("library.properties") + .asScala + .toVector + .map(_.toString) + .sorted + .distinct // TODO should not need distinct + + notFromCoursierCache("scala-library") + assert(props.lengthCompare(1) == 0, s"Found several library.properties files in classpath: $props") + + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/scala-jars/test b/sbt-app/src/sbt-test/lm-coursier/scala-jars/test new file mode 100644 index 000000000..5efe58689 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/scala-jars/test @@ -0,0 +1,3 @@ +$ delete output +> runBlock +$ exists output diff --git a/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/build.sbt b/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/build.sbt new file mode 100644 index 000000000..b3ac5e630 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/build.sbt @@ -0,0 +1,40 @@ + +scalaVersion := appConfiguration.value.provider.scalaProvider.version + +lazy val updateClassifiersCheck = TaskKey[Unit]("updateClassifiersCheck") + +updateClassifiersCheck := { + + val configReport = updateClassifiers.value + .configuration(Compile) + .getOrElse { + throw new Exception( + "compile configuration not found in updateClassifiers report" + ) + } + + val scalaLibraryArtifacts = configReport + .modules + .collectFirst { + case moduleReport + if moduleReport.module.organization == "org.scala-lang" && + moduleReport.module.name == "scala-library" => + moduleReport.artifacts + } + .toSeq + .flatten + + def classifierArtifact(classifier: String) = + scalaLibraryArtifacts.collectFirst { + case (a, _) if a.classifier == Some(classifier) => a + } + + def ensureHasClassifierArtifact(classifier: String) = + assert( + classifierArtifact(classifier).nonEmpty, + s"scala-library $classifier not found" + ) + + ensureHasClassifierArtifact("javadoc") + ensureHasClassifierArtifact("sources") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/src/main/scala/Main.scala new file mode 100644 index 000000000..3eeff697e --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/src/main/scala/Main.scala @@ -0,0 +1,6 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} \ No newline at end of file diff --git 
a/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/test b/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/test new file mode 100644 index 000000000..39496485a --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/scala-sources-javadoc-jars/test @@ -0,0 +1,4 @@ +$ delete output +> runBlock +$ exists output +> updateClassifiersCheck \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/semver-reconciliation/build.sbt b/sbt-app/src/sbt-test/lm-coursier/semver-reconciliation/build.sbt new file mode 100644 index 000000000..bb287ebff --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/semver-reconciliation/build.sbt @@ -0,0 +1,33 @@ +import lmcoursier.definitions.* +import lmcoursier.syntax.* + +lazy val semver61 = project + .settings( + scalaVersion := "2.11.12", + libraryDependencies ++= Seq( + "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M11", + "io.argonaut" %% "argonaut" % "6.1" + ), + csrReconciliations += ModuleMatchers.all -> Reconciliation.SemVer + ) + +lazy val semver62 = project + .settings( + scalaVersion := "2.11.12", + libraryDependencies ++= Seq( + "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M11", + "io.argonaut" %% "argonaut" % "6.2" + ), + csrReconciliations += ModuleMatchers.all -> Reconciliation.SemVer + ) + +lazy val strict62 = project + .settings( + scalaVersion := "2.11.12", + libraryDependencies ++= Seq( + "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M11", + "io.argonaut" %% "argonaut" % "6.2" + ), + csrReconciliations += ModuleMatchers.all -> Reconciliation.Strict + ) + diff --git a/sbt-app/src/sbt-test/lm-coursier/semver-reconciliation/test b/sbt-app/src/sbt-test/lm-coursier/semver-reconciliation/test new file mode 100644 index 000000000..68500bae5 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/semver-reconciliation/test @@ -0,0 +1,3 @@ +-> semver61/update +> semver62/update +-> strict62/update diff --git a/sbt-app/src/sbt-test/lm-coursier/simple/build.sbt b/sbt-app/src/sbt-test/lm-coursier/simple/build.sbt new file mode 100644 index 000000000..5642f168f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/simple/build.sbt @@ -0,0 +1 @@ +scalaVersion := "2.12.8" diff --git a/sbt-app/src/sbt-test/lm-coursier/simple/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/simple/src/main/scala/Main.scala new file mode 100644 index 000000000..61295349d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/simple/src/main/scala/Main.scala @@ -0,0 +1,6 @@ +import java.io.File +import java.nio.file.Files + +object Main extends App { + Files.write(new File("output").toPath, "OK".getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/simple/test b/sbt-app/src/sbt-test/lm-coursier/simple/test new file mode 100644 index 000000000..7180419df --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/simple/test @@ -0,0 +1,4 @@ +$ delete output +> runBlock +$ exists output +> updateSbtClassifiers diff --git a/sbt-app/src/sbt-test/lm-coursier/strict-conflict-manager/build.sbt b/sbt-app/src/sbt-test/lm-coursier/strict-conflict-manager/build.sbt new file mode 100644 index 000000000..f3866860d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/strict-conflict-manager/build.sbt @@ -0,0 +1,37 @@ + +import lmcoursier.definitions.* +import lmcoursier.syntax.* + +lazy val shared = Seq( + scalaVersion := "2.12.8", + libraryDependencies ++= Seq( + "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M4", + "com.chuusai" %% "shapeless" % "2.3.3" + ), + 
conflictManager := ConflictManager.strict +) + +lazy val a = project + .settings(shared) + +lazy val b = project + .settings(shared) + .settings( + // strict cm should be fine if we force the conflicting module version + dependencyOverrides += "com.chuusai" %% "shapeless" % "2.3.3" + ) + +lazy val c = project + .settings( + // no shared settings here + scalaVersion := "2.12.11", + libraryDependencies ++= Seq( + "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0", + "com.chuusai" %% "shapeless" % "2.3.2" + ), + csrReconciliations += { + val sv = scalaBinaryVersion.value + ModuleMatchers.only("com.github.alexarchambault", s"argonaut-shapeless_6.2_$sv") -> Reconciliation.Strict + } + ) + diff --git a/sbt-app/src/sbt-test/lm-coursier/strict-conflict-manager/test b/sbt-app/src/sbt-test/lm-coursier/strict-conflict-manager/test new file mode 100644 index 000000000..f8c9f0b8d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/strict-conflict-manager/test @@ -0,0 +1,3 @@ +-> a/update +> b/update +> c/update diff --git a/sbt-app/src/sbt-test/lm-coursier/tests-classifier/a/src/main/scala/a/A.scala b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/a/src/main/scala/a/A.scala new file mode 100644 index 000000000..fb97bc543 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/a/src/main/scala/a/A.scala @@ -0,0 +1,3 @@ +package a + +object A { val value = 42 } diff --git a/sbt-app/src/sbt-test/lm-coursier/tests-classifier/a/src/test/scala/a/ATest.scala b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/a/src/test/scala/a/ATest.scala new file mode 100644 index 000000000..c752e81d6 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/a/src/test/scala/a/ATest.scala @@ -0,0 +1,3 @@ +package a + +object ATest { val value = 43 } diff --git a/sbt-app/src/sbt-test/lm-coursier/tests-classifier/b/src/main/scala/b/B.scala b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/b/src/main/scala/b/B.scala new file mode 100644 index 000000000..504fea13e --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/b/src/main/scala/b/B.scala @@ -0,0 +1,3 @@ +package b + +object B { val value = a.A.value } diff --git a/sbt-app/src/sbt-test/lm-coursier/tests-classifier/b/src/test/scala/b/BTest.scala b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/b/src/test/scala/b/BTest.scala new file mode 100644 index 000000000..a9f43cb9a --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/b/src/test/scala/b/BTest.scala @@ -0,0 +1,3 @@ +package a + +object BTest { val value = a.ATest.value } diff --git a/sbt-app/src/sbt-test/lm-coursier/tests-classifier/build.sbt b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/build.sbt new file mode 100644 index 000000000..bee4bee2b --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/build.sbt @@ -0,0 +1,24 @@ + +val org = "io.get-coursier.tests" +val nme = "coursier-test-a" +val ver = "0.1-SNAPSHOT" + +lazy val a = project + .settings( + organization := org, + name := nme, + Test / publishArtifact := true, + version := ver, + Compile / doc / sources := Seq.empty, // TODO fix doc task + Test / doc / sources := Seq.empty + ) + +lazy val b = project + .settings( + classpathTypes += "test-jar", + libraryDependencies ++= Seq( + org %% nme % ver, + org %% nme % ver % "test" classifier "tests" + ) + ) + diff --git a/sbt-app/src/sbt-test/lm-coursier/tests-classifier/test b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/test new file mode 100644 index 000000000..9e133c736 --- /dev/null +++ 
b/sbt-app/src/sbt-test/lm-coursier/tests-classifier/test @@ -0,0 +1,2 @@ +> a/publishLocal +> b/test diff --git a/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/build.sbt b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/build.sbt new file mode 100644 index 000000000..f4ea6551f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/build.sbt @@ -0,0 +1,47 @@ + +scalaVersion := appConfiguration.value.provider.scalaProvider.version + +lazy val updateSbtClassifiersCheck = TaskKey[Unit]("updateSbtClassifiersCheck") + +updateSbtClassifiersCheck := { + + val defaultModules = updateSbtClassifiers + .value + .configuration(Default) + .map(_.modules) + .getOrElse(Nil) + + val compileModules = updateSbtClassifiers + .value + .configuration(Compile) + .map(_.modules) + .getOrElse(Nil) + + def artifacts(org: String, name: String) = + (defaultModules ++ compileModules) + .map { m => + println(s"Found module $m") + m + } + .collect { + case moduleReport + if moduleReport.module.organization == org && + moduleReport.module.name == name => + moduleReport.artifacts + } + .toSeq + .flatten + + def ensureHasArtifact(orgName: (String, String)*) = + assert( + orgName.exists { + case (org, name) => + artifacts(org, name).exists(_._2.getName.endsWith("-sources.jar")) + }, + s"Any of $orgName not found" + ) + + ensureHasArtifact("org.scala-lang" -> "scala-library") + ensureHasArtifact("org.scala-lang.modules" -> s"scala-xml_${scalaBinaryVersion.value}") + ensureHasArtifact("org.scala-sbt" -> s"librarymanagement-coursier_${scalaBinaryVersion.value}") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/pending b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/pending new file mode 100644 index 000000000..f6351a4d9 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/pending @@ -0,0 +1 @@ +> updateSbtClassifiersCheck diff --git a/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/project/plugins.sbt b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/project/plugins.sbt new file mode 100644 index 000000000..d4b8a14f8 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers-2/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.0") \ No newline at end of file diff --git a/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers/build.sbt b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers/build.sbt new file mode 100644 index 000000000..f4ea6551f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers/build.sbt @@ -0,0 +1,47 @@ + +scalaVersion := appConfiguration.value.provider.scalaProvider.version + +lazy val updateSbtClassifiersCheck = TaskKey[Unit]("updateSbtClassifiersCheck") + +updateSbtClassifiersCheck := { + + val defaultModules = updateSbtClassifiers + .value + .configuration(Default) + .map(_.modules) + .getOrElse(Nil) + + val compileModules = updateSbtClassifiers + .value + .configuration(Compile) + .map(_.modules) + .getOrElse(Nil) + + def artifacts(org: String, name: String) = + (defaultModules ++ compileModules) + .map { m => + println(s"Found module $m") + m + } + .collect { + case moduleReport + if moduleReport.module.organization == org && + moduleReport.module.name == name => + moduleReport.artifacts + } + .toSeq + .flatten + + def ensureHasArtifact(orgName: (String, String)*) = + assert( + orgName.exists { + case (org, name) => + artifacts(org, name).exists(_._2.getName.endsWith("-sources.jar")) + }, + 
s"Any of $orgName not found" + ) + + ensureHasArtifact("org.scala-lang" -> "scala-library") + ensureHasArtifact("org.scala-lang.modules" -> s"scala-xml_${scalaBinaryVersion.value}") + ensureHasArtifact("org.scala-sbt" -> s"librarymanagement-coursier_${scalaBinaryVersion.value}") +} diff --git a/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers/test b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers/test new file mode 100644 index 000000000..f6351a4d9 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/update-sbt-classifiers/test @@ -0,0 +1 @@ +> updateSbtClassifiersCheck diff --git a/sbt-app/src/sbt-test/lm-coursier/version-reconciliation/build.sbt b/sbt-app/src/sbt-test/lm-coursier/version-reconciliation/build.sbt new file mode 100644 index 000000000..31fac9282 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/version-reconciliation/build.sbt @@ -0,0 +1,21 @@ +import lmcoursier.definitions.* +import lmcoursier.syntax.* + +lazy val shared = Seq( + scalaVersion := "2.12.8", + libraryDependencies ++= Seq( + "com.github.alexarchambault" %% "argonaut-shapeless_6.2" % "1.2.0-M4", + "com.chuusai" %% "shapeless" % "2.3.3" + ), + csrReconciliations += ModuleMatchers.all -> Reconciliation.Strict +) + +lazy val a = project + .settings(shared) + +lazy val b = project + .settings(shared) + .settings( + // strict cm should be fine if we force the conflicting module version + dependencyOverrides += "com.chuusai" %% "shapeless" % "2.3.3" + ) diff --git a/sbt-app/src/sbt-test/lm-coursier/version-reconciliation/test b/sbt-app/src/sbt-test/lm-coursier/version-reconciliation/test new file mode 100644 index 000000000..0de45305f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/version-reconciliation/test @@ -0,0 +1,2 @@ +-> a/update +> b/update diff --git a/sbt-app/src/sbt-test/lm-coursier/whitespace-resolver/build.sbt b/sbt-app/src/sbt-test/lm-coursier/whitespace-resolver/build.sbt new file mode 100644 index 000000000..97a9c838d --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/whitespace-resolver/build.sbt @@ -0,0 +1,3 @@ +scalaVersion := "2.12.8" + +resolvers += Resolver.file("space-repo", file(raw"/tmp/space the final frontier/repo"))(Resolver.ivyStylePatterns) diff --git a/sbt-app/src/sbt-test/lm-coursier/whitespace-resolver/test b/sbt-app/src/sbt-test/lm-coursier/whitespace-resolver/test new file mode 100644 index 000000000..103bd8d2f --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/whitespace-resolver/test @@ -0,0 +1 @@ +> update diff --git a/sbt-app/src/sbt-test/lm-coursier/zookeeper/build.sbt b/sbt-app/src/sbt-test/lm-coursier/zookeeper/build.sbt new file mode 100644 index 000000000..ec6609a8e --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/zookeeper/build.sbt @@ -0,0 +1,3 @@ +scalaVersion := "2.12.8" + +libraryDependencies += "org.apache.zookeeper" % "zookeeper" % "3.5.0-alpha" diff --git a/sbt-app/src/sbt-test/lm-coursier/zookeeper/src/main/scala/Main.scala b/sbt-app/src/sbt-test/lm-coursier/zookeeper/src/main/scala/Main.scala new file mode 100644 index 000000000..032874759 --- /dev/null +++ b/sbt-app/src/sbt-test/lm-coursier/zookeeper/src/main/scala/Main.scala @@ -0,0 +1,8 @@ +import java.io.File +import java.nio.file.Files + +import org.apache.zookeeper.ZooKeeper + +object Main extends App { + Files.write(new File("output").toPath, classOf[ZooKeeper].getSimpleName.getBytes("UTF-8")) +} diff --git a/sbt-app/src/sbt-test/lm-coursier/zookeeper/test b/sbt-app/src/sbt-test/lm-coursier/zookeeper/test new file mode 100644 index 000000000..5efe58689 --- /dev/null +++ 
b/sbt-app/src/sbt-test/lm-coursier/zookeeper/test @@ -0,0 +1,3 @@ +$ delete output +> runBlock +$ exists output diff --git a/script/example-tls.sh b/scripts/example-tls.sh similarity index 100% rename from script/example-tls.sh rename to scripts/example-tls.sh diff --git a/scripts/lm-coursier-ci.sh b/scripts/lm-coursier-ci.sh new file mode 100755 index 000000000..a03de4565 --- /dev/null +++ b/scripts/lm-coursier-ci.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +set -euvx + +# Force the use of coursier JNI stuff on Windows, which ought to work fine. +# JNI stuff is used to compute the default cache location on Windows (to get the AppData local +# dir, or something like this, via native Windows APIs). +# Without this, if ever coursier fails to load its JNI library on Windows, it falls back +# to using some powershell scripts (via dirs-dev/directories-jvm), which are often a problem, +# see sbt/sbt#5206. +# Enable this once sbt uses the upcoming lm-coursier-shaded version (> 2.0.10-1) +# export COURSIER_JNI="force" + +if [ "$(expr substr $(uname -s) 1 5 2>/dev/null)" == "Linux" ]; then + SBT="sbt" +elif [ "$(uname)" == "Darwin" ]; then + SBT="sbt" +else + SBT="sbt.bat" +fi + +# publishing locally to ensure shading runs fine +./lm-coursier/metadata/scripts/with-test-repo.sh $SBT \ + lmCoursierShadedPublishing/publishLocal \ + lmCoursier/test \ + 'scripted lm-coursier/*' diff --git a/zinc-lm-integration/src/main/scala/sbt/internal/inc/ZincComponentCompiler.scala b/zinc-lm-integration/src/main/scala/sbt/internal/inc/ZincComponentCompiler.scala index 05a9ea6f1..9d7291d7a 100644 --- a/zinc-lm-integration/src/main/scala/sbt/internal/inc/ZincComponentCompiler.scala +++ b/zinc-lm-integration/src/main/scala/sbt/internal/inc/ZincComponentCompiler.scala @@ -11,7 +11,7 @@ package internal package inc import sbt.internal.inc.classpath.ClasspathUtil -import sbt.internal.librarymanagement._ +import sbt.internal.librarymanagement.JsonUtil import sbt.internal.util.{ BufferedLogger, FullLogger } import sbt.io.IO import sbt.librarymanagement._