diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala
index 12d8cc4e1..5f556db45 100755
--- a/main/src/main/scala/sbt/Defaults.scala
+++ b/main/src/main/scala/sbt/Defaults.scala
@@ -10,7 +10,7 @@ package sbt
 import java.io.{ File, PrintWriter }
 import java.net.{ URI, URL, URLClassLoader }
 import java.util.Optional
-import java.util.concurrent.{ Callable, TimeUnit }
+import java.util.concurrent.TimeUnit
 
 import lmcoursier.CoursierDependencyResolution
 import lmcoursier.definitions.{ Configuration => CConfiguration }
@@ -79,7 +79,6 @@ import sbt.util.CacheImplicits._
 import sbt.util.InterfaceUtil.{ toJavaFunction => f1 }
 import sbt.util._
 import sjsonnew._
-import sjsonnew.shaded.scalajson.ast.unsafe.JValue
 
 import xsbti.CrossValue
 import xsbti.compile.{ AnalysisContents, IncOptions, IncToolOptionsUtil }
@@ -127,8 +126,7 @@ object Defaults extends BuildCommon {
   def nameForSrc(config: String) = if (config == Configurations.Compile.name) "main" else config
   def prefix(config: String) = if (config == Configurations.Compile.name) "" else config + "-"
 
-  def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock =
-    app.provider.scalaProvider.launcher.globalLock
+  def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = LibraryManagement.lock(app)
 
   def extractAnalysis[T](a: Attributed[T]): (T, CompileAnalysis) =
     (a.data, a.metadata get Keys.analysis getOrElse Analysis.Empty)
@@ -2409,41 +2407,7 @@ object Classpaths {
       },
       dependencyResolution := dependencyResolutionTask.value,
       csrConfiguration := LMCoursier.updateClassifierConfigurationTask.value,
-      updateClassifiers in TaskGlobal := (Def.task {
-        val s = streams.value
-        val is = ivySbt.value
-        val lm = dependencyResolution.value
-        val mod = classifiersModule.value
-        val updateConfig0 = updateConfiguration.value
-        val updateConfig = updateConfig0
-          .withMetadataDirectory(dependencyCacheDirectory.value)
-          .withArtifactFilter(
-            updateConfig0.artifactFilter.map(af => af.withInverted(!af.inverted))
-          )
-        val app = appConfiguration.value
-        val srcTypes = sourceArtifactTypes.value
-        val docTypes = docArtifactTypes.value
-        val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
-        val uwConfig = (unresolvedWarningConfiguration in update).value
-        withExcludes(out, mod.classifiers, lock(app)) { excludes =>
-          lm.updateClassifiers(
-            GetClassifiersConfiguration(
-              mod,
-              excludes.toVector,
-              updateConfig,
-              // scalaModule,
-              srcTypes.toVector,
-              docTypes.toVector
-            ),
-            uwConfig,
-            Vector.empty,
-            s.log
-          ) match {
-            case Left(_) => ???
-            case Right(ur) => ur
-          }
-        }
-      } tag (Tags.Update, Tags.Network)).value,
+      updateClassifiers in TaskGlobal := LibraryManagement.updateClassifiersTask.value,
     )
   ) ++ Seq(
     csrProject := CoursierInputsTasks.coursierProjectTask.value,
@@ -2747,38 +2711,7 @@ object Classpaths {
 
   def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)(
       f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport
-  ): UpdateReport = {
-    import sbt.librarymanagement.LibraryManagementCodec._
-    import sbt.util.FileBasedStore
-    implicit val isoString: sjsonnew.IsoString[JValue] =
-      sjsonnew.IsoString.iso(
-        sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
-        sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
-      )
-    val exclName = "exclude_classifiers"
-    val file = out / exclName
-    val store = new FileBasedStore(file, sjsonnew.support.scalajson.unsafe.Converter)
-    lock(
-      out / (exclName + ".lock"),
-      new Callable[UpdateReport] {
-        def call = {
-          implicit val midJsonKeyFmt: sjsonnew.JsonKeyFormat[ModuleID] = moduleIdJsonKeyFormat
-          val excludes =
-            store
-              .read[Map[ModuleID, Vector[ConfigRef]]](
-                default = Map.empty[ModuleID, Vector[ConfigRef]]
-              )
-          val report = f(excludes)
-          val allExcludes: Map[ModuleID, Vector[ConfigRef]] = excludes ++ IvyActions
-            .extractExcludes(report)
-            .mapValues(cs => cs.map(c => ConfigRef(c)).toVector)
-          store.write(allExcludes)
-          IvyActions
-            .addExcluded(report, classifiers.toVector, allExcludes.mapValues(_.map(_.name).toSet))
-        }
-      }
-    )
-  }
+  ): UpdateReport = LibraryManagement.withExcludes(out, classifiers, lock)(f)
 
   /**
    * Substitute unmanaged jars for managed jars when the major.minor parts of
diff --git a/main/src/main/scala/sbt/internal/LibraryManagement.scala b/main/src/main/scala/sbt/internal/LibraryManagement.scala
index 5952da063..fdb19376e 100644
--- a/main/src/main/scala/sbt/internal/LibraryManagement.scala
+++ b/main/src/main/scala/sbt/internal/LibraryManagement.scala
@@ -9,11 +9,16 @@ package sbt
 package internal
 
 import java.io.File
+import java.util.concurrent.Callable
 
 import sbt.internal.librarymanagement._
 import sbt.librarymanagement._
 import sbt.librarymanagement.syntax._
-import sbt.util.{ CacheStore, CacheStoreFactory, Logger, Tracked }
+import sbt.util.{ CacheStore, CacheStoreFactory, Logger, Tracked, Level }
 import sbt.io.IO
+import sbt.io.syntax._
+import sbt.Project.richInitializeTask
+import sbt.dsl.LinterLevel.Ignore
+import sjsonnew.JsonFormat
 
 private[sbt] object LibraryManagement {
@@ -174,4 +179,155 @@ private[sbt] object LibraryManagement {
     }
     ur.withConfigurations(crs2)
   }
+
+  val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] =
+    new sjsonnew.JsonKeyFormat[ModuleID] {
+      import LibraryManagementCodec._
+      import sjsonnew.support.scalajson.unsafe._
+      val moduleIdFormat: JsonFormat[ModuleID] = implicitly[JsonFormat[ModuleID]]
+      def write(key: ModuleID): String =
+        CompactPrinter(Converter.toJsonUnsafe(key)(moduleIdFormat))
+      def read(key: String): ModuleID =
+        Converter.fromJsonUnsafe[ModuleID](Parser.parseUnsafe(key))(moduleIdFormat)
+    }
+
+  /**
+   * Resolves and optionally retrieves classified artifacts, such as javadocs and sources,
+   * for dependency definitions, transitively.
+   */
+  def updateClassifiersTask: Def.Initialize[Task[UpdateReport]] =
+    (Def.task {
+      import Keys._
+      val s = streams.value
+      val cacheDirectory = streams.value.cacheDirectory
+      val csr = useCoursier.value
+      val lm = dependencyResolution.value
+
+      if (csr) {
+        // following copied from https://github.com/coursier/sbt-coursier/blob/9173406bb399879508aa481fed16efda72f55820/modules/sbt-lm-coursier/src/main/scala/sbt/hack/Foo.scala
+        val isRoot = executionRoots.value contains resolvedScoped.value
+        val shouldForce = isRoot || {
+          forceUpdatePeriod.value match {
+            case None => false
+            case Some(period) =>
+              val fullUpdateOutput = cacheDirectory / "out"
+              val now = System.currentTimeMillis
+              val diff = now - fullUpdateOutput.lastModified()
+              val elapsedDuration = new scala.concurrent.duration.FiniteDuration(
+                diff,
+                java.util.concurrent.TimeUnit.MILLISECONDS
+              )
+              fullUpdateOutput.exists() && elapsedDuration > period
+          }
+        }
+        val state0 = state.value
+        val updateConf = {
+          import UpdateLogging.{ Full, DownloadOnly, Default }
+          val conf = updateConfiguration.value
+          val maybeUpdateLevel = (logLevel in update).?.value
+          val conf1 = maybeUpdateLevel.orElse(state0.get(logLevel.key)) match {
+            case Some(Level.Debug) if conf.logging == Default => conf.withLogging(logging = Full)
+            case Some(_) if conf.logging == Default => conf.withLogging(logging = DownloadOnly)
+            case _ => conf
+          }
+          // logical clock is folded into UpdateConfiguration
+          conf1.withLogicalClock(LogicalClock(state0.hashCode))
+        }
+        val evictionOptions = Def.taskDyn {
+          if (executionRoots.value.exists(_.key == evicted.key))
+            Def.task(EvictionWarningOptions.empty)
+          else Def.task((evictionWarningOptions in update).value)
+        }.value
+        cachedUpdate(
+          // LM API
+          lm = lm,
+          // Ivy-free ModuleDescriptor
+          module = ivyModule.value,
+          s.cacheStoreFactory.sub(updateCacheName.value),
+          Reference.display(thisProjectRef.value),
+          updateConf,
+          identity,
+          skip = (skip in update).value,
+          force = shouldForce,
+          depsUpdated = transitiveUpdate.value.exists(!_.stats.cached),
+          uwConfig = (unresolvedWarningConfiguration in update).value,
+          ewo = evictionOptions,
+          mavenStyle = publishMavenStyle.value,
+          compatWarning = compatibilityWarningOptions.value,
+          includeCallers = false,
+          includeDetails = false,
+          log = s.log
+        )
+      } else {
+        val is = ivySbt.value
+        val mod = classifiersModule.value
+        val updateConfig0 = updateConfiguration.value
+        lazy val updateConfig = updateConfig0
+          .withMetadataDirectory(dependencyCacheDirectory.value)
+          .withArtifactFilter(
+            updateConfig0.artifactFilter.map(af => af.withInverted(!af.inverted))
+          )
+        val app = appConfiguration.value
+        val srcTypes = sourceArtifactTypes.value
+        val docTypes = docArtifactTypes.value
+        val uwConfig = (unresolvedWarningConfiguration in update).value
+        val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
+        withExcludes(out, mod.classifiers, lock(app)) { excludes =>
+          lm.updateClassifiers(
+            GetClassifiersConfiguration(
+              mod,
+              excludes.toVector,
+              updateConfig,
+              srcTypes.toVector,
+              docTypes.toVector
+            ),
+            uwConfig,
+            Vector.empty,
+            s.log
+          ) match {
+            case Left(_) => ???
+            case Right(ur) => ur
+          }
+        }
+      }
+    } tag (Tags.Update, Tags.Network))
+
+  def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)(
+      f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport
+  ): UpdateReport = {
+    import sjsonnew.shaded.scalajson.ast.unsafe.JValue
+    import sbt.librarymanagement.LibraryManagementCodec._
+    import sbt.util.FileBasedStore
+    implicit val isoString: sjsonnew.IsoString[JValue] =
+      sjsonnew.IsoString.iso(
+        sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
+        sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
+      )
+    val exclName = "exclude_classifiers"
+    val file = out / exclName
+    val store = new FileBasedStore(file, sjsonnew.support.scalajson.unsafe.Converter)
+    lock(
+      out / (exclName + ".lock"),
+      new Callable[UpdateReport] {
+        def call = {
+          implicit val midJsonKeyFmt: sjsonnew.JsonKeyFormat[ModuleID] = moduleIdJsonKeyFormat
+          val excludes =
+            store
+              .read[Map[ModuleID, Vector[ConfigRef]]](
+                default = Map.empty[ModuleID, Vector[ConfigRef]]
+              )
+          val report = f(excludes)
+          val allExcludes: Map[ModuleID, Vector[ConfigRef]] = excludes ++ IvyActions
+            .extractExcludes(report)
+            .mapValues(cs => cs.map(c => ConfigRef(c)).toVector)
+          store.write(allExcludes)
+          IvyActions
+            .addExcluded(report, classifiers.toVector, allExcludes.mapValues(_.map(_.name).toSet))
+        }
+      }
+    )
+  }
+
+  def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock =
+    app.provider.scalaProvider.launcher.globalLock
 }
diff --git a/sbt/src/sbt-test/dependency-management/sources-transitive-classifiers/build.sbt b/sbt/src/sbt-test/dependency-management/sources-transitive-classifiers/build.sbt
new file mode 100644
index 000000000..91f35d197
--- /dev/null
+++ b/sbt/src/sbt-test/dependency-management/sources-transitive-classifiers/build.sbt
@@ -0,0 +1,31 @@
+ThisBuild / useCoursier := false
+
+lazy val root = (project in file("."))
+  .settings(
+    libraryDependencies += "net.liftweb" % "lift-webkit" % "1.0" intransitive(),
+    libraryDependencies += "org.scalacheck" % "scalacheck" % "1.5" intransitive(),
+    autoScalaLibrary := false,
+    managedScalaInstance := false,
+    transitiveClassifiers := Seq("sources"),
+    TaskKey[Unit]("checkSources") := (updateClassifiers map checkSources).value,
+    TaskKey[Unit]("checkBinaries") := (update map checkBinaries).value
+  )
+
+def getSources(report: UpdateReport) = report.matching(artifactFilter(`classifier` = "sources") )
+def checkSources(report: UpdateReport): Unit =
+  {
+    val srcs = getSources(report)
+    if(srcs.isEmpty)
+      sys.error(s"No sources retrieved\n\n$report")
+    else if(srcs.size != 2)
+      sys.error("Incorrect sources retrieved:\n\t" + srcs.mkString("\n\t"))
+    else
+      ()
+  }
+
+def checkBinaries(report: UpdateReport): Unit =
+  {
+    val srcs = getSources(report)
+    if(srcs.nonEmpty) sys.error("Sources retrieved:\n\t" + srcs.mkString("\n\t"))
+    else ()
+  }
diff --git a/sbt/src/sbt-test/dependency-management/sources-transitive-classifiers/test b/sbt/src/sbt-test/dependency-management/sources-transitive-classifiers/test
new file mode 100644
index 000000000..ca9281f91
--- /dev/null
+++ b/sbt/src/sbt-test/dependency-management/sources-transitive-classifiers/test
@@ -0,0 +1,3 @@
+> checkBinaries
+> show updateClassifiers
+> checkSources
diff --git a/sbt/src/sbt-test/dependency-management/sources/build.sbt b/sbt/src/sbt-test/dependency-management/sources/build.sbt
index 91f35d197..ebe207024 100644
--- a/sbt/src/sbt-test/dependency-management/sources/build.sbt
+++ b/sbt/src/sbt-test/dependency-management/sources/build.sbt
@@ -1,31 +1,33 @@
-ThisBuild / useCoursier := false
+// ThisBuild / useCoursier := false
+ThisBuild / scalaVersion := "2.12.8"
 
 lazy val root = (project in file("."))
   .settings(
-    libraryDependencies += "net.liftweb" % "lift-webkit" % "1.0" intransitive(),
-    libraryDependencies += "org.scalacheck" % "scalacheck" % "1.5" intransitive(),
-    autoScalaLibrary := false,
-    managedScalaInstance := false,
-    transitiveClassifiers := Seq("sources"),
+    libraryDependencies += "com.typesafe.akka" %% "akka-actor" % "2.5.22",
     TaskKey[Unit]("checkSources") := (updateClassifiers map checkSources).value,
     TaskKey[Unit]("checkBinaries") := (update map checkBinaries).value
   )
 
 def getSources(report: UpdateReport) = report.matching(artifactFilter(`classifier` = "sources") )
-def checkSources(report: UpdateReport): Unit =
-  {
-    val srcs = getSources(report)
-    if(srcs.isEmpty)
-      sys.error(s"No sources retrieved\n\n$report")
-    else if(srcs.size != 2)
-      sys.error("Incorrect sources retrieved:\n\t" + srcs.mkString("\n\t"))
-    else
-      ()
-  }
+def checkSources(report: UpdateReport): Unit = {
+  val srcs = getSources(report).map(_.getName)
+  if(srcs.isEmpty)
+    sys.error(s"No sources retrieved\n\n$report")
+  else if (srcs.size != 8 || !srcs.exists(_ == "akka-actor_2.12-2.5.22-sources.jar")) {
+    // scala-library-2.12.8-sources.jar
+    // config-1.3.3-sources.jar
+    // akka-actor_2.12-2.5.22-sources.jar
+    // scala-java8-compat_2.12-0.8.0-sources.jar
+    // scala-xml_2.12-1.0.6-sources.jar
+    // scala-compiler-2.12.8-sources.jar
+    // scala-reflect-2.12.8-sources.jar
+    // jline-2.14.6-sources.jar
+    sys.error("Incorrect sources retrieved:\n\t" + srcs.mkString("\n\t"))
+  } else ()
+}
 
-def checkBinaries(report: UpdateReport): Unit =
-  {
-    val srcs = getSources(report)
-    if(srcs.nonEmpty) sys.error("Sources retrieved:\n\t" + srcs.mkString("\n\t"))
-    else ()
-  }
+def checkBinaries(report: UpdateReport): Unit = {
+  val srcs = getSources(report)
+  if(srcs.nonEmpty) sys.error("Sources retrieved:\n\t" + srcs.mkString("\n\t"))
+  else ()
+}