Merge pull request #2892 from dwijnand/upgrades

-sbinary/+sjson-new, -datatype/+contraband & upgrades
eugene yokota authored 2017-01-05 18:22:51 -05:00, committed by GitHub
commit 274c8ec65f
98 changed files with 703 additions and 723 deletions
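
The gist of the upgrade: sbinary's binary Format type class gives way to sjson-new's JsonFormat, gzipped cache files give way to CacheStore / CacheStoreFactory, and the datatype plugin gives way to contraband for generated datatypes and their JSON codecs. As a rough orientation, a hand-written format migrates along these lines (a minimal sketch with a hypothetical Label type; the project combinator is the same one the Package.scala hunk below uses for Manifest):

object LabelCodec {
  import sjsonnew.JsonFormat
  import sbt.internal.util.CacheImplicits._ // basic JsonFormats plus the `project` helper, as Package.scala below imports

  final case class Label(value: String)

  // before (sbinary):
  //   implicit val labelFormat: sbinary.Format[Label] =
  //     sbinary.DefaultProtocol.wrap[Label, String](_.value, Label(_))

  // after (sjson-new): delegate to the JsonFormat of an underlying representation
  implicit val labelFormat: JsonFormat[Label] =
    project[Label, String](_.value, Label(_))
}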

View File

@ -136,7 +136,7 @@ lazy val stdTaskProj = (project in file("tasks-standard")).
name := "Task System",
testExclusive
).
configure(addSbtUtilCollection, addSbtUtilLogging, addSbtIO)
configure(addSbtUtilCollection, addSbtUtilLogging, addSbtUtilCache, addSbtIO)
// Embedded Scala code runner
lazy val runProj = (project in file("run")).
@ -168,7 +168,8 @@ lazy val actionsProj = (project in file("main-actions")).
dependsOn(runProj, stdTaskProj, taskProj, testingProj).
settings(
testedBaseSettings,
name := "Actions"
name := "Actions",
libraryDependencies += sjsonNewScalaJson
).
configure(addSbtCompilerClasspath, addSbtUtilCompletion, addSbtCompilerApiInfo,
addSbtZinc, addSbtCompilerIvyIntegration, addSbtCompilerInterface,
@ -176,12 +177,13 @@ lazy val actionsProj = (project in file("main-actions")).
// General command support and core commands not specific to a build system
lazy val commandProj = (project in file("main-command")).
enablePlugins(DatatypePlugin, JsonCodecPlugin).
enablePlugins(ContrabandPlugin, JsonCodecPlugin).
settings(
testedBaseSettings,
name := "Command",
libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson),
sourceManaged in (Compile, generateDatatypes) := baseDirectory.value / "src" / "main" / "datatype-scala"
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats
).
configure(addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilCompletion, addSbtCompilerClasspath)

View File

@ -1,234 +0,0 @@
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
import Predef.{ Map, Set, implicitly } // excludes *both 2.10.x conforms and 2.11.x $conforms in source compatible manner.
import sbt.internal.util.{ Cache, HList, HNil, InputCache, LinePosition, LineRange, NoPosition, RangePosition, SourcePosition }
import sbt.internal.util.FileInfo.{ exists, hash }
import sbt.internal.util.Types.{ :+:, idFun }
import java.io.File
import java.{ util => ju }
import java.net.URL
import sbinary.{ DefaultProtocol, Format }
import sbt.internal.librarymanagement._
import sbt.librarymanagement._
import sbt.librarymanagement.RepositoryHelpers._
import Ordering._
import sbt.io.Hash
/**
* InputCaches for IvyConfiguration, ModuleSettings, and UpdateConfiguration
* The InputCaches for a basic data structure is built in two parts.
* Given the data structure:
* Data[A,B,C, ...]
* 1) Define a conversion from Data to the HList A :+: B :+: C :+: ... :+: HNil,
* excluding any members that should not be considered for caching
* 2) In theory, 1) would be enough and wrapHL would generate InputCache[Data] as long
* as all of InputCache[A], InputCache[B], ... exist. However, if any of these child
* InputCaches are constructed using wrapHL, you get a diverging implicit error. (I
* believe scalac is generating this error as specified, but that the implicits would
* be valid and not be infinite. This might take some effort to come up with a new rule
* that allows this)
* 3) So, we need to explicitly define the intermediate implicits. The general approach is:
* {{{
* object LN {
* ... Data => HList conversions ...
* }
* import LN._
* implicit dataCache: InputCache[Data] = wrapHL
*
* object L(N-1) ...
* }}}
* Each Data in LN only uses implicits from L(N-1).
* This way, higher levels (higher N) cannot see the HList conversions of subcomponents but can
* use the explicitly defined subcomponent implicits and there is no divergence.
* 4) Ideally, diverging implicits could be relaxed so that the ... = wrapIn lines could be removed.
*/
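To make the layering this (now-deleted) scaladoc describes concrete, here is a minimal sketch with hypothetical Inner/Outer types, built from the same wrapIn / HList helpers this file imports; it assumes the basic InputCache instances for Int, String and Boolean that Cache provides:

object CacheLayeringExample {
  import sbt.internal.util.{ Cache, HNil, InputCache }
  import sbt.internal.util.Types.:+:
  import Cache._

  final case class Inner(a: Int, b: String)
  final case class Outer(inner: Inner, flag: Boolean)

  // Level 1: the HList conversion for the leaf type lives in its own scope...
  object LInner { implicit val innerToHL = (i: Inner) => i.a :+: i.b :+: HNil }
  import LInner._
  // ...and its InputCache is pinned down explicitly, so higher levels never see innerToHL.
  implicit val innerIC: InputCache[Inner] = wrapIn

  // Level 2: Outer's conversion sees only the explicit innerIC, not LInner's conversion,
  // so implicit search terminates instead of diverging.
  object LOuter { implicit val outerToHL = (o: Outer) => o.inner :+: o.flag :+: HNil }
  import LOuter._
  implicit val outerIC: InputCache[Outer] = wrapIn
}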
object CacheIvy {
def password(s: Option[String]) = new Array[Byte](0)
def names(s: Iterable[Configuration]): Set[String] = s.map(_.name).toSet
import Cache._
implicit def wrapHL[W, H, T <: HList](implicit f: W => H :+: T, cache: InputCache[H :+: T]): InputCache[W] =
Cache.wrapIn(f, cache)
lazy val excludeMap: Format[Map[ModuleID, Set[String]]] = implicitly
lazy val updateIC: InputCache[IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil] = implicitly
/* def deliverIC: InputCache[IvyConfiguration :+: ModuleSettings :+: DeliverConfiguration :+: HNil] = implicitly
def publishIC: InputCache[IvyConfiguration :+: ModuleSettings :+: PublishConfiguration :+: HNil] = implicitly*/
lazy val moduleIDSeqIC: InputCache[Seq[ModuleID]] = implicitly
lazy val modulePositionMapFormat: Format[Map[ModuleID, SourcePosition]] = implicitly
implicit lazy val updateReportFormat: Format[UpdateReport] =
{
import DefaultProtocol.{ StringFormat, FileFormat }
wrap[UpdateReport, (File, Seq[ConfigurationReport], UpdateStats, Map[File, Long])](rep => (rep.cachedDescriptor, rep.configurations, rep.stats, rep.stamps), { case (cd, cs, stats, stamps) => new UpdateReport(cd, cs, stats, stamps) })
}
implicit def updateStatsFormat: Format[UpdateStats] =
wrap[UpdateStats, (Long, Long, Long)](us => (us.resolveTime, us.downloadTime, us.downloadSize), { case (rt, dt, ds) => new UpdateStats(rt, dt, ds, true) })
implicit def confReportFormat(implicit m: Format[String], mr: Format[Seq[ModuleReport]], oar: Format[Seq[OrganizationArtifactReport]]): Format[ConfigurationReport] =
wrap[ConfigurationReport, (String, Seq[ModuleReport], Seq[OrganizationArtifactReport])](r => (r.configuration, r.modules, r.details), { case (c, m, d) => new ConfigurationReport(c, m, d) })
implicit def moduleReportFormat(implicit cf: Format[Seq[Caller]], ff: Format[File]): Format[ModuleReport] = {
wrap[ModuleReport, (ModuleID, Seq[(Artifact, File)], Seq[Artifact], Option[String], Option[Long], Option[String], Option[String], Boolean, Option[String], Option[String], Option[String], Option[String], Map[String, String], Option[Boolean], Option[String], Seq[String], Seq[(String, Option[String])], Seq[Caller])](
m => (m.module, m.artifacts, m.missingArtifacts, m.status, m.publicationDate map { _.getTime }, m.resolver, m.artifactResolver, m.evicted, m.evictedData, m.evictedReason, m.problem, m.homepage, m.extraAttributes, m.isDefault, m.branch, m.configurations, m.licenses, m.callers),
{ case (m, as, ms, s, pd, r, a, e, ed, er, p, h, ea, d, b, cs, ls, ks) => new ModuleReport(m, as, ms, s, pd map { new ju.Date(_) }, r, a, e, ed, er, p, h, ea, d, b, cs, ls, ks) }
)
}
implicit def artifactFormat(implicit sf: Format[String], uf: Format[Option[URL]]): Format[Artifact] = {
wrap[Artifact, (String, String, String, Option[String], Seq[Configuration], Option[URL], Map[String, String])](
a => (a.name, a.`type`, a.extension, a.classifier, a.configurations.toSeq, a.url, a.extraAttributes),
{ case (n, t, x, c, cs, u, e) => Artifact(n, t, x, c, cs, u, e) }
)
}
implicit def organizationArtifactReportFormat(implicit sf: Format[String], bf: Format[Boolean], df: Format[Seq[ModuleReport]]): Format[OrganizationArtifactReport] =
wrap[OrganizationArtifactReport, (String, String, Seq[ModuleReport])](m => (m.organization, m.name, m.modules), { case (o, n, r) => OrganizationArtifactReport(o, n, r) })
implicit def callerFormat: Format[Caller] =
wrap[Caller, (ModuleID, Seq[String], Map[String, String], Boolean, Boolean, Boolean, Boolean)](
c => (c.caller, c.callerConfigurations, c.callerExtraAttributes, c.isForceDependency, c.isChangingDependency, c.isTransitiveDependency, c.isDirectlyForceDependency),
{ case (c, cc, ea, fd, cd, td, df) => new Caller(c, cc, ea, fd, cd, td, df) }
)
implicit def exclusionRuleFormat(implicit sf: Format[String]): Format[InclExclRule] =
wrap[InclExclRule, (String, String, String, Seq[String])](e => (e.organization, e.name, e.artifact, e.configurations), { case (o, n, a, cs) => InclExclRule(o, n, a, cs) })
implicit def crossVersionFormat: Format[CrossVersion] = wrap(crossToInt, crossFromInt)
implicit def sourcePositionFormat: Format[SourcePosition] =
wrap[SourcePosition, (Int, String, Int, Int)](
{
case NoPosition => (0, "", 0, 0)
case LinePosition(p, s) => (1, p, s, 0)
case RangePosition(p, LineRange(s, e)) => (2, p, s, e)
},
{
case (0, _, _, _) => NoPosition
case (1, p, s, _) => LinePosition(p, s)
case (2, p, s, e) => RangePosition(p, LineRange(s, e))
}
)
private[this] final val DisabledValue = 0
private[this] final val BinaryValue = 1
private[this] final val FullValue = 2
import CrossVersion.{ Binary, Disabled, Full }
private[this] val crossFromInt = (i: Int) => i match { case BinaryValue => new Binary(idFun); case FullValue => new Full(idFun); case _ => Disabled }
private[this] val crossToInt = (c: CrossVersion) => c match { case Disabled => 0; case b: Binary => BinaryValue; case f: Full => FullValue }
implicit def moduleIDFormat(implicit sf: Format[String], bf: Format[Boolean]): Format[ModuleID] =
wrap[ModuleID, ((String, String, String, Option[String], Option[String]), (Boolean, Boolean, Boolean, Seq[Artifact], Seq[InclusionRule], Seq[ExclusionRule], Map[String, String], CrossVersion))](
m => ((m.organization, m.name, m.revision, m.configurations, m.branchName), (m.isChanging, m.isTransitive, m.isForce, m.explicitArtifacts, m.inclusions, m.exclusions, m.extraAttributes, m.crossVersion)),
{ case ((o, n, r, cs, br), (ch, t, f, as, incl, excl, x, cv)) => ModuleID(o, n, r, cs, ch, t, f, as, incl, excl, x, cv, br) }
)
// For some reason sbinary seems to detect unserialized instance Set[ModuleID] to be not equal. #1620
implicit def moduleSetIC: InputCache[Set[ModuleID]] =
{
implicit def toSeq(ms: Set[ModuleID]): Seq[ModuleID] = ms.toSeq.sortBy { _.toString }
wrapIn
}
implicit def configurationFormat(implicit sf: Format[String]): Format[Configuration] =
wrap[Configuration, String](_.name, s => new Configuration(s))
implicit def classpathFormat =
{
import DefaultProtocol.FileFormat
implicitly[Format[Map[String, Seq[File]]]]
}
object L5 {
implicit def inlineIvyToHL = (i: InlineIvyConfiguration) => i.paths :+: i.resolvers :+: i.otherResolvers :+: i.moduleConfigurations :+: i.localOnly :+: i.checksums :+: HNil
}
import L5._
implicit def moduleSettingsIC: InputCache[ModuleSettings] =
unionInputCache[ModuleSettings, PomConfiguration :+: InlineConfiguration :+: IvyFileConfiguration :+: HNil]
implicit def ivyConfigurationIC: InputCache[IvyConfiguration] =
unionInputCache[IvyConfiguration, InlineIvyConfiguration :+: ExternalIvyConfiguration :+: HNil]
object L4 {
implicit val inlineToHL = (c: InlineConfiguration) =>
c.module :+: c.dependencies :+: c.ivyXML :+: c.configurations :+: c.defaultConfiguration.map(_.name) :+:
c.ivyScala :+: c.validate :+: c.overrides :+: c.excludes :+: HNil
implicit def moduleConfToHL = (m: ModuleConfiguration) => m.organization :+: m.name :+: m.revision :+: m.resolver :+: HNil
// implicit def inlineToHL = (c: InlineConfiguration) => c.module :+: c.dependencies :+: c.ivyXML :+: c.configurations :+: c.defaultConfiguration.map(_.name) :+: c.ivyScala :+: c.validate :+: c.overrides :+: HNil
}
import L4._
implicit def inlineIC: InputCache[InlineConfiguration] = wrapIn
implicit def moduleConfIC: InputCache[ModuleConfiguration] = wrapIn
object L3 {
implicit def mavenCacheToHL = (m: MavenCache) => m.name :+: m.rootFile.getAbsolutePath :+: HNil
implicit def mavenRToHL = (m: MavenRepository) => m.name :+: m.root :+: HNil
implicit def fileRToHL = (r: FileRepository) => r.name :+: r.configuration :+: r.patterns :+: HNil
implicit def urlRToHL = (u: URLRepository) => u.name :+: u.patterns :+: HNil
implicit def sshRToHL = (s: SshRepository) => s.name :+: s.connection :+: s.patterns :+: s.publishPermissions :+: HNil
implicit def sftpRToHL = (s: SftpRepository) => s.name :+: s.connection :+: s.patterns :+: HNil
implicit def rawRToHL = (r: RawRepository) => r.name :+: r.resolver.getClass.getName :+: HNil
implicit def chainRToHL = (c: ChainedResolver) => c.name :+: c.resolvers :+: HNil
implicit def moduleToHL = (m: ModuleID) => m.organization :+: m.name :+: m.revision :+: m.configurations :+: m.isChanging :+: m.isTransitive :+: m.explicitArtifacts :+: m.exclusions :+: m.inclusions :+: m.extraAttributes :+: m.crossVersion :+: HNil
}
import L3._
implicit lazy val chainedIC: InputCache[ChainedResolver] = InputCache.lzy(wrapIn)
implicit lazy val resolverIC: InputCache[Resolver] =
unionInputCache[Resolver, ChainedResolver :+: MavenRepository :+: MavenCache :+: FileRepository :+: URLRepository :+: SshRepository :+: SftpRepository :+: RawRepository :+: HNil]
implicit def moduleIC: InputCache[ModuleID] = wrapIn
implicitly[InputCache[Seq[Configuration]]]
object L2 {
implicit def updateConfToHL = (u: UpdateConfiguration) => u.retrieve :+: u.missingOk :+: HNil
implicit def pomConfigurationHL = (c: PomConfiguration) => hash(c.file) :+: c.ivyScala :+: c.validate :+: HNil
implicit def ivyFileConfigurationHL = (c: IvyFileConfiguration) => hash(c.file) :+: c.ivyScala :+: c.validate :+: HNil
implicit def sshConnectionToHL = (s: SshConnection) => s.authentication :+: s.hostname :+: s.port :+: HNil
implicit def artifactToHL = (a: Artifact) => a.name :+: a.`type` :+: a.extension :+: a.classifier :+: names(a.configurations) :+: a.url :+: a.extraAttributes :+: HNil
implicit def inclExclToHL = (e: InclExclRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: HNil
implicit def sbtExclusionToHL = (e: SbtExclusionRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: e.crossVersion :+: HNil
implicit def crossToHL = (c: CrossVersion) => crossToInt(c) :+: HNil
/* implicit def deliverConfToHL = (p: DeliverConfiguration) => p.deliverIvyPattern :+: p.status :+: p.configurations :+: HNil
implicit def publishConfToHL = (p: PublishConfiguration) => p.ivyFile :+: p.resolverName :+: p.artifacts :+: HNil*/
}
import L2._
implicit def updateConfIC: InputCache[UpdateConfiguration] = wrapIn
implicit def pomIC: InputCache[PomConfiguration] = wrapIn
implicit def ivyFileIC: InputCache[IvyFileConfiguration] = wrapIn
implicit def connectionIC: InputCache[SshConnection] = wrapIn
implicit def artifactIC: InputCache[Artifact] = wrapIn
implicit def exclusionIC: InputCache[InclExclRule] = wrapIn
implicit def sbtExclusionIC: InputCache[SbtExclusionRule] = wrapIn
implicit def crossVersionIC: InputCache[CrossVersion] = wrapIn
/* implicit def publishConfIC: InputCache[PublishConfiguration] = wrapIn
implicit def deliverConfIC: InputCache[DeliverConfiguration] = wrapIn*/
object L1 {
implicit def retrieveToHL = (r: RetrieveConfiguration) => exists(r.retrieveDirectory) :+: r.outputPattern :+: HNil
implicit def ivyPathsToHL = (p: IvyPaths) => exists(p.baseDirectory) :+: p.ivyHome.map(exists.apply) :+: HNil
implicit def ivyScalaHL = (i: IvyScala) => i.scalaFullVersion :+: i.scalaBinaryVersion :+: names(i.configurations) :+: i.checkExplicit :+: i.filterImplicit :+: HNil
implicit def configurationToHL = (c: Configuration) => c.name :+: c.description :+: c.isPublic :+: names(c.extendsConfigs) :+: c.transitive :+: HNil
implicit def passwordToHL = (s: PasswordAuthentication) => Hash(s.user) :+: password(s.password) :+: HNil
implicit def keyFileToHL = (s: KeyFileAuthentication) => Hash(s.user) :+: hash(s.keyfile) :+: password(s.password) :+: HNil
implicit def patternsToHL = (p: Patterns) => p.ivyPatterns :+: p.artifactPatterns :+: p.isMavenCompatible :+: HNil
implicit def fileConfToHL = (f: FileConfiguration) => f.isLocal :+: f.isTransactional :+: HNil
implicit def externalIvyConfigurationToHL = (e: ExternalIvyConfiguration) =>
exists(e.baseDirectory) :+: Hash.contentsIfLocal(e.uri) :+: HNil
}
import L1._
implicit def ivyScalaIC: InputCache[IvyScala] = wrapIn
implicit def ivyPathsIC: InputCache[IvyPaths] = wrapIn
implicit def retrieveIC: InputCache[RetrieveConfiguration] = wrapIn
implicit def patternsIC: InputCache[Patterns] = wrapIn
implicit def fileConfIC: InputCache[FileConfiguration] = wrapIn
implicit def extIvyIC: InputCache[ExternalIvyConfiguration] = wrapIn
implicit def confIC: InputCache[Configuration] = wrapIn
implicit def authIC: InputCache[SshAuthentication] =
unionInputCache[SshAuthentication, PasswordAuthentication :+: KeyFileAuthentication :+: HNil]
}

View File

@ -12,6 +12,7 @@ import java.io.File
import sbt.internal.librarymanagement.{ ComponentManager, IvyConfiguration }
import sbt.librarymanagement.{ ModuleID, VersionNumber }
import sbt.util.Logger
import sbt.internal.util.CacheStore
object Compiler {
val DefaultMaxErrors = 100
@ -22,10 +23,10 @@ object Compiler {
}
private[sbt] def scalaCompilerBridgeSource2_10: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.10",
ComponentCompiler.incrementalVersion, Some("component")).sources()
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
private[sbt] def scalaCompilerBridgeSource2_11: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.11",
ComponentCompiler.incrementalVersion, Some("component")).sources()
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
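These two hunks show the recurring shape of the contraband migration for library-management datatypes: optional constructor parameters disappear and values are refined through with* copies on an immutable instance. A stand-alone sketch of the same change, with hypothetical coordinates:

object ModuleIDBuilderExample {
  import sbt.librarymanagement.ModuleID

  // old (pre-contraband): ModuleID("org.example", "lib", "1.0", Some("compile"))
  // new (contraband): only the required fields go through apply; the rest via with* copies
  val m: ModuleID = ModuleID("org.example", "lib", "1.0").withConfigurations(Some("compile"))
}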
/** Inputs necessary to run the incremental compiler. */
// final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup)
@ -103,12 +104,12 @@ object Compiler {
// new AnalyzingCompiler(instance, provider, cpOptions)
// }
def compilers(cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration)(implicit app: AppConfiguration, log: Logger): Compilers =
def compilers(cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore)(implicit app: AppConfiguration, log: Logger): Compilers =
{
val scalaProvider = app.provider.scalaProvider
val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
val sourceModule = scalaCompilerBridgeSource2_11
compilers(instance, cpOptions, None, ivyConfiguration, sourceModule)
compilers(instance, cpOptions, None, ivyConfiguration, fileToStore, sourceModule)
}
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers =
@ -116,17 +117,17 @@ object Compiler {
// TODO: Get java compiler
def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
ivyConfiguration: IvyConfiguration, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): Compilers = {
val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, sourcesModule)
ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): Compilers = {
val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, fileToStore, sourcesModule)
val javac = JavaTools.directOrFork(instance, cpOptions, javaHome)
new Compilers(scalac, javac)
}
def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
{
val launcher = app.provider.scalaProvider.launcher
val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, sourcesModule)
new AnalyzingCompiler(instance, provider, cpOptions)
val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, fileToStore, sourcesModule)
new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
}
def compile(in: Inputs, log: Logger): CompileResult =

View File

@ -10,6 +10,7 @@ import Predef.{ conforms => _, _ }
import sbt.io.syntax._
import sbt.io.IO
import sbt.internal.util.CacheStoreFactory
import xsbti.Reporter
import xsbti.compile.JavaTools
@ -17,14 +18,14 @@ import sbt.util.Logger
object Doc {
import RawCompileLike._
def scaladoc(label: String, cache: File, compiler: AnalyzingCompiler): Gen =
scaladoc(label, cache, compiler, Seq())
def scaladoc(label: String, cache: File, compiler: AnalyzingCompiler, fileInputOptions: Seq[String]): Gen =
cached(cache, fileInputOptions, prepare(label + " Scala API documentation", compiler.doc))
def javadoc(label: String, cache: File, doc: JavaTools, log: Logger, reporter: Reporter): Gen =
javadoc(label, cache, doc, log, reporter, Seq())
def javadoc(label: String, cache: File, doc: JavaTools, log: Logger, reporter: Reporter, fileInputOptions: Seq[String]): Gen =
cached(cache, fileInputOptions, prepare(label + " Java API documentation", filterSources(
def scaladoc(label: String, cacheStoreFactory: CacheStoreFactory, compiler: AnalyzingCompiler): Gen =
scaladoc(label, cacheStoreFactory, compiler, Seq())
def scaladoc(label: String, cacheStoreFactory: CacheStoreFactory, compiler: AnalyzingCompiler, fileInputOptions: Seq[String]): Gen =
cached(cacheStoreFactory, fileInputOptions, prepare(label + " Scala API documentation", compiler.doc))
def javadoc(label: String, cacheStoreFactory: CacheStoreFactory, doc: JavaTools, log: Logger, reporter: Reporter): Gen =
javadoc(label, cacheStoreFactory, doc, log, reporter, Seq())
def javadoc(label: String, cacheStoreFactory: CacheStoreFactory, doc: JavaTools, log: Logger, reporter: Reporter, fileInputOptions: Seq[String]): Gen =
cached(cacheStoreFactory, fileInputOptions, prepare(label + " Java API documentation", filterSources(
javaSourcesOnly,
(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger) => {
// doc.doc
@ -34,31 +35,16 @@ object Doc {
val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java")
// @deprecated("Use `scaladoc`", "0.13.0")
// def apply(maximumErrors: Int, compiler: AnalyzingCompiler) = new Scaladoc(maximumErrors, compiler)
// @deprecated("Use `javadoc`", "0.13.0")
// def apply(maximumErrors: Int, compiler: sbt.compiler.Javadoc) = new Javadoc(maximumErrors, compiler)
private[sbt] final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc {
def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Unit = {
generate("Scala", label, compiler.doc, sources, classpath, outputDirectory, options, maximumErrors, log)
}
}
// private[sbt] final class Javadoc(maximumErrors: Int, doc: sbt.internal.inc.Javadoc) extends Doc {
// def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) {
// // javadoc doesn't handle *.scala properly, so we evict them from javadoc sources list.
// generate("Java", label, doc.doc, sources.filterNot(_.name.endsWith(".scala")), classpath, outputDirectory, options, maximumErrors, log)
// }
// }
}
// @deprecated("No longer used. See `Doc.javadoc` or `Doc.scaladoc`", "0.13.0")
sealed trait Doc {
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit
// def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Unit
private[sbt] final def generate(variant: String, label: String, docf: Gen, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger): Unit = {
val logSnip = variant + " API documentation"
if (sources.isEmpty)
@ -71,20 +57,4 @@ sealed trait Doc {
log.info(logSnip + " generation successful.")
}
}
// def cached(cache: File, label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) {
// type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: String :+: File :+: Seq[String] :+: HNil
// val inputs: Inputs = hash(sources.toSet) :+: lastModified(classpath.toSet) :+: classpath.absString :+: outputDirectory :+: options :+: HNil
// implicit val stringEquiv: Equiv[String] = defaultEquiv
// implicit val fileEquiv: Equiv[File] = defaultEquiv
// val cachedDoc = inputChanged(cache / "inputs") { (inChanged, in: Inputs) =>
// outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
// if (inChanged || outChanged)
// apply(label, sources, classpath, outputDirectory, options, log)
// else
// log.debug("Doc uptodate: " + outputDirectory.getAbsolutePath)
// }
// }
// cachedDoc(inputs)(() => exists(outputDirectory.allPaths.get.toSet))
// }
}

View File

@ -31,7 +31,7 @@ object DotGraph {
def file(name: String) = new File(outputDir, name)
IO.createDirectory(outputDir)
generateGraph(file("int-class-deps"), "dependencies", relations.internalClassDep, identity[String], identity[String])
generateGraph(file("binary-dependencies"), "externalDependencies", relations.binaryDep, externalToString, sourceToString)
generateGraph(file("binary-dependencies"), "externalDependencies", relations.libraryDep, externalToString, sourceToString)
}
def generateGraph[K, V](file: File, graphName: String, relation: Relation[K, V],

View File

@ -3,24 +3,22 @@
*/
package sbt
import Predef.{ conforms => _, _ }
import scala.Predef.{ conforms => _, _ }
import java.io.File
import java.util.jar.{ Attributes, Manifest }
import collection.JavaConverters._
import scala.collection.JavaConverters._
import sbt.internal.util.Types.:+:
import sbt.io.syntax._
import sbt.io.IO
import sbinary.{ DefaultProtocol, Format }
import DefaultProtocol.{ FileFormat, immutableMapFormat, StringFormat }
import sbt.internal.util.{ Cache, FileInfo, FilesInfo, HNil, ModifiedFileInfo, PlainFileInfo, Tracked }
import Cache.{ defaultEquiv, hConsCache, hNilCache, streamFormat }
import Tracked.{ inputChanged, outputChanged }
import FileInfo.exists
import FilesInfo.lastModified
import sjsonnew.JsonFormat
import sbt.util.Logger
import sbt.internal.util.{ CacheStoreFactory, FilesInfo, HNil, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.FileInfo.{ exists, lastModified }
import sbt.internal.util.CacheImplicits._
import sbt.internal.util.Tracked.inputChanged
sealed trait PackageOption
object Package {
final case class JarManifest(m: Manifest) extends PackageOption {
@ -48,7 +46,7 @@ object Package {
}
final class Configuration(val sources: Seq[(File, String)], val jar: File, val options: Seq[PackageOption])
def apply(conf: Configuration, cacheFile: File, log: Logger): Unit = {
def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = {
val manifest = new Manifest
val main = manifest.getMainAttributes
for (option <- conf.options) {
@ -61,9 +59,10 @@ object Package {
}
setVersion(main)
val cachedMakeJar = inputChanged(cacheFile / "inputs") { (inChanged, inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) =>
val cachedMakeJar = inputChanged(cacheStoreFactory derive "inputs") { (inChanged, inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) =>
import exists.format
val sources :+: _ :+: manifest :+: HNil = inputs
outputChanged(cacheFile / "output") { (outChanged, jar: PlainFileInfo) =>
inputChanged(cacheStoreFactory derive "output") { (outChanged, jar: PlainFileInfo) =>
if (inChanged || outChanged)
makeJar(sources.toSeq, jar.file, manifest, log)
else
@ -73,7 +72,7 @@ object Package {
val map = conf.sources.toMap
val inputs = map :+: lastModified(map.keySet) :+: manifest :+: HNil
cachedMakeJar(inputs)(() => exists(conf.jar))
cachedMakeJar(inputs)(exists(conf.jar))
}
def setVersion(main: Attributes): Unit = {
val version = Attributes.Name.MANIFEST_VERSION
@ -105,7 +104,14 @@ object Package {
"Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t"))
implicit def manifestEquiv: Equiv[Manifest] = defaultEquiv
implicit def manifestFormat: Format[Manifest] = streamFormat(_ write _, in => new Manifest(in))
implicit def manifestFormat: JsonFormat[Manifest] = project[Manifest, Array[Byte]](
m => {
val bos = new java.io.ByteArrayOutputStream()
m write bos
bos.toByteArray
},
bs => new Manifest(new java.io.ByteArrayInputStream(bs))
)
implicit def stringMapEquiv: Equiv[Map[File, String]] = defaultEquiv
}

View File

@ -10,12 +10,11 @@ import Predef.{ conforms => _, _ }
import sbt.io.syntax._
import sbt.io.IO
import sbinary.DefaultProtocol.FileFormat
import sbt.internal.util.Types.:+:
import sbt.internal.util.Cache.{ defaultEquiv, hConsCache, hNilCache, IntFormat, seqCache, StringFormat }
import sbt.internal.util.Tracked.{ inputChanged, outputChanged }
import sbt.internal.util.{ FilesInfo, HashFileInfo, HNil, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.FilesInfo.{ exists, hash, lastModified }
import sbt.internal.util.CacheImplicits._
import sbt.internal.util.Tracked.inputChanged
import sbt.internal.util.{ CacheStoreFactory, FilesInfo, HashFileInfo, HNil, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.FileInfo.{ exists, hash, lastModified }
import xsbti.compile.ClasspathOptions
import sbt.util.Logger
@ -40,23 +39,23 @@ object RawCompileLike {
loop(options.toList, Nil)
}
def cached(cache: File, doCompile: Gen): Gen = cached(cache, Seq(), doCompile)
def cached(cache: File, fileInputOpts: Seq[String], doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) =>
def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen = cached(cacheStoreFactory, Seq(), doCompile)
def cached(cacheStoreFactory: CacheStoreFactory, fileInputOpts: Seq[String], doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) =>
{
type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[String] :+: Int :+: HNil
val inputs: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
implicit val stringEquiv: Equiv[String] = defaultEquiv
implicit val fileEquiv: Equiv[File] = defaultEquiv
implicit val intEquiv: Equiv[Int] = defaultEquiv
val cachedComp = inputChanged(cache / "inputs") { (inChanged, in: Inputs) =>
outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
val cachedComp = inputChanged(cacheStoreFactory derive "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory derive "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
if (inChanged || outChanged)
doCompile(sources, classpath, outputDirectory, options, maxErrors, log)
else
log.debug("Uptodate: " + outputDirectory.getAbsolutePath)
}
}
cachedComp(inputs)(() => exists(outputDirectory.allPaths.get.toSet))
cachedComp(inputs)(exists(outputDirectory.allPaths.get.toSet))
}
def prepare(description: String, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) =>
{
@ -78,8 +77,8 @@ object RawCompileLike {
val compiler = new RawCompiler(instance, cpOptions, log)
compiler(sources, classpath, outputDirectory, options)
}
def compile(label: String, cache: File, instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
cached(cache, prepare(label + " sources", rawCompile(instance, cpOptions)))
def compile(label: String, cacheStoreFactory: CacheStoreFactory, instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
cached(cacheStoreFactory, prepare(label + " sources", rawCompile(instance, cpOptions)))
val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => ()
}

View File

@ -5,10 +5,13 @@ package sbt
import java.io.File
import sbt.internal.util.{ FileInfo, Relation }
import sbt.internal.util.{ CacheStore, FileInfo, Relation }
import sbt.internal.util.CacheImplicits._
import sbt.io.IO
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
/**
* Maintains a set of mappings so that they are uptodate.
* Specifically, 'apply' applies the mappings by creating target directories and copying source files to their destination.
@ -22,14 +25,14 @@ import sbt.io.IO
* It is safe to use for its intended purpose: copying resources to a class output directory.
*/
object Sync {
def apply(cacheFile: File, inStyle: FileInfo.Style = FileInfo.lastModified, outStyle: FileInfo.Style = FileInfo.exists): Traversable[(File, File)] => Relation[File, File] =
def apply(store: CacheStore, inStyle: FileInfo.Style = FileInfo.lastModified, outStyle: FileInfo.Style = FileInfo.exists): Traversable[(File, File)] => Relation[File, File] =
mappings =>
{
val relation = Relation.empty ++ mappings
noDuplicateTargets(relation)
val currentInfo = relation._1s.map(s => (s, inStyle(s))).toMap
val (previousRelation, previousInfo) = readInfo(cacheFile)(inStyle.format)
val (previousRelation, previousInfo) = readInfo(store)(inStyle.format)
val removeTargets = previousRelation._2s -- relation._2s
def outofdate(source: File, target: File): Boolean =
@ -46,7 +49,7 @@ object Sync {
IO.deleteIfEmpty(cleanDirs)
updates.all.foreach((copy _).tupled)
writeInfo(cacheFile, relation, currentInfo)(inStyle.format)
writeInfo(store, relation, currentInfo)(inStyle.format)
relation
}
@ -71,28 +74,35 @@ object Sync {
sys.error("Duplicate mappings:" + dups.mkString)
}
import java.io.{ File, IOException }
import sbinary._
import Operations.{ read, write }
import DefaultProtocol.{ FileFormat => _, _ }
//import sbt.inc.AnalysisFormats.{ fileFormat, relationFormat }
implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s))
implicit def relationFormat[A, B](implicit af: Format[Map[A, Set[B]]], bf: Format[Map[B, Set[A]]]): Format[Relation[A, B]] =
asProduct2[Relation[A, B], Map[A, Set[B]], Map[B, Set[A]]](Relation.make _)(r => (r.forwardMap, r.reverseMap))(af, bf)
implicit def relationFormat[A, B](implicit af: JsonFormat[Map[A, Set[B]]], bf: JsonFormat[Map[B, Set[A]]]): JsonFormat[Relation[A, B]] =
new JsonFormat[Relation[A, B]] {
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Relation[A, B] =
jsOpt match {
case Some(js) =>
unbuilder.beginArray(js)
val jForward = unbuilder.nextElement
val jReverse = unbuilder.nextElement
unbuilder.endArray()
Relation.make(af.read(Some(jForward), unbuilder), bf.read(Some(jReverse), unbuilder))
case None =>
deserializationError("Expected JsArray but found None")
}
def write[J](obj: Relation[A, B], builder: Builder[J]): Unit = {
builder.beginArray()
af.write(obj.forwardMap, builder)
bf.write(obj.reverseMap, builder)
builder.endArray()
}
def writeInfo[F <: FileInfo](file: File, relation: Relation[File, File], info: Map[File, F])(implicit infoFormat: Format[F]): Unit =
IO.gzipFileOut(file) { out =>
write(out, (relation, info))
}
def writeInfo[F <: FileInfo](store: CacheStore, relation: Relation[File, File], info: Map[File, F])(implicit infoFormat: JsonFormat[F]): Unit =
store.write((relation, info))
type RelationInfo[F] = (Relation[File, File], Map[File, F])
def readInfo[F <: FileInfo](file: File)(implicit infoFormat: Format[F]): RelationInfo[F] =
try { readUncaught(file)(infoFormat) }
catch { case e: IOException => (Relation.empty, Map.empty) }
def readInfo[F <: FileInfo](store: CacheStore)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
def readUncaught[F <: FileInfo](file: File)(implicit infoFormat: Format[F]): RelationInfo[F] =
IO.gzipFileIn(file) { in =>
read[RelationInfo[F]](in)
}
}
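
Sync.apply now takes a CacheStore instead of a raw cache file and persists its Relation through the hand-written JsonFormat above rather than sbinary. A usage sketch (the copyAll helper is hypothetical; it mirrors what Defaults.copyResourcesTask further down does with a store derived from the task's CacheStoreFactory):

import java.io.File
import sbt.Sync
import sbt.internal.util.{ CacheStore, CacheStoreFactory }

object SyncUsageExample {
  // Copies only out-of-date mappings, prunes targets whose sources disappeared,
  // and records the state in a store derived from the given factory.
  def copyAll(factory: CacheStoreFactory, mappings: Seq[(File, File)]) = {
    val store: CacheStore = factory derive "copy-resources"
    Sync(store)(mappings) // returns the up-to-date source -> target Relation
  }
}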

View File

@ -6,13 +6,39 @@ import Prop._
import sbt.librarymanagement._
class CacheIvyTest extends Properties("CacheIvy") {
import CacheIvy._
import sbinary.Operations._
import sbinary._
import sbinary.DefaultProtocol._
import sbt.internal.util.{ CacheStore, SingletonCache }
import SingletonCache._
private def cachePreservesEquality[T: Format](m: T, eq: (T, T) => Prop, str: T => String): Prop = {
val out = fromByteArray[T](toByteArray(m))
import sjsonnew._
import sjsonnew.support.scalajson.unsafe.Converter
import scala.json.ast.unsafe.JValue
private class InMemoryStore(converter: SupportConverter[JValue]) extends CacheStore {
private var content: JValue = _
override def delete(): Unit = ()
override def close(): Unit = ()
override def read[T: JsonReader](): T =
try converter.fromJsonUnsafe[T](content)
catch { case t: Throwable => t.printStackTrace(); throw t }
override def read[T: JsonReader](default: => T): T =
try read[T]()
catch { case _: Throwable => default }
override def write[T: JsonWriter](value: T): Unit =
content = converter.toJsonUnsafe(value)
}
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)(implicit cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter)
f(cache, store)
}
private def cachePreservesEquality[T: JsonFormat](m: T, eq: (T, T) => Prop, str: T => String): Prop = testCache[T, Prop] { (cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
}
@ -22,13 +48,12 @@ class CacheIvyTest extends Properties("CacheIvy") {
n <- Gen.alphaStr
a <- Gen.alphaStr
cs <- arbitrary[List[String]]
} yield ExclusionRule(o, n, a, cs)
} yield ExclusionRule(o, n, a, cs.toVector)
)
implicit val arbCrossVersion: Arbitrary[CrossVersion] = Arbitrary {
// Actual functions don't matter, just Disabled vs Binary vs Full
import CrossVersion._
Gen.oneOf(Disabled, new Binary(identity), new Full(identity))
Gen.oneOf(Disabled(), Binary(), Full())
}
implicit val arbArtifact: Arbitrary[Artifact] = Arbitrary {
@ -54,7 +79,7 @@ class CacheIvyTest extends Properties("CacheIvy") {
crossVersion <- arbitrary[CrossVersion]
} yield ModuleID(
organization = o, name = n, revision = r, configurations = cs, isChanging = isChanging, isTransitive = isTransitive,
isForce = isForce, explicitArtifacts = explicitArtifacts, inclusions = inclusions, exclusions = exclusions,
isForce = isForce, explicitArtifacts = explicitArtifacts.toVector, inclusions = inclusions.toVector, exclusions = exclusions.toVector,
extraAttributes = extraAttributes, crossVersion = crossVersion, branchName = branch
)
}
@ -66,16 +91,16 @@ class CacheIvyTest extends Properties("CacheIvy") {
s"$inclusions, $extraAttributes, $crossVersion, $branchName)"
}
def eq(a: ModuleID, b: ModuleID): Prop = {
import CrossVersion._
def rest = a.copy(crossVersion = b.crossVersion) == b
def rest = a.withCrossVersion(b.crossVersion) == b
(a.crossVersion, b.crossVersion) match {
case (Disabled, Disabled) => rest
case (_: Binary, _: Binary) => rest
case (_: Full, _: Full) => rest
case (a, b) => Prop(false) :| s"CrossVersions don't match: $a vs $b"
case (_: Disabled, _: Disabled) => rest
case (_: Binary, _: Binary) => rest
case (_: Full, _: Full) => rest
case (a, b) => Prop(false) :| s"CrossVersions don't match: $a vs $b"
}
}
import sbt.librarymanagement.LibraryManagementCodec._
cachePreservesEquality(m, eq _, str)
}
}
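
The regenerated test also shows how CrossVersion changes shape under contraband: Disabled, Binary and Full are now constructed with apply() rather than new Binary(idFun) and friends, and matched as types. A minimal stand-alone sketch of that pattern:

object CrossVersionExample {
  import sbt.librarymanagement.CrossVersion
  import CrossVersion.{ Binary, Disabled, Full }

  def describe(cv: CrossVersion): String = cv match {
    case _: Disabled => "not cross-versioned"                    // previously the Disabled singleton
    case _: Binary   => "suffixed with the binary Scala version"
    case _: Full     => "suffixed with the full Scala version"
    case _           => "other"
  }

  val disabled: CrossVersion = Disabled() // constructed via apply, as in the Arbitrary generator above
}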

View File

@ -4,11 +4,11 @@
// DO NOT EDIT MANUALLY
package sbt.internal.server
final class CommandMessage(
final class CommandMessage private (
val `type`: String,
val commandLine: Option[String]) extends Serializable {
def this(`type`: String) = this(`type`, None)
private def this(`type`: String) = this(`type`, None)
override def equals(o: Any): Boolean = o match {
case x: CommandMessage => (this.`type` == x.`type`) && (this.commandLine == x.commandLine)
@ -20,10 +20,7 @@ final class CommandMessage(
override def toString: String = {
"CommandMessage(" + `type` + ", " + commandLine + ")"
}
def copy(`type`: String): CommandMessage = {
new CommandMessage(`type`, commandLine)
}
def copy(`type`: String = `type`, commandLine: Option[String] = commandLine): CommandMessage = {
protected[this] def copy(`type`: String = `type`, commandLine: Option[String] = commandLine): CommandMessage = {
new CommandMessage(`type`, commandLine)
}
def withType(`type`: String): CommandMessage = {
@ -34,6 +31,7 @@ final class CommandMessage(
}
}
object CommandMessage {
def apply(`type`: String): CommandMessage = new CommandMessage(`type`, None)
def apply(`type`: String, commandLine: Option[String]): CommandMessage = new CommandMessage(`type`, commandLine)
}
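
Contraband-generated classes such as CommandMessage trade their public constructor and case-class-style copy for companion apply overloads and with* copies; that is what lets fields be added later without breaking binary compatibility. A short usage sketch against the API shown above:

object CommandMessageExample {
  import sbt.internal.server.CommandMessage

  val msg  = CommandMessage("exec", Some("compile")) // companion apply replaces `new CommandMessage(...)`
  val msg2 = msg.withType("reload")                  // with* copies replace the now-protected copy
}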

View File

@ -4,16 +4,16 @@
// DO NOT EDIT MANUALLY
package sbt.internal.server
final class EventMessage(
final class EventMessage private (
val `type`: String,
val status: Option[String],
val commandQueue: Vector[String],
val commandQueue: scala.Vector[String],
val level: Option[String],
val message: Option[String],
val success: Option[Boolean],
val commandLine: Option[String]) extends Serializable {
def this(`type`: String) = this(`type`, None, Vector(), None, None, None, None)
private def this(`type`: String) = this(`type`, None, Vector(), None, None, None, None)
override def equals(o: Any): Boolean = o match {
case x: EventMessage => (this.`type` == x.`type`) && (this.status == x.status) && (this.commandQueue == x.commandQueue) && (this.level == x.level) && (this.message == x.message) && (this.success == x.success) && (this.commandLine == x.commandLine)
@ -25,10 +25,7 @@ final class EventMessage(
override def toString: String = {
"EventMessage(" + `type` + ", " + status + ", " + commandQueue + ", " + level + ", " + message + ", " + success + ", " + commandLine + ")"
}
def copy(`type`: String): EventMessage = {
new EventMessage(`type`, status, commandQueue, level, message, success, commandLine)
}
def copy(`type`: String = `type`, status: Option[String] = status, commandQueue: Vector[String] = commandQueue, level: Option[String] = level, message: Option[String] = message, success: Option[Boolean] = success, commandLine: Option[String] = commandLine): EventMessage = {
protected[this] def copy(`type`: String = `type`, status: Option[String] = status, commandQueue: scala.Vector[String] = commandQueue, level: Option[String] = level, message: Option[String] = message, success: Option[Boolean] = success, commandLine: Option[String] = commandLine): EventMessage = {
new EventMessage(`type`, status, commandQueue, level, message, success, commandLine)
}
def withType(`type`: String): EventMessage = {
@ -37,7 +34,7 @@ final class EventMessage(
def withStatus(status: Option[String]): EventMessage = {
copy(status = status)
}
def withCommandQueue(commandQueue: Vector[String]): EventMessage = {
def withCommandQueue(commandQueue: scala.Vector[String]): EventMessage = {
copy(commandQueue = commandQueue)
}
def withLevel(level: Option[String]): EventMessage = {
@ -54,6 +51,7 @@ final class EventMessage(
}
}
object EventMessage {
def apply(`type`: String): EventMessage = new EventMessage(`type`, None, Vector(), None, None, None, None)
def apply(`type`: String, status: Option[String], commandQueue: Vector[String], level: Option[String], message: Option[String], success: Option[Boolean], commandLine: Option[String]): EventMessage = new EventMessage(`type`, status, commandQueue, level, message, success, commandLine)
def apply(`type`: String, status: Option[String], commandQueue: scala.Vector[String], level: Option[String], message: Option[String], success: Option[Boolean], commandLine: Option[String]): EventMessage = new EventMessage(`type`, status, commandQueue, level, message, success, commandLine)
}

View File

@ -14,7 +14,7 @@ implicit lazy val CommandMessageFormat: JsonFormat[sbt.internal.server.CommandMe
val `type` = unbuilder.readField[String]("type")
val commandLine = unbuilder.readField[Option[String]]("commandLine")
unbuilder.endObject()
new sbt.internal.server.CommandMessage(`type`, commandLine)
sbt.internal.server.CommandMessage(`type`, commandLine)
case None =>
deserializationError("Expected JsObject but found None")
}

View File

@ -13,13 +13,13 @@ implicit lazy val EventMessageFormat: JsonFormat[sbt.internal.server.EventMessag
unbuilder.beginObject(js)
val `type` = unbuilder.readField[String]("type")
val status = unbuilder.readField[Option[String]]("status")
val commandQueue = unbuilder.readField[Vector[String]]("commandQueue")
val commandQueue = unbuilder.readField[scala.Vector[String]]("commandQueue")
val level = unbuilder.readField[Option[String]]("level")
val message = unbuilder.readField[Option[String]]("message")
val success = unbuilder.readField[Option[Boolean]]("success")
val commandLine = unbuilder.readField[Option[String]]("commandLine")
unbuilder.endObject()
new sbt.internal.server.EventMessage(`type`, status, commandQueue, level, message, success, commandLine)
sbt.internal.server.EventMessage(`type`, status, commandQueue, level, message, success, commandLine)
case None =>
deserializationError("Expected JsObject but found None")
}

View File

@ -14,7 +14,7 @@
},
{
"name": "commandLine",
"type": "String?",
"type": "Option[String]",
"default": "None",
"since": "0.1.0"
}
@ -33,37 +33,37 @@
},
{
"name": "status",
"type": "String?",
"type": "Option[String]",
"default": "None",
"since": "0.1.0"
},
{
"name": "commandQueue",
"type": "String*",
"type": "scala.Vector[String]",
"default": "Vector()",
"since": "0.1.0"
},
{
"name": "level",
"type": "String?",
"type": "Option[String]",
"default": "None",
"since": "0.1.0"
},
{
"name": "message",
"type": "String?",
"type": "Option[String]",
"default": "None",
"since": "0.1.0"
},
{
"name": "success",
"type": "boolean?",
"type": "Option[Boolean]",
"default": "None",
"since": "0.1.0"
},
{
"name": "commandLine",
"type": "String?",
"type": "Option[String]",
"default": "None",
"since": "0.1.0"
}

View File

@ -2,14 +2,12 @@ package sbt
import Def.{ Initialize, ScopedKey, streamsManagerKey }
import Previous._
import sbt.internal.util.{ ~>, IMap, RMap }
import sbt.internal.util.{ ~>, AttributeKey, IMap, Input, Output, PlainInput, RMap, StampedFormat }
import sbt.internal.util.Types._
import java.io.{ InputStream, OutputStream }
import scala.util.control.NonFatal
import sbinary.{ DefaultProtocol, Format }
import DefaultProtocol.{ StringFormat, withStamp }
import sjsonnew.{ IsoString, JsonFormat, SupportConverter }
/**
* Reads the previous value of tasks on-demand. The read values are cached so that they are only read once per task execution.
@ -22,7 +20,7 @@ private[sbt] final class Previous(streams: Streams, referenced: IMap[ScopedTaskK
private[this] final class ReferencedValue[T](referenced: Referenced[T]) {
import referenced.{ stamped, task }
lazy val previousValue: Option[T] = {
val in = streams(task).readBinary(task, StreamName)
val in = streams(task).getInput(task, StreamName)
try read(in, stamped) finally in.close()
}
}
@ -32,6 +30,7 @@ private[sbt] final class Previous(streams: Streams, referenced: IMap[ScopedTaskK
map.get(key).flatMap(_.previousValue)
}
object Previous {
import sjsonnew.BasicJsonProtocol.StringJsonFormat
private[sbt]type ScopedTaskKey[T] = ScopedKey[Task[T]]
private type Streams = sbt.std.Streams[ScopedKey[_]]
@ -39,8 +38,8 @@ object Previous {
private final val StreamName = "previous"
/** Represents a reference task.previous*/
private[sbt] final class Referenced[T](val task: ScopedKey[Task[T]], val format: Format[T]) {
lazy val stamped = withStamp(task.key.manifest.toString)(format)
private[sbt] final class Referenced[T](val task: ScopedKey[Task[T]], val format: JsonFormat[T]) {
lazy val stamped = StampedFormat.withStamp(task.key.manifest.toString)(format)
def setTask(newTask: ScopedKey[Task[T]]) = new Referenced(newTask, format)
}
@ -51,9 +50,9 @@ object Previous {
private[sbt] final class References {
private[this] var map = IMap.empty[ScopedTaskKey, Referenced]
// TODO: this arbitrarily chooses a Format.
// TODO: this arbitrarily chooses a JsonFormat.
// The need to choose is a fundamental problem with this approach, but this should at least make a stable choice.
def recordReference[T](key: ScopedKey[Task[T]], format: Format[T]): Unit = synchronized {
def recordReference[T](key: ScopedKey[Task[T]], format: JsonFormat[T]): Unit = synchronized {
map = map.put(key, new Referenced(key, format))
}
def getReferences: IMap[ScopedTaskKey, Referenced] = synchronized { map }
@ -65,7 +64,7 @@ object Previous {
val map = referenced.getReferences
def impl[T](key: ScopedKey[_], result: T): Unit =
for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) {
val out = streams.apply(i.task).binary(StreamName)
val out = streams.apply(i.task).getOutput(StreamName)
try write(out, i.stamped, result) finally out.close()
}
@ -75,16 +74,16 @@ object Previous {
} impl(key, result)
}
private def read[T](stream: InputStream, format: Format[T]): Option[T] =
try Some(format.reads(stream))
catch { case NonFatal(e) => None }
private def read[T](input: Input, format: JsonFormat[T]): Option[T] =
try Some(input.read()(format))
catch { case e: Exception => None }
private def write[T](stream: OutputStream, format: Format[T], value: T): Unit =
try format.writes(stream, value)
catch { case NonFatal(e) => () }
private def write[T](output: Output, format: JsonFormat[T], value: T): Unit =
try output.write(value)(format)
catch { case e: Exception => () }
/** Public as a macro implementation detail. Do not call directly. */
def runtime[T](skey: TaskKey[T])(implicit format: Format[T]): Initialize[Task[Option[T]]] =
def runtime[T](skey: TaskKey[T])(implicit format: JsonFormat[T]): Initialize[Task[Option[T]]] =
{
val inputs = (cache in Global) zip Def.validated(skey, selfRefOk = true) zip (references in Global)
inputs {

View File

@ -116,7 +116,7 @@ object InputWrapper {
unexpectedType(c)(pos, tpe)
}
/** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value*/
def previousMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(format: c.Expr[sbinary.Format[T]]): c.Expr[Option[T]] =
def previousMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] =
{
import c.universe._
c.macroApplication match {
@ -159,7 +159,7 @@ sealed abstract class ParserInputTask[T] {
}
sealed abstract class MacroPrevious[T] {
@compileTimeOnly("`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.")
def previous(implicit format: sbinary.Format[T]): Option[T] = macro InputWrapper.previousMacroImpl[T]
def previous(implicit format: sjsonnew.JsonFormat[T]): Option[T] = macro InputWrapper.previousMacroImpl[T]
}
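
For build authors, the visible effect is that task.previous now needs an sjsonnew.JsonFormat for the task's result type in scope instead of an sbinary.Format. A build.sbt-style sketch with a hypothetical counter task (BasicJsonProtocol supplies the Int format):

import sjsonnew.BasicJsonProtocol._

val buildCount = taskKey[Int]("Number of times the task has run in this project.")

buildCount := buildCount.previous.getOrElse(0) + 1 // Option[Int], read back via the JsonFormat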
/** Implementation detail. The wrap method temporarily holds the input parser (as a Tree, at compile time) until the input task macro processes it. */

View File

@ -10,29 +10,31 @@ import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
import sbt.internal.librarymanagement.mavenint.{ PomExtraDependencyAttributes, SbtPomExtraProperties }
import Project.{ inConfig, inScope, inTask, richInitialize, richInitializeTask, richTaskSessionVar }
import Def.{ Initialize, ScopedKey, Setting, SettingsDefinition }
import sbt.internal.librarymanagement.{ CustomPomParser, DependencyFilter }
import sbt.librarymanagement.Artifact.{ DocClassifier, SourceClassifier }
import sbt.librarymanagement.{ Configuration, Configurations, ConflictManager, CrossVersion, MavenRepository, Resolver, ScalaArtifacts, UpdateOptions }
import sbt.librarymanagement.Configurations.{ Compile, CompilerPlugin, IntegrationTest, names, Provided, Runtime, Test }
import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion, partialVersion }
import sbt.internal.util.complete._
import std.TaskExtra._
import testing.{ Framework, Runner, AnnotatedFingerprint, SubclassFingerprint }
import sjsonnew.{ IsoLList, JsonFormat, LList, LNil }, LList.:*:
import sbt.internal.util.CacheImplicits._
import sbt.librarymanagement.{ `package` => _, _ }
import sbt.internal.librarymanagement._
import sbt.internal.librarymanagement.syntax._
import sbt.internal.util._
import sbt.util.{ Level, Logger }
import sys.error
import scala.xml.NodeSeq
import scala.util.control.NonFatal
import org.apache.ivy.core.module.{ descriptor, id }
import descriptor.ModuleDescriptor, id.ModuleRevisionId
import java.io.{ File, PrintWriter }
import java.net.{ URI, URL }
import java.util.concurrent.{ TimeUnit, Callable }
import sbinary.DefaultProtocol.StringFormat
import sbt.internal.util.Cache.seqFormat
import sbt.internal.CommandStrings.ExportStream
import xsbti.{ CrossValue, Maybe }
@ -46,6 +48,7 @@ import sbt.io.{ AllPassFilter, FileFilter, GlobFilter, HiddenFileFilter, IO, Nam
import Path._
import sbt.io.syntax._
import Keys._
import xsbti.compile.IncToolOptionsUtil
// incremental compiler
import xsbt.api.Discovery
@ -130,7 +133,8 @@ object Defaults extends BuildCommon {
includeFilter :== NothingFilter,
includeFilter in unmanagedSources :== ("*.java" | "*.scala") && new SimpleFileFilter(_.isFile),
includeFilter in unmanagedJars :== "*.jar" | "*.so" | "*.dll" | "*.jnilib" | "*.zip",
includeFilter in unmanagedResources :== AllPassFilter
includeFilter in unmanagedResources :== AllPassFilter,
fileToStore :== DefaultFileToStore
)
private[sbt] lazy val globalIvyCore: Seq[Setting[_]] =
@ -145,7 +149,7 @@ object Defaults extends BuildCommon {
configurationsToRetrieve :== None,
scalaOrganization :== ScalaArtifacts.Organization,
sbtResolver := { if (sbtVersion.value endsWith "-SNAPSHOT") Classpaths.sbtIvySnapshots else Classpaths.typesafeReleases },
crossVersion :== CrossVersion.Disabled,
crossVersion :== Disabled(),
buildDependencies := Classpaths.constructBuildDependencies.value,
version :== "0.1-SNAPSHOT",
classpathTypes :== Set("jar", "bundle") ++ CustomPomParser.JarPackagings,
@ -268,7 +272,7 @@ object Defaults extends BuildCommon {
Maybe.just(new TransactionalManagerType(crossTarget.value / "classes.bak", sbt.util.Logger.Null))
),
scalaInstance := scalaInstanceTask.value,
crossVersion := (if (crossPaths.value) CrossVersion.binary else CrossVersion.Disabled),
crossVersion := (if (crossPaths.value) CrossVersion.binary else Disabled()),
crossTarget := makeCrossTarget(target.value, scalaBinaryVersion.value, sbtBinaryVersion.value, sbtPlugin.value, crossPaths.value),
clean := {
val _ = clean.value
@ -304,7 +308,7 @@ object Defaults extends BuildCommon {
}
def compilersSetting = compilers := Compiler.compilers(scalaInstance.value, classpathOptions.value, javaHome.value,
bootIvyConfiguration.value, scalaCompilerBridgeSource.value)(appConfiguration.value, streams.value.log)
bootIvyConfiguration.value, fileToStore.value, scalaCompilerBridgeSource.value)(appConfiguration.value, streams.value.log)
lazy val configTasks = docTaskSettings(doc) ++ inTask(compile)(compileInputsSettings) ++ configGlobal ++ compileAnalysisSettings ++ Seq(
compile := compileTask.value,
@ -725,10 +729,10 @@ object Defaults extends BuildCommon {
case c => Some(c.name)
}
val combined = cPart.toList ++ classifier.toList
if (combined.isEmpty) a.copy(classifier = None, configurations = cOpt.toList) else {
if (combined.isEmpty) a.withClassifier(None).withConfigurations(cOpt.toVector) else {
val classifierString = combined mkString "-"
val confs = cOpt.toList flatMap { c => artifactConfigurations(a, c, classifier) }
a.copy(classifier = Some(classifierString), `type` = Artifact.classifierType(classifierString), configurations = confs)
val confs = cOpt.toVector flatMap { c => artifactConfigurations(a, c, classifier) }
a.withClassifier(Some(classifierString)).withType(Artifact.classifierType(classifierString)).withConfigurations(confs)
}
}
@deprecated("The configuration(s) should not be decided based on the classifier.", "1.0")
@ -754,7 +758,7 @@ object Defaults extends BuildCommon {
))
def packageTask: Initialize[Task[File]] =
(packageConfiguration, streams) map { (config, s) =>
Package(config, s.cacheDirectory, s.log)
Package(config, s.cacheStoreFactory, s.log)
config.jar
}
def packageConfigurationTask: Initialize[Task[Package.Configuration]] =
@ -848,15 +852,15 @@ object Defaults extends BuildCommon {
(hasScala, hasJava) match {
case (true, _) =>
val options = sOpts ++ Opts.doc.externalAPI(xapis)
val runDoc = Doc.scaladoc(label, s.cacheDirectory / "scala",
val runDoc = Doc.scaladoc(label, s.cacheStoreFactory sub "scala",
cs.scalac match {
case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc"))
},
fiOpts)
runDoc(srcs, cp, out, options, maxErrors.value, s.log)
case (_, true) =>
val javadoc = sbt.inc.Doc.cachedJavadoc(label, s.cacheDirectory / "java", cs.javaTools)
javadoc.run(srcs.toList, cp, out, javacOptions.value.toList, s.log, reporter)
val javadoc = sbt.inc.Doc.cachedJavadoc(label, s.cacheStoreFactory sub "java", cs.javaTools)
javadoc.run(srcs.toList, cp, out, javacOptions.value.toList, IncToolOptionsUtil.defaultIncToolOptions(), s.log, reporter)
case _ => () // do nothing
}
out
@ -947,7 +951,7 @@ object Defaults extends BuildCommon {
compilerCache.value,
incOptions.value,
(compilerReporter in compile).value,
o2m(None),
xsbti.Maybe.nothing(),
// TODO - task / setting for extra,
Array.empty
)
@ -993,7 +997,7 @@ object Defaults extends BuildCommon {
}
def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID =
m.extra(PomExtraDependencyAttributes.SbtVersionKey -> sbtV, PomExtraDependencyAttributes.ScalaVersionKey -> scalaV).copy(crossVersion = CrossVersion.Disabled)
m.extra(PomExtraDependencyAttributes.SbtVersionKey -> sbtV, PomExtraDependencyAttributes.ScalaVersionKey -> scalaV).withCrossVersion(Disabled())
def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.task {
if (sbtPlugin.value) PluginDiscovery.discoverSourceAll(compile.value) else PluginDiscovery.emptyDiscoveredNames
@ -1001,10 +1005,10 @@ object Defaults extends BuildCommon {
def copyResourcesTask =
(classDirectory, resources, resourceDirectories, streams) map { (target, resrcs, dirs, s) =>
val cacheFile = s.cacheDirectory / "copy-resources"
val cacheStore = s.cacheStoreFactory derive "copy-resources"
val mappings = (resrcs --- dirs) pair (rebase(dirs, target) | flat(target))
s.log.debug("Copy resource mappings: " + mappings.mkString("\n\t", "\n\t", ""))
Sync(cacheFile)(mappings)
Sync(cacheStore)(mappings)
mappings
}
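
A minimal sketch, not part of this commit, of the caching idiom introduced above: task streams now expose a CacheStoreFactory, and named sub-stores are derived from it instead of building files under s.cacheDirectory. The key, directory names, and mapping logic below are hypothetical; only `cacheStoreFactory derive` and `Sync(store)(mappings)` are taken from the diff.

lazy val copyAssets = taskKey[Seq[(File, File)]]("Hypothetical task mirroring copyResourcesTask.")

copyAssets := {
  val s = streams.value
  // a named sub-store replaces a cache file under s.cacheDirectory
  val cacheStore = s.cacheStoreFactory derive "copy-assets"
  val srcDir = (sourceDirectory in Compile).value / "assets"
  val outDir = target.value / "assets"
  val mappings = (srcDir ** "*").get.filter(_.isFile).map(f => f -> (outDir / f.getName))
  Sync(cacheStore)(mappings) // Sync is now handed a CacheStore rather than a cache file
  mappings
}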
@ -1208,12 +1212,12 @@ object Classpaths {
val scalaVersion = app.provider.scalaProvider.version
val binVersion = binaryScalaVersion(scalaVersion)
val cross = id.crossVersionedValue match {
case CrossValue.Disabled => CrossVersion.Disabled
case CrossValue.Disabled => Disabled()
case CrossValue.Full => CrossVersion.binary
case CrossValue.Binary => CrossVersion.full
}
val base = ModuleID(id.groupID, id.name, sbtVersion.value, crossVersion = cross)
CrossVersion(scalaVersion, binVersion)(base).copy(crossVersion = CrossVersion.Disabled)
val base = ModuleID(id.groupID, id.name, sbtVersion.value).withCrossVersion(cross)
CrossVersion(scalaVersion, binVersion)(base).withCrossVersion(Disabled())
}
))
@ -1226,7 +1230,10 @@ object Classpaths {
organization := (organization or normalizedName).value,
organizationName := (organizationName or organization).value,
organizationHomepage := (organizationHomepage or homepage).value,
projectInfo := ((name, description, homepage, startYear, licenses, organizationName, organizationHomepage, scmInfo, developers) apply ModuleInfo).value,
projectInfo := ModuleInfo(
name.value, description.value, homepage.value, startYear.value, licenses.value.toVector,
organizationName.value, organizationHomepage.value, scmInfo.value, developers.value.toVector
),
overrideBuildResolvers := appConfiguration(isOverrideRepositories).value,
externalResolvers := ((externalResolvers.?.value, resolvers.value, appResolvers.value, useJCenter.value) match {
case (Some(delegated), Seq(), _, _) => delegated
@ -1251,7 +1258,7 @@ object Classpaths {
}
}).value,
moduleName := normalizedName.value,
ivyPaths := new IvyPaths(baseDirectory.value, bootIvyHome(appConfiguration.value)),
ivyPaths := IvyPaths(baseDirectory.value, bootIvyHome(appConfiguration.value)),
dependencyCacheDirectory := {
val st = state.value
BuildPaths.getDependencyDirectory(st, BuildPaths.getGlobalBase(st))
@ -1267,7 +1274,7 @@ object Classpaths {
},
ivyScala :=
(ivyScala or ((scalaHome, scalaVersion in update, scalaBinaryVersion in update, scalaOrganization, sbtPlugin) { (sh, fv, bv, so, plugin) =>
Option(new IvyScala(fv, bv, Nil, filterImplicit = false, checkExplicit = true, overrideScalaVersion = true, scalaOrganization = so))
Option(IvyScala(fv, bv, Vector.empty, filterImplicit = false, checkExplicit = true, overrideScalaVersion = true).withScalaOrganization(so))
})).value,
artifactPath in makePom := artifactPathSetting(artifact in makePom).value,
publishArtifact in makePom := publishMavenStyle.value && publishArtifact.value,
@ -1279,9 +1286,9 @@ object Classpaths {
// Tell the UpdateConfiguration which artifact types are special (for sources and javadocs)
val specialArtifactTypes = sourceArtifactTypes.value union docArtifactTypes.value
// By default, to retrieve all types *but* these (it's assumed that everything else is binary/resource)
new UpdateConfiguration(retrieveConfiguration.value, false, ivyLoggingLevel.value, ArtifactTypeFilter.forbid(specialArtifactTypes))
UpdateConfiguration(retrieveConfiguration.value, false, ivyLoggingLevel.value, ArtifactTypeFilter.forbid(specialArtifactTypes))
},
retrieveConfiguration := { if (retrieveManaged.value) Some(new RetrieveConfiguration(managedDirectory.value, retrievePattern.value, retrieveManagedSync.value, configurationsToRetrieve.value)) else None },
retrieveConfiguration := { if (retrieveManaged.value) Some(RetrieveConfiguration(managedDirectory.value, retrievePattern.value, retrieveManagedSync.value, configurationsToRetrieve.value)) else None },
ivyConfiguration := mkIvyConfiguration.value,
ivyConfigurations := {
val confs = thisProject.value.configurations
@ -1324,7 +1331,7 @@ object Classpaths {
implicit val key = (m: ModuleID) => (m.organization, m.name, m.revision)
val projectDeps = projectDependencies.value.iterator.map(key).toSet
val externalModules = update.value.allModules.filterNot(m => projectDeps contains key(m))
GetClassifiersModule(projectID.value, externalModules, ivyConfigurations.in(updateClassifiers).value, transitiveClassifiers.in(updateClassifiers).value)
GetClassifiersModule(projectID.value, externalModules, ivyConfigurations.in(updateClassifiers).value.toVector, transitiveClassifiers.in(updateClassifiers).value.toVector)
},
updateClassifiers := (Def.task {
val s = streams.value
@ -1338,7 +1345,7 @@ object Classpaths {
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
IvyActions.updateClassifiers(is, GetClassifiersConfiguration(mod, excludes, c.copy(artifactFilter = c.artifactFilter.invert), ivyScala.value, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), Vector.empty, s.log)
IvyActions.updateClassifiers(is, GetClassifiersConfiguration(mod, excludes, c.withArtifactFilter(c.artifactFilter.invert), ivyScala.value, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), Vector.empty, s.log)
}
} tag (Tags.Update, Tags.Network)).value
)
@ -1348,7 +1355,7 @@ object Classpaths {
// Override the default to handle mixing in the sbtPlugin + scala dependencies.
allDependencies := {
val base = projectDependencies.value ++ libraryDependencies.value
val pluginAdjust = if (sbtPlugin.value) sbtDependency.value.copy(configurations = Some(Provided.name)) +: base else base
val pluginAdjust = if (sbtPlugin.value) sbtDependency.value.withConfigurations(Some(Provided.name)) +: base else base
if (scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value)
pluginAdjust
else {
@ -1379,13 +1386,14 @@ object Classpaths {
if (isPlugin) sbtPluginExtra(pid, sbtBV, scalaBV) else pid
}
def ivySbt0: Initialize[Task[IvySbt]] =
(ivyConfiguration, credentials, streams) map { (conf, creds, s) =>
(ivyConfiguration, fileToStore, credentials, streams) map { (conf, fts, creds, s) =>
Credentials.register(creds, s.log)
new IvySbt(conf)
new IvySbt(conf, fts)
}
def moduleSettings0: Initialize[Task[ModuleSettings]] = Def.task {
new InlineConfiguration(projectID.value, projectInfo.value, allDependencies.value, dependencyOverrides.value, excludeDependencies.value,
ivyXML.value, ivyConfigurations.value, defaultConfiguration.value, ivyScala.value, ivyValidate.value, conflictManager.value)
InlineConfiguration(ivyValidate.value, ivyScala.value,
projectID.value, projectInfo.value, allDependencies.value.toVector, dependencyOverrides.value, excludeDependencies.value.toVector,
ivyXML.value, ivyConfigurations.value.toVector, defaultConfiguration.value, conflictManager.value)
}
private[this] def sbtClassifiersGlobalDefaults = Defaults.globalDefaults(Seq(
@ -1396,14 +1404,14 @@ object Classpaths {
val explicit = buildStructure.value.units(thisProjectRef.value.build).unit.plugins.pluginData.resolvers
explicit orElse bootRepositories(appConfiguration.value) getOrElse externalResolvers.value
},
ivyConfiguration := new InlineIvyConfiguration(ivyPaths.value, externalResolvers.value, Nil, Nil, offline.value, Option(lock(appConfiguration.value)),
checksums.value, Some(target.value / "resolution-cache"), UpdateOptions(), streams.value.log),
ivyConfiguration := new InlineIvyConfiguration(ivyPaths.value, externalResolvers.value.toVector, Vector.empty, Vector.empty, offline.value, Option(lock(appConfiguration.value)),
checksums.value.toVector, Some(target.value / "resolution-cache"), UpdateOptions(), streams.value.log),
ivySbt := ivySbt0.value,
classifiersModule := ((projectID, sbtDependency, transitiveClassifiers, loadedBuild, thisProjectRef) map { (pid, sbtDep, classifiers, lb, ref) =>
val pluginClasspath = lb.units(ref.build).unit.plugins.fullClasspath
val pluginClasspath = lb.units(ref.build).unit.plugins.fullClasspath.toVector
val pluginJars = pluginClasspath.filter(_.data.isFile) // exclude directories: an approximation to whether they've been published
val pluginIDs: Seq[ModuleID] = pluginJars.flatMap(_ get moduleID.key)
GetClassifiersModule(pid, sbtDep +: pluginIDs, Configurations.Default :: Nil, classifiers)
val pluginIDs: Vector[ModuleID] = pluginJars.flatMap(_ get moduleID.key)
GetClassifiersModule(pid, sbtDep +: pluginIDs, Vector(Configurations.Default), classifiers.toVector)
}).value,
updateSbtClassifiers in TaskGlobal := (Def.task {
val s = streams.value
@ -1417,8 +1425,8 @@ object Classpaths {
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
val noExplicitCheck = ivyScala.value.map(_.copy(checkExplicit = false))
IvyActions.transitiveScratch(is, "sbt", GetClassifiersConfiguration(mod, excludes, c.copy(artifactFilter = c.artifactFilter.invert), noExplicitCheck, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), s.log)
val noExplicitCheck = ivyScala.value.map(_.withCheckExplicit(false))
IvyActions.transitiveScratch(is, "sbt", GetClassifiersConfiguration(mod, excludes, c.withArtifactFilter(c.artifactFilter.invert), noExplicitCheck, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), s.log)
}
} tag (Tags.Update, Tags.Network)).value
)) ++ Seq(bootIvyConfiguration := (ivyConfiguration in updateSbtClassifiers).value)
@ -1430,19 +1438,32 @@ object Classpaths {
IvyActions.publish(module, config, s.log)
} tag (Tags.Publish, Tags.Network)
import CacheIvy.{ updateIC, updateReportFormat, excludeMap, moduleIDSeqIC, modulePositionMapFormat }
val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] = new sjsonnew.JsonKeyFormat[ModuleID] {
import sjsonnew.support.scalajson.unsafe._
import LibraryManagementCodec._
val moduleIdFormat: JsonFormat[ModuleID] = implicitly[JsonFormat[ModuleID]]
def write(key: ModuleID): String = CompactPrinter(Converter.toJsonUnsafe(key)(moduleIdFormat))
def read(key: String): ModuleID = Converter.fromJsonUnsafe[ModuleID](Parser.parseUnsafe(key))(moduleIdFormat)
}
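
A minimal sketch, not part of this commit, of what the JsonKeyFormat above provides: a ModuleID can act as a JSON object key by printing it to a compact JSON string and parsing it back. The coordinates are hypothetical.

val mid: ModuleID = ModuleID("org.example", "demo", "0.1.0")
val asKey: String = moduleIdJsonKeyFormat.write(mid)        // compact JSON string
val roundTrip: ModuleID = moduleIdJsonKeyFormat.read(asKey) // ModuleID again
assert(roundTrip == mid)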
def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)(f: Map[ModuleID, Set[String]] => UpdateReport): UpdateReport =
{
import sbt.librarymanagement.LibraryManagementCodec._
implicit val isoString: sjsonnew.IsoString[scala.json.ast.unsafe.JValue] = sjsonnew.IsoString.iso(
sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
)
val exclName = "exclude_classifiers"
val file = out / exclName
val store = new FileBasedStore(file, sjsonnew.support.scalajson.unsafe.Converter)
lock(out / (exclName + ".lock"), new Callable[UpdateReport] {
def call = {
val excludes = CacheIO.fromFile[Map[ModuleID, Set[String]]](excludeMap, Map.empty[ModuleID, Set[String]])(file)
implicit val midJsonKeyFmt: sjsonnew.JsonKeyFormat[ModuleID] = moduleIdJsonKeyFormat
val excludes = store.read[Map[ModuleID, Set[String]]](default = Map.empty[ModuleID, Set[String]])
val report = f(excludes)
val allExcludes = excludes ++ IvyActions.extractExcludes(report)
CacheIO.toFile(excludeMap)(allExcludes)(file)
IvyActions.addExcluded(report, classifiers, allExcludes)
store.write(allExcludes)
IvyActions.addExcluded(report, classifiers.toVector, allExcludes)
}
})
}
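
A minimal sketch, not part of this commit, of the store contract withExcludes relies on, reduced to a simpler value type. The file name is hypothetical; the IsoString/Converter wiring mirrors the code above, and the FileBasedStore import is elided (assumed to come from the sbt util-cache module).

import sjsonnew.BasicJsonProtocol._
implicit val isoString: sjsonnew.IsoString[scala.json.ast.unsafe.JValue] = sjsonnew.IsoString.iso(
  sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
  sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
)
val store = new FileBasedStore(file("target") / "demo-cache.json", sjsonnew.support.scalajson.unsafe.Converter)
val previous = store.read[Map[String, Int]](default = Map.empty[String, Int]) // falls back when the file is missing
store.write(previous + ("runs" -> 1))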
@ -1488,31 +1509,108 @@ object Classpaths {
// This code bumps up the sbt.UpdateConfiguration.logging to Full when logLevel is Debug.
import UpdateLogging.{ Full, DownloadOnly, Default }
val uc = (logLevel in update).?.value orElse st.get(logLevel.key) match {
case Some(Level.Debug) if uc0.logging == Default => uc0.copy(logging = Full)
case Some(x) if uc0.logging == Default => uc0.copy(logging = DownloadOnly)
case Some(Level.Debug) if uc0.logging == Default => uc0.withLogging(Full)
case Some(x) if uc0.logging == Default => uc0.withLogging(DownloadOnly)
case _ => uc0
}
val ewo =
if (executionRoots.value exists { _.key == evicted.key }) EvictionWarningOptions.empty
else (evictionWarningOptions in update).value
cachedUpdate(s.cacheDirectory / updateCacheName.value, show, ivyModule.value, uc, transform,
cachedUpdate(s.cacheStoreFactory sub updateCacheName.value, show, ivyModule.value, uc, transform,
skip = (skip in update).value, force = isRoot || forceUpdateByTime, depsUpdated = depsUpdated,
uwConfig = uwConfig, logicalClock = logicalClock, depDir = Some(depDir),
ewo = ewo, mavenStyle = ms, compatWarning = cw, log = s.log)
}
@deprecated("Use cachedUpdate with the variant that takes unresolvedHandler instead.", "0.13.6")
def cachedUpdate(cacheFile: File, label: String, module: IvySbt#Module, config: UpdateConfiguration,
transform: UpdateReport => UpdateReport, skip: Boolean, force: Boolean, depsUpdated: Boolean, log: Logger): UpdateReport =
cachedUpdate(cacheFile, label, module, config, transform, skip, force, depsUpdated,
UnresolvedWarningConfiguration(), LogicalClock.unknown, None, EvictionWarningOptions.empty, true, CompatibilityWarningOptions.default, log)
private[sbt] def cachedUpdate(cacheFile: File, label: String, module: IvySbt#Module, config: UpdateConfiguration,
object AltLibraryManagementCodec extends LibraryManagementCodec {
type In0 = ModuleSettings :+: UpdateConfiguration :+: HNil
type In = IvyConfiguration :+: In0
object NullLogger extends sbt.internal.util.BasicLogger {
override def control(event: sbt.util.ControlEvent.Value, message: String): Unit = ()
override def log(level: Level.Value, message: String): Unit = ()
override def logAll(events: Seq[sbt.util.LogEvent]): Unit = ()
override def success(message: String): Unit = ()
override def trace(t: Throwable): Unit = ()
}
implicit val altRawRepositoryJsonFormat: JsonFormat[RawRepository] =
project(_.name, FakeRawRepository.create)
// Redefine to add RawRepository, and switch to unionFormat
override lazy implicit val ResolverFormat: JsonFormat[Resolver] =
unionFormat8[Resolver, ChainedResolver, MavenRepo, MavenCache, FileRepository, URLRepository, SshRepository, SftpRepository, RawRepository]
type InlineIvyHL = (IvyPaths :+: Vector[Resolver] :+: Vector[Resolver] :+: Vector[ModuleConfiguration] :+: Boolean :+: Vector[String] :+: HNil)
def inlineIvyToHL(i: InlineIvyConfiguration): InlineIvyHL = (
i.paths :+: i.resolvers :+: i.otherResolvers :+: i.moduleConfigurations :+: i.localOnly
:+: i.checksums :+: HNil
)
type ExternalIvyHL = PlainFileInfo :+: Array[Byte] :+: HNil
def externalIvyToHL(e: ExternalIvyConfiguration): ExternalIvyHL =
FileInfo.exists(e.baseDirectory) :+: Hash.contentsIfLocal(e.uri) :+: HNil
// Redefine to use a subset of properties, that are serialisable
override lazy implicit val InlineIvyConfigurationFormat: JsonFormat[InlineIvyConfiguration] = {
def hlToInlineIvy(i: InlineIvyHL): InlineIvyConfiguration = {
val (paths :+: resolvers :+: otherResolvers :+: moduleConfigurations :+: localOnly
:+: checksums :+: HNil) = i
InlineIvyConfiguration(None, IO.createTemporaryDirectory, NullLogger, UpdateOptions(), paths,
resolvers, otherResolvers, moduleConfigurations, localOnly, checksums, None)
}
project[InlineIvyConfiguration, InlineIvyHL](inlineIvyToHL, hlToInlineIvy)
}
// Redefine to use a subset of properties, that are serialisable
override lazy implicit val ExternalIvyConfigurationFormat: JsonFormat[ExternalIvyConfiguration] = {
def hlToExternalIvy(e: ExternalIvyHL): ExternalIvyConfiguration = {
val baseDirectory :+: _ /* uri */ :+: HNil = e
ExternalIvyConfiguration(None, baseDirectory.file, NullLogger, UpdateOptions(),
IO.createTemporaryDirectory.toURI /* the original uri is destroyed.. */ , Vector.empty)
}
project[ExternalIvyConfiguration, ExternalIvyHL](externalIvyToHL, hlToExternalIvy)
}
// Redefine to switch to unionFormat
override implicit lazy val IvyConfigurationFormat: JsonFormat[IvyConfiguration] =
unionFormat2[IvyConfiguration, InlineIvyConfiguration, ExternalIvyConfiguration]
def forHNil[A <: HNil]: Equiv[A] = new Equiv[A] { def equiv(x: A, y: A) = true }
implicit val lnilEquiv1: Equiv[HNil] = forHNil[HNil]
implicit val lnilEquiv2: Equiv[HNil.type] = forHNil[HNil.type]
implicit def hconsEquiv[H, T <: HList](implicit he: Equiv[H], te: Equiv[T]): Equiv[H :+: T] =
new Equiv[H :+: T] {
def equiv(x: H :+: T, y: H :+: T) = he.equiv(x.head, y.head) && te.equiv(x.tail, y.tail)
}
implicit object altIvyConfigurationEquiv extends Equiv[IvyConfiguration] {
def equiv(x: IvyConfiguration, y: IvyConfiguration): Boolean = (x, y) match {
case (x: InlineIvyConfiguration, y: InlineIvyConfiguration) =>
implicitly[Equiv[InlineIvyHL]].equiv(inlineIvyToHL(x), inlineIvyToHL(y))
case (x: ExternalIvyConfiguration, y: ExternalIvyConfiguration) =>
implicitly[Equiv[ExternalIvyHL]].equiv(externalIvyToHL(x), externalIvyToHL(y))
}
}
implicit object altInSingletonCache extends SingletonCache[In] {
def write(to: Output, value: In) = to.write(value)
def read(from: Input) = from.read[In]()
def equiv = hconsEquiv(altIvyConfigurationEquiv, implicitly[Equiv[In0]])
}
}
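
A minimal sketch, not part of this commit, of what the codec object above is for: importing its members brings the RawRepository-aware Resolver format and the redefined IvyConfiguration format into implicit scope, so the update inputs can be serialised for change tracking (the scripted cache-update test further down uses the same import).

import sbt.Classpaths.AltLibraryManagementCodec._
val ivyConfFormat = implicitly[sjsonnew.JsonFormat[sbt.internal.librarymanagement.IvyConfiguration]]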
private[sbt] def cachedUpdate(cacheStoreFactory: CacheStoreFactory, label: String, module: IvySbt#Module, config: UpdateConfiguration,
transform: UpdateReport => UpdateReport, skip: Boolean, force: Boolean, depsUpdated: Boolean,
uwConfig: UnresolvedWarningConfiguration, logicalClock: LogicalClock, depDir: Option[File],
ewo: EvictionWarningOptions, mavenStyle: Boolean, compatWarning: CompatibilityWarningOptions,
log: Logger): UpdateReport =
{
implicit val updateCache = updateIC
type In = IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil
def work = (_: In) match {
case conf :+: settings :+: config :+: HNil =>
import ShowLines._
@ -1538,47 +1636,54 @@ object Classpaths {
out.allFiles.forall(f => fileUptodate(f, out.stamps)) &&
fileUptodate(out.cachedDescriptor, out.stamps)
val outCacheFile = cacheFile / "output"
def skipWork: In => UpdateReport =
Tracked.lastOutput[In, UpdateReport](outCacheFile) {
val outStore = cacheStoreFactory derive "output"
def skipWork: In => UpdateReport = {
import LibraryManagementCodec._
Tracked.lastOutput[In, UpdateReport](outStore) {
case (_, Some(out)) => out
case _ => sys.error("Skipping update requested, but update has not previously run successfully.")
}
def doWork: In => UpdateReport =
Tracked.inputChanged(cacheFile / "inputs") { (inChanged: Boolean, in: In) =>
val outCache = Tracked.lastOutput[In, UpdateReport](outCacheFile) {
case (_, Some(out)) if uptodate(inChanged, out) => out
case _ => work(in)
}
try {
outCache(in)
} catch {
case e: NullPointerException =>
val r = work(in)
log.warn("Update task has failed to cache the report due to null.")
log.warn("Report the following output to sbt:")
r.toString.lines foreach { log.warn(_) }
log.trace(e)
r
case e: OutOfMemoryError =>
val r = work(in)
log.warn("Update task has failed to cache the report due to OutOfMemoryError.")
log.trace(e)
r
}
}
def doWorkInternal = { (inChanged: Boolean, in: In) =>
import LibraryManagementCodec._
val outCache = Tracked.lastOutput[In, UpdateReport](outStore) {
case (_, Some(out)) if uptodate(inChanged, out) => out
case _ => work(in)
}
try {
outCache(in)
} catch {
case e: NullPointerException =>
val r = work(in)
log.warn("Update task has failed to cache the report due to null.")
log.warn("Report the following output to sbt:")
r.toString.lines foreach { log.warn(_) }
log.trace(e)
r
case e: OutOfMemoryError =>
val r = work(in)
log.warn("Update task has failed to cache the report due to OutOfMemoryError.")
log.trace(e)
r
}
}
def doWork: In => UpdateReport = {
import AltLibraryManagementCodec._
Tracked.inputChanged(cacheStoreFactory derive "inputs")(doWorkInternal)
}
val f = if (skip && !force) skipWork else doWork
f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)
}
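
A minimal sketch, not part of this commit, of the Tracked wiring that cachedUpdate builds above, reduced to a String => Int function. All names are hypothetical; the codec imports mirror doWork/skipWork and the scripted test below, and the CacheStoreFactory import location is assumed from the CacheStore import added to Keys.scala in this diff.

import sjsonnew.BasicJsonProtocol._
import sbt.internal.util.CacheImplicits._
import sbt.internal.util.CacheStoreFactory // package assumed

def cachedLength(cacheStoreFactory: CacheStoreFactory): String => Int =
  Tracked.inputChanged(cacheStoreFactory derive "inputs") { (inChanged: Boolean, in: String) =>
    val outCache = Tracked.lastOutput[String, Int](cacheStoreFactory derive "output") {
      case (_, Some(out)) if !inChanged => out       // input unchanged: reuse the cached result
      case _                            => in.length // recompute and cache
    }
    outCache(in)
  }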
private[this] def fileUptodate(file: File, stamps: Map[File, Long]): Boolean =
stamps.get(file).forall(_ == file.lastModified)
private[sbt] def dependencyPositionsTask: Initialize[Task[Map[ModuleID, SourcePosition]]] = Def.task {
val projRef = thisProjectRef.value
val st = state.value
val s = streams.value
val cacheFile = s.cacheDirectory / updateCacheName.value
implicit val depSourcePosCache = moduleIDSeqIC
implicit val outFormat = modulePositionMapFormat
val cacheStoreFactory = s.cacheStoreFactory sub updateCacheName.value
import sbt.librarymanagement.LibraryManagementCodec._
def modulePositions: Map[ModuleID, SourcePosition] =
try {
val extracted = (Project extract st)
@ -1596,9 +1701,27 @@ object Classpaths {
case NonFatal(e) => Map()
}
val outCacheFile = cacheFile / "output_dsp"
val f = Tracked.inputChanged(cacheFile / "input_dsp") { (inChanged: Boolean, in: Seq[ModuleID]) =>
val outCache = Tracked.lastOutput[Seq[ModuleID], Map[ModuleID, SourcePosition]](outCacheFile) {
val outCacheStore = cacheStoreFactory derive "output_dsp"
val f = Tracked.inputChanged(cacheStoreFactory derive "input_dsp") { (inChanged: Boolean, in: Seq[ModuleID]) =>
implicit val NoPositionFormat: JsonFormat[NoPosition.type] = asSingleton(NoPosition)
implicit val LinePositionFormat: IsoLList.Aux[LinePosition, String :*: Int :*: LNil] = LList.iso(
{ l: LinePosition => ("path", l.path) :*: ("startLine", l.startLine) :*: LNil },
{ in: String :*: Int :*: LNil => LinePosition(in.head, in.tail.head) }
)
implicit val LineRangeFormat: IsoLList.Aux[LineRange, Int :*: Int :*: LNil] = LList.iso(
{ l: LineRange => ("start", l.start) :*: ("end", l.end) :*: LNil },
{ in: Int :*: Int :*: LNil => LineRange(in.head, in.tail.head) }
)
implicit val RangePositionFormat: IsoLList.Aux[RangePosition, String :*: LineRange :*: LNil] = LList.iso(
{ r: RangePosition => ("path", r.path) :*: ("range", r.range) :*: LNil },
{ in: String :*: LineRange :*: LNil => RangePosition(in.head, in.tail.head) }
)
implicit val SourcePositionFormat: JsonFormat[SourcePosition] =
unionFormat3[SourcePosition, NoPosition.type, LinePosition, RangePosition]
implicit val midJsonKeyFmt: sjsonnew.JsonKeyFormat[ModuleID] = moduleIdJsonKeyFormat
val outCache = Tracked.lastOutput[Seq[ModuleID], Map[ModuleID, SourcePosition]](outCacheStore) {
case (_, Some(out)) if !inChanged => out
case _ => modulePositions
}
@ -1621,13 +1744,13 @@ object Classpaths {
def defaultRepositoryFilter = (repo: MavenRepository) => !repo.root.startsWith("file:")
def getPublishTo(repo: Option[Resolver]): Resolver = repo getOrElse sys.error("Repository for publishing is not specified.")
def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) =
def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging = UpdateLogging.DownloadOnly) =
new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging)
@deprecated("Previous semantics allowed overwriting cached files, which was unsafe. Please specify overwrite parameter.", "0.13.2")
def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging.Value): PublishConfiguration =
def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging): PublishConfiguration =
publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = true)
def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly, overwrite: Boolean = false) =
new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging, overwrite)
def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging = UpdateLogging.DownloadOnly, overwrite: Boolean = false) =
new PublishConfiguration(ivyFile, resolverName, artifacts, checksums.toVector, logging, overwrite)
def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath
@ -1635,7 +1758,7 @@ object Classpaths {
(thisProjectRef, settingsData, buildDependencies) map { (ref, data, deps) =>
deps.classpath(ref) flatMap { dep =>
(projectID in dep.project) get data map {
_.copy(configurations = dep.configuration, explicitArtifacts = Nil)
_.withConfigurations(dep.configuration).withExplicitArtifacts(Vector.empty)
}
}
}
@ -1732,7 +1855,7 @@ object Classpaths {
warnResolversConflict(rs ++: other, s.log)
val resCacheDir = t / "resolution-cache"
new InlineIvyConfiguration(paths, rs, other, moduleConfs, off, Option(lock(app)), check, Some(resCacheDir), uo, s.log)
new InlineIvyConfiguration(paths, rs.toVector, other.toVector, moduleConfs.toVector, off, Option(lock(app)), check.toVector, Some(resCacheDir), uo, s.log)
}
import java.util.LinkedHashSet
@ -1854,7 +1977,7 @@ object Classpaths {
lazy val sbtPluginReleases = Resolver.sbtPluginRepo("releases")
def modifyForPlugin(plugin: Boolean, dep: ModuleID): ModuleID =
if (plugin) dep.copy(configurations = Some(Provided.name)) else dep
if (plugin) dep.withConfigurations(Some(Provided.name)) else dep
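
A minimal sketch, not part of this commit, of the migration recurring throughout this diff: the contraband-generated ModuleID no longer supports case-class .copy, so field updates become .withXyz calls and Disabled becomes a constructor. The coordinates are hypothetical.

val dep      = ModuleID("org.example", "demo", "0.1.0")
val provided = dep.withConfigurations(Some(Provided.name)) // was: dep.copy(configurations = Some(Provided.name))
val plain    = dep.withCrossVersion(Disabled())            // was: dep.copy(crossVersion = CrossVersion.Disabled)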
@deprecated("Explicitly specify the organization using the other variant.", "0.13.0")
def autoLibraryDependency(auto: Boolean, plugin: Boolean, version: String): Seq[ModuleID] =
@ -1923,7 +2046,7 @@ object Classpaths {
report.substitute { (configuration, module, arts) =>
if (module.organization == scalaOrg) {
val jarName = module.name + ".jar"
val replaceWith = scalaJars(module.revision).filter(_.getName == jarName).map(f => (Artifact(f.getName.stripSuffix(".jar")), f))
val replaceWith = scalaJars(module.revision).toVector.filter(_.getName == jarName).map(f => (Artifact(f.getName.stripSuffix(".jar")), f))
if (replaceWith.isEmpty) arts else replaceWith
} else
arts
@ -1969,7 +2092,7 @@ object Classpaths {
repo match {
case m: xsbti.MavenRepository => MavenRepository(m.id, m.url.toString)
case i: xsbti.IvyRepository =>
val patterns = Patterns(i.ivyPattern :: Nil, i.artifactPattern :: Nil, mavenCompatible(i), descriptorOptional(i), skipConsistencyCheck(i))
val patterns = Patterns(Vector(i.ivyPattern), Vector(i.artifactPattern), mavenCompatible(i), descriptorOptional(i), skipConsistencyCheck(i))
i.url.getProtocol match {
case "file" =>
// This hackery is to deal suitably with UNC paths on Windows. Once we can assume Java7, Paths should save us from this.
@ -2045,7 +2168,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
/** Transforms `dependency` to be in the auto-compiler plugin configuration. */
def compilerPlugin(dependency: ModuleID): ModuleID =
dependency.copy(configurations = Some("plugin->default(compile)"))
dependency.withConfigurations(Some("plugin->default(compile)"))
/** Adds `dependency` to `libraryDependencies` in the auto-compiler plugin configuration. */
def addCompilerPlugin(dependency: ModuleID): Setting[Seq[ModuleID]] =
@ -2082,17 +2205,17 @@ trait BuildExtra extends BuildCommon with DefExtra {
case (u, otherTask) =>
otherTask map {
case (base, app, pr, uo, s) =>
val extraResolvers = if (addMultiResolver) pr :: Nil else Nil
new ExternalIvyConfiguration(base, u, Option(lock(app)), extraResolvers, uo, s.log)
val extraResolvers = if (addMultiResolver) Vector(pr) else Vector.empty
ExternalIvyConfiguration(Option(lock(app)), base, s.log, uo, u, extraResolvers)
}
}).value
}
private[this] def inBase(name: String): Initialize[File] = Def.setting { baseDirectory.value / name }
def externalIvyFile(file: Initialize[File] = inBase("ivy.xml"), iScala: Initialize[Option[IvyScala]] = ivyScala): Setting[Task[ModuleSettings]] =
moduleSettings := new IvyFileConfiguration(file.value, iScala.value, ivyValidate.value, managedScalaInstance.value)
moduleSettings := IvyFileConfiguration(ivyValidate.value, iScala.value, file.value, managedScalaInstance.value)
def externalPom(file: Initialize[File] = inBase("pom.xml"), iScala: Initialize[Option[IvyScala]] = ivyScala): Setting[Task[ModuleSettings]] =
moduleSettings := new PomConfiguration(file.value, ivyScala.value, ivyValidate.value, managedScalaInstance.value)
moduleSettings := PomConfiguration(ivyValidate.value, ivyScala.value, file.value, managedScalaInstance.value)
def runInputTask(config: Configuration, mainClass: String, baseArguments: String*): Initialize[InputTask[Unit]] =
Def.inputTask {
@ -2183,13 +2306,13 @@ trait BuildCommon {
def getFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State): Option[T] =
SessionVar.get(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)
def loadFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State)(implicit f: sbinary.Format[T]): Option[T] =
def loadFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State)(implicit f: JsonFormat[T]): Option[T] =
SessionVar.load(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)
// intended for use in constructing InputTasks
def loadForParser[P, T](task: TaskKey[T])(f: (State, Option[T]) => Parser[P])(implicit format: sbinary.Format[T]): Initialize[State => Parser[P]] =
def loadForParser[P, T](task: TaskKey[T])(f: (State, Option[T]) => Parser[P])(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] =
loadForParserI(task)(Def value f)(format)
def loadForParserI[P, T](task: TaskKey[T])(init: Initialize[(State, Option[T]) => Parser[P]])(implicit format: sbinary.Format[T]): Initialize[State => Parser[P]] =
def loadForParserI[P, T](task: TaskKey[T])(init: Initialize[(State, Option[T]) => Parser[P]])(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] =
(resolvedScoped, init)((ctx, f) => (s: State) => f(s, loadFromContext(task, ctx, s)(format)))
def getForParser[P, T](task: TaskKey[T])(init: (State, Option[T]) => Parser[P]): Initialize[State => Parser[P]] =
@ -2198,7 +2321,7 @@ trait BuildCommon {
(resolvedScoped, init)((ctx, f) => (s: State) => f(s, getFromContext(task, ctx, s)))
// these are for use for constructing Tasks
def loadPrevious[T](task: TaskKey[T])(implicit f: sbinary.Format[T]): Initialize[Task[Option[T]]] =
def loadPrevious[T](task: TaskKey[T])(implicit f: JsonFormat[T]): Initialize[Task[Option[T]]] =
(state, resolvedScoped) map { (s, ctx) => loadFromContext(task, ctx, s)(f) }
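
A minimal sketch, not part of this commit, of a build using the new signatures: the implicit evidence is now a sjsonnew.JsonFormat where it used to be an sbinary.Format. The task key is hypothetical, and the helpers are reached through Defaults, which mixes in BuildCommon per the hunk headers above.

import sjsonnew.BasicJsonProtocol._
import complete.DefaultParsers._

val lastRunArgs = taskKey[Seq[String]]("Hypothetical task whose result is persisted between runs.")

// JsonFormat[Seq[String]] is supplied by BasicJsonProtocol
val previousArgs: Def.Initialize[Task[Option[Seq[String]]]] = Defaults.loadPrevious(lastRunArgs)
val argsParser: Def.Initialize[State => complete.Parser[Seq[String]]] =
  Defaults.loadForParser(lastRunArgs)((_, _) => spaceDelimited("<args>"))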
def getPrevious[T](task: TaskKey[T]): Initialize[Task[Option[T]]] =

View File

@ -31,7 +31,7 @@ import KeyRanks._
import sbt.internal.{ BuildStructure, LoadedBuild, PluginDiscovery, BuildDependencies, SessionSettings }
import sbt.io.FileFilter
import sbt.internal.io.WatchState
import sbt.internal.util.{ AttributeKey, SourcePosition }
import sbt.internal.util.{ AttributeKey, CacheStore, SourcePosition }
import sbt.librarymanagement.Configurations.CompilerPlugin
import sbt.librarymanagement.{
@ -49,11 +49,14 @@ import sbt.librarymanagement.{
ModuleConfiguration,
ModuleID,
ModuleInfo,
ModuleSettings,
Resolver,
ScalaVersion,
ScmInfo,
TrackLevel,
UpdateConfiguration,
UpdateOptions,
UpdateLogging,
UpdateReport
}
import sbt.internal.librarymanagement.{
@ -64,19 +67,18 @@ import sbt.internal.librarymanagement.{
IvyPaths,
IvySbt,
MakePomConfiguration,
ModuleSettings,
PublishConfiguration,
RetrieveConfiguration,
SbtExclusionRule,
UnresolvedWarningConfiguration,
UpdateConfiguration,
UpdateLogging
UnresolvedWarningConfiguration
}
import sbt.util.{ AbstractLogger, Level, Logger }
object Keys {
val TraceValues = "-1 to disable, 0 for up to the first sbt frame, or a positive number to set the maximum number of frames shown."
val fileToStore = SettingKey[File => CacheStore]("file-to-store", "How to go from a file to a store.", ASetting)
// logging
val logLevel = SettingKey[Level.Value]("log-level", "The amount of logging sent to the screen.", ASetting)
val persistLogLevel = SettingKey[Level.Value]("persist-log-level", "The amount of logging sent to a file for persistence.", CSetting)
@ -374,7 +376,7 @@ object Keys {
val ivyXML = SettingKey[NodeSeq]("ivy-xml", "Defines inline Ivy XML for configuring dependency management.", BSetting)
val ivyScala = SettingKey[Option[IvyScala]]("ivy-scala", "Configures how Scala dependencies are checked, filtered, and injected.", CSetting)
val ivyValidate = SettingKey[Boolean]("ivy-validate", "Enables/disables Ivy validation of module metadata.", BSetting)
val ivyLoggingLevel = SettingKey[UpdateLogging.Value]("ivy-logging-level", "The logging level for updating.", BSetting)
val ivyLoggingLevel = SettingKey[UpdateLogging]("ivy-logging-level", "The logging level for updating.", BSetting)
val publishTo = TaskKey[Option[Resolver]]("publish-to", "The resolver to publish to.", ASetting)
val artifacts = SettingKey[Seq[Artifact]]("artifacts", "The artifact definitions for the current module. Must be consistent with " + packagedArtifacts.key.label + ".", BSetting)
val projectDescriptors = TaskKey[Map[ModuleRevisionId, ModuleDescriptor]]("project-descriptors", "Project dependency map for the inter-project resolver.", DTask)

View File

@ -34,8 +34,8 @@ object Opts {
import sbt.io.syntax._
val sonatypeReleases = Resolver.sonatypeRepo("releases")
val sonatypeSnapshots = Resolver.sonatypeRepo("snapshots")
val sonatypeStaging = new MavenRepository("sonatype-staging", "https://oss.sonatype.org/service/local/staging/deploy/maven2")
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository" asFile)
val sonatypeStaging = MavenRepository("sonatype-staging", "https://oss.sonatype.org/service/local/staging/deploy/maven2")
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository" asFile)(Resolver.defaultPatterns)
}
}

View File

@ -11,10 +11,12 @@ import Keys.{ appConfiguration, stateBuildStructure, commands, configuration, hi
import Scope.{ GlobalScope, ThisScope }
import Def.{ Flattened, Initialize, ScopedKey, Setting }
import sbt.internal.{ Load, BuildStructure, LoadedBuild, LoadedBuildUnit, SettingGraph, SettingCompletions, AddSettings, SessionSettings }
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Eval, Relation, Settings, Show, ~> }
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, Show, ~> }
import sbt.internal.util.Types.{ const, idFun }
import sbt.internal.util.complete.DefaultParsers
import sbt.librarymanagement.Configuration
import sbt.util.Eval
import sjsonnew.JsonFormat
import language.experimental.macros
@ -283,10 +285,10 @@ object Project extends ProjectExtra {
val autoPlugins: Seq[AutoPlugin],
val projectOrigin: ProjectOrigin
) extends ProjectDefinition[PR] {
def aggregate: Seq[PR] = aggregateEval.get
def dependencies: Seq[ClasspathDep[PR]] = dependenciesEval.get
def delegates: Seq[PR] = delegatesEval.get
def settings: Seq[Def.Setting[_]] = settingsEval.get
def aggregate: Seq[PR] = aggregateEval.value
def dependencies: Seq[ClasspathDep[PR]] = dependenciesEval.value
def delegates: Seq[PR] = delegatesEval.value
def settings: Seq[Def.Setting[_]] = settingsEval.value
Dag.topologicalSort(configurations)(_.extendsConfigs) // checks for cyclic references here instead of having to do it in Scope.delegates
}
@ -630,7 +632,7 @@ object Project extends ProjectExtra {
import SessionVar.{ persistAndSet, resolveContext, set, transform => tx }
def updateState(f: (State, S) => State): Def.Initialize[Task[S]] = i(t => tx(t, f))
def storeAs(key: TaskKey[S])(implicit f: sbinary.Format[S]): Def.Initialize[Task[S]] = (Keys.resolvedScoped, i) { (scoped, task) =>
def storeAs(key: TaskKey[S])(implicit f: JsonFormat[S]): Def.Initialize[Task[S]] = (Keys.resolvedScoped, i) { (scoped, task) =>
tx(task, (state, value) => persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f))
}
def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] =

View File

@ -7,7 +7,7 @@ import sbt.internal.util.{ AttributeMap, IMap, Types }
import Def.ScopedKey
import Types.Id
import Keys.sessionVars
import sbinary.{ Format, Operations }
import sjsonnew.JsonFormat
object SessionVar {
val DefaultDataID = "data"
@ -20,15 +20,15 @@ object SessionVar {
}
def emptyMap = Map(IMap.empty)
def persistAndSet[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit f: sbinary.Format[T]): State =
def persistAndSet[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit f: JsonFormat[T]): State =
{
persist(key, state, value)(f)
set(key, state, value)
}
def persist[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit f: sbinary.Format[T]): Unit =
def persist[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit f: JsonFormat[T]): Unit =
Project.structure(state).streams(state).use(key)(s =>
Operations.write(s.binary(DefaultDataID), value)(f))
s.getOutput(DefaultDataID).write(value))
def clear(s: State): State = s.put(sessionVars, SessionVar.emptyMap)
@ -51,16 +51,16 @@ object SessionVar {
ScopedKey(scope, key.key)
}
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: Format[T]): Option[T] =
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
Project.structure(state).streams(state).use(key) { s =>
try { Some(Operations.read(s.readBinary(key, DefaultDataID))) }
try { Some(s.getInput(key, DefaultDataID).read[T]) }
catch { case NonFatal(e) => None }
}
def load[T](key: ScopedKey[Task[T]], state: State)(implicit f: Format[T]): Option[T] =
def load[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
get(key, state) orElse read(key, state)(f)
def loadAndSet[T](key: ScopedKey[Task[T]], state: State, setIfUnset: Boolean = true)(implicit f: Format[T]): (State, Option[T]) =
def loadAndSet[T](key: ScopedKey[Task[T]], state: State, setIfUnset: Boolean = true)(implicit f: JsonFormat[T]): (State, Option[T]) =
get(key, state) match {
case s: Some[T] => (state, s)
case None => read(key, state)(f) match {

View File

@ -8,8 +8,9 @@ import java.io.File
import Keys.{ organization, thisProject, autoGeneratedProject }
import Def.Setting
import sbt.io.Hash
import sbt.internal.util.{ Attributed, Eval }
import sbt.internal.util.Attributed
import sbt.internal.inc.ReflectUtilities
import sbt.util.Eval
import sbt.Project._
trait BuildDef {

View File

@ -212,8 +212,10 @@ object BuildStreams {
final val BuildUnitPath = "$build"
final val StreamsDirectory = "streams"
def mkStreams(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope]): State => Streams = s =>
s get Keys.stateStreams getOrElse std.Streams(path(units, root, data), displayFull, LogManager.construct(data, s))
def mkStreams(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope]): State => Streams = s => {
implicit val isoString: sjsonnew.IsoString[scala.json.ast.unsafe.JValue] = sjsonnew.IsoString.iso(sjsonnew.support.scalajson.unsafe.CompactPrinter.apply, sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe)
s get Keys.stateStreams getOrElse std.Streams(path(units, root, data), displayFull, LogManager.construct(data, s), sjsonnew.support.scalajson.unsafe.Converter)
}
def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(scoped: ScopedKey[_]): File =
resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped))

View File

@ -19,7 +19,8 @@ object ConsoleProject {
ScalaInstance(scalaProvider.version, scalaProvider.launcher)
}
val sourcesModule = extracted.get(Keys.scalaCompilerBridgeSource)
val compiler = Compiler.scalaCompiler(scalaInstance, ClasspathOptionsUtil.repl, None, ivyConf, sourcesModule)(state.configuration, log)
val fileToStore = extracted.get(Keys.fileToStore)
val compiler = Compiler.scalaCompiler(scalaInstance, ClasspathOptionsUtil.repl, None, ivyConf, fileToStore, sourcesModule)(state.configuration, log)
val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1))
val importString = imports.mkString("", ";\n", ";\n\n")
val initCommands = importString + extra

View File

@ -1,7 +1,8 @@
package sbt
package internal
import sbt.internal.util.{ Eval, RangePosition }
import sbt.internal.util.RangePosition
import sbt.util.Eval
import sbt.librarymanagement.Configuration

View File

@ -7,7 +7,7 @@ package internal
import sbt.internal.util.Attributed
import sbt.util.{ Level, Logger }
import sbt.librarymanagement.{ Configurations, CrossVersion, MavenRepository, ModuleID, Resolver }
import sbt.librarymanagement.{ Configurations, CrossVersion, Disabled, MavenRepository, ModuleID, Resolver }
import java.io.File
import Configurations.Compile
@ -55,15 +55,15 @@ object IvyConsole {
private[this] def parseResolver(arg: String): MavenRepository =
{
val Array(name, url) = arg.split(" at ")
new MavenRepository(name.trim, url.trim)
MavenRepository(name.trim, url.trim)
}
val DepPattern = """([^%]+)%(%?)([^%]+)%([^%]+)""".r
def parseManaged(arg: String, log: Logger): Seq[ModuleID] =
arg match {
case DepPattern(group, cross, name, version) =>
val crossV = if (cross.trim.isEmpty) CrossVersion.Disabled else CrossVersion.binary
ModuleID(group.trim, name.trim, version.trim, crossVersion = crossV) :: Nil
val crossV = if (cross.trim.isEmpty) Disabled() else CrossVersion.binary
ModuleID(group.trim, name.trim, version.trim).withCrossVersion(crossV) :: Nil
case _ => log.warn("Ignoring invalid argument '" + arg + "'"); Nil
}
}
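
A minimal sketch, not part of this commit, of the string shape DepPattern accepts; this is plain regex extraction, with the whitespace trimmed afterwards by the code above. The coordinates are hypothetical.

val DepPattern = """([^%]+)%(%?)([^%]+)%([^%]+)""".r
val DepPattern(group, cross, name, version) = "org.example %% demo % 0.1.0"
// group == "org.example ", cross == "%", name == " demo ", version == " 0.1.0"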

View File

@ -5,7 +5,7 @@ package sbt
package internal
import sbt.librarymanagement.{ Configuration, Configurations, Resolver, UpdateOptions }
import sbt.internal.librarymanagement.{ InlineIvyConfiguration, IvyPaths }
import sbt.internal.librarymanagement.{ DefaultFileToStore, InlineIvyConfiguration, IvyPaths }
import java.io.File
import java.net.{ URI, URL }
@ -35,7 +35,8 @@ import Keys.{
update
}
import tools.nsc.reporters.ConsoleReporter
import sbt.internal.util.{ Attributed, Eval => Ev, Settings, Show, ~> }
import sbt.internal.util.{ Attributed, Settings, Show, ~> }
import sbt.util.{ Eval => Ev }
import sbt.internal.util.Attributed.data
import Scope.GlobalScope
import sbt.internal.util.Types.const
@ -66,11 +67,11 @@ private[sbt] object Load {
val classpath = Attributed.blankSeq(provider.mainClasspath ++ scalaProvider.jars)
val localOnly = false
val lock = None
val checksums = Nil
val ivyPaths = new IvyPaths(baseDirectory, bootIvyHome(state.configuration))
val ivyConfiguration = new InlineIvyConfiguration(ivyPaths, Resolver.withDefaultResolvers(Nil),
Nil, Nil, localOnly, lock, checksums, None, UpdateOptions(), log)
val compilers = Compiler.compilers(ClasspathOptionsUtil.boot, ivyConfiguration)(state.configuration, log)
val checksums = Vector.empty
val ivyPaths = IvyPaths(baseDirectory, bootIvyHome(state.configuration))
val ivyConfiguration = new InlineIvyConfiguration(ivyPaths, Resolver.withDefaultResolvers(Nil).toVector,
Vector.empty, Vector.empty, localOnly, lock, checksums, None, UpdateOptions(), log)
val compilers = Compiler.compilers(ClasspathOptionsUtil.boot, ivyConfiguration, DefaultFileToStore)(state.configuration, log)
val evalPluginDef = EvaluateTask.evalPluginDef(log) _
val delegates = defaultDelegates
val pluginMgmt = PluginManagement(loader)

View File

@ -37,7 +37,7 @@ object PluginManagement {
classpath flatMap { _.metadata get Keys.moduleID.key map keepOverrideInfo } toSet;
def keepOverrideInfo(m: ModuleID): ModuleID =
ModuleID(m.organization, m.name, m.revision, crossVersion = m.crossVersion)
ModuleID(m.organization, m.name, m.revision).withCrossVersion(m.crossVersion)
final class PluginClassLoader(p: ClassLoader) extends URLClassLoader(Array(), p) {
private[this] val urlSet = new collection.mutable.HashSet[URI] // remember: don't use hashCode/equals on URL

View File

@ -0,0 +1,11 @@
package sbt
package internal
package librarymanagement
import sbt.io.IO
import sbt.librarymanagement.RawRepository
object FakeRawRepository {
def create(name: String): RawRepository =
new RawRepository(new FakeResolver(name, IO.createTemporaryDirectory, Map.empty))
}

View File

@ -0,0 +1,34 @@
import sbt.contraband.ast._
import sbt.contraband.CodecCodeGen
object ContrabandConfig {
/** Extract the only type parameter from a TpeRef */
def oneArg(tpe: Type): Type = {
val pat = s"""${tpe.removeTypeParameters.name}[<\\[](.+?)[>\\]]""".r
val pat(arg0) = tpe.name
NamedType(arg0 split '.' toList)
}
/** Extract the two type parameters from a TpeRef */
def twoArgs(tpe: Type): List[Type] = {
val pat = s"""${tpe.removeTypeParameters.name}[<\\[](.+?), (.+?)[>\\]]""".r
val pat(arg0, arg1) = tpe.name
NamedType(arg0 split '.' toList) :: NamedType(arg1 split '.' toList) :: Nil
}
/** Codecs that were manually written. */
val myCodecs: PartialFunction[String, Type => List[String]] = {
// TODO: These are handled by BasicJsonProtocol, and sbt-contraband should handle them by default, imo
case "Option" | "Set" | "scala.Vector" => { tpe => getFormats(oneArg(tpe)) }
case "Map" | "Tuple2" | "scala.Tuple2" => { tpe => twoArgs(tpe).flatMap(getFormats) }
case "Int" | "Long" => { _ => Nil }
}
/** Returns the list of formats required to encode the given `TpeRef`. */
val getFormats: Type => List[String] =
CodecCodeGen.extensibleFormatsForType {
case tpe: Type if myCodecs isDefinedAt tpe.removeTypeParameters.name =>
myCodecs(tpe.removeTypeParameters.name)(tpe)
case other => CodecCodeGen.formatsForType(other)
}
}
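
A minimal sketch, not part of this commit, of the bracket parsing the two helpers above rely on, demonstrated on a plain string rather than a contraband AST node:

val pat = """Map[<\[](.+?), (.+?)[>\]]""".r
val pat(keyTpe, valueTpe) = "Map[String, Int]"
// keyTpe == "String", valueTpe == "Int"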

View File

@ -9,10 +9,10 @@ object Dependencies {
val scala211 = "2.11.8"
// sbt modules
private val ioVersion = "1.0.0-M6"
private val utilVersion = "0.1.0-M14"
private val lmVersion = "0.1.0-X1"
private val zincVersion = "1.0.0-X3"
private val ioVersion = "1.0.0-M9"
private val utilVersion = "1.0.0-M17"
private val lmVersion = "0.1.0-X3"
private val zincVersion = "1.0.0-X7"
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion
@ -85,7 +85,7 @@ object Dependencies {
def addSbtZinc(p: Project): Project = addSbtModule(p, sbtZincPath, "zinc", zinc)
def addSbtZincCompile(p: Project): Project = addSbtModule(p, sbtZincPath, "zincCompile", zincCompile)
val sjsonNewScalaJson = "com.eed3si9n" %% "sjson-new-scalajson" % "0.4.2"
val sjsonNewScalaJson = "com.eed3si9n" %% "sjson-new-scalajson" % "0.7.0"
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.11.4"
val specs2 = "org.specs2" %% "specs2" % "2.3.11"

project/contraband.sbt Normal file
View File

@ -0,0 +1 @@
addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.3.0-M3")

View File

@ -1 +0,0 @@
addSbtPlugin("org.scala-sbt" % "sbt-datatype" % "0.2.6")

View File

@ -64,8 +64,7 @@ trait Import {
type BufferedLogger = sbt.internal.util.BufferedLogger
val Cache = sbt.internal.util.Cache
type Cache[I, O] = sbt.internal.util.Cache[I, O]
val CacheIO = sbt.internal.util.CacheIO
type CacheImplicits = sbt.internal.util.CacheImplicits
val CacheImplicits = sbt.internal.util.CacheImplicits
val ChangeReport = sbt.internal.util.ChangeReport
type ChangeReport[T] = sbt.internal.util.ChangeReport[T]
type Changed[O] = sbt.internal.util.Changed[O]
@ -107,7 +106,6 @@ trait Import {
type HCons[H, T <: HList] = sbt.internal.util.HCons[H, T]
val HList = sbt.internal.util.HList
type HList = sbt.internal.util.HList
type HListCacheImplicits = sbt.internal.util.HListCacheImplicits
val HNil = sbt.internal.util.HNil
type HNil = sbt.internal.util.HNil
type HashFileInfo = sbt.internal.util.HashFileInfo
@ -117,8 +115,6 @@ trait Import {
val IMap = sbt.internal.util.IMap
type IMap[K[_], V[_]] = sbt.internal.util.IMap[K, V]
type Init[S] = sbt.internal.util.Init[S]
val InputCache = sbt.internal.util.InputCache
type InputCache[I] = sbt.internal.util.InputCache[I]
type JLine = sbt.internal.util.JLine
val KCons = sbt.internal.util.KCons
type KCons[H, +T <: KList[M], +M[_]] = sbt.internal.util.KCons[H, T, M]
@ -149,7 +145,6 @@ trait Import {
type RangePosition = sbt.internal.util.RangePosition
val Relation = sbt.internal.util.Relation
type Relation[A, B] = sbt.internal.util.Relation[A, B]
type SBinaryFormats = sbt.internal.util.SBinaryFormats
val ScalaKeywords = sbt.internal.util.ScalaKeywords
type Settings[S] = sbt.internal.util.Settings[S]
type SharedAttributeKey[T] = sbt.internal.util.SharedAttributeKey[T]
@ -172,7 +167,6 @@ trait Import {
type TypeFunctions = sbt.internal.util.TypeFunctions
val Types = sbt.internal.util.Types
type Types = sbt.internal.util.Types
type UnionImplicits = sbt.internal.util.UnionImplicits
type UnprintableException = sbt.internal.util.UnprintableException
val Util = sbt.internal.util.Util
val ~> = sbt.internal.util.~>
@ -217,6 +211,8 @@ trait Import {
// sbt.librarymanagement
val Artifact = sbt.librarymanagement.Artifact
type Artifact = sbt.librarymanagement.Artifact
val Binary = sbt.librarymanagement.Binary
type Binary = sbt.librarymanagement.Binary
val Caller = sbt.librarymanagement.Caller
type Caller = sbt.librarymanagement.Caller
val ChainedResolver = sbt.librarymanagement.ChainedResolver
@ -239,6 +235,8 @@ trait Import {
val DefaultMavenRepository = sbt.librarymanagement.DefaultMavenRepository
val Developer = sbt.librarymanagement.Developer
type Developer = sbt.librarymanagement.Developer
val Disabled = sbt.librarymanagement.Disabled
type Disabled = sbt.librarymanagement.Disabled
type DirectCredentials = sbt.librarymanagement.DirectCredentials
val EvictionPair = sbt.librarymanagement.EvictionPair
type EvictionPair = sbt.librarymanagement.EvictionPair
@ -251,12 +249,16 @@ trait Import {
type FileCredentials = sbt.librarymanagement.FileCredentials
val FileRepository = sbt.librarymanagement.FileRepository
type FileRepository = sbt.librarymanagement.FileRepository
val Full = sbt.librarymanagement.Full
type Full = sbt.librarymanagement.Full
val IvyScala = sbt.librarymanagement.IvyScala
type IvyScala = sbt.librarymanagement.IvyScala
val JCenterRepository = sbt.librarymanagement.JCenterRepository
val JavaNet2Repository = sbt.librarymanagement.JavaNet2Repository
val MavenCache = sbt.librarymanagement.MavenCache
type MavenCache = sbt.librarymanagement.MavenCache
val MavenRepo = sbt.librarymanagement.MavenRepo
type MavenRepo = sbt.librarymanagement.MavenRepo
val MavenRepository = sbt.librarymanagement.MavenRepository
type MavenRepository = sbt.librarymanagement.MavenRepository
val ModuleConfiguration = sbt.librarymanagement.ModuleConfiguration
@ -273,7 +275,6 @@ trait Import {
type Patterns = sbt.librarymanagement.Patterns
type PatternsBasedRepository = sbt.librarymanagement.PatternsBasedRepository
type RawRepository = sbt.librarymanagement.RawRepository
val RepositoryHelpers = sbt.librarymanagement.RepositoryHelpers
val Resolver = sbt.librarymanagement.Resolver
type Resolver = sbt.librarymanagement.Resolver
val SbtArtifacts = sbt.librarymanagement.SbtArtifacts
@ -291,6 +292,7 @@ trait Import {
val TrackLevel = sbt.librarymanagement.TrackLevel
val URLRepository = sbt.librarymanagement.URLRepository
type URLRepository = sbt.librarymanagement.URLRepository
val UpdateLogging = sbt.librarymanagement.UpdateLogging
val UpdateOptions = sbt.librarymanagement.UpdateOptions
type UpdateOptions = sbt.librarymanagement.UpdateOptions
val UpdateReport = sbt.librarymanagement.UpdateReport
@ -303,7 +305,7 @@ trait Import {
// sbt.internal.librarymanagement
type IvyPaths = sbt.internal.librarymanagement.IvyPaths
val UpdateLogging = sbt.internal.librarymanagement.UpdateLogging
val IvyPaths = sbt.internal.librarymanagement.IvyPaths
type IncOptions = xsbti.compile.IncOptions
}

View File

@ -1,7 +1,7 @@
package sbt
import sbt.internal.DslEntry
import sbt.internal.util.Eval
import sbt.util.Eval
object syntax extends syntax

View File

@ -1,11 +1,13 @@
-> a
-> b
-> c
-> d
$ touch succeed
> a
> b
> c
> d
> project input
-> j
@ -18,4 +20,4 @@ $ absent h
> set traceLevel in ThisBuild := 100
> set logLevel in ThisBuild := Level.Debug
> h succeed
$ exists h
$ exists h

View File

@ -1,4 +1,4 @@
import sbinary.DefaultProtocol._
import sjsonnew.BasicJsonProtocol._
lazy val a0 = 1
lazy val b0 = 1
@ -29,4 +29,4 @@ checkNext := {
val actualB = b.value
assert(actualA == expectedA, s"Expected 'a' to be $expectedA, got $actualA")
assert(actualB == expectedB, s"Expected 'b' to be $expectedB, got $actualB")
}
}

View File

@ -1,4 +1,4 @@
import sbinary.DefaultProtocol._
import sjsonnew.BasicJsonProtocol.{ project => _, _ }
lazy val x = taskKey[Int]("x")
lazy val y = taskKey[Int]("y")
@ -44,4 +44,4 @@ checkScopes := {
val actualY = (y in subB in Test).value
assert(actualX == expectedX, s"Expected 'x' to be $expectedX, got $actualX")
assert(actualY == expectedY, s"Expected 'y' to be $expectedY, got $actualY")
}
}

View File

@ -2,6 +2,7 @@ import complete.Parser
import complete.DefaultParsers._
import sbinary.DefaultProtocol._
import Def.Initialize
import sjsonnew.BasicJsonProtocol.{ project => _, _ }
val keep = taskKey[Int]("")
val persist = taskKey[Int]("")

View File

@ -1 +1,5 @@
incOptions := xsbti.compile.IncOptionsUtil.defaultIncOptions
lazy val root = (project in file(".")).
settings(
incOptions := xsbti.compile.IncOptionsUtil.defaultIncOptions,
scalaVersion := "2.11.7"
)

View File

@ -1,9 +1,13 @@
> compile
# comment out `initialized` method in A
$ copy-file changes/A1.scala src/main/scala/A.scala
$ sleep 1000
# compilation of A.scala succeeds but B.scala gets invalidated (properly) and B.scala fails to compile
-> compile
# we change A.scala to its original shape so compilation should succeed again
$ copy-file changes/A2.scala src/main/scala/A.scala
$ sleep 1000
# this fails at the moment due to use of stale class file for A, see #958 for details
> compile

View File

@ -6,5 +6,5 @@ TaskKey[Unit]("verify-binary-deps") := {
val base = baseDirectory.value
val nestedPkgClass = classDir / "test/nested.class"
val fooSrc = base / "src/main/scala/test/nested/Foo.scala"
assert(!a.relations.binaryDeps(fooSrc).contains(nestedPkgClass), a.relations.toString)
assert(!a.relations.libraryDeps(fooSrc).contains(nestedPkgClass), a.relations.toString)
}

View File

@ -5,7 +5,7 @@ lazy val check = taskKey[Unit]("")
lazy val root = (project in file(".")).
settings(
ivyPaths := ((baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache")))).value,
ivyPaths := ((baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache")))).value,
publishTo := Some(Resolver.file("Test Publish Repo", file("test-repo"))),
resolvers += (baseDirectory { base => "Test Repo" at (base / "test-repo").toURI.toString }).value,
moduleName := artifactID,

View File

@ -1,5 +1,5 @@
def localCache =
ivyPaths := new IvyPaths(baseDirectory.value, Some((baseDirectory in ThisBuild).value / "ivy" / "cache"))
ivyPaths := IvyPaths(baseDirectory.value, Some((baseDirectory in ThisBuild).value / "ivy" / "cache"))
val b = project.settings(localCache)

View File

@ -1 +1 @@
ivyPaths := { new IvyPaths(baseDirectory.value, Some(target.value / ".ivy2")) }
ivyPaths := { IvyPaths(baseDirectory.value, Some(target.value / ".ivy2")) }

View File

@ -1,6 +1,6 @@
ivyPaths in ThisBuild := {
val base = (baseDirectory in ThisBuild).value
new IvyPaths(base, Some(base / "ivy-cache"))
IvyPaths(base, Some(base / "ivy-cache"))
}
managedScalaInstance in ThisBuild := false
autoScalaLibrary in ThisBuild := false

View File

@ -1,33 +1,33 @@
import sbt.internal.librarymanagement._
lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
scalaVersion := "2.10.4",
resolvers += Resolver.sonatypeRepo("snapshots")
)
// #1620
lazy val root = (project in file(".")).
settings(
dependencyOverrides in ThisBuild += "com.github.nscala-time" %% "nscala-time" % "1.0.0",
libraryDependencies += "com.github.nscala-time" %% "nscala-time" % "1.0.0",
check := {
import Cache._, CacheIvy.updateIC
implicit val updateCache = updateIC
type In = IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil
val s = (streams in update).value
val cacheFile = s.cacheDirectory / updateCacheName.value
val module = ivyModule.value
val config = updateConfiguration.value
val f: In => Unit =
Tracked.inputChanged(cacheFile / "inputs") { (inChanged: Boolean, in: In) =>
if (inChanged) {
sys.error(s"Update cache is invalidated: ${module.owner.configuration}, ${module.moduleSettings}, $config")
}
}
f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)
ivyPaths := IvyPaths(
(baseDirectory in ThisBuild).value,
Some((baseDirectory in LocalRootProject).value / "ivy-cache")
)
scalaVersion := "2.10.4"
dependencyOverrides in ThisBuild += "com.github.nscala-time" %% "nscala-time" % "1.0.0"
libraryDependencies += "com.github.nscala-time" %% "nscala-time" % "1.0.0"
TaskKey[Unit]("check") := {
val s = (streams in update).value
val cacheStoreFactory = s.cacheStoreFactory sub updateCacheName.value
val module = ivyModule.value
val config = updateConfiguration.value
import sbt.internal.librarymanagement.IvyConfiguration
import sbt.librarymanagement.{ ModuleSettings, UpdateConfiguration }
type In = IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil
import sbt.internal.util.CacheImplicits._
import sbt.Classpaths.AltLibraryManagementCodec._
val f: In => Unit =
Tracked.inputChanged(cacheStoreFactory derive "inputs") { (inChanged: Boolean, in: In) =>
if (inChanged)
sys.error(s"Update cache is invalidated: ${module.owner.configuration}, ${module.moduleSettings}, $config")
}
)
f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)
}
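
The rewritten check above replaces the sbinary-era Cache/CacheIvy.updateIC imports with sjson-new codecs and a cache store derived from the task's streams. A condensed, hedged sketch of the same Tracked.inputChanged pattern for a single String input, assuming the basic formats from sbt.internal.util.CacheImplicits cover that input type:

// Hedged sketch: fail when a cached input changes, following the
// Tracked.inputChanged + cacheStoreFactory pattern in the task above.
TaskKey[Unit]("checkScalaVersionUnchanged") := {
  import sbt.internal.util.CacheImplicits._   // basic sjson-new formats
  val store = streams.value.cacheStoreFactory derive "inputs"
  val f: String => Unit =
    Tracked.inputChanged(store) { (changed: Boolean, in: String) =>
      if (changed) sys.error(s"cached input is invalidated: $in")
    }
  f(scalaVersion.value)
}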

View File

@ -8,7 +8,7 @@ val akkaVersion = "2.3.1"
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
scalaVersion := "2.10.4",
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project"),
updateOptions := updateOptions.value.withCachedResolution(true)

View File

@ -8,7 +8,7 @@ val akkaVersion = "2.3.1"
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
scalaVersion := "2.10.4",
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project")
)

View File

@ -2,7 +2,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.10.4",
resolvers += Resolver.sonatypeRepo("snapshots")

View File

@ -1,68 +1,51 @@
// https://github.com/sbt/sbt/issues/1710
// https://github.com/sbt/sbt/issues/1760
lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
organization := "com.example",
version := "0.1.0",
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.10.4",
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project")
inThisBuild(Seq(
organization := "com.example",
version := "0.1.0",
scalaVersion := "2.10.4",
updateOptions := updateOptions.value.withCachedResolution(true)
))
def commonSettings: Seq[Def.Setting[_]] = Seq(
ivyPaths := IvyPaths((baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project")
)
val x1 = project.settings(
commonSettings,
libraryDependencies += "com.example" %% "y1" % "0.1.0" % "compile;runtime->runtime",
libraryDependencies += "com.example" %% "y2" % "0.1.0" % "compile;runtime->runtime"
)
val y1 = project.settings(
commonSettings,
libraryDependencies ++= Seq(
"com.ning" % "async-http-client" % "1.8.14", // this includes slf4j 1.7.5
"com.twitter" % "summingbird-core_2.10" % "0.5.0", // this includes slf4j 1.6.6
"org.slf4j" % "slf4j-api" % "1.6.6" force(),
"commons-logging" % "commons-logging" % "1.1" // this includes servlet-api 2.3
)
)
lazy val X1 = project.
settings(
commonSettings,
libraryDependencies ++= Seq(
"com.example" %% "y1" % "0.1.0" % "compile->compile;runtime->runtime",
"com.example" %% "y2" % "0.1.0" % "compile->compile;runtime->runtime")
val y2 = project.settings(
commonSettings,
libraryDependencies ++= Seq(
"com.twitter" % "summingbird-core_2.10" % "0.5.0", // this includes slf4j 1.6.6
"com.ning" % "async-http-client" % "1.8.14", // this includes slf4j 1.7.5
"commons-logging" % "commons-logging" % "1.1.3"
)
)
lazy val Y1 = project.
settings(
commonSettings,
name := "y1",
libraryDependencies ++= Seq(
// this includes slf4j 1.7.5
"com.ning" % "async-http-client" % "1.8.14",
// this includes slf4j 1.6.6
"com.twitter" % "summingbird-core_2.10" % "0.5.0",
"org.slf4j" % "slf4j-api" % "1.6.6" force(),
// this includes servlet-api 2.3
"commons-logging" % "commons-logging" % "1.1"
)
)
TaskKey[Unit]("check") := {
val x1cp = (externalDependencyClasspath in Compile in x1).value.map(_.data.getName).sorted
def x1cpStr = x1cp.mkString("\n* ", "\n* ", "")
lazy val Y2 = project.
settings(
commonSettings,
name := "y2",
libraryDependencies ++= Seq(
// this includes slf4j 1.6.6
"com.twitter" % "summingbird-core_2.10" % "0.5.0",
// this includes slf4j 1.7.5
"com.ning" % "async-http-client" % "1.8.14",
"commons-logging" % "commons-logging" % "1.1.3"
)
)
if (!(x1cp contains "slf4j-api-1.6.6.jar"))
sys.error(s"slf4j-api-1.6.6.jar is not found on X1:$x1cpStr")
lazy val root = (project in file(".")).
settings(inThisBuild(Seq(
organization := "org.example",
version := "1.0",
updateOptions := updateOptions.value.withCachedResolution(true),
check := {
val x1cp = (externalDependencyClasspath in Compile in X1).value.map {_.data.getName}.sorted
// sys.error("slf4j-api is not found on X1" + x1cp)
if (!(x1cp contains "slf4j-api-1.6.6.jar")) {
sys.error("slf4j-api-1.6.6.jar is not found on X1" + x1cp)
}
//sys.error(x1cp.toString)
if (x1cp contains "servlet-api-2.3.jar") {
sys.error("servlet-api-2.3.jar is found when it should be evicted:" + x1cp)
}
}
)))
if (x1cp contains "servlet-api-2.3.jar")
sys.error(s"servlet-api-2.3.jar is found when it should be evicted:$x1cpStr")
}
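
The reworked check builds a sorted list of external-dependency jar names and reports failures with a bulleted message. A hedged sketch of that assertion in isolation, as a plain helper (the helper name is illustrative):

// Sketch: assert that a given jar name is on a project's compile classpath,
// mirroring the x1cp/x1cpStr pattern in the check task above.
def assertOnClasspath(cp: Seq[String], jar: String): Unit =
  if (!(cp contains jar))
    sys.error(s"$jar is not found on the classpath:" + cp.mkString("\n* ", "\n* ", ""))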

View File

@ -0,0 +1,11 @@
# Quoting @eed3si9n in https://github.com/dwijnand/sbt-lm/pull/1 :
#
# > After several experiments, I'm actually convinced that force() is unrelated to the scripted scenario,
# > and it's currently passing by virtue of the questionable caching behavior:
# > https://github.com/sbt/sbt/blob/c223dccb542beaf763a3a2909cda74bdad39beca/ivy/src/main/scala/sbt/ivyint/CachedResolutionResolveEngine.scala#L715
# > I think we can mark the failing test as pending for now.
> y1/publishLocal
> y2/publishLocal
> debug
> check

View File

@ -1,7 +0,0 @@
> debug
> Y1/publishLocal
> Y2/publishLocal
> check

View File

@ -3,7 +3,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.10.4",
resolvers += Resolver.sonatypeRepo("snapshots")

View File

@ -2,7 +2,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.10.4",
resolvers += Resolver.sonatypeRepo("snapshots")

View File

@ -3,7 +3,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.11.4",
resolvers += Resolver.sonatypeRepo("snapshots")

View File

@ -2,7 +2,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
libraryDependencies := Seq(
"net.databinder" %% "unfiltered-uploads" % "0.8.0",

View File

@ -2,7 +2,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
scalaVersion in ThisBuild := "2.11.7",
organization in ThisBuild := "com.example",
version in ThisBuild := "0.1.0-SNAPSHOT",

View File

@ -2,7 +2,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
scalaVersion := "2.10.4",
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project"),
updateOptions := updateOptions.value.withCircularDependencyLevel(CircularDependencyLevel.Error)

View File

@ -2,7 +2,7 @@ lazy val check = taskKey[Unit]("Runs the check")
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
scalaVersion := "2.10.4",
fullResolvers := fullResolvers.value.filterNot(_.name == "inter-project")
)

View File

@ -1,3 +1,3 @@
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value
libraryDependencies += "org.testng" % "testng" % "5.7" classifier "jdk15"

View File

@ -1,5 +1,5 @@
scalaVersion := "2.10.4"
conflictManager := ConflictManager.strict.copy(organization = "^(?!org\\.scala-lang).*$")
conflictManager := ConflictManager.strict.withOrganization("^(?!org\\.scala-lang).*$")
libraryDependencies += "org.specs2" %% "specs2" % "2.3.10-scalaz-7.1.0-M6" % "test"

View File

@ -1,7 +1,7 @@
resolvers += Resolver.file("buggy", file("repo"))(
Patterns(
ivyPatterns = Seq("[organization]/[module]/[revision]/ivy.xml"),
artifactPatterns = Seq("[organization]/[module]/[revision]/[artifact].[ext]"),
ivyPatterns = Vector("[organization]/[module]/[revision]/ivy.xml"),
artifactPatterns = Vector("[organization]/[module]/[revision]/[artifact].[ext]"),
isMavenCompatible = false,
descriptorOptional = true,
skipConsistencyCheck = true
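
The Patterns hunk above is cut short by the diff context; for reference, a hedged reconstruction of the full resolver definition with the collection fields now typed as Vector (the closing parentheses are added for illustration):

// Sketch: file resolver with Vector-based ivy/artifact patterns, completing
// the truncated hunk above.
resolvers += Resolver.file("buggy", file("repo"))(
  Patterns(
    ivyPatterns = Vector("[organization]/[module]/[revision]/ivy.xml"),
    artifactPatterns = Vector("[organization]/[module]/[revision]/[artifact].[ext]"),
    isMavenCompatible = false,
    descriptorOptional = true,
    skipConsistencyCheck = true
  )
)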

View File

@ -17,5 +17,5 @@ version in ThisBuild := "1.0"
lazy val common = Seq(
autoScalaLibrary := false, // avoid downloading fresh scala-library/scala-compiler
managedScalaInstance := false,
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache"))
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache"))
)

View File

@ -6,7 +6,7 @@ lazy val root = (project in file(".")).
settings(
libraryDependencies ++= baseDirectory(dependencies).value,
scalaVersion := "2.9.2",
ivyScala := { ivyScala.value map {_.copy(overrideScalaVersion = sbtPlugin.value)} },
ivyScala := ivyScala.value map (_.withOverrideScalaVersion(sbtPlugin.value)),
autoScalaLibrary := baseDirectory(base => !(base / "noscala").exists ).value,
scalaOverride := check("scala.App").value
)

View File

@ -1,6 +1,6 @@
lazy val root = (project in file(".")).
settings(
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value,
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value,
libraryDependencies += baseDirectory(transitive("javax.mail" % "mail" % "1.4.1")).value,
TaskKey[Unit]("checkTransitive") := check(true).value,
TaskKey[Unit]("checkIntransitive") := check(false).value

View File

@ -1,4 +1,4 @@
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value
publishMavenStyle := false
@ -12,4 +12,4 @@ organization := "org.scala-sbt"
version := "1.0"
name := "define-color"
name := "define-color"

View File

@ -1,4 +1,4 @@
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache")) ).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache")) ).value
publishMavenStyle := false
@ -14,4 +14,4 @@ libraryDependencies := (baseDirectory { base =>
organization := "org.example"
name := "use-color"
name := "use-color"

View File

@ -1,6 +1,6 @@
lazy val root = (project in file(".")).
settings(
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value,
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value,
libraryDependencies ++= baseDirectory (libraryDeps).value,
TaskKey[Unit]("checkForced") := check("1.2.14").value,
TaskKey[Unit]("checkDepend") := check("1.2.13").value

View File

@ -2,7 +2,7 @@ import scala.xml._
lazy val root = (project in file(".")).
settings(
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value,
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value,
ivyXML := ((customInfo, organization, moduleName, version) apply inlineXML).value,
scalaVersion := "2.9.1",
projectID ~= (_ cross false),

View File

@ -2,7 +2,7 @@ import sbt.internal.librarymanagement.syntax._
libraryDependencies += "org.scalacheck" % "scalacheck" % "1.5"
ivyPaths := baseDirectory( dir => new IvyPaths(dir, Some(dir / "ivy-home"))).value
ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy-home"))).value
TaskKey[Unit]("check") := {
val report = update.value

View File

@ -69,4 +69,4 @@ def normalize(url: String): String =
val base = uri( url ).normalize.toString
if(base.endsWith("/")) base else (base + "/")
}
def normalize(repo: MavenRepository): MavenRepository = new MavenRepository(repo.name, normalize(repo.root))
def normalize(repo: MavenRepository): MavenRepository = MavenRepository(repo.name, normalize(repo.root))
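
MavenRepository, like IvyPaths, is now built through its companion apply. A hedged sketch declaring a repository that way; the name and URL here are placeholders, not part of the commit:

// Sketch: declare a Maven repository via the companion apply rather than `new`.
resolvers += MavenRepository("example-releases", "https://repo.example.com/releases/")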

View File

@ -1,6 +1,6 @@
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / ".ivy2"))).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / ".ivy2"))).value
// not in the default repositories
libraryDependencies += "com.sun.jmx" % "jmxri" % "1.2.1"
autoScalaLibrary := false
autoScalaLibrary := false

View File

@ -1,6 +1,6 @@
ivyScala ~= { (is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false, overrideScalaVersion = false, filterImplicit = false)) }
ivyPaths := baseDirectory( dir => new IvyPaths(dir, Some(dir / "ivy-home"))).value
ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy-home"))).value
libraryDependencies += "junit" % "junit" % "4.8"

View File

@ -1,6 +1,6 @@
def commonSettings: Seq[Def.Setting[_]] =
Seq(
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")),
dependencyCacheDirectory := (baseDirectory in LocalRootProject).value / "dependency",
scalaVersion := "2.10.4",
organization in ThisBuild := "org.example",

View File

@ -1,4 +1,4 @@
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value
organization := "org.example"
@ -9,4 +9,4 @@ autoScalaLibrary := false
addArtifact(
name { n => Artifact(n, "txt", "txt") },
baseDirectory map { _ / "topublish.txt" }
)
)

View File

@ -1,9 +1,9 @@
autoScalaLibrary := false
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyScala := ((scalaVersion in update, scalaBinaryVersion in update) { (fv, bv) =>
Some(new IvyScala(fv, bv, Nil, filterImplicit = false, checkExplicit = false, overrideScalaVersion = false))
Some(IvyScala(fv, bv, Vector.empty, filterImplicit = false, checkExplicit = false, overrideScalaVersion = false))
}).value
InputKey[Unit]("check") := (inputTask { args =>
@ -13,4 +13,4 @@ InputKey[Unit]("check") := (inputTask { args =>
}
}).evaluated
scalaVersion := "2.9.1"
scalaVersion := "2.9.1"
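
IvyScala values are likewise built through the companion apply, with the check collection typed as Vector. A hedged sketch of the updated construction, written in the direct .value style rather than the applicative form shown above (assumed equivalent for illustration):

// Sketch: construct IvyScala via its apply, with an empty Vector of checks,
// as in the updated ivyScala setting above.
ivyScala := Some(
  IvyScala(
    scalaVersion.value,
    scalaBinaryVersion.value,
    Vector.empty,
    filterImplicit = false,
    checkExplicit = false,
    overrideScalaVersion = false
  )
)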

View File

@ -7,7 +7,7 @@ lazy val root = (project in file(".")).
makePomConfiguration := ((makePomConfiguration, baseDirectory) { (conf, base) =>
conf.copy(filterRepositories = pomIncludeRepository(base, conf.filterRepositories) )
}).value,
ivyPaths := baseDirectory( dir => new IvyPaths(dir, Some(dir / "ivy-home"))).value
ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy-home"))).value
)
val local = "local-maven-repo" at "file://" + (Path.userHome / ".m2" /"repository").absolutePath

View File

@ -32,4 +32,4 @@ def checkPackaging(pom: File, expected: String) =
val packaging = (xml.XML.loadFile(pom) \\ "packaging").text
if(packaging != expected) sys.error("Incorrect packaging for '" + pom + "'. Expected '" + expected + "', but got '" + packaging + "'")
}
def warArtifact = artifact in (Compile, packageBin) ~= { _.copy(`type` = "war", extension = "war") }
def warArtifact = artifact in (Compile, packageBin) ~= (_ withType "war" withExtension "war")
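
The warArtifact helper now chains the withType/withExtension setters instead of copying the case class. A hedged usage sketch, with a hypothetical web project that applies it:

// Hypothetical project applying the warArtifact redefinition above so that
// Compile/packageBin is published with type and extension "war".
lazy val web = (project in file("web")).settings(
  warArtifact
)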

View File

@ -3,7 +3,7 @@ val checkIvyXml = taskKey[Unit]("Checks the ivy.xml transform was correct")
lazy val root = (project in file(".")).
settings(
name := "test-parent-pom",
ivyPaths := new IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((target in LocalRootProject).value / "ivy-cache")),
resolvers += MavenCache("Maven2 Local Test", baseDirectory.value / "local-repo"),
libraryDependencies += "com.example" % "example-child" % "1.0-SNAPSHOT",
libraryDependencies += "org.apache.geronimo.specs" % "geronimo-jta_1.1_spec" % "1.1.1",

View File

@ -1,5 +1,5 @@
def configIvyScala =
ivyScala ~= { _.map(_.copy(checkExplicit = false)) }
ivyScala ~= (_ map (_ withCheckExplicit false))
val declared = SettingKey[Boolean]("declared")
lazy val a = project.

View File

@ -4,7 +4,7 @@ lazy val root = (project in file(".")).
settings(inThisBuild(List(
organization := "A",
version := "1.0",
ivyPaths := baseDirectory( dir => new IvyPaths(dir, Some(dir / "ivy" / "cache")) ).value,
ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy" / "cache")) ).value,
externalResolvers := (baseDirectory map { base => Resolver.file("local", base / "ivy" / "local" asFile)(Resolver.ivyStylePatterns) :: Nil }).value
)),
mavenStyle,

View File

@ -2,7 +2,7 @@ lazy val root = (project in file(".")).
settings(inThisBuild(List(
organization := "A",
version := "1.0",
ivyPaths := baseDirectory( dir => new IvyPaths(dir, Some(dir / "ivy" / "cache")) ).value,
ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy" / "cache")) ).value,
externalResolvers := (baseDirectory map { base => Resolver.file("local", base / "ivy" / "local" asFile)(Resolver.ivyStylePatterns) :: Nil }).value
)),
mavenStyle,

View File

@ -6,8 +6,8 @@ scalaVersion := "2.11.8"
resolvers += Resolver.file("buggy", (baseDirectory in LocalRootProject).value / "repo")(
Patterns(
ivyPatterns = Seq("[organization]/[module]/[revision]/ivy.xml"),
artifactPatterns = Seq("[organization]/[module]/[revision]/dummy.jar"),
ivyPatterns = Vector("[organization]/[module]/[revision]/ivy.xml"),
artifactPatterns = Vector("[organization]/[module]/[revision]/dummy.jar"),
isMavenCompatible = false,
descriptorOptional = true,
skipConsistencyCheck = true
@ -48,7 +48,7 @@ checkDependencies := {
c <- update.value.configurations
m <- c.modules
if !m.evicted
} yield m.module.copy(extraAttributes = Map.empty)).toSet
} yield m.module.withExtraAttributes(Map.empty)).toSet
assert(resolved == expected)
}
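
The dependency check above now normalises resolved modules with withExtraAttributes(Map.empty) rather than copy. A hedged sketch of the same comparison against a hypothetical expected set (the module coordinates are placeholders):

// Hypothetical check: strip extra attributes before comparing the resolved
// ModuleIDs with an expected set, mirroring the task above.
TaskKey[Unit]("checkResolved") := {
  val expected: Set[ModuleID] = Set("com.example" % "foo" % "1.0")
  val resolved = (for {
    c <- update.value.configurations
    m <- c.modules
    if !m.evicted
  } yield m.module.withExtraAttributes(Map.empty)).toSet
  assert(resolved == expected, s"unexpected resolution: $resolved")
}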

View File

@ -1,11 +1,13 @@
def customIvyPaths: Seq[Def.Setting[_]] = Seq(
ivyPaths := new IvyPaths((baseDirectory in ThisBuild).value, Some((baseDirectory in ThisBuild).value / "ivy-cache"))
ivyPaths := IvyPaths((baseDirectory in ThisBuild).value, Some((baseDirectory in ThisBuild).value / "ivy-cache"))
)
lazy val sharedResolver: Resolver =
Resolver.defaultShared.nonlocal()
lazy val sharedResolver: Resolver = {
val r = Resolver.defaultShared
r withConfiguration (r.configuration withIsLocal false)
//MavenRepository("example-shared-repo", "file:///tmp/shared-maven-repo-bad-example")
//Resolver.file("example-shared-repo", repoDir)(Resolver.defaultPatterns)
}
lazy val common = project.
settings(customIvyPaths: _*).
@ -14,12 +16,11 @@ lazy val common = project.
name := "badexample",
version := "1.0-SNAPSHOT",
publishTo := Some(sharedResolver),
crossVersion := CrossVersion.Disabled,
crossVersion := Disabled(),
publishMavenStyle := (sharedResolver match {
case repo: PatternsBasedRepository => repo.patterns.isMavenCompatible
case _: RawRepository => false // TODO - look deeper
case _: MavenRepository => true
case _: MavenCache => true
case _ => false // TODO - Handle chain repository?
})
// updateOptions := updateOptions.value.withLatestSnapshots(true)
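
Two smaller migrations appear in this hunk: crossVersion := Disabled() replaces the old CrossVersion.Disabled object, and the shared resolver is made non-local through withConfiguration/withIsLocal instead of nonlocal(). A hedged sketch combining both in a hypothetical library project:

// Hypothetical project: publish to the sharedResolver defined above with
// cross-building disabled, using the new Disabled() and setter-style APIs.
lazy val lib = (project in file("lib")).settings(
  crossVersion := Disabled(),
  publishTo := Some(sharedResolver)
)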

View File

@ -16,7 +16,9 @@ $ sleep 1000
> dependent/update
> dependent/compile
# Now let's try this on the opposite order: pubishLocal => publish
# Now let's try this on the opposite order: publishLocal => publish
$ copy-file changes/BadCommon.scala common/src/main/scala/Common.scala
> common/publishLocal

View File

@ -1,6 +1,6 @@
autoScalaLibrary := false
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value
libraryDependencies ++= Seq(
"org.sat4j" % "org.sat4j.pb" % "2.3.1",

View File

@ -1,6 +1,6 @@
ivyPaths := {
val base = baseDirectory.value
new IvyPaths(base, Some(base / "ivy-cache"))
IvyPaths(base, Some(base / "ivy-cache"))
}
managedScalaInstance := false

View File

@ -2,7 +2,7 @@ import sbt.internal.inc.classpath.ClasspathUtilities
lazy val root = (project in file(".")).
settings(
ivyPaths := (baseDirectory, target)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value,
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value,
libraryDependencies += "org.jsoup" % "jsoup" % "1.9.1" % Test from "http://jsoup.org/packages/jsoup-1.9.1.jar",
ivyLoggingLevel := UpdateLogging.Full,
TaskKey[Unit]("checkInTest") := checkClasspath(Test).value,

View File

@ -5,7 +5,7 @@ val commonSettings = Seq[Def.Setting[_]](
organization := "org.example",
version := "1.0-SNAPSHOT",
scalaVersion := "2.11.7",
ivyPaths := new IvyPaths((baseDirectory in ThisBuild).value, Some(ivyHome.value)),
ivyPaths := IvyPaths((baseDirectory in ThisBuild).value, Some(ivyHome.value)),
fullResolvers := fullResolvers.value.filterNot(_ == projectResolver.value)
)

View File

@ -1,7 +1,7 @@
val commonSettings = Seq(
organization := "com.example",
version := "0.1.0",
ivyPaths := new IvyPaths((baseDirectory in LocalRootProject).value, Some((target in LocalRootProject).value / "ivy-cache"))
ivyPaths := IvyPaths((baseDirectory in LocalRootProject).value, Some((target in LocalRootProject).value / "ivy-cache"))
)
lazy val app = (project in file("app")).

View File

@ -1,3 +1,3 @@
organization := "com.example"
version := "0.1.0"
ivyPaths := new IvyPaths((baseDirectory in LocalRootProject).value, Some((target in LocalRootProject).value / "ivy-cache"))
ivyPaths := IvyPaths((baseDirectory in LocalRootProject).value, Some((target in LocalRootProject).value / "ivy-cache"))

View File

@ -6,6 +6,8 @@ resolvers += (baseDirectory in ThisBuild)(x =>
"test" at (x / "repo/").asURL.toString
).value
resolvers += Resolver.mavenLocal
name := "demo1"
organization := "org.example"

View File

@ -6,6 +6,8 @@ resolvers += (baseDirectory in ThisBuild)(x =>
"test" at (x / "repo").asURL.toString
).value
resolvers += Resolver.mavenLocal
name := "demo2"
organization := "org.example"

View File

@ -1,3 +1,4 @@
lazy val root = (project in file("."))
lazy val a = proj(project in file("a"))
@ -6,10 +7,11 @@ lazy val c = proj(project in file("c"))
def proj(p: Project): Project =
p.settings(
ivyPaths := (baseDirectory in root, target in root)( (dir, t) => new IvyPaths(dir, Some(t / "ivy-cache"))).value,
ivyPaths := (baseDirectory in root, target in root)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value,
resolvers += (appConfiguration { app => // need this to resolve sbt
val ivyHome = Classpaths.bootIvyHome(app) getOrElse sys.error("Launcher did not provide the Ivy home directory.")
Resolver.file("real-local", ivyHome / "local")(Resolver.ivyStylePatterns)
}).value,
resolvers += Resolver.typesafeIvyRepo("releases") // not sure why this isn't included by default
resolvers += Resolver.typesafeIvyRepo("releases"), // not sure why this isn't included by default
resolvers += Resolver.mavenLocal
)

View File

@ -6,6 +6,8 @@ resolvers += (baseDirectory in ThisBuild)(x =>
"test" at (x / "repo").asURL.toString
).value
resolvers += Resolver.mavenLocal
name := "demo3"
organization := "org.example"

View File

@ -29,4 +29,4 @@ libraryDependencies += "com.typesafe.akka" %% "akka-actor" % "2.3.3" % "test"
scalaVersion := "2.11.0"
ivyScala := ivyScala.value map {_.copy(overrideScalaVersion = sbtPlugin.value)}
ivyScala := ivyScala.value map (_.withOverrideScalaVersion(sbtPlugin.value))
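
As elsewhere in this commit, IvyScala options are tweaked with with* setters rather than copy. A hedged sketch chaining two of the setters that appear in these diffs, withCheckExplicit and withOverrideScalaVersion:

// Sketch: relax explicit-version checking and tie Scala-version overriding to
// sbtPlugin, chaining the setter-style IvyScala API used above.
ivyScala := ivyScala.value map { is =>
  is.withCheckExplicit(false).withOverrideScalaVersion(sbtPlugin.value)
}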

View File

@ -25,6 +25,9 @@ import sbt.io.syntax._
import sbt.util.Logger
import sjsonnew.{ IsoString, SupportConverter }
import sbt.internal.util.{ CacheStoreFactory, DirectoryStoreFactory, Input, Output, PlainInput, PlainOutput }
// no longer specific to Tasks, so 'TaskStreams' should be renamed
/**
* Represents a set of streams associated with a context.
@ -38,6 +41,9 @@ sealed trait TaskStreams[Key] {
def outID = "out"
def errorID = "err"
def getInput(key: Key, sid: String = default): Input
def getOutput(sid: String = default): Output
/**
* Provides a reader to read text from the stream `sid` for `key`.
* It is the caller's responsibility to coordinate writing to the stream.
@ -66,6 +72,8 @@ sealed trait TaskStreams[Key] {
/** A cache directory that is unique to the context of this streams instance.*/
def cacheDirectory: File
def cacheStoreFactory: CacheStoreFactory
// default logger
/** Obtains the default logger. */
final lazy val log: Logger = log(default)
@ -112,12 +120,18 @@ object Streams {
synchronized { streams.values.foreach(_.close()); streams.clear() }
}
def apply[Key](taskDirectory: Key => File, name: Key => String, mkLogger: (Key, PrintWriter) => Logger): Streams[Key] = new Streams[Key] {
def apply[Key, J: IsoString](taskDirectory: Key => File, name: Key => String, mkLogger: (Key, PrintWriter) => Logger, converter: SupportConverter[J]): Streams[Key] = new Streams[Key] {
def apply(a: Key): ManagedStreams[Key] = new ManagedStreams[Key] {
private[this] var opened: List[Closeable] = Nil
private[this] var closed = false
def getInput(a: Key, sid: String = default): Input =
make(a, sid)(f => new PlainInput(new FileInputStream(f), converter))
def getOutput(sid: String = default): Output =
make(a, sid)(f => new PlainOutput(new FileOutputStream(f), converter))
def readText(a: Key, sid: String = default): BufferedReader =
make(a, sid)(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), IO.defaultCharset)))
@ -136,6 +150,9 @@ object Streams {
dir
}
lazy val cacheStoreFactory: CacheStoreFactory =
new DirectoryStoreFactory(cacheDirectory, converter)
def log(sid: String): Logger = mkLogger(a, text(sid))
def make[T <: Closeable](a: Key, sid: String)(f: File => T): T = synchronized {