From ae15eccd9c7aea2b4336ea454d974aed66d5ec16 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 4 Dec 2013 13:22:08 +0100 Subject: [PATCH 001/148] Introduce a companion `Incremental` class. Move most of the functionality from `Incremental` object to its companion class. This commit is a preparation for making it possible to have two different implementation of logic in `Incremental` object. --- .../src/main/scala/sbt/inc/Incremental.scala | 41 +++++++++++-------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index 74ad517ac..518e16538 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -21,20 +21,33 @@ object Incremental log: Logger, options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = { - val initialChanges = changedInitial(entry, sources, previous, current, forEntry, options, log) + val incremental = new Incremental + val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry, options, log) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray val modifiedClasses = initialChanges.external.allModified.toArray def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty } - val initialInv = invalidateInitial(previous.relations, initialChanges, log) + val initialInv = incremental.invalidateInitial(previous.relations, initialChanges, log) log.debug("All initially invalidated sources: " + initialInv + "\n") val analysis = manageClassfiles(options) { classfileManager => - cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1, log, options) + incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1, log, options) } (!initialInv.isEmpty, analysis) } + private[inc] val apiDebugProp = 
"xsbt.api.debug" + private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) + + private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis = + prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately()) + + private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis = + { + classfileManager.delete( invalidatedSrcs.flatMap(previous.relations.products) ) + previous -- invalidatedSrcs + } + private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T = { val classfileManager = options.newClassfileManager() @@ -46,10 +59,13 @@ object Incremental result } +} + + +private class Incremental { + val incDebugProp = "xsbt.inc.debug" private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(incDebugProp) - val apiDebugProp = "xsbt.api.debug" - def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) // setting the related system property to true will skip checking that the class name // still comes from the same classpath entry. 
This can workaround bugs in classpath construction, @@ -58,7 +74,7 @@ object Incremental // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success // TODO: full external name changes, scopeInvalidations - @tailrec def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, + @tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int, log: Logger, options: IncOptions): Analysis = if(invalidatedRaw.isEmpty) previous @@ -67,7 +83,7 @@ object Incremental def debug(s: => String) = if (incDebug(options)) log.debug(s) else () val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations) val invalidated = expand(withPackageObjects, allSources, log, options) - val pruned = prune(invalidated, previous, classfileManager) + val pruned = Incremental.prune(invalidated, previous, classfileManager) debug("********* Pruned: \n" + pruned.relations + "\n*********") val fresh = doCompile(invalidated, binaryChanges) @@ -144,7 +160,7 @@ object Incremental val newApis = lastSources.toSeq map newAPI val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi, log, options) } - if (apiDebug(options) && apiChanges.nonEmpty) { + if (Incremental.apiDebug(options) && apiChanges.nonEmpty) { logApiChanges(apiChanges, oldAPI, newAPI, log, options) } @@ -321,15 +337,6 @@ object Incremental newInv ++ initialDependsOnNew } - def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis = - prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately()) - - def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis = - { - classfileManager.delete( 
invalidatedSrcs.flatMap(previous.relations.products) ) - previous -- invalidatedSrcs - } - def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps, log: Logger)(implicit equivS: Equiv[Stamp]): File => Boolean = dependsOn => { From 1de2900a67847a6718642062acf522b990af6aa2 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 4 Dec 2013 17:45:45 +0100 Subject: [PATCH 002/148] Add Logger and IncOptions as Incremental class constructor args Both Logger and IncOptions instances were passed around Incremental class implementation unmodified. Given the fact that entire implementation of the class uses exactly the same values for those types it makes sense to extract them as constructor arguments so they are accessible everywhere. This helps reducing signatures of other methods to more essential parameters that are more specific to given method. --- .../src/main/scala/sbt/inc/Incremental.scala | 80 +++++++++---------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index 518e16538..a55021c87 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -21,17 +21,17 @@ object Incremental log: Logger, options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = { - val incremental = new Incremental - val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry, options, log) + val incremental = new Incremental(log, options) + val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray val modifiedClasses = initialChanges.external.allModified.toArray def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty } - val 
initialInv = incremental.invalidateInitial(previous.relations, initialChanges, log) + val initialInv = incremental.invalidateInitial(previous.relations, initialChanges) log.debug("All initially invalidated sources: " + initialInv + "\n") val analysis = manageClassfiles(options) { classfileManager => - incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1, log, options) + incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1) } (!initialInv.isEmpty, analysis) } @@ -62,7 +62,7 @@ object Incremental } -private class Incremental { +private class Incremental(log: Logger, options: IncOptions) { val incDebugProp = "xsbt.inc.debug" private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(incDebugProp) @@ -75,14 +75,14 @@ private class Incremental { // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success // TODO: full external name changes, scopeInvalidations @tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, - doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int, log: Logger, options: IncOptions): Analysis = + doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis = if(invalidatedRaw.isEmpty) previous else { def debug(s: => String) = if (incDebug(options)) log.debug(s) else () val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations) - val invalidated = expand(withPackageObjects, allSources, log, options) + val invalidated = expand(withPackageObjects, allSources) val pruned = Incremental.prune(invalidated, previous, classfileManager) debug("********* Pruned: \n" + pruned.relations + "\n*********") @@ -92,18 +92,18 @@ private class Incremental { val merged = 
pruned ++ fresh//.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis) debug("********* Merged: \n" + merged.relations + "\n*********") - val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _, log, options) + val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _) debug("\nChanges:\n" + incChanges) val transitiveStep = options.transitiveStep - val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep, log) - cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum+1, log, options) + val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep) + cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum+1) } private[this] def emptyChanges: DependencyChanges = new DependencyChanges { val modifiedBinaries = new Array[File](0) val modifiedClasses = new Array[String](0) def isEmpty = true } - private[this] def expand(invalidated: Set[File], all: Set[File], log: Logger, options: IncOptions): Set[File] = { + private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = { val recompileAllFraction = options.recompileAllFraction if(invalidated.size > all.size * recompileAllFraction) { log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction*100.0) + "% of all sources") @@ -123,7 +123,7 @@ private class Incremental { * NOTE: This method creates a new APIDiff instance on every invocation. 
*/ private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source, - newAPIMapping: T => Source, log: Logger, options: IncOptions): Unit = { + newAPIMapping: T => Source): Unit = { val contextSize = options.apiDiffContextSize try { val apiDiff = new APIDiff @@ -154,19 +154,19 @@ private class Incremental { * providing the API before and after the last step. The functions should return * an empty API if the file did not/does not exist. */ - def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source, log: Logger, options: IncOptions): APIChanges[T] = + def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] = { val oldApis = lastSources.toSeq map oldAPI val newApis = lastSources.toSeq map newAPI - val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi, log, options) } + val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) } if (Incremental.apiDebug(options) && apiChanges.nonEmpty) { - logApiChanges(apiChanges, oldAPI, newAPI, log, options) + logApiChanges(apiChanges, oldAPI, newAPI) } new APIChanges(apiChanges) } - def sameSource[T](src: T, a: Source, b: Source, log: Logger, options: IncOptions): Option[APIChange[T]] = { + def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { // Clients of a modified source file (ie, one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled. 
val hasMacro = a.hasMacro || b.hasMacro if (shortcutSameSource(a, b)) { @@ -174,11 +174,11 @@ private class Incremental { } else { if (hasMacro && options.recompileOnMacroDef) { Some(APIChangeDueToMacroDefinition(src)) - } else sameAPI(src, a, b, log) + } else sameAPI(src, a, b) } } - def sameAPI[T](src: T, a: Source, b: Source, log: Logger): Option[SourceAPIChange[T]] = { + def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = { if (SameAPI(a,b)) None else { @@ -193,15 +193,15 @@ private class Incremental { } def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps, - forEntry: File => Option[Analysis], options: IncOptions, log: Logger)(implicit equivS: Equiv[Stamp]): InitialChanges = + forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges = { val previous = previousAnalysis.stamps val previousAPIs = previousAnalysis.apis val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv( previous.internalSource(f), current.internalSource(f) ) ) val removedProducts = previous.allProducts.filter( p => !equivS.equiv( previous.product(p), current.product(p) ) ).toSet - val binaryDepChanges = previous.allBinaries.filter( externalBinaryModified(entry, forEntry, previous, current, log)).toSet - val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry), log, options) + val binaryDepChanges = previous.allBinaries.filter( externalBinaryModified(entry, forEntry, previous, current)).toSet + val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry)) InitialChanges(srcChanges, removedProducts, binaryDepChanges, extChanges ) } @@ -215,14 +215,14 @@ private class Incremental { val (changed, unmodified) = inBoth.partition(existingModified) } - def invalidateIncremental(previous: Relations, apis: APIs, changes: 
APIChanges[File], recompiledSources: Set[File], transitive: Boolean, log: Logger): Set[File] = + def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] = { val dependsOnSrc = previous.usesInternalSrc _ val propagated = if(transitive) - transitiveDependencies(dependsOnSrc, changes.allModified.toSet, log) + transitiveDependencies(dependsOnSrc, changes.allModified.toSet) else - invalidateIntermediate(previous, changes, log) + invalidateIntermediate(previous, changes) val dups = invalidateDuplicates(previous) if(dups.nonEmpty) @@ -243,23 +243,23 @@ private class Incremental { /** Returns the transitive source dependencies of `initial`. * Because the intermediate steps do not pull in cycles, this result includes the initial files * if they are part of a cycle containing newly invalidated files . */ - def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File], log: Logger): Set[File] = + def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] = { - val transitiveWithInitial = transitiveDeps(initial, log)(dependsOnSrc) - val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc, log) + val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc) + val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc) log.debug("Final step, transitive dependencies:\n\t" + transitivePartial) transitivePartial } /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ - def invalidateInitial(previous: Relations, changes: InitialChanges, log: Logger): Set[File] = + def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] = { val srcChanges = changes.internalSrc val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed val byProduct = 
changes.removedProducts.flatMap(previous.produced) val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary) val externalModifiedSources = changes.external.allModified.toSet - val byExtSrcDep = invalidateByExternal(previous, externalModifiedSources, log) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations + val byExtSrcDep = invalidateByExternal(previous, externalModifiedSources) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations checkAbsolute(srcChanges.added.toList) log.debug( "\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed + @@ -290,14 +290,14 @@ private class Incremental { } /** Sources invalidated by `external` sources in other projects according to the previous `relations`. */ - def invalidateByExternal(relations: Relations, external: Set[String], log: Logger): Set[File] = + def invalidateByExternal(relations: Relations, external: Set[String]): Set[File] = { // Propagate public inheritance dependencies transitively. // This differs from normal because we need the initial crossing from externals to sources in this project. val externalInheritedR = relations.publicInherited.external val byExternalInherited = external flatMap externalInheritedR.reverse val internalInheritedR = relations.publicInherited.internal - val transitiveInherited = transitiveDeps(byExternalInherited, log)(internalInheritedR.reverse _) + val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) // Get the direct dependencies of all sources transitively invalidated by inheritance val directA = transitiveInherited flatMap relations.direct.internal.reverse @@ -306,38 +306,38 @@ private class Incremental { transitiveInherited ++ directA ++ directB } /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. 
*/ - def invalidateIntermediate(relations: Relations, changes: APIChanges[File], log: Logger): Set[File] = + def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] = { def reverse(r: Relations.Source) = r.internal.reverse _ - invalidateSources(reverse(relations.direct), reverse(relations.publicInherited), changes, log) + invalidateSources(reverse(relations.direct), reverse(relations.publicInherited), changes) } /** Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not * included in a cycle with newly invalidated sources. */ - private[this] def invalidateSources(directDeps: File => Set[File], publicInherited: File => Set[File], changes: APIChanges[File], log: Logger): Set[File] = + private[this] def invalidateSources(directDeps: File => Set[File], publicInherited: File => Set[File], changes: APIChanges[File]): Set[File] = { val initial = changes.allModified.toSet log.debug("Invalidating by inheritance (transitively)...") - val transitiveInherited = transitiveDeps(initial, log)(publicInherited) + val transitiveInherited = transitiveDeps(initial)(publicInherited) log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) val direct = transitiveInherited flatMap directDeps log.debug("Invalidated by direct dependency: " + direct) val all = transitiveInherited ++ direct - includeInitialCond(initial, all, f => directDeps(f) ++ publicInherited(f), log) + includeInitialCond(initial, all, f => directDeps(f) ++ publicInherited(f)) } /** Conditionally include initial sources that are dependencies of newly invalidated sources. ** Initial sources included in this step can be because of a cycle, but not always. 
*/ - private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File], log: Logger): Set[File] = + private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] = { val newInv = currentInvalidations -- initial log.debug("New invalidations:\n\t" + newInv) - val transitiveOfNew = transitiveDeps(newInv, log)(allDeps) + val transitiveOfNew = transitiveDeps(newInv)(allDeps) val initialDependsOnNew = transitiveOfNew & initial log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew) newInv ++ initialDependsOnNew } - def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps, log: Logger)(implicit equivS: Equiv[Stamp]): File => Boolean = + def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean = dependsOn => { def inv(reason: String): Boolean = { @@ -389,7 +389,7 @@ private class Incremental { def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource def orTrue(o: Option[Boolean]): Boolean = o getOrElse true - private[this] def transitiveDeps[T](nodes: Iterable[T], log: Logger)(dependencies: T => Iterable[T]): Set[T] = + private[this] def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = { val xs = new collection.mutable.HashSet[T] def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to)) From 946fd53a73a9fb247c5917e37bd1a30ed83ba073 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Thu, 5 Dec 2013 23:08:21 +0100 Subject: [PATCH 003/148] Introduce abstract `IncrementalCommon` class. Introduce an abstract `IncrementalCommon class that holds the implementation of incremental compiler that was previously done in `Incremental` class. 
Also, introduce `IncrementalDefaultImpl` that inherits from IncrementalCommon. This is the first step to introduce a design where most of incremental compiler's logic lives in IncrementalCommon and we have two subclasses: 1. Default, which holds implementation specific to the old algorithm known from sbt 0.13.0 2. NameHashing, which holds implementation specific to the name hashing algorithm This commit is purely a refactoring and does not change any behavior. --- compile/inc/src/main/scala/sbt/inc/Incremental.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index a55021c87..fe4f03861 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -21,7 +21,7 @@ object Incremental log: Logger, options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = { - val incremental = new Incremental(log, options) + val incremental = new IncrementalDefaultImpl(log, options) val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray @@ -62,7 +62,7 @@ object Incremental } -private class Incremental(log: Logger, options: IncOptions) { +private abstract class IncrementalCommon(log: Logger, options: IncOptions) { val incDebugProp = "xsbt.inc.debug" private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(incDebugProp) @@ -449,3 +449,5 @@ private class Incremental(log: Logger, options: IncOptions) { def properSubPkg(testParent: Seq[String], testSub: Seq[String]) = testParent.length < testSub.length && testSub.startsWith(testParent) def pkgs(api: Source) = names(api :: Nil).map(pkg)*/ } + +private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends 
IncrementalCommon(log, options) From 83a131e4f5c73e1b20899d5deb8ef7e4c0fb1517 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Thu, 5 Dec 2013 07:24:17 +0100 Subject: [PATCH 004/148] Introduce `IncrementalCommon.invalidateSource` method. In addition to `invalidateSources` we introduce `invalidateSource` that invalidates dependencies of a single source. This is needed for the name hashing algorithm because its invalidation logic depends on information about API changes of each source file individually. The refactoring is done in `IncrementalCommon` class so it affects the default implementation as well. However, this refactoring does not affect the result of invalidation in the default implementation. --- compile/inc/src/main/scala/sbt/inc/Incremental.scala | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index fe4f03861..d4c6d7871 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -316,13 +316,20 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { private[this] def invalidateSources(directDeps: File => Set[File], publicInherited: File => Set[File], changes: APIChanges[File]): Set[File] = { val initial = changes.allModified.toSet + val all = (changes.apiChanges flatMap { change => + invalidateSource(directDeps, publicInherited, change) + }).toSet + includeInitialCond(initial, all, f => directDeps(f) ++ publicInherited(f)) + } + + private[this] def invalidateSource(directDeps: File => Set[File], publicInherited: File => Set[File], change: APIChange[File]): Set[File] = { log.debug("Invalidating by inheritance (transitively)...") - val transitiveInherited = transitiveDeps(initial)(publicInherited) + val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) log.debug("Invalidated by transitive public 
inheritance: " + transitiveInherited) val direct = transitiveInherited flatMap directDeps log.debug("Invalidated by direct dependency: " + direct) val all = transitiveInherited ++ direct - includeInitialCond(initial, all, f => directDeps(f) ++ publicInherited(f)) + all } /** Conditionally include initial sources that are dependencies of newly invalidated sources. From 3643419e7c157bfa3c64f4ffd9584804856c189d Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Thu, 5 Dec 2013 08:50:19 +0100 Subject: [PATCH 005/148] Make `invalidateSource` to take Relations. This way we'll be able to have a polymorphic implementation of this method in the future. One implementation will use the old dependency tracking mechanism and the other will use the new one (implemented for name hashing). --- .../src/main/scala/sbt/inc/Incremental.scala | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index d4c6d7871..8d2235303 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -308,21 +308,26 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */ def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] = { - def reverse(r: Relations.Source) = r.internal.reverse _ - invalidateSources(reverse(relations.direct), reverse(relations.publicInherited), changes) + invalidateSources(relations, changes) } /** Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not * included in a cycle with newly invalidated sources. 
*/ - private[this] def invalidateSources(directDeps: File => Set[File], publicInherited: File => Set[File], changes: APIChanges[File]): Set[File] = + private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] = { val initial = changes.allModified.toSet val all = (changes.apiChanges flatMap { change => - invalidateSource(directDeps, publicInherited, change) + invalidateSource(relations, change) }).toSet - includeInitialCond(initial, all, f => directDeps(f) ++ publicInherited(f)) + includeInitialCond(initial, all, allDeps(relations)) } - private[this] def invalidateSource(directDeps: File => Set[File], publicInherited: File => Set[File], change: APIChange[File]): Set[File] = { + private[this] def allDeps(relations: Relations): File => Set[File] = + f => relations.direct.internal.reverse(f) + + private[this] def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + def reverse(r: Relations.Source) = r.internal.reverse _ + val directDeps: File => Set[File] = reverse(relations.direct) + val publicInherited: File => Set[File] = reverse(relations.publicInherited) log.debug("Invalidating by inheritance (transitively)...") val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) From fdc72f3744143d9271f08e156c2eb0257f2f6b0e Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Thu, 5 Dec 2013 20:47:57 +0100 Subject: [PATCH 006/148] The invalidateByExternal takes single external api change. Refactor the `invalidateByExternal` method to take single, external api change. Introduce `invalidateByAllExternal` that takes all APIChanges object. This way `invalidateByExternal` will have an access to APIChange object that represents changed name hashes once name hashing is merged. 
--- .../src/main/scala/sbt/inc/Incremental.scala | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index 8d2235303..c434541fe 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -258,13 +258,12 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed val byProduct = changes.removedProducts.flatMap(previous.produced) val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary) - val externalModifiedSources = changes.external.allModified.toSet - val byExtSrcDep = invalidateByExternal(previous, externalModifiedSources) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations + val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations checkAbsolute(srcChanges.added.toList) log.debug( "\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed + "\nRemoved products: " + changes.removedProducts + - "\nModified external sources: " + externalModifiedSources + + "\nExternal API changes: " + changes.external + "\nModified binary dependencies: " + changes.binaryDeps + "\nInitial directly invalidated sources: " + srcDirect + "\n\nSources indirectly invalidated by:" + @@ -289,20 +288,26 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { } } + def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = { + (externalAPIChanges.apiChanges.flatMap { externalAPIChange => + invalidateByExternal(relations, externalAPIChange) + }).toSet + } + 
/** Sources invalidated by `external` sources in other projects according to the previous `relations`. */ - def invalidateByExternal(relations: Relations, external: Set[String]): Set[File] = - { + private def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified // Propagate public inheritance dependencies transitively. // This differs from normal because we need the initial crossing from externals to sources in this project. val externalInheritedR = relations.publicInherited.external - val byExternalInherited = external flatMap externalInheritedR.reverse + val byExternalInherited = externalInheritedR.reverse(modified) val internalInheritedR = relations.publicInherited.internal val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) // Get the direct dependencies of all sources transitively invalidated by inheritance val directA = transitiveInherited flatMap relations.direct.internal.reverse // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive. - val directB = external flatMap relations.direct.external.reverse + val directB = relations.direct.external.reverse(modified) transitiveInherited ++ directA ++ directB } /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */ From 4ebbf3fb8b53773bfb68ed81554cd0cbc1db78a1 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Thu, 5 Dec 2013 22:52:45 +0100 Subject: [PATCH 007/148] Refactor code in IncrementalCommon and IncrementalDefaultImpl Move implementation of the following methods from IncrementalCommon to IncrementalDefaultImpl: * invalidatedPackageObjects * sameAPI * invalidateByExternal * allDeps * invalidateSource These are the methods that are expected to have different implementation in the name hashing algorithm. 
Hence, we make them abstract in IncrementalCommon so they can be implemented differently in subclasses. --- .../src/main/scala/sbt/inc/Incremental.scala | 98 +++++++++++-------- 1 file changed, 56 insertions(+), 42 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index c434541fe..79c5ee8dd 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -112,10 +112,7 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { else invalidated } - // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error - // This might be too conservative: we probably only need package objects for packages of invalidated sources. - private[this] def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = - invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" } + protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] /** * Logs API changes using debug-level logging. The API are obtained using the APIDiff class. 
@@ -178,14 +175,7 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { } } - def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = { - if (SameAPI(a,b)) - None - else { - val sourceApiChange = SourceAPIChange(src) - Some(sourceApiChange) - } - } + protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep) def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs){ @@ -295,21 +285,8 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { } /** Sources invalidated by `external` sources in other projects according to the previous `relations`. */ - private def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { - val modified = externalAPIChange.modified - // Propagate public inheritance dependencies transitively. - // This differs from normal because we need the initial crossing from externals to sources in this project. - val externalInheritedR = relations.publicInherited.external - val byExternalInherited = externalInheritedR.reverse(modified) - val internalInheritedR = relations.publicInherited.internal - val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) + protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] - // Get the direct dependencies of all sources transitively invalidated by inheritance - val directA = transitiveInherited flatMap relations.direct.internal.reverse - // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive. 
- val directB = relations.direct.external.reverse(modified) - transitiveInherited ++ directA ++ directB - } /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */ def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] = { @@ -326,21 +303,9 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { includeInitialCond(initial, all, allDeps(relations)) } - private[this] def allDeps(relations: Relations): File => Set[File] = - f => relations.direct.internal.reverse(f) + protected def allDeps(relations: Relations): File => Set[File] - private[this] def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { - def reverse(r: Relations.Source) = r.internal.reverse _ - val directDeps: File => Set[File] = reverse(relations.direct) - val publicInherited: File => Set[File] = reverse(relations.publicInherited) - log.debug("Invalidating by inheritance (transitively)...") - val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) - log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) - val direct = transitiveInherited flatMap directDeps - log.debug("Invalidated by direct dependency: " + direct) - val all = transitiveInherited ++ direct - all - } + protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] /** Conditionally include initial sources that are dependencies of newly invalidated sources. ** Initial sources included in this step can be because of a cycle, but not always. 
*/ @@ -406,7 +371,7 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource def orTrue(o: Option[Boolean]): Boolean = o getOrElse true - private[this] def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = + protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = { val xs = new collection.mutable.HashSet[T] def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to)) @@ -467,4 +432,53 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { def pkgs(api: Source) = names(api :: Nil).map(pkg)*/ } -private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) +private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { + + // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error + // This might be too conservative: we probably only need package objects for packages of invalidated sources. + override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = + invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" } + + override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = { + if (SameAPI(a,b)) + None + else { + val sourceApiChange = SourceAPIChange(src) + Some(sourceApiChange) + } + } + + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified + // Propagate public inheritance dependencies transitively. 
+ // This differs from normal because we need the initial crossing from externals to sources in this project. + val externalInheritedR = relations.publicInherited.external + val byExternalInherited = externalInheritedR.reverse(modified) + val internalInheritedR = relations.publicInherited.internal + val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) + + // Get the direct dependencies of all sources transitively invalidated by inheritance + val directA = transitiveInherited flatMap relations.direct.internal.reverse + // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive. + val directB = relations.direct.external.reverse(modified) + transitiveInherited ++ directA ++ directB + } + + override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + def reverse(r: Relations.Source) = r.internal.reverse _ + val directDeps: File => Set[File] = reverse(relations.direct) + val publicInherited: File => Set[File] = reverse(relations.publicInherited) + log.debug("Invalidating by inheritance (transitively)...") + val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) + log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) + val direct = transitiveInherited flatMap directDeps + log.debug("Invalidated by direct dependency: " + direct) + val all = transitiveInherited ++ direct + all + } + + override protected def allDeps(relations: Relations): File => Set[File] = + f => relations.direct.internal.reverse(f) + +} From 88528a43cba84031de63e4a2e5634f0a09f6a930 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 6 Dec 2013 15:03:53 +0100 Subject: [PATCH 008/148] Fix a few mistakes related to IncOptions.recompileOnMacroDef The 39036e7c2097c5597df5e66a9d4923dd5154a510 introduced `recompileOnMacroDef` option to IncOptions. However, not all necessary logic has been changed. 
This commit fixes that: * `copy` method does not forget the value of the `recompileOnMacroDef` flag * `productArity` has been increased to match the arity of the class * `productElement` returns the value of `recompileOnMacroDef` flag * `hashCode` and `equals` methods take into account value of `recompileOnMacroDef` flag * fix the name of the key for `recompileOnMacroDef` flag --- compile/inc/src/main/scala/sbt/inc/IncOptions.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala index 7077d2291..9b366460b 100644 --- a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala +++ b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala @@ -112,14 +112,14 @@ final class IncOptions( apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory, newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef) } @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") override def productPrefix: String = "IncOptions" @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def productArity: Int = 7 + def productArity: Int = 8 @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") def productElement(x$1: Int): Any = x$1 match { @@ -130,6 +130,7 @@ final class IncOptions( case 4 => IncOptions.this.apiDiffContextSize case 5 => IncOptions.this.apiDumpDirectory case 6 => IncOptions.this.newClassfileManager + case 7 => IncOptions.this.recompileOnMacroDef case _ => throw new IndexOutOfBoundsException(x$1.toString()) } @@ -149,7 +150,8 @@ final class IncOptions( acc = Statics.mix(acc, apiDiffContextSize) acc = Statics.mix(acc, 
Statics.anyHash(apiDumpDirectory)) acc = Statics.mix(acc, Statics.anyHash(newClassfileManager)) - Statics.finalizeHash(acc, 7) + acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237) + Statics.finalizeHash(acc, 8) } override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this) @@ -160,7 +162,8 @@ final class IncOptions( transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction && relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug && apiDiffContextSize == IncOptions$1.apiDiffContextSize && apiDumpDirectory == IncOptions$1.apiDumpDirectory && - newClassfileManager == IncOptions$1.newClassfileManager + newClassfileManager == IncOptions$1.newClassfileManager && + recompileOnMacroDef == IncOptions$1.recompileOnMacroDef })) } //- EXPANDED CASE CLASS METHOD END -// @@ -217,7 +220,7 @@ object IncOptions extends Serializable { private val apiDebugKey = "apiDebug" private val apiDumpDirectoryKey = "apiDumpDirectory" private val apiDiffContextSizeKey = "apiDiffContextSize" - private val recompileOnMacroDefKey = "recompileOnMacroDefKey" + private val recompileOnMacroDefKey = "recompileOnMacroDef" def fromStringMap(m: java.util.Map[String, String]): IncOptions = { // all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API From cacb17fb2ea8210d9c8b5c00b0466c4ed4fba44a Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 6 Dec 2013 15:44:03 +0100 Subject: [PATCH 009/148] Add `nameHashing` option to IncOptions This option is not used anywhere yet. This commit just contains all the boilerplate needed in order to introduce a new field to IncOptions class. 
--- .../src/main/scala/sbt/inc/IncOptions.scala | 68 +++++++++++++------ 1 file changed, 49 insertions(+), 19 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala index 9b366460b..0e634aa4f 100644 --- a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala +++ b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala @@ -51,57 +51,76 @@ final class IncOptions( * Determines whether incremental compiler should recompile all dependencies of a file * that contains a macro definition. */ - val recompileOnMacroDef: Boolean + val recompileOnMacroDef: Boolean, + /** + * Determines whether incremental compiler uses the new algorithm known as name hashing. + * + * This flag is disabled by default so incremental compiler's behavior is the same as in sbt 0.13.0. + * + * IMPLEMENTATION NOTE: + * Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm: + * + * 1. New dependency source tracking is used. See `sbt.inc.Relations` for details. + * 2. Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well. + * 3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details. + * + */ + val nameHashing: Boolean ) extends Product with Serializable { /** * Secondary constructor introduced to make IncOptions to be binary compatible with version that didn't have - * `recompileOnMacroDef` filed defined. + * `recompileOnMacroDef` and `nameHashing` fields defined. 
*/ def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = { this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault) + apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault) } def withTransitiveStep(transitiveStep: Int): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withRelationsDebug(relationsDebug: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withApiDebug(apiDebug: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, 
nameHashing) } def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } + + def withNameHashing(nameHashing: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } //- EXPANDED CASE CLASS METHOD BEGIN -// @@ -112,14 +131,14 @@ final class IncOptions( apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory, newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") override def productPrefix: String = "IncOptions" @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def productArity: Int = 8 + def productArity: Int = 9 @deprecated("Methods generated 
for case class will be removed in the future.", "0.13.2") def productElement(x$1: Int): Any = x$1 match { @@ -131,6 +150,7 @@ final class IncOptions( case 5 => IncOptions.this.apiDumpDirectory case 6 => IncOptions.this.newClassfileManager case 7 => IncOptions.this.recompileOnMacroDef + case 8 => IncOptions.this.nameHashing case _ => throw new IndexOutOfBoundsException(x$1.toString()) } @@ -151,7 +171,8 @@ final class IncOptions( acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory)) acc = Statics.mix(acc, Statics.anyHash(newClassfileManager)) acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237) - Statics.finalizeHash(acc, 8) + acc = Statics.mix(acc, if (nameHashing) 1231 else 1237) + Statics.finalizeHash(acc, 9) } override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this) @@ -163,7 +184,7 @@ final class IncOptions( relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug && apiDiffContextSize == IncOptions$1.apiDiffContextSize && apiDumpDirectory == IncOptions$1.apiDumpDirectory && newClassfileManager == IncOptions$1.newClassfileManager && - recompileOnMacroDef == IncOptions$1.recompileOnMacroDef + recompileOnMacroDef == IncOptions$1.recompileOnMacroDef && nameHashing == IncOptions$1.nameHashing })) } //- EXPANDED CASE CLASS METHOD END -// @@ -171,6 +192,7 @@ final class IncOptions( object IncOptions extends Serializable { private val recompileOnMacroDefDefault: Boolean = true + private val nameHashingDefault: Boolean = false val Default = IncOptions( // 1. recompile changed sources // 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2). 
@@ -182,7 +204,8 @@ object IncOptions extends Serializable { apiDiffContextSize = 5, apiDumpDirectory = None, newClassfileManager = ClassfileManager.deleteImmediately, - recompileOnMacroDef = recompileOnMacroDefDefault + recompileOnMacroDef = recompileOnMacroDefDefault, + nameHashing = nameHashingDefault ) //- EXPANDED CASE CLASS METHOD BEGIN -// final override def toString(): String = "IncOptions" @@ -195,9 +218,10 @@ object IncOptions extends Serializable { } def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], - newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean): IncOptions = { + newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean, + nameHashing: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") def unapply(x$0: IncOptions): Option[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = { @@ -221,6 +245,7 @@ object IncOptions extends Serializable { private val apiDumpDirectoryKey = "apiDumpDirectory" private val apiDiffContextSizeKey = "apiDiffContextSize" private val recompileOnMacroDefKey = "recompileOnMacroDef" + private val nameHashingKey = "nameHashing" def fromStringMap(m: java.util.Map[String, String]): IncOptions = { // all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API @@ -254,9 +279,13 @@ object IncOptions extends Serializable { val k = recompileOnMacroDefKey if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef } + def getNameHashing: Boolean = { + val k = nameHashingKey + if (m.containsKey(k)) 
m.get(k).toBoolean else Default.nameHashing + } new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize, - getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef) + getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing) } def toStringMap(o: IncOptions): java.util.Map[String, String] = { @@ -268,6 +297,7 @@ object IncOptions extends Serializable { o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString)) m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString) m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString) + m.put(nameHashingKey, o.nameHashing.toString) m } } From d70bc51b6d077fac3c112f7b61189b4eb422a9b2 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 6 Dec 2013 15:53:33 +0100 Subject: [PATCH 010/148] Use `nameHashing` option throughout incremental compiler There are two categories of places in the code that need to refer to `nameHashing` option: * places where Analysis object is created so it gets proper implementation of underlying Relations object * places with logic that is specifically designed to be enabled by that option This commit covers both cases. 
--- compile/inc/src/main/scala/sbt/inc/Analysis.scala | 2 ++ compile/inc/src/main/scala/sbt/inc/Compile.scala | 4 ++-- compile/inc/src/main/scala/sbt/inc/Incremental.scala | 1 + .../src/main/scala/sbt/compiler/AggressiveCompile.scala | 6 +++--- .../src/main/scala/sbt/compiler/IncrementalCompiler.scala | 6 ++++++ 5 files changed, 14 insertions(+), 5 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Analysis.scala b/compile/inc/src/main/scala/sbt/inc/Analysis.scala index 212b6fc6d..aaa63918d 100644 --- a/compile/inc/src/main/scala/sbt/inc/Analysis.scala +++ b/compile/inc/src/main/scala/sbt/inc/Analysis.scala @@ -56,6 +56,8 @@ trait Analysis object Analysis { lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty) + private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty, + Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty) /** Merge multiple analysis objects into one. Deps will be internalized as needed. 
*/ def merge(analyses: Traversable[Analysis]): Analysis = { diff --git a/compile/inc/src/main/scala/sbt/inc/Compile.scala b/compile/inc/src/main/scala/sbt/inc/Compile.scala index a5b56a5c5..edf714f06 100644 --- a/compile/inc/src/main/scala/sbt/inc/Compile.scala +++ b/compile/inc/src/main/scala/sbt/inc/Compile.scala @@ -157,9 +157,9 @@ private final class AnalysisCallback(internalMap: File => Option[File], external def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name) - def nameHashing: Boolean = false // TODO: define the flag in IncOptions which controls this + def nameHashing: Boolean = options.nameHashing - def get: Analysis = addUsedNames( addCompilation( addExternals( addBinaries( addProducts( addSources(Analysis.Empty) ) ) ) ) ) + def get: Analysis = addUsedNames( addCompilation( addExternals( addBinaries( addProducts( addSources(Analysis.empty(nameHashing = nameHashing)) ) ) ) ) ) def addProducts(base: Analysis): Analysis = addAll(base, classes) { case (a, src, (prod, name)) => a.addProduct(src, prod, current product prod, name ) } def addBinaries(base: Analysis): Analysis = addAll(base, binaryDeps)( (a, src, bin) => a.addBinaryDep(src, bin, binaryClassName(bin), current binary bin) ) def addSources(base: Analysis): Analysis = diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index 79c5ee8dd..dc86f818d 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -21,6 +21,7 @@ object Incremental log: Logger, options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = { + assert(!options.nameHashing, "We don't support name hashing algorithm yet.") val incremental = new IncrementalDefaultImpl(log, options) val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) val binaryChanges = new DependencyChanges { diff --git 
a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala index 5fc1bafba..fec36db56 100644 --- a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala +++ b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala @@ -61,7 +61,7 @@ class AggressiveCompile(cacheFile: File) cache: GlobalsCache, incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = { - val (previousAnalysis, previousSetup) = extract(store.get()) + val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions) if(skip) previousAnalysis else { @@ -169,11 +169,11 @@ class AggressiveCompile(cacheFile: File) if(!combined.isEmpty) log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "...")) } - private def extract(previous: Option[(Analysis, CompileSetup)]): (Analysis, Option[CompileSetup]) = + private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) = previous match { case Some((an, setup)) => (an, Some(setup)) - case None => (Analysis.Empty, None) + case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None) } def javaOnly(f: File) = f.getName.endsWith(".java") diff --git a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala index 8600f6a70..68ad63f2c 100644 --- a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala +++ b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala @@ -36,9 +36,15 @@ object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] def readCache(file: File): Maybe[(Analysis, CompileSetup)] = try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() } + @deprecated("Use overloaded variant which takes `IncOptions` 
as parameter.", "0.13.2") def readAnalysis(file: File): Analysis = try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty } + def readAnalysis(file: File, incOptions: IncOptions): Analysis = + try { readCacheUncaught(file)._1 } catch { + case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing) + } + def readCacheUncaught(file: File): (Analysis, CompileSetup) = Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) } } From c2dc6cd529c0c945699012c291036873f06954f6 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Thu, 12 Dec 2013 15:57:55 +0100 Subject: [PATCH 011/148] Test for inheritance and member references. Add a test-case that documents current behavior of incremental compiler when it comes to invalidating dependencies that arise from inheritance and member references. --- .../transitive-memberRef/build.sbt | 36 +++++++++++++++++++ .../transitive-memberRef/changes/A1.scala | 5 +++ .../src/main/scala/A.scala | 3 ++ .../src/main/scala/B.scala | 3 ++ .../src/main/scala/C.scala | 3 ++ .../src/main/scala/D.scala | 3 ++ .../src/main/scala/X.scala | 5 +++ .../src/main/scala/Y.scala | 5 +++ .../transitive-memberRef/test | 11 ++++++ 9 files changed, 74 insertions(+) create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala create mode 100644 
sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala create mode 100644 sbt/src/sbt-test/source-dependencies/transitive-memberRef/test diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt new file mode 100644 index 000000000..ef32473dc --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt @@ -0,0 +1,36 @@ +logLevel := Level.Debug + +// disable sbt's heuristic which recompiles everything in case +// some fraction (e.g. 50%) of files is scheduled to be recompiled +// in this test we want precise information about recompiled files +// which that heuristic would distort +incOptions := incOptions.value.copy(recompileAllFraction = 1.0) + +/* Performs checks related to compilations: + * a) checks in which compilation given set of files was recompiled + * b) checks overall number of compilations performed + */ +TaskKey[Unit]("check-compilations") <<= (compile in Compile, scalaSource in Compile) map { (a: sbt.inc.Analysis, src: java.io.File) => + def relative(f: java.io.File): java.io.File = f.relativeTo(src) getOrElse f + val allCompilations = a.compilations.allCompilations + val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c => + val recompiledFiles = a.apis.internal.collect { + case (file, api) if api.compilation.startTime == c.startTime => relative(file) + } + recompiledFiles.toSet + } + def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = { + val files = fileNames.map(new java.io.File(_)) + assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) + } + // Y.scala is compiled only at the beginning as changes to A.scala do not affect it + recompiledFilesInIteration(0, Set("Y.scala")) + // A.scala is changed and recompiled + recompiledFilesInIteration(1, Set("A.scala")) + // change in A.scala causes recompilation of B.scala, C.scala,
D.scala which depend transitively + and by inheritance on A.scala + // X.scala is also recompiled because it depends by member reference on B.scala + // Note that Y.scala is not recompiled because it depends just on X through member reference dependency + recompiledFilesInIteration(2, Set("B.scala", "C.scala", "D.scala", "X.scala")) + assert(allCompilations.size == 3) +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala new file mode 100644 index 000000000..63a2739e1 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala @@ -0,0 +1,5 @@ +package test + +class A { + def foo: Int = 23 +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala new file mode 100644 index 000000000..1b0178fd9 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala @@ -0,0 +1,3 @@ +package test + +class A diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala new file mode 100644 index 000000000..b9913245b --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala @@ -0,0 +1,3 @@ +package test + +class B extends A diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala new file mode 100644 index 000000000..4ce04f8bf --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala @@ -0,0 +1,3 @@ +package test + +class C extends B diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala
b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala new file mode 100644 index 000000000..eff328ce5 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala @@ -0,0 +1,3 @@ +package test + +class D extends C diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala new file mode 100644 index 000000000..8c0d9edf8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala @@ -0,0 +1,5 @@ +package test + +class X { + def bar(b: B) = b +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala new file mode 100644 index 000000000..df53c3c5c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala @@ -0,0 +1,5 @@ +package test + +class Y { + def baz(x: X) = x +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/test b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/test new file mode 100644 index 000000000..395f90229 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/test @@ -0,0 +1,11 @@ +# introduces first compile iteration +> compile +# adds a new method to A which will cause transitive invalidation +# of all source files that inherit from it +# also, all direct dependencies of files that inherit from A will +# be invalidated (in our case that's X.scala) +$ copy-file changes/A1.scala src/main/scala/A.scala +# second iteration +> compile +# check in which compile iteration given source file got recompiled +> check-compilations From b6e16f1e959c95a7b66de8c4d9632d47d2a7974b Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Sun, 5 Jan 2014 23:40:02 +0900 Subject: [PATCH 012/148] fix 
CommandStrings.settingsDetailed --- main/src/main/scala/sbt/CommandStrings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main/src/main/scala/sbt/CommandStrings.scala b/main/src/main/scala/sbt/CommandStrings.scala index 073480c31..9baf0e7d1 100644 --- a/main/src/main/scala/sbt/CommandStrings.scala +++ b/main/src/main/scala/sbt/CommandStrings.scala @@ -167,7 +167,7 @@ Syntax summary Displays the main % Date: Mon, 6 Jan 2014 14:08:02 +0100 Subject: [PATCH 013/148] Javadoc changes --- launch/interface/src/main/java/xsbti/Exit.java | 6 ++++-- .../interface/src/main/java/xsbti/MainResult.java | 14 +++++++++----- launch/interface/src/main/java/xsbti/Reboot.java | 8 +++++--- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/launch/interface/src/main/java/xsbti/Exit.java b/launch/interface/src/main/java/xsbti/Exit.java index f88c8c591..3363fce39 100644 --- a/launch/interface/src/main/java/xsbti/Exit.java +++ b/launch/interface/src/main/java/xsbti/Exit.java @@ -1,7 +1,9 @@ package xsbti; -/** A launched application returns an instance of this class in order to communicate to the launcher -* that the application is completely finished and the launcher should exit with the given exit code.*/ +/** + * A launched application returns an instance of this class in order to communicate to the launcher + * that the application finished and the launcher should exit with the given exit code. + */ public interface Exit extends MainResult { public int code(); diff --git a/launch/interface/src/main/java/xsbti/MainResult.java b/launch/interface/src/main/java/xsbti/MainResult.java index e81aede2d..b6f27a680 100644 --- a/launch/interface/src/main/java/xsbti/MainResult.java +++ b/launch/interface/src/main/java/xsbti/MainResult.java @@ -1,8 +1,12 @@ package xsbti; -/** A launched application should return an instance of this from its 'run' method -* to communicate to the launcher what should be done now that the application -* has competed. 
This interface should be treated as 'sealed', with Exit and Reboot the only -* direct subtypes. -*/ +/** + * A launched application should return an instance of this from its 'run' method + * to communicate to the launcher what should be done now that the application + * has completed. This interface should be treated as 'sealed', with Exit and Reboot the only + * direct subtypes. + * + * @see xsbti.Exit + * @see xsbti.Reboot + */ public interface MainResult {} \ No newline at end of file diff --git a/launch/interface/src/main/java/xsbti/Reboot.java b/launch/interface/src/main/java/xsbti/Reboot.java index cb978c32a..0d6136a53 100644 --- a/launch/interface/src/main/java/xsbti/Reboot.java +++ b/launch/interface/src/main/java/xsbti/Reboot.java @@ -2,9 +2,11 @@ package xsbti; import java.io.File; -/** A launched application returns an instance of this class in order to communicate to the launcher -* that the application should be restarted. Different versions of the application and Scala can be used. -* The application can be given different arguments and a new working directory as well.*/ +/** + * A launched application returns an instance of this class in order to communicate to the launcher + * that the application should be restarted. Different versions of the application and Scala can be used. + * The application can be given different arguments as well as a new working directory. 
+ */ public interface Reboot extends MainResult { public String[] arguments(); From f49ed56c1f66bec8a594a2c94b05316e5551318a Mon Sep 17 00:00:00 2001 From: Jacek Laskowski Date: Mon, 6 Jan 2014 14:17:42 +0100 Subject: [PATCH 014/148] Use string interpolation and replace deprecated methods --- main/command/src/main/scala/sbt/BasicCommandStrings.scala | 2 +- main/src/main/scala/sbt/CommandStrings.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/main/command/src/main/scala/sbt/BasicCommandStrings.scala b/main/command/src/main/scala/sbt/BasicCommandStrings.scala index 3c8a38f14..237db8d7d 100644 --- a/main/command/src/main/scala/sbt/BasicCommandStrings.scala +++ b/main/command/src/main/scala/sbt/BasicCommandStrings.scala @@ -19,7 +19,7 @@ object BasicCommandStrings /** The command name to terminate the program.*/ val TerminateAction: String = Exit - def helpBrief = (HelpCommand, "Displays this help message or prints detailed help on requested commands (run 'help ').") + def helpBrief = (HelpCommand, s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand ').") def helpDetailed = HelpCommand + """ Prints a help summary. diff --git a/main/src/main/scala/sbt/CommandStrings.scala b/main/src/main/scala/sbt/CommandStrings.scala index 073480c31..e85141ad8 100644 --- a/main/src/main/scala/sbt/CommandStrings.scala +++ b/main/src/main/scala/sbt/CommandStrings.scala @@ -38,7 +38,7 @@ s"""$multiTaskSyntax def multiTaskBrief = """Executes all of the specified tasks concurrently.""" - def showHelp = Help(ShowCommand, (ShowCommand + " ", actBrief), actDetailed) + def showHelp = Help(ShowCommand, (s"$ShowCommand ", showBrief), showDetailed) def showBrief = "Displays the result of evaluating the setting or task associated with 'key'." 
def showDetailed = s"""$ShowCommand From 418b85490776faff4f3c0b6505735984ab3a407a Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Thu, 12 Dec 2013 12:31:23 -0500 Subject: [PATCH 015/148] Launcher can now load servers in addition to regular applications. * Add new ServerMain interface * AppProvider can now choose to load ServerMain or AppMain classes * Explicitly document what happens if something doesn't match an expected interface * Improve error message on inability to load something. * Parse new [server] section that denotes a service and is lock file * Ability to serialize launch configurations. * Attempt to look for active listening server via the lock file * Forks the launcher itself to run servers from serialized launch configuration. * Testing echo server. * Tests to detect basic server functionality will work. * Revamp all the documentation for the launcher, giving it its own section. * Full documentation on launcher configuration files. Revamp launcher documentation to be a bit more in-depth, and split bits into sections. 
--- .../src/main/java/xsbti/AppMain.java | 19 + .../src/main/java/xsbti/AppProvider.java | 7 +- .../interface/src/main/java/xsbti/Server.java | 36 ++ .../src/main/java/xsbti/ServerMain.java | 17 + launch/src/main/scala/xsbt/boot/Boot.scala | 40 +- .../main/scala/xsbt/boot/Configuration.scala | 21 +- .../scala/xsbt/boot/ConfigurationParser.scala | 17 +- launch/src/main/scala/xsbt/boot/Create.scala | 10 +- .../main/scala/xsbt/boot/Enumeration.scala | 4 +- launch/src/main/scala/xsbt/boot/Launch.scala | 81 +++- .../scala/xsbt/boot/LaunchConfiguration.scala | 35 +- launch/src/main/scala/xsbt/boot/Pre.scala | 25 ++ .../main/scala/xsbt/boot/ResolveValues.scala | 9 +- .../scala/xsbt/boot/ServerApplication.scala | 200 +++++++++ launch/src/main/scala/xsbt/boot/Update.scala | 2 +- launch/src/test/scala/ServerLocatorTest.scala | 53 +++ .../main/scala/xsbt/boot/test/Servers.scala | 74 ++++ src/sphinx/Detailed-Topics/Advanced-Index.rst | 1 - src/sphinx/Detailed-Topics/Launcher.rst | 390 +----------------- src/sphinx/Detailed-Topics/index.rst | 1 + src/sphinx/Launcher/Architecture.rst | 108 +++++ src/sphinx/Launcher/Configuration.rst | 260 ++++++++++++ src/sphinx/Launcher/GettingStarted.rst | 232 +++++++++++ src/sphinx/Launcher/classloaders.png | Bin 0 -> 22551 bytes src/sphinx/Launcher/index.rst | 14 + src/sphinx/index.rst | 1 - 26 files changed, 1206 insertions(+), 451 deletions(-) create mode 100644 launch/interface/src/main/java/xsbti/Server.java create mode 100644 launch/interface/src/main/java/xsbti/ServerMain.java create mode 100644 launch/src/main/scala/xsbt/boot/ServerApplication.scala create mode 100644 launch/src/test/scala/ServerLocatorTest.scala create mode 100644 launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala create mode 100644 src/sphinx/Launcher/Architecture.rst create mode 100644 src/sphinx/Launcher/Configuration.rst create mode 100644 src/sphinx/Launcher/GettingStarted.rst create mode 100644 src/sphinx/Launcher/classloaders.png create mode 100644 
src/sphinx/Launcher/index.rst diff --git a/launch/interface/src/main/java/xsbti/AppMain.java b/launch/interface/src/main/java/xsbti/AppMain.java index ffd1e4c36..b24e02212 100644 --- a/launch/interface/src/main/java/xsbti/AppMain.java +++ b/launch/interface/src/main/java/xsbti/AppMain.java @@ -1,6 +1,25 @@ package xsbti; +/** + * The main entry interface for launching applications. Classes which implement this interface + * can be launched via the sbt launcher. + * + * In addition, classes can be adapted into this interface by the launcher if they have a static method + * matching one of these signatures: + * + * - public static void main(String[] args) + * - public static int main(String[] args) + * - public static xsbti.Exit main(String[] args) + * + */ public interface AppMain { + /** Run the application and return the result. + * + * @param configuration The configuration used to run the application. Includes arguments and access to launcher features. + * @return + * The result of running this app. + * Note: the result can be things like "Please reboot this application". + */ public MainResult run(AppConfiguration configuration); } \ No newline at end of file diff --git a/launch/interface/src/main/java/xsbti/AppProvider.java b/launch/interface/src/main/java/xsbti/AppProvider.java index 24744c83c..ab3914210 100644 --- a/launch/interface/src/main/java/xsbti/AppProvider.java +++ b/launch/interface/src/main/java/xsbti/AppProvider.java @@ -3,9 +3,10 @@ package xsbti; import java.io.File; /** - * This represents an interface that can generate applications. + * This represents an interface that can generate applications or servers. * - * An application is somethign which will run and return an exit value. + * This provider grants access to launcher related features associated with + * the id. */ public interface AppProvider { @@ -33,6 +34,8 @@ public interface AppProvider * It is NOT guaranteed that newMain().getClass() == mainClass(). 
* The sbt launcher can wrap generic static main methods. In this case, there will be a wrapper class, * and you must use the `entryPoint` method. + * @throws IncompatibleClassChangeError if the configuration used for this Application does not + * represent a launched application. */ public AppMain newMain(); diff --git a/launch/interface/src/main/java/xsbti/Server.java b/launch/interface/src/main/java/xsbti/Server.java new file mode 100644 index 000000000..d4eca176d --- /dev/null +++ b/launch/interface/src/main/java/xsbti/Server.java @@ -0,0 +1,36 @@ +package xsbti; + +/** A running server. + * + * A class implementing this must: + * + * 1. Expose an HTTP port that clients can connect to, returned via the uri method. + * 2. Accept HTTP HEAD requests against the returned URI. These are used as "ping" messages to ensure + * a server is still alive, when new clients connect. + * 3. Create a new thread to execute its service + * 4. Block the calling thread until the server is shutdown via awaitTermination() + */ +public interface Server { + /** + * @return + * A URI denoting the Port which clients can connect to. + * + * Note: we use a URI so that the server can bind to different IP addresses (even a public one) if desired. + * Note: To verify that a server is "up", the sbt launcher will attempt to connect to + * this URI's address and port with a socket. If the connection is accepted, the server is assumed to + * be working. + */ + public java.net.URI uri(); + /** + * This should block the calling thread until the server is shutdown. + * + * @return + * The result that should occur from the server. 
+ * Can be: + * - xsbti.Exit: Shutdown this launch + * - xsbti.Reboot: Restart the server + * + * + */ + public xsbti.MainResult awaitTermination(); +} \ No newline at end of file diff --git a/launch/interface/src/main/java/xsbti/ServerMain.java b/launch/interface/src/main/java/xsbti/ServerMain.java new file mode 100644 index 000000000..da3c8ce2b --- /dev/null +++ b/launch/interface/src/main/java/xsbti/ServerMain.java @@ -0,0 +1,17 @@ +package xsbti; + +/** The main entry point for a launched service. This allows applciations + * to instantiate server instances. + */ +public interface ServerMain { + /** + * This method should launch one or more thread(s) which run the service. After the service has + * been started, it should return the port/URI it is listening for connections on. + * + * @param configuration + * The configuration used to launch this service. + * @return + * A running server. + */ + public Server start(AppConfiguration configuration); +} \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Boot.scala b/launch/src/main/scala/xsbt/boot/Boot.scala index 06ee1ba82..665407cff 100644 --- a/launch/src/main/scala/xsbt/boot/Boot.scala +++ b/launch/src/main/scala/xsbt/boot/Boot.scala @@ -5,36 +5,48 @@ import java.io.File + // The entry point to the launcher object Boot { def main(args: Array[String]) { - args match { - case Array("--version") => - println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion) - case _ => - System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM - System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks - System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise - CheckProxy() - run(args) - } + val config = parseArgs(args) + // If we havne't exited, we set up some hooks and launch + System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM + 
System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks + System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise + CheckProxy() + run(config) } + def parseArgs(args: Array[String]): LauncherArguments = { + @annotation.tailrec + def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments = + args match { + case "--version" :: rest => + println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion) + exit(1) + case "--locate" :: rest => parse(rest, true, remaining) + case next :: rest => parse(rest, isLocate, next :: remaining) + case Nil => new LauncherArguments(remaining.reverse, isLocate) + } + parse(args.toList, false, Nil) + } + // this arrangement is because Scala does not always properly optimize away // the tail recursion in a catch statement - final def run(args: Array[String]): Unit = runImpl(args) match { + final def run(args: LauncherArguments): Unit = runImpl(args) match { case Some(newArgs) => run(newArgs) case None => () } - private def runImpl(args: Array[String]): Option[Array[String]] = + private def runImpl(args: LauncherArguments): Option[LauncherArguments] = try - Launch(args.toList) map exit + Launch(args) map exit catch { case b: BootException => errorAndExit(b.toString) case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage) - case r: xsbti.FullReload => Some(r.arguments) + case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false)) case e: Throwable => e.printStackTrace errorAndExit(Pre.prefixError(e.toString)) diff --git a/launch/src/main/scala/xsbt/boot/Configuration.scala b/launch/src/main/scala/xsbt/boot/Configuration.scala index e9464406a..4028e89cf 100644 --- a/launch/src/main/scala/xsbt/boot/Configuration.scala +++ b/launch/src/main/scala/xsbt/boot/Configuration.scala @@ -10,21 +10,34 @@ import java.util.regex.Pattern import scala.collection.immutable.List import 
annotation.tailrec +object ConfigurationStorageState extends Enumeration { + val PropertiesFile = value("properties-file") + val SerializedFile = value("serialized-file") +} + object Configuration { + import ConfigurationStorageState._ final val SysPropPrefix = "-D" def parse(file: URL, baseDirectory: File) = Using( new InputStreamReader(file.openStream, "utf8") )( (new ConfigurationParser).apply ) - @tailrec def find(args: List[String], baseDirectory: File): (URL, List[String]) = + + /** + * Finds the configuration location. + * + * Note: Configuration may be previously serialized by a launcher. + */ + @tailrec def find(args: List[String], baseDirectory: File): (URL, List[String], ConfigurationStorageState.Value) = args match { - case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail) + case head :: tail if head.startsWith("@load:") => (directConfiguration(head.substring(6), baseDirectory), tail, SerializedFile) + case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail, PropertiesFile) case head :: tail if head.startsWith(SysPropPrefix) => setProperty(head stripPrefix SysPropPrefix) find(tail, baseDirectory) case _ => val propertyConfigured = System.getProperty("sbt.boot.properties") val url = if(propertyConfigured == null) configurationOnClasspath else configurationFromFile(propertyConfigured, baseDirectory) - (url , args) + (url, args, PropertiesFile) } def setProperty(head: String) { @@ -108,7 +121,7 @@ object Configuration // We have to hard code them here in order to use them to determine the location of sbt.boot.properties itself def guessSbtVersion: Option[String] = { - val props = ResolveValues.readProperties(new File(DefaultBuildProperties)) + val props = Pre.readProperties(new File(DefaultBuildProperties)) Option(props.getProperty(SbtVersionProperty)) } diff --git a/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala 
b/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala index 8b1252e4a..659573550 100644 --- a/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala +++ b/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala @@ -78,11 +78,14 @@ class ConfigurationParser val (logging, m5) = processSection(m4, "log", getLogging) val (properties, m6) = processSection(m5, "app-properties", getAppProperties) val ((ivyHome, checksums, isOverrideRepos, rConfigFile), m7) = processSection(m6, "ivy", getIvy) - check(m7, "section") + val (serverOptions, m8) = processSection(m7, "server", getServer) + check(m8, "section") val classifiers = Classifiers(scalaClassifiers, appClassifiers) val repositories = rConfigFile map readRepositoriesConfig getOrElse defaultRepositories val ivyOptions = IvyOptions(ivyHome, classifiers, repositories, checksums, isOverrideRepos) - new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties) + + // TODO - Read server properties... + new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties, serverOptions) } def getScala(m: LabelMap) = { @@ -178,6 +181,16 @@ class ConfigurationParser val app = new Application(org, name, rev, main, components, LaunchCrossVersion(crossVersioned), classpathExtra) (app, classifiers) } + def getServer(m: LabelMap): (Option[ServerConfiguration]) = + { + val (lock, m1) = optfile(m, "lock") + // TODO - JVM args + val (args, m2) = optfile(m1, "jvmargs") + val (props, m3) = optfile(m2, "jvmprops") + lock map { file => + ServerConfiguration(file, args, props) + } + } def getRepositories(m: LabelMap): List[Repository.Repository] = { import Repository.{Ivy, Maven, Predefined} diff --git a/launch/src/main/scala/xsbt/boot/Create.scala b/launch/src/main/scala/xsbt/boot/Create.scala index b22cd2324..17e549781 100644 --- a/launch/src/main/scala/xsbt/boot/Create.scala +++ b/launch/src/main/scala/xsbt/boot/Create.scala @@ -33,17 +33,11 @@ object Initialize def fill(file: File, spec: 
List[AppProperty]): Unit = process(file, spec, selectFill) def process(file: File, appProperties: List[AppProperty], select: AppProperty => Option[PropertyInit]) { - val properties = new Properties - if(file.exists) - Using(new FileInputStream(file))( properties.load ) + val properties = readProperties(file) val uninitialized = for(property <- appProperties; init <- select(property) if properties.getProperty(property.name) == null) yield initialize(properties, property.name, init) - if(!uninitialized.isEmpty) - { - file.getParentFile.mkdirs() - Using(new FileOutputStream(file))( out => properties.store(out, "") ) - } + if(!uninitialized.isEmpty) writeProperties(properties, file, "") } def initialize(properties: Properties, name: String, init: PropertyInit) { diff --git a/launch/src/main/scala/xsbt/boot/Enumeration.scala b/launch/src/main/scala/xsbt/boot/Enumeration.scala index 3e5a6f89d..e65309f2a 100644 --- a/launch/src/main/scala/xsbt/boot/Enumeration.scala +++ b/launch/src/main/scala/xsbt/boot/Enumeration.scala @@ -6,7 +6,7 @@ package xsbt.boot import Pre._ import scala.collection.immutable.List -class Enumeration +class Enumeration extends Serializable { def elements: List[Value] = members private lazy val members: List[Value] = @@ -25,6 +25,6 @@ class Enumeration } def value(s: String) = new Value(s, 0) def value(s: String, i: Int) = new Value(s, i) - final class Value(override val toString: String, val id: Int) + final class Value(override val toString: String, val id: Int) extends Serializable def toValue(s: String): Value = elements.find(_.toString == s).getOrElse(error("Expected one of " + elements.mkString(",") + " (got: " + s + ")")) } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Launch.scala b/launch/src/main/scala/xsbt/boot/Launch.scala index 4604ce2ef..f27441918 100644 --- a/launch/src/main/scala/xsbt/boot/Launch.scala +++ b/launch/src/main/scala/xsbt/boot/Launch.scala @@ -6,20 +6,64 @@ package xsbt.boot import Pre._ import 
BootConfiguration.{CompilerModuleName, JAnsiVersion, LibraryModuleName} import java.io.File -import java.net.{URL, URLClassLoader} +import java.net.{URL, URLClassLoader, URI} import java.util.concurrent.Callable import scala.collection.immutable.List import scala.annotation.tailrec +import ConfigurationStorageState._ + +class LauncherArguments(val args: List[String], val isLocate: Boolean) object Launch { - def apply(arguments: List[String]): Option[Int] = apply( (new File("")).getAbsoluteFile , arguments ) + def apply(arguments: LauncherArguments): Option[Int] = apply( (new File("")).getAbsoluteFile , arguments ) - def apply(currentDirectory: File, arguments: List[String]): Option[Int] = { - val (configLocation, newArguments) = Configuration.find(arguments, currentDirectory) - val config = parseAndInitializeConfig(configLocation, currentDirectory) - launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArguments)) + def apply(currentDirectory: File, arguments: LauncherArguments): Option[Int] = { + val (configLocation, newArgs2, state) = Configuration.find(arguments.args, currentDirectory) + val config = state match { + case SerializedFile => LaunchConfiguration.restore(configLocation) + case PropertiesFile => parseAndInitializeConfig(configLocation, currentDirectory) + } + if(arguments.isLocate) { + if(!newArgs2.isEmpty) { + // TODO - Print the arguments without exploding proguard size. + System.err.println("Warning: --locate option ignores arguments.") + } + locate(currentDirectory, config) + } else { + // First check to see if there are java system properties we need to set. Then launch the application. + updateProperties(config) + launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArgs2)) + } } + /** Locate a server, print where it is, and exit. 
*/ + def locate(currentDirectory: File, config: LaunchConfiguration): Option[Int] = { + config.serverConfig match { + case Some(_) => + val uri = ServerLocator.locate(currentDirectory, config) + System.out.println(uri.toASCIIString) + Some(0) + case None => sys.error(s"${config.app.groupID}-${config.app.main} is not configured as a server.") + } + } + /** Some hackery to allow sys.props to be configured via a file. If this launch config has + * a valid file configured, we load the properties and and apply them to this jvm. + */ + def updateProperties(config: LaunchConfiguration): Unit = { + config.serverConfig match { + case Some(config) => + config.jvmPropsFile match { + case Some(file) if file.exists => + try setSystemProperties(readProperties(file)) + catch { + case e: Exception => throw new RuntimeException(s"Unable to load server properties file: ${file}", e) + } + case _ => + } + case None => + } + } + /** Parses the configuration *and* runs the initialization code that will remove variable references. */ def parseAndInitializeConfig(configLocation: URL, currentDirectory: File): LaunchConfiguration = { @@ -84,6 +128,10 @@ object Launch Thread.currentThread.setContextClassLoader(loader) try { eval } finally { Thread.currentThread.setContextClassLoader(oldLoader) } } + + // Cache of classes for lookup later. 
+ val ServerMainClass = classOf[xsbti.ServerMain] + val AppMainClass = classOf[xsbti.AppMain] } final class RunConfiguration(val scalaVersion: Option[String], val app: xsbti.ApplicationID, val workingDirectory: File, val arguments: List[String]) @@ -240,27 +288,32 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i (scalaHome, libDirectory) } - def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider = new xsbti.AppProvider - { + def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider = + new xsbti.AppProvider { + import Launch.{ServerMainClass,AppMainClass} val scalaProvider = scalaProvider0 val id = appID def mainClasspath = app.fullClasspath lazy val loader = app.createLoader(scalaProvider.loader) + // TODO - For some reason we can't call this from vanilla scala. We get a + // no such method exception UNLESS we're in the same project. lazy val entryPoint: Class[T] forSome { type T } = { val c = Class.forName(id.mainClass, true, loader) if(classOf[xsbti.AppMain].isAssignableFrom(c)) c else if(PlainApplication.isPlainApplication(c)) c - else sys.error(s"Class: ${c} is not an instance of xsbti.AppMain nor does it have one of these static methods:\n"+ - " * void main(String[] args)\n * int main(String[] args)\n * xsbti.Exit main(String[] args)") + else if(ServerApplication.isServerApplication(c)) c + else sys.error(s"${c} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have one of these static methods:\n"+ + " * void main(String[] args)\n * int main(String[] args)\n * xsbti.Exit main(String[] args)\n") } // Deprecated API. Remove when we can. 
- def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(classOf[xsbti.AppMain]) + def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(AppMainClass) def newMain(): xsbti.AppMain = { - if(PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint) - else mainClass.newInstance + if(ServerApplication.isServerApplication(entryPoint)) ServerApplication(this) + else if(PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint) + else if(AppMainClass.isAssignableFrom(entryPoint)) mainClass.newInstance + else throw new IncompatibleClassChangeError(s"Main class ${entryPoint.getName} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have a valid `main` method.") } - lazy val components = componentProvider(appHome) } def componentProvider(appHome: File) = new ComponentProvider(appHome, lockBoot) diff --git a/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala b/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala index f8ccd1782..be1f0fc4a 100644 --- a/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala +++ b/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala @@ -9,27 +9,46 @@ import java.net.URL import scala.collection.immutable.List //TODO: use copy constructor, check size change -final case class LaunchConfiguration(scalaVersion: Value[String], ivyConfiguration: IvyOptions, app: Application, boot: BootSetup, logging: Logging, appProperties: List[AppProperty]) +final case class LaunchConfiguration(scalaVersion: Value[String], ivyConfiguration: IvyOptions, app: Application, boot: BootSetup, logging: Logging, appProperties: List[AppProperty], serverConfig: Option[ServerConfiguration]) { + def isServer: Boolean = serverConfig.isDefined def getScalaVersion = { val sv = Value.get(scalaVersion) if(sv == "auto") None else Some(sv) } - def withScalaVersion(newScalaVersion: String) = LaunchConfiguration(new Explicit(newScalaVersion), 
ivyConfiguration, app, boot, logging, appProperties) - def withApp(app: Application) = LaunchConfiguration(scalaVersion, ivyConfiguration, app, boot, logging, appProperties) - def withAppVersion(newAppVersion: String) = LaunchConfiguration(scalaVersion, ivyConfiguration, app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties) + def withScalaVersion(newScalaVersion: String) = LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration, app, boot, logging, appProperties, serverConfig) + def withApp(app: Application) = LaunchConfiguration(scalaVersion, ivyConfiguration, app, boot, logging, appProperties, serverConfig) + def withAppVersion(newAppVersion: String) = LaunchConfiguration(scalaVersion, ivyConfiguration, app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) // TODO: withExplicit def withVersions(newScalaVersion: String, newAppVersion: String, classifiers0: Classifiers) = - LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration.copy(classifiers = classifiers0), app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties) + LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration.copy(classifiers = classifiers0), app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) - def map(f: File => File) = LaunchConfiguration(scalaVersion, ivyConfiguration.map(f), app.map(f), boot.map(f), logging, appProperties) + def map(f: File => File) = LaunchConfiguration(scalaVersion, ivyConfiguration.map(f), app.map(f), boot.map(f), logging, appProperties, serverConfig.map(_ map f)) +} +object LaunchConfiguration { + // Saves a launch configuration into a file. This is only safe if it is loaded by the *same* launcher version. 
+ def save(config: LaunchConfiguration, f: File): Unit = { + val out = new java.io.ObjectOutputStream(new java.io.FileOutputStream(f)) + try out.writeObject(config) + finally out.close() + } + // Restores a launch configuration from a file. This is only safe if it is loaded by the *same* launcher version. + def restore(url: URL): LaunchConfiguration = { + val in = new java.io.ObjectInputStream(url.openConnection.getInputStream) + try in.readObject.asInstanceOf[LaunchConfiguration] + finally in.close() + } +} +final case class ServerConfiguration(lockFile: File, jvmArgs: Option[File], jvmPropsFile: Option[File]) { + def map(f: File => File) = + ServerConfiguration(f(lockFile), jvmArgs map f, jvmPropsFile map f) } final case class IvyOptions(ivyHome: Option[File], classifiers: Classifiers, repositories: List[Repository.Repository], checksums: List[String], isOverrideRepositories: Boolean) { def map(f: File => File) = IvyOptions(ivyHome.map(f), classifiers, repositories, checksums, isOverrideRepositories) } -sealed trait Value[T] +sealed trait Value[T] extends Serializable final class Explicit[T](val value: T) extends Value[T] { override def toString = value.toString } @@ -130,7 +149,7 @@ sealed trait PropertyInit final class SetProperty(val value: String) extends PropertyInit final class PromptProperty(val label: String, val default: Option[String]) extends PropertyInit -final class Logging(level: LogLevel.Value) +final class Logging(level: LogLevel.Value) extends Serializable { def log(s: => String, at: LogLevel.Value) = if(level.id <= at.id) stream(at).println("[" + at + "] " + s) def debug(s: => String) = log(s, LogLevel.Debug) diff --git a/launch/src/main/scala/xsbt/boot/Pre.scala b/launch/src/main/scala/xsbt/boot/Pre.scala index 05a9585d1..26b83aee9 100644 --- a/launch/src/main/scala/xsbt/boot/Pre.scala +++ b/launch/src/main/scala/xsbt/boot/Pre.scala @@ -70,6 +70,10 @@ object Pre classes.toList.filter(classMissing) } def toURLs(files: Array[File]): Array[URL] = 
files.map(_.toURI.toURL) + def toFile(url: URL): File = + try { new File(url.toURI) } + catch { case _: java.net.URISyntaxException => new File(url.getPath) } + def delete(f: File) { @@ -82,4 +86,25 @@ object Pre } final val isWindows: Boolean = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows") final val isCygwin: Boolean = isWindows && java.lang.Boolean.getBoolean("sbt.cygwin") + + import java.util.Properties + import java.io.{FileInputStream,FileOutputStream} + private[boot] def readProperties(propertiesFile: File) = + { + val properties = new Properties + if(propertiesFile.exists) + Using( new FileInputStream(propertiesFile) )( properties.load ) + properties + } + private[boot] def writeProperties(properties: Properties, file: File, msg: String): Unit = { + file.getParentFile.mkdirs() + Using(new FileOutputStream(file))( out => properties.store(out, msg) ) + } + private[boot] def setSystemProperties(properties: Properties): Unit = { + val nameItr = properties.stringPropertyNames.iterator + while(nameItr.hasNext) { + val propName = nameItr.next + System.setProperty(propName, properties.getProperty(propName)) + } + } } diff --git a/launch/src/main/scala/xsbt/boot/ResolveValues.scala b/launch/src/main/scala/xsbt/boot/ResolveValues.scala index b04cdb949..952d9d970 100644 --- a/launch/src/main/scala/xsbt/boot/ResolveValues.scala +++ b/launch/src/main/scala/xsbt/boot/ResolveValues.scala @@ -12,16 +12,9 @@ object ResolveValues def apply(conf: LaunchConfiguration): LaunchConfiguration = (new ResolveValues(conf))() private def trim(s: String) = if(s eq null) None else notEmpty(s.trim) private def notEmpty(s: String) = if(isEmpty(s)) None else Some(s) - private[boot] def readProperties(propertiesFile: File) = - { - val properties = new Properties - if(propertiesFile.exists) - Using( new FileInputStream(propertiesFile) )( properties.load ) - properties - } } -import ResolveValues.{readProperties, trim} +import ResolveValues.{trim} final class 
ResolveValues(conf: LaunchConfiguration) { private def propertiesFile = conf.boot.properties diff --git a/launch/src/main/scala/xsbt/boot/ServerApplication.scala b/launch/src/main/scala/xsbt/boot/ServerApplication.scala new file mode 100644 index 000000000..3f592b151 --- /dev/null +++ b/launch/src/main/scala/xsbt/boot/ServerApplication.scala @@ -0,0 +1,200 @@ +package xsbt +package boot + +import java.io.File +import scala.util.control.NonFatal +import java.net.URI +import java.io.IOException +import Pre._ +import scala.annotation.tailrec + +/** A wrapper around 'raw' static methods to meet the sbt application interface. */ +class ServerApplication private (provider: xsbti.AppProvider) extends xsbti.AppMain { + import ServerApplication._ + + override def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = { + val serverMain = provider.entryPoint.asSubclass(ServerMainClass).newInstance + val server = serverMain.start(configuration) + System.out.println(s"${SERVER_SYNCH_TEXT}${server.uri}") + server.awaitTermination() + } +} +/** An object that lets us detect compatible "plain" applications and launch them reflectively. */ +object ServerApplication { + val SERVER_SYNCH_TEXT = "[SERVER-URI]" + val ServerMainClass = classOf[xsbti.ServerMain] + // TODO - We should also adapt friendly static methods into servers, perhaps... + // We could even structurally type things that have a uri + awaitTermination method... + def isServerApplication(clazz: Class[_]): Boolean = + ServerMainClass.isAssignableFrom(clazz) + def apply(provider: xsbti.AppProvider): xsbti.AppMain = + new ServerApplication(provider) + +} +object ServerLocator { + // TODO - Probably want to drop this to reduce classfile size + private def locked[U](file: File)(f: => U): U = { + Locks(file, new java.util.concurrent.Callable[U] { + def call(): U = f + }) + } + // We use the lock file they give us to write the server info. 
However, + // it seems we cannot both use the server info file for locking *and* + // read from it successfully. Locking seems to blank the file. SO, we create + // another file near the info file to lock.a + def makeLockFile(f: File): File = + new File(f.getParentFile, s"${f.getName}.lock") + // Launch the process and read the port... + def locate(currentDirectory: File, config: LaunchConfiguration): URI = + config.serverConfig match { + case None => sys.error("No server lock file configured. Cannot locate server.") + case Some(sc) => locked(makeLockFile(sc.lockFile)) { + readProperties(sc.lockFile) match { + case Some(uri) if isReachable(uri) => uri + case _ => + val uri = ServerLauncher.startServer(currentDirectory, config) + writeProperties(sc.lockFile, uri) + uri + } + } + } + + private val SERVER_URI_PROPERTY = "server.uri" + def readProperties(f: File): Option[java.net.URI] = { + try { + val props = Pre.readProperties(f) + props.getProperty(SERVER_URI_PROPERTY) match { + case null => None + case uri => Some(new java.net.URI(uri)) + } + } catch { + case e: IOException => None + } + } + def writeProperties(f: File, uri: URI): Unit = { + val props = new java.util.Properties + props.setProperty(SERVER_URI_PROPERTY, uri.toASCIIString) + val output = new java.io.FileOutputStream(f) + val df = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mmZ") + df.setTimeZone(java.util.TimeZone.getTimeZone("UTC")) + Pre.writeProperties(props, f, s"Server Startup at ${df.format(new java.util.Date)}") + } + + def isReachable(uri: java.net.URI): Boolean = + try { + // TODO - For now we assume if we can connect, it means + // that the server is working... + val socket = new java.net.Socket(uri.getHost, uri.getPort) + try socket.isConnected + finally socket.close() + } catch { + case e: IOException => false + } +} +/** A helper class that dumps incoming values into a print stream. 
*/ +class StreamDumper(in: java.io.BufferedReader, out: java.io.PrintStream) extends Thread { + // Don't block the application for this thread. + setDaemon(true) + private val running = new java.util.concurrent.atomic.AtomicBoolean(true) + override def run(): Unit = { + def read(): Unit = if(running.get) in.readLine match { + case null => () + case line => + out.println(line) + read() + } + read() + out.close() + } + + def close(): Unit = running.set(false) +} +object ServerLauncher { + import ServerApplication.SERVER_SYNCH_TEXT + def startServer(currentDirectory: File, config: LaunchConfiguration): URI = { + val serverConfig = config.serverConfig match { + case Some(c) => c + case None => throw new RuntimeException("Logic Failure: Attempting to start a server that isn't configured to be a server. Please report a bug.") + } + val launchConfig = java.io.File.createTempFile("sbtlaunch", "config") + launchConfig.deleteOnExit() + LaunchConfiguration.save(config, launchConfig) + val jvmArgs: List[String] = serverConfig.jvmArgs map readLines match { + case Some(args) => args + case None => Nil + } + val cmd: List[String] = + ("java" :: jvmArgs) ++ + ("-jar" :: defaultLauncherLookup.getCanonicalPath :: s"@load:${launchConfig.toURI.toURL.toString}" :: Nil) + launchProcessAndGetUri(cmd, currentDirectory) + } + + // Here we try to isolate all the stupidity of dealing with Java processes. + def launchProcessAndGetUri(cmd: List[String], cwd: File): URI = { + // TODO - Handle windows path stupidity in arguments. + val pb = new java.lang.ProcessBuilder() + pb.command(cmd:_*) + pb.directory(cwd) + val process = pb.start() + // First we need to grab all the input streams, and close the ones we don't care about. + process.getOutputStream.close() + val stderr = process.getErrorStream + val stdout = process.getInputStream + // Now we start dumping out errors. 
+ val errorDumper = new StreamDumper(new java.io.BufferedReader(new java.io.InputStreamReader(stderr)), System.err) + errorDumper.start() + // Now we look for the URI synch value, and then make sure we close the output files. + try readUntilSynch(new java.io.BufferedReader(new java.io.InputStreamReader(stdout))) match { + case Some(uri) => uri + case _ => sys.error("Failed to start server!") + } finally { + errorDumper.close() + stdout.close() + stderr.close() + } + } + + object ServerUriLine { + def unapply(in: String): Option[URI] = + if(in startsWith SERVER_SYNCH_TEXT) { + Some(new URI(in.substring(SERVER_SYNCH_TEXT.size))) + } else None + } + /** Reads an input steam until it hits the server synch text and server URI. */ + def readUntilSynch(in: java.io.BufferedReader): Option[URI] = { + @tailrec + def read(): Option[URI] = in.readLine match { + case null => None + case ServerUriLine(uri) => Some(uri) + case line => read() + } + try read() + finally in.close() + } + /** Reads all the lines in a file. If it doesn't exist, returns an empty list. Forces UTF-8 strings. 
*/ + def readLines(f: File): List[String] = + if(!f.exists) Nil else { + val reader = new java.io.BufferedReader(new java.io.InputStreamReader(new java.io.FileInputStream(f), "UTF-8")) + @tailrec + def read(current: List[String]): List[String] = + reader.readLine match { + case null => current.reverse + case line => read(line :: current) + } + try read(Nil) + finally reader.close() + } + + def defaultLauncherLookup: File = + try { + val classInLauncher = classOf[AppConfiguration] + val fileOpt = for { + domain <- Option(classInLauncher.getProtectionDomain) + source <- Option(domain.getCodeSource) + location = source.getLocation + } yield toFile(location) + fileOpt.getOrElse(throw new RuntimeException("Could not inspect protection domain or code source")) + } catch { + case e: Throwable => throw new RuntimeException("Unable to find sbt-launch.jar.", e) + } +} \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Update.scala b/launch/src/main/scala/xsbt/boot/Update.scala index 92e1bec92..8d1c2e206 100644 --- a/launch/src/main/scala/xsbt/boot/Update.scala +++ b/launch/src/main/scala/xsbt/boot/Update.scala @@ -55,7 +55,7 @@ final class Update(config: UpdateConfiguration) val optionProps = Option(System.getProperty("sbt.boot.credentials")) orElse Option(System.getenv("SBT_CREDENTIALS")) map ( path => - ResolveValues.readProperties(new File(path)) + Pre.readProperties(new File(path)) ) optionProps match { case Some(props) => extractCredentials("realm","host","user","password")(props) diff --git a/launch/src/test/scala/ServerLocatorTest.scala b/launch/src/test/scala/ServerLocatorTest.scala new file mode 100644 index 000000000..7e0b30c36 --- /dev/null +++ b/launch/src/test/scala/ServerLocatorTest.scala @@ -0,0 +1,53 @@ +package xsbt.boot + +import java.io.{File,InputStream} +import java.net.URL +import java.util.Properties +import xsbti._ +import org.specs2._ +import mutable.Specification +import LaunchTest._ +import sbt.IO.{createDirectory, 
touch,withTemporaryDirectory}
+import java.net.URI
+
+object ServerLocatorTest extends Specification
+{
+  "ServerLocator" should {
+    // TODO - Maybe use scalacheck to randomly generate URIs
+    "read and write server URI properties" in {
+      withTemporaryDirectory { dir =>
+        val propFile = new File(dir, "server.properties")
+        val expected = new java.net.URI("http://localhost:8080")
+        ServerLocator.writeProperties(propFile, expected)
+        ServerLocator.readProperties(propFile) must equalTo(Some(expected))
+      }
+    }
+    "detect listening ports" in {
+      val serverSocket = new java.net.ServerSocket(0)
+      object serverThread extends Thread {
+        override def run(): Unit = {
+          // Accept one connection.
+          val result = serverSocket.accept()
+          result.close()
+          serverSocket.close()
+        }
+      }
+      serverThread.start()
+      val uri = new java.net.URI(s"http://${serverSocket.getInetAddress.getHostAddress}:${serverSocket.getLocalPort}")
+      ServerLocator.isReachable(uri) must beTrue
+    }
+  }
+  "ServerLauncher" should {
+    "detect start URI from reader" in {
+      val expected = new java.net.URI("http://localhost:8080")
+      val input = s"""|Some random text
+                      |to start the server
+                      |${ServerApplication.SERVER_SYNCH_TEXT}${expected.toASCIIString}
+                      |Some more output.""".stripMargin
+      val inputStream = new java.io.BufferedReader(new java.io.StringReader(input))
+      val result = try ServerLauncher.readUntilSynch(inputStream)
+                   finally inputStream.close()
+      result must equalTo(Some(expected))
+    }
+  }
+}
\ No newline at end of file
diff --git a/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala b/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala
new file mode 100644
index 000000000..930e565a9
--- /dev/null
+++ b/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala
@@ -0,0 +1,74 @@
+/** These are packaged and published locally and the resulting artifact is used to test the launcher.*/
+package xsbt.boot.test
+
+import java.net.Socket
+import java.net.SocketTimeoutException
+
+class 
EchoServer extends xsbti.ServerMain +{ + def start(configuration: xsbti.AppConfiguration): xsbti.Server = + { + object server extends xsbti.Server { + // TODO - Start a server. + val serverSocket = new java.net.ServerSocket(0) + val port = serverSocket.getLocalPort + val addr = serverSocket.getInetAddress.getHostAddress + override val uri =new java.net.URI(s"http://${addr}:${port}") + // Check for stop every second. + serverSocket.setSoTimeout(1000) + object serverThread extends Thread { + private val running = new java.util.concurrent.atomic.AtomicBoolean(true) + override def run(): Unit = { + while(running.get) try { + val clientSocket = serverSocket.accept() + // Handle client connections + object clientSocketThread extends Thread { + override def run(): Unit = { + echoTo(clientSocket) + } + } + clientSocketThread.start() + } catch { + case e: SocketTimeoutException => // Ignore + } + } + // Simple mechanism to dump input to output. + private def echoTo(socket: Socket): Unit = { + val input = new java.io.BufferedReader(new java.io.InputStreamReader(socket.getInputStream)) + val output = new java.io.BufferedWriter(new java.io.OutputStreamWriter(socket.getOutputStream)) + import scala.util.control.Breaks._ + try { + // Lame way to break out. 
+ breakable { + def read(): Unit = input.readLine match { + case null => () + case "kill" => + running.set(false) + serverSocket.close() + break() + case line => + output.write(line) + output.flush() + read() + } + read() + } + } finally { + output.close() + input.close() + socket.close() + } + } + } + // Start the thread immediately + serverThread.start() + override def awaitTermination(): xsbti.MainResult = { + serverThread.join() + new Exit(0) + } + } + server + } + + +} \ No newline at end of file diff --git a/src/sphinx/Detailed-Topics/Advanced-Index.rst b/src/sphinx/Detailed-Topics/Advanced-Index.rst index 28928ed7e..884a96292 100644 --- a/src/sphinx/Detailed-Topics/Advanced-Index.rst +++ b/src/sphinx/Detailed-Topics/Advanced-Index.rst @@ -9,7 +9,6 @@ Before reading anything in here, you will need the information in the .. toctree:: :maxdepth: 2 - Launcher Scripts TaskInputs Understanding-incremental-recompilation diff --git a/src/sphinx/Detailed-Topics/Launcher.rst b/src/sphinx/Detailed-Topics/Launcher.rst index 6573f2348..eced0102b 100644 --- a/src/sphinx/Detailed-Topics/Launcher.rst +++ b/src/sphinx/Detailed-Topics/Launcher.rst @@ -1,387 +1,5 @@ -====================== -Launcher Specification -====================== +============ +Sbt Launcher +============ -The sbt launcher component is a self-contained jar that boots a Scala -application without Scala or the application already existing on the -system. The only prerequisites are the launcher jar itself, an optional -configuration file, and a java runtime version 1.6 or greater. - -Overview -======== - -A user downloads the launcher jar and creates a script to run it. In -this documentation, the script will be assumed to be called `launch`. 
-For unix, the script would look like: -`java -jar sbt-launcher.jar "$@"` - -The user then downloads the configuration file for the application (call -it `my.app.configuration`) and creates a script to launch it (call it -`myapp`): `launch @my.app.configuration "$@"` - -The user can then launch the application using `myapp arg1 arg2 ...` - -Like the launcher used to distribute `sbt`, the downloaded launcher -jar will retrieve Scala and the application according to the provided -configuration file. The versions may be fixed or read from a different -configuration file (the location of which is also configurable). The -location to which the Scala and application jars are downloaded is -configurable as well. The repositories searched are configurable. -Optional initialization of a properties file on launch is configurable. - -Once the launcher has downloaded the necessary jars, it loads the -application and calls its entry point. The application is passed -information about how it was called: command line arguments, current -working directory, Scala version, and application ID (organization, -name, version). In addition, the application can ask the launcher to -perform operations such as obtaining the Scala jars and a -`ClassLoader` for any version of Scala retrievable from the -repositories specified in the configuration file. It can request that -other applications be downloaded and run. When the application -completes, it can tell the launcher to exit with a specific exit code or -to reload the application with a different version of Scala, a different -version of the application, or different arguments. - -There are some other options for setup, such as putting the -configuration file inside the launcher jar and distributing that as a -single download. The rest of this documentation describes the details of -configuring, writing, distributing, and running the application. 
- -Configuration -------------- - -The launcher may be configured in one of the following ways in -increasing order of precedence: - -- Replace the `/sbt/sbt.boot.properties` file in the jar -- Put a configuration file named `sbt.boot.properties` on the - classpath. Put it in the classpath root without the `/sbt` prefix. -- Specify the location of an alternate configuration on the command - line, either as a path or an absolute URI. This can be done by - either specifying the location as the system property - `sbt.boot.properties` or as the first argument to the launcher - prefixed by `'@'`. The system property has lower precedence. - Resolution of a relative path is first attempted against the current - working directory, then against the user's home directory, and then - against the directory containing the launcher jar. An error is - generated if none of these attempts succeed. - -Syntax -~~~~~~ - -The configuration file is line-based, read as UTF-8 encoded, and defined -by the following grammar. `'nl'` is a newline or end of file and -`'text'` is plain text without newlines or the surrounding delimiters -(such as parentheses or square brackets): - -.. 
productionlist:: - configuration: `scala` `app` `repositories` `boot` `log` `appProperties` - scala: "[" "scala" "]" `nl` `version` `nl` `classifiers` `nl` - app: "[" "app" "]" `nl` `org` `nl` `name` `nl` `version` `nl` `components` `nl` `class` `nl` `crossVersioned` `nl` `resources` `nl` `classifiers` `nl` - repositories: "[" "repositories" "]" `nl` (`repository` `nl`)* - boot: "[" "boot" "]" `nl` `directory` `nl` `bootProperties` `nl` `search` `nl` `promptCreate` `nl` `promptFill` `nl` `quickOption` `nl` - log: "["' "log" "]" `nl` `logLevel` `nl` - appProperties: "[" "app-properties" "]" nl (property nl)* - ivy: "[" "ivy" "]" `nl` `homeDirectory` `nl` `checksums` `nl` `overrideRepos` `nl` `repoConfig` `nl` - directory: "directory" ":" `path` - bootProperties: "properties" ":" `path` - search: "search" ":" ("none" | "nearest" | "root-first" | "only" ) ("," `path`)* - logLevel: "level" ":" ("debug" | "info" | "warn" | "error") - promptCreate: "prompt-create" ":" `label` - promptFill: "prompt-fill" ":" `boolean` - quickOption: "quick-option" ":" `boolean` - version: "version" ":" `versionSpecification` - versionSpecification: `readProperty` | `fixedVersion` - readProperty: "read" "(" `propertyName` ")" "[" `default` "]" - fixedVersion: text - classifiers: "classifiers" ":" text ("," text)* - homeDirectory: "ivy-home" ":" `path` - checksums: "checksums" ":" `checksum` ("," `checksum`)* - overrideRepos: "override-build-repos" ":" `boolean` - repoConfig: "repository-config" ":" `path` - org: "org" ":" text - name: "name" ":" text - class: "class" ":" text - components: "components" ":" `component` ("," `component`)* - crossVersioned: "cross-versioned" ":" ("true" | "false" | "none" | "binary" | "full") - resources: "resources" ":" `path` ("," `path`)* - repository: ( `predefinedRepository` | `customRepository` ) `nl` - predefinedRepository: "local" | "maven-local" | "maven-central" - customRepository: `label` ":" `url` [ ["," `ivyPattern`] ["," `artifactPattern`] [", 
mavenCompatible"] [", bootOnly"]] - property: `label` ":" `propertyDefinition` ("," `propertyDefinition`)* - propertyDefinition: `mode` "=" (`set` | `prompt`) - mode: "quick" | "new" | "fill" - set: "set" "(" value ")" - prompt: "prompt" "(" `label` ")" ("[" `default` "]")? - boolean: "true" | "false" - nl: "\r\n" | "\n" | "\r" - path: text - propertyName: text - label: text - default: text - checksum: text - ivyPattern: text - artifactPattern: text - url: text - component: text - -In addition to the grammar specified here, property values may include -variable substitutions. A variable substitution has one of these forms: - -- `${variable.name}` -- `${variable.name-default}` - -where `variable.name` is the name of a system property. If a system -property by that name exists, the value is substituted. If it does not -exists and a default is specified, the default is substituted after -recursively substituting variables in it. If the system property does -not exist and no default is specified, the original string is not -substituted. - -Example -~~~~~~~ - -The default configuration file for sbt looks like: - -.. 
parsed-literal:: - - [scala] - version: ${sbt.scala.version-auto} - - [app] - org: ${sbt.organization-org.scala-sbt} - name: sbt - version: ${sbt.version-read(sbt.version)[\ |release|\ ]} - class: ${sbt.main.class-sbt.xMain} - components: xsbti,extra - cross-versioned: ${sbt.cross.versioned-false} - - [repositories] - local - typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - maven-central - sonatype-snapshots: https://oss.sonatype.org/content/repositories/snapshots - - [boot] - directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/} - - [ivy] - ivy-home: ${sbt.ivy.home-${user.home}/.ivy2/} - checksums: ${sbt.checksums-sha1,md5} - override-build-repos: ${sbt.override.build.repos-false} - repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories} - -Semantics -~~~~~~~~~ - -The `scala.version` property specifies the version of Scala used to -run the application. If the application is not cross-built, this may be -set to `auto` and it will be auto-detected from the application's -dependencies. If specified, the `scala.classifiers` property defines -classifiers, such as 'sources', of extra Scala artifacts to retrieve. - -The `app.org`, `app.name`, and `app.version` properties specify -the organization, module ID, and version of the application, -respectively. These are used to resolve and retrieve the application -from the repositories listed in `[repositories]`. If -`app.cross-versioned` is `binary`, the resolved module ID is -`{app.name+'_'+CrossVersion.binaryScalaVersion(scala.version)}`. -If `app.cross-versioned` is `true` or `full`, the resolved module ID is -`{app.name+'_'+scala.version}`. The `scala.version` property must be -specified and cannot be `auto` when cross-versioned. The paths given -in `app.resources` are added to the application's classpath. 
If the -path is relative, it is resolved against the application's working -directory. If specified, the `app.classifiers` property defines -classifiers, like 'sources', of extra artifacts to retrieve for the -application. - -Jars are retrieved to the directory given by `boot.directory`. By -default, this is an absolute path that is shared by all launched -instances on the machine. If multiple versions access it simultaneously. -, you might see messages like: - -.. code-block:: console - - Waiting for lock on to be available... - -This boot directory may be relative to the current directory instead. In -this case, the launched application will have a separate boot directory -for each directory it is launched in. - -The `boot.properties` property specifies the location of the -properties file to use if `app.version` or `scala.version` is -specified as `read`. The `prompt-create`, `prompt-fill`, and -`quick-option` properties together with the property definitions in -`[app.properties]` can be used to initialize the `boot.properties` -file. - -The app.class property specifies the name of the entry point to the -application. An application entry point must be a public class with a -no-argument constructor that implements `xsbti.AppMain`. The -`AppMain` interface specifies the entry method signature 'run'. The -run method is passed an instance of AppConfiguration, which provides -access to the startup environment. `AppConfiguration` also provides an -interface to retrieve other versions of Scala or other applications. -Finally, the return type of the run method is `xsbti.MainResult`, -which has two subtypes: `xsbti.Reboot` and `xsbti.Exit`. To exit -with a specific code, return an instance of `xsbti.Exit` with the -requested code. To restart the application, return an instance of -Reboot. You can change some aspects of the configuration with a reboot, -such as the version of Scala, the application ID, and the arguments. 
- -The `ivy.cache-directory` property provides an alternative location -for the Ivy cache used by the launcher. This does not automatically set -the Ivy cache for the application, but the application is provided this -location through the AppConfiguration instance. The `checksums` -property selects the checksum algorithms (sha1 or md5) that are used to -verify artifacts downloaded by the launcher. `override-build-repos` is -a flag that can inform the application that the repositories configured -for the launcher should be used in the application. If -`repository-config` is defined, the file it specifies should contain a -`[repositories]` section that is used in place of the section in the -original configuration file. - -Execution ---------- - -On startup, the launcher searches for its configuration in the order -described in the Configuration section and then parses it. If either the -Scala version or the application version are specified as 'read', the -launcher determines them in the following manner. The file given by the -'boot.properties' property is read as a Java properties file to obtain -the version. The expected property names are `${app.name}.version` for -the application version (where `${app.name}` is replaced with the -value of the `app.name` property from the boot configuration file) and -`scala.version` for the Scala version. If the properties file does not -exist, the default value provided is used. If no default was provided, -an error is generated. - -Once the final configuration is resolved, the launcher proceeds to -obtain the necessary jars to launch the application. The -`boot.directory` property is used as a base directory to retrieve jars -to. Locking is done on the directory, so it can be shared system-wide. -The launcher retrieves the requested version of Scala to - -.. 
code-block:: console - - ${boot.directory}/${scala.version}/lib/ - -If this directory already exists, the launcher takes a shortcut for -startup performance and assumes that the jars have already been -downloaded. If the directory does not exist, the launcher uses Apache -Ivy to resolve and retrieve the jars. A similar process occurs for the -application itself. It and its dependencies are retrieved to - -.. code-block:: console - - ${boot.directory}/${scala.version}/${app.org}/${app.name}/. - -Once all required code is downloaded, the class loaders are set up. The -launcher creates a class loader for the requested version of Scala. It -then creates a child class loader containing the jars for the requested -'app.components' and with the paths specified in `app.resources`. An -application that does not use components will have all of its jars in -this class loader. - -The main class for the application is then instantiated. It must be a -public class with a public no-argument constructor and must conform to -xsbti.AppMain. The `run` method is invoked and execution passes to the -application. The argument to the 'run' method provides configuration -information and a callback to obtain a class loader for any version of -Scala that can be obtained from a repository in [repositories]. The -return value of the run method determines what is done after the -application executes. It can specify that the launcher should restart -the application or that it should exit with the provided exit code. - -Creating a Launched Application -------------------------------- - -This section shows how to make an application that is launched by this -launcher. First, declare a dependency on the launcher-interface. Do not -declare a dependency on the launcher itself. The launcher interface -consists strictly of Java interfaces in order to avoid binary -incompatibility between the version of Scala used to compile the -launcher and the version used to compile your application. 
The launcher -interface class will be provided by the launcher, so it is only a -compile-time dependency. If you are building with sbt, your dependency -definition would be: - -.. parsed-literal:: - - libraryDependencies += "org.scala-sbt" % "launcher-interface" % "|release|" % "provided" - - resolvers += sbtResolver.value - -Make the entry point to your class implement 'xsbti.AppMain'. An example -that uses some of the information: - -.. code-block:: scala - - package xsbt.test - class Main extends xsbti.AppMain - { - def run(configuration: xsbti.AppConfiguration) = - { - // get the version of Scala used to launch the application - val scalaVersion = configuration.provider.scalaProvider.version - - // Print a message and the arguments to the application - println("Hello world! Running Scala " + scalaVersion) - configuration.arguments.foreach(println) - - // demonstrate the ability to reboot the application into different versions of Scala - // and how to return the code to exit with - scalaVersion match - { - case "2.9.3" => - new xsbti.Reboot { - def arguments = configuration.arguments - def baseDirectory = configuration.baseDirectory - def scalaVersion = "2.10.2 - def app = configuration.provider.id - } - case "2.10.2" => new Exit(1) - case _ => new Exit(0) - } - } - class Exit(val code: Int) extends xsbti.Exit - } - -Next, define a configuration file for the launcher. For the above class, -it might look like: - -.. parsed-literal:: - - [scala] - version: |scalaRelease| - [app] - org: org.scala-sbt - name: xsbt-test - version: |release| - class: xsbt.test.Main - cross-versioned: binary - [repositories] - local - maven-central - [boot] - directory: ${user.home}/.myapp/boot - -Then, `publishLocal` or `+publishLocal` the application to make it -available. - -Running an Application ----------------------- - -As mentioned above, there are a few options to actually run the -application. The first involves providing a modified jar for download. 
-
The second two require providing a configuration file for download.

- Replace the /sbt/sbt.boot.properties file in the launcher jar and
  distribute the modified jar. The user would need a script to run
  `java -jar your-launcher.jar arg1 arg2 ...`.
- The user downloads the launcher jar and you provide the configuration
  file.

  - The user needs to run `java -Dsbt.boot.properties=your.boot.properties -jar launcher.jar`.
  - The user already has a script to run the launcher (call it
    'launch'). The user needs to run `launch @your.boot.properties your-arg-1 your-arg-2`
+This documentation has been moved to :doc:`The Launcher section </Launcher/index>`.
diff --git a/src/sphinx/Detailed-Topics/index.rst b/src/sphinx/Detailed-Topics/index.rst
index f7bebe4fc..7b551dd7d 100644
--- a/src/sphinx/Detailed-Topics/index.rst
+++ b/src/sphinx/Detailed-Topics/index.rst
@@ -19,3 +19,4 @@ Other resources include the :doc:`Examples ` and
    Tasks-and-Commands
    Plugins-and-Best-Practices
    Advanced-Index
+   /Launcher/index
diff --git a/src/sphinx/Launcher/Architecture.rst b/src/sphinx/Launcher/Architecture.rst
new file mode 100644
index 000000000..2e62f84b7
--- /dev/null
+++ b/src/sphinx/Launcher/Architecture.rst
@@ -0,0 +1,108 @@
+=========================
+Sbt Launcher Architecture
+=========================
+
+The sbt launcher is a mechanism whereby modules can be loaded from ivy and
+executed within a jvm. It abstracts the mechanism of grabbing and caching jars,
+allowing users to focus on what application they want and control its versions.
+
+The launcher's primary goal is to take configuration for applications, mostly
+just ivy coordinates and a main class, and start the application. The
+launcher resolves the ivy module, caches the required runtime jars and
+starts the application.
+
+The sbt launcher provides the application with the means to load a different
+application when it completes, exit normally, or load additional applications
+from inside another.
+
+The sbt launcher provides these core functions:
+
+* Module Resolution
+* Classloader Caching and Isolation
+* File Locking
+* Service Discovery and Isolation
+
+Module Resolution
+~~~~~~~~~~~~~~~~~
+The primary purpose of the sbt launcher is to resolve applications and run them.
+This is done through the `[app]` configuration section. See :doc:`Configuration`
+for more information on how to configure module resolution.
+
+Module resolution is performed using the Ivy dependency management library. This
+library supports loading artifacts from Maven repositories as well.
+
+Classloader Caching and Isolation
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The sbt launcher's classloading structure is different than just starting an
+application in the standard java mechanism. Every application loaded
+by the launcher is given its own classloader. This classloader is a child
+of the Scala classloader used by the application. The Scala classloader can see
+all of the `xsbti.*` classes from the launcher itself.
+
+Here's an example classloader layout from an sbt launched application.
+
+.. image:: classloaders.png
+
+In this diagram, three different applications were loaded. Two of these use the
+same version of Scala (2.9.2). In this case, sbt can share the same classloader
+for these applications. This has the benefit that any JIT optimisations performed
+on scala classes can be re-used between applications thanks to the shared
+classloader.
+
+
+Caching
+~~~~~~~
+The sbt launcher creates a secondary cache on top of Ivy's own cache. This helps
+isolate applications from errors resulting from unstable revisions, like
+`-SNAPSHOT`. For any launched application, the launcher creates a directory
+to store all its jars. Here's an example layout.
+
+.. 
parsed-literal:: + + ${boot.directory}/ + scala_2.9.2/ + lib/ + + /// + + /// + + scala_2.10.3/ + lib/ + + /// + / + +Locking +~~~~~~~ +In addition to providing a secondary cache, the launcher also provides a mechanism +of safely doing file-based locks. This is used in two places directly by the +launcher: + +1. Locking the boot directory. +2. Ensuring located servers have at most one active process. + +This feature requires a filesystem which supports locking. It is exposed via the +`xsbti.GlobalLock` interface. + +*Note: This is both a thread and file lock. Not only are we limiting access to a single process, but also a single thread within that process.* + +Service Discovery and Isolation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The launcher also provides a mechanism to ensure that only one instance of a +server is running, while dynamically starting it when a client requests. This +is done through the `--locate` flag on the launcher. When the launcher is +started with the `--locate` flag it will do the following: + +1. Lock on the configured server lock file. +2. Read the server properties to find the URI of the previous server. +3. If the port is still listening to connection requests, print this URI + on the command line. +4. If the port is not listening, start a new server and write the URI + on the command line. +5. Release all locks and shutdown. + +The configured `server.lock` file is thus used to prevent multiple servers from +running. Sbt itself uses this to prevent more than one server running on any +given project directory by configuring `server.lock` to be +`${user.dir}/.sbtserver`. 
diff --git a/src/sphinx/Launcher/Configuration.rst b/src/sphinx/Launcher/Configuration.rst new file mode 100644 index 000000000..b110d5411 --- /dev/null +++ b/src/sphinx/Launcher/Configuration.rst @@ -0,0 +1,260 @@ +========================== +Sbt Launcher Configuration +========================== + +The launcher may be configured in one of the following ways in +increasing order of precedence: + +- Replace the `/sbt/sbt.boot.properties` file in the launcher jar +- Put a configuration file named `sbt.boot.properties` on the + classpath. Put it in the classpath root without the `/sbt` prefix. +- Specify the location of an alternate configuration on the command + line, either as a path or an absolute URI. This can be done by + either specifying the location as the system property + `sbt.boot.properties` or as the first argument to the launcher + prefixed by `'@'`. The system property has lower precedence. + Resolution of a relative path is first attempted against the current + working directory, then against the user's home directory, and then + against the directory containing the launcher jar. + +An error is generated if none of these attempts succeed. + +Example +~~~~~~~ + +The default configuration file for sbt as an application looks like: + +.. 
parsed-literal::
+
+    [scala]
+    version: ${sbt.scala.version-auto}
+
+    [app]
+    org: ${sbt.organization-org.scala-sbt}
+    name: sbt
+    version: ${sbt.version-read(sbt.version)[\ |release|\ ]}
+    class: ${sbt.main.class-sbt.xMain}
+    components: xsbti,extra
+    cross-versioned: ${sbt.cross.versioned-false}
+
+    [repositories]
+    local
+    typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
+    maven-central
+    sonatype-snapshots: https://oss.sonatype.org/content/repositories/snapshots
+
+    [boot]
+    directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/}
+
+    [ivy]
+    ivy-home: ${sbt.ivy.home-${user.home}/.ivy2/}
+    checksums: ${sbt.checksums-sha1,md5}
+    override-build-repos: ${sbt.override.build.repos-false}
+    repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories}
+
+Let's look at all the launcher configuration sections in detail:
+
+1. Scala Configuration
+----------------------
+The `[scala]` section is used to configure the version of Scala.
+It has two properties:
+
+* `version` - The version of scala an application uses, or `auto` if the
+  application is not cross-versioned.
+* `classifiers` - The (optional) list of additional scala artifacts to resolve,
+  e.g. `sources`.
+
+
+2. Application Identification
+-----------------------------
+The `[app]` section configures how the launcher will look for your application
+using the Ivy dependency manager. It consists of the following properties:
+
+* `org` - The organization associated with the Ivy module.
+  (`groupId` in maven vernacular)
+* `name` - The name of the Ivy module. (`artifactId` in maven vernacular)
+* `version` - The revision of the Ivy module.
+* `class` - The name of the "entry point" into the application. An entry
+  point must be a class which meets one of the following criteria:
+  - Extends the `xsbti.AppMain` interface.
+  - Extends the `xsbti.ServerMain` interfaces.
+  - Contains a method with the signature `static void main(String[])`
+  - Contains a method with the signature `static int main(String[])`
+  - Contains a method with the signature `static xsbti.Exit main(String[])`
+* `components` - An optional list of additional components that Ivy should
+  resolve.
+* `cross-versioned` - An optional string denoting how this application is
+  published.
+  If `app.cross-versioned` is `binary`, the resolved module ID is
+  `{app.name+'_'+CrossVersion.binaryScalaVersion(scala.version)}`.
+  If `app.cross-versioned` is `true` or `full`, the resolved module ID is
+  `{app.name+'_'+scala.version}`. The `scala.version` property must be
+  specified and cannot be `auto` when cross-versioned.
+* `resources` - An optional list of jar files that should be added to
+  the application's classpath.
+* `classifiers` - An optional list of additional classifiers that should be
+  resolved with this application, e.g. `sources`.
+
+3. Repositories Section
+-----------------------
+The `[repositories]` section configures where and how Ivy will look for
+your application. Each line denotes a repository where Ivy will look.
+
+*Note: This section configures the default location where Ivy will look, but
+this can be overridden via user configuration.*
+
+There are several built-in strings that can be used for common repositories:
+
+* `local` - the local ivy repository `~/.ivy2/local`.
+* `maven-local` - The local maven repository `~/.m2/repository`.
+* `maven-central` - The maven central repository `repo1.maven.org`.
+
+Besides built-in repositories, other repositories can be configured using
+the following syntax:
+
+.. parsed-literal::
+    name: url(, pattern)(,descriptorOptional)(,skipConsistencyCheck)
+
+The `name` property is an identifier which Ivy uses to cache modules
+resolved from this location. The `name` should be unique across all
+repositories.
+ +The `url` property is the base `url` where Ivy should look for modules. + +The `pattern` property is an optional specification of *how* Ivy should +look for modules. By default, the launcher assumes repositories are in +the maven style format. + +The `skipConsistencyCheck` string is used to tell ivy not to validate checksums +and signatures of files it resolves. + +4. The Boot section +------------------- +The `[boot]` section is used to configure where the sbt launcher will store +its cache and configuration information. It consists of the following properties: + +* `directory` - The directory defined here is used to store all cached JARs + resolved launcher. +* `properties` - (optional) A properties file to use for any `read` variables. + +5. The Ivy section +------------------ +The `[ivy]` section is used to configure the Ivy dependency manager for +resolving applications. It consists of the following properties: + +* `ivy-home` - The home directory for Ivy. This determines where the + `ivy-local` repository is located, and also where the ivy cache is + stored. Defaults to `~/.ivy2` +* `ivy.cache-directory` - provides an alternative location for the Ivy + cache used by the launcher. This does not automatically set the Ivy + cache for the application, but the application is provided this location + through the AppConfiguration instance. +* `checksums` - The comma-separated list of checksums that Ivy should use + to verify artifacts have correctly resolved, e.g. `md5` or `sha1`. +* `override-build-repos` - If this is set, then the `isOverrideRepositories` + method on `xsbti.Launcher` interface will return its value. The use of this + method is application specific, but in the case of sbt denotes that the + configuration of repositories in the launcher should override those used + by any build. Applications should respect this convention if they can. +* `repository-config` - This specifies a configuration location where + ivy repositories can also be configured. 
If this file exists, then its contents
+  override the `[repositories]` section.
+
+
+6. The Server Section
+---------------------
+When using the `--locate` feature of the launcher, this section configures
+how a server is started. It consists of the following properties:
+
+* `lock` - The file that controls access to the running server. This file
+  will contain the active port used by a server and must be located on
+  a filesystem that supports locking.
+* `jvmargs` - A file that contains line-separated JVM arguments that were
+  used when starting the server.
+* `jvmprops` - The location of a properties file that will define override
+  properties in the server. All properties defined in this file will
+  be set as `-D` java properties.
+
+Variable Substitution
+~~~~~~~~~~~~~~~~~~~~~
+Property values may include variable substitutions. A variable substitution has
+one of these forms:
+
+- `${variable.name}`
+- `${variable.name-default}`
+
+where `variable.name` is the name of a system property. If a system
+property by that name exists, the value is substituted. If it does not
+exist and a default is specified, the default is substituted after
+recursively substituting variables in it. If the system property does
+not exist and no default is specified, the original string is not
+substituted.
+
+There is also a special variable substitution:
+
+- `read(property.name)[default]`
+
+This will look in the file configured by `boot.properties` for a value. If
+there is no `boot.properties` file configured, or the property does not exist,
+then the default value is chosen.
+
+
+
+Syntax
+~~~~~~
+
+The configuration file is line-based, read as UTF-8 encoded, and defined
+by the following grammar. `'nl'` is a newline or end of file and
+`'text'` is plain text without newlines or the surrounding delimiters
+(such as parentheses or square brackets):
+
+.. 
productionlist:: + configuration: `scala` `app` `repositories` `boot` `log` `appProperties` + scala: "[" "scala" "]" `nl` `version` `nl` `classifiers` `nl` + app: "[" "app" "]" `nl` `org` `nl` `name` `nl` `version` `nl` `components` `nl` `class` `nl` `crossVersioned` `nl` `resources` `nl` `classifiers` `nl` + repositories: "[" "repositories" "]" `nl` (`repository` `nl`)* + boot: "[" "boot" "]" `nl` `directory` `nl` `bootProperties` `nl` `search` `nl` `promptCreate` `nl` `promptFill` `nl` `quickOption` `nl` + log: "["' "log" "]" `nl` `logLevel` `nl` + appProperties: "[" "app-properties" "]" nl (property nl)* + ivy: "[" "ivy" "]" `nl` `homeDirectory` `nl` `checksums` `nl` `overrideRepos` `nl` `repoConfig` `nl` + directory: "directory" ":" `path` + bootProperties: "properties" ":" `path` + search: "search" ":" ("none" | "nearest" | "root-first" | "only" ) ("," `path`)* + logLevel: "level" ":" ("debug" | "info" | "warn" | "error") + promptCreate: "prompt-create" ":" `label` + promptFill: "prompt-fill" ":" `boolean` + quickOption: "quick-option" ":" `boolean` + version: "version" ":" `versionSpecification` + versionSpecification: `readProperty` | `fixedVersion` + readProperty: "read" "(" `propertyName` ")" "[" `default` "]" + fixedVersion: text + classifiers: "classifiers" ":" text ("," text)* + homeDirectory: "ivy-home" ":" `path` + checksums: "checksums" ":" `checksum` ("," `checksum`)* + overrideRepos: "override-build-repos" ":" `boolean` + repoConfig: "repository-config" ":" `path` + org: "org" ":" text + name: "name" ":" text + class: "class" ":" text + components: "components" ":" `component` ("," `component`)* + crossVersioned: "cross-versioned" ":" ("true" | "false" | "none" | "binary" | "full") + resources: "resources" ":" `path` ("," `path`)* + repository: ( `predefinedRepository` | `customRepository` ) `nl` + predefinedRepository: "local" | "maven-local" | "maven-central" + customRepository: `label` ":" `url` [ ["," `ivyPattern`] ["," `artifactPattern`] [", 
mavenCompatible"] [", bootOnly"]] + property: `label` ":" `propertyDefinition` ("," `propertyDefinition`)* + propertyDefinition: `mode` "=" (`set` | `prompt`) + mode: "quick" | "new" | "fill" + set: "set" "(" value ")" + prompt: "prompt" "(" `label` ")" ("[" `default` "]")? + boolean: "true" | "false" + nl: "\r\n" | "\n" | "\r" + path: text + propertyName: text + label: text + default: text + checksum: text + ivyPattern: text + artifactPattern: text + url: text + component: text diff --git a/src/sphinx/Launcher/GettingStarted.rst b/src/sphinx/Launcher/GettingStarted.rst new file mode 100644 index 000000000..66b8f6494 --- /dev/null +++ b/src/sphinx/Launcher/GettingStarted.rst @@ -0,0 +1,232 @@ +===================================== +Getting Started with the Sbt Launcher +===================================== + +The sbt launcher component is a self-contained jar that boots a Scala +application or server without Scala or the application already existing +on the system. The only prerequisites are the launcher jar itself, an +optional configuration file, and a java runtime version 1.6 or greater. + +Overview +======== + +A user downloads the launcher jar and creates a script to run it. In +this documentation, the script will be assumed to be called `launch`. +For unix, the script would look like: +`java -jar sbt-launcher.jar "$@"` + +The user can now launch servers and applications which provide sbt +launcher configuration. + +Applications +------------ + +To launch an application, the user then downloads the configuration +file for the application (call it `my.app.configuration`) and creates +a script to launch it (call it `myapp`): `launch @my.app.configuration "$@"` + +The user can then launch the application using `myapp arg1 arg2 ...` + +More on launcher configuration can be found at :doc:`Launcher Configuration ` + + +Servers +------- + +The sbt launcher can be used to launch and discover running servers +on the system. 
The launcher can be used to launch servers similarly to
+applications. However, if desired, the launcher can also be used to
+ensure that only one instance of a server is running at a time. This is done
+by having clients always use the launcher as a *service locator*.
+
+To discover where a server is running (or launch it if it is not running),
+the user downloads the configuration file for the server
+(call it `my.server.configuration`) and creates a script to discover
+the server (call it `find-myserver`): `launch --locate @my.server.properties`.
+
+This command will print out one string, the URI at which to reach the server,
+e.g. `sbt://127.0.0.1:65501`. Clients should use the IP/port to connect
+to the server and initiate their connection.
+
+When using the `locate` feature, the sbt launcher makes the following
+restrictions to servers:
+
+- The Server must have a starting class that extends
+  the `xsbti.ServerMain` class
+- The Server must have an entry point (URI) that clients
+  can use to detect the server
+- The server must have defined a lock file which the launcher can
+  use to ensure that only one instance is running at a time
+- The filesystem on which the lock file resides must support
+  locking.
+- The server must allow the launcher to open a socket against the port
+  without sending any data. This is used to check if a previous
+  server is still alive.
+
+
+Resolving Applications/Servers
+------------------------------
+
+Like the launcher used to distribute `sbt`, the downloaded launcher
+jar will retrieve Scala and the application according to the provided
+configuration file. The versions may be fixed or read from a different
+configuration file (the location of which is also configurable). The
+location to which the Scala and application jars are downloaded is
+configurable as well. The repositories searched are configurable.
+Optional initialization of a properties file on launch is configurable.
+ +Once the launcher has downloaded the necessary jars, it loads the +application/server and calls its entry point. The application is passed +information about how it was called: command line arguments, current +working directory, Scala version, and application ID (organization, +name, version). In addition, the application can ask the launcher to +perform operations such as obtaining the Scala jars and a +`ClassLoader` for any version of Scala retrievable from the +repositories specified in the configuration file. It can request that +other applications be downloaded and run. When the application +completes, it can tell the launcher to exit with a specific exit code or +to reload the application with a different version of Scala, a different +version of the application, or different arguments. + +There are some other options for setup, such as putting the +configuration file inside the launcher jar and distributing that as a +single download. The rest of this documentation describes the details of +configuring, writing, distributing, and running the application. + + +Creating a Launched Application +------------------------------- + +This section shows how to make an application that is launched by this +launcher. First, declare a dependency on the launcher-interface. Do not +declare a dependency on the launcher itself. The launcher interface +consists strictly of Java interfaces in order to avoid binary +incompatibility between the version of Scala used to compile the +launcher and the version used to compile your application. The launcher +interface class will be provided by the launcher, so it is only a +compile-time dependency. If you are building with sbt, your dependency +definition would be: + +.. parsed-literal:: + + libraryDependencies += "org.scala-sbt" % "launcher-interface" % "|release|" % "provided" + + resolvers += sbtResolver.value + +Make the entry point to your class implement 'xsbti.AppMain'. An example +that uses some of the information: + +.. 
code-block:: scala
+
+    package xsbt.test
+    class Main extends xsbti.AppMain
+    {
+        def run(configuration: xsbti.AppConfiguration) =
+        {
+            // get the version of Scala used to launch the application
+            val scalaVersion = configuration.provider.scalaProvider.version
+
+            // Print a message and the arguments to the application
+            println("Hello world! Running Scala " + scalaVersion)
+            configuration.arguments.foreach(println)
+
+            // demonstrate the ability to reboot the application into different versions of Scala
+            // and how to return the code to exit with
+            scalaVersion match
+            {
+                case "2.9.3" =>
+                    new xsbti.Reboot {
+                        def arguments = configuration.arguments
+                        def baseDirectory = configuration.baseDirectory
+                        def scalaVersion = "2.10.2"
+                        def app = configuration.provider.id
+                    }
+                case "2.10.2" => new Exit(1)
+                case _ => new Exit(0)
+            }
+        }
+        class Exit(val code: Int) extends xsbti.Exit
+    }
+
+Next, define a configuration file for the launcher. For the above class,
+it might look like:
+
+.. parsed-literal::
+
+    [scala]
+    version: |scalaRelease|
+    [app]
+    org: org.scala-sbt
+    name: xsbt-test
+    version: |release|
+    class: xsbt.test.Main
+    cross-versioned: binary
+    [repositories]
+    local
+    maven-central
+    [boot]
+    directory: ${user.home}/.myapp/boot
+
+Then, `publishLocal` or `+publishLocal` the application to make it
+available. For more information, please see :doc:`Launcher Configuration <Configuration>`
+
+Running an Application
+----------------------
+
+As mentioned above, there are a few options to actually run the
+application. The first involves providing a modified jar for download.
+The second two require providing a configuration file for download.
+
+- Replace the /sbt/sbt.boot.properties file in the launcher jar and
+  distribute the modified jar. The user would need a script to run
+  `java -jar your-launcher.jar arg1 arg2 ...`.
+- The user downloads the launcher jar and you provide the configuration
+  file.
+ + - The user needs to run `java -Dsbt.boot.properties=your.boot.properties -jar launcher.jar`. + - The user already has a script to run the launcher (call it + 'launch'). The user needs to run `launch @your.boot.properties your-arg-1 your-arg-2` + + +Execution +--------- + +Let's review what's happening when the launcher starts your application. + +On startup, the launcher searches for its configuration and then +parses it. Once the final configuration is resolved, the launcher +proceeds to obtain the necessary jars to launch the application. The +`boot.directory` property is used as a base directory to retrieve jars +to. Locking is done on the directory, so it can be shared system-wide. +The launcher retrieves the requested version of Scala to + +.. code-block:: console + + ${boot.directory}/${scala.version}/lib/ + +If this directory already exists, the launcher takes a shortcut for +startup performance and assumes that the jars have already been +downloaded. If the directory does not exist, the launcher uses Apache +Ivy to resolve and retrieve the jars. A similar process occurs for the +application itself. It and its dependencies are retrieved to + +.. code-block:: console + + ${boot.directory}/${scala.version}/${app.org}/${app.name}/. + +Once all required code is downloaded, the class loaders are set up. The +launcher creates a class loader for the requested version of Scala. It +then creates a child class loader containing the jars for the requested +'app.components' and with the paths specified in `app.resources`. An +application that does not use components will have all of its jars in +this class loader. + +The main class for the application is then instantiated. It must be a +public class with a public no-argument constructor and must conform to +xsbti.AppMain. The `run` method is invoked and execution passes to the +application. 
The argument to the 'run' method provides configuration +information and a callback to obtain a class loader for any version of +Scala that can be obtained from a repository in [repositories]. The +return value of the run method determines what is done after the +application executes. It can specify that the launcher should restart +the application or that it should exit with the provided exit code. diff --git a/src/sphinx/Launcher/classloaders.png b/src/sphinx/Launcher/classloaders.png new file mode 100644 index 0000000000000000000000000000000000000000..6f0c1b003b5a502ba9eab4416dfe003e9c4ad5d7 GIT binary patch literal 22551 zcmeFYXH-+$7caU25mAaq{)&JUdqYrqkBYs3h=52J0qIB&Aqho642E-+0{e z^|4(epAHqD*=PFNtaSHw*{(+lTN+QTlaju*t5f>cZrrYIzR9v1bgNSRt{hjAUU#6Q zA#(EvsqrUj4jK^;-DkbGZXy@HJy!D6p(MAoVz|Mv&dE^gO#hIz1Wu0Pz`QC%cOK0y zi$(tEM!`(2U}ZR%!xfW-m33)SAY1?E`acr*|DyzwsL`S`)HegUF?iACA^LN;BJdMC z7FRg1kCh1XSDU_;x5-QUH9GvOD2^}sssy^jz0AswZU0y@Cq_cKV&W8pwxEY+wp7C4?rIF0y0TJ`9E(Q`NqA{z~-bE zTh9HnJUBgY&+r;A@@)V(%xDXHZS{5DfHy&I6w)9O4|&1oGXnIjV?+4J#MN=j!7I>- zz1cmUBAyqL$ykMSSLbtAe_}<$Gf4A0zXpi7*4Y?}4Kar4g`7*R)rWUooaB5yp}eQA zm&>sWr4M%^MMcO2F_$31(=UT=HVCmanyQt_Z&le`O6FeArab(P;ht=xtS_J{t~u^ z^SA25rxVpY=O?^ugpH;IsJH=80A>;Zgs9BZSlFW48bQ0gbPw0!*QtI!aJvU|x}XTr zC#hAS7!#1OjQrZ&`tcvELI!=YqL<-AgV0J!#O2-A-QVR3Q@5Y1!8+B+?XIt_@u+R# zAcPcq&4@2Vs00^`_7eW-f~rOq)x*#k%zOynmkVX+bcbS*q8WZ(Sgr(;wfEJI0nrgo zoM$#XbUzLz!Cz$&JTRPfbp54OBBTz9W3@Wc4~9;RUYHae>C7UMMH<+aNHG_O>^B7u zscm-s=*B&PRq%OL#MsPXFMf^3rZ)MMP%~;}nuBE)Qt&-1jAcb;=j_l3k+T6@-mQj) zCyqb5hb;k6=;9U*R1y~?sT{@Ub5W%andi(BhO*&9t6Um_&;4nR;hIxgFE!T@{GOjz z(%T#FXC22(x~QFGNN*8Pj6W9A2`mw{52{y6Ume2a>t9rQe<}1Cd-ln;B&p+o=i$eH zeSfiELjcV`(CL5AnzTPWaWw5R3cg;0vvn%aYC%mGlH21fDU5pH9< zgbw_!neou7(m)2h_Zu-5;%Y>k$s)ixcrH|2Ca0a)$2okQ)WT}ac!{mh^%}Hpj}X!) 
zfKvz@)%H!CFH#t8Gnc$<#cjl1;XBeLqGuex`cN8f6++o%%(JOz_6pkvFnBqA0|-i= zY4=FuyZcG%ULH-HPks$_-Ke^O_%_lt&adOhwh*et*J-ED{9amwT{>9(yL_mxjIY+s;f^TTOoakFpqMdXY)Y6k4lo)SRFlp> z<2dQif4<=C{_Y#Z^V^HZP@#JhukEfZ^l;+lt4muO6Lo`APEmAv$IXdH@RRH)F4w|3 zNFHxK^?3bEtn!nQi^;#6vCXps(5ErS-y*}bh#aBdi0xt4=x1ksx1a6n+Hn7&;px93 z|2)pdEt@7E!ara~E#wYwGiKKZwTeJlhxRW}tEY6lQ{0+ExmpY^m6HvXaPf^ngdNTb$$gBSj4$>5U8`JCzs-KS zuSooyGsrw2TF)-IVYJTTLtGGARHDCpses%v=o{4PMwMHD9|2^5<}FFW<4-Td=cylh zm3Bp)qNeAey5)lJA;qs&goKJiw|_V0gcUyfDoMXH#cGl=`*5KDeE3N*2K}dzQmuk~ zOW;7^_nhSb^97XAVy&k!K|L+oucE^}!DfMb&DMCQDL16ZManYEMLGN)%IalC+{Aor zE8D~)_RXt&rCqR{kFUvCSbHru;y2&WFscbvF{%mob1Qg)?%ZNBIHb?LPVmy()W)Oj zjW3eZ6*^lxvvQ7KVm-Y+K=eOr~ZV56ZtgF16xk~ux03i-bf)ZS5J?{qb{3oC& zoqq)9G3vO~o-e8OjHBJx<=T{fh@F#hnm$){A0L)+4ZTvHx_xLHR1zf)f_sJd5!|;W z1yhP@=#$MyFc~eF9A`GJPCG!Y23xQoW|+%&)af&TD{wl=Y6U>SkG|8tYK)&TjcE;2 z0^ZYmbIyvjZo&MZOr3szyJuYq6Wmuk4(`dCy^IHDWzJjfsnOPRRcvZ4(#gy-nREJ9 z+~MjbFB9qua{}ySjK=D;=x(RAFh#{a z0=9wnbI}pJM8q%UlrC1<%Z?&`U;OB%`DsefEe6KkiXy)=nw9<<5K?Aw%T@y?9`!VH z?%DYvd&EsMJ4S5)^8JkZ^u{wgZ+TrlS}C%C1egzdveoOJ2A0G0V&HmLbHsLu1kf1k z0O5xo?}@mU#k>ZfExgLZ-VB(0D%1IIlhuxy^6&@U}`F!{cdR71)w z%BK2R*}~!PvY~+M)}&yn@c0k@BXV*<0^yK1??Zs0Tx(55DLK3-tm}Qh(9Nb@{ysdZ zJD?WXlJaW+`b{nWF`bnB>a@`B%!5np>dE;cF<>pmbcXDYXCF z!#!VGElK@uth(8IGG%0o?kh!qh6too!DwRLXhHIxowc)@Za6V^*Zb%yV%ANT1QtB; z!v8_qoM%BSQ@=$5Zfs3THx5^OcI7db6lBIfiY8?H4POF0V}el$VF~H1Mn{Sz_diJ( zjdN-9BO|e1Wl2QqCk@O_J29@XgF#lm_Pxk z{3nRQaOTVnv^Kv0br{zt6>5Lh?3~bzH(9j8sc)jXS>~^J<~A)|muOWC>-Vd)*a3aX zIWeuf7Z4LVZuiIt@oQkhfxSZR@8K42AtxUCKR_}06s^5+jODrz@tP(Q#B2!JX0hQ9+^mXAO6 zEGrQm@p%ooZJ;}qUylB6fwRP7;aIE@pY^)86mS<#mTp@7opQZB{!Oi}?HTo32cqs} zR;(K>$w(sIzS|Ve9NN(4+82`5qH1|;28!e37C$PD-zzEPuTQrlNROnKmei1xHrbp@AmCPb1src`qkw1Q3nzHk)>O0TU zz;4ws0hO2Lntd9-M?PX}@w(Aoxu>VwGlG|H!}LOAUx^##!$NHdG(zhM{B45!f0Bed z%SH8HYX*V~ZA@ckSN)|UDlQ_$k(SvrA7i)N-I~jV(t;Dibmm6~$M#=KG-g1yUrF_oY&_v~-Ye zg4c>?W0OI6pGz5eQPr-^@lV-NDEpOAF40RfNxhZfvS?Z-v z%W#;bl^GWM<~0wYKdi5!Gkm=25NIrtcl^GcU0XSrw>%iU67`9@%KSVl6QTW*0`(u3 
zcNwx_TdAaMaZdA}Vsqu&TyfpSprGEFx zZEVm#7V*1JV>nIW;Sf}l7E$U&!evNq;mhDd0FFKl2Pz$uL{^5SyD&ulO)uhpfK0zD z_P%;#{B5xA1~5vjmwW0bD6Qx~C~hrJvsXu|ZA7IxtLJPrC>#4pTYr#ye)j>NJ?+z7Ia)I?d?lYR02Xj0Tu+V9vY&y+I=%-01!Pgj2I7cbtAg6d(k?Qox=^@FV)3#d!j zu)1e~Bv2zvRnIvq`e7$ZI+l;3G3YB#Iu~e8UqUpNdE$vi)=YMHSnBh+@3Xj)rX!bO zv;GLX(Z2Ug^SY4GcMyCuG)#S!WcZU<+lTKFKOKE6YD^Fezo}o_2?9fOK-z^kA&q~D znh2x=&%~v3j&klnFy|{hHqSSs*e^mcn5emTn!F#g*{(00gcCKyIL+T*#JS`_QGp?T z$NMh|{a{6Z+B=lWIki8%^BCv@{XklD;!oduIK6+>Zns(Ee?> zHN-{SG7qBGB)VzZtOXZtK{|!=PttlVT#%Q24--SNk$Gv6ioO@ISE;78;g17WR8o><}Q2al-Ki4Y;1#KYdp)O&$F}ZsD}El3~^Sz19e`7^Q|F-p4i?+7LO<9xnTOldEQTtKfa*v1Iw#w(Gy1UrJ0GFcES?u}!{qJ@Xo za?(WSU}^>?WTIY|=_t2?>Zus0n%vqZM4R{_DIq-Tf&gE_HDQ9644C;!UEl2hI7?Rn zx`4eI^4+7*0rOtpvz?uNp<0%4i8IBM8`pAWoj;P;;nG#ZSyD@%%n0Kmag&3oNfy;? zDrbF?NF^z1?~hB(gyDsOMii80=h1Usvj;#;+fgqjd`C=M*AYVNA|HhmHurtb0k=$g zv$0L^aOcufy_w1Cvb@epE$wE9C|Q7i0&rG;z&HJWPQ?FC(1+;%B<@C=UmSls6V5jU zGjTxu{skQezG(rNVVjLeEv?}KroccBZ!#~eMl<&-BGN?qfr#DHv}}yc5sk9E!rgw- z2gKAhpy1c!d{*n#ypp`XQ}mf&pbeq;A+FevS(%yOAF^50oY_P%F?TYh(CghHtqvS& zv3>YJ?d5kueG}$x{t+R?gv@y)q#(Bw#zUUfHe;oLnPAnB0bk$LV-j@uv#QgonaSf_ zV?DIObQ5M;rbXwzn{xaQ#79DEorxR(IQFTg4848V-lbJ=w&|O2=?p%`44Nbx`|*iC zq=h~S0Nzd;5YZu4Hpfsi=aWyL7-wcC&?A~~M_!CA3PK0An*7l``e;(fD(;m6+72e0@=8K-Q;BBCiIED8XsNShO@FGm)u{p#_|meG&d*hyVm-A(?3vQklc zo%)Z2hV8$45Rse)BqHJHMkyeP@kff*<`m_vwy2@-M_Iz#X%MfQAfX5o+c{&Cu4Lc) zhC(R4BGov&&Kv;D+-BA61U2pfQ8TbKv#8B2aUIP*r=7{a<3(&m4y;Mow4k#(ZJ&+KI*b#z$WIRg zlvZHm3s9g*n-iJm6K8E&aa8Ej%f~*c^+gMo)Z%*t z@&Euc{v`i_W%x<0P*dAZ32x`{x9q)6&MljCsVUq^n}{5pc`zS!C;e|q+34=hwElM0 zd)~Gv9}ht$CevQ2az6mH+1zQ}9CF_<{ z(aymAS6fDc)(%&DBC$2S^9_ z+n55tj^~@*t*z9QTD;Pw!Qlbq>SIfjQPHVuOr3Z#x722%_#w@#oo{vw@lvbmAb7k} zJC|~;TgulSHieqOgFZpmL@GzJ_H7poWj2Ln$wq@)5PFOxc2rh_%x+GJ0~N3;D)Mzn z*b~0q+TphU>f5)DvxtW%-btg1Mi}Bz#_$f1mwh;sv8ZO-DsXtSp0=IVTte(B1boii zoG3K8MBg0n6F$v`C$7#=L)X;r)!zFymL`ul=9~IKG4o%-I*HzW66?QJ^QVWv>Tg1( z9)^f*+{Ttq;OQpy?`-(U6168-p9a~d#k^k*q4|T9+H!t43VVKfBfL*+Faas0%XJVZ 
zAAI;0007r5*Q6-y=Jf_Fz5<0(6_r_KInY`r%-y`Yct%8T&%xu7tqufZR13D4T_<2A zU`V!35Xz3?75$Jb+xcbCpq*RFKUly&<-w^f9Y|hXl?Gs^9=Q`Ac|oUV*pQX=b;_)C z+T!q#pmof+Rx%Sq4}#6~e`SP+9H)=fCNwZZaY!6H!pw#MD%{4l<=r+yq)ox|;d|f^ zDlh}mY+a)%U3K*W;QSAaSzz=U?)>5(?DC-0^)-NU?fu_F)&oYR;Hwu`L6mbto4UaT z`L+vdVCFjSg6BZ_b8ymekPP^6`Ts_g4+jt{T`Jrk8?FY0%Z=_A&!pDZ{X-)!^jgtQ z5!JLcD0Xp^4M99blt_kIgg}bMkW0(LQ)qr~ptw&40uiQRg{4E)`q23nPf=-^zcTW- zb+9(D^u`n)7CYbF8X_LX3Wr^Kig2?nhL|4gWd6j@9#%H#S(b>Fm~GmJVvrvN;oPe| zD=DM@987azhFsR~Mo-xJT1X^Mv=c=oyo)l1_+$@&^WfYU+n)4F@?M*GCp;Ld5WAl-oneNs9&`niSr` zkU(`aCyiEQy`|z0V@SieehMYMeHF!i8dzcZ!v`l5kpe3e=xetK9(Hnv@>Ue=oNT(y z4?Jeq+{Uc-3XwtLX5^}GxYc-VF8dXcRr5NZ7;J-sh6(Ww^bD5nJ1}680bshoe$?LW zdMly{eaQk;rdIjUvU$7UM_UgeBgOBeZ-S?zqg15B_`TD`|*lJ_c(wV0fU{~iok`QVic2?#pin}Fmt$E1587L<|ff!NEw z@O%J8-+QT?i}-EsN;^$4@;Tg{E10RahkLUD)X^u?`*d_EeXd2raWy^NX+r@P?8R-e zqn8ZF^!t1Gc`NWqM~Oc*()$)+-RQ;(5biTNVNu}Ne!UhAg*ZaPRWlLS`B1lQNnDz) z@S(j^!WBb{n@srEL<4odA(pnJU(PSy zQ>^+IQdbeB9_#v|P1+w7;9 zwZEsZT_`0Lmk4lTVZCF*&jqbIZ1;y}t=~~TzsyH}5O=qG2|a}^t#CU&X$AuKHCOyy z9xfbNS8mdRlQL*u3D#g|11R9=!*|D4j=FF>2%L$i$77#G0S7VI;XxLHRtRSKWDCF7 z6*B>4m70&ifcz-B(;yRdmcUFpKItg;f;mDHL#HQt5$|u?xC$yzxaKME_5t386}Ta0 z-r~y9tx2_XK8BRw(QF@}E)txk;rKf4{N$BK;fKfYBJ>Q1$BA~y{bwa2kBER54u5SW z%dMy*JYPU3o9iT(0;Zs_%YT+|o(=DhN~(StMT&d>caq&A<(!#rRM{VYV=&`cT09z) z7S7u>NjF;f@%a3>RVKyA^M~2R4HtdQGBM11p|Gn$z+ExR$}}^dIaHoTtMR<=_u+jX z#f{%$^lM91VS;H*pPO1?Hv1V^RXDcQeb}#Ol{-=%&imqrhK8!=J4%^#=fzy)Tt#3m zj34XH>+rYM=Y*yN(8`PwYc=~fXyu}(nI(OTh6G((etPRM3TPdY4!`!7d@jId!je)B#mOFFTBhSU+)GY;sKq zCQJ4ne?-@|->0037(AkgbF!}5dUS+Lc6kz| z6&IL9{@l8aH$x@{?%`*O*1cUQl4{7FN>@Kb)4u&B;2?c3&-y7ZuZe#G-JkY3 z){@j@r@5mz)q@9rNhA-*epoQ|zTJnfyh6dce2mmiDm&W%`QqSXNx@S0ReDTIpfe-BzL$TkUt&dND z74}KkskYKTszyf!nC4!C-7h`gG5spn#~a%eo!Pagfh2x5>9&Obz(fk zz2P>FBZFe+veBYa&3$P0Rokq?J3{#Nr!#EAS?=o&%N>Yp+ed-dzT0a+Wgek^Z5*O# zCe>p7yn1b1DgI7#7v@mv4mCp=-X!x8QJkOBKEFIa^939Ge1wDb!jOHC(dSa|9ciYh zvfzqsr)p^bI(Epr-0%vQOsiK3;br@qm4JaoAu&qxKv@))$iFv)4ghTa7fF8MMqnlx 
z7%{!|<5dNheCT00<@P1tW9?JSjbGZ$u2m2`x(3YK0*m)fy=U|}VaZLI`9?2GlN)l@ zUrlJCaMihz#ll-TMS8=v2R00tC$iR8hNL*Jt2(&u-B)|^Btkuo4eu?bU#tvP_ZZKRMTV?_sVUW1j*nDpcN>OR#oH^bX= z83qF3alUZz+oYif$F@EYva*xj#cuq_qYghZ=`txsPrWhEHlwsC_BhJD-o~~R(UaCq zgk5J} zNz}01ZZf{&^2az@^Tye{Mghk{ zszEQsIL7-RhVAqD1(nlPDRN zxc`TGep|wpIn>gCN8cj}r?GvvlnVo-z335}$Flb_HwT3eCd40-rtgcFyestXANA zdG}E)OXGgvl0S?z_nD8c*xBva-W>cT>M~eu5I7e&xC_o!hLW`vx)uh6uB?Ieqr)bS z?JdC#xAN0(s4g;7?JQ@iJ+LKX%|o3*$gb#>KV(C}5;k>9AlT1w8dHMU4(Gq&N4YUB z^4o8!cr$^I7HjT{eEMnO$4(2|*Oa+DnWK5uj5sQQ;`J;n_G7u3*=*&ZSoyb(a^IOE zkJ!1RatVd?wwWDY3>HsRrzzPhLi(g0t(5O;v!~`Bz%e~=zc-z3-!Va)f)zsR_zYeSi__jdhrm_L9Fyql* z$DHsNNLNb7{)BV5Y{+<3=C#o&{Vk3z$o!@a&b)!e6UR6w zcDN%zKp`iRfp7!f;&|{aeOu zwV|~;M4ulY(tR(q=jrk?SA%xA*up^pHyyoBD=XRTfT zBBjYo9^ab?5ndj1dYFFjq^8ovzzx~27R63-`bD(nL^)FZ7vU?SI|0QLlUD56mP3W? zIM4cvilQ*(G~R9(re)NW-mLK@1$oS{7xOD$)aoR>#0%Th2DH7Dj!4)Y^eXtHuk>m4 zlkA8lljjAd)hF+$2dj15kSa_G_BgdyF=v&KMsp&Dz^aQ(fTFQ$4EEsZ;w`}8zb?yu zD%tFbqvw<&&V|4b-qP;(-W$~zV4L#K(w{k7A0UCHg{#)P9P4IMPRW1EaT&Dke$IAg zjQe}WWR15vs=*c6O*ksxvl=L+|?U6{@2zRvH!=Impi+Eczr2VZi9LZ zS_Qkd47KyPXOY>u z@pw+aNz%X(pdWTh2%zQtY?X6O!G0=#IEa5FYa=2Oogy!l50xR$l>)0-II3-V=1XEvy3DC(>Sk_ zmn3RLsk(J9t>V5P+`#kHEsK?u5{GLx-SWds3A$nl-D1*gKij&Q~BD^pkeysbYsFVP>HWLRz-`Bubh|&D~8T%NY`jyJt zg8AY7ID=njfb~hXS|F4q6dd~YFNl>H1Oopck_FZ${{h7Q?jbn5_An74y9VBX$XC@> z5KIGm@j$fezbKjFZH&`e$zToq`WyfH`~5$F81O%drdtBqgoPi=+IQ+8EEs(*cPI|t zXlMn%YKhF`(q>cUhs=cN zcj%m@O9=l}-C^f+26q$CX3}u`T_vTsxnn*5S^F=o46q_0qY*ydD6Ql8k2i%VN6E;DgPEG*;JG z#eY%G`Y>9U*%V?nL{WfwoH2GcogH_SLqxVQ#AClpr24YufF#>}3=>ZX%F4@(VTtfR zaiDMR+v3ZK)p3QLm$QbTCfd!qa{iHFiF(@wUyj0$?%%y5@*X`SL=OO7$I-S&s6^9E z*V9ki7Gu;?qofyOQZr<$MFzXZ_?Zc}<7W?~ttQ@F6-}gaN-@jbrl=i3R z+8ooJJ%UJ7IM!(9GV40yS$e6fu~F?MlAafJVj=dT{ITgb zZm}OK6#RN+aglYjk;0BDQv2R_GCyE2K?C*@m26=<-@hrbXHpsf_)WWg8}L2~7Rrj< z`VB91q9xnkV$~NHHQ&Q`w*QJ^RiLnzFPRj{;hK}e!kvKaSCUN z3K{whye~^Fd7b3O(ZSj7jS( zzfe)AsEbibk$tE8tTQ|!f`eQ`{1Cqdru)KR<~7 
z&^_zAI=6i#Ox?~if~Dh8ZDvg;XHv%IW#wce9m*86+z;MA%(y>%}o8a^msLiqUb5@CbR(7mi=%uxd7i`MrwNYj|<^7-Jil zxIFDoMbRkDozs`8LbLX|H3fA8_m9X%qb#3a71kgU&4zXh-86V-HFyjSp7yr8za-&i z(6Boxv)ao0VNn=OG|Zv0vvY1$kK$4y<3tW#0CZOMw46(xU%gP0>U8obkbdLQ2BPOY zNgo5JD-#?J#w4*|VWRX}mY0AzL|<;g=6G;R!(VxDL+NI)cCaQK`rKmO3j)^{XtnK^ zOR@!VpU!!@SZIH^;Q3W1;4JO~<2bO|4F30b#pJ@lg8vptz`K9_%)vF+vF2JX{M!r; zvh#0IWx%Ew$@^X8Pnds$w1II_2?oO7t+!fa!cD`t@0yv zy#%Zg5cn)fpVeG_md6T?5EahO51A?&E5-uA? zFMZ!{bEjh*xm2W`525kh_w8xrY13^as9Q^s^s=G#qnAwa7`2tWu{Ji>fGYX@E5n}w zAA@owE=xVZ2%)mp&qWJxC=4AcuE21cwF$ydu3s|)LV(#UHhBmROJ=un{rWg7(16BZ zIC3(ZHjm=Q2e;--HX}v(MF}t=FBn!xVGPE)|EHB3kH9dFs2Ao>7sy!=yXwUibV+Yl z-~>L1-dwA>wC3+YA0I??IqAR`E#9qvb4W_h`qfrXm0Zt=-?Ftt(FieLFfvIO8#*d> zEBN*i%;oCQf;3ZVG!yDO&b>x5>Kim~TgFZ1#N1}^3XmdfuXu@`I5V(1Ot0f2*q75=l35LZ7G~L?Q|@A@SUjz*`zTmWwqN8k(<)ia<@1g11yrV!j%2<4Y$c*{!48 z0LhGqka#8uJ3T>PwUI~$U69-vg3wE{IKbWVkQ-lry3nb=7sfGs{g19$T;n-;MJ>0S zctz{xw}29K&I*!t?4wj@4Oqo?6?on0m>UbazVEiv_Fiu5tcGNEb$&5XPeU4BFB9?7 z98Zu>b)8-JYh!tL+HuLZEK%EPHK$?TxM%m*ieo;gSl+iMz(&5PPEp9~HdrMQ z_hjms zA8x2r*=e;fnFwAzw&~AUUa6q_)$4rv-rxI9k*~0JB)ziO@Aj_|Q1U1sM}23Ewj&om zK<9#qcP<|Yq=q;x3a&@`mwSj@n40W>kTVj4urmW52_L`wdYvuxed_ z{=D%jsS0I*eJ}mfQ@O42iW8#mbk)MoYdi)(P6R=+7CnkFMYsJ)RBJ>US|BH9unxAJ zDDePG8P=*td`t1aVieo^NR?a~Nw#(+Bm7_MHjrF-mwLC>ec({LZ@+C$#vam(yWHy_ zFlu{8+FfI?)MrTUqXpKapUwi|qa_j=JCY{Jqb;pCTDRuOx5f0$UK{PH4}*1bn@Yy| zbL?qFJ-EuNRNBpwLpx2|2Wnf?*5?Z-y-Si6`=9cxEYD7=|7K&8$QTGdUpfY}~YxiP=9->sDd zxMSJar4c@g%n!okv7cY@#J6ts=F;j^yT8I-Jx1;9gI@laAk=aqjkC8_|7$Cl$6cuH z_14+AlXpHk^NtSU;oSI?h?6%@Mx6}d={dI>Sm(-nLp?u6@V4N5aC!sJ$z)PN+-?A8 zOJMzSR{Mo=(^4G2_pU~Zbdgfo2`0Vu4k>~4u4y^}Zhphi@_L7Rb33exJA=eAh*%gc zjM{EaIZ83)I22+6>jj4^DegRu2W>a*#D4|x=hw|ccdJJPIK?8;vsYEuFb`I3tQ#6F zB0qYZbov;j@jCJ%x@DjB)9E`s+%1Px{v(`b#6ay3n6W0PE`%&Gb(DwYg!8<~HMpHs zrI@oX4>NvGmVTWctLUs%f_lExXFcKBg@-n4i4rMagN2@CD}9P(|3%Wb-;0y}7BR|W zH^9T!bwTGvP{gfXX}SKBL&{E^q8Bk=Et+MY{%u|dk6!`ZRHabu|^C% zVqtYb1JfC;r$!cS(bfO}Z%A&1!`2;`?PaJbgpI}e3Jt+e3oVU4+L1zLtWzhyO9&%1 
zv|u#pWjLFxK+?V8u*GVSIesvJh*XHqV4~(5;mQ%(Nm-olNzo=UbMrn(9zeIQ6mA7q z1_b~_IHL#-xS5j;uVQw_8Y=k$wD4K&KF_Q=Fd_j^TeoW2&x`@9t<_*v^W^t;C!t_-TQ_+_ zn;>i;J-r`KtD1 zhvRq?q^1Mxkfd4Lu^!wR#+7eqV?1Q>zCxN+naU^E5j^1c;hg){siw= zv-sA8M0+N5NM9fQilM2(ZD}Jb0$`6ynXaYW((tk4eH}oQWP9>vN|Ac1Ds zo}+3Z1gFLsb1otfksp$kK;1}pH?ty)eE}PBA%0wZZlCOS^Ho1#vP60mMB z`ET8}Ue!{|o_`yrZrp;ays$d>U|Box$b2tIdqb`m)sVH#*dVi|k_jMit( z>ocU{^A8^nHwGX^X>UrjaOs^#f9Q4ZZ)Ug~*@!D2kiD!>Lz`BoWhtDcDw=tl^26?g z_io~P(|YB$vO&)Dyu{@pwz&jGIK179dhcs+wk~6T=d@jKZNgYqm;nH>w!-ais+ZJ_ zFRlQIvkjgM`TEHHb3TIY_y!>?@FWea)>u061+V>5}ri!Zg* zb-COtB|6_0&NX9mJIOBh2?16IszUCEQJROFqs+LaMAo`d<`F#dGv77(4=`$ahW{aW z$oU+wk6#AMOFd-&h}%@}Ml9=$DWjO|?VlT(@JTjMtg z6Y9IDhn1d#mez8#rTPw%CAFz7reJ)W`u+!HWof$s_|YyJICFkId2l=@;+l1v{8ge7 z5fA@%ByU4o%(0q{0c%zz&y{c^TQ1!&{h-Sn{Fb|z>#REt>tuMDR9B#x(#uT@0Ai5V z^Nu=B+JqpT0uZ-9ycce2^02lOracI~<^s0~aGx#lz5)J(pve4w1o;zpR+}uTxdX*$ z*$H+CgsZl#kpM6{XPzguw6O1+yYc#(P3isZcd?qbMA^$5T6|9fkC=gp5ejZ+JEkGV zcFod1KetbB()KX1;WQoFP@$3ZuJ4F==G1n!Ohhw+e5styrm`SLsMZDklYJX(@DIFW zzb~!qPwRIHZM|(~y<@rX$huKh?y{Geo};FBeDrFga$B4+rEYNI&#}eEhPv<_nqwaQL?X3MXQb3oraXF0ERYISwGaGm>)(wLM>Gia}#L2vhd zL|l2EeX|1DghL;QBB}{8oG4Sj=J6MgG>2YA$BC9&GO|NBF*Fb#{qI%3$JRXR(X>Mz z2%;pV>Z(4!AxJq5zSLQZ8j{xhEwJX1fe?Z{*cZR$pJ6kfwkKs#{sTVV&<6ggB2rCI zEy{Dp*53Xvu2EeJ8&u22#l366M|v;M*Z2Ct7&GSgvET3{?e|BXTU?95?^6aoa=Ow$ zSo>gs5<>n1up`KPmsRu33zxoDh8*oJ7!8%kTwk_?tRJ;Bf&*rYQf9~<+9C|v0h}-( zRlb@J{yJ$5-~Rt7G+P7M!Gr*WM>e$mdoQyFs{dcm2YC1YI?nr>!2b*xrNCn4abVlu zh|0gsss9UN13u+)S}UJsatRW4?K|Ye>8(xvs^ZM-UdiCD8uk#X&paXq#_k3d%ILvsY(t-lTjcr9up zs|6?ijc*6|<-Olu_g2*;o)LnM{}5j-?Bc;k54663N)KAtD>o&swbm zbN`QTzC%L`tAlJX@vDZlUP3nHm5Xrc0Ys$D=zq0Q@Lx8RTnki9tlJ0+ZUo4(5QPzb zhb(g#5=m}Zs}Fw}IlXD0%8#&P(pl{AEC`#~Dx!T{2YRlz5Q1K^-c&0=z?(R78 zObMB^{KcHJMCAy-S$%PC>-7!Qk1Tv~CI7WZ1tRF?0{M^KlFg;evAr#auDCq@uj0m> zW94(;5n9)|g1sk}Sgq4e&up_)jJT+JoY@-#A0vD_?>ZXHjqi-BVe#WdbaUko^7YH+ z1R{5MQ^?-p$qNJUEAUQBWQ=%3H$X9~k0^?Ya;UHpJ9SEEYCer0W3@;Atr1<&{CL58 
zwaY7>sMemPa+RW!qcv=We(jotU#T$(wHfoH~2?E8JFtT?sR1#EUrd|_R&u<=s=*$~E^Ol0$G;WaJfl5B`egg)d5Z&lm(0qeuBxAJ7`->c zH$m+RpGCpSvdwIZxj&T&A`JNB(L(cwj6d{CbM5<%&8CL+soH((onwnH478ijzMlVE ziAyvaV=mfk62h&6ZZkZWE-_ZWikZzNi|c4yH2pnYv#NB{jU!DfQPLCF0K3<{zVd(B znktz0UkDFJQr_HUTj3J^T<}aNNbHlQDK@oBxxgBhc#o`>ddAMA=NG{B4H?zNJ??-)*#VF5+sDxMi3Ddq)OOd z57xS_ea+s#_cho4`F%gW_kG^?{l4#c?)$m#s2e%g)fc1Q9#khWioGm7l-EY2f&Vb$ zO)vF7{n^&`bTC5}JJ>O}j#|Y5iNniD#d34AnFv4&)*$`3X9f7HSlF}|+G@DAggsRJ z@i6Pp+naX4Y+>1P2lvFZ;(Sx{8s@4c&;KDTqB)qoLg2Aq(`NI*9GLlTt2;AY-$#Kx ziaI1(8%6pFxLUjG$#lP_u*^7IE|t#u2wf|l;=?gnHHrHy) z=%QE9(r#q1EJc0mik3w$9LO**(=V^ zZFyU!eaOn+0Ym+?6tqXf(`_K_+p&|DNQ3dYc6Zu!-N`s*%Y zgyzi6HXZ!dB3pR&Q13w0*GwBFE^sn}dvS$jqq8Jg>^M?)RJb+Y^=M2){kjNAWp5-@ z-2>efKYsccKb1J8Xl(d-$6fj9fuUJK;lTOcVT{=I^5DD5jKi9qjix@_1#_tdC-3{I zG@c`UgUK8`N1!dNPhL;d27EMX8MFiP#|rA=U`yB0q)V|&vlv&9f(3pmG%m8^8H#h- zW?;00y;6~8A;tGX=f%$GE#2Gsmd|Bxco)iiMChq%{dn(W>y=8X)oVmp#6siU=nzbj8&?)d3p-8uX^=)Vn z?GqS%&mc8#Tl^)};PpUB2cW%{%HCDByCrm9)JOY#1GoQj0;;Ja-4B-X-MEcpWbU%z zNR2`W=PYr9^;+kI?7Bp*z^{$0qQ;(uUfuNL=-=2sV(Y`a*uQkked zNPT3}NU?i&f{FM9wy3hPB*Awj14VW+6CZ{K&9oso-VcO*>j+Z7Ee(-ThBU@)N%&-h zrlx#kac{i!YOSmpsQf??6F8vP48awydM>T9sY#vl50}oGy>W&J=EM$)#QHDBg}l&{ z<}q3ReDxr1;S)81*g+D66r~~+)sR;!O=;-)bbwu0Z>g8ehCPsgP`%8$L^QCPC21}E zhS#TojtT7gT-aO^r!%R62E#9e){r#qlWABc+vRfFY1p zmuS(vG-KOMK8ODmXy5E494Y<^3< zYy)e}GnfgxioDd=^BHG!?4;Hp7}WrFWEw2UEuY`8xY&?A#9{Te~Tz z;6G_cqufAD1dWBncyc`yt{Mpt?))Ts%Q@dvLiVE^WeJh6yMiZBL7j`oOP*;~!w^!q zkB8B=5qVz=f3>KgefCNBCT={XKi}}rwHMnPz>zCiohV-n2HVi@?cN6p}&Apo_&2=@4UQEO= zc+B#7(&&QP8FGtZ0O=J$jlOKWr3Cq*#k=kY(;N}<;q+--y%vgCR?*u>Xw>+~Y9k`E z*iB6+u7C{9UX6`8i8-(r^gf>)d{Sfh_iK>P!^$E{b{9vs?x}O=u6)*r{~YU`%sY|n z7v<|$UOL8G^RB381WI{?gAW{16 z%XDQ1E6)KLt4qvWifc&_`A80NTZhBR#C2IvJKf7?f+?bB%{4C3Wby8Jt`Av;$!pSu zr=?8U4)%E8O)k6Xkv9KOT~QyICSQNi*4#Iol^^G>3k;ug8MPyQe5=nKXf``&kNH#m z_^8o{ZEFwW(du20b$=mRJBpgK-QRQNP}|f{UU^7k3{Onho+OZ2UbngA3 z0h2pHW?A?ju{Osxq*967ad=maRX}|nRDmO>pZvJ5P z*zj^guzOoVPWrvZ!MG<7<15!E+b)r-TQ+KOn@pyMagqJImB>N~j?Q)nH(lYk`(d*i 
z@pU{Mg0L4rbiO{yvA7fE@oIl zz*n1Btww9W)hfI^JdRa_lo^!Y)VDrazS6A0FwYT+-ui#lrAyLT?KA}N*AL}K+UK>H zA7H`)cAfct$p8i0ZDHb4W;nb?86Ub3#?j9>92|b!Wn=PTJh&h&MP-AgtnB&Iz|*tYBu)Ot-=&uRFlkL_Y31vESsmK z64$F$%*Xii5=ZL3&~SAUARNz!cl`u>!plPt3~&$m!i}_CX^n(bnUV2gp^EA1*9vgy z4JgHa==NI6{WK%5Z2Sx7NvC)Tjk1xVEsWo+nvC^R^GxbjOAL9Oz1rIt{zkHziwaW= ziCA4}kkqo(k~2YJm zU?TcL^f-q}MRxN!is+u}-624`Kg)CYquv~-{SrCaE9;Owmk$%r`Y{K(#+Kw`R zzwn+W(&8lWYkEL=P$~-ua1qpKuo?>S?bTd4C8QKRysQqp5l>`H3|PLdo->kaAz~a0 zJPUVoQ|wltCfE(gQ86UTly3QkNaYWz0av5e5!bcRvA~HGfSb!NOciJO^RgTni zSC~U*;)zo|TL&9EEG_0#LACs9`d;rmtY?f))n}l^xrjwHZkC2h@ zi2P;I>CFl-z17Nn;!u{^mB8dYTZP&;YT8|ofjGrPXj{0k=kRy|p~UW$$!82rwlc-P zGa6rFQEu_0pdgap|DDhU6@5t1tJZL4VtI{nxV1-_vve!Za0<})B$)@(#I33{9;=;v z!S|{@alOBB5?IVFQ-+g<3XV6o72IjPwLI+-p+kaxcIdlpKyHIogVMOjGtr0-?;X zGr`ICDtCTfrpZvQ$yzd8v-pWSUpkfsd%<$x8vkv)HH$^8T}rV_uLKgr!}Wlfbuvi^j>|N_qlafs^+cHx%Z@* zQ9DMosCR6eH}0EnKU_0&F8mI>^oPB*zOg(`G6cdqHn1U9Z43Rw2KxVj{hwI@5*K8U X{h8Bg`Qpg?Ih{Tc_zlDN;*I|TL1_QA literal 0 HcmV?d00001 diff --git a/src/sphinx/Launcher/index.rst b/src/sphinx/Launcher/index.rst new file mode 100644 index 000000000..fcc5802e9 --- /dev/null +++ b/src/sphinx/Launcher/index.rst @@ -0,0 +1,14 @@ +============== + Sbt Launcher +============== + +The sbt launcher provides a generic container that can load and run programs +resolved using the Ivy dependency manager. Sbt uses this as its own deployment +mechanism. + +.. toctree:: + :maxdepth: 2 + + GettingStarted + Configuration + Architecture \ No newline at end of file diff --git a/src/sphinx/index.rst b/src/sphinx/index.rst index 3169538b7..8a07fbe72 100644 --- a/src/sphinx/index.rst +++ b/src/sphinx/index.rst @@ -22,7 +22,6 @@ the :doc:`index of names and types `. Examples/index Name-Index - .. The following includes documents that are not important enough to be in a visible toctree They are linked from other documents, which is enough. 
From a48ab0d38b73aa89eb3a592d411c9ef103d83cdd Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Sun, 15 Dec 2013 18:59:14 +0100 Subject: [PATCH 016/148] Implement name hashing algorithm in incremental compiler. Provide implementation of invalidation logic that takes computed name hashes into account. The implementation is spread amongst two classes: 1. `IncrementalNameHashing` which implements a variant of incremental compilation algorithm that computes modified names and delegates to `MemberReferenceInvalidationStrategy` when invalidating member reference dependencies 2. `MemberReferenceInvalidationStrategy` which implements the core logic of dealing with dependencies introduced by member reference. See documentation of that class for details. The name hashing optimization is applied when invalidating source files having both internal and external dependencies (in initial iteration), check `invalidateByExternal` and `invalidateSource` methods for details. As seen in implementation of `MemberReferenceInvalidationStrategy` the name hashing optimization is not applied when implicit members change. NOTE: All functionality introduced in this commit is enabled only when `IncOptions.nameHashing` flag is set to true. The `source-dependencies/transitive-memberRef` test has been changed to test name hashing variant of incremental compilation. The change to invalidated files reflects the difference between the old and the new algorithm. Also, there a few new tests added that cover issues previously found while testing name hashing algorithm and are fixed in this commit. Each paragraph describes a single test. Add a test case which shows that detect properly changes to type aliases in the name hashing algorithm. See gkossakowski/sbt#6 for details. Add test covering bug with use of symbolic names (issue gkossakowski/sbt#5). Add a test which covers the case where we refer to a name that is declared in the same file. See issue gkossakowski/sbt#3 for details. 
--- .../inc/src/main/scala/sbt/inc/Changes.scala | 33 +++++ .../src/main/scala/sbt/inc/Incremental.scala | 95 +++++++++++++- .../scala/sbt/inc/MemberRefInvalidator.scala | 124 ++++++++++++++++++ .../backtick-quoted-names/A.scala | 3 + .../backtick-quoted-names/B.scala | 3 + .../backtick-quoted-names/build.sbt | 1 + .../backtick-quoted-names/changes/A.scala | 3 + .../backtick-quoted-names/test | 7 + .../same-file-used-names/A.scala | 8 ++ .../same-file-used-names/B.scala | 3 + .../same-file-used-names/build.sbt | 1 + .../same-file-used-names/changes/B.scala | 3 + .../same-file-used-names/test | 7 + .../transitive-memberRef/build.sbt | 6 +- .../source-dependencies/type-alias/A.scala | 4 + .../source-dependencies/type-alias/B.scala | 3 + .../source-dependencies/type-alias/build.sbt | 3 + .../type-alias/changes/A.scala | 3 + .../source-dependencies/type-alias/test | 7 + 19 files changed, 309 insertions(+), 8 deletions(-) create mode 100644 compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala create mode 100644 sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test create mode 100644 sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/same-file-used-names/test create mode 100644 sbt/src/sbt-test/source-dependencies/type-alias/A.scala create mode 100644 
sbt/src/sbt-test/source-dependencies/type-alias/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/type-alias/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/type-alias/test diff --git a/compile/inc/src/main/scala/sbt/inc/Changes.scala b/compile/inc/src/main/scala/sbt/inc/Changes.scala index 3fce46738..f1de55044 100644 --- a/compile/inc/src/main/scala/sbt/inc/Changes.scala +++ b/compile/inc/src/main/scala/sbt/inc/Changes.scala @@ -6,6 +6,8 @@ package inc import xsbt.api.NameChanges import java.io.File +import xsbti.api.{_internalOnly_NameHashes => NameHashes} +import xsbti.api.{_internalOnly_NameHash => NameHash} final case class InitialChanges(internalSrc: Changes[File], removedProducts: Set[File], binaryDeps: Set[File], external: APIChanges[String]) final class APIChanges[T](val apiChanges: Iterable[APIChange[T]]) @@ -22,6 +24,37 @@ sealed abstract class APIChange[T](val modified: T) */ case class APIChangeDueToMacroDefinition[T](modified0: T) extends APIChange(modified0) case class SourceAPIChange[T](modified0: T) extends APIChange(modified0) +/** + * An APIChange that carries information about modified names. + * + * This class is used only when name hashing algorithm is enabled. + */ +case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) extends APIChange(modified0) + +/** + * ModifiedNames are determined by comparing name hashes in two versions of an API representation. + * + * Note that we distinguish between sets of regular (non-implicit) and implicit modified names. + * This distinction is needed because the name hashing algorithm makes different decisions based + * on whether modified name is implicit or not. Implicit names are much more difficult to handle + * due to difficulty of reasoning about the implicit scope. 
+ */ +case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) { + override def toString: String = + s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})" +} +object ModifiedNames { + def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = { + val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet) + val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet) + ModifiedNames(modifiedRegularNames, modifiedImplicitNames) + } + private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = { + val differentNameHashes = (xs union ys) diff (xs intersect ys) + differentNameHashes.map(_.name) + } +} + trait Changes[A] { diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index dc86f818d..ee4352787 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -21,8 +21,11 @@ object Incremental log: Logger, options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = { - assert(!options.nameHashing, "We don't support name hashing algorithm yet.") - val incremental = new IncrementalDefaultImpl(log, options) + val incremental: IncrementalCommon = + if (!options.nameHashing) + new IncrementalDefaultImpl(log, options) + else + new IncrementalNameHashing(log, options) val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray @@ -128,7 +131,8 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { apiChanges foreach { case APIChangeDueToMacroDefinition(src) => log.debug(s"Public API is considered to be changed because $src contains a macro definition.") - case 
SourceAPIChange(src) => + case apiChange@(_: SourceAPIChange[T] | _: NamesChange[T]) => + val src = apiChange.modified val oldApi = oldAPIMapping(src) val newApi = newAPIMapping(src) val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize) @@ -176,7 +180,7 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { } } - protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] + protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep) def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs){ @@ -475,11 +479,90 @@ private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) ext log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) val direct = transitiveInherited flatMap directDeps log.debug("Invalidated by direct dependency: " + direct) - val all = transitiveInherited ++ direct - all + transitiveInherited ++ direct } override protected def allDeps(relations: Relations): File => Set[File] = f => relations.direct.internal.reverse(f) } + +/** + * Implementation of incremental algorithm known as "name hashing". It differs from the default implementation + * by applying pruning (filter) of member reference dependencies based on used and modified simple names. + * + * See MemberReferenceInvalidationStrategy for some more information. 
+ */ +private final class IncrementalNameHashing(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { + + private val memberRefInvalidator = new MemberRefInvalidator(log) + + // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error + // This might be too conservative: we probably only need package objects for packages of invalidated sources. + override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = + invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" } + + override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { + if (SameAPI(a,b)) + None + else { + val aNameHashes = a._internalOnly_nameHashes + val bNameHashes = b._internalOnly_nameHashes + val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes) + val apiChange = NamesChange(src, modifiedNames) + Some(apiChange) + } + } + + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified + val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange) + log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.") + // Propagate inheritance dependencies transitively. + // This differs from normal because we need the initial crossing from externals to sources in this project. 
+ val externalInheritanceR = relations.inheritance.external + val byExternalInheritance = externalInheritanceR.reverse(modified) + log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).") + val transitiveInheritance = byExternalInheritance flatMap { file => + invalidateByInheritance(relations, file) + } + val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal, + relations.names, externalAPIChange) + val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external, + relations.names, externalAPIChange) + + // Get the member reference dependencies of all sources transitively invalidated by inheritance + log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.") + val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal + // Get the sources that depend on externals by member reference. + // This includes non-inheritance dependencies and is not transitive. 
+ log.debug(s"Getting sources that directly depend on (external) $modified.") + val memberRefB = memberRefInvalidationExternal(modified) + transitiveInheritance ++ memberRefA ++ memberRefB + } + + private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = { + val inheritanceDeps = relations.inheritance.internal.reverse _ + log.debug(s"Invalidating (transitively) by inheritance from $modified...") + val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps) + log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance) + transitiveInheritance + } + + override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + log.debug(s"Invalidating ${change.modified}...") + val transitiveInheritance = invalidateByInheritance(relations, change.modified) + val reasonForInvalidation = memberRefInvalidator.invalidationReason(change) + log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.") + val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal, + relations.names, change) + val memberRef = transitiveInheritance flatMap memberRefInvalidation + val all = transitiveInheritance ++ memberRef + all + } + + override protected def allDeps(relations: Relations): File => Set[File] = + f => relations.memberRef.internal.reverse(f) + +} diff --git a/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala b/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala new file mode 100644 index 000000000..22537c78d --- /dev/null +++ b/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala @@ -0,0 +1,124 @@ +package sbt.inc + +import sbt.Relation +import java.io.File +import sbt.Logger +import xsbt.api.APIUtil + +/** + * Implements various strategies for invalidating dependencies introduced by member reference. 
+ * + * The strategy is represented as function T => Set[File] where T is a source file that other + * source files depend on. When you apply that function to given element `src` you get set of + * files that depend on `src` by member reference and should be invalidated due to api change + * that was passed to a method constructing that function. There are two questions that arise: + * + * 1. Why is signature T => Set[File] and not T => Set[T] or File => Set[File]? + * 2. Why would we apply that function to any other `src` that then one that got modified + * and the modification is described by APIChange? + * + * Let's address the second question with the following example of source code structure: + * + * // A.scala + * class A + * + * // B.scala + * class B extends A + * + * // C.scala + * class C { def foo(a: A) = ??? } + * + * // D.scala + * class D { def bar(b: B) = ??? } + * + * Member reference dependencies on A.scala are B.scala, C.scala. When the api of A changes + * then we would consider B and C for invalidation. However, B is also a dependency by inheritance + * so we always invalidate it. The api change to A is relevant when B is considered (because + * of how inheritance works) so we would invalidate B by inheritance and then we would like to + * invalidate member reference dependencies of B as well. In other words, we have a function + * because we want to apply it (with the same api change in mind) to all src files invalidated + * by inheritance of the originally modified file. + * + * The first question is a bit more straightforward to answer. We always invalidate internal + * source files (in given project) that are represented as File but they might depend either on + * internal source files (then T=File) or they can depend on external class name (then T=String). + * + * The specific invalidation strategy is determined based on APIChange that describes a change to api + * of a single source file. 
+ * + * For example, if we get APIChangeDueToMacroDefinition then we invalidate all member reference + * dependencies unconditionally. On the other hand, if api change is due to modified name hashes + * of regular members then we'll invalidate sources that use those names. + */ +private[inc] class MemberRefInvalidator(log: Logger) { + def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]): + T => Set[File] = apiChange match { + case _: APIChangeDueToMacroDefinition[_] => + new InvalidateUnconditionally(memberRef) + case NamesChange(_, modifiedNames) if !modifiedNames.implicitNames.isEmpty => + new InvalidateUnconditionally(memberRef) + case NamesChange(modifiedSrcFile, modifiedNames) => + new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames) + case _: SourceAPIChange[_] => + sys.error(wrongAPIChangeMsg) + } + + def invalidationReason(apiChange: APIChange[_]): String = apiChange match { + case APIChangeDueToMacroDefinition(modifiedSrcFile) => + s"The $modifiedSrcFile source file declares a macro." + case NamesChange(modifiedSrcFile, modifiedNames) if !modifiedNames.implicitNames.isEmpty => + s"""|The $modifiedSrcFile source file has the following implicit definitions changed: + |\t${modifiedNames.implicitNames.mkString(", ")}.""".stripMargin + case NamesChange(modifiedSrcFile, modifiedNames) => + s"""|The $modifiedSrcFile source file has the following regular definitions changed: + |\t${modifiedNames.regularNames.mkString(", ")}.""".stripMargin + case _: SourceAPIChange[_] => + sys.error(wrongAPIChangeMsg) + } + + private val wrongAPIChangeMsg = + "MemberReferenceInvalidator.get should be called when name hashing is enabled " + + "and in that case we shouldn't have SourceAPIChange as an api change." 
+ + private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) { + def apply(from: T): Set[File] = { + val invalidated = memberRef.reverse(from) + if (!invalidated.isEmpty) + log.debug(s"The following member ref dependencies of $from are invalidated:\n" + + formatInvalidated(invalidated)) + invalidated + } + private def formatInvalidated(invalidated: Set[File]): String = { + val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath) + sortedFiles.map(file => "\t"+file).mkString("\n") + } + } + + private class NameHashFilteredInvalidator[T]( + usedNames: Relation[File, String], + memberRef: Relation[File, T], + modifiedNames: Set[String]) extends (T => Set[File]) { + + def apply(to: T): Set[File] = { + val dependent = memberRef.reverse(to) + filteredDependencies(dependent) + } + private def filteredDependencies(dependent: Set[File]): Set[File] = { + dependent.filter { + case from if APIUtil.isScalaSourceName(from.getName) => + val usedNamesInDependent = usedNames.forward(from) + val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent + if (modifiedAndUsedNames.isEmpty) { + log.debug(s"None of the modified names appears in $from. 
This dependency is not being considered for invalidation.") + false + } else { + log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames") + true + } + case from => + log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from") + true + } + } + } +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala new file mode 100644 index 000000000..1d3a976a8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala @@ -0,0 +1,3 @@ +object A { + def `=` = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala new file mode 100644 index 000000000..7cbd62e1d --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala @@ -0,0 +1,3 @@ +object B extends App { + println(A.`=`) +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala new file mode 100644 index 000000000..b473714fa --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala @@ -0,0 +1,3 @@ +object A { + def asdf = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test new file mode 100644 index 000000000..d4d386615 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test @@ -0,0 +1,7 @@ +> compile + 
+# rename def with symbolic name (`=`) +$ copy-file changes/A.scala A.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala b/sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala new file mode 100644 index 000000000..d91afb5ca --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala @@ -0,0 +1,8 @@ +object A { + def x = 3 + + def y = { + import B._ + x + } +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala b/sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala new file mode 100644 index 000000000..5e34efa4d --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala @@ -0,0 +1,3 @@ +object B { +// def x = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt b/sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala b/sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala new file mode 100644 index 000000000..4bf188fb2 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala @@ -0,0 +1,3 @@ +object B { + def x = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/test b/sbt/src/sbt-test/source-dependencies/same-file-used-names/test new file mode 100644 index 000000000..781b4aafb --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/test @@ -0,0 +1,7 @@ +> compile + +# uncomment definition of `x` that leads to ambiguity error in A +$ copy-file changes/B.scala B.scala + +# Both A.scala and B.scala should be 
recompiled, producing a compile error +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt index ef32473dc..de908146c 100644 --- a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt @@ -1,5 +1,7 @@ logLevel := Level.Debug +incOptions := incOptions.value.withNameHashing(true) + // disable sbt's heauristic which recompiles everything in case // some fraction (e.g. 50%) of files is scheduled to be recompiled // in this test we want precise information about recompiled files @@ -24,13 +26,13 @@ TaskKey[Unit]("check-compilations") <<= (compile in Compile, scalaSource in Comp assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) } // Y.scala is compiled only at the beginning as changes to A.scala do not affect it - recompiledFilesInIteration(0, Set("Y.scala")) + recompiledFilesInIteration(0, Set("X.scala", "Y.scala")) // A.scala is changed and recompiled recompiledFilesInIteration(1, Set("A.scala")) // change in A.scala causes recompilation of B.scala, C.scala, D.scala which depend on transtiviely // and by inheritance on A.scala // X.scala is also recompiled because it depends by member reference on B.scala // Note that Y.scala is not recompiled because it depends just on X through member reference dependency - recompiledFilesInIteration(2, Set("B.scala", "C.scala", "D.scala", "X.scala")) + recompiledFilesInIteration(2, Set("B.scala", "C.scala", "D.scala")) assert(allCompilations.size == 3) } diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/A.scala b/sbt/src/sbt-test/source-dependencies/type-alias/A.scala new file mode 100644 index 000000000..c0c8794a7 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/A.scala @@ -0,0 +1,4 @@ +object A { + type X = Option[Int] +} + diff --git 
a/sbt/src/sbt-test/source-dependencies/type-alias/B.scala b/sbt/src/sbt-test/source-dependencies/type-alias/B.scala new file mode 100644 index 000000000..81640ed8d --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/B.scala @@ -0,0 +1,3 @@ +object B { + def y: A.X = Option(3) +} diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/build.sbt b/sbt/src/sbt-test/source-dependencies/type-alias/build.sbt new file mode 100644 index 000000000..c5a1099aa --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/build.sbt @@ -0,0 +1,3 @@ +logLevel in compile := Level.Debug + +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala b/sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala new file mode 100644 index 000000000..53aee1626 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala @@ -0,0 +1,3 @@ +object A { + type X = Int +} diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/test b/sbt/src/sbt-test/source-dependencies/type-alias/test new file mode 100644 index 000000000..f0a7fe8a1 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/test @@ -0,0 +1,7 @@ +> compile + +# change type alias +$ copy-file changes/A.scala A.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile From cd6b2a2a8cd4903848c815660025b2472160bbc4 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Sun, 15 Dec 2013 23:46:07 +0100 Subject: [PATCH 017/148] Mark tests that regress when name hashing is enabled. There are number of scripted tests that fail if we switch to name hashing being enabled by default. There's no easy way to mark those tests as pending only when name hashing flag is enabled so I decided to "mark" them by copying those tests, enabling name hashing in each of them and mark those copies as pending. 
Here's explanation of each failing test: * `constants` and `java-static` fail due to typer inlining constants so we can't track dependencies properly (see SI-7173) * `macro` fails for similar reasons as above: typer expands macros and we can't track dependencies properly * `struct` fails because it turns out that we need to handle structural types in a special way both at declaration and use sites. At the moment we handle them explicitly at declaration site so `struct-usage` passes but `struct` fails --- .../constants-name-hashing/build.sbt | 1 + .../constants-name-hashing/changes/A1.scala | 1 + .../constants-name-hashing/changes/A2.scala | 1 + .../constants-name-hashing/changes/B.scala | 4 +++ .../constants-name-hashing/pending | 11 +++++++ .../java-static-name-hashing/build.sbt | 1 + .../java-static-name-hashing/changes/J1.java | 4 +++ .../java-static-name-hashing/changes/J2.java | 4 +++ .../java-static-name-hashing/changes/S.scala | 4 +++ .../java-static-name-hashing/pending | 24 +++++++++++++++ .../macro-client/Client.scala | 5 ++++ .../macro-provider/Provider.scala | 8 +++++ .../macro-provider/changes/Provider.scala | 8 +++++ .../macro-name-hashing/pending | 13 +++++++++ .../macro-name-hashing/project/build.scala | 29 +++++++++++++++++++ .../struct-name-hashing/A.scala | 3 ++ .../struct-name-hashing/B.scala | 4 +++ .../struct-name-hashing/C.scala | 4 +++ .../struct-name-hashing/build.sbt | 1 + .../struct-name-hashing/changes/A.scala | 3 ++ .../struct-name-hashing/pending | 6 ++++ 21 files changed, 139 insertions(+) create mode 100644 sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala create mode 100644 sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala create mode 100644 sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala create mode 100644 
sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending create mode 100644 sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java create mode 100644 sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java create mode 100644 sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala create mode 100644 sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending create mode 100644 sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending create mode 100644 sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala create mode 100644 sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala create mode 100644 sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git 
a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala new file mode 100644 index 000000000..f67b6f474 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala @@ -0,0 +1 @@ +object A { final val x = 1 } diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala new file mode 100644 index 000000000..4f9396f13 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala @@ -0,0 +1 @@ +object A { final val x = 2 } diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala new file mode 100644 index 000000000..058527993 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala @@ -0,0 +1,4 @@ +object B +{ + def main(args: Array[String]) = assert(args(0).toInt == A.x ) +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending new file mode 100644 index 000000000..61df26ef6 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending @@ -0,0 +1,11 @@ +# Tests if source dependencies are tracked properly +# for compile-time constants (like final vals in top-level objects) +# see https://issues.scala-lang.org/browse/SI-7173 for details +# why compile-time constants can be tricky to track due to early inlining + +$ copy-file changes/B.scala B.scala + +$ copy-file changes/A1.scala A.scala +> run 1 +$ copy-file changes/A2.scala A.scala +> run 2 diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt 
new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java new file mode 100644 index 000000000..a3a75fefd --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java @@ -0,0 +1,4 @@ +public class J +{ + public static final int x = 3; +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java new file mode 100644 index 000000000..8ff2e24c6 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java @@ -0,0 +1,4 @@ +public class J +{ + public static final String x = "3"; +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala new file mode 100644 index 000000000..45436972b --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala @@ -0,0 +1,4 @@ +object S +{ + val y: Int = J.x +} diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending new file mode 100644 index 000000000..42890ca74 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending @@ -0,0 +1,24 @@ +# When a Java class is loaded from a class file and not parsed from a source file, scalac reports +# the statics as an object without a file and so the Analyzer must know to look for the +# object's linked class. +# This test verifies this happens. 
+# The test compiles a Java class with a static field. +# It then adds a Scala object that references the static field. Because the object only depends on a +# static member and because the Java source is not included in the compilation (since it didn't change), +# this triggers the special case above. + +# add and compile the Java source +$ copy-file changes/J1.java src/main/java/J.java +> compile + +# add and compile the Scala source +$ copy-file changes/S.scala src/main/scala/S.scala +> compile + +# change the Java source so that a compile error should occur if S.scala is also recompiled (which will happen if the dependency was properly recorded) +$ copy-file changes/J2.java src/main/java/J.java +-> compile + +# verify it should have failed by doing a full recompilation +> clean +-> compile \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala new file mode 100644 index 000000000..90932d136 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala @@ -0,0 +1,5 @@ +package macro + +object Client { + Provider.tree(0) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala new file mode 100644 index 000000000..9b6d27676 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala @@ -0,0 +1,8 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def tree(args: Any) = macro treeImpl + def treeImpl(c: Context)(args: c.Expr[Any]) = c.universe.reify(args.splice) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala 
b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala new file mode 100644 index 000000000..711989b32 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala @@ -0,0 +1,8 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def tree(args: Any) = macro treeImpl + def treeImpl(c: Context)(args: c.Expr[Any]) = sys.error("no macro for you!") +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending new file mode 100644 index 000000000..b3755d4ee --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending @@ -0,0 +1,13 @@ +> compile + +# replace macro with one that throws an error + +$ copy-file macro-provider/changes/Provider.scala macro-provider/Provider.scala + +> macro-provider/compile + +-> macro-client/compile + +> clean + +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala new file mode 100644 index 000000000..a5382240f --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ), + incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + 
dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala new file mode 100644 index 000000000..d17a6e20a --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala @@ -0,0 +1,3 @@ +object A { + def x: Int = 3 +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala new file mode 100644 index 000000000..635568727 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala @@ -0,0 +1,4 @@ +object B { + def onX(m: { def x: Int } ) = + m.x +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala new file mode 100644 index 000000000..413cd6d63 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala @@ -0,0 +1,4 @@ +object C { + def main(args: Array[String]) = + println(B.onX(A)) +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala new file mode 100644 index 000000000..dc9bbd3c0 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala @@ -0,0 +1,3 @@ +object A { + def x: Byte = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending 
b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending new file mode 100644 index 000000000..8c7328ea4 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending @@ -0,0 +1,6 @@ +> compile + +# modify A.scala so that it does not conform to the structural type in B.scala +$ copy-file changes/A.scala A.scala + +-> compile \ No newline at end of file From 6cf79aba08da5145b1a85466305a23928a031ba2 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Sun, 15 Dec 2013 23:51:46 +0100 Subject: [PATCH 018/148] Mark test that passes when name hashing is enabled. There's one test that starts to pass when we enable name hashing. It's `import-class` which tests whether tracking of dependencies that arise from imports is properly tracked. The name hashing algorithm uses different dependency tracking compared to the old algorithm and the new dependency extraction logic does handle import tree nodes properly so the test passes. We "mark" the test passing by copying it and enabling the name hashing flag in it. This is done similarly as in 940f7ff46d. 
--- .../source-dependencies/import-class-name-hashing/A.scala | 3 +++ .../source-dependencies/import-class-name-hashing/B.scala | 1 + .../import-class-name-hashing/build.sbt | 1 + .../import-class-name-hashing/changes/A.scala | 1 + .../source-dependencies/import-class-name-hashing/test | 8 ++++++++ 5 files changed, 14 insertions(+) create mode 100644 sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala create mode 100644 sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala new file mode 100644 index 000000000..a93bbe535 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala @@ -0,0 +1,3 @@ +package a + +class A diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala new file mode 100644 index 000000000..0489f4a26 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala @@ -0,0 +1 @@ +import a.A diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala new file 
mode 100644 index 000000000..2a93cdef5 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala @@ -0,0 +1 @@ +package a diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test new file mode 100644 index 000000000..7679ba52c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test @@ -0,0 +1,8 @@ +> compile + +# remove class a.A +$ copy-file changes/A.scala A.scala + +# 'import a.A' should now fail in B.scala +# succeeds because scalac doesn't track this dependency +-> compile From f4940df48d0af3a068669b320f238dd7c25db451 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Mon, 6 Jan 2014 22:24:00 +0100 Subject: [PATCH 019/148] Make all APIChange subclasses final. They should have been final from the beginning. We are fixing that omission now. --- compile/inc/src/main/scala/sbt/inc/Changes.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Changes.scala b/compile/inc/src/main/scala/sbt/inc/Changes.scala index f1de55044..94bb1ec18 100644 --- a/compile/inc/src/main/scala/sbt/inc/Changes.scala +++ b/compile/inc/src/main/scala/sbt/inc/Changes.scala @@ -22,14 +22,14 @@ sealed abstract class APIChange[T](val modified: T) * api has changed. The reason is that there's no way to determine if changes to macros implementation * are affecting its users or not. Therefore we err on the side of caution. */ -case class APIChangeDueToMacroDefinition[T](modified0: T) extends APIChange(modified0) -case class SourceAPIChange[T](modified0: T) extends APIChange(modified0) +final case class APIChangeDueToMacroDefinition[T](modified0: T) extends APIChange(modified0) +final case class SourceAPIChange[T](modified0: T) extends APIChange(modified0) /** * An APIChange that carries information about modified names. 
* * This class is used only when name hashing algorithm is enabled. */ -case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) extends APIChange(modified0) +final case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) extends APIChange(modified0) /** * ModifiedNames are determined by comparing name hashes in two versions of an API representation. @@ -39,7 +39,7 @@ case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) extends AP * on whether modified name is implicit or not. Implicit names are much more difficult to handle * due to difficulty of reasoning about the implicit scope. */ -case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) { +final case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) { override def toString: String = s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})" } From 2e1809e17eda54e3ae4fe0e94c68a2af0a221b44 Mon Sep 17 00:00:00 2001 From: Benjy Date: Tue, 7 Jan 2014 22:39:47 +0000 Subject: [PATCH 020/148] Make analysis file portable. Serializes CompileSetup as text instead of base64-encoded binary-serialized object. This is necessary so that file paths in the CompileSetup can be rebased when porting analysis files between systems. 
--- .../inc/src/main/scala/sbt/CompileSetup.scala | 6 +- .../scala/sbt/inc/TextAnalysisFormat.scala | 184 ++++++++++++------ 2 files changed, 127 insertions(+), 63 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/CompileSetup.scala b/compile/inc/src/main/scala/sbt/CompileSetup.scala index c96cee680..59e9e2975 100644 --- a/compile/inc/src/main/scala/sbt/CompileSetup.scala +++ b/compile/inc/src/main/scala/sbt/CompileSetup.scala @@ -27,12 +27,14 @@ object CompileSetup def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile } implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] { + implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => og.sourceDirectory) def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match { case (m1: MultipleOutput, m2: MultipleOutput) => - m1.outputGroups zip (m2.outputGroups) forall { + (m1.outputGroups.length == m2.outputGroups.length) && + (m1.outputGroups.sorted zip m2.outputGroups.sorted forall { case (a,b) => equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory) - } + }) case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory) case _ => false } diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index 8a754f596..59e432493 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -4,6 +4,7 @@ package inc import java.io._ import sbt.{CompileSetup, Relation} import xsbti.api.{Compilation, Source} +import xsbti.compile.{MultipleOutput, SingleOutput} import javax.xml.bind.DatatypeConverter @@ -55,7 +56,7 @@ object TextAnalysisFormat { implicit val compilationF = xsbt.api.CompilationFormat def write(out: Writer, analysis: Analysis, setup: CompileSetup) { - VersionF.write(out) + VersionF.write(out) 
// We start with relations because that's the part of greatest interest to external readers, // who can abort reading early once they're read them. FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) } @@ -68,7 +69,7 @@ object TextAnalysisFormat { } def read(in: BufferedReader): (Analysis, CompileSetup) = { - VersionF.read(in) + VersionF.read(in) val relations = FormatTimer.time("read relations") { RelationsF.read(in) } val stamps = FormatTimer.time("read stamps") { StampsF.read(in) } val apis = FormatTimer.time("read apis") { APIsF.read(in) } @@ -80,27 +81,27 @@ object TextAnalysisFormat { } private[this] object VersionF { - val currentVersion = "2" + val currentVersion = "4" - def write(out: Writer) { - out.write("format version: %s\n".format(currentVersion)) - } + def write(out: Writer) { + out.write("format version: %s\n".format(currentVersion)) + } - private val versionPattern = """format version: (\w+)""".r - def read(in: BufferedReader) { - in.readLine() match { - case versionPattern(version) => validateVersion(version) - case s: String => throw new ReadException("\"format version: \"", s) - case null => throw new EOFException - } - } + private val versionPattern = """format version: (\w+)""".r + def read(in: BufferedReader) { + in.readLine() match { + case versionPattern(version) => validateVersion(version) + case s: String => throw new ReadException("\"format version: \"", s) + case null => throw new EOFException + } + } - def validateVersion(version: String) { - // TODO: Support backwards compatibility? - if (version != currentVersion) { - throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion)) - } - } + def validateVersion(version: String) { + // TODO: Support backwards compatibility? 
+ if (version != currentVersion) { + throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion)) + } + } } private[this] object RelationsF { @@ -128,8 +129,8 @@ object TextAnalysisFormat { // We sort for ease of debugging and for more efficient reconstruction when reading. // Note that we don't share code with writeMap. Each is implemented more efficiently // than the shared code would be, and the difference is measurable on large analyses. - rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => - val kStr = k.toString + rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => + val kStr = k.toString vs.toSeq.sorted foreach { v => out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n") } @@ -137,8 +138,8 @@ object TextAnalysisFormat { } val nameHashing = relations.nameHashing - writeRelation(Headers.srcProd, relations.srcProd) - writeRelation(Headers.binaryDep, relations.binaryDep) + writeRelation(Headers.srcProd, relations.srcProd) + writeRelation(Headers.binaryDep, relations.binaryDep) val direct = if (nameHashing) Relations.emptySource else relations.direct val publicInherited = if (nameHashing) @@ -160,7 +161,7 @@ object TextAnalysisFormat { writeRelation(Headers.inheritanceInternalDep, inheritance.internal) writeRelation(Headers.inheritanceExternalDep, inheritance.external) - writeRelation(Headers.classes, relations.classes) + writeRelation(Headers.classes, relations.classes) writeRelation(Headers.usedNames, names) } @@ -188,19 +189,19 @@ object TextAnalysisFormat { def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) }) def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String]) - val srcProd = readFileRelation(Headers.srcProd) - val binaryDep = readFileRelation(Headers.binaryDep) + val srcProd = readFileRelation(Headers.srcProd) + val binaryDep = readFileRelation(Headers.binaryDep) import 
sbt.inc.Relations.{Source, SourceDependencies, makeSourceDependencies, emptySource, makeSource, emptySourceDependencies} val directSrcDeps: Source = { - val internalSrcDep = readFileRelation(Headers.directSrcDep) - val externalDep = readStringRelation(Headers.directExternalDep) + val internalSrcDep = readFileRelation(Headers.directSrcDep) + val externalDep = readStringRelation(Headers.directExternalDep) makeSource(internalSrcDep, externalDep) } val publicInheritedSrcDeps: Source = { val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI) - val externalDepPI = readStringRelation(Headers.externalDepPI) + val externalDepPI = readStringRelation(Headers.externalDepPI) makeSource(internalSrcDepPI, externalDepPI) } val memberRefSrcDeps: SourceDependencies = { @@ -218,7 +219,7 @@ object TextAnalysisFormat { assert((directSrcDeps == emptySource) || (memberRefSrcDeps == emptySourceDependencies), "One mechanism is supported for tracking source dependencies at the time") val nameHashing = memberRefSrcDeps != emptySourceDependencies - val classes = readStringRelation(Headers.classes) + val classes = readStringRelation(Headers.classes) val names = readStringRelation(Headers.usedNames) if (nameHashing) @@ -250,9 +251,9 @@ object TextAnalysisFormat { def read(in: BufferedReader): Stamps = { def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v) - val products = doReadMap(Headers.products, Stamp.fromString) - val sources = doReadMap(Headers.sources, Stamp.fromString) - val binaries = doReadMap(Headers.binaries, Stamp.fromString) + val products = doReadMap(Headers.products, Stamp.fromString) + val sources = doReadMap(Headers.sources, Stamp.fromString) + val binaries = doReadMap(Headers.binaries, Stamp.fromString) val classNames = doReadMap(Headers.classNames, identity[String]) Stamps(products, sources, binaries, classNames) @@ -260,10 +261,10 @@ object TextAnalysisFormat { } private[this] object APIsF { - object Headers { 
- val internal = "internal apis" - val external = "external apis" - } + object Headers { + val internal = "internal apis" + val external = "external apis" + } val stringToSource = ObjectStringifier.stringToObj[Source] _ val sourceToString = ObjectStringifier.objToString[Source] _ @@ -286,9 +287,9 @@ object TextAnalysisFormat { } private[this] object SourceInfosF { - object Headers { - val infos = "source infos" - } + object Headers { + val infos = "source infos" + } val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _ val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _ @@ -298,31 +299,79 @@ object TextAnalysisFormat { } private[this] object CompilationsF { - object Headers { - val compilations = "compilations" - } + object Headers { + val compilations = "compilations" + } val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _ val compilationToString = ObjectStringifier.objToString[Compilation] _ def write(out: Writer, compilations: Compilations) { - def toMapEntry(x: (Compilation, Int)): (String, Compilation) = "%03d".format(x._2) -> x._1 - writeMap(out)(Headers.compilations, compilations.allCompilations.zipWithIndex.map(toMapEntry).toMap, compilationToString, inlineVals=false) + writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString) } - def read(in: BufferedReader): Compilations = - Compilations.make(readMap(in)(Headers.compilations, identity[String], stringToCompilation).values.toSeq) + + def read(in: BufferedReader): Compilations = Compilations.make( + readSeq[Compilation](in)(Headers.compilations, stringToCompilation)) } private[this] object CompileSetupF { - object Headers { - val setup = "compile setup" - } + object Headers { + val outputMode = "output mode" + val outputDir = "output directories" + val compileOptions = "compile options" + val javacOptions = "javac options" + val compilerVersion = "compiler version" + val compileOrder = "compile order" + } - val stringToSetup = 
ObjectStringifier.stringToObj[CompileSetup] _ - val setupToString = ObjectStringifier.objToString[CompileSetup] _ + private[this] val singleOutputMode = "single" + private[this] val multipleOutputMode = "multiple" + private[this] val singleOutputKey = new File("output dir") - def write(out: Writer, setup: CompileSetup) { writeMap(out)(Headers.setup, Map("1" -> setup), setupToString, inlineVals=false)} - def read(in: BufferedReader): CompileSetup = readMap(in)(Headers.setup, identity[String], stringToSetup).head._2 + def write(out: Writer, setup: CompileSetup) { + val (mode, outputAsMap) = setup.output match { + case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory)) + case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap) + } + + writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String]) + writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath }) + writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String]) + writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String]) + writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String]) + writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String]) + } + + def read(in: BufferedReader): CompileSetup = { + def s2f(s: String) = new File(s) + val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption + val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f) + val compileOptions = readSeq(in)(Headers.compileOptions, identity[String]) + val javacOptions = readSeq(in)(Headers.javacOptions, identity[String]) + val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head + val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head + + val output = outputDirMode match { + case Some(s) => s match { + case `singleOutputMode` => new SingleOutput { + val outputDirectory = 
outputAsMap(singleOutputKey) + } + case `multipleOutputMode` => new MultipleOutput { + val outputGroups = outputAsMap.toArray.map { + case (src: File, out: File) => new MultipleOutput.OutputGroup { + val sourceDirectory = src + val outputDirectory = out + } + } + } + case str: String => throw new ReadException("Unrecognized output mode: " + str) + } + case None => throw new ReadException("No output mode specified") + } + + new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion, + xsbti.compile.CompileOrder.valueOf(compileOrder)) + } } private[this] object ObjectStringifier { @@ -348,8 +397,8 @@ object TextAnalysisFormat { } private[this] def expectHeader(in: BufferedReader, expectedHeader: String) { - val header = in.readLine() - if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header) + val header = in.readLine() + if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header) } private[this] def writeSize(out: Writer, n: Int) { @@ -361,10 +410,23 @@ object TextAnalysisFormat { in.readLine() match { case itemsPattern(nStr) => Integer.parseInt(nStr) case s: String => throw new ReadException("\" items\"", s) - case null => throw new EOFException + case null => throw new EOFException } } + private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String) { + // We write sequences as idx -> element maps, for uniformity with maps/relations. + def n = s.length + val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1 + val fmtStr = "%%0%dd".format(numDigits) + // We only use this for relatively short seqs, so creating this extra map won't be a performance hit. 
+ val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap + writeMap(out)(header, m, t2s) + } + + private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] = + (readPairs(in)(expectedHeader, identity[String], s2t) map(_._2)).toSeq + private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean=true)(implicit ord: Ordering[K]) { writeHeader(out, header) writeSize(out, m.size) @@ -379,7 +441,7 @@ object TextAnalysisFormat { private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = { def toPair(s: String): (K, V) = { - if (s == null) throw new EOFException + if (s == null) throw new EOFException val p = s.indexOf(" -> ") val k = s2k(s.substring(0, p)) // Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob. @@ -387,8 +449,8 @@ object TextAnalysisFormat { (k, v) } expectHeader(in, expectedHeader) - val n = readSize(in) - for (i <- 0 until n) yield toPair(in.readLine()) + val n = readSize(in) + for (i <- 0 until n) yield toPair(in.readLine()) } private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = { From b9489c6035f32173f12fab043e297f8b42c549c9 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 7 Jan 2014 21:42:24 -0500 Subject: [PATCH 021/148] Fix compilation failure due to missing type annotation. 
--- compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index 59e432493..a23a87725 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -357,7 +357,7 @@ object TextAnalysisFormat { val outputDirectory = outputAsMap(singleOutputKey) } case `multipleOutputMode` => new MultipleOutput { - val outputGroups = outputAsMap.toArray.map { + val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map { case (src: File, out: File) => new MultipleOutput.OutputGroup { val sourceDirectory = src val outputDirectory = out From dcb327e6f4dc0e5bc096bba691d059b991513bf4 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 8 Jan 2014 22:50:54 +0100 Subject: [PATCH 022/148] Do not compute name hashes when name hashing is disabled We should compute name hashes only when name hashing is enabled. Otherwise, we just store an empty value for name hashes. 
--- compile/inc/src/main/scala/sbt/inc/Compile.scala | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Compile.scala b/compile/inc/src/main/scala/sbt/inc/Compile.scala index edf714f06..925e8fd3d 100644 --- a/compile/inc/src/main/scala/sbt/inc/Compile.scala +++ b/compile/inc/src/main/scala/sbt/inc/Compile.scala @@ -146,10 +146,18 @@ private final class AnalysisCallback(internalMap: File => Option[File], external classToSource.put(module, source) } + // empty value used when name hashing algorithm is disabled + private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty) + def api(sourceFile: File, source: SourceAPI) { import xsbt.api.{APIUtil, HashAPI} if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile - publicNameHashes(sourceFile) = (new NameHashing).nameHashes(source) + publicNameHashes(sourceFile) = { + if (nameHashing) + (new NameHashing).nameHashes(source) + else + emptyNameHashes + } val shouldMinimize = !Incremental.apiDebug(options) val savedSource = if (shouldMinimize) APIUtil.minimize(source) else source apis(sourceFile) = (HashAPI(source), savedSource) From e308cc31c71309f96698b7e36abfebd7fbc8aaf5 Mon Sep 17 00:00:00 2001 From: sam Date: Mon, 6 Jan 2014 20:06:54 +0000 Subject: [PATCH 023/148] Added sbt-scoverage to list of plugins. 
--- src/sphinx/Community/Community-Plugins.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index e7b0b6e5c..433943d6a 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -278,6 +278,7 @@ Code coverage plugins ~~~~~~~~~~~~~~~~~~~~~ - sbt-scct: https://github.com/dvc94ch/sbt-scct +- sbt-scoverage: https://github.com/scoverage/sbt-scoverage - jacoco4sbt: https://github.com/sbt/jacoco4sbt - xsbt-coveralls-plugin: https://github.com/theon/xsbt-coveralls-plugin From 0099b4ab551794c20bbbf43d65678a3eef6388a4 Mon Sep 17 00:00:00 2001 From: Jens Halm Date: Thu, 9 Jan 2014 00:52:15 +0100 Subject: [PATCH 024/148] add Laika documentation plugin to the list of community plugins --- src/sphinx/Community/Community-Plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index 433943d6a..ef0040e19 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -228,6 +228,8 @@ Documentation plugins Textile, to HTML): http://software.clapper.org/sbt-lwm/ - sbt-site (Site generation for SBT): https://github.com/sbt/sbt-site +- Laika (Template-based site generation, Markdown, reStructuredText, + no external tools): http://planet42.github.io/Laika/ - literator-plugin (Converts sources into markdown documents): https://github.com/laughedelic/literator From 9e7e93c632b42882daa79a603bbcb8f01a256638 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Mon, 13 Jan 2014 16:29:16 +0100 Subject: [PATCH 025/148] Bring back and deprecate `Incremental.incDebugProp`. The ae15eccd9c7aea2b4336ea454d974aed66d5ec16 accidentally removed `Incremental.incDebugProp` which broke Scala IDE build that relies on it. 
We bring back that val but at the same time we deprecate it because we have better mechanism for configuring incremental compiler now. I also added a little comment with the history of `incDebugProp` which explains proper migration path. --- compile/inc/src/main/scala/sbt/inc/Incremental.scala | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index ee4352787..f64c284c5 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -40,6 +40,13 @@ object Incremental (!initialInv.isEmpty, analysis) } + // the name of system property that was meant to enable debugging mode of incremental compiler but + // it ended up being used just to enable debugging of relations. That's why if you migrate to new + // API for configuring incremental compiler (IncOptions) it's enough to control value of `relationsDebug` + // flag to achieve the same effect as using `incDebugProp`. + @deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2") + val incDebugProp = "xsbt.inc.debug" + private[inc] val apiDebugProp = "xsbt.api.debug" private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) @@ -68,8 +75,7 @@ object Incremental private abstract class IncrementalCommon(log: Logger, options: IncOptions) { - val incDebugProp = "xsbt.inc.debug" - private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(incDebugProp) + private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp) // setting the related system property to true will skip checking that the class name // still comes from the same classpath entry. 
This can workaround bugs in classpath construction, From 2abe7574df937278e4ae271a37c710288d71a2f9 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 14 Jan 2014 08:09:55 -0500 Subject: [PATCH 026/148] Fixes to the internal API hooks for the sbt server. * Alter the TaskProgress listener key to be `State => TaskProgress` so it can be instantiated from the current server/sbt state. * Expose the xsbti.Reporter interface for compilation through to sbt builds. --- .../actions/src/main/scala/sbt/Compiler.scala | 19 ++++++++++------ main/src/main/scala/sbt/Aggregation.scala | 4 ++-- main/src/main/scala/sbt/Defaults.scala | 12 ++++++---- main/src/main/scala/sbt/EvaluateTask.scala | 22 ++++++++++++++----- main/src/main/scala/sbt/Keys.scala | 5 ++++- 5 files changed, 42 insertions(+), 20 deletions(-) diff --git a/main/actions/src/main/scala/sbt/Compiler.scala b/main/actions/src/main/scala/sbt/Compiler.scala index 0d0a33c49..459636f8e 100644 --- a/main/actions/src/main/scala/sbt/Compiler.scala +++ b/main/actions/src/main/scala/sbt/Compiler.scala @@ -58,16 +58,21 @@ object Compiler val provider = ComponentCompiler.interfaceProvider(componentManager) new AnalyzingCompiler(instance, provider, cpOptions, log) } - - def apply(in: Inputs, log: Logger): Analysis = + def apply(in: Inputs, log: Logger): Analysis = { - import in.compilers._ - import in.config._ - import in.incSetup._ - + import in.compilers._ + import in.config._ + import in.incSetup._ + apply(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper)) + } + def apply(in: Inputs, log: Logger, reporter: xsbti.Reporter): Analysis = + { + import in.compilers._ + import in.config._ + import in.incSetup._ val agg = new AggressiveCompile(cacheFile) agg(scalac, javac, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions, - analysisMap, definesClass, new LoggerReporter(maxErrors, log, sourcePositionMapper), order, skip, incOptions)(log) + analysisMap, definesClass, reporter, order, 
skip, incOptions)(log) } private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) = diff --git a/main/src/main/scala/sbt/Aggregation.scala b/main/src/main/scala/sbt/Aggregation.scala index 81a9d5494..57b7c8c4f 100644 --- a/main/src/main/scala/sbt/Aggregation.scala +++ b/main/src/main/scala/sbt/Aggregation.scala @@ -59,7 +59,7 @@ final object Aggregation import extracted.structure val toRun = ts map { case KeyValue(k,t) => t.map(v => KeyValue(k,v)) } join; val roots = ts map { case KeyValue(k,_) => k } - val config = extractedConfig(extracted, structure) + val config = extractedConfig(extracted, structure, s) val start = System.currentTimeMillis val (newS, result) = withStreams(structure, s){ str => @@ -211,4 +211,4 @@ final object Aggregation @deprecated("Use BuildUtil.aggregationRelation", "0.13.0") def relation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = BuildUtil.aggregationRelation(units) -} \ No newline at end of file +} diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 8283b5b54..9310c44f7 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -732,13 +732,16 @@ object Defaults extends BuildCommon @deprecated("Use inTask(compile)(compileInputsSettings)", "0.13.0") def compileTaskSettings: Seq[Setting[_]] = inTask(compile)(compileInputsSettings) - def compileTask: Initialize[Task[inc.Analysis]] = Def.task { compileTaskImpl(streams.value, (compileInputs in compile).value) } - private[this] def compileTaskImpl(s: TaskStreams, ci: Compiler.Inputs): inc.Analysis = + def compileTask: Initialize[Task[inc.Analysis]] = Def.task { compileTaskImpl(streams.value, (compileInputs in compile).value, (compilerReporter in compile).value) } + private[this] def compileTaskImpl(s: TaskStreams, ci: Compiler.Inputs, reporter: Option[xsbti.Reporter]): inc.Analysis = { lazy val x = s.text(ExportStream) def onArgs(cs: Compiler.Compilers) = cs.copy(scalac = 
cs.scalac.onArgs(exported(x, "scalac")), javac = cs.javac.onArgs(exported(x, "javac"))) val i = ci.copy(compilers = onArgs(ci.compilers)) - try Compiler(i,s.log) + try reporter match { + case Some(reporter) => Compiler(i, s.log, reporter) + case None => Compiler(i, s.log) + } finally x.close() // workaround for #937 } def compileIncSetupTask = @@ -749,7 +752,8 @@ object Defaults extends BuildCommon Seq(compileInputs := { val cp = classDirectory.value +: data(dependencyClasspath.value) Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value, maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log) - }) + }, + compilerReporter := None) def printWarningsTask: Initialize[Task[Unit]] = (streams, compile, maxErrors, sourcePositionMappers) map { (s, analysis, max, spms) => diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala index 66192feb2..4c8fe8756 100644 --- a/main/src/main/scala/sbt/EvaluateTask.scala +++ b/main/src/main/scala/sbt/EvaluateTask.scala @@ -44,18 +44,25 @@ object EvaluateTask def defaultConfig(state: State): EvaluateConfig = { val extracted = Project.extract(state) - defaultConfig(extracted, extracted.structure) + extractedConfig(extracted, extracted.structure, state) } @deprecated("Use extractedConfig.", "0.13.0") def defaultConfig(extracted: Extracted, structure: BuildStructure) = - EvaluateConfig(false, restrictions(extracted, structure), progress = executeProgress(extracted, structure)) + EvaluateConfig(false, restrictions(extracted, structure), progress = defaultProgress) + @deprecated("Use other extractedConfig", "0.13.2") def extractedConfig(extracted: Extracted, structure: BuildStructure): EvaluateConfig = { val workers = restrictions(extracted, structure) val canCancel = cancelable(extracted, structure) - val progress = executeProgress(extracted, structure) + EvaluateConfig(cancelable = 
canCancel, restrictions = workers, progress = defaultProgress) + } + def extractedConfig(extracted: Extracted, structure: BuildStructure, state: State): EvaluateConfig = + { + val workers = restrictions(extracted, structure) + val canCancel = cancelable(extracted, structure) + val progress = executeProgress(extracted, structure, state) EvaluateConfig(cancelable = canCancel, restrictions = workers, progress = progress) } @@ -78,8 +85,11 @@ object EvaluateTask def cancelable(extracted: Extracted, structure: BuildStructure): Boolean = getSetting(Keys.cancelable, false, extracted, structure) - private[sbt] def executeProgress(extracted: Extracted, structure: BuildStructure): ExecuteProgress[Task] = - getSetting(Keys.executeProgress, new Keys.TaskProgress(defaultProgress), extracted, structure).progress + private[sbt] def executeProgress(extracted: Extracted, structure: BuildStructure, state: State): ExecuteProgress[Task] = { + import Types.const + val maker: State => Keys.TaskProgress = getSetting(Keys.executeProgress, const(new Keys.TaskProgress(defaultProgress)), extracted, structure) + maker(state).progress + } def getSetting[T](key: SettingKey[T], default: T, extracted: Extracted, structure: BuildStructure): T = key in extracted.currentRef get structure.data getOrElse default @@ -94,7 +104,7 @@ object EvaluateTask { val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root)) val pluginKey = pluginData - val config = extractedConfig(Project.extract(state), pluginDef) + val config = extractedConfig(Project.extract(state), pluginDef, state) val evaluated = apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config) val (newS, result) = evaluated getOrElse sys.error("Plugin data does not exist for plugin definition at " + pluginDef.root) Project.runUnloadHooks(newS) // discard states diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index c0ffdbd6d..17f64e0b3 100644 --- 
a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -344,8 +344,11 @@ object Keys // wrapper to work around SI-2915 private[sbt] final class TaskProgress(val progress: ExecuteProgress[Task]) - private[sbt] val executeProgress = SettingKey[TaskProgress]("executeProgress", "Experimental task execution listener.", DTask) + private[sbt] val executeProgress = SettingKey[State => TaskProgress]("executeProgress", "Experimental task execution listener.", DTask) + // Experimental in sbt 0.13.2 to enable grabing semantic compile failures. + private[sbt] val compilerReporter = TaskKey[Option[xsbti.Reporter]]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask) + val triggeredBy = Def.triggeredBy val runBefore = Def.runBefore From 0199a93ffbed2a89f8ce88be1ea04333c48320ec Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Fri, 17 Jan 2014 10:39:20 +0000 Subject: [PATCH 027/148] Expose overwrite flag in IvyActions.publish, and by default only overwrite if it's a snapshot --- ivy/src/main/scala/sbt/IvyActions.scala | 9 +++++---- main/src/main/scala/sbt/Defaults.scala | 10 +++++----- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/ivy/src/main/scala/sbt/IvyActions.scala b/ivy/src/main/scala/sbt/IvyActions.scala index f3ac22c82..65c45a6a7 100644 --- a/ivy/src/main/scala/sbt/IvyActions.scala +++ b/ivy/src/main/scala/sbt/IvyActions.scala @@ -16,7 +16,8 @@ import core.resolve.ResolveOptions import plugins.resolver.{BasicResolver, DependencyResolver} final class DeliverConfiguration(val deliverIvyPattern: String, val status: String, val configurations: Option[Seq[Configuration]], val logging: UpdateLogging.Value) -final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: Map[Artifact, File], val checksums: Seq[String], val logging: UpdateLogging.Value) +final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: 
Map[Artifact, File], val checksums: Seq[String], val logging: UpdateLogging.Value, + val overwrite: Boolean = false) final class UpdateConfiguration(val retrieve: Option[RetrieveConfiguration], val missingOk: Boolean, val logging: UpdateLogging.Value) final class RetrieveConfiguration(val retrieveDirectory: File, val outputPattern: String) @@ -86,11 +87,11 @@ object IvyActions import configuration._ module.withModule(log) { case (ivy, md, default) => val resolver = ivy.getSettings.getResolver(resolverName) - if(resolver eq null) error("Undefined resolver '" + resolverName + "'") + if(resolver eq null) sys.error("Undefined resolver '" + resolverName + "'") val ivyArtifact = ivyFile map { file => (MDArtifact.newIvyArtifact(md), file) } val cross = crossVersionMap(module.moduleSettings) - val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toList - withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = true) } + val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toSeq + withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = overwrite) } } } private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Seq[String])(act: => T): T = diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 9310c44f7..0a5595d57 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -1043,9 +1043,9 @@ object Classpaths makePomConfiguration := new MakePomConfiguration(artifactPath in makePom value, projectInfo.value, None, pomExtra.value, pomPostProcess.value, pomIncludeRepository.value, pomAllRepositories.value), deliverLocalConfiguration := deliverConfig(crossTarget.value, status = if (isSnapshot.value) "integration" else "release", logging = ivyLoggingLevel.value ), deliverConfiguration <<= deliverLocalConfiguration, - publishConfiguration := publishConfig(packagedArtifacts.in(publish).value, if(publishMavenStyle.value) None else 
Some(deliver.value), resolverName = getPublishTo(publishTo.value).name, checksums = checksums.in(publish).value, logging = ivyLoggingLevel.value), - publishLocalConfiguration := publishConfig(packagedArtifacts.in(publishLocal).value, Some(deliverLocal.value), checksums.in(publishLocal).value, logging = ivyLoggingLevel.value ), - publishM2Configuration := publishConfig(packagedArtifacts.in(publishM2).value, None, resolverName = Resolver.publishMavenLocal.name, checksums = checksums.in(publishM2).value, logging = ivyLoggingLevel.value), + publishConfiguration := publishConfig(packagedArtifacts.in(publish).value, if(publishMavenStyle.value) None else Some(deliver.value), resolverName = getPublishTo(publishTo.value).name, checksums = checksums.in(publish).value, logging = ivyLoggingLevel.value, overwrite = isSnapshot.value), + publishLocalConfiguration := publishConfig(packagedArtifacts.in(publishLocal).value, Some(deliverLocal.value), checksums.in(publishLocal).value, logging = ivyLoggingLevel.value, overwrite = isSnapshot.value), + publishM2Configuration := publishConfig(packagedArtifacts.in(publishM2).value, None, resolverName = Resolver.publishMavenLocal.name, checksums = checksums.in(publishM2).value, logging = ivyLoggingLevel.value, overwrite = isSnapshot.value), ivySbt <<= ivySbt0, ivyModule := { val is = ivySbt.value; new is.Module(moduleSettings.value) }, transitiveUpdate <<= transitiveUpdateTask, @@ -1213,8 +1213,8 @@ object Classpaths def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) = new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging) - def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) = - new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging) + def publishConfig(artifacts: Map[Artifact, 
File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly, overwrite: Boolean = false) = + new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging, overwrite) def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath From 034eb26af9a91074b4c54030678a3746d6819a41 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 20 Aug 2013 12:02:15 -0700 Subject: [PATCH 028/148] Scala 2.11 modules now under org.scala-lang.modules Using 1.0-RC2 for scala-xml, 1.0-RC1 for scala-parser-combinators. --- project/Util.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/project/Util.scala b/project/Util.scala index a5b0cbb4f..4d79d78a7 100644 --- a/project/Util.scala +++ b/project/Util.scala @@ -174,11 +174,13 @@ object Common lazy val sbinary = libraryDependencies <+= Util.nightly211(n => "org.scala-tools.sbinary" % "sbinary" % "0.4.2" cross(if(n) CrossVersion.full else CrossVersion.binary)) lazy val scalaCompiler = libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ ) lazy val testInterface = lib("org.scala-sbt" % "test-interface" % "1.0") - def libModular(name: String) = libraryDependencies <++= (scalaVersion, scalaOrganization)( (sv,o) => - if(sv.startsWith("2.11.")) (o % name % sv) :: Nil else Nil - ) - lazy val scalaXml = libModular("scala-xml") - lazy val scalaParsers = libModular("scala-parser-combinators") + private def scala211Module(name: String, moduleVersion: String) = + libraryDependencies <++= (scalaVersion)( scalaVersion => + if (scalaVersion startsWith "2.11.") ("org.scala-lang.modules" %% name % moduleVersion) :: Nil + else Nil + ) + lazy val scalaXml = scala211Module("scala-xml", "1.0-RC2") + lazy val scalaParsers = scala211Module("scala-parser-combinators", "1.0-RC1") } object Licensed { From ff0fd6eec658502f276f89c46f4aba0e0e268ddc Mon 
Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Sat, 18 Jan 2014 01:09:24 +0100 Subject: [PATCH 029/148] Update dependencies to Scala 2.11.0-M7. I had to sprinkle some excludes due to dependency of scala-compiler M7 on M6 modules. Those excludes won't be needed by the time M8 is out. --- project/Sbt.scala | 4 ++-- project/Util.scala | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 83a85259e..800ef4a5d 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -276,7 +276,7 @@ object Sbt extends Build artifact in (Compile, packageSrc) := Artifact(srcID).copy(configurations = Compile :: Nil).extra("e:component" -> srcID) ) def compilerSettings = Seq( - libraryDependencies <+= scalaVersion( "org.scala-lang" % "scala-compiler" % _ % "test"), + libraryDependencies <+= scalaVersion( "org.scala-lang" % "scala-compiler" % _ % "test" excludeAll(ExclusionRule(organization = "org.scala-lang.modules"))), unmanagedJars in Test <<= (packageSrc in compileInterfaceSub in Compile).map(x => Seq(x).classpath) ) def precompiled(scalav: String): Project = baseProject(compilePath / "interface", "Precompiled " + scalav.replace('.', '_')) dependsOn(interfaceSub) settings(precompiledSettings : _*) settings( @@ -290,6 +290,6 @@ object Sbt extends Build sources in Test := Nil ) def ioSettings: Seq[Setting[_]] = Seq( - libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test") + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test" excludeAll(ExclusionRule(organization = "org.scala-lang.modules"))) ) } diff --git a/project/Util.scala b/project/Util.scala index 4d79d78a7..61c58ab54 100644 --- a/project/Util.scala +++ b/project/Util.scala @@ -172,15 +172,16 @@ object Common lazy val httpclient = lib("commons-httpclient" % "commons-httpclient" % "3.1") lazy val jsch = lib("com.jcraft" % "jsch" % "0.1.46" intransitive() ) lazy val sbinary = libraryDependencies <+= 
Util.nightly211(n => "org.scala-tools.sbinary" % "sbinary" % "0.4.2" cross(if(n) CrossVersion.full else CrossVersion.binary)) - lazy val scalaCompiler = libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ ) + lazy val scalaCompiler = libraryDependencies <+= scalaVersion( + sv => "org.scala-lang" % "scala-compiler" % sv excludeAll(ExclusionRule(organization = "org.scala-lang.modules"))) lazy val testInterface = lib("org.scala-sbt" % "test-interface" % "1.0") private def scala211Module(name: String, moduleVersion: String) = libraryDependencies <++= (scalaVersion)( scalaVersion => if (scalaVersion startsWith "2.11.") ("org.scala-lang.modules" %% name % moduleVersion) :: Nil else Nil ) - lazy val scalaXml = scala211Module("scala-xml", "1.0-RC2") - lazy val scalaParsers = scala211Module("scala-parser-combinators", "1.0-RC1") + lazy val scalaXml = scala211Module("scala-xml", "1.0.0-RC7") + lazy val scalaParsers = scala211Module("scala-parser-combinators", "1.0.0-RC5") } object Licensed { From 45cc2fd111b4c54e8fef4181c5c0d2cf6dc1edd7 Mon Sep 17 00:00:00 2001 From: "Taro L. 
Saito" Date: Tue, 21 Jan 2014 22:15:11 +0900 Subject: [PATCH 030/148] Add link to sbt-sonatype plugin --- src/sphinx/Community/Community-Plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index ef0040e19..8886f7ece 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -158,6 +158,8 @@ Release plugins https://github.com/sbt/sbt-start-script - sbt-native-packager: https://github.com/sbt/sbt-native-packager +- sbt-sonatype-plugin (releases to Sonatype Nexus repository) + https://github.com/xerial/sbt-sonatype System plugins ~~~~~~~~~~~~~~ From ba4d57b12055e0180dec1d3ee9c56af414b640e3 Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Thu, 23 Jan 2014 12:07:37 +0000 Subject: [PATCH 031/148] Fix binary compatibility with 0.13 --- ivy/src/main/scala/sbt/IvyActions.scala | 5 ++++- main/src/main/scala/sbt/Defaults.scala | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/ivy/src/main/scala/sbt/IvyActions.scala b/ivy/src/main/scala/sbt/IvyActions.scala index 65c45a6a7..0ad3376d3 100644 --- a/ivy/src/main/scala/sbt/IvyActions.scala +++ b/ivy/src/main/scala/sbt/IvyActions.scala @@ -17,7 +17,10 @@ import plugins.resolver.{BasicResolver, DependencyResolver} final class DeliverConfiguration(val deliverIvyPattern: String, val status: String, val configurations: Option[Seq[Configuration]], val logging: UpdateLogging.Value) final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: Map[Artifact, File], val checksums: Seq[String], val logging: UpdateLogging.Value, - val overwrite: Boolean = false) + val overwrite: Boolean) { + def this(ivyFile: Option[File], resolverName: String, artifacts: Map[Artifact, File], checksums: Seq[String], logging: UpdateLogging.Value) = + this(ivyFile, resolverName, artifacts, checksums, logging, false) +} final class 
UpdateConfiguration(val retrieve: Option[RetrieveConfiguration], val missingOk: Boolean, val logging: UpdateLogging.Value) final class RetrieveConfiguration(val retrieveDirectory: File, val outputPattern: String) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 0a5595d57..465578606 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -1213,8 +1213,11 @@ object Classpaths def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) = new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging) + def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging.Value): PublishConfiguration = + publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = false) def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly, overwrite: Boolean = false) = - new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging, overwrite) + new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging, overwrite) + def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath From 322f6de6551665cade7d56b532348ea5dc3d54db Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 3 Jan 2014 19:32:18 -0500 Subject: [PATCH 032/148] Logic system supporting auto plugins and initial outline of AutoPlugin and Natures types. 
* Not integrated into project loading * Doesn't yet check that negation is acyclic before execution --- main/src/main/scala/sbt/AutoPlugin.scala | 128 ++++++++ project/Sbt.scala | 4 +- util/collection/src/main/scala/sbt/Dag.scala | 19 +- .../src/main/scala/sbt/logic/Logic.scala | 297 ++++++++++++++++++ .../logic/src/test/scala/sbt/logic/Test.scala | 84 +++++ .../src/main/scala/sbt/Relation.scala | 8 +- 6 files changed, 526 insertions(+), 14 deletions(-) create mode 100644 main/src/main/scala/sbt/AutoPlugin.scala create mode 100644 util/logic/src/main/scala/sbt/logic/Logic.scala create mode 100644 util/logic/src/test/scala/sbt/logic/Test.scala diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala new file mode 100644 index 000000000..cbf8d8bc5 --- /dev/null +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -0,0 +1,128 @@ +package sbt + + import logic.{Atom, Clause, Clauses, Formula, Literal, Logic} + import Def.Setting + import Natures._ + +/** +An AutoPlugin defines a group of settings and the conditions that the settings are automatically added to a build (called "activation"). +The `select` method defines the conditions, + `provides` defines an identifier for the AutoPlugin, + and the a method like `projectSettings` defines the settings to add. + +Steps for plugin authors: +1. Determine the natures that, when present (or absent), activate the AutoPlugin. +2. Determine the settings/configurations to automatically inject when activated. +3. Define a new, unique identifying [[Nature]] (which is a wrapper around a String ID). + +For example, the following will automatically add the settings in `projectSettings` + to a project that has both the `Web` and `Javascript` natures enabled. It will itself + define the `MyStuff` nature. This nature can be explicitly disabled by the user to + prevent the plugin from activating. 
+ + object MyPlugin extends AutoPlugin { + def select = Web && Javascript + def provides = MyStuff + def projectSettings = Seq(...) + } + +Steps for users: +1. add dependencies on plugins as usual with addSbtPlugin +2. add Natures to Projects, which will automatically select the plugin settings to add for those Projects. + +For example, given natures Web and Javascript (perhaps provided by plugins added with addSbtPlugin), + + .natures( Web && Javascript ) + +will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines + + .natures( Web && Javascript && !MyStuff) + +then the `MyPlugin` settings (and anything that activates when `MyStuff` is activated) will not be added. +*/ +abstract class AutoPlugin +{ + /** This AutoPlugin will be activated for a project when the [[Natures]] matcher returned by this method matches that project's natures + * AND the user does not explicitly exclude the Nature returned by `provides`. + * + * For example, if this method returns `Web && Javascript`, this plugin instance will only be added + * if the `Web` and `Javascript` natures are enabled. */ + def select: Natures + + /** The unique [[Nature]] for this AutoPlugin instance. This has two purposes: + * 1. The user can explicitly disable this AutoPlugin. + * 2. Other plugins can activate based on whether this AutoPlugin was activated. + */ + def provides: Nature + + /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ + def projectConfigurations: Seq[Configuration] = Nil + + /** The [[Setting]]s to add in the scope of each project that activates this AutoPlugin. */ + def projectSettings: Seq[Setting[_]] = Nil + + /** The [[Setting]]s to add to the build scope for each project that activates this AutoPlugin. + * The settings returned here are guaranteed to be added to a given build scope only once + * regardless of how many projects for that build activate this AutoPlugin. 
*/ + def buildSettings: Seq[Setting[_]] = Nil + + /** The [[Setting]]s to add to the global scope exactly once if any project activates this AutoPlugin. */ + def globalSettings: Seq[Setting[_]] = Nil + + // TODO?: def commands: Seq[Command] +} + +/** An expression that matches `Nature`s. */ +sealed trait Natures { + def && (o: Basic): Natures +} + +/** Represents a feature or conceptual group of settings. +* `label` is the unique ID for this nature. */ +final case class Nature(label: String) extends Basic { + /** Constructs a Natures matcher that excludes this Nature. */ + def unary_! : Basic = Exclude(this) +} + +object Natures +{ + // TODO: allow multiple AutoPlugins to provide the same Nature? + // TODO: translate error messages + /** Select the AutoPlugins to include according to the user-specified natures in `requested` and all discovered AutoPlugins in `defined`.*/ + def evaluate(requested: Natures, defined: List[AutoPlugin]): Seq[AutoPlugin] = + { + val byAtom = defined.map(x => (Atom(x.provides.label), x)).toMap + val clauses = Clauses( defined.map(d => asClause(d)) ) + val results = Logic.reduce(clauses, flatten(requested).toSet) + results.ordered.map(byAtom) + } + + /** An included or excluded Nature. TODO: better name than Basic. */ + sealed abstract class Basic extends Natures { + def &&(o: Basic): Natures = And(this :: o :: Nil) + } + private[sbt] final case class Exclude(n: Nature) extends Basic { + def unary_! 
: Nature = n + } + private[sbt] final case class And(natures: List[Basic]) extends Natures { + def &&(o: Basic): Natures = And(o :: natures) + } + + private[sbt] def asClause(ap: AutoPlugin): Clause = + Clause( convert(ap.select), Set(Atom(ap.provides.label)) ) + + private[this] def flatten(n: Natures): Seq[Literal] = n match { + case And(ns) => convertAll(ns) + case b: Basic => convertBasic(b) :: Nil + } + + private[this] def convert(n: Natures): Formula = n match { + case And(ns) => convertAll(ns).reduce[Formula](_ && _) + case b: Basic => convertBasic(b) + } + private[this] def convertBasic(b: Basic): Literal = b match { + case Exclude(n) => !convertBasic(n) + case Nature(s) => Atom(s) + } + private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic +} \ No newline at end of file diff --git a/project/Sbt.scala b/project/Sbt.scala index 800ef4a5d..e6fdf84ca 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -73,6 +73,8 @@ object Sbt extends Build lazy val datatypeSub = baseProject(utilPath /"datatype", "Datatype Generator") dependsOn(ioSub) // cross versioning lazy val crossSub = baseProject(utilPath / "cross", "Cross") settings(inConfig(Compile)(Transform.crossGenSettings): _*) + // A monotonic logic that includes restricted negation as failure + lazy val logicSub = baseProject(utilPath / "logic", "Logic").dependsOn(collectionSub, relationSub) /* **** Intermediate-level Modules **** */ @@ -130,7 +132,7 @@ object Sbt extends Build completeSub, classpathSub, stdTaskSub, processSub) settings( sbinary ) // The main integration project for sbt. It brings all of the subsystems together, configures them, and provides for overriding conventions. 
- lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn(actionsSub, mainSettingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, processSub, runSub, commandSub) settings(scalaXml) + lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn(actionsSub, mainSettingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, logicSub, processSub, runSub, commandSub) settings(scalaXml) // Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object // technically, we need a dependency on all of mainSub's dependencies, but we don't do that since this is strictly an integration project diff --git a/util/collection/src/main/scala/sbt/Dag.scala b/util/collection/src/main/scala/sbt/Dag.scala index 4250b0f10..ef8f9cec1 100644 --- a/util/collection/src/main/scala/sbt/Dag.scala +++ b/util/collection/src/main/scala/sbt/Dag.scala @@ -15,7 +15,7 @@ object Dag import JavaConverters.asScalaSetConverter def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] = topologicalSort(root :: Nil)(dependencies) - + def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = { val discovered = new mutable.HashSet[T] @@ -24,7 +24,7 @@ object Dag def visitAll(nodes: Iterable[T]) = nodes foreach visit def visit(node : T){ if (!discovered(node)) { - discovered(node) = true; + discovered(node) = true; try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c } finished += node; } @@ -33,11 +33,13 @@ object Dag } visitAll(nodes); - + finished.toList; } // doesn't check for cycles - def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = + def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = topologicalSortUnchecked(node :: Nil)(dependencies) + + def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = { val discovered = new mutable.HashSet[T] 
var finished: List[T] = Nil @@ -45,23 +47,23 @@ object Dag def visitAll(nodes: Iterable[T]) = nodes foreach visit def visit(node : T){ if (!discovered(node)) { - discovered(node) = true; + discovered(node) = true; visitAll(dependencies(node)) finished ::= node; } } - visit(node); + visitAll(nodes); finished; } final class Cyclic(val value: Any, val all: List[Any], val complete: Boolean) extends Exception( "Cyclic reference involving " + - (if(complete) all.mkString("\n ", "\n ", "") else value) + (if(complete) all.mkString("\n ", "\n ", "") else value) ) { def this(value: Any) = this(value, value :: Nil, false) override def toString = getMessage - def ::(a: Any): Cyclic = + def ::(a: Any): Cyclic = if(complete) this else if(a == value) @@ -70,4 +72,3 @@ object Dag new Cyclic(value, a :: all, false) } } - diff --git a/util/logic/src/main/scala/sbt/logic/Logic.scala b/util/logic/src/main/scala/sbt/logic/Logic.scala new file mode 100644 index 000000000..8d02b2ab9 --- /dev/null +++ b/util/logic/src/main/scala/sbt/logic/Logic.scala @@ -0,0 +1,297 @@ +package sbt +package logic + + import scala.annotation.tailrec + import Formula.{And, True} + +/* +Defines a propositional logic with negation as failure and only allows stratified rule sets (negation must be acyclic) in order to have a unique minimal model. + +For example, this is not allowed: + + p :- not q + + q :- not p +but this is: + + p :- q + + q :- p +as is this: + + p :- q + + q := not r + + + Some useful links: + + https://en.wikipedia.org/wiki/Nonmonotonic_logic + + https://en.wikipedia.org/wiki/Negation_as_failure + + https://en.wikipedia.org/wiki/Propositional_logic + + https://en.wikipedia.org/wiki/Stable_model_semantics + + http://www.w3.org/2005/rules/wg/wiki/negation +*/ + + +/** Disjunction (or) of the list of clauses. */ +final case class Clauses(clauses: List[Clause]) { + assert(clauses.nonEmpty, "At least one clause is required.") +} + +/** When the `body` Formula succeeds, atoms in `head` are true. 
*/ +final case class Clause(body: Formula, head: Set[Atom]) + +/** A literal is an [[Atom]] or its [[negation|Negated]]. */ +sealed abstract class Literal extends Formula { + /** The underlying (positive) atom. */ + def atom: Atom + /** Negates this literal.*/ + def unary_! : Literal +} +/** A variable with name `label`. */ +final case class Atom(label: String) extends Literal { + def atom = this + def unary_! : Negated = Negated(this) +} +/** A negated atom, in the sense of negation as failure, not logical negation. +* That is, it is true if `atom` is not known/defined. */ +final case class Negated(atom: Atom) extends Literal { + def unary_! : Atom = atom +} + +/** A formula consists of variables, negation, and conjunction (and). +* (Disjunction is not currently included- it is modeled at the level of a sequence of clauses. +* This is less convenient when defining clauses, but is not less powerful.) */ +sealed abstract class Formula { + /** Constructs a clause that proves `atoms` when this formula is true. */ + def proves(atom: Atom, atoms: Atom*): Clause = Clause(this, (atom +: atoms).toSet) + + /** Constructs a formula that is true iff this formula and `f` are both true.*/ + def && (f: Formula): Formula = (this, f) match { + case (True, x) => x + case (x, True) => x + case (And(as), And(bs)) => And(as ++ bs) + case (And(as), b: Literal) => And(as + b) + case (a: Literal, And(bs)) => And(bs + a) + case (a: Literal, b: Literal) => And( Set(a,b) ) + } +} + + +object Formula { + /** A conjunction of literals. */ + final case class And(literals: Set[Literal]) extends Formula { + assert(literals.nonEmpty, "'And' requires at least one literal.") + } + final case object True extends Formula +} + +object Logic +{ + def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Matched = reduce(Clauses(clauses), initialFacts) + + /** Computes the variables in the unique stable model for the program represented by `clauses` and `initialFacts`. 
+ * `clause` may not have any negative feedback (that is, negation is acyclic) + * and `initialFacts` cannot be in the head of any clauses in `clause`. + * These restrictions ensure that the logic program has a unique minimal model. */ + def reduce(clauses: Clauses, initialFacts: Set[Literal]): Matched = + { + val (posSeq, negSeq) = separate(initialFacts.toSeq) + val (pos, neg) = (posSeq.toSet, negSeq.toSet) + + checkContradictions(pos, neg) + checkOverlap(clauses, pos) + checkAcyclic(clauses) + + reduce0(clauses, initialFacts, Matched.empty) + } + + + /** Verifies `initialFacts` are not in the head of any `clauses`. + * This avoids the situation where an atom is proved but no clauses prove it. + * This isn't necessarily a problem, but the main sbt use cases expects + * a proven atom to have at least one clause satisfied. */ + def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]) { + val as = atoms(clauses) + val initialOverlap = initialFacts.filter(as.inHead) + if(initialOverlap.nonEmpty) throw new InitialOverlap(initialOverlap) + } + + private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]) { + val contradictions = pos intersect neg + if(contradictions.nonEmpty) throw new InitialContradictions(contradictions) + } + + def checkAcyclic(clauses: Clauses) { + // TODO + } + + final class InitialContradictions(val literals: Set[Atom]) extends RuntimeException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) + final class InitialOverlap(val literals: Set[Atom]) extends RuntimeException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) + final class CyclicNegation(val cycle: List[Atom]) extends RuntimeException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) + + /** Tracks proven atoms in the reverse order they were proved. 
*/ + final class Matched private(val provenSet: Set[Atom], reverseOrdered: List[Atom]) { + def add(atoms: Set[Atom]): Matched = add(atoms.toList) + def add(atoms: List[Atom]): Matched = { + val newOnly = atoms.filterNot(provenSet) + new Matched(provenSet ++ newOnly, newOnly ::: reverseOrdered) + } + def ordered: List[Atom] = reverseOrdered.reverse + override def toString = ordered.map(_.label).mkString("Matched(", ",", ")") + } + object Matched { + val empty = new Matched(Set.empty, Nil) + } + + /** Separates a sequence of literals into `(pos, neg)` atom sequences. */ + private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) = Util.separate(lits) { + case a: Atom => Left(a) + case Negated(n) => Right(n) + } + + /** Finds clauses that have no body and thus prove their head. + * Returns `(, )`. */ + private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) = + { + val (proven, unproven) = c.clauses.partition(_.body == True) + (proven.flatMap(_.head).toSet, unproven) + } + private[this] def keepPositive(lits: Set[Literal]): Set[Atom] = + lits.collect{ case a: Atom => a}.toSet + + // precondition: factsToProcess contains no contradictions + @tailrec + private[this] def reduce0(clauses: Clauses, factsToProcess: Set[Literal], state: Matched): Matched = + applyAll(clauses, factsToProcess) match { + case None => // all of the remaining clauses failed on the new facts + state + case Some(applied) => + val (proven, unprovenClauses) = findProven(applied) + val processedFacts = state add keepPositive(factsToProcess) + val newlyProven = proven -- processedFacts.provenSet + val newState = processedFacts add newlyProven + if(unprovenClauses.isEmpty) + newState // no remaining clauses, done. 
+ else { + val unproven = Clauses(unprovenClauses) + val nextFacts: Set[Literal] = if(newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven) + reduce0(unproven, nextFacts, newState) + } + } + + /** Finds negated atoms under the negation as failure rule and returns them. + * This should be called only after there are no more known atoms to be substituted. */ + private[this] def inferFailure(clauses: Clauses): Set[Literal] = + { + /* At this point, there is at least one clause and one of the following is the case as the result of the acyclic negation rule: + i. there is at least one variable that occurs in a clause body but not in the head of a clause + ii. there is at least one variable that occurs in the head of a clause and does not transitively depend on a negated variable + In either case, each such variable x cannot be proven true and therefore proves 'not x' (negation as failure, !x in the code). + */ + val allAtoms = atoms(clauses) + val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse) + if(newFacts.nonEmpty) + newFacts + else { + val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty) + val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue) + if(newlyFalse.nonEmpty) + newlyFalse + else // should never happen due to the acyclic negation rule + error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue") + } + } + + private[this] def negated(atoms: Set[Atom]): Set[Literal] = atoms.map(a => Negated(a)) + + /** Computes the set of atoms in `clauses` that directly or transitively take a negated atom as input. 
+ * For example, for the following clauses, this method would return `List(a, d)` : + * a :- b, not c + * d :- a + */ + @tailrec + def hasNegatedDependency(clauses: Seq[Clause], posDeps: Relation[Atom, Atom], negDeps: Relation[Atom, Atom]): List[Atom] = + clauses match { + case Seq() => + // because cycles between positive literals are allowed, this isn't strictly a topological sort + Dag.topologicalSortUnchecked(negDeps._1s)(posDeps.reverse) + case Clause(formula, head) +: tail => + // collect direct positive and negative literals and track them in separate graphs + val (pos, neg) = directDeps(formula) + val (newPos, newNeg) = ( (posDeps, negDeps) /: head) { case ( (pdeps, ndeps), d) => + (pdeps + (d, pos), ndeps + (d, neg) ) + } + hasNegatedDependency(tail, newPos, newNeg) + } + + /** Computes the `(positive, negative)` literals in `formula`. */ + private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) = formula match { + case And(lits) => separate(lits.toSeq) + case Negated(a) => (Nil, a :: Nil) + case a: Atom => (a :: Nil, Nil) + case True => (Nil, Nil) + } + + /** Computes the atoms in the heads and bodies of the clauses in `clause`. */ + def atoms(cs: Clauses): Atoms = cs.clauses.map(c => Atoms(c.head, atoms(c.body))).reduce(_ ++ _) + + /** Computes the set of all atoms in `formula`. */ + def atoms(formula: Formula): Set[Atom] = formula match { + case And(lits) => lits.map(_.atom) + case Negated(lit) => Set(lit) + case a: Atom => Set(a) + case True => Set() + } + + /** Represents the set of atoms in the heads of clauses and in the bodies (formulas) of clauses. */ + final case class Atoms(val inHead: Set[Atom], val inFormula: Set[Atom]) { + /** Concatenates this with `as`. */ + def ++ (as: Atoms): Atoms = Atoms(inHead ++ as.inHead, inFormula ++ as.inFormula) + /** Atoms that cannot be true because they do not occur in a head. 
*/ + def triviallyFalse: Set[Atom] = inFormula -- inHead + } + + /** Applies known facts to `clause`s, deriving a new, possibly empty list of clauses. + * 1. If a fact is in the body of a clause, the derived clause has that fact removed from the body. + * 2. If the negation of a fact is in a body of a clause, that clause fails and is removed. + * 3. If a fact or its negation is in the head of a clause, the derived clause has that fact (or its negation) removed from the head. + * 4. If a head is empty, the clause proves nothing and is removed. + * + * NOTE: empty bodies do not cause a clause to succeed yet. + * All known facts must be applied before this can be done in order to avoid inconsistencies. + * Precondition: no contradictions in `facts` + * Postcondition: no atom in `facts` is present in the result + * Postcondition: No clauses have an empty head + * */ + def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] = + { + val newClauses = + if(facts.isEmpty) + cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head + else + cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList) + if(newClauses.isEmpty) None else Some(Clauses(newClauses)) + } + + def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] = + { + val atoms = facts.map(_.atom) + val newHead = c.head -- atoms // 3. + if(newHead.isEmpty) // 4. empty head + None + else + substitute(c.body, facts).map( f => Clause(f, newHead) ) // 1, 2 + } + + /** Derives the formula that results from substituting `facts` into `formula`. */ + @tailrec + def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match { + case And(lits) => + def negated(lits: Set[Literal]): Set[Literal] = lits.map(a => !a) + if( lits.exists( negated(facts) ) ) // 2. + None + else { + val newLits = lits -- facts + val newF = if(newLits.isEmpty) True else And(newLits) + Some(newF) // 1. 
+ } + case True => Some(True) + case lit: Literal => // define in terms of And + substitute(And(Set(lit)), facts) + } +} diff --git a/util/logic/src/test/scala/sbt/logic/Test.scala b/util/logic/src/test/scala/sbt/logic/Test.scala new file mode 100644 index 000000000..49836998a --- /dev/null +++ b/util/logic/src/test/scala/sbt/logic/Test.scala @@ -0,0 +1,84 @@ +package sbt +package logic + +object Test { + val A = Atom("A") + val B = Atom("B") + val C = Atom("C") + val D = Atom("D") + val E = Atom("E") + val F = Atom("F") + val G = Atom("G") + + val clauses = + A.proves(B) :: + A.proves(F) :: + B.proves(F) :: + F.proves(A) :: + (!C).proves(F) :: + D.proves(C) :: + C.proves(D) :: + Nil + + val cycles = Logic.reduceAll(clauses, Set()) + + val badClauses = + A.proves(D) :: + clauses + + val excludedNeg = { + val cs = + (!A).proves(B) :: + Nil + val init = + (!A) :: + (!B) :: + Nil + Logic.reduceAll(cs, init.toSet) + } + + val excludedPos = { + val cs = + A.proves(B) :: + Nil + val init = + A :: + (!B) :: + Nil + Logic.reduceAll(cs, init.toSet) + } + + val trivial = { + val cs = + Formula.True.proves(A) :: + Nil + Logic.reduceAll(cs, Set.empty) + } + + val lessTrivial = { + val cs = + Formula.True.proves(A) :: + Formula.True.proves(B) :: + (A && B && (!C)).proves(D) :: + Nil + Logic.reduceAll(cs, Set()) + } + + val ordering = { + val cs = + E.proves(F) :: + (C && !D).proves(E) :: + (A && B).proves(C) :: + Nil + Logic.reduceAll(cs, Set(A,B)) + } + + def all { + println(s"Cycles: $cycles") + println(s"xNeg: $excludedNeg") + println(s"xPos: $excludedPos") + println(s"trivial: $trivial") + println(s"lessTrivial: $lessTrivial") + println(s"ordering: $ordering") + } +} diff --git a/util/relation/src/main/scala/sbt/Relation.scala b/util/relation/src/main/scala/sbt/Relation.scala index 725512d0b..77c0b70c2 100644 --- a/util/relation/src/main/scala/sbt/Relation.scala +++ b/util/relation/src/main/scala/sbt/Relation.scala @@ -40,7 +40,7 @@ object Relation private[sbt] def 
get[X,Y](map: M[X,Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y]) - private[sbt] type M[X,Y] = Map[X, Set[Y]] + private[sbt] type M[X,Y] = Map[X, Set[Y]] } /** Binary relation between A and B. It is a set of pairs (_1, _2) for _1 in A, _2 in B. */ @@ -111,7 +111,7 @@ private final class MRelation[A,B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) ext { def forwardMap = fwd def reverseMap = rev - + def forward(t: A) = get(fwd, t) def reverse(t: B) = get(rev, t) @@ -119,12 +119,12 @@ private final class MRelation[A,B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) ext def _2s = rev.keySet def size = (fwd.valuesIterator map { _.size }).foldLeft(0)(_ + _) - + def all: Traversable[(A,B)] = fwd.iterator.flatMap { case (a, bs) => bs.iterator.map( b => (a,b) ) }.toTraversable def +(pair: (A,B)) = this + (pair._1, Set(pair._2)) def +(from: A, to: B) = this + (from, to :: Nil) - def +(from: A, to: Traversable[B]) = + def +(from: A, to: Traversable[B]) = if(to.isEmpty) this else new MRelation( add(fwd, from, to), (rev /: to) { (map, t) => add(map, t, from :: Nil) }) def ++(rs: Traversable[(A,B)]) = ((this: Relation[A,B]) /: rs) { _ + _ } From b8619f4aaecedd3bac6a6975f997239105ceb7aa Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Tue, 14 Jan 2014 12:38:06 -0500 Subject: [PATCH 033/148] Main part of integrating natures into project loading. 
--- main/src/main/scala/sbt/AutoPlugin.scala | 43 ++++-- main/src/main/scala/sbt/Build.scala | 1 + main/src/main/scala/sbt/BuildStructure.scala | 27 +++- main/src/main/scala/sbt/BuildUtil.scala | 12 +- .../main/scala/sbt/GroupedAutoPlugins.scala | 20 +++ main/src/main/scala/sbt/Load.scala | 146 ++++++++++++------ main/src/main/scala/sbt/Main.scala | 3 +- main/src/main/scala/sbt/Project.scala | 76 +++++++-- project/Sbt.scala | 2 +- .../sbt-test/project/auto-plugins/build.sbt | 34 ++++ .../project/auto-plugins/project/Q.scala | 65 ++++++++ sbt/src/sbt-test/project/auto-plugins/test | 1 + .../src/main/scala/sbt/ModuleUtilities.scala | 8 +- 13 files changed, 363 insertions(+), 75 deletions(-) create mode 100644 main/src/main/scala/sbt/GroupedAutoPlugins.scala create mode 100644 sbt/src/sbt-test/project/auto-plugins/build.sbt create mode 100644 sbt/src/sbt-test/project/auto-plugins/project/Q.scala create mode 100644 sbt/src/sbt-test/project/auto-plugins/test diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index cbf8d8bc5..9f11a6771 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -4,6 +4,9 @@ package sbt import Def.Setting import Natures._ +/** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ +trait AutoImport + /** An AutoPlugin defines a group of settings and the conditions that the settings are automatically added to a build (called "activation"). The `select` method defines the conditions, @@ -59,15 +62,15 @@ abstract class AutoPlugin def projectConfigurations: Seq[Configuration] = Nil /** The [[Setting]]s to add in the scope of each project that activates this AutoPlugin. */ - def projectSettings: Seq[Setting[_]] = Nil + def projectSettings: Seq[Setting[_]] = Nil /** The [[Setting]]s to add to the build scope for each project that activates this AutoPlugin. 
* The settings returned here are guaranteed to be added to a given build scope only once * regardless of how many projects for that build activate this AutoPlugin. */ - def buildSettings: Seq[Setting[_]] = Nil + def buildSettings: Seq[Setting[_]] = Nil /** The [[Setting]]s to add to the global scope exactly once if any project activates this AutoPlugin. */ - def globalSettings: Seq[Setting[_]] = Nil + def globalSettings: Seq[Setting[_]] = Nil // TODO?: def commands: Seq[Command] } @@ -82,19 +85,30 @@ sealed trait Natures { final case class Nature(label: String) extends Basic { /** Constructs a Natures matcher that excludes this Nature. */ def unary_! : Basic = Exclude(this) + override def toString = label } object Natures { // TODO: allow multiple AutoPlugins to provide the same Nature? // TODO: translate error messages - /** Select the AutoPlugins to include according to the user-specified natures in `requested` and all discovered AutoPlugins in `defined`.*/ - def evaluate(requested: Natures, defined: List[AutoPlugin]): Seq[AutoPlugin] = - { - val byAtom = defined.map(x => (Atom(x.provides.label), x)).toMap - val clauses = Clauses( defined.map(d => asClause(d)) ) - val results = Logic.reduce(clauses, flatten(requested).toSet) - results.ordered.map(byAtom) + def compile(defined: List[AutoPlugin]): Natures => Seq[AutoPlugin] = + if(defined.isEmpty) + Types.const(Nil) + else + { + val byAtom = defined.map(x => (Atom(x.provides.label), x)).toMap + val clauses = Clauses( defined.map(d => asClause(d)) ) + requestedNatures => { + val results = Logic.reduce(clauses, flatten(requestedNatures).toSet) + results.ordered.flatMap(a => byAtom.get(a).toList) + } + } + + def empty: Natures = Empty + private[sbt] final object Empty extends Natures { + def &&(o: Basic): Natures = o + override def toString = "" } /** An included or excluded Nature. TODO: better name than Basic. 
*/ @@ -103,9 +117,16 @@ object Natures } private[sbt] final case class Exclude(n: Nature) extends Basic { def unary_! : Nature = n + override def toString = s"!$n" } private[sbt] final case class And(natures: List[Basic]) extends Natures { def &&(o: Basic): Natures = And(o :: natures) + override def toString = natures.mkString(", ") + } + private[sbt] def and(a: Natures, b: Natures) = b match { + case Empty => a + case And(ns) => (a /: ns)(_ && _) + case b: Basic => a && b } private[sbt] def asClause(ap: AutoPlugin): Clause = @@ -114,11 +135,13 @@ object Natures private[this] def flatten(n: Natures): Seq[Literal] = n match { case And(ns) => convertAll(ns) case b: Basic => convertBasic(b) :: Nil + case Empty => Nil } private[this] def convert(n: Natures): Formula = n match { case And(ns) => convertAll(ns).reduce[Formula](_ && _) case b: Basic => convertBasic(b) + case Empty => Formula.True } private[this] def convertBasic(b: Basic): Literal = b match { case Exclude(n) => !convertBasic(n) diff --git a/main/src/main/scala/sbt/Build.scala b/main/src/main/scala/sbt/Build.scala index 0501f9259..7bcb704ec 100644 --- a/main/src/main/scala/sbt/Build.scala +++ b/main/src/main/scala/sbt/Build.scala @@ -18,6 +18,7 @@ trait Build * If None, the root project is the first project in the build's root directory or just the first project if none are in the root directory.*/ def rootProject: Option[Project] = None } +// TODO 0.14.0: decide if Plugin should be deprecated in favor of AutoPlugin trait Plugin { @deprecated("Override projectSettings or buildSettings instead.", "0.12.0") diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 9db86a90f..7b35d348d 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -41,11 +41,35 @@ final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, Resolv override def toString = unit.toString } +// TODO: figure out how to 
deprecate and drop buildNames final class LoadedDefinitions(val base: File, val target: Seq[File], val loader: ClassLoader, val builds: Seq[Build], val projects: Seq[Project], val buildNames: Seq[String]) -final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val plugins: Seq[Plugin], val pluginNames: Seq[String]) + +final class DetectedModules[T](val modules: Seq[(String, T)]) { + def names: Seq[String] = modules.map(_._1) + def values: Seq[T] = modules.map(_._2) +} + +final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoImports: DetectedModules[AutoImport], val autoPlugins: DetectedModules[AutoPlugin], val builds: DetectedModules[Build]) { + lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ autoImports.names) + lazy val compileNatures: Natures => Seq[AutoPlugin] = Natures.compile(autoPlugins.values.toList) +} +final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) +{ +/* + // TODO: uncomment before COMMIT for compatibility + @deprecated("Use the primary constructor.", "0.13.2") + def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = + this(base, pluginData, loader, DetectedPlugins(DetectedModules(pluginNames zip plugins), DetectedModules(Nil), DetectedModules(Nil), DetectedModules(Nil))) + @deprecated("Use detected.plugins.values.", "0.13.2") + val plugins = detected.plugins.values + @deprecated("Use detected.plugins.names.", "0.13.2") + val pluginNames = detected.plugins.names +*/ + def fullClasspath: Seq[Attributed[File]] = pluginData.classpath def classpath = data(fullClasspath) + } final class BuildUnit(val uri: URI, val localBase: File, val definitions: LoadedDefinitions, val plugins: LoadedPlugins) { @@ -57,6 +81,7 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) BuildUtil.checkCycles(units) def 
allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = for( (uri, unit) <- units.toSeq; (id, proj) <- unit.defined ) yield ProjectRef(uri, id) -> proj def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data) + private[sbt] def autos = GroupedAutoPlugins(units) } final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit]) sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] } diff --git a/main/src/main/scala/sbt/BuildUtil.scala b/main/src/main/scala/sbt/BuildUtil.scala index df57581bd..c48e721f7 100644 --- a/main/src/main/scala/sbt/BuildUtil.scala +++ b/main/src/main/scala/sbt/BuildUtil.scala @@ -35,7 +35,7 @@ final class BuildUtil[Proj]( case _ => None } - val configurationsForAxis: Option[ResolvedReference] => Seq[String] = + val configurationsForAxis: Option[ResolvedReference] => Seq[String] = refOpt => configurations(projectForAxis(refOpt)).map(_.name) } object BuildUtil @@ -60,8 +60,14 @@ object BuildUtil } } def baseImports: Seq[String] = "import sbt._, Keys._" :: Nil - def getImports(unit: BuildUnit): Seq[String] = getImports(unit.plugins.pluginNames, unit.definitions.buildNames) - def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = baseImports ++ importAllRoot(pluginNames ++ buildNames) + + def getImports(unit: BuildUnit): Seq[String] = unit.plugins.detected.imports + + @deprecated("Use getImports(Seq[String]).", "0.13.2") + def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = getImports(pluginNames ++ buildNames) + + def getImports(names: Seq[String]): Seq[String] = baseImports ++ importAllRoot(names) + def importAll(values: Seq[String]): Seq[String] = if(values.isEmpty) Nil else values.map( _ + "._" ).mkString("import ", ", ", "") :: Nil def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName) def rootedName(s: String): String = if(s contains '.') 
"_root_." + s else s diff --git a/main/src/main/scala/sbt/GroupedAutoPlugins.scala b/main/src/main/scala/sbt/GroupedAutoPlugins.scala new file mode 100644 index 000000000..2c99b2d85 --- /dev/null +++ b/main/src/main/scala/sbt/GroupedAutoPlugins.scala @@ -0,0 +1,20 @@ +package sbt + + import Def.Setting + import java.net.URI + +final class GroupedAutoPlugins(val all: Seq[AutoPlugin], val byBuild: Map[URI, Seq[AutoPlugin]]) +{ + def globalSettings: Seq[Setting[_]] = all.flatMap(_.globalSettings) + def buildSettings(uri: URI): Seq[Setting[_]] = byBuild.getOrElse(uri, Nil).flatMap(_.buildSettings) +} + +object GroupedAutoPlugins +{ + private[sbt] def apply(units: Map[URI, LoadedBuildUnit]): GroupedAutoPlugins = + { + val byBuild: Map[URI, Seq[AutoPlugin]] = units.mapValues(unit => unit.defined.values.flatMap(_.autoPlugins).toSeq.distinct).toMap + val all: Seq[AutoPlugin] = byBuild.values.toSeq.flatten.distinct + new GroupedAutoPlugins(all, byBuild) + } +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index 8b7f3465a..f68ae1878 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -180,7 +180,7 @@ object Load val keys = Index.allKeys(settings) val attributeKeys = Index.attributeKeys(data) ++ keys.map(_.key) val scopedKeys = keys ++ data.allKeys( (s,k) => ScopedKey(s,k)) - val projectsMap = projects.mapValues(_.defined.keySet) + val projectsMap = projects.mapValues(_.defined.keySet).toMap val keyIndex = KeyIndex(scopedKeys, projectsMap) val aggIndex = KeyIndex.aggregate(scopedKeys, extra(keyIndex), projectsMap) new sbt.StructureIndex(Index.stringToKeyMap(attributeKeys), Index.taskToKeyMap(data), Index.triggers(data), keyIndex, aggIndex) @@ -201,10 +201,10 @@ object Load { ((loadedBuild in GlobalScope :== loaded) +: transformProjectOnly(loaded.root, rootProject, injectSettings.global)) ++ - inScope(GlobalScope)( pluginGlobalSettings(loaded) ) ++ + inScope(GlobalScope)( 
pluginGlobalSettings(loaded) ++ loaded.autos.globalSettings ) ++ loaded.units.toSeq.flatMap { case (uri, build) => - val plugins = build.unit.plugins.plugins - val pluginBuildSettings = plugins.flatMap(_.buildSettings) + val plugins = build.unit.plugins.detected.plugins.values + val pluginBuildSettings = plugins.flatMap(_.buildSettings) ++ loaded.autos.buildSettings(uri) val pluginNotThis = plugins.flatMap(_.settings) filterNot isProjectThis val projectSettings = build.defined flatMap { case (id, project) => val ref = ProjectRef(uri, id) @@ -220,9 +220,10 @@ object Load buildSettings ++ projectSettings } } + @deprecated("Does not account for AutoPlugins and will be made private.", "0.13.2") def pluginGlobalSettings(loaded: sbt.LoadedBuild): Seq[Setting[_]] = loaded.units.toSeq flatMap { case (_, build) => - build.unit.plugins.plugins flatMap { _.globalSettings } + build.unit.plugins.detected.plugins.values flatMap { _.globalSettings } } @deprecated("No longer used.", "0.13.0") @@ -368,10 +369,11 @@ object Load def resolveProjects(loaded: sbt.PartBuild): sbt.LoadedBuild = { val rootProject = getRootProject(loaded.units) - new sbt.LoadedBuild(loaded.root, loaded.units map { case (uri, unit) => + val units = loaded.units map { case (uri, unit) => IO.assertAbsolute(uri) (uri, resolveProjects(uri, unit, rootProject)) - }) + } + new sbt.LoadedBuild(loaded.root, units) } def resolveProjects(uri: URI, unit: sbt.PartBuildUnit, rootProject: URI => String): sbt.LoadedBuildUnit = { @@ -399,10 +401,10 @@ object Load def getBuild[T](map: Map[URI, T], uri: URI): T = map.getOrElse(uri, noBuild(uri)) - def emptyBuild(uri: URI) = sys.error("No root project defined for build unit '" + uri + "'") - def noBuild(uri: URI) = sys.error("Build unit '" + uri + "' not defined.") - def noProject(uri: URI, id: String) = sys.error("No project '" + id + "' defined in '" + uri + "'.") - def noConfiguration(uri: URI, id: String, conf: String) = sys.error("No configuration '" + conf + "' defined in 
project '" + id + "' in '" + uri +"'") + def emptyBuild(uri: URI) = sys.error(s"No root project defined for build unit '$uri'") + def noBuild(uri: URI) = sys.error(s"Build unit '$uri' not defined.") + def noProject(uri: URI, id: String) = sys.error(s"No project '$id' defined in '$uri'.") + def noConfiguration(uri: URI, id: String, conf: String) = sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'") def loadUnit(uri: URI, localBase: File, s: State, config: sbt.LoadBuildConfiguration): sbt.BuildUnit = { @@ -410,15 +412,13 @@ object Load val defDir = projectStandard(normBase) val plugs = plugins(defDir, s, config.copy(pluginManagement = config.pluginManagement.forPlugin)) - val defNames = analyzed(plugs.fullClasspath) flatMap findDefinitions - val defsScala = if(defNames.isEmpty) Nil else loadDefinitions(plugs.loader, defNames) - val imports = BuildUtil.getImports(plugs.pluginNames, defNames) + val defsScala = plugs.detected.builds.values lazy val eval = mkEval(plugs.classpath, defDir, plugs.pluginData.scalacOptions) val initialProjects = defsScala.flatMap(b => projectsFromBuild(b, normBase)) val memoSettings = new mutable.HashMap[File, LoadedSbtFile] - def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, imports, plugs, () => eval, config.injectSettings, Nil, memoSettings) + def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, plugs, () => eval, config.injectSettings, Nil, memoSettings) val loadedProjectsRaw = loadProjects(initialProjects) val hasRoot = loadedProjectsRaw.exists(_.base == normBase) || defsScala.exists(_.rootProject.isDefined) val (loadedProjects, defaultBuildIfNone) = @@ -434,7 +434,7 @@ object Load } val defs = if(defsScala.isEmpty) defaultBuildIfNone :: Nil else defsScala - val loadedDefs = new sbt.LoadedDefinitions(defDir, Nil, plugs.loader, defs, loadedProjects, defNames) + val loadedDefs = new sbt.LoadedDefinitions(defDir, Nil, plugs.loader, defs, loadedProjects, plugs.detected.builds.names) new 
sbt.BuildUnit(uri, normBase, loadedDefs, plugs) } @@ -460,16 +460,19 @@ object Load private[this] def projectsFromBuild(b: Build, base: File): Seq[Project] = b.projectDefinitions(base).map(resolveBase(base)) - private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, imports: Seq[String], plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile]): Seq[Project] = + private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile]): Seq[Project] = { - def loadSbtFiles(auto: AddSettings, base: File): LoadedSbtFile = - loadSettings(auto, base, imports, plugins, eval, injectSettings, memoSettings) + def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin]): LoadedSbtFile = + loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins) def loadForProjects = newProjects map { project => - val loadedSbtFiles = loadSbtFiles(project.auto, project.base) - val transformed = project.copy(settings = (project.settings: Seq[Setting[_]]) ++ loadedSbtFiles.settings) + val autoPlugins = plugins.detected.compileNatures(project.natures) + val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) + val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins) + val newSettings = (project.settings: Seq[Setting[_]]) ++ loadedSbtFiles.settings + val transformed = project.copy(settings = newSettings).setAutoPlugins(autoPlugins).overrideConfigs(autoConfigs : _*) (transformed, loadedSbtFiles.projects) } - def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase).projects + def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase, Nil).projects val (nextProjects, loadedProjects) = if(newProjects.isEmpty) // load the .sbt files in the root directory to 
look for Projects (defaultLoad, acc) @@ -481,10 +484,10 @@ object Load if(nextProjects.isEmpty) loadedProjects else - loadTransitive(nextProjects, buildBase, imports, plugins, eval, injectSettings, loadedProjects, memoSettings) + loadTransitive(nextProjects, buildBase, plugins, eval, injectSettings, loadedProjects, memoSettings) } - private[this] def loadSettings(auto: AddSettings, projectBase: File, buildImports: Seq[String], loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile]): LoadedSbtFile = + private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin]): LoadedSbtFile = { lazy val defaultSbtFiles = configurationSources(projectBase) def settings(ss: Seq[Setting[_]]) = new LoadedSbtFile(ss, Nil, Nil) @@ -499,14 +502,20 @@ object Load lf } def loadSettingsFile(src: File): LoadedSbtFile = - EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), buildImports, 0)(loader) + EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), loadedPlugins.detected.imports, 0)(loader) import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,Sequence} + def pluginSettings(f: Plugins) = { + val included = loadedPlugins.detected.plugins.values.filter(f.include) // don't apply the filter to AutoPlugins, only Plugins + val oldStyle = included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings) + val autoStyle = autoPlugins.flatMap(_.projectSettings) + oldStyle ++ autoStyle + } def expand(auto: AddSettings): LoadedSbtFile = auto match { case User => settings(injectSettings.projectLoaded(loader)) case sf: SbtFiles => loadSettings( sf.files.map(f => IO.resolve(projectBase, f))) case sf: DefaultSbtFiles => loadSettings( defaultSbtFiles.filter(sf.include)) - case f: Plugins => 
settings(loadedPlugins.plugins.filter(f.include).flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings)) + case p: Plugins => settings(pluginSettings(p)) case q: Sequence => (LoadedSbtFile.empty /: q.sequence) { (b,add) => b.merge( expand(add) ) } } expand(auto) @@ -599,27 +608,48 @@ object Load config.evalPluginDef(pluginDef, pluginState) } + +/* +// TODO: UNCOMMENT BEFORE COMMIT + @deprecated("Use ModuleUtilities.getCheckedObjects[Build].", "0.13.2") def loadDefinitions(loader: ClassLoader, defs: Seq[String]): Seq[Build] = defs map { definition => loadDefinition(loader, definition) } + + @deprecated("Use ModuleUtilities.getCheckedObject[Build].", "0.13.2") def loadDefinition(loader: ClassLoader, definition: String): Build = ModuleUtilities.getObject(definition, loader).asInstanceOf[Build] +*/ def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): sbt.LoadedPlugins = { - val (pluginNames, plugins) = if(data.classpath.isEmpty) (Nil, Nil) else { - val names = getPluginNames(data.classpath, loader) - val loaded = - try loadPlugins(loader, names) - catch { - case e: ExceptionInInitializerError => - val cause = e.getCause - if(cause eq null) throw e else throw cause - case e: LinkageError => incompatiblePlugins(data, e) - } - (names, loaded) - } - new sbt.LoadedPlugins(dir, data, loader, plugins, pluginNames) + // TODO: binary detection for builds, autoImports, autoPlugins + import AutoBinaryResource._ + val plugins = detectModules[Plugin](data, loader, Plugins) + val builds = detectModules[Build](data, loader, Builds) + val autoImports = detectModules[AutoImport](data, loader, AutoImports) + val autoPlugins = detectModules[AutoPlugin](data, loader, AutoPlugins) + val detected = new DetectedPlugins(plugins, autoImports, autoPlugins, builds) + new sbt.LoadedPlugins(dir, data, loader, detected) } + private[this] def detectModules[T](data: PluginData, loader: ClassLoader, resourceName: String)(implicit mf: reflect.ClassManifest[T]): 
DetectedModules[T] = + { + val classpath = data.classpath + val namesAndValues = if(classpath.isEmpty) Nil else { + val names = discoverModuleNames(classpath, loader, resourceName, mf.erasure.getName) + loadModules[T](data, names, loader) + } + new DetectedModules(namesAndValues) + } + + private[this] def loadModules[T: ClassManifest](data: PluginData, names: Seq[String], loader: ClassLoader): Seq[(String,T)] = + try ModuleUtilities.getCheckedObjects[T](names, loader) + catch { + case e: ExceptionInInitializerError => + val cause = e.getCause + if(cause eq null) throw e else throw cause + case e: LinkageError => incompatiblePlugins(data, e) + } + private[this] def incompatiblePlugins(data: PluginData, t: LinkageError): Nothing = { val evicted = data.report.toList.flatMap(_.configurations.flatMap(_.evicted)) @@ -629,26 +659,54 @@ object Load val msgExtra = if(evictedStrings.isEmpty) "" else "\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString("\n\t") throw new IncompatiblePluginsException(msgBase + msgExtra, t) } - def getPluginNames(classpath: Seq[Attributed[File]], loader: ClassLoader): Seq[String] = - ( binaryPlugins(data(classpath), loader) ++ (analyzed(classpath) flatMap findPlugins) ).distinct + def discoverModuleNames(classpath: Seq[Attributed[File]], loader: ClassLoader, resourceName: String, moduleTypes: String*): Seq[String] = + ( + binaryPlugins(data(classpath), loader, resourceName) ++ + (analyzed(classpath) flatMap (a => discover(a, moduleTypes : _*))) + ).distinct + + @deprecated("Replaced by the more general discoverModuleNames and will be made private.", "0.13.2") + def getPluginNames(classpath: Seq[Attributed[File]], loader: ClassLoader): Seq[String] = + discoverModuleNames(classpath, loader, AutoBinaryResource.Plugins, classOf[Plugin].getName) + +/* +TODO: UNCOMMENT BEFORE COMMIT + @deprecated("Explicitly specify the resource name.", "0.13.2") def binaryPlugins(classpath: Seq[File], loader: ClassLoader): 
Seq[String] = + binaryPlugins(classpath, loader, AutoBinaryResource.Plugins) +*/ + + object AutoBinaryResource { + final val AutoPlugins = "sbt/sbt.autoplugins" + final val Plugins = "sbt/sbt.plugins" + final val Builds = "sbt/sbt.builds" + final val AutoImports = "sbt/sbt.autoimports" + } + def binaryPlugins(classpath: Seq[File], loader: ClassLoader, resourceName: String): Seq[String] = { import collection.JavaConversions._ - loader.getResources("sbt/sbt.plugins").toSeq.filter(onClasspath(classpath)) flatMap { u => + loader.getResources(resourceName).toSeq.filter(onClasspath(classpath)) flatMap { u => IO.readLinesURL(u).map( _.trim).filter(!_.isEmpty) } } def onClasspath(classpath: Seq[File])(url: URL): Boolean = IO.urlAsFile(url) exists (classpath.contains _) +/* +// TODO: UNCOMMENT BEFORE COMMIT + @deprecated("Use ModuleUtilities.getCheckedObjects[Plugin].", "0.13.2") def loadPlugins(loader: ClassLoader, pluginNames: Seq[String]): Seq[Plugin] = - pluginNames.map(pluginName => loadPlugin(pluginName, loader)) + ModuleUtilities.getCheckedObjects[Plugin](loader, pluginNames) + @deprecated("Use ModuleUtilities.getCheckedObject[Plugin].", "0.13.2") def loadPlugin(pluginName: String, loader: ClassLoader): Plugin = - ModuleUtilities.getObject(pluginName, loader).asInstanceOf[Plugin] + ModuleUtilities.getCheckedObject[Plugin](pluginName, loader) + @deprecated("No longer used.", "0.13.2") def findPlugins(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Plugin") +*/ + def findDefinitions(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Build") def discover(analysis: inc.Analysis, subclasses: String*): Seq[String] = { diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index 295ffca33..462d5a49b 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -125,7 +125,8 @@ object BuiltinCommands def aboutPlugins(e: Extracted): String = { - val allPluginNames = 
e.structure.units.values.flatMap(_.unit.plugins.pluginNames).toSeq.distinct + def list(b: BuildUnit) = b.plugins.detected.autoPlugins.values.map(_.provides) ++ b.plugins.detected.plugins.names + val allPluginNames = e.structure.units.values.flatMap(u => list(u.unit)).toSeq.distinct if(allPluginNames.isEmpty) "" else allPluginNames.mkString("Available Plugins: ", ", ", "") } def aboutScala(s: State, e: Extracted): String = diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index db0705299..8baa06997 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -50,33 +50,52 @@ sealed trait ProjectDefinition[PR <: ProjectReference] /** Configures the sources of automatically appended settings.*/ def auto: AddSettings + /** The [[Natures]] associated with this project. + A [[Nature]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ + def natures: Natures + + /** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. 
*/ + private[sbt] def autoPlugins: Seq[AutoPlugin] + override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode override final def equals(o: Any) = o match { case p: ProjectDefinition[_] => p.getClass == this.getClass && p.id == id && p.base == base case _ => false } - override def toString = "Project(id: " + id + ", base: " + base + ", aggregate: " + aggregate + ", dependencies: " + dependencies + ", configurations: " + configurations + ")" + override def toString = + { + val agg = ifNonEmpty("aggregate", aggregate) + val dep = ifNonEmpty("dependencies", dependencies) + val conf = ifNonEmpty("configurations", configurations) + val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.provides)) + val fields = s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"natures: List($natures)" :: autos) + s"Project(${fields.mkString(", ")})" + } + private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] = if(ts.isEmpty) Nil else s"$label: $ts" :: Nil } sealed trait Project extends ProjectDefinition[ProjectReference] { + // TODO: add parameters for natures and autoPlugins in 0.14.0 (not reasonable to do in a binary compatible way in 0.13) def copy(id: String = id, base: File = base, aggregate: => Seq[ProjectReference] = aggregate, dependencies: => Seq[ClasspathDep[ProjectReference]] = dependencies, delegates: => Seq[ProjectReference] = delegates, settings: => Seq[Setting[_]] = settings, configurations: Seq[Configuration] = configurations, auto: AddSettings = auto): Project = - Project(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto) + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, natures, autoPlugins) def resolve(resolveRef: ProjectReference => ProjectRef): ResolvedProject = { def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef def resolveDeps(ds: 
Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep def resolveDep(d: ClasspathDep[ProjectReference]) = ResolvedClasspathDependency(resolveRef(d.project), d.configuration) - resolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), settings, configurations, auto) + resolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), + settings, configurations, auto, natures, autoPlugins) } def resolveBuild(resolveRef: ProjectReference => ProjectReference): Project = { def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep def resolveDep(d: ClasspathDep[ProjectReference]) = ClasspathDependency(resolveRef(d.project), d.configuration) - apply(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), settings, configurations, auto) + unresolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), + settings, configurations, auto, natures, autoPlugins) } /** Applies the given functions to this Project. @@ -116,8 +135,24 @@ sealed trait Project extends ProjectDefinition[ProjectReference] /** Sets the list of .sbt files to parse for settings to be appended to this project's settings. * Any configured .sbt files are removed from this project's list.*/ def setSbtFiles(files: File*): Project = copy(auto = AddSettings.append( AddSettings.clearSbtFiles(auto), AddSettings.sbtFiles(files: _*)) ) + + /** Sets the [[Natures]] of this project. + A [[Nature]] is a common label that is used by plugins to determine what settings, if any, to add to a project. 
*/ + def addNatures(ns: Natures): Project = { + // TODO: for 0.14.0, use copy when it has the additional `natures` parameter + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, Natures.and(natures, ns), autoPlugins) + } + + /** Definitively set the [[AutoPlugin]]s for this project. */ + private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = { + // TODO: for 0.14.0, use copy when it has the additional `autoPlugins` parameter + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, natures, autos) + } +} +sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] { + /** The [[AutoPlugin]]s enabled for this project as computed from [[natures]].*/ + def autoPlugins: Seq[AutoPlugin] } -sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] sealed trait ClasspathDep[PR <: ProjectReference] { def project: PR; def configuration: Option[String] } final case class ResolvedClasspathDependency(project: ProjectRef, configuration: Option[String]) extends ClasspathDep[ProjectRef] @@ -150,23 +185,22 @@ object Project extends ProjectExtra Def.showRelativeKey( ProjectRef(loaded.root, loaded.units(loaded.root).rootProjects.head), loaded.allProjectRefs.size > 1, keyNameColor) private abstract class ProjectDef[PR <: ProjectReference](val id: String, val base: File, aggregate0: => Seq[PR], dependencies0: => Seq[ClasspathDep[PR]], - delegates0: => Seq[PR], settings0: => Seq[Def.Setting[_]], val configurations: Seq[Configuration], val auto: AddSettings) extends ProjectDefinition[PR] + delegates0: => Seq[PR], settings0: => Seq[Def.Setting[_]], val configurations: Seq[Configuration], val auto: AddSettings, + val natures: Natures, val autoPlugins: Seq[AutoPlugin]) extends ProjectDefinition[PR] { lazy val aggregate = aggregate0 lazy val dependencies = dependencies0 lazy val delegates = delegates0 lazy val 
settings = settings0 - + Dag.topologicalSort(configurations)(_.extendsConfigs) // checks for cyclic references here instead of having to do it in Scope.delegates } + // TODO: add parameter for natures in 0.14.0 def apply(id: String, base: File, aggregate: => Seq[ProjectReference] = Nil, dependencies: => Seq[ClasspathDep[ProjectReference]] = Nil, delegates: => Seq[ProjectReference] = Nil, settings: => Seq[Def.Setting[_]] = defaultSettings, configurations: Seq[Configuration] = Configurations.default, auto: AddSettings = AddSettings.allDefaults): Project = - { - validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg)) - new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto) with Project - } + unresolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Natures.empty, Nil) /** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not.*/ def validProjectID(id: String): Option[String] = DefaultParsers.parse(id, DefaultParsers.ID).left.toOption @@ -185,9 +219,23 @@ object Project extends ProjectExtra * This is a best effort implementation, since valid characters are not documented or consistent.*/ def normalizeModuleID(id: String): String = normalizeBase(id) + @deprecated("Will be removed.", "0.13.2") def resolved(id: String, base: File, aggregate: => Seq[ProjectRef], dependencies: => Seq[ResolvedClasspathDependency], delegates: => Seq[ProjectRef], settings: Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings): ResolvedProject = - new ProjectDef[ProjectRef](id, base, aggregate, dependencies, delegates, settings, configurations, auto) with ResolvedProject + resolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Natures.empty, Nil) + + private def resolved(id: String, base: File, aggregate: => Seq[ProjectRef], dependencies: => Seq[ClasspathDep[ProjectRef]], + delegates: => 
Seq[ProjectRef], settings: Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings, + natures: Natures, autoPlugins: Seq[AutoPlugin]): ResolvedProject = + new ProjectDef[ProjectRef](id, base, aggregate, dependencies, delegates, settings, configurations, auto, natures, autoPlugins) with ResolvedProject + + private def unresolved(id: String, base: File, aggregate: => Seq[ProjectReference], dependencies: => Seq[ClasspathDep[ProjectReference]], + delegates: => Seq[ProjectReference], settings: => Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings, + natures: Natures, autoPlugins: Seq[AutoPlugin]): Project = + { + validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg)) + new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto, natures, autoPlugins) with Project + } def defaultSettings: Seq[Def.Setting[_]] = Defaults.defaultSettings @@ -307,7 +355,7 @@ object Project extends ProjectExtra def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])(implicit display: Show[ScopedKey[_]]): String = { val scoped = ScopedKey(scope,key) - + val data = scopedKeyData(structure, scope, key) map {_.description} getOrElse {"No entry for key."} val description = key.description match { case Some(desc) => "Description:\n\t" + desc + "\n"; case None => "" } @@ -413,7 +461,7 @@ object Project extends ProjectExtra import DefaultParsers._ val loadActionParser = token(Space ~> ("plugins" ^^^ Plugins | "return" ^^^ Return)) ?? 
Current - + val ProjectReturn = AttributeKey[List[File]]("project-return", "Maintains a stack of builds visited using reload.") def projectReturn(s: State): List[File] = getOrNil(s, ProjectReturn) def inPluginProject(s: State): Boolean = projectReturn(s).toList.length > 1 diff --git a/project/Sbt.scala b/project/Sbt.scala index e6fdf84ca..3fb03a64a 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -73,7 +73,7 @@ object Sbt extends Build lazy val datatypeSub = baseProject(utilPath /"datatype", "Datatype Generator") dependsOn(ioSub) // cross versioning lazy val crossSub = baseProject(utilPath / "cross", "Cross") settings(inConfig(Compile)(Transform.crossGenSettings): _*) - // A monotonic logic that includes restricted negation as failure + // A logic with restricted negation as failure for a unique, stable model lazy val logicSub = baseProject(utilPath / "logic", "Logic").dependsOn(collectionSub, relationSub) /* **** Intermediate-level Modules **** */ diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt new file mode 100644 index 000000000..d9543939b --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -0,0 +1,34 @@ +// !C will exclude C, and thus D, from being auto-added +lazy val a = project.addNatures(A && B && !C) + +// without B, C is not added +lazy val b = project.addNatures(A) + +// with both A and B, C is selected, which in turn selects D +lazy val c = project.addNatures(A && B) + +// with no natures defined, nothing is auto-added +lazy val d = project + + +check := { + val ddel = (del in d).?.value // should be None + same(ddel, None, "del in d") + val bdel = (del in b).?.value // should be None + same(bdel, None, "del in b") + val adel = (del in a).?.value // should be None + same(adel, None, "del in a") +// + val buildValue = (demo in ThisBuild).value + same(buildValue, "build 0", "demo in ThisBuild") + val globalValue = (demo in Global).value + same(globalValue, "global 
0", "demo in Global") + val projValue = (demo in c).value + same(projValue, "project c Q R", "demo in c") + val qValue = (del in c in q).value + same(qValue, " Q R", "del in c in q") +} + +def same[T](actual: T, expected: T, label: String) { + assert(actual == expected, s"Expected '$expected' for `$label`, got '$actual'") +} \ No newline at end of file diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala new file mode 100644 index 000000000..73dd5211b --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -0,0 +1,65 @@ + import sbt._ + import sbt.Keys.{name, resolvedScoped} + import java.util.concurrent.atomic.{AtomicInteger => AInt} + +object AI extends AutoImport +{ + lazy val A = Nature("A") + lazy val B = Nature("B") + lazy val C = Nature("C") + lazy val D = Nature("D") + lazy val E = Nature("E") + + lazy val q = config("q") + lazy val p = config("p").extend(q) + + lazy val demo = settingKey[String]("A demo setting.") + lazy val del = settingKey[String]("Another demo setting.") + + lazy val check = settingKey[Unit]("Verifies settings are as they should be.") +} + + import AI._ + +object Q extends AutoPlugin +{ + def select: Natures = A && B + + def provides = C + + override def projectConfigurations: Seq[Configuration] = + p :: + q :: + Nil + + override def projectSettings: Seq[Setting[_]] = + (demo := s"project ${name.value}") :: + (del in q := " Q") :: + Nil + + override def buildSettings: Seq[Setting[_]] = + (demo := s"build ${buildCount.getAndIncrement}") :: + Nil + + override def globalSettings: Seq[Setting[_]] = + (demo := s"global ${globalCount.getAndIncrement}") :: + Nil + + // used to ensure the build-level and global settings are only added once + private[this] val buildCount = new AInt(0) + private[this] val globalCount = new AInt(0) +} + +object R extends AutoPlugin +{ + def select = C && !D + + def provides = E + + override def projectSettings = Seq( + // 
tests proper ordering: R requires C, so C settings should come first + del in q += " R", + // tests that configurations are properly registered, enabling delegation from p to q + demo += (del in p).value + ) +} \ No newline at end of file diff --git a/sbt/src/sbt-test/project/auto-plugins/test b/sbt/src/sbt-test/project/auto-plugins/test new file mode 100644 index 000000000..15675b169 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins/test @@ -0,0 +1 @@ +> check diff --git a/util/classpath/src/main/scala/sbt/ModuleUtilities.scala b/util/classpath/src/main/scala/sbt/ModuleUtilities.scala index d939c040b..69dfa31dc 100644 --- a/util/classpath/src/main/scala/sbt/ModuleUtilities.scala +++ b/util/classpath/src/main/scala/sbt/ModuleUtilities.scala @@ -6,7 +6,7 @@ package sbt object ModuleUtilities { /** Reflectively loads and returns the companion object for top-level class `className` from `loader`. - * The class name should not include the `$` that scalac appends to the underlying jvm class for + * The class name should not include the `$` that scalac appends to the underlying jvm class for * a companion object. 
*/ def getObject(className: String, loader: ClassLoader): AnyRef = { @@ -14,4 +14,10 @@ object ModuleUtilities val singletonField = obj.getField("MODULE$") singletonField.get(null) } + + def getCheckedObject[T](className: String, loader: ClassLoader)(implicit mf: reflect.ClassManifest[T]): T = + mf.erasure.cast(getObject(className, loader)).asInstanceOf[T] + + def getCheckedObjects[T](classNames: Seq[String], loader: ClassLoader)(implicit mf: reflect.ClassManifest[T]): Seq[(String,T)] = + classNames.map(name => (name, getCheckedObject(name, loader))) } \ No newline at end of file From 30658f98bbde40deeb46fc770258d9ca4a59f7cb Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 034/148] API documentation and comments related to natures --- main/src/main/scala/sbt/AutoPlugin.scala | 10 ++- main/src/main/scala/sbt/BuildStructure.scala | 63 ++++++++++++++++++- .../main/scala/sbt/GroupedAutoPlugins.scala | 4 +- main/src/main/scala/sbt/Load.scala | 9 ++- 4 files changed, 79 insertions(+), 7 deletions(-) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index 9f11a6771..d571242bc 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -16,7 +16,7 @@ The `select` method defines the conditions, Steps for plugin authors: 1. Determine the natures that, when present (or absent), activate the AutoPlugin. 2. Determine the settings/configurations to automatically inject when activated. -3. Define a new, unique identifying [[Nature]] (which is a wrapper around a String ID). +3. Define a new, unique identifying [[Nature]], which is a wrapper around a String ID. For example, the following will automatically add the settings in `projectSettings` to a project that has both the `Web` and `Javascript` natures enabled. 
It will itself @@ -26,7 +26,7 @@ For example, the following will automatically add the settings in `projectSettin object MyPlugin extends AutoPlugin { def select = Web && Javascript def provides = MyStuff - def projectSettings = Seq(...) + override def projectSettings = Seq(...) } Steps for users: @@ -92,6 +92,8 @@ object Natures { // TODO: allow multiple AutoPlugins to provide the same Nature? // TODO: translate error messages + /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[Nature]]s. + * The [[AutoPlugin]]s are topologically sorted so that a selected [[AutoPlugin]] comes before its selecting [[AutoPlugin]].*/ def compile(defined: List[AutoPlugin]): Natures => Seq[AutoPlugin] = if(defined.isEmpty) Types.const(Nil) @@ -101,10 +103,13 @@ object Natures val clauses = Clauses( defined.map(d => asClause(d)) ) requestedNatures => { val results = Logic.reduce(clauses, flatten(requestedNatures).toSet) + // results includes the originally requested (positive) atoms, + // which won't have a corresponding AutoPlugin to map back to results.ordered.flatMap(a => byAtom.get(a).toList) } } + /** [[Natures]] instance that doesn't require any [[Nature]]s. */ def empty: Natures = Empty private[sbt] final object Empty extends Natures { def &&(o: Basic): Natures = o @@ -129,6 +134,7 @@ object Natures case b: Basic => a && b } + /** Defines a clause for `ap` such that the [[Nature]] provided by `ap` is the head and the selector for `ap` is the body. 
*/ private[sbt] def asClause(ap: AutoPlugin): Clause = Clause( convert(ap.select), Set(Atom(ap.provides.label)) ) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 7b35d348d..8ddf7bb88 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -30,30 +30,86 @@ final class StructureIndex( val keyIndex: KeyIndex, val aggregateKeyIndex: KeyIndex ) + +/** A resolved build unit. (`ResolvedBuildUnit` would be a better name to distinguish it from the loaded, but unresolved `BuildUnit`.) +* @param unit The loaded, but unresolved [[BuildUnit]] this was resolved from. +* @param defined The definitive map from project IDs to resolved projects. +* These projects have had [[Reference]]s resolved and [[AutoPlugin]]s evaluated. +* @param rootProjects The list of project IDs for the projects considered roots of this build. +* The first root project is used as the default in several situations where a project is not otherwise selected. +*/ final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, ResolvedProject], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase { assert(!rootProjects.isEmpty, "No root projects defined for build unit " + unit) + /** The project to use as the default when one is not otherwise selected. + * [[LocalRootProject]] resolves to this from within the same build.*/ val root = rootProjects.head + + /** The base directory of the build unit (not the build definition).*/ def localBase = unit.localBase + + /** The classpath to use when compiling against this build unit's publicly visible code. + * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ def classpath: Seq[File] = unit.definitions.target ++ unit.plugins.classpath + + /** The class loader to use for this build unit's publicly visible code. 
+ * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ def loader = unit.definitions.loader + + /** The imports to use for .sbt files, `consoleProject` and other contexts that use code from the build definition. */ def imports = BuildUtil.getImports(unit) override def toString = unit.toString } // TODO: figure out how to deprecate and drop buildNames +/** The built and loaded build definition, including loaded but unresolved [[Project]]s, for a build unit (for a single URI). +* +* @param base The base directory of the build definition, typically `/project/`. +* @param loader The ClassLoader containing all classes and plugins for the build definition project. +* Note that this does not include classes for .sbt files. +* @param builds The list of [[Build]]s for the build unit. +* In addition to auto-discovered [[Build]]s, this includes any auto-generated default [[Build]]s. +* @param projects The list of all [[Project]]s from all [[Build]]s. +* These projects have not yet been resolved, but they have had auto-plugins applied. +* In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `natures` +* and their `settings` and `configurations` updated as appropriate. +* @param buildNames No longer used and will be deprecated once feasible. +*/ final class LoadedDefinitions(val base: File, val target: Seq[File], val loader: ClassLoader, val builds: Seq[Build], val projects: Seq[Project], val buildNames: Seq[String]) -final class DetectedModules[T](val modules: Seq[(String, T)]) { +/** Auto-detected top-level modules (as in `object X`) of type `T` paired with their source names. */ +final class DetectedModules[T](val modules: Seq[(String, T)]) +{ + /** The source names of the modules. This is "X" in `object X`, as opposed to the implementation class name "X$". + * The names are returned in a stable order such that `names zip values` pairs a name with the actual module. 
*/ def names: Seq[String] = modules.map(_._1) + + /** The singleton value of the module. + * The values are returned in a stable order such that `names zip values` pairs a name with the actual module. */ def values: Seq[T] = modules.map(_._2) } +/** Auto-discovered modules for the build definition project. These include modules defined in build definition sources +* as well as modules in binary dependencies. +* +* @param builds The [[Build]]s detected in the build definition. This does not include the default [[Build]] that sbt creates if none is defined. +*/ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoImports: DetectedModules[AutoImport], val autoPlugins: DetectedModules[AutoPlugin], val builds: DetectedModules[Build]) { + /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. */ lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ autoImports.names) + + /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] given the defined [[Natures]] for a [[Project]]. */ lazy val compileNatures: Natures => Seq[AutoPlugin] = Natures.compile(autoPlugins.values.toList) } + +/** The built and loaded build definition project. +* @param base The base directory for the build definition project (not the base of the project itself). +* @param pluginData Evaluated tasks/settings from the build definition for later use. +* This is necessary because the build definition project is discarded. +* @param loader The class loader for the build definition project, notably excluding classes used for .sbt files. +* @param detected Auto-detected modules in the build definition. 
+*/ final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) { /* @@ -71,6 +127,11 @@ final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader def classpath = data(fullClasspath) } +/** The loaded, but unresolved build unit. +* @param uri The uniquely identifying URI for the build. +* @param localBase The working location of the build on the filesystem. +* For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build. +*/ final class BuildUnit(val uri: URI, val localBase: File, val definitions: LoadedDefinitions, val plugins: LoadedPlugins) { override def toString = if(uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase +")") diff --git a/main/src/main/scala/sbt/GroupedAutoPlugins.scala b/main/src/main/scala/sbt/GroupedAutoPlugins.scala index 2c99b2d85..d020ad31e 100644 --- a/main/src/main/scala/sbt/GroupedAutoPlugins.scala +++ b/main/src/main/scala/sbt/GroupedAutoPlugins.scala @@ -3,13 +3,13 @@ package sbt import Def.Setting import java.net.URI -final class GroupedAutoPlugins(val all: Seq[AutoPlugin], val byBuild: Map[URI, Seq[AutoPlugin]]) +private[sbt] final class GroupedAutoPlugins(val all: Seq[AutoPlugin], val byBuild: Map[URI, Seq[AutoPlugin]]) { def globalSettings: Seq[Setting[_]] = all.flatMap(_.globalSettings) def buildSettings(uri: URI): Seq[Setting[_]] = byBuild.getOrElse(uri, Nil).flatMap(_.buildSettings) } -object GroupedAutoPlugins +private[sbt] object GroupedAutoPlugins { private[sbt] def apply(units: Map[URI, LoadedBuildUnit]): GroupedAutoPlugins = { diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index f68ae1878..8141b0c7a 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -469,6 +469,7 @@ object Load val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) val loadedSbtFiles = 
loadSbtFiles(project.auto, project.base, autoPlugins) val newSettings = (project.settings: Seq[Setting[_]]) ++ loadedSbtFiles.settings + // add the automatically selected settings, record the selected AutoPlugins, and register the automatically selected configurations val transformed = project.copy(settings = newSettings).setAutoPlugins(autoPlugins).overrideConfigs(autoConfigs : _*) (transformed, loadedSbtFiles.projects) } @@ -621,6 +622,9 @@ object Load */ def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): sbt.LoadedPlugins = + new sbt.LoadedPlugins(dir, data, loader, autoDetect(data, loader)) + + private[this] def autoDetect(data: PluginData, loader: ClassLoader): DetectedPlugins = { // TODO: binary detection for builds, autoImports, autoPlugins import AutoBinaryResource._ @@ -628,8 +632,7 @@ object Load val builds = detectModules[Build](data, loader, Builds) val autoImports = detectModules[AutoImport](data, loader, AutoImports) val autoPlugins = detectModules[AutoPlugin](data, loader, AutoPlugins) - val detected = new DetectedPlugins(plugins, autoImports, autoPlugins, builds) - new sbt.LoadedPlugins(dir, data, loader, detected) + new DetectedPlugins(plugins, autoImports, autoPlugins, builds) } private[this] def detectModules[T](data: PluginData, loader: ClassLoader, resourceName: String)(implicit mf: reflect.ClassManifest[T]): DetectedModules[T] = { @@ -677,6 +680,8 @@ TODO: UNCOMMENT BEFORE COMMIT binaryPlugins(classpath, loader, AutoBinaryResource.Plugins) */ + /** Relative paths of resources that list top-level modules that are available. + * Normally, the classes for those modules will be in the same classpath entry as the resource. 
*/ object AutoBinaryResource { final val AutoPlugins = "sbt/sbt.autoplugins" final val Plugins = "sbt/sbt.plugins" From 7a38338509bb64b7d7af45802dbda5bf51e8b2a0 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 035/148] logic system/auto-plugins: Provide deprecated methods for binary compatibility --- main/src/main/scala/sbt/BuildStructure.scala | 12 ++++++------ main/src/main/scala/sbt/Load.scala | 10 ---------- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 8ddf7bb88..0ae96b923 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -112,16 +112,16 @@ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoImport */ final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) { -/* - // TODO: uncomment before COMMIT for compatibility @deprecated("Use the primary constructor.", "0.13.2") def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = - this(base, pluginData, loader, DetectedPlugins(DetectedModules(pluginNames zip plugins), DetectedModules(Nil), DetectedModules(Nil), DetectedModules(Nil))) + this(base, pluginData, loader, + new DetectedPlugins(new DetectedModules(pluginNames zip plugins), new DetectedModules(Nil), new DetectedModules(Nil), new DetectedModules(Nil)) + ) + @deprecated("Use detected.plugins.values.", "0.13.2") - val plugins = detected.plugins.values + val plugins: Seq[Plugin] = detected.plugins.values @deprecated("Use detected.plugins.names.", "0.13.2") - val pluginNames = detected.plugins.names -*/ + val pluginNames: Seq[String] = detected.plugins.names def fullClasspath: Seq[Attributed[File]] = pluginData.classpath def classpath = data(fullClasspath) diff --git 
a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index 8141b0c7a..2e9e1504f 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -609,9 +609,6 @@ object Load config.evalPluginDef(pluginDef, pluginState) } - -/* -// TODO: UNCOMMENT BEFORE COMMIT @deprecated("Use ModuleUtilities.getCheckedObjects[Build].", "0.13.2") def loadDefinitions(loader: ClassLoader, defs: Seq[String]): Seq[Build] = defs map { definition => loadDefinition(loader, definition) } @@ -619,7 +616,6 @@ object Load @deprecated("Use ModuleUtilities.getCheckedObject[Build].", "0.13.2") def loadDefinition(loader: ClassLoader, definition: String): Build = ModuleUtilities.getObject(definition, loader).asInstanceOf[Build] -*/ def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): sbt.LoadedPlugins = new sbt.LoadedPlugins(dir, data, loader, autoDetect(data, loader)) @@ -673,12 +669,9 @@ object Load def getPluginNames(classpath: Seq[Attributed[File]], loader: ClassLoader): Seq[String] = discoverModuleNames(classpath, loader, AutoBinaryResource.Plugins, classOf[Plugin].getName) -/* -TODO: UNCOMMENT BEFORE COMMIT @deprecated("Explicitly specify the resource name.", "0.13.2") def binaryPlugins(classpath: Seq[File], loader: ClassLoader): Seq[String] = binaryPlugins(classpath, loader, AutoBinaryResource.Plugins) -*/ /** Relative paths of resources that list top-level modules that are available. * Normally, the classes for those modules will be in the same classpath entry as the resource. 
*/ @@ -698,8 +691,6 @@ TODO: UNCOMMENT BEFORE COMMIT def onClasspath(classpath: Seq[File])(url: URL): Boolean = IO.urlAsFile(url) exists (classpath.contains _) -/* -// TODO: UNCOMMENT BEFORE COMMIT @deprecated("Use ModuleUtilities.getCheckedObjects[Plugin].", "0.13.2") def loadPlugins(loader: ClassLoader, pluginNames: Seq[String]): Seq[Plugin] = ModuleUtilities.getCheckedObjects[Plugin](loader, pluginNames) @@ -710,7 +701,6 @@ TODO: UNCOMMENT BEFORE COMMIT @deprecated("No longer used.", "0.13.2") def findPlugins(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Plugin") -*/ def findDefinitions(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Build") def discover(analysis: inc.Analysis, subclasses: String*): Seq[String] = From 09c76f29a328fbe661d31e4a7a9e89b6a10d3654 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 036/148] Discover all sbt-related modules (not just Plugin) and write names to resources for use from binaries. 
--- main/src/main/scala/sbt/Defaults.scala | 31 ++--- main/src/main/scala/sbt/Keys.scala | 3 +- main/src/main/scala/sbt/Load.scala | 90 ++----------- main/src/main/scala/sbt/PluginDiscovery.scala | 121 ++++++++++++++++++ 4 files changed, 148 insertions(+), 97 deletions(-) create mode 100644 main/src/main/scala/sbt/PluginDiscovery.scala diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 9310c44f7..5ca5a6356 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -179,7 +179,7 @@ object Defaults extends BuildCommon unmanagedResources <<= collectFiles(unmanagedResourceDirectories, includeFilter in unmanagedResources, excludeFilter in unmanagedResources), watchSources in ConfigGlobal ++= unmanagedResources.value, resourceGenerators :== Nil, - resourceGenerators <+= (definedSbtPlugins, resourceManaged) map writePluginsDescriptor, + resourceGenerators <+= (discoveredSbtPlugins, resourceManaged) map PluginDiscovery.writeDescriptors, managedResources <<= generate(resourceGenerators), resources <<= Classpaths.concat(managedResources, unmanagedResources) ) @@ -233,6 +233,7 @@ object Defaults extends BuildCommon consoleQuick <<= consoleQuickTask, discoveredMainClasses <<= compile map discoverMainClasses storeAs discoveredMainClasses triggeredBy compile, definedSbtPlugins <<= discoverPlugins, + discoveredSbtPlugins <<= discoverSbtPluginNames, inTask(run)(runnerTask :: Nil).head, selectMainClass := mainClass.value orElse selectRunMain(discoveredMainClasses.value), mainClass in run := (selectMainClass in run).value, @@ -764,27 +765,21 @@ object Defaults extends BuildCommon def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID = m.extra(CustomPomParser.SbtVersionKey -> sbtV, CustomPomParser.ScalaVersionKey -> scalaV).copy(crossVersion = CrossVersion.Disabled) + + @deprecated("Use PluginDiscovery.writeDescriptor.", "0.13.2") def writePluginsDescriptor(plugins: Set[String], 
dir: File): Seq[File] = - { - val descriptor: File = dir / "sbt" / "sbt.plugins" - if(plugins.isEmpty) - { - IO.delete(descriptor) - Nil - } - else - { - IO.writeLines(descriptor, plugins.toSeq.sorted) - descriptor :: Nil - } + PluginDiscovery.writeDescriptor(plugins.toSeq, dir, PluginDiscovery.Paths.Plugins).toList + + def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.task { + if(sbtPlugin.value) PluginDiscovery.discoverSourceAll(compile.value) else PluginDiscovery.emptyDiscoveredNames } + + @deprecated("Use discoverSbtPluginNames.", "0.13.2") def discoverPlugins: Initialize[Task[Set[String]]] = (compile, sbtPlugin, streams) map { (analysis, isPlugin, s) => if(isPlugin) discoverSbtPlugins(analysis, s.log) else Set.empty } + + @deprecated("Use PluginDiscovery.sourceModuleNames[Plugin].", "0.13.2") def discoverSbtPlugins(analysis: inc.Analysis, log: Logger): Set[String] = - { - val pluginClass = classOf[Plugin].getName - val discovery = Discovery(Set(pluginClass), Set.empty)( Tests allDefs analysis ) - discovery collect { case (df, disc) if (disc.baseClasses contains pluginClass) && disc.isModule => df.name } toSet; - } + PluginDiscovery.sourceModuleNames(analysis, classOf[Plugin].getName).toSet def copyResourcesTask = (classDirectory, resources, resourceDirectories, streams) map { (target, resrcs, dirs, s) => diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index 17f64e0b3..ceb7813ed 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -131,6 +131,7 @@ object Keys val crossVersion = SettingKey[CrossVersion]("cross-version", "Configures handling of the Scala version when cross-building.", CSetting) val classpathOptions = SettingKey[ClasspathOptions]("classpath-options", "Configures handling of Scala classpaths.", DSetting) val definedSbtPlugins = TaskKey[Set[String]]("defined-sbt-plugins", "The set of names of Plugin implementations defined by this project.", 
CTask) + val discoveredSbtPlugins = TaskKey[PluginDiscovery.DiscoveredNames]("discovered-sbt-plugins", "The names of sbt plugin-related modules (modules that extend Build, Plugin, AutoImport, AutoPlugin) defined by this project.", CTask) val sbtPlugin = SettingKey[Boolean]("sbt-plugin", "If true, enables adding sbt as a dependency and auto-generation of the plugin descriptor file.", BMinusSetting) val printWarnings = TaskKey[Unit]("print-warnings", "Shows warnings from compilation, including ones that weren't printed initially.", BPlusTask) val fileInputOptions = SettingKey[Seq[String]]("file-input-options", "Options that take file input, which may invalidate the cache.", CSetting) @@ -348,7 +349,7 @@ object Keys // Experimental in sbt 0.13.2 to enable grabing semantic compile failures. private[sbt] val compilerReporter = TaskKey[Option[xsbti.Reporter]]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask) - + val triggeredBy = Def.triggeredBy val runBefore = Def.runBefore diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index 2e9e1504f..b822a96cb 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -6,7 +6,6 @@ package sbt import java.io.File import java.net.{URI,URL} import compiler.{Eval,EvalImports} - import xsbt.api.{Discovered,Discovery} import xsbti.compile.CompileOrder import classpath.ClasspathUtilities import scala.annotation.tailrec @@ -18,7 +17,6 @@ package sbt import Keys.{appConfiguration, baseDirectory, configuration, fullResolvers, fullClasspath, pluginData, streams, thisProject, thisProjectRef, update} import Keys.{exportedProducts, loadedBuild, onLoadMessage, resolvedScoped, sbtPlugin, scalacOptions, taskDefinitionKey} import tools.nsc.reporters.ConsoleReporter - import Build.analyzed import Attributed.data import Scope.{GlobalScope, ThisScope} import Types.const @@ -618,82 +616,23 @@ object Load 
ModuleUtilities.getObject(definition, loader).asInstanceOf[Build] def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): sbt.LoadedPlugins = - new sbt.LoadedPlugins(dir, data, loader, autoDetect(data, loader)) + new sbt.LoadedPlugins(dir, data, loader, PluginDiscovery.discoverAll(data, loader)) - private[this] def autoDetect(data: PluginData, loader: ClassLoader): DetectedPlugins = - { - // TODO: binary detection for builds, autoImports, autoPlugins - import AutoBinaryResource._ - val plugins = detectModules[Plugin](data, loader, Plugins) - val builds = detectModules[Build](data, loader, Builds) - val autoImports = detectModules[AutoImport](data, loader, AutoImports) - val autoPlugins = detectModules[AutoPlugin](data, loader, AutoPlugins) - new DetectedPlugins(plugins, autoImports, autoPlugins, builds) - } - private[this] def detectModules[T](data: PluginData, loader: ClassLoader, resourceName: String)(implicit mf: reflect.ClassManifest[T]): DetectedModules[T] = - { - val classpath = data.classpath - val namesAndValues = if(classpath.isEmpty) Nil else { - val names = discoverModuleNames(classpath, loader, resourceName, mf.erasure.getName) - loadModules[T](data, names, loader) - } - new DetectedModules(namesAndValues) - } - - private[this] def loadModules[T: ClassManifest](data: PluginData, names: Seq[String], loader: ClassLoader): Seq[(String,T)] = - try ModuleUtilities.getCheckedObjects[T](names, loader) - catch { - case e: ExceptionInInitializerError => - val cause = e.getCause - if(cause eq null) throw e else throw cause - case e: LinkageError => incompatiblePlugins(data, e) - } - - private[this] def incompatiblePlugins(data: PluginData, t: LinkageError): Nothing = - { - val evicted = data.report.toList.flatMap(_.configurations.flatMap(_.evicted)) - val evictedModules = evicted map { id => (id.organization, id.name) } distinct ; - val evictedStrings = evictedModules map { case (o,n) => o + ":" + n } - val msgBase = "Binary incompatibility in plugins 
detected." - val msgExtra = if(evictedStrings.isEmpty) "" else "\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString("\n\t") - throw new IncompatiblePluginsException(msgBase + msgExtra, t) - } - - def discoverModuleNames(classpath: Seq[Attributed[File]], loader: ClassLoader, resourceName: String, moduleTypes: String*): Seq[String] = - ( - binaryPlugins(data(classpath), loader, resourceName) ++ - (analyzed(classpath) flatMap (a => discover(a, moduleTypes : _*))) - ).distinct - - @deprecated("Replaced by the more general discoverModuleNames and will be made private.", "0.13.2") + @deprecated("Replaced by the more general PluginDiscovery.binarySourceModuleNames and will be made private.", "0.13.2") def getPluginNames(classpath: Seq[Attributed[File]], loader: ClassLoader): Seq[String] = - discoverModuleNames(classpath, loader, AutoBinaryResource.Plugins, classOf[Plugin].getName) + PluginDiscovery.binarySourceModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins, classOf[Plugin].getName) - @deprecated("Explicitly specify the resource name.", "0.13.2") + @deprecated("Use PluginDiscovery.binaryModuleNames.", "0.13.2") def binaryPlugins(classpath: Seq[File], loader: ClassLoader): Seq[String] = - binaryPlugins(classpath, loader, AutoBinaryResource.Plugins) + PluginDiscovery.binaryModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins) - /** Relative paths of resources that list top-level modules that are available. - * Normally, the classes for those modules will be in the same classpath entry as the resource. 
*/ - object AutoBinaryResource { - final val AutoPlugins = "sbt/sbt.autoplugins" - final val Plugins = "sbt/sbt.plugins" - final val Builds = "sbt/sbt.builds" - final val AutoImports = "sbt/sbt.autoimports" - } - def binaryPlugins(classpath: Seq[File], loader: ClassLoader, resourceName: String): Seq[String] = - { - import collection.JavaConversions._ - loader.getResources(resourceName).toSeq.filter(onClasspath(classpath)) flatMap { u => - IO.readLinesURL(u).map( _.trim).filter(!_.isEmpty) - } - } + @deprecated("Use PluginDiscovery.onClasspath", "0.13.2") def onClasspath(classpath: Seq[File])(url: URL): Boolean = - IO.urlAsFile(url) exists (classpath.contains _) + PluginDiscovery.onClasspath(classpath)(url) @deprecated("Use ModuleUtilities.getCheckedObjects[Plugin].", "0.13.2") def loadPlugins(loader: ClassLoader, pluginNames: Seq[String]): Seq[Plugin] = - ModuleUtilities.getCheckedObjects[Plugin](loader, pluginNames) + ModuleUtilities.getCheckedObjects[Plugin](pluginNames, loader).map(_._2) @deprecated("Use ModuleUtilities.getCheckedObject[Plugin].", "0.13.2") def loadPlugin(pluginName: String, loader: ClassLoader): Plugin = @@ -702,17 +641,12 @@ object Load @deprecated("No longer used.", "0.13.2") def findPlugins(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Plugin") + @deprecated("No longer used.", "0.13.2") def findDefinitions(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Build") + + @deprecated("Use PluginDiscovery.sourceModuleNames", "0.13.2") def discover(analysis: inc.Analysis, subclasses: String*): Seq[String] = - { - val subclassSet = subclasses.toSet - val ds = Discovery(subclassSet, Set.empty)(Tests.allDefs(analysis)) - ds.flatMap { - case (definition, Discovered(subs,_,_,true)) => - if((subs & subclassSet).isEmpty) Nil else definition.name :: Nil - case _ => Nil - } - } + PluginDiscovery.sourceModuleNames(analysis, subclasses : _*) def initialSession(structure: sbt.BuildStructure, rootEval: () => Eval, s: State): 
SessionSettings = { val session = s get Keys.sessionSettings diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala new file mode 100644 index 000000000..351debfb8 --- /dev/null +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -0,0 +1,121 @@ +package sbt + + import java.io.File + import java.net.URL + import Attributed.data + import Build.analyzed + import xsbt.api.{Discovered,Discovery} + +object PluginDiscovery +{ + /** Relative paths of resources that list top-level modules that are available. + * Normally, the classes for those modules will be in the same classpath entry as the resource. */ + object Paths + { + final val AutoPlugins = "sbt/sbt.autoplugins" + final val Plugins = "sbt/sbt.plugins" + final val Builds = "sbt/sbt.builds" + final val AutoImports = "sbt/sbt.autoimports" + } + final class DiscoveredNames(val plugins: Seq[String], val autoImports: Seq[String], val autoPlugins: Seq[String], val builds: Seq[String]) + def emptyDiscoveredNames: DiscoveredNames = new DiscoveredNames(Nil, Nil, Nil, Nil) + + def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = + { + def discover[T](resource: String)(implicit mf: reflect.ClassManifest[T]) = + binarySourceModules[T](data, loader, resource) + import Paths._ + new DetectedPlugins(discover[Plugin](Plugins), discover[AutoImport](AutoImports), discover[AutoPlugin](AutoPlugins), discover[Build](Builds)) + } + def discoverSourceAll(analysis: inc.Analysis): DiscoveredNames = + { + def discover[T](implicit mf: reflect.ClassManifest[T]): Seq[String] = + sourceModuleNames(analysis, mf.erasure.getName) + new DiscoveredNames(discover[Plugin], discover[AutoImport], discover[AutoPlugin], discover[Build]) + } + + // TODO: for 0.14.0, consider consolidating into a single file, which would make the classpath search 4x faster + def writeDescriptors(names: DiscoveredNames, dir: File): Seq[File] = + { + import Paths._ + val files = + 
writeDescriptor(names.plugins, dir, Plugins) :: + writeDescriptor(names.autoPlugins, dir, AutoPlugins) :: + writeDescriptor(names.builds, dir, Builds) :: + writeDescriptor(names.autoImports, dir, AutoImports) :: + Nil + files.flatMap(_.toList) + } + + def writeDescriptor(names: Seq[String], dir: File, path: String): Option[File] = + { + val descriptor: File = new File(dir, path) + if(names.isEmpty) + { + IO.delete(descriptor) + None + } + else + { + IO.writeLines(descriptor, names.distinct.sorted) + Some(descriptor) + } + } + + + def binarySourceModuleNames(classpath: Seq[Attributed[File]], loader: ClassLoader, resourceName: String, subclasses: String*): Seq[String] = + ( + binaryModuleNames(data(classpath), loader, resourceName) ++ + (analyzed(classpath) flatMap ( a => sourceModuleNames(a, subclasses : _*) )) + ).distinct + + def sourceModuleNames(analysis: inc.Analysis, subclasses: String*): Seq[String] = + { + val subclassSet = subclasses.toSet + val ds = Discovery(subclassSet, Set.empty)(Tests.allDefs(analysis)) + ds.flatMap { + case (definition, Discovered(subs,_,_,true)) => + if((subs & subclassSet).isEmpty) Nil else definition.name :: Nil + case _ => Nil + } + } + + def binaryModuleNames(classpath: Seq[File], loader: ClassLoader, resourceName: String): Seq[String] = + { + import collection.JavaConversions._ + loader.getResources(resourceName).toSeq.filter(onClasspath(classpath)) flatMap { u => + IO.readLinesURL(u).map( _.trim).filter(!_.isEmpty) + } + } + def onClasspath(classpath: Seq[File])(url: URL): Boolean = + IO.urlAsFile(url) exists (classpath.contains _) + + private[sbt] def binarySourceModules[T](data: PluginData, loader: ClassLoader, resourceName: String)(implicit mf: reflect.ClassManifest[T]): DetectedModules[T] = + { + val classpath = data.classpath + val namesAndValues = if(classpath.isEmpty) Nil else { + val names = binarySourceModuleNames(classpath, loader, resourceName, mf.erasure.getName) + loadModules[T](data, names, loader) + } + new 
DetectedModules(namesAndValues) + } + + private[this] def loadModules[T: ClassManifest](data: PluginData, names: Seq[String], loader: ClassLoader): Seq[(String,T)] = + try ModuleUtilities.getCheckedObjects[T](names, loader) + catch { + case e: ExceptionInInitializerError => + val cause = e.getCause + if(cause eq null) throw e else throw cause + case e: LinkageError => incompatiblePlugins(data, e) + } + + private[this] def incompatiblePlugins(data: PluginData, t: LinkageError): Nothing = + { + val evicted = data.report.toList.flatMap(_.configurations.flatMap(_.evicted)) + val evictedModules = evicted map { id => (id.organization, id.name) } distinct ; + val evictedStrings = evictedModules map { case (o,n) => o + ":" + n } + val msgBase = "Binary incompatibility in plugins detected." + val msgExtra = if(evictedStrings.isEmpty) "" else "\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString("\n\t") + throw new IncompatiblePluginsException(msgBase + msgExtra, t) + } +} \ No newline at end of file From 359170b0f439874bb84df6ba5b6f5057d4de2860 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 037/148] Test for automatic handling of AutoImport, AutoPlugin, and Build from binary plugins. 
--- .../binary-plugin/changes/define/A.scala | 23 +++++++++++++++++++ .../binary-plugin/changes/define/build.sbt | 3 +++ .../binary-plugin/changes/use/plugins.sbt | 1 + .../sbt-test/project/binary-plugin/common.sbt | 7 ++++++ sbt/src/sbt-test/project/binary-plugin/test | 10 ++++++++ 5 files changed, 44 insertions(+) create mode 100644 sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala create mode 100644 sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt create mode 100644 sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt create mode 100644 sbt/src/sbt-test/project/binary-plugin/common.sbt create mode 100644 sbt/src/sbt-test/project/binary-plugin/test diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala new file mode 100644 index 000000000..5e4a3930e --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -0,0 +1,23 @@ +import sbt._ +import Keys._ + + +object C extends AutoImport { + lazy val aN = Nature("A") + lazy val bN = Nature("B") + lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") +} + + import C._ + +object A extends AutoPlugin { + override def provides = aN + override def select = bN + override def projectSettings = Seq( + check := {} + ) +} + +object B extends Build { + lazy val extra = project.addNatures(bN) +} diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt b/sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt new file mode 100644 index 000000000..f8a8d32b8 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt @@ -0,0 +1,3 @@ +sbtPlugin := true + +name := "demo-plugin" diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt b/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt new file mode 100644 index 000000000..b20bc97c3 --- /dev/null +++ 
b/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("org.example" % "demo-plugin" % "3.4") diff --git a/sbt/src/sbt-test/project/binary-plugin/common.sbt b/sbt/src/sbt-test/project/binary-plugin/common.sbt new file mode 100644 index 000000000..0cf61c76c --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/common.sbt @@ -0,0 +1,7 @@ +organization in ThisBuild := "org.example" + +version in ThisBuild := "3.4" + +lazy val define = project + +lazy val use = project diff --git a/sbt/src/sbt-test/project/binary-plugin/test b/sbt/src/sbt-test/project/binary-plugin/test new file mode 100644 index 000000000..ceb4e6a76 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/test @@ -0,0 +1,10 @@ +$ copy-file changes/define/build.sbt build.sbt +$ copy-file changes/define/A.scala A.scala + +# reload implied +> publishLocal + +$ delete build.sbt A.scala +$ copy-file changes/use/plugins.sbt project/plugins.sbt +> reload +> extra/check From 5add7306c26e72f9a34bbc9d305f60bfefc75134 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 038/148] Acyclic negation checking in logic system that backs auto-plugins. 
--- util/collection/src/main/scala/sbt/Dag.scala | 44 +++++++++++++++++++ .../src/main/scala/sbt/logic/Logic.scala | 37 +++++++++++++--- 2 files changed, 74 insertions(+), 7 deletions(-) diff --git a/util/collection/src/main/scala/sbt/Dag.scala b/util/collection/src/main/scala/sbt/Dag.scala index ef8f9cec1..58fb397ed 100644 --- a/util/collection/src/main/scala/sbt/Dag.scala +++ b/util/collection/src/main/scala/sbt/Dag.scala @@ -71,4 +71,48 @@ object Dag else new Cyclic(value, a :: all, false) } + + private[sbt] trait System[A] { + type B + def dependencies(t: A): List[B] + def isNegated(b: B): Boolean + def toA(b: B): A + } + private[sbt] def findNegativeCycle[T](system: System[T])(nodes: List[system.B]): List[system.B] = + { + import scala.annotation.tailrec + import system._ + val finished = new mutable.HashSet[T] + val visited = new mutable.HashSet[T] + + def visit(nodes: List[B], stack: List[B]): List[B] = nodes match { + case Nil => Nil + case node :: tail => + val atom = toA(node) + if(!visited(atom)) + { + visited += atom + visit(dependencies(atom), node :: stack) match { + case Nil => + finished += atom + visit(tail, stack) + case cycle => cycle + } + } + else if(!finished(atom)) + { + // cycle. If negation is involved, it is an error. 
+ val between = stack.takeWhile(f => toA(f) != atom) + if(between exists isNegated) + between + else + visit(tail, stack) + } + else + visit(tail, stack) + } + + visit(nodes, Nil) + } + } diff --git a/util/logic/src/main/scala/sbt/logic/Logic.scala b/util/logic/src/main/scala/sbt/logic/Logic.scala index 8d02b2ab9..bb6731949 100644 --- a/util/logic/src/main/scala/sbt/logic/Logic.scala +++ b/util/logic/src/main/scala/sbt/logic/Logic.scala @@ -117,12 +117,31 @@ object Logic } def checkAcyclic(clauses: Clauses) { - // TODO + val deps = dependencyMap(clauses) + val cycle = Dag.findNegativeCycle(system(deps))(deps.keys.toList) + if(cycle.nonEmpty) + throw new CyclicNegation(cycle) } + private[this] def system(deps: Map[Atom, Set[Literal]]) = new Dag.System[Atom] { + type B = Literal + def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList + def isNegated(b: Literal) = b match { + case Negated(_) => true + case Atom(_) => false + } + def toA(b: Literal) = b.atom + } + + private[this] def dependencyMap(clauses: Clauses): Map[Atom, Set[Literal]] = + (Map.empty[Atom, Set[Literal]] /: clauses.clauses) { + case (m, Clause(formula, heads)) => + val deps = literals(formula) + (m /: heads) { (n, head) => n.updated(head, n.getOrElse(head, Set.empty) ++ deps) } + } final class InitialContradictions(val literals: Set[Atom]) extends RuntimeException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) final class InitialOverlap(val literals: Set[Atom]) extends RuntimeException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) - final class CyclicNegation(val cycle: List[Atom]) extends RuntimeException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) + final class CyclicNegation(val cycle: List[Literal]) extends RuntimeException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) /** Tracks proven atoms in the reverse order they were proved. 
*/ final class Matched private(val provenSet: Set[Atom], reverseOrdered: List[Atom]) { @@ -220,11 +239,15 @@ object Logic } /** Computes the `(positive, negative)` literals in `formula`. */ - private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) = formula match { - case And(lits) => separate(lits.toSeq) - case Negated(a) => (Nil, a :: Nil) - case a: Atom => (a :: Nil, Nil) - case True => (Nil, Nil) + private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) = + Util.separate(literals(formula).toSeq) { + case Negated(a) => Right(a) + case a: Atom => Left(a) + } + private[this] def literals(formula: Formula): Set[Literal] = formula match { + case And(lits) => lits + case l: Literal => Set(l) + case True => Set.empty } /** Computes the atoms in the heads and bodies of the clauses in `clause`. */ From 1afd1931c4e3998b9bb6986438b19582703811d8 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 039/148] Translate errors from logic system to Natures system. 
--- main/src/main/scala/sbt/AutoPlugin.scala | 30 ++++++++++++---- main/src/main/scala/sbt/Load.scala | 9 +++-- util/collection/src/main/scala/sbt/Dag.scala | 3 +- .../src/main/scala/sbt/logic/Logic.scala | 36 ++++++++++--------- 4 files changed, 51 insertions(+), 27 deletions(-) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index d571242bc..3a799d601 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -1,6 +1,7 @@ package sbt - import logic.{Atom, Clause, Clauses, Formula, Literal, Logic} + import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated} + import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} import Def.Setting import Natures._ @@ -75,6 +76,11 @@ abstract class AutoPlugin // TODO?: def commands: Seq[Command] } +final class AutoPluginException(val origin: LogicException, prefix: String) extends RuntimeException(prefix + Natures.translateMessage(origin)) { + def withPrefix(p: String) = new AutoPluginException(origin, p) +} + + /** An expression that matches `Nature`s. 
*/ sealed trait Natures { def && (o: Basic): Natures @@ -101,14 +107,24 @@ object Natures { val byAtom = defined.map(x => (Atom(x.provides.label), x)).toMap val clauses = Clauses( defined.map(d => asClause(d)) ) - requestedNatures => { - val results = Logic.reduce(clauses, flatten(requestedNatures).toSet) - // results includes the originally requested (positive) atoms, - // which won't have a corresponding AutoPlugin to map back to - results.ordered.flatMap(a => byAtom.get(a).toList) - } + requestedNatures => + Logic.reduce(clauses, flatten(requestedNatures).toSet) match { + case Left(problem) => throw new AutoPluginException(problem, "") + case Right(results) => + // results includes the originally requested (positive) atoms, + // which won't have a corresponding AutoPlugin to map back to + results.ordered.flatMap(a => byAtom.get(a).toList) + } } + private[sbt] def translateMessage(e: LogicException) = e match { + case ic: InitialContradictions => s"Contradiction in selected natures. These natures were both included and excluded: ${literalsString(ic.literals.toSeq)}" + case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required natures are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" + case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}" + } + private[this] def literalsString(lits: Seq[Literal]): String = + lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString(", ") + /** [[Natures]] instance that doesn't require any [[Nature]]s. 
*/ def empty: Natures = Empty private[sbt] final object Empty extends Natures { diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index b822a96cb..2a00e7329 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -6,11 +6,10 @@ package sbt import java.io.File import java.net.{URI,URL} import compiler.{Eval,EvalImports} - import xsbti.compile.CompileOrder import classpath.ClasspathUtilities import scala.annotation.tailrec import collection.mutable - import Compiler.{Compilers,Inputs} + import Compiler.Compilers import inc.{FileValueCache, Locate} import Project.{inScope,makeSettings} import Def.{isDummy, ScopedKey, ScopeLocal, Setting} @@ -463,7 +462,9 @@ object Load def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin]): LoadedSbtFile = loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins) def loadForProjects = newProjects map { project => - val autoPlugins = plugins.detected.compileNatures(project.natures) + val autoPlugins = + try plugins.detected.compileNatures(project.natures) + catch { case e: AutoPluginException => throw translateAutoPluginException(e, project) } val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins) val newSettings = (project.settings: Seq[Setting[_]]) ++ loadedSbtFiles.settings @@ -485,6 +486,8 @@ object Load else loadTransitive(nextProjects, buildBase, plugins, eval, injectSettings, loadedProjects, memoSettings) } + private[this] def translateAutoPluginException(e: AutoPluginException, project: Project): AutoPluginException = + e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n") private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: 
Seq[AutoPlugin]): LoadedSbtFile = { diff --git a/util/collection/src/main/scala/sbt/Dag.scala b/util/collection/src/main/scala/sbt/Dag.scala index 58fb397ed..0ce07baf2 100644 --- a/util/collection/src/main/scala/sbt/Dag.scala +++ b/util/collection/src/main/scala/sbt/Dag.scala @@ -88,6 +88,7 @@ object Dag def visit(nodes: List[B], stack: List[B]): List[B] = nodes match { case Nil => Nil case node :: tail => + def indent = "\t" * stack.size val atom = toA(node) if(!visited(atom)) { @@ -102,7 +103,7 @@ object Dag else if(!finished(atom)) { // cycle. If negation is involved, it is an error. - val between = stack.takeWhile(f => toA(f) != atom) + val between = node :: stack.takeWhile(f => toA(f) != atom) if(between exists isNegated) between else diff --git a/util/logic/src/main/scala/sbt/logic/Logic.scala b/util/logic/src/main/scala/sbt/logic/Logic.scala index bb6731949..2181fbb7e 100644 --- a/util/logic/src/main/scala/sbt/logic/Logic.scala +++ b/util/logic/src/main/scala/sbt/logic/Logic.scala @@ -82,22 +82,26 @@ object Formula { object Logic { - def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Matched = reduce(Clauses(clauses), initialFacts) + def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Either[LogicException, Matched] = + reduce(Clauses(clauses), initialFacts) /** Computes the variables in the unique stable model for the program represented by `clauses` and `initialFacts`. * `clause` may not have any negative feedback (that is, negation is acyclic) * and `initialFacts` cannot be in the head of any clauses in `clause`. * These restrictions ensure that the logic program has a unique minimal model. 
*/ - def reduce(clauses: Clauses, initialFacts: Set[Literal]): Matched = + def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] = { val (posSeq, negSeq) = separate(initialFacts.toSeq) val (pos, neg) = (posSeq.toSet, negSeq.toSet) - checkContradictions(pos, neg) - checkOverlap(clauses, pos) - checkAcyclic(clauses) + val problem = + checkContradictions(pos, neg) orElse + checkOverlap(clauses, pos) orElse + checkAcyclic(clauses) - reduce0(clauses, initialFacts, Matched.empty) + problem.toLeft( + reduce0(clauses, initialFacts, Matched.empty) + ) } @@ -105,22 +109,21 @@ object Logic * This avoids the situation where an atom is proved but no clauses prove it. * This isn't necessarily a problem, but the main sbt use cases expects * a proven atom to have at least one clause satisfied. */ - def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]) { + private[this] def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]): Option[InitialOverlap] = { val as = atoms(clauses) val initialOverlap = initialFacts.filter(as.inHead) - if(initialOverlap.nonEmpty) throw new InitialOverlap(initialOverlap) + if(initialOverlap.nonEmpty) Some(new InitialOverlap(initialOverlap)) else None } - private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]) { + private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]): Option[InitialContradictions] = { val contradictions = pos intersect neg - if(contradictions.nonEmpty) throw new InitialContradictions(contradictions) + if(contradictions.nonEmpty) Some(new InitialContradictions(contradictions)) else None } - def checkAcyclic(clauses: Clauses) { + private[this] def checkAcyclic(clauses: Clauses): Option[CyclicNegation] = { val deps = dependencyMap(clauses) val cycle = Dag.findNegativeCycle(system(deps))(deps.keys.toList) - if(cycle.nonEmpty) - throw new CyclicNegation(cycle) + if(cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None } private[this] def system(deps: Map[Atom, 
Set[Literal]]) = new Dag.System[Atom] { type B = Literal @@ -139,9 +142,10 @@ object Logic (m /: heads) { (n, head) => n.updated(head, n.getOrElse(head, Set.empty) ++ deps) } } - final class InitialContradictions(val literals: Set[Atom]) extends RuntimeException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) - final class InitialOverlap(val literals: Set[Atom]) extends RuntimeException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) - final class CyclicNegation(val cycle: List[Literal]) extends RuntimeException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) + sealed abstract class LogicException(override val toString: String) + final class InitialContradictions(val literals: Set[Atom]) extends LogicException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) + final class InitialOverlap(val literals: Set[Atom]) extends LogicException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) + final class CyclicNegation(val cycle: List[Literal]) extends LogicException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) /** Tracks proven atoms in the reverse order they were proved. */ final class Matched private(val provenSet: Set[Atom], reverseOrdered: List[Atom]) { From 9264099594b853a12dc4a6a6b7292fc40f260057 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 040/148] API docs, better terminology for negative cycle checking in logic system. 
--- main/src/main/scala/sbt/AutoPlugin.scala | 6 +- main/src/main/scala/sbt/PluginDiscovery.scala | 16 +++++- util/collection/src/main/scala/sbt/Dag.scala | 57 ++++++++++++------- .../src/main/scala/sbt/logic/Logic.scala | 11 ++-- 4 files changed, 61 insertions(+), 29 deletions(-) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index 3a799d601..7521cd8c9 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -76,7 +76,11 @@ abstract class AutoPlugin // TODO?: def commands: Seq[Command] } -final class AutoPluginException(val origin: LogicException, prefix: String) extends RuntimeException(prefix + Natures.translateMessage(origin)) { +/** An error that occurs when auto-plugins aren't configured properly. +* It translates the error from the underlying logic system to be targeted at end users. */ +final class AutoPluginException(val origin: LogicException, prefix: String) extends RuntimeException(prefix + Natures.translateMessage(origin)) +{ + /** Prepends `p` to the error message derived from `origin`. */ def withPrefix(p: String) = new AutoPluginException(origin, p) } diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala index 351debfb8..0d49e6fd7 100644 --- a/main/src/main/scala/sbt/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -17,9 +17,12 @@ object PluginDiscovery final val Builds = "sbt/sbt.builds" final val AutoImports = "sbt/sbt.autoimports" } + /** Names of top-level modules that subclass sbt plugin-related classes: [[Plugin]], [[AutoImport]], [[AutoPlugin]], and [[Build]]. 
*/ final class DiscoveredNames(val plugins: Seq[String], val autoImports: Seq[String], val autoPlugins: Seq[String], val builds: Seq[String]) + def emptyDiscoveredNames: DiscoveredNames = new DiscoveredNames(Nil, Nil, Nil, Nil) + /** Discovers and loads the sbt-plugin-related top-level modules from the classpath and source analysis in `data` and using the provided class `loader`. */ def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = { def discover[T](resource: String)(implicit mf: reflect.ClassManifest[T]) = @@ -27,6 +30,8 @@ object PluginDiscovery import Paths._ new DetectedPlugins(discover[Plugin](Plugins), discover[AutoImport](AutoImports), discover[AutoPlugin](AutoPlugins), discover[Build](Builds)) } + + /** Discovers the sbt-plugin-related top-level modules from the provided source `analysis`. */ def discoverSourceAll(analysis: inc.Analysis): DiscoveredNames = { def discover[T](implicit mf: reflect.ClassManifest[T]): Seq[String] = @@ -35,6 +40,7 @@ object PluginDiscovery } // TODO: for 0.14.0, consider consolidating into a single file, which would make the classpath search 4x faster + /** Writes discovered module `names` to zero or more files in `dir` as per [[writeDescriptor]] and returns the list of files written. */ def writeDescriptors(names: DiscoveredNames, dir: File): Seq[File] = { import Paths._ @@ -47,6 +53,7 @@ object PluginDiscovery files.flatMap(_.toList) } + /** Stores the module `names` in `dir / path`, one per line, unless `names` is empty and then the file is deleted and `None` returned. */ def writeDescriptor(names: Seq[String], dir: File, path: String): Option[File] = { val descriptor: File = new File(dir, path) @@ -62,13 +69,15 @@ object PluginDiscovery } } - + /** Discovers the names of top-level modules listed in resources named `resourceName` as per [[binaryModuleNames]] or + * available as analyzed source and extending from any of `subclasses` as per [[sourceModuleNames]]. 
*/ def binarySourceModuleNames(classpath: Seq[Attributed[File]], loader: ClassLoader, resourceName: String, subclasses: String*): Seq[String] = ( binaryModuleNames(data(classpath), loader, resourceName) ++ (analyzed(classpath) flatMap ( a => sourceModuleNames(a, subclasses : _*) )) ).distinct + /** Discovers top-level modules in `analysis` that inherit from any of `subclasses`. */ def sourceModuleNames(analysis: inc.Analysis, subclasses: String*): Seq[String] = { val subclassSet = subclasses.toSet @@ -80,6 +89,9 @@ object PluginDiscovery } } + /** Obtains the list of modules identified in all resource files `resourceName` from `loader` that are on `classpath`. + * `classpath` and `loader` are both required to ensure that `loader` + * doesn't bring in any resources outside of the intended `classpath`, such as from parent loaders. */ def binaryModuleNames(classpath: Seq[File], loader: ClassLoader, resourceName: String): Seq[String] = { import collection.JavaConversions._ @@ -87,6 +99,8 @@ object PluginDiscovery IO.readLinesURL(u).map( _.trim).filter(!_.isEmpty) } } + + /** Returns `true` if `url` is an entry in `classpath`.*/ def onClasspath(classpath: Seq[File])(url: URL): Boolean = IO.urlAsFile(url) exists (classpath.contains _) diff --git a/util/collection/src/main/scala/sbt/Dag.scala b/util/collection/src/main/scala/sbt/Dag.scala index 0ce07baf2..f0594ed50 100644 --- a/util/collection/src/main/scala/sbt/Dag.scala +++ b/util/collection/src/main/scala/sbt/Dag.scala @@ -72,39 +72,52 @@ object Dag new Cyclic(value, a :: all, false) } - private[sbt] trait System[A] { - type B - def dependencies(t: A): List[B] - def isNegated(b: B): Boolean - def toA(b: B): A + /** A directed graph with edges labeled positive or negative. */ + private[sbt] trait DirectedSignedGraph[Node] + { + /** Directed edge type that tracks the sign and target (head) vertex. + * The sign can be obtained via [[isNegative]] and the target vertex via [[head]]. 
*/ + type Arrow + /** List of initial nodes. */ + def nodes: List[Arrow] + /** Outgoing edges for `n`. */ + def dependencies(n: Node): List[Arrow] + /** `true` if the edge `a` is "negative", false if it is "positive". */ + def isNegative(a: Arrow): Boolean + /** The target of the directed edge `a`. */ + def head(a: Arrow): Node } - private[sbt] def findNegativeCycle[T](system: System[T])(nodes: List[system.B]): List[system.B] = + + /** Traverses a directed graph defined by `graph` looking for a cycle that includes a "negative" edge. + * The directed edges are weighted by the caller as "positive" or "negative". + * If a cycle containing a "negative" edge is detected, its member edges are returned in order. + * Otherwise, the empty list is returned. */ + private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] = { import scala.annotation.tailrec - import system._ - val finished = new mutable.HashSet[T] - val visited = new mutable.HashSet[T] + import graph._ + val finished = new mutable.HashSet[Node] + val visited = new mutable.HashSet[Node] - def visit(nodes: List[B], stack: List[B]): List[B] = nodes match { + def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match { case Nil => Nil - case node :: tail => - def indent = "\t" * stack.size - val atom = toA(node) - if(!visited(atom)) + case edge :: tail => + val node = head(edge) + if(!visited(node)) { - visited += atom - visit(dependencies(atom), node :: stack) match { + visited += node + visit(dependencies(node), edge :: stack) match { case Nil => - finished += atom + finished += node visit(tail, stack) case cycle => cycle } } - else if(!finished(atom)) + else if(!finished(node)) { - // cycle. If negation is involved, it is an error. - val between = node :: stack.takeWhile(f => toA(f) != atom) - if(between exists isNegated) + // cycle. If a negative edge is involved, it is an error. 
+ val between = edge :: stack.takeWhile(f => head(f) != node) + if(between exists isNegative) between else visit(tail, stack) @@ -113,7 +126,7 @@ object Dag visit(tail, stack) } - visit(nodes, Nil) + visit(graph.nodes, Nil) } } diff --git a/util/logic/src/main/scala/sbt/logic/Logic.scala b/util/logic/src/main/scala/sbt/logic/Logic.scala index 2181fbb7e..4eb8e64b1 100644 --- a/util/logic/src/main/scala/sbt/logic/Logic.scala +++ b/util/logic/src/main/scala/sbt/logic/Logic.scala @@ -122,17 +122,18 @@ object Logic private[this] def checkAcyclic(clauses: Clauses): Option[CyclicNegation] = { val deps = dependencyMap(clauses) - val cycle = Dag.findNegativeCycle(system(deps))(deps.keys.toList) + val cycle = Dag.findNegativeCycle(graph(deps)) if(cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None } - private[this] def system(deps: Map[Atom, Set[Literal]]) = new Dag.System[Atom] { - type B = Literal + private[this] def graph(deps: Map[Atom, Set[Literal]]) = new Dag.DirectedSignedGraph[Atom] { + type Arrow = Literal + def nodes = deps.keys.toList def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList - def isNegated(b: Literal) = b match { + def isNegative(b: Literal) = b match { case Negated(_) => true case Atom(_) => false } - def toA(b: Literal) = b.atom + def head(b: Literal) = b.atom } private[this] def dependencyMap(clauses: Clauses): Map[Atom, Set[Literal]] = From 162d8094baba1db29861cebceda4de2331a8e7e6 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Fri, 24 Jan 2014 14:19:18 -0500 Subject: [PATCH 041/148] Convert logic system test cases into unit tests. Still TODO for auto-plugins/logic: * property-based tests for logic system * user documentation * (optional) 'about plugins' or similar to show more information about the auto-plugins for a project * (deferred) allow AutoPlugin to inject Commands directly? * (deferred) provide AutoPlugin functionality to arbitrary scopes instead of just at the Project level? 
--- project/Sbt.scala | 2 +- .../logic/src/test/scala/sbt/logic/Test.scala | 35 ++++++++++++++++++- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 3fb03a64a..c434c5098 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -74,7 +74,7 @@ object Sbt extends Build // cross versioning lazy val crossSub = baseProject(utilPath / "cross", "Cross") settings(inConfig(Compile)(Transform.crossGenSettings): _*) // A logic with restricted negation as failure for a unique, stable model - lazy val logicSub = baseProject(utilPath / "logic", "Logic").dependsOn(collectionSub, relationSub) + lazy val logicSub = testedBaseProject(utilPath / "logic", "Logic").dependsOn(collectionSub, relationSub) /* **** Intermediate-level Modules **** */ diff --git a/util/logic/src/test/scala/sbt/logic/Test.scala b/util/logic/src/test/scala/sbt/logic/Test.scala index 49836998a..cf50ef9fd 100644 --- a/util/logic/src/test/scala/sbt/logic/Test.scala +++ b/util/logic/src/test/scala/sbt/logic/Test.scala @@ -1,7 +1,40 @@ package sbt package logic -object Test { + import org.scalacheck._ + import Prop.secure + import Logic.{LogicException, Matched} + +object LogicTest extends Properties("Logic") +{ + import TestClauses._ + + property("Handles trivial resolution.") = secure( expect(trivial, Set(A) ) ) + property("Handles less trivial resolution.") = secure( expect(lessTrivial, Set(B,A,D)) ) + property("Handles cycles without negation") = secure( expect(cycles, Set(F,A,B)) ) + property("Handles basic exclusion.") = secure( expect(excludedPos, Set()) ) + property("Handles exclusion of head proved by negation.") = secure( expect(excludedNeg, Set()) ) + // TODO: actually check ordering, probably as part of a check that dependencies are satisfied + property("Properly orders results.") = secure( expect(ordering, Set(B,A,C,E,F))) + property("Detects cyclic negation") = secure( + Logic.reduceAll(badClauses, Set()) match { + case Right(res) => false + 
case Left(err: Logic.CyclicNegation) => true + case Left(err) => error(s"Expected cyclic error, got: $err") + } + ) + + def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match { + case Left(err) => false + case Right(res) => + val actual = res.provenSet + (actual == expected) || error(s"Expected to prove $expected, but actually proved $actual") + } +} + +object TestClauses +{ + val A = Atom("A") val B = Atom("B") val C = Atom("C") From 9af1585e8d49dfc83df51f46b0c6893f44a3dfbd Mon Sep 17 00:00:00 2001 From: kalmanb Date: Sat, 25 Jan 2014 22:21:49 +1300 Subject: [PATCH 042/148] Add sbt-ctags community plugin information --- src/sphinx/Community/Community-Plugins.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index 8886f7ece..36ef5f170 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -68,6 +68,8 @@ Plugins for IDEs: - Sublime Text: https://github.com/orrsella/sbt-sublime - Ensime: https://github.com/aemoncannon/ensime-sbt-cmd - sbt-mode for Emacs: https://github.com/hvesalai/sbt-mode +- sbt-ctags (manage library dependency sources for vim, emacs, sublime) + https://github.com/kalmanb/sbt-ctags Web Plugins ~~~~~~~~~~~ From 91650c7f71483650d36999212dfaa1022c3b90d6 Mon Sep 17 00:00:00 2001 From: "Taro L. 
Saito" Date: Sun, 26 Jan 2014 00:12:16 +0900 Subject: [PATCH 043/148] Add a link to sbt-sonatype plugin in Using-Sonatype page --- src/sphinx/Community/Using-Sonatype.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sphinx/Community/Using-Sonatype.rst b/src/sphinx/Community/Using-Sonatype.rst index 77560258d..a1f4bff51 100644 --- a/src/sphinx/Community/Using-Sonatype.rst +++ b/src/sphinx/Community/Using-Sonatype.rst @@ -1,3 +1,6 @@ + + + ======================= Deploying to Sonatype ======================= @@ -161,7 +164,7 @@ In sbt, run `publishSigned` and you should see something like the following: After publishing you have to follow the `Release workflow of nexus `_. -In the future, we hope to provide a Nexus sbt plugin that allows the +`sbt-sonatype plugin `_ allows the release workflow procedures to be performed directly from sbt. *Note: Staged releases allow testing across large projects of From 02a7fd8c83b6e93043be3e34cf1c6e9edcac2772 Mon Sep 17 00:00:00 2001 From: Ngoc Dao Date: Mon, 27 Jan 2014 19:16:23 +0900 Subject: [PATCH 044/148] Add link to xitrum-package (collects dependency .jar files for standalone Scala programs) --- src/sphinx/Community/Community-Plugins.rst | 64 +++++++++++----------- 1 file changed, 33 insertions(+), 31 deletions(-) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index 8886f7ece..14342a403 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -4,9 +4,9 @@ Community Plugins sbt Organization ================ - -The `sbt organization `_ is available for use by any sbt plugin. -Developers who contribute their plugins into the community organization will still retain + +The `sbt organization `_ is available for use by any sbt plugin. +Developers who contribute their plugins into the community organization will still retain control over their repository and its access. 
The goal of the sbt organization is to organize sbt software into one central location. @@ -21,18 +21,18 @@ If you would like to publish your project to this Ivy repository, first contact :: publishTo := Some(Resolver.url("sbt-plugin-releases", new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)) - + publishMavenStyle := false - + You'll also need to add your credentials somewhere. For example, you might use a `~/.sbt/pluginpublish.sbt` file: - + :: - credentials += Credentials("Artifactory Realm", + credentials += Credentials("Artifactory Realm", "repo.scala-sbt.org", "@user name@", "@my encrypted password@") - + Where `@my encrypted password@` is actually obtained using the following `instructions `_. - + *Note: Your code must abide by the* `repository polices `_. To automatically deploy snapshot/release versions of your plugin use the following configuration: @@ -55,8 +55,8 @@ Available Plugins Please feel free to `submit a pull request `_ that adds your plugin to the list. 
-Plugins for IDEs: -~~~~~~~~~~~~~~~~~ +Plugins for IDEs +~~~~~~~~~~~~~~~~ - IntelliJ IDEA - sbt Plugin to generate IDEA project configuration: @@ -116,25 +116,6 @@ One jar plugins - sbt-onejar (Packages your project using One-JARâ„¢): https://github.com/sbt/sbt-onejar -Frontend development plugins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- coffeescripted-sbt: https://github.com/softprops/coffeescripted-sbt -- less-sbt (for less-1.3.0): https://github.com/softprops/less-sbt -- sbt-less-plugin (it uses less-1.3.0): - https://github.com/btd/sbt-less-plugin -- sbt-emberjs: https://github.com/stefri/sbt-emberjs -- sbt-closure: https://github.com/eltimn/sbt-closure -- sbt-yui-compressor: https://github.com/indrajitr/sbt-yui-compressor -- sbt-requirejs: https://github.com/scalatra/sbt-requirejs -- sbt-vaadin-plugin: https://github.com/henrikerola/sbt-vaadin-plugin - -Game development plugins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- sbt-lwjgl-plugin (Light Weight Java Game Library): https://github.com/philcali/sbt-lwjgl-plugin -- sbt-scage-plugin (Scala Game Engine): https://github.com/mvallerie/sbt-scage-plugin - Release plugins ~~~~~~~~~~~~~~~ @@ -160,6 +141,27 @@ Release plugins https://github.com/sbt/sbt-native-packager - sbt-sonatype-plugin (releases to Sonatype Nexus repository) https://github.com/xerial/sbt-sonatype +- xitrum-package (collects dependency .jar files for standalone Scala programs): + https://github.com/ngocdaothanh/xitrum-package + +Frontend development plugins +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- coffeescripted-sbt: https://github.com/softprops/coffeescripted-sbt +- less-sbt (for less-1.3.0): https://github.com/softprops/less-sbt +- sbt-less-plugin (it uses less-1.3.0): + https://github.com/btd/sbt-less-plugin +- sbt-emberjs: https://github.com/stefri/sbt-emberjs +- sbt-closure: https://github.com/eltimn/sbt-closure +- sbt-yui-compressor: https://github.com/indrajitr/sbt-yui-compressor +- sbt-requirejs: https://github.com/scalatra/sbt-requirejs +- 
sbt-vaadin-plugin: https://github.com/henrikerola/sbt-vaadin-plugin + +Game development plugins +~~~~~~~~~~~~~~~~~~~~~~~~ + +- sbt-lwjgl-plugin (Light Weight Java Game Library): https://github.com/philcali/sbt-lwjgl-plugin +- sbt-scage-plugin (Scala Game Engine): https://github.com/mvallerie/sbt-scage-plugin System plugins ~~~~~~~~~~~~~~ @@ -305,5 +307,5 @@ OSGi plugin Plugin bundles ~~~~~~~~~~~~~~ -- tl-os-sbt-plugins (Version, Release, and Package Management, Play 2.0 and Git utilities) : +- tl-os-sbt-plugins (Version, Release, and Package Management, Play 2.0 and Git utilities) : https://github.com/trafficland/tl-os-sbt-plugins From 8bfab5313e5e90ee1eb90c0407fb6f200311da60 Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Mon, 27 Jan 2014 14:35:53 +0000 Subject: [PATCH 045/148] Fixed SessionSettings replacing existing setting --- main/src/main/scala/sbt/SessionSettings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main/src/main/scala/sbt/SessionSettings.scala b/main/src/main/scala/sbt/SessionSettings.scala index caff96b56..81342477c 100755 --- a/main/src/main/scala/sbt/SessionSettings.scala +++ b/main/src/main/scala/sbt/SessionSettings.scala @@ -110,7 +110,7 @@ object SessionSettings val RangePosition(_, r@LineRange(start, end)) = s.pos settings find (_._1.key == s.key) match { case Some(ss@(ns, newLines)) if !ns.init.dependencies.contains(ns.key) => - val shifted = ns withPos RangePosition(path, LineRange(start - offs, start - offs + 1)) + val shifted = ns withPos RangePosition(path, LineRange(start - offs, start - offs + newLines.size)) (offs + end - start - newLines.size, shifted::olds, ss::repl, lineMap + (start -> (end, newLines))) case _ => val shifted = s withPos RangePosition(path, r shift -offs) From 668ae8d8b1c61fcd93ed20331c2d84d891129879 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Mon, 27 Jan 2014 19:41:04 +0100 Subject: [PATCH 046/148] Fix typo in assertion message in TextAnalysisFormat Add missing string 
interpolation indicator in assertion message. --- .../persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index a23a87725..3bd28190c 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -225,8 +225,8 @@ object TextAnalysisFormat { if (nameHashing) Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) else { - assert(names.all.isEmpty, s"When `nameHashing` is disabled `names` relation " + - "should be empty: $names") + assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " + + s"should be empty: $names") Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) } } From bb8dd21620d9a8a35c6d1a4a4070d5279e1c2950 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Mon, 27 Jan 2014 19:48:13 +0100 Subject: [PATCH 047/148] Record the name of an Analysis file in case of a read failure. Catch ReadException and wrap it in IOException that carries the name of the file we failed to read in its message. We have to catch exception and wrap them because in TextAnalysisFormat we don't have an access to the file name (it operates using an abstract reader). 
--- .../main/scala/sbt/compiler/IncrementalCompiler.scala | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala index 68ad63f2c..5028c7996 100644 --- a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala +++ b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala @@ -46,5 +46,12 @@ object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] } def readCacheUncaught(file: File): (Analysis, CompileSetup) = - Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) } + Using.fileReader(IO.utf8)(file) { reader => + try { + TextAnalysisFormat.read(reader) + } catch { + case ex: sbt.inc.ReadException => + throw new java.io.IOException(s"Error while reading $file", ex) + } + } } From 264e49a912a9c92b5c5f91c51bbf14d27f9d3757 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 28 Jan 2014 10:45:15 +0100 Subject: [PATCH 048/148] Avoid compiler crash for naked `key.value` calls. Untyped trees underneath typed trees makes Jack and sad boy. And they make superaccessors a sad phase. The recent refactoring to retain original types in the trees representing the argument to the task macro meant that the `value` macro also was changed to try to avoid this untyped-under-typed problem. However, it didn't go deep enough, and left the child trees of the placeholder tree `InputWrapper.wrap[T](key)` untyped. This commit uses `c.typeCheck` to locally typeheck that tree fully instead. 
Fixes #1031 --- .../src/main/scala/sbt/std/InputWrapper.scala | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/main/settings/src/main/scala/sbt/std/InputWrapper.scala b/main/settings/src/main/scala/sbt/std/InputWrapper.scala index 400dbc8f8..5a2ee0dd1 100644 --- a/main/settings/src/main/scala/sbt/std/InputWrapper.scala +++ b/main/settings/src/main/scala/sbt/std/InputWrapper.scala @@ -75,8 +75,16 @@ object InputWrapper sel.setPos(pos) // need to set the position on Select, because that is where the compileTimeOnly check looks val tree = ApplyTree(TypeApply(sel, TypeTree(tpe) :: Nil), ts.tree :: Nil) tree.setPos(ts.tree.pos) - tree.setType(tpe) - c.Expr[T](tree) + // JZ: I'm not sure why we need to do this. Presumably a caller is wrapping this tree in a + // typed tree *before* handing the whole thing back to the macro engine. One must never splice + // untyped trees under typed trees, as the type checker doesn't descend if `tree.tpe == null`. + // + // #1031 The previous attempt to fix this just set the type on `tree`, which worked in cases when the + // call to `.value` was inside a the task macro and eliminated before the end of the typer phase. + // But, if a "naked" call to `.value` left the typer, the superaccessors phase would freak out when + // if hit the untyped trees, before we could get to refchecks and the desired @compileTimeOnly warning. 
+ val typedTree = c.typeCheck(tree) + c.Expr[T](typedTree) } def valueMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[T] = From 083eb38bd2a3492395dca10ebae689aac8940d0f Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Sun, 2 Feb 2014 08:50:47 +0000 Subject: [PATCH 049/148] [scripted] Session update without reload --- .../session-update-from-cmd/build.check.1 | 10 ++++++++ .../project/session-update-from-cmd/build.sbt | 7 ++++++ .../project/build.scala | 25 +++++++++++++++++++ .../project/session-update-from-cmd/test | 4 +++ 4 files changed, 46 insertions(+) create mode 100644 sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 create mode 100644 sbt/src/sbt-test/project/session-update-from-cmd/build.sbt create mode 100644 sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala create mode 100644 sbt/src/sbt-test/project/session-update-from-cmd/test diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 b/sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 new file mode 100644 index 000000000..6363b1678 --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 @@ -0,0 +1,10 @@ +name := "projectName" + +k1 := { +// +// +} + +k2 := { + println("This is k2") +} diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/build.sbt b/sbt/src/sbt-test/project/session-update-from-cmd/build.sbt new file mode 100644 index 000000000..8bd18ad5c --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/build.sbt @@ -0,0 +1,7 @@ +name := "projectName" + +k1 := {} + +k2 := { + println("This is k2") +} diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala b/sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala new file mode 100644 index 000000000..c7c6c8238 --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala @@ -0,0 +1,25 @@ +import sbt._ +import Keys._ + +object build extends Build { + lazy val k1 = 
taskKey[Unit]("") + lazy val k2 = taskKey[Unit]("") + + val UpdateK1 = Command.command("UpdateK1") { st: State => + val ex = Project extract st + import ex._ + val session2 = BuiltinCommands.setThis(st, ex, Seq(k1 := {}), """k1 := { + |// + |// + |}""".stripMargin).session + val st1 = BuiltinCommands.reapply(session2, structure, st) + // SessionSettings.writeSettings(ex.currentRef, session2, ex.session.original, ex.structure) + SessionSettings.saveAllSettings(st1) + } + + lazy val root = Project("root", file(".")) settings( + commands += UpdateK1 + ) +} + +// vim: set ts=4 sw=4 et: diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/test b/sbt/src/sbt-test/project/session-update-from-cmd/test new file mode 100644 index 000000000..d29ba8270 --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/test @@ -0,0 +1,4 @@ +> UpdateK1 +$ must-mirror build.sbt build.check.1 +> UpdateK1 +$ must-mirror build.sbt build.check.1 From 4ab8074753695665300b692a4fee26635b1ed118 Mon Sep 17 00:00:00 2001 From: Suzanne Hamilton Date: Thu, 6 Feb 2014 00:55:21 +0000 Subject: [PATCH 050/148] Fix formatting of inline code samples which are pluralized --- src/sphinx/Getting-Started/Basic-Def.rst | 6 +++--- src/sphinx/Howto/runningcommands.rst | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/sphinx/Getting-Started/Basic-Def.rst b/src/sphinx/Getting-Started/Basic-Def.rst index 422cba303..e1cb36b7a 100644 --- a/src/sphinx/Getting-Started/Basic-Def.rst +++ b/src/sphinx/Getting-Started/Basic-Def.rst @@ -48,7 +48,7 @@ becomes sbt's new map. To create the map, sbt first sorts the list of settings so that all changes to the same key are made together, and values that depend on other keys are processed after the keys they depend on. Then sbt walks -over the sorted list of `Setting`s and applies each one to the map in +over the sorted list of `Setting`\ s and applies each one to the map in turn. 
Summary: A build definition defines a list of `Setting[T]`, where a @@ -77,8 +77,8 @@ Here's an example: Each `Setting` is defined with a Scala expression. The expressions in `build.sbt` are independent of one another, and they are expressions, rather than complete Scala statements. These -expressions may be interspersed with `val`s, `lazy val`s, and `def`s. -Top-level `object`s and `class`es are not allowed in `build.sbt`. +expressions may be interspersed with `val`\ s, `lazy val`\ s, and `def`\ s. +Top-level `object`\ s and `class`\ es are not allowed in `build.sbt`. Those should go in the `project/` directory as full Scala source files. On the left, :key:`name`, :key:`version`, and :key:`scalaVersion` are *keys*. A diff --git a/src/sphinx/Howto/runningcommands.rst b/src/sphinx/Howto/runningcommands.rst index bf620f6b6..22554277e 100644 --- a/src/sphinx/Howto/runningcommands.rst +++ b/src/sphinx/Howto/runningcommands.rst @@ -84,5 +84,5 @@ For example, > eval 2+2 4: Int -Variables defined by an `eval` are not visible to subsequent `eval`s, although changes to system properties persist and affect the JVM that is running sbt. +Variables defined by an `eval` are not visible to subsequent `eval`\ s, although changes to system properties persist and affect the JVM that is running sbt. Use the Scala REPL (:key:`console` and related commands) for full support for evaluating Scala code interactively. From 4feb7d3dc8d24092f00a9f8fa57787b84ce52ced Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Sat, 8 Feb 2014 10:23:31 -0500 Subject: [PATCH 051/148] Fix typos in AutoPlugin API docs. 
--- main/src/main/scala/sbt/AutoPlugin.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index 7521cd8c9..3f217bb6b 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -12,7 +12,7 @@ trait AutoImport An AutoPlugin defines a group of settings and the conditions that the settings are automatically added to a build (called "activation"). The `select` method defines the conditions, `provides` defines an identifier for the AutoPlugin, - and the a method like `projectSettings` defines the settings to add. + and a method like `projectSettings` defines the settings to add. Steps for plugin authors: 1. Determine the natures that, when present (or absent), activate the AutoPlugin. @@ -42,7 +42,7 @@ will activate `MyPlugin` defined above and have its settings automatically added .natures( Web && Javascript && !MyStuff) -then the `MyPlugin` settings (and anything that activates when `MyStuff` is activated) will not be added. +then the `MyPlugin` settings (and anything that activates only when `MyStuff` is activated) will not be added. */ abstract class AutoPlugin { From 708a3b107bfa22e55ca40fb4241730c130cf1338 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Sat, 8 Feb 2014 10:23:31 -0500 Subject: [PATCH 052/148] minor API updates --- main/src/main/scala/sbt/AutoPlugin.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index 3f217bb6b..182d531f6 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -9,15 +9,15 @@ package sbt trait AutoImport /** -An AutoPlugin defines a group of settings and the conditions that the settings are automatically added to a build (called "activation"). 
+An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). The `select` method defines the conditions, `provides` defines an identifier for the AutoPlugin, and a method like `projectSettings` defines the settings to add. Steps for plugin authors: -1. Determine the natures that, when present (or absent), activate the AutoPlugin. +1. Determine the [[Nature]]s that, when present (or absent), activate the AutoPlugin. 2. Determine the settings/configurations to automatically inject when activated. -3. Define a new, unique identifying [[Nature]], which is a wrapper around a String ID. +3. Define a new, unique identifying [[Nature]] associated with the AutoPlugin, where a Nature is essentially a String ID. For example, the following will automatically add the settings in `projectSettings` to a project that has both the `Web` and `Javascript` natures enabled. It will itself From 0fc5f1525e9322008073077f1c2f2ab4c4b93f69 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Sat, 8 Feb 2014 10:23:31 -0500 Subject: [PATCH 053/148] Generate error when multiple AutoPlugins provide the same Nature. --- main/src/main/scala/sbt/AutoPlugin.scala | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index 182d531f6..22701c5ca 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -78,7 +78,7 @@ abstract class AutoPlugin /** An error that occurs when auto-plugins aren't configured properly. * It translates the error from the underlying logic system to be targeted at end users. 
*/ -final class AutoPluginException(val origin: LogicException, prefix: String) extends RuntimeException(prefix + Natures.translateMessage(origin)) +final class AutoPluginException(val origin: Option[LogicException], prefix: String) extends RuntimeException(prefix + Natures.translateMessage(origin)) { /** Prepends `p` to the error message derived from `origin`. */ def withPrefix(p: String) = new AutoPluginException(origin, p) @@ -109,7 +109,9 @@ object Natures Types.const(Nil) else { - val byAtom = defined.map(x => (Atom(x.provides.label), x)).toMap + val byAtom = defined.map(x => (Atom(x.provides.label), x)) + val byAtomMap = byAtom.toMap + if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) val clauses = Clauses( defined.map(d => asClause(d)) ) requestedNatures => Logic.reduce(clauses, flatten(requestedNatures).toSet) match { @@ -117,7 +119,7 @@ object Natures case Right(results) => // results includes the originally requested (positive) atoms, // which won't have a corresponding AutoPlugin to map back to - results.ordered.flatMap(a => byAtom.get(a).toList) + results.ordered.flatMap(a => byAtomMap.get(a).toList) } } @@ -129,6 +131,15 @@ object Natures private[this] def literalsString(lits: Seq[Literal]): String = lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString(", ") + private[this] def duplicateProvidesError(byAtom: Seq[(Atom, AutoPlugin)]) { + val dupsByAtom = defined.groupBy(_._1).mapValues(_._2) + val dupStrings = for( (atom, dups) <- dupsByAtom if dups.size > 1 ) yield + s"${atom.label} by ${dups.mkString(", ")} + val (ns, nl) = if(dupStrings > 1) ("s", "\n\t") else ("", " ") + val message = s"Nature$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" + throw new AutoPluginException(None, message) + } + /** [[Natures]] instance that doesn't require any [[Nature]]s. 
*/ def empty: Natures = Empty private[sbt] final object Empty extends Natures { From eb7da2f6892fade0d71382a0932e8cff5c8193e3 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Sat, 8 Feb 2014 10:23:31 -0500 Subject: [PATCH 054/148] move Nature-related classes to Natures.scala --- main/src/main/scala/sbt/AutoPlugin.scala | 114 ++--------------------- main/src/main/scala/sbt/Natures.scala | 105 +++++++++++++++++++++ 2 files changed, 112 insertions(+), 107 deletions(-) create mode 100644 main/src/main/scala/sbt/Natures.scala diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index 22701c5ca..a087d8982 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -1,9 +1,7 @@ package sbt - import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated} - import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} import Def.Setting - import Natures._ + import logic.Logic.LogicException /** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ trait AutoImport @@ -78,111 +76,13 @@ abstract class AutoPlugin /** An error that occurs when auto-plugins aren't configured properly. * It translates the error from the underlying logic system to be targeted at end users. */ -final class AutoPluginException(val origin: Option[LogicException], prefix: String) extends RuntimeException(prefix + Natures.translateMessage(origin)) +final class AutoPluginException private(val message: String, val origin: Option[LogicException]) extends RuntimeException(message) { /** Prepends `p` to the error message derived from `origin`. */ - def withPrefix(p: String) = new AutoPluginException(origin, p) + def withPrefix(p: String) = new AutoPluginException(p + message, origin) } - - -/** An expression that matches `Nature`s. 
*/ -sealed trait Natures { - def && (o: Basic): Natures -} - -/** Represents a feature or conceptual group of settings. -* `label` is the unique ID for this nature. */ -final case class Nature(label: String) extends Basic { - /** Constructs a Natures matcher that excludes this Nature. */ - def unary_! : Basic = Exclude(this) - override def toString = label -} - -object Natures +object AutoPluginException { - // TODO: allow multiple AutoPlugins to provide the same Nature? - // TODO: translate error messages - /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[Nature]]s. - * The [[AutoPlugin]]s are topologically sorted so that a selected [[AutoPlugin]] comes before its selecting [[AutoPlugin]].*/ - def compile(defined: List[AutoPlugin]): Natures => Seq[AutoPlugin] = - if(defined.isEmpty) - Types.const(Nil) - else - { - val byAtom = defined.map(x => (Atom(x.provides.label), x)) - val byAtomMap = byAtom.toMap - if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) - val clauses = Clauses( defined.map(d => asClause(d)) ) - requestedNatures => - Logic.reduce(clauses, flatten(requestedNatures).toSet) match { - case Left(problem) => throw new AutoPluginException(problem, "") - case Right(results) => - // results includes the originally requested (positive) atoms, - // which won't have a corresponding AutoPlugin to map back to - results.ordered.flatMap(a => byAtomMap.get(a).toList) - } - } - - private[sbt] def translateMessage(e: LogicException) = e match { - case ic: InitialContradictions => s"Contradiction in selected natures. These natures were both included and excluded: ${literalsString(ic.literals.toSeq)}" - case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required natures are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" - case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. 
The problematic cycle is: ${literalsString(cn.cycle)}" - } - private[this] def literalsString(lits: Seq[Literal]): String = - lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString(", ") - - private[this] def duplicateProvidesError(byAtom: Seq[(Atom, AutoPlugin)]) { - val dupsByAtom = defined.groupBy(_._1).mapValues(_._2) - val dupStrings = for( (atom, dups) <- dupsByAtom if dups.size > 1 ) yield - s"${atom.label} by ${dups.mkString(", ")} - val (ns, nl) = if(dupStrings > 1) ("s", "\n\t") else ("", " ") - val message = s"Nature$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" - throw new AutoPluginException(None, message) - } - - /** [[Natures]] instance that doesn't require any [[Nature]]s. */ - def empty: Natures = Empty - private[sbt] final object Empty extends Natures { - def &&(o: Basic): Natures = o - override def toString = "" - } - - /** An included or excluded Nature. TODO: better name than Basic. */ - sealed abstract class Basic extends Natures { - def &&(o: Basic): Natures = And(this :: o :: Nil) - } - private[sbt] final case class Exclude(n: Nature) extends Basic { - def unary_! : Nature = n - override def toString = s"!$n" - } - private[sbt] final case class And(natures: List[Basic]) extends Natures { - def &&(o: Basic): Natures = And(o :: natures) - override def toString = natures.mkString(", ") - } - private[sbt] def and(a: Natures, b: Natures) = b match { - case Empty => a - case And(ns) => (a /: ns)(_ && _) - case b: Basic => a && b - } - - /** Defines a clause for `ap` such that the [[Nature]] provided by `ap` is the head and the selector for `ap` is the body. 
*/ - private[sbt] def asClause(ap: AutoPlugin): Clause = - Clause( convert(ap.select), Set(Atom(ap.provides.label)) ) - - private[this] def flatten(n: Natures): Seq[Literal] = n match { - case And(ns) => convertAll(ns) - case b: Basic => convertBasic(b) :: Nil - case Empty => Nil - } - - private[this] def convert(n: Natures): Formula = n match { - case And(ns) => convertAll(ns).reduce[Formula](_ && _) - case b: Basic => convertBasic(b) - case Empty => Formula.True - } - private[this] def convertBasic(b: Basic): Literal = b match { - case Exclude(n) => !convertBasic(n) - case Nature(s) => Atom(s) - } - private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic -} \ No newline at end of file + def apply(msg: String): AutoPluginException = new AutoPluginException(msg, None) + def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Natures.translateMessage(origin), Some(origin)) +} diff --git a/main/src/main/scala/sbt/Natures.scala b/main/src/main/scala/sbt/Natures.scala new file mode 100644 index 000000000..4d8f7095b --- /dev/null +++ b/main/src/main/scala/sbt/Natures.scala @@ -0,0 +1,105 @@ +package sbt + + import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated} + import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} + import Natures._ + +/** An expression that matches `Nature`s. */ +sealed trait Natures { + def && (o: Basic): Natures +} + +/** Represents a feature or conceptual group of settings. +* `label` is the unique ID for this nature. */ +final case class Nature(label: String) extends Basic { + /** Constructs a Natures matcher that excludes this Nature. */ + def unary_! : Basic = Exclude(this) + override def toString = label +} + +object Natures +{ + /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[Nature]]s. 
+ * The [[AutoPlugin]]s are topologically sorted so that a selected [[AutoPlugin]] comes before its selecting [[AutoPlugin]].*/ + def compile(defined: List[AutoPlugin]): Natures => Seq[AutoPlugin] = + if(defined.isEmpty) + Types.const(Nil) + else + { + val byAtom = defined.map(x => (Atom(x.provides.label), x)) + val byAtomMap = byAtom.toMap + if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) + val clauses = Clauses( defined.map(d => asClause(d)) ) + requestedNatures => + Logic.reduce(clauses, flatten(requestedNatures).toSet) match { + case Left(problem) => throw AutoPluginException(problem) + case Right(results) => + // results includes the originally requested (positive) atoms, + // which won't have a corresponding AutoPlugin to map back to + results.ordered.flatMap(a => byAtomMap.get(a).toList) + } + } + + private[sbt] def translateMessage(e: LogicException) = e match { + case ic: InitialContradictions => s"Contradiction in selected natures. These natures were both included and excluded: ${literalsString(ic.literals.toSeq)}" + case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required natures are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" + case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. 
The problematic cycle is: ${literalsString(cn.cycle)}" + } + private[this] def literalsString(lits: Seq[Literal]): String = + lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString(", ") + + private[this] def duplicateProvidesError(byAtom: Seq[(Atom, AutoPlugin)]) { + val dupsByAtom = byAtom.groupBy(_._1).mapValues(_.map(_._2)) + val dupStrings = for( (atom, dups) <- dupsByAtom if dups.size > 1 ) yield + s"${atom.label} by ${dups.mkString(", ")}" + val (ns, nl) = if(dupStrings.size > 1) ("s", "\n\t") else ("", " ") + val message = s"Nature$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" + throw AutoPluginException(message) + } + + /** [[Natures]] instance that doesn't require any [[Nature]]s. */ + def empty: Natures = Empty + private[sbt] final object Empty extends Natures { + def &&(o: Basic): Natures = o + override def toString = "" + } + + /** An included or excluded Nature. TODO: better name than Basic. */ + sealed abstract class Basic extends Natures { + def &&(o: Basic): Natures = And(this :: o :: Nil) + } + private[sbt] final case class Exclude(n: Nature) extends Basic { + def unary_! : Nature = n + override def toString = s"!$n" + } + private[sbt] final case class And(natures: List[Basic]) extends Natures { + def &&(o: Basic): Natures = And(o :: natures) + override def toString = natures.mkString(", ") + } + private[sbt] def and(a: Natures, b: Natures) = b match { + case Empty => a + case And(ns) => (a /: ns)(_ && _) + case b: Basic => a && b + } + + /** Defines a clause for `ap` such that the [[Nature]] provided by `ap` is the head and the selector for `ap` is the body. 
*/ + private[sbt] def asClause(ap: AutoPlugin): Clause = + Clause( convert(ap.select), Set(Atom(ap.provides.label)) ) + + private[this] def flatten(n: Natures): Seq[Literal] = n match { + case And(ns) => convertAll(ns) + case b: Basic => convertBasic(b) :: Nil + case Empty => Nil + } + + private[this] def convert(n: Natures): Formula = n match { + case And(ns) => convertAll(ns).reduce[Formula](_ && _) + case b: Basic => convertBasic(b) + case Empty => Formula.True + } + private[this] def convertBasic(b: Basic): Literal = b match { + case Exclude(n) => !convertBasic(n) + case Nature(s) => Atom(s) + } + private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic +} \ No newline at end of file From e037731d812f25551ce7eea091e11859a22aa802 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Sat, 8 Feb 2014 10:23:31 -0500 Subject: [PATCH 055/148] TODO --- main/src/main/scala/sbt/AutoPlugin.scala | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala index a087d8982..4cf06bfe3 100644 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ b/main/src/main/scala/sbt/AutoPlugin.scala @@ -1,3 +1,12 @@ +/* +TODO: +- Natured type contains AutoPlugin and Nature +- atoms of AutoPlugin.select are Natured +- atoms of Project.natures are Nature +- no more AutoPlugin.provides: name comes from module name +- index all available AutoPlugins to get the tasks that will be added +- error message when a task doesn't exist that it would be provided by plugin x, enabled by natures y,z, blocked by a, b +*/ package sbt import Def.Setting From 49bf842b3dfbf209845944068c4593dbbff6f06a Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Sat, 8 Feb 2014 10:23:31 -0500 Subject: [PATCH 056/148] Restructure Natures/AutoPlugin types - remove AutoPlugin.provides * name comes from module name * AutoPlugin is Nature-like via Basic - Project.addNatures only accepts varags of Nature values * enforces that a 
user cannot explicitly enable an AutoPlugin * drops need for && and - combinators - Project.excludeNatures accepts varags of AutoPlugin values * enforces that only AutoPlugins can be excluded * drops need for && and - combinators --- main/src/main/scala/sbt/AutoPlugin.scala | 97 ------------------- main/src/main/scala/sbt/Main.scala | 2 +- main/src/main/scala/sbt/Natures.scala | 95 +++++++++++++++++- main/src/main/scala/sbt/Project.scala | 14 ++- .../sbt-test/project/auto-plugins/build.sbt | 6 +- .../project/auto-plugins/project/Q.scala | 8 +- .../binary-plugin/changes/define/A.scala | 2 - 7 files changed, 106 insertions(+), 118 deletions(-) delete mode 100644 main/src/main/scala/sbt/AutoPlugin.scala diff --git a/main/src/main/scala/sbt/AutoPlugin.scala b/main/src/main/scala/sbt/AutoPlugin.scala deleted file mode 100644 index 4cf06bfe3..000000000 --- a/main/src/main/scala/sbt/AutoPlugin.scala +++ /dev/null @@ -1,97 +0,0 @@ -/* -TODO: -- Natured type contains AutoPlugin and Nature -- atoms of AutoPlugin.select are Natured -- atoms of Project.natures are Nature -- no more AutoPlugin.provides: name comes from module name -- index all available AutoPlugins to get the tasks that will be added -- error message when a task doesn't exist that it would be provided by plugin x, enabled by natures y,z, blocked by a, b -*/ -package sbt - - import Def.Setting - import logic.Logic.LogicException - -/** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ -trait AutoImport - -/** -An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). -The `select` method defines the conditions, - `provides` defines an identifier for the AutoPlugin, - and a method like `projectSettings` defines the settings to add. - -Steps for plugin authors: -1. Determine the [[Nature]]s that, when present (or absent), activate the AutoPlugin. -2. 
Determine the settings/configurations to automatically inject when activated. -3. Define a new, unique identifying [[Nature]] associated with the AutoPlugin, where a Nature is essentially a String ID. - -For example, the following will automatically add the settings in `projectSettings` - to a project that has both the `Web` and `Javascript` natures enabled. It will itself - define the `MyStuff` nature. This nature can be explicitly disabled by the user to - prevent the plugin from activating. - - object MyPlugin extends AutoPlugin { - def select = Web && Javascript - def provides = MyStuff - override def projectSettings = Seq(...) - } - -Steps for users: -1. add dependencies on plugins as usual with addSbtPlugin -2. add Natures to Projects, which will automatically select the plugin settings to add for those Projects. - -For example, given natures Web and Javascript (perhaps provided by plugins added with addSbtPlugin), - - .natures( Web && Javascript ) - -will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines - - .natures( Web && Javascript && !MyStuff) - -then the `MyPlugin` settings (and anything that activates only when `MyStuff` is activated) will not be added. -*/ -abstract class AutoPlugin -{ - /** This AutoPlugin will be activated for a project when the [[Natures]] matcher returned by this method matches that project's natures - * AND the user does not explicitly exclude the Nature returned by `provides`. - * - * For example, if this method returns `Web && Javascript`, this plugin instance will only be added - * if the `Web` and `Javascript` natures are enabled. */ - def select: Natures - - /** The unique [[Nature]] for this AutoPlugin instance. This has two purposes: - * 1. The user can explicitly disable this AutoPlugin. - * 2. Other plugins can activate based on whether this AutoPlugin was activated. 
- */ - def provides: Nature - - /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ - def projectConfigurations: Seq[Configuration] = Nil - - /** The [[Setting]]s to add in the scope of each project that activates this AutoPlugin. */ - def projectSettings: Seq[Setting[_]] = Nil - - /** The [[Setting]]s to add to the build scope for each project that activates this AutoPlugin. - * The settings returned here are guaranteed to be added to a given build scope only once - * regardless of how many projects for that build activate this AutoPlugin. */ - def buildSettings: Seq[Setting[_]] = Nil - - /** The [[Setting]]s to add to the global scope exactly once if any project activates this AutoPlugin. */ - def globalSettings: Seq[Setting[_]] = Nil - - // TODO?: def commands: Seq[Command] -} - -/** An error that occurs when auto-plugins aren't configured properly. -* It translates the error from the underlying logic system to be targeted at end users. */ -final class AutoPluginException private(val message: String, val origin: Option[LogicException]) extends RuntimeException(message) -{ - /** Prepends `p` to the error message derived from `origin`. 
*/ - def withPrefix(p: String) = new AutoPluginException(p + message, origin) -} -object AutoPluginException -{ - def apply(msg: String): AutoPluginException = new AutoPluginException(msg, None) - def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Natures.translateMessage(origin), Some(origin)) -} diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index 462d5a49b..c582426ae 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -125,7 +125,7 @@ object BuiltinCommands def aboutPlugins(e: Extracted): String = { - def list(b: BuildUnit) = b.plugins.detected.autoPlugins.values.map(_.provides) ++ b.plugins.detected.plugins.names + def list(b: BuildUnit) = b.plugins.detected.autoPlugins.values.map(_.label) ++ b.plugins.detected.plugins.names val allPluginNames = e.structure.units.values.flatMap(u => list(u.unit)).toSeq.distinct if(allPluginNames.isEmpty) "" else allPluginNames.mkString("Available Plugins: ", ", ", "") } diff --git a/main/src/main/scala/sbt/Natures.scala b/main/src/main/scala/sbt/Natures.scala index 4d8f7095b..b121df408 100644 --- a/main/src/main/scala/sbt/Natures.scala +++ b/main/src/main/scala/sbt/Natures.scala @@ -1,9 +1,95 @@ package sbt +/* +TODO: +- index all available AutoPlugins to get the tasks that will be added +- error message when a task doesn't exist that it would be provided by plugin x, enabled by natures y,z, blocked by a, b +*/ import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated} import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} + import Def.Setting import Natures._ +/** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ +trait AutoImport + +/** +An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). 
+The `select` method defines the conditions, + `provides` defines an identifier for the AutoPlugin, + and a method like `projectSettings` defines the settings to add. + +Steps for plugin authors: +1. Determine the [[Nature]]s that, when present (or absent), activate the AutoPlugin. +2. Determine the settings/configurations to automatically inject when activated. +3. Define a new, unique identifying [[Nature]] associated with the AutoPlugin, where a Nature is essentially a String ID. + +For example, the following will automatically add the settings in `projectSettings` + to a project that has both the `Web` and `Javascript` natures enabled. It will itself + define the `MyStuff` nature. This nature can be explicitly disabled by the user to + prevent the plugin from activating. + + object MyPlugin extends AutoPlugin { + def select = Web && Javascript + def provides = MyStuff + override def projectSettings = Seq(...) + } + +Steps for users: +1. add dependencies on plugins as usual with addSbtPlugin +2. add Natures to Projects, which will automatically select the plugin settings to add for those Projects. + +For example, given natures Web and Javascript (perhaps provided by plugins added with addSbtPlugin), + + .natures( Web && Javascript ) + +will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines + + .natures( Web && Javascript && !MyStuff) + +then the `MyPlugin` settings (and anything that activates only when `MyStuff` is activated) will not be added. +*/ +abstract class AutoPlugin extends Natures.Basic +{ + /** This AutoPlugin will be activated for a project when the [[Natures]] matcher returned by this method matches that project's natures + * AND the user does not explicitly exclude the Nature returned by `provides`. + * + * For example, if this method returns `Web && Javascript`, this plugin instance will only be added + * if the `Web` and `Javascript` natures are enabled. 
*/ + def select: Natures + + val label: String = getClass.getName.stripSuffix("$") + + /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ + def projectConfigurations: Seq[Configuration] = Nil + + /** The [[Setting]]s to add in the scope of each project that activates this AutoPlugin. */ + def projectSettings: Seq[Setting[_]] = Nil + + /** The [[Setting]]s to add to the build scope for each project that activates this AutoPlugin. + * The settings returned here are guaranteed to be added to a given build scope only once + * regardless of how many projects for that build activate this AutoPlugin. */ + def buildSettings: Seq[Setting[_]] = Nil + + /** The [[Setting]]s to add to the global scope exactly once if any project activates this AutoPlugin. */ + def globalSettings: Seq[Setting[_]] = Nil + + // TODO?: def commands: Seq[Command] +} + +/** An error that occurs when auto-plugins aren't configured properly. +* It translates the error from the underlying logic system to be targeted at end users. */ +final class AutoPluginException private(val message: String, val origin: Option[LogicException]) extends RuntimeException(message) +{ + /** Prepends `p` to the error message derived from `origin`. */ + def withPrefix(p: String) = new AutoPluginException(p + message, origin) +} +object AutoPluginException +{ + def apply(msg: String): AutoPluginException = new AutoPluginException(msg, None) + def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Natures.translateMessage(origin), Some(origin)) +} + /** An expression that matches `Nature`s. 
*/ sealed trait Natures { def && (o: Basic): Natures @@ -26,7 +112,7 @@ object Natures Types.const(Nil) else { - val byAtom = defined.map(x => (Atom(x.provides.label), x)) + val byAtom = defined.map(x => (Atom(x.label), x)) val byAtomMap = byAtom.toMap if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) val clauses = Clauses( defined.map(d => asClause(d)) ) @@ -68,8 +154,8 @@ object Natures sealed abstract class Basic extends Natures { def &&(o: Basic): Natures = And(this :: o :: Nil) } - private[sbt] final case class Exclude(n: Nature) extends Basic { - def unary_! : Nature = n + private[sbt] final case class Exclude(n: Basic) extends Basic { + def unary_! : Basic = n override def toString = s"!$n" } private[sbt] final case class And(natures: List[Basic]) extends Natures { @@ -84,7 +170,7 @@ object Natures /** Defines a clause for `ap` such that the [[Nature]] provided by `ap` is the head and the selector for `ap` is the body. */ private[sbt] def asClause(ap: AutoPlugin): Clause = - Clause( convert(ap.select), Set(Atom(ap.provides.label)) ) + Clause( convert(ap.select), Set(Atom(ap.label)) ) private[this] def flatten(n: Natures): Seq[Literal] = n match { case And(ns) => convertAll(ns) @@ -100,6 +186,7 @@ object Natures private[this] def convertBasic(b: Basic): Literal = b match { case Exclude(n) => !convertBasic(n) case Nature(s) => Atom(s) + case a: AutoPlugin => Atom(a.label) } private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic } \ No newline at end of file diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index 8baa06997..647013bed 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -67,7 +67,7 @@ sealed trait ProjectDefinition[PR <: ProjectReference] val agg = ifNonEmpty("aggregate", aggregate) val dep = ifNonEmpty("dependencies", dependencies) val conf = ifNonEmpty("configurations", configurations) - val autos = 
ifNonEmpty("autoPlugins", autoPlugins.map(_.provides)) + val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.label)) val fields = s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"natures: List($natures)" :: autos) s"Project(${fields.mkString(", ")})" } @@ -136,11 +136,17 @@ sealed trait Project extends ProjectDefinition[ProjectReference] * Any configured .sbt files are removed from this project's list.*/ def setSbtFiles(files: File*): Project = copy(auto = AddSettings.append( AddSettings.clearSbtFiles(auto), AddSettings.sbtFiles(files: _*)) ) - /** Sets the [[Natures]] of this project. + /** Sets the [[Nature]]s of this project. A [[Nature]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ - def addNatures(ns: Natures): Project = { + def addNatures(ns: Nature*): Project = setNatures(Natures.and(natures, Natures.And(ns.toList))) + + /** Disable the given plugins on this project. */ + def disablePlugins(plugins: AutoPlugin*): Project = + setNatures(Natures.and(natures, Natures.And(plugins.map(p => Natures.Exclude(p)).toList))) + + private[this] def setNatures(ns: Natures): Project = { // TODO: for 0.14.0, use copy when it has the additional `natures` parameter - unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, Natures.and(natures, ns), autoPlugins) + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, ns, autoPlugins) } /** Definitively set the [[AutoPlugin]]s for this project. 
*/ diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt index d9543939b..f48a1f0e5 100644 --- a/sbt/src/sbt-test/project/auto-plugins/build.sbt +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -1,11 +1,11 @@ -// !C will exclude C, and thus D, from being auto-added -lazy val a = project.addNatures(A && B && !C) +// excludePlugins(C) will prevent C, and thus D, from being auto-added +lazy val a = project.addNatures(A, B).disablePlugins(Q) // without B, C is not added lazy val b = project.addNatures(A) // with both A and B, C is selected, which in turn selects D -lazy val c = project.addNatures(A && B) +lazy val c = project.addNatures(A, B) // with no natures defined, nothing is auto-added lazy val d = project diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index 73dd5211b..db51922cf 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -6,9 +6,7 @@ object AI extends AutoImport { lazy val A = Nature("A") lazy val B = Nature("B") - lazy val C = Nature("C") lazy val D = Nature("D") - lazy val E = Nature("E") lazy val q = config("q") lazy val p = config("p").extend(q) @@ -25,8 +23,6 @@ object Q extends AutoPlugin { def select: Natures = A && B - def provides = C - override def projectConfigurations: Seq[Configuration] = p :: q :: @@ -52,9 +48,7 @@ object Q extends AutoPlugin object R extends AutoPlugin { - def select = C && !D - - def provides = E + def select = Q && !D override def projectSettings = Seq( // tests proper ordering: R requires C, so C settings should come first diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index 5e4a3930e..c38558d4f 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ 
b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -3,7 +3,6 @@ import Keys._ object C extends AutoImport { - lazy val aN = Nature("A") lazy val bN = Nature("B") lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") } @@ -11,7 +10,6 @@ object C extends AutoImport { import C._ object A extends AutoPlugin { - override def provides = aN override def select = bN override def projectSettings = Seq( check := {} From 6abac450ef3c3685708a493d88f41acb620c08cd Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Thu, 6 Feb 2014 15:38:37 +0000 Subject: [PATCH 057/148] Retrieve dynamic app versions correctly --- launch/src/main/scala/xsbt/boot/Launch.scala | 25 +++++--- .../scala/xsbt/boot/ModuleDefinition.scala | 6 +- launch/src/main/scala/xsbt/boot/Update.scala | 62 ++++++++++++------- 3 files changed, 59 insertions(+), 34 deletions(-) diff --git a/launch/src/main/scala/xsbt/boot/Launch.scala b/launch/src/main/scala/xsbt/boot/Launch.scala index f27441918..688a769ee 100644 --- a/launch/src/main/scala/xsbt/boot/Launch.scala +++ b/launch/src/main/scala/xsbt/boot/Launch.scala @@ -200,28 +200,34 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i @tailrec private[this] final def getAppProvider0(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider = { val app = appModule(id, explicitScalaVersion, true, "app") - val baseDirs = (base: File) => appBaseDirs(base, id) + /** Replace the version of an ApplicationID with the given one, if set. 
*/ + def resolveId(appVersion: Option[String], id: xsbti.ApplicationID) = appVersion map { v => + import id._ + AppID(groupID(), name(), v, mainClass(), mainComponents(), crossVersionedValue(), classpathExtra()) + } getOrElse id + val baseDirs = (resolvedVersion: Option[String]) => (base: File) => appBaseDirs(base, resolveId(resolvedVersion, id)) def retrieve() = { - val sv = update(app, "") + val (appv, sv) = update(app, "") val scalaVersion = strictOr(explicitScalaVersion, sv) - new RetrievedModule(true, app, sv, baseDirs(scalaHome(ScalaOrg, scalaVersion))) + new RetrievedModule(true, app, sv, appv, baseDirs(appv)(scalaHome(ScalaOrg, scalaVersion))) } val retrievedApp = if(forceAppUpdate) retrieve() else - existing(app, ScalaOrg, explicitScalaVersion, baseDirs) getOrElse retrieve() + existing(app, ScalaOrg, explicitScalaVersion, baseDirs(None)) getOrElse retrieve() val scalaVersion = getOrError(strictOr(explicitScalaVersion, retrievedApp.detectedScalaVersion), "No Scala version specified or detected") val scalaProvider = getScala(scalaVersion, "(for " + id.name + ")") + val resolvedId = resolveId(retrievedApp.resolvedAppVersion, id) - val (missing, appProvider) = checkedAppProvider(id, retrievedApp, scalaProvider) + val (missing, appProvider) = checkedAppProvider(resolvedId, retrievedApp, scalaProvider) if(missing.isEmpty) appProvider else if(retrievedApp.fresh) app.retrieveCorrupt(missing) else - getAppProvider0(id, explicitScalaVersion, true) + getAppProvider0(resolvedId, explicitScalaVersion, true) } def scalaHome(scalaOrg: String, scalaVersion: Option[String]): File = new File(bootDirectory, baseDirectoryName(scalaOrg, scalaVersion)) def appHome(id: xsbti.ApplicationID, scalaVersion: Option[String]): File = appDirectory(scalaHome(ScalaOrg, scalaVersion), id) @@ -248,7 +254,7 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i try Some(provider(mod)) catch { case e: Exception => None } } getOrElse { - val scalaVersion = 
update(scalaM, reason) + val (_, scalaVersion) = update(scalaM, reason) provider( new RetrievedModule(true, scalaM, scalaVersion, baseDirs) ) } } @@ -343,10 +349,11 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i failLabel = "Scala " + version, extraClasspath = array() ) - def update(mm: ModuleDefinition, reason: String): Option[String] = + /** Returns the resolved appVersion (if this was an App), as well as the scalaVersion. */ + def update(mm: ModuleDefinition, reason: String): (Option[String], Option[String]) = { val result = ( new Update(mm.configuration) )(mm.target, reason) - if(result.success) result.scalaVersion else mm.retrieveFailed + if(result.success) result.appVersion -> result.scalaVersion else mm.retrieveFailed } } object Launcher diff --git a/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala b/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala index c5903d415..800247743 100644 --- a/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala +++ b/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala @@ -13,8 +13,12 @@ final class ModuleDefinition(val configuration: UpdateConfiguration, val extraCl private def versionString: String = target match { case _: UpdateScala => configuration.getScalaVersion; case a: UpdateApp => Value.get(a.id.version) } } -final class RetrievedModule(val fresh: Boolean, val definition: ModuleDefinition, val detectedScalaVersion: Option[String], val baseDirectories: List[File]) +final class RetrievedModule(val fresh: Boolean, val definition: ModuleDefinition, val detectedScalaVersion: Option[String], val resolvedAppVersion: Option[String], val baseDirectories: List[File]) { + /** Use this constructor only when the module exists already, or when its version is not dynamic (so its resolved version would be the same) */ + def this(fresh: Boolean, definition: ModuleDefinition, detectedScalaVersion: Option[String], baseDirectories: List[File]) = + this(fresh, definition, 
detectedScalaVersion, None, baseDirectories)
+
 	lazy val classpath: Array[File] = getJars(baseDirectories)
 	lazy val fullClasspath: Array[File] = concat(classpath, definition.extraClasspath)
diff --git a/launch/src/main/scala/xsbt/boot/Update.scala b/launch/src/main/scala/xsbt/boot/Update.scala
index 8d1c2e206..cbf0fb020 100644
--- a/launch/src/main/scala/xsbt/boot/Update.scala
+++ b/launch/src/main/scala/xsbt/boot/Update.scala
@@ -39,7 +39,10 @@ final class UpdateConfiguration(val bootDirectory: File, val ivyHome: Option[Fil
 	def getScalaVersion = scalaVersion match { case Some(sv) => sv; case None => "" }
 }
-final class UpdateResult(val success: Boolean, val scalaVersion: Option[String])
+final class UpdateResult(val success: Boolean, val scalaVersion: Option[String], val appVersion: Option[String]) {
+	@deprecated("Please use the other constructor providing appVersion.", "0.13.2")
+	def this(success: Boolean, scalaVersion: Option[String]) = this(success, scalaVersion, None)
+}
 
 /** Ensures that the Scala and application jars exist for the given versions or else downloads them.*/
 final class Update(config: UpdateConfiguration)
@@ -109,7 +112,7 @@ final class Update(config: UpdateConfiguration)
 			e.printStackTrace(logWriter)
 			log(e.toString)
 			System.out.println("  (see " + logFile + " for complete log)")
-			new UpdateResult(false, None)
+			new UpdateResult(false, None, None)
 		}
 		finally
 		{
@@ -127,15 +130,16 @@ final class Update(config: UpdateConfiguration)
 		moduleID.setLastModified(System.currentTimeMillis)
 		moduleID.addConfiguration(new IvyConfiguration(DefaultIvyConfiguration, PUBLIC, "", new Array(0), true, null))
 		// add dependencies based on which target needs updating
-		target match
+		val dep = target match
 		{
 			case u: UpdateScala =>
 				val scalaVersion = getScalaVersion
 				addDependency(moduleID, scalaOrg, CompilerModuleName, scalaVersion, "default;optional(default)", u.classifiers)
-				addDependency(moduleID, scalaOrg, LibraryModuleName, scalaVersion, "default", u.classifiers)
+				
val ddesc = addDependency(moduleID, scalaOrg, LibraryModuleName, scalaVersion, "default", u.classifiers) excludeJUnit(moduleID) val scalaOrgString = if (scalaOrg != ScalaOrg) " " + scalaOrg else "" System.out.println("Getting" + scalaOrgString + " Scala " + scalaVersion + " " + reason + "...") + ddesc.getDependencyId case u: UpdateApp => val app = u.id val resolvedName = (app.crossVersioned, scalaVersion) match { @@ -143,24 +147,31 @@ final class Update(config: UpdateConfiguration) case (xsbti.CrossValue.Binary, Some(sv)) => app.name + "_" + CrossVersionUtil.binaryScalaVersion(sv) case _ => app.name } - addDependency(moduleID, app.groupID, resolvedName, app.getVersion, "default(compile)", u.classifiers) + val ddesc = addDependency(moduleID, app.groupID, resolvedName, app.getVersion, "default(compile)", u.classifiers) System.out.println("Getting " + app.groupID + " " + resolvedName + " " + app.getVersion + " " + reason + "...") + ddesc.getDependencyId } - update(moduleID, target) + update(moduleID, target, dep) } /** Runs the resolve and retrieve for the given moduleID, which has had its dependencies added already. */ - private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget): UpdateResult = + private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget, dep: ModuleId): UpdateResult = { val eventManager = new EventManager - val autoScalaVersion = resolve(eventManager, moduleID) + val (autoScalaVersion, depVersion) = resolve(eventManager, moduleID, dep) + // Fix up target.id with the depVersion that we know for sure is resolved (not dynamic) -- this way, `retrieve` + // will put them in the right version directory. 
+ val target1 = (depVersion, target) match { + case (Some(dv), u: UpdateApp) => import u._; new UpdateApp(id.copy(version = new Explicit(dv)), classifiers, tpe) + case _ => target + } setScalaVariable(settings, autoScalaVersion) - retrieve(eventManager, moduleID, target, autoScalaVersion) - new UpdateResult(true, autoScalaVersion) + retrieve(eventManager, moduleID, target1, autoScalaVersion) + new UpdateResult(true, autoScalaVersion, depVersion) } private def createID(organization: String, name: String, revision: String) = ModuleRevisionId.newInstance(organization, name, revision) /** Adds the given dependency to the default configuration of 'moduleID'. */ - private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String, classifiers: List[String]) + private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String, classifiers: List[String]) = { val dep = new DefaultDependencyDescriptor(moduleID, createID(organization, name, revision), false, false, true) for(c <- conf.split(";")) @@ -168,6 +179,7 @@ final class Update(config: UpdateConfiguration) for(classifier <- classifiers) addClassifier(dep, name, classifier) moduleID.addDependency(dep) + dep } private def addClassifier(dep: DefaultDependencyDescriptor, name: String, classifier: String) { @@ -186,8 +198,9 @@ final class Update(config: UpdateConfiguration) rule.addConfiguration(DefaultIvyConfiguration) rule } - // returns the version of any Scala dependency - private def resolve(eventManager: EventManager, module: ModuleDescriptor): Option[String] = + val scalaLibraryId = ModuleId.newInstance(ScalaOrg, LibraryModuleName) + // Returns the version of the scala library, as well as `dep` (a dependency of `module`) after it's been resolved + private def resolve(eventManager: EventManager, module: ModuleDescriptor, dep: ModuleId): (Option[String], Option[String]) = { val resolveOptions = new 
ResolveOptions // this reduces the substantial logging done by Ivy, including the progress dots when downloading artifacts @@ -203,18 +216,18 @@ final class Update(config: UpdateConfiguration) System.out.println(seen.toArray.mkString(System.getProperty("line.separator"))) error("Error retrieving required libraries") } - scalaDependencyVersion(resolveReport).headOption + val modules = moduleRevisionIDs(resolveReport) + extractVersion(modules, scalaLibraryId) -> extractVersion(modules, dep) } - private[this] def scalaDependencyVersion(report: ResolveReport): List[String] = + private[this] def extractVersion(modules: Seq[ModuleRevisionId], dep: ModuleId): Option[String] = { - val modules = report.getConfigurations.toList flatMap { config => - report.getConfigurationReport(config).getModuleRevisionIds.toArray - } - modules flatMap { - case module: ModuleRevisionId if module.getOrganisation == ScalaOrg && module.getName == LibraryModuleName => - module.getRevision :: Nil - case _ => Nil - } + modules collectFirst { case m if m.getModuleId.equals(dep) => m.getRevision } + } + private[this] def moduleRevisionIDs(report: ResolveReport): Seq[ModuleRevisionId] = + { + import collection.JavaConverters._ + import org.apache.ivy.core.resolve.IvyNode + report.getDependencies.asInstanceOf[java.util.List[IvyNode]].asScala map (_.getResolvedId) } /** Exceptions are logged to the update log file. 
*/ @@ -244,7 +257,8 @@ final class Update(config: UpdateConfiguration) val filter = (a: IArtifact) => retrieveType(a.getType) && a.getExtraAttribute("classifier") == null && extraFilter(a) retrieveOptions.setArtifactFilter(new ArtifactFilter(filter)) val scalaV = strictOr(scalaVersion, autoScalaVersion) - retrieveEngine.retrieve(module.getModuleRevisionId, baseDirectoryName(scalaOrg, scalaV) + "/" + pattern, retrieveOptions) + retrieveOptions.setDestArtifactPattern(baseDirectoryName(scalaOrg, scalaV) + "/" + pattern) + retrieveEngine.retrieve(module.getModuleRevisionId, retrieveOptions) } private[this] def notCoreScala(a: IArtifact) = a.getName match { case LibraryModuleName | CompilerModuleName => false From 05d96912d1367b3731b878ba72dcc084bf5bcf5b Mon Sep 17 00:00:00 2001 From: Roch Delsalle Date: Wed, 12 Feb 2014 11:02:53 +0100 Subject: [PATCH 058/148] Update Community-Plugins.rst sbt-scct fork / published on central with 0.13 compatibility --- src/sphinx/Community/Community-Plugins.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index 8e6c11870..d13ad1c75 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -285,7 +285,7 @@ Utility plugins Code coverage plugins ~~~~~~~~~~~~~~~~~~~~~ -- sbt-scct: https://github.com/dvc94ch/sbt-scct +- sbt-scct: https://github.com/sqality/sbt-scct - sbt-scoverage: https://github.com/scoverage/sbt-scoverage - jacoco4sbt: https://github.com/sbt/jacoco4sbt - xsbt-coveralls-plugin: https://github.com/theon/xsbt-coveralls-plugin From 9b4564f0cd1d3e112552a660bab4f1aafafbf285 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 12 Feb 2014 12:59:32 +0100 Subject: [PATCH 059/148] SI-8262 Fix compilation with 2.11 due to shadowing of Range We now have `global.Range`, so our wildcard import of `global._` shadows `scala.Range`. 
This commit fully qualifies that type so as to be compatible with Scala 2.10 and 2.11. --- main/actions/src/main/scala/sbt/compiler/Eval.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main/actions/src/main/scala/sbt/compiler/Eval.scala b/main/actions/src/main/scala/sbt/compiler/Eval.scala index 505897ff6..1fb4f6bb4 100644 --- a/main/actions/src/main/scala/sbt/compiler/Eval.scala +++ b/main/actions/src/main/scala/sbt/compiler/Eval.scala @@ -87,7 +87,7 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl)) new EvalResult(i.extra, value, i.generated, i.enclosingModule) } - def evalDefinitions(definitions: Seq[(String,Range)], imports: EvalImports, srcName: String, valTypes: Seq[String]): EvalDefinitions = + def evalDefinitions(definitions: Seq[(String,scala.Range)], imports: EvalImports, srcName: String, valTypes: Seq[String]): EvalDefinitions = { require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.") val ev = new EvalType[Seq[String]] { @@ -349,7 +349,7 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se } /** Constructs a CompilationUnit for each definition, which can be used to independently parse the definition into a Tree. * Additionally, a CompilationUnit for the combined definitions is constructed for use by combined compilation after parsing. 
*/ - private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String,Range)]): (CompilationUnit, Seq[CompilationUnit]) = + private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String,scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = { def fragmentUnit(content: String, lineMap: Array[Int]) = new CompilationUnit(fragmentSourceFile(srcName, content, lineMap)) From 4e073373daf1e0e2a58722f58749283296d9732c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 12 Feb 2014 12:59:32 +0100 Subject: [PATCH 060/148] SI-8263 Avoid SOE in Symbol#logicallyEnclosingMember under Scala 2.11 Since the fix for SI-2066, Scala 2.11 calls logicallyEnclosingMember on the `x` in the expansion of the task macro: InitializeInstance.app[[T0[x]](T0[java.io.File], T0[java.io.File]), Seq[java.io.File]] This exposed the fact that SBT has created `T0` with `NoSymbol` as the owner. This led to a SOE. I will also change the compiler to be more tolerant of this, but we can observe good discipline in the macro and pick a sensible owner. --- util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala | 4 ++-- util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala index e9fb207d8..81d3be06f 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala @@ -24,7 +24,7 @@ object KListBuilder extends TupleBuilder val kconsTC: Type = kconsTpe.typeConstructor /** This is the L in the type function [L[x]] ...
*/ - val tcVariable: TypeSymbol = newTCVariable(NoSymbol) + val tcVariable: TypeSymbol = newTCVariable(util.initialOwner) /** Instantiates KCons[h, t <: KList[L], L], where L is the type constructor variable */ def kconsType(h: Type, t: Type): Type = @@ -65,4 +65,4 @@ object KListBuilder extends TupleBuilder val alistInstance: ctx.universe.Tree = TypeApply(select(Ident(alist), "klist"), TypeTree(representationC) :: Nil) def extract(param: ValDef) = bindKList(param, Nil, inputs.map(_.local)) } -} \ No newline at end of file +} diff --git a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala index 89fe31792..871932b20 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala @@ -25,7 +25,7 @@ object TupleNBuilder extends TupleBuilder val ctx: c.type = c val representationC: PolyType = { - val tcVariable: Symbol = newTCVariable(NoSymbol) + val tcVariable: Symbol = newTCVariable(util.initialOwner) val tupleTypeArgs = inputs.map(in => typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]) val tuple = global.definitions.tupleType(tupleTypeArgs) PolyType(tcVariable :: Nil, tuple.asInstanceOf[Type] ) From 4a9981720a7dfa7e958aef702b0c897d4f1574e2 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 12 Feb 2014 14:12:20 +0100 Subject: [PATCH 061/148] Remove work-arounds for Scala 2.11 problematic dependencies The ff0fd6eec658502f276f89c46f4aba0e0e268ddc introduced some exclusions that were necessary for getting sbt to resolve dependencies properly against Scala 2.11.0-M7. Scala 2.11.0-M8 fixed its dependency structure so we can get rid of those exclusions now. 
--- project/Sbt.scala | 4 ++-- project/Util.scala | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 800ef4a5d..b4c820dea 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -276,7 +276,7 @@ object Sbt extends Build artifact in (Compile, packageSrc) := Artifact(srcID).copy(configurations = Compile :: Nil).extra("e:component" -> srcID) ) def compilerSettings = Seq( - libraryDependencies <+= scalaVersion( "org.scala-lang" % "scala-compiler" % _ % "test" excludeAll(ExclusionRule(organization = "org.scala-lang.modules"))), + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test"), unmanagedJars in Test <<= (packageSrc in compileInterfaceSub in Compile).map(x => Seq(x).classpath) ) def precompiled(scalav: String): Project = baseProject(compilePath / "interface", "Precompiled " + scalav.replace('.', '_')) dependsOn(interfaceSub) settings(precompiledSettings : _*) settings( @@ -290,6 +290,6 @@ object Sbt extends Build sources in Test := Nil ) def ioSettings: Seq[Setting[_]] = Seq( - libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test" excludeAll(ExclusionRule(organization = "org.scala-lang.modules"))) + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test") ) } diff --git a/project/Util.scala b/project/Util.scala index 61c58ab54..125d46b81 100644 --- a/project/Util.scala +++ b/project/Util.scala @@ -172,8 +172,7 @@ object Common lazy val httpclient = lib("commons-httpclient" % "commons-httpclient" % "3.1") lazy val jsch = lib("com.jcraft" % "jsch" % "0.1.46" intransitive() ) lazy val sbinary = libraryDependencies <+= Util.nightly211(n => "org.scala-tools.sbinary" % "sbinary" % "0.4.2" cross(if(n) CrossVersion.full else CrossVersion.binary)) - lazy val scalaCompiler = libraryDependencies <+= scalaVersion( - sv => "org.scala-lang" % "scala-compiler" % sv excludeAll(ExclusionRule(organization = 
"org.scala-lang.modules"))) + lazy val scalaCompiler = libraryDependencies <+= scalaVersion(sv => "org.scala-lang" % "scala-compiler" % sv) lazy val testInterface = lib("org.scala-sbt" % "test-interface" % "1.0") private def scala211Module(name: String, moduleVersion: String) = libraryDependencies <++= (scalaVersion)( scalaVersion => From 7f6abbaf359906bc741b9af069cdc933ce4d6fcd Mon Sep 17 00:00:00 2001 From: "Simeon H.K. Fitch" Date: Wed, 12 Feb 2014 12:31:15 -0500 Subject: [PATCH 062/148] Added basic documentation for DefinableSetting, inferring behavior from implementation. --- .../src/main/scala/sbt/Structure.scala | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/main/settings/src/main/scala/sbt/Structure.scala b/main/settings/src/main/scala/sbt/Structure.scala index 8c023c876..24808c3cb 100644 --- a/main/settings/src/main/scala/sbt/Structure.scala +++ b/main/settings/src/main/scala/sbt/Structure.scala @@ -27,7 +27,7 @@ sealed trait ScopedTaskable[T] extends Scoped { /** Identifies a setting. It consists of three parts: the scope, the name, and the type of a value associated with this key. * The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type AttributeKey[T]. +* The name and the type are represented by a value of type `AttributeKey[T]`. * Instances are constructed using the companion object. */ sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] with Scoped.ScopingSetting[SettingKey[T]] with Scoped.DefinableSetting[T] { @@ -52,7 +52,7 @@ sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitiali /** Identifies a task. It consists of three parts: the scope, the name, and the type of the value computed by a task associated with this key. * The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type AttributeKey[Task[T]]. 
+* The name and the type are represented by a value of type `AttributeKey[Task[T]]`. * Instances are constructed using the companion object. */ sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[Task[T]] with Scoped.ScopingSetting[TaskKey[T]] with Scoped.DefinableTask[T] { @@ -76,7 +76,7 @@ sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[ /** Identifies an input task. An input task parses input and produces a task to run. * It consists of three parts: the scope, the name, and the type of the value produced by an input task associated with this key. * The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type AttributeKey[InputTask[T]]. +* The name and the type are represented by a value of type `AttributeKey[InputTask[T]]`. * Instances are constructed using the companion object. */ sealed trait InputKey[T] extends Scoped with KeyedInitialize[InputTask[T]] with Scoped.ScopingSetting[InputKey[T]] with Scoped.DefinableSetting[InputTask[T]] { @@ -95,6 +95,21 @@ object Scoped implicit def taskScopedToKey[T](s: TaskKey[T]): ScopedKey[Task[T]] = ScopedKey(s.scope, s.key) implicit def inputScopedToKey[T](s: InputKey[T]): ScopedKey[InputTask[T]] = ScopedKey(s.scope, s.key) + /** + * Mixin trait for adding convenience vocabulary associated with specifying the [[Scope]] of a setting. + * Allows specification of the Scope or part of the [[Scope]] of a setting being referenced.
+ * @example + * {{{ + * name in Global := "hello Global scope" + * + * name in (Compile, packageBin) := "hello Compile scope packageBin" + * + * name in Compile := "hello Compile scope" + + * name.in(Compile).:=("hello ugly syntax") + * }}} + * + */ sealed trait ScopingSetting[Result] { def in(s: Scope): Result @@ -113,16 +128,25 @@ object Scoped def scopedInput[T](s: Scope, k: AttributeKey[InputTask[T]]): InputKey[T] = new InputKey[T] { val scope = s; val key = k } def scopedTask[T](s: Scope, k: AttributeKey[Task[T]]): TaskKey[T] = new TaskKey[T] { val scope = s; val key = k } + /** + * Mixin trait for adding convenience vocabulary associated with applying a setting to a configuration item. + */ sealed trait DefinableSetting[S] { def scopedKey: ScopedKey[S] private[sbt] final def :==(app: S): Setting[S] = macro std.TaskMacro.settingAssignPure[S] + /** Binds a single value to this. A new `Setting` is defined using the value(s) of `app`. */ final def <<= (app: Initialize[S]): Setting[S] = macro std.TaskMacro.settingAssignPosition[S] + /** Internally used function for setting a value along with the `.sbt` file location where it is defined. */ final def set (app: Initialize[S], source: SourcePosition): Setting[S] = setting(scopedKey, app, source) + /** Setting accessor with explicit Scope specification. */ final def get(settings: Settings[Scope]): Option[S] = settings.get(scopedKey.scope, scopedKey.key) + /** Lift this into an Option. */ final def ? : Initialize[Option[S]] = Def.optional(scopedKey)(idFun) + /** Lift this into an Option, and then call `getOrElse` using `i` as the fallback value. */ final def or[T >: S](i: Initialize[T]): Initialize[T] = (this.?, i)(_ getOrElse _ ) + /** Lift this into an Option, and then call `getOrElse`, evaluating `or` if needed. 
*/ final def ??[T >: S](or: => T): Initialize[T] = Def.optional(scopedKey)(_ getOrElse or ) } final class RichInitialize[S](init: Initialize[S]) From 9f43d0660d3938fccd75f1997f007f3c13554524 Mon Sep 17 00:00:00 2001 From: "Simeon H.K. Fitch" Date: Wed, 12 Feb 2014 15:37:49 -0500 Subject: [PATCH 063/148] Revised documentation based on feedback from @jsuereth --- .../src/main/scala/sbt/Structure.scala | 33 ++++++++++++++++--- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/main/settings/src/main/scala/sbt/Structure.scala b/main/settings/src/main/scala/sbt/Structure.scala index 24808c3cb..bd23f3f4d 100644 --- a/main/settings/src/main/scala/sbt/Structure.scala +++ b/main/settings/src/main/scala/sbt/Structure.scala @@ -136,19 +136,42 @@ object Scoped def scopedKey: ScopedKey[S] private[sbt] final def :==(app: S): Setting[S] = macro std.TaskMacro.settingAssignPure[S] - /** Binds a single value to this. A new `Setting` is defined using the value(s) of `app`. */ + + /** Binds a single value to this. A new [Def.Setting] is defined using the value(s) of `app`. + * @param app value to bind to this key + * @return setting binding this key to the given value. + */ final def <<= (app: Initialize[S]): Setting[S] = macro std.TaskMacro.settingAssignPosition[S] + /** Internally used function for setting a value along with the `.sbt` file location where it is defined. */ final def set (app: Initialize[S], source: SourcePosition): Setting[S] = setting(scopedKey, app, source) - /** Setting accessor with explicit Scope specification. */ + + /** From the given [[Settings]], extract the value bound to this key. */ final def get(settings: Settings[Scope]): Option[S] = settings.get(scopedKey.scope, scopedKey.key) - /** Lift this into an Option. */ + + /** Creates an [[Def.Initialize]] with value [[scala.None]] if there was no previous definition of this key, + * and `[[scala.Some]](value)` if a definition exists. 
Useful for when you want to use the ''existence'' of + * one setting in order to define another setting. + * @return currently bound value wrapped in `Initialize[Some[T]]`, or `Initialize[None]` if unbound. */ final def ? : Initialize[Option[S]] = Def.optional(scopedKey)(idFun) - /** Lift this into an Option, and then call `getOrElse` using `i` as the fallback value. */ + + /** Creates an [[Def.Initialize]] with value bound to this key, or returns `i` parameter if unbound. + * @param i value to return if this setting doesn't have a value. + * @return currently bound setting value, or `i` if unbound. + */ final def or[T >: S](i: Initialize[T]): Initialize[T] = (this.?, i)(_ getOrElse _ ) - /** Lift this into an Option, and then call `getOrElse`, evaluating `or` if needed. */ + + /** Like [[?]], but with a call-by-name parameter rather than an existing [[Def.Initialize]]. + * Useful when you want to have a value computed when no value is bound to this key. + * @param or by-name expression evaluated when a value is needed. + * @return currently bound setting value, or the result of `or` if unbound. + */ final def ??[T >: S](or: => T): Initialize[T] = Def.optional(scopedKey)(_ getOrElse or ) } + + /** + * Wraps an [[sbt.Def.Initialize]] instance to provide `map` and `flatMap` semantics. + */ final class RichInitialize[S](init: Initialize[S]) { def map[T](f: S => T): Initialize[Task[T]] = init(s => mktask(f(s)) ) From bd943b8e83b1585c63522ffb314f94b4b37627af Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 15 Feb 2014 12:59:03 +0100 Subject: [PATCH 064/148] using compat._ to plug source compatibility breakages This commit makes the code source compatible across Scala 2.10.3 and https://github.com/scala/scala/pull/3452, which is proposed for inclusion in Scala 2.11.0-RC1. We only strictly need the incremental compiler to build on Scala 2.11, as that is integrated into the IDE. But we gain valuable insight into compiler regressions by building *all* of SBT with 2.11.
We only got there recently (the 0.13 branch of SBT now fully cross compiles with 2.10.3 and 2.11.0-SNAPSHOT), and this aims to keep things that way. Once 2.10 support is dropped, SBT macros will be able to exploit the new reflection APIs in 2.11 to avoid the need for casting to compiler internals, which aren't governed by binary compatibility. This has been prototyped by @xeno-by: https://github.com/sbt/sbt/pull/1121 --- main/settings/src/main/scala/sbt/std/InputWrapper.scala | 4 ++++ main/settings/src/main/scala/sbt/std/TaskMacro.scala | 4 ++++ util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala | 4 ++++ util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala | 4 ++++ util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala | 4 ++++ 5 files changed, 20 insertions(+) diff --git a/main/settings/src/main/scala/sbt/std/InputWrapper.scala b/main/settings/src/main/scala/sbt/std/InputWrapper.scala index 5a2ee0dd1..8d8fe6182 100644 --- a/main/settings/src/main/scala/sbt/std/InputWrapper.scala +++ b/main/settings/src/main/scala/sbt/std/InputWrapper.scala @@ -59,6 +59,9 @@ object InputWrapper private[std] def wrapPrevious[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[Option[T]] = wrapImpl[Option[T],InputWrapper.type](c, InputWrapper, WrapPreviousName)(ts, pos) + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ + /** Wraps an arbitrary Tree in a call to the `.` method of this module for later processing by an enclosing macro. 
* The resulting Tree is the manually constructed version of: * @@ -67,6 +70,7 @@ object InputWrapper def wrapImpl[T: c.WeakTypeTag, S <: AnyRef with Singleton](c: Context, s: S, wrapName: String)(ts: c.Expr[Any], pos: c.Position)(implicit it: c.TypeTag[s.type]): c.Expr[T] = { import c.universe.{Apply=>ApplyTree,_} + import compat._ val util = new ContextUtil[c.type](c) val iw = util.singleton(s) val tpe = c.weakTypeOf[T] diff --git a/main/settings/src/main/scala/sbt/std/TaskMacro.scala b/main/settings/src/main/scala/sbt/std/TaskMacro.scala index b4789247c..a15f8f5d6 100644 --- a/main/settings/src/main/scala/sbt/std/TaskMacro.scala +++ b/main/settings/src/main/scala/sbt/std/TaskMacro.scala @@ -281,9 +281,13 @@ object TaskMacro private[this] def iTaskMacro[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = Instance.contImpl[T,Id](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t), Instance.idTransform) + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ + private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = { import c.universe.{Apply=>ApplyTree,_} + import compat._ val tag = implicitly[c.WeakTypeTag[T]] val util = ContextUtil[c.type](c) diff --git a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala index 381674e47..c0c849fab 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala @@ -32,12 +32,16 @@ object ContextUtil { def unexpectedTree[C <: Context](tree: C#Tree): Nothing = sys.error("Unexpected macro application tree (" + tree.getClass + "): " + tree) } +// TODO 2.11 Remove this after dropping 2.10.x support. +private object HasCompat { val compat = ??? }; import HasCompat._ + /** Utility methods for macros. 
Several methods assume that the context's universe is a full compiler (`scala.tools.nsc.Global`). * This is not thread safe due to the underlying Context and related data structures not being thread safe. * Use `ContextUtil[c.type](c)` to construct. */ final class ContextUtil[C <: Context](val ctx: C) { import ctx.universe.{Apply=>ApplyTree,_} + import compat._ val powerContext = ctx.asInstanceOf[reflect.macros.runtime.Context] val global: powerContext.universe.type = powerContext.universe diff --git a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala index 81d3be06f..d9dbebe42 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala @@ -9,11 +9,15 @@ package appmacro /** A `TupleBuilder` that uses a KList as the tuple representation.*/ object KListBuilder extends TupleBuilder { + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { val ctx: c.type = c val util = ContextUtil[c.type](c) import c.universe.{Apply=>ApplyTree,_} + import compat._ import util._ val knilType = c.typeOf[KNil] diff --git a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala index 871932b20..28fa581a4 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala @@ -14,10 +14,14 @@ object TupleNBuilder extends TupleBuilder final val MaxInputs = 11 final val TupleMethodName = "tuple" + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? 
}; import HasCompat._ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { val util = ContextUtil[c.type](c) import c.universe.{Apply=>ApplyTree,_} + import compat._ import util._ val global: Global = c.universe.asInstanceOf[Global] From c7f435026ff8591085f427b8ee4d192e1f10f783 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Mon, 17 Feb 2014 17:00:19 +0100 Subject: [PATCH 065/148] Include value of `nameHashing` flag in `CompileSetup`. The CompileSetup class is being used to detect changes to arguments of incremental compiler that affect result of compilation and trigger recompilation. Examples of such arguments include the target (output) directory, Scala compiler options, Scala compiler version, etc. By adding `nameHashing` to CompileSetup we have a chance to handle change to that flag smoothly by throwing away old Analysis object and starting with an empty one. That's implemented in AggressiveCompile by extending the logic that was responsible for detection of changes to CompileSetup values. Thanks to this change we fix #1081. Analysis formats have been updated to support persisting of newly added value in CompileSetup. We used to not store the value of `nameHashing` flag in persisted Analysis file and infer it from contents of relations but that leads to issue #1071 when empty relations are involved. Given the fact that CompileSetup stores `nameHashing` value now, we can just use it when reading relations and fix #1071. This requires reading/writing compile setup before reading relations. I decided to make that change even if there's a comment saying that reading/writing relations first was done intentionally.
--- .../inc/src/main/scala/sbt/CompileSetup.scala | 12 +++++---- .../sbt/compiler/AggressiveCompile.scala | 9 ++++++- .../main/scala/sbt/inc/AnalysisFormats.scala | 4 +-- .../scala/sbt/inc/TextAnalysisFormat.scala | 27 ++++++++++++------- 4 files changed, 34 insertions(+), 18 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/CompileSetup.scala b/compile/inc/src/main/scala/sbt/CompileSetup.scala index 59e9e2975..e45f85e58 100644 --- a/compile/inc/src/main/scala/sbt/CompileSetup.scala +++ b/compile/inc/src/main/scala/sbt/CompileSetup.scala @@ -11,7 +11,8 @@ package sbt // because complexity(Equiv[Seq[String]]) > complexity(Equiv[CompileSetup]) // (6 > 4) final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String]) -final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String, val order: CompileOrder) +final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String, + val order: CompileOrder, val nameHashing: Boolean) object CompileSetup { @@ -21,7 +22,8 @@ object CompileSetup equivOutput.equiv(a.output, b.output) && equivOpts.equiv(a.options, b.options) && equivComp.equiv(a.compilerVersion, b.compilerVersion) && - a.order == b.order // equivOrder.equiv(a.order, b.order) + a.order == b.order && // equivOrder.equiv(a.order, b.order) + a.nameHashing == b.nameHashing } implicit val equivFile: Equiv[File] = new Equiv[File] { def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile @@ -32,7 +34,7 @@ object CompileSetup case (m1: MultipleOutput, m2: MultipleOutput) => (m1.outputGroups.length == m2.outputGroups.length) && (m1.outputGroups.sorted zip m2.outputGroups.sorted forall { - case (a,b) => + case (a,b) => equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory) }) case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory) @@ -42,12 +44,12 @@ object 
CompileSetup implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] { def equiv(a: CompileOptions, b: CompileOptions) = (a.options sameElements b.options) && - (a.javacOptions sameElements b.javacOptions) + (a.javacOptions sameElements b.javacOptions) } implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] { def equiv(a: String, b: String) = a == b } - + implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] { def equiv(a: CompileOrder, b: CompileOrder) = a == b } diff --git a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala index fec36db56..2c711d14f 100644 --- a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala +++ b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala @@ -41,7 +41,8 @@ class AggressiveCompile(cacheFile: File) skip: Boolean = false, incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = { - val setup = new CompileSetup(output, new CompileOptions(options, javacOptions), compiler.scalaInstance.actualVersion, compileOrder) + val setup = new CompileSetup(output, new CompileOptions(options, javacOptions), + compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing) compile1(sources, classpath, setup, progress, store, analysisMap, definesClass, compiler, javac, reporter, skip, cache, incrementalCompilerOptions) } @@ -144,6 +145,12 @@ class AggressiveCompile(cacheFile: File) val sourcesSet = sources.toSet val analysis = previousSetup match { + case Some(previous) if previous.nameHashing != currentSetup.nameHashing => + // if the value of `nameHashing` flag has changed we have to throw away + // previous Analysis completely and start with empty Analysis object + // that supports the particular value of the `nameHashing` flag. 
+ // Otherwise we'll be getting UnsupportedOperationExceptions + Analysis.empty(currentSetup.nameHashing) case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis case _ => Incremental.prune(sourcesSet, previousAnalysis) } diff --git a/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala b/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala index 5f2c7b9c6..73b619e0f 100644 --- a/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala +++ b/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala @@ -73,8 +73,8 @@ object AnalysisFormats wrap[Severity, Byte]( _.ordinal.toByte, b => Severity.values.apply(b.toInt) ) - implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder]): Format[CompileSetup] = - asProduct4[CompileSetup, APIOutput, CompileOptions, String, CompileOrder]( (a,b,c,d) => new CompileSetup(a,b,c,d) )(s => (s.output, s.options, s.compilerVersion, s.order))(outputF, optionF, compilerVersion, orderF) + implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): Format[CompileSetup] = + asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]( (a,b,c,d,e) => new CompileSetup(a,b,c,d,e) )(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF) implicit val outputGroupFormat: Format[OutputGroup] = asProduct2((a: File,b: File) => new OutputGroup{def sourceDirectory = a; def outputDirectory = b}) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat) diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index 3bd28190c..f3e13d23a 100644 --- 
a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -57,31 +57,33 @@ object TextAnalysisFormat { def write(out: Writer, analysis: Analysis, setup: CompileSetup) { VersionF.write(out) - // We start with relations because that's the part of greatest interest to external readers, + // We start with writing compile setup which contains value of the `nameHashing` + // flag that is needed to properly deserialize relations + FormatTimer.time("write setup") { CompileSetupF.write(out, setup) } + // Next we write relations because that's the part of greatest interest to external readers, // who can abort reading early once they're read them. FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) } FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) } FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) } FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) } FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) } - FormatTimer.time("write setup") { CompileSetupF.write(out, setup) } out.flush() } def read(in: BufferedReader): (Analysis, CompileSetup) = { VersionF.read(in) - val relations = FormatTimer.time("read relations") { RelationsF.read(in) } + val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) } + val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) } val stamps = FormatTimer.time("read stamps") { StampsF.read(in) } val apis = FormatTimer.time("read apis") { APIsF.read(in) } val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) } val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) } - val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) } (Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup) } private[this] object VersionF { - val 
currentVersion = "4" + val currentVersion = "5" def write(out: Writer) { out.write("format version: %s\n".format(currentVersion)) @@ -165,7 +167,7 @@ object TextAnalysisFormat { writeRelation(Headers.usedNames, names) } - def read(in: BufferedReader): Relations = { + def read(in: BufferedReader, nameHashing: Boolean): Relations = { def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = { val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator // Reconstruct the forward map. This is more efficient than Relation.empty ++ items. @@ -216,9 +218,10 @@ object TextAnalysisFormat { } // we don't check for emptiness of publicInherited/inheritance relations because // we assume that invariant that says they are subsets of direct/memberRef holds - assert((directSrcDeps == emptySource) || (memberRefSrcDeps == emptySourceDependencies), - "One mechanism is supported for tracking source dependencies at the time") - val nameHashing = memberRefSrcDeps != emptySourceDependencies + assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), + "When name hashing is disabled the `memberRef` relation should be empty.") + assert(!nameHashing || (directSrcDeps == emptySource), + "When name hashing is enabled the `direct` relation should be empty.") val classes = readStringRelation(Headers.classes) val names = readStringRelation(Headers.usedNames) @@ -322,6 +325,7 @@ object TextAnalysisFormat { val javacOptions = "javac options" val compilerVersion = "compiler version" val compileOrder = "compile order" + val nameHashing = "name hashing" } private[this] val singleOutputMode = "single" @@ -340,16 +344,19 @@ object TextAnalysisFormat { writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String]) writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String]) writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String]) + writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, 
(b: Boolean) => b.toString) } def read(in: BufferedReader): CompileSetup = { def s2f(s: String) = new File(s) + def s2b(s: String): Boolean = s.toBoolean val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f) val compileOptions = readSeq(in)(Headers.compileOptions, identity[String]) val javacOptions = readSeq(in)(Headers.javacOptions, identity[String]) val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head + val nameHashing = readSeq(in)(Headers.nameHashing, s2b).head val output = outputDirMode match { case Some(s) => s match { @@ -370,7 +377,7 @@ object TextAnalysisFormat { } new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion, - xsbti.compile.CompileOrder.valueOf(compileOrder)) + xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing) } } From 19ca7a1edc64f98b1d3cfc96950ef5526a5f13fd Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Tue, 18 Feb 2014 12:18:07 +0100 Subject: [PATCH 066/148] Make change to CompileSetup backwards compatible. The c7f435026ff8591085f427b8ee4d192e1f10f783 introduced a new parameter to the constructor of `CompileSetup` but it turns out that this class is being used in zinc. Introduce an overloaded variant of that constructor that preserves backwards compatibility. 
--- compile/inc/src/main/scala/sbt/CompileSetup.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/compile/inc/src/main/scala/sbt/CompileSetup.scala b/compile/inc/src/main/scala/sbt/CompileSetup.scala index e45f85e58..11ecc6805 100644 --- a/compile/inc/src/main/scala/sbt/CompileSetup.scala +++ b/compile/inc/src/main/scala/sbt/CompileSetup.scala @@ -12,7 +12,12 @@ package sbt // (6 > 4) final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String]) final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String, - val order: CompileOrder, val nameHashing: Boolean) + val order: CompileOrder, val nameHashing: Boolean) { + @deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2") + def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = { + this(output, options, compilerVersion, order, false) + } +} object CompileSetup { From 3389906b017613820e3af4f828b46007a5dd8f94 Mon Sep 17 00:00:00 2001 From: Eugene Platonov Date: Tue, 18 Feb 2014 23:11:59 -0500 Subject: [PATCH 067/148] explicitly disapprove plugins using default package --- src/sphinx/Extending/Plugins-Best-Practices.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/sphinx/Extending/Plugins-Best-Practices.rst b/src/sphinx/Extending/Plugins-Best-Practices.rst index df72f825c..eeafba4c7 100644 --- a/src/sphinx/Extending/Plugins-Best-Practices.rst +++ b/src/sphinx/Extending/Plugins-Best-Practices.rst @@ -16,6 +16,12 @@ Specifically: Here are some current plugin best practices. **NOTE:** Best practices are evolving, so check back frequently. +Don't use default package +--------------------------- + +Users who have their build files in some package will not be able to +use your plugin if it's defined in default (no-name) package. 
+ Avoid overriding `settings` ----------------------------- From a3d89dc8656d7ee5fb2656dae79b5154ebe9a5e2 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Wed, 19 Feb 2014 23:04:06 -0500 Subject: [PATCH 068/148] Infrastructure for debugging natures and plugins. * Can provide suggestions for how to define a plugin given a context (a loaded Project in practice). * When a user requests an undefined key at the command line, can indicate whether any (deactivated) plugins provide the key. TODO: * Hook up to the key parser * Implement 'help ' * Determine how to best provide the context (the current project is often an aggregating root, which is not typically a useful context) --- main/src/main/scala/sbt/Natures.scala | 49 ++-- main/src/main/scala/sbt/NaturesDebug.scala | 323 +++++++++++++++++++++ 2 files changed, 355 insertions(+), 17 deletions(-) create mode 100644 main/src/main/scala/sbt/NaturesDebug.scala diff --git a/main/src/main/scala/sbt/Natures.scala b/main/src/main/scala/sbt/Natures.scala index b121df408..06b0a0e2f 100644 --- a/main/src/main/scala/sbt/Natures.scala +++ b/main/src/main/scala/sbt/Natures.scala @@ -15,29 +15,24 @@ trait AutoImport /** An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). -The `select` method defines the conditions, - `provides` defines an identifier for the AutoPlugin, - and a method like `projectSettings` defines the settings to add. +The `select` method defines the conditions and a method like `projectSettings` defines the settings to add. Steps for plugin authors: 1. Determine the [[Nature]]s that, when present (or absent), activate the AutoPlugin. 2. Determine the settings/configurations to automatically inject when activated. -3. Define a new, unique identifying [[Nature]] associated with the AutoPlugin, where a Nature is essentially a String ID. 
For example, the following will automatically add the settings in `projectSettings` - to a project that has both the `Web` and `Javascript` natures enabled. It will itself - define the `MyStuff` nature. This nature can be explicitly disabled by the user to - prevent the plugin from activating. + to a project that has both the `Web` and `Javascript` natures enabled. object MyPlugin extends AutoPlugin { def select = Web && Javascript - def provides = MyStuff override def projectSettings = Seq(...) } Steps for users: -1. add dependencies on plugins as usual with addSbtPlugin -2. add Natures to Projects, which will automatically select the plugin settings to add for those Projects. +1. Add dependencies on plugins as usual with addSbtPlugin +2. Add Natures to Projects, which will automatically select the plugin settings to add for those Projects. +3. Exclude plugins, if desired. For example, given natures Web and Javascript (perhaps provided by plugins added with addSbtPlugin), @@ -45,9 +40,9 @@ For example, given natures Web and Javascript (perhaps provided by plugins added will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines - .natures( Web && Javascript && !MyStuff) + .natures( Web && Javascript && !MyPlugin) -then the `MyPlugin` settings (and anything that activates only when `MyStuff` is activated) will not be added. +then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added. */ abstract class AutoPlugin extends Natures.Basic { @@ -75,6 +70,8 @@ abstract class AutoPlugin extends Natures.Basic def globalSettings: Seq[Setting[_]] = Nil // TODO?: def commands: Seq[Command] + + def unary_! : Exclude = Exclude(this) } /** An error that occurs when auto-plugins aren't configured properly. @@ -99,7 +96,6 @@ sealed trait Natures { * `label` is the unique ID for this nature. 
*/ final case class Nature(label: String) extends Basic { /** Constructs a Natures matcher that excludes this Nature. */ - def unary_! : Basic = Exclude(this) override def toString = label } @@ -117,7 +113,7 @@ object Natures if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) val clauses = Clauses( defined.map(d => asClause(d)) ) requestedNatures => - Logic.reduce(clauses, flatten(requestedNatures).toSet) match { + Logic.reduce(clauses, flattenConvert(requestedNatures).toSet) match { case Left(problem) => throw AutoPluginException(problem) case Right(results) => // results includes the originally requested (positive) atoms, @@ -154,8 +150,7 @@ object Natures sealed abstract class Basic extends Natures { def &&(o: Basic): Natures = And(this :: o :: Nil) } - private[sbt] final case class Exclude(n: Basic) extends Basic { - def unary_! : Basic = n + private[sbt] final case class Exclude(n: AutoPlugin) extends Basic { override def toString = s"!$n" } private[sbt] final case class And(natures: List[Basic]) extends Natures { @@ -167,16 +162,28 @@ object Natures case And(ns) => (a /: ns)(_ && _) case b: Basic => a && b } + private[sbt] def remove(a: Natures, del: Set[Basic]): Natures = a match { + case b: Basic => if(del(b)) Empty else b + case Empty => Empty + case And(ns) => + val removed = ns.filterNot(del) + if(removed.isEmpty) Empty else And(removed) + } /** Defines a clause for `ap` such that the [[Nature]] provided by `ap` is the head and the selector for `ap` is the body. 
*/ private[sbt] def asClause(ap: AutoPlugin): Clause = Clause( convert(ap.select), Set(Atom(ap.label)) ) - private[this] def flatten(n: Natures): Seq[Literal] = n match { + private[this] def flattenConvert(n: Natures): Seq[Literal] = n match { case And(ns) => convertAll(ns) case b: Basic => convertBasic(b) :: Nil case Empty => Nil } + private[sbt] def flatten(n: Natures): Seq[Basic] = n match { + case And(ns) => ns + case b: Basic => b :: Nil + case Empty => Nil + } private[this] def convert(n: Natures): Formula = n match { case And(ns) => convertAll(ns).reduce[Formula](_ && _) @@ -189,4 +196,12 @@ case a: AutoPlugin => Atom(a.label) } private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic + + /** True if the select clause `n` is satisfied by `model`. */ + def satisfied(n: Natures, model: Set[AutoPlugin], natures: Set[Nature]): Boolean = + flatten(n) forall { + case Exclude(a) => !model(a) + case n: Nature => natures(n) + case ap: AutoPlugin => model(ap) + } } \ No newline at end of file diff --git a/main/src/main/scala/sbt/NaturesDebug.scala b/main/src/main/scala/sbt/NaturesDebug.scala new file mode 100644 index 000000000..8b3bc3595 --- /dev/null +++ b/main/src/main/scala/sbt/NaturesDebug.scala @@ -0,0 +1,323 @@ +package sbt + + import Def.Setting + import Natures._ + import NaturesDebug._ + +private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: Map[String, AttributeKey[_]], val provided: Relation[AutoPlugin, AttributeKey[_]]) +{ + /** The set of [[AutoPlugin]]s that might define a key named `keyName`. + * Because plugins can define keys in different scopes, this should only be used as a guideline. 
*/ + def providers(keyName: String): Set[AutoPlugin] = nameToKey.get(keyName) match { + case None => Set.empty + case Some(key) => provided.reverse(key) + } + /** Describes alternative approaches for defining key [[keyName]] in [[context]].*/ + def toEnable(keyName: String, context: Context): List[PluginEnable] = + providers(keyName).toList.map(plugin => pluginEnable(context, plugin)) + + /** Provides text to suggest how [[notFoundKey]] can be defined in [[context]]. */ + def debug(notFoundKey: String, context: Context): String = + { + val (activated, deactivated) = Util.separate(toEnable(notFoundKey, context)) { + case pa: PluginActivated => Left(pa) + case pd: EnableDeactivated => Right(pd) + } + val activePrefix = if(activated.nonEmpty) s"Some already activated plugins define $notFoundKey: ${activated.mkString(", ")}\n" else "" + activePrefix + debugDeactivated(notFoundKey, deactivated) + } + private[this] def debugDeactivated(notFoundKey: String, deactivated: Seq[EnableDeactivated]): String = + { + val (impossible, possible) = Util.separate(deactivated) { + case pi: PluginImpossible => Left(pi) + case pr: PluginRequirements => Right(pr) + } + if(possible.nonEmpty) { + val explained = possible.map(explainPluginEnable) + val possibleString = + if(explained.size > 1) explained.zipWithIndex.map{case (s,i) => s"$i. $s"}.mkString(s"Multiple plugins are available that can provide $notFoundKey:\n", "\n", "") + else s"$notFoundKey is provided by an available (but not activated) plugin:\n${explained.mkString}" + def impossiblePlugins = impossible.map(_.plugin.label).mkString(", ") + val imPostfix = if(impossible.isEmpty) "" else s"\n\nThere are other available plugins that provide $notFoundKey, but they are impossible to add: $impossiblePlugins" + possibleString + imPostfix + } + else if(impossible.isEmpty) + s"No available plugin provides key $notFoundKey." 
+ else { + val explanations = impossible.map(explainPluginEnable) + explanations.mkString(s"Plugins are available that could provide $notFoundKey, but they are impossible to add:\n\t", "\n\t", "") + } + } + + /** Text that suggests how to activate [[plugin]] in [[context]] if possible and if it is not already activated.*/ + def help(plugin: AutoPlugin, context: Context): String = + if(context.enabled.contains(plugin)) + activatedHelp(plugin) + else + deactivatedHelp(plugin, context) + private[this] def activatedHelp(plugin: AutoPlugin): String = + { + val prefix = s"${plugin.label} is activated." + val keys = provided.forward(plugin) + val keysString = if(keys.isEmpty) "" else s"\nIt may affect these keys: ${multi(keys.toList.map(_.label))}" + val configs = plugin.projectConfigurations + val confsString = if(configs.isEmpty) "" else s"\nIt defines these configurations: ${multi(configs.map(_.name))}" + prefix + keysString + confsString + } + private[this] def deactivatedHelp(plugin: AutoPlugin, context: Context): String = + { + val prefix = s"${plugin.label} is not activated." + val keys = provided.forward(plugin) + val keysString = if(keys.isEmpty) "" else s"\nActivating it may affect these keys: ${multi(keys.toList.map(_.label))}" + val configs = plugin.projectConfigurations + val confsString = if(configs.isEmpty) "" else s"\nActivating it will define these configurations: ${multi(configs.map(_.name))}" + val toActivate = explainPluginEnable(pluginEnable(context, plugin)) + s"$prefix$keysString$confsString\n$toActivate" + } + + private[this] def multi(strs: Seq[String]): String = strs.mkString(if(strs.size > 4) "\n\t" else ", ") +} + +private[sbt] object NaturesDebug +{ + /** Precomputes information for debugging natures and plugins. 
*/ + def apply(available: List[AutoPlugin]): NaturesDebug = + { + val keyR = definedKeys(available) + val nameToKey: Map[String, AttributeKey[_]] = keyR._2s.toList.map(key => (key.label, key)).toMap + new NaturesDebug(available, nameToKey, keyR) + } + + /** The context for debugging a plugin (de)activation. + * @param initial The initially defined [[Nature]]s. + * @param enabled The resulting model. + * @param compile The function used to compute the model. + * @param available All [[AutoPlugin]]s available for consideration. */ + final case class Context(initial: Natures, enabled: Seq[AutoPlugin], compile: Natures => Seq[AutoPlugin], available: List[AutoPlugin]) + + /** Describes the steps to activate a plugin in some context. */ + sealed abstract class PluginEnable + /** Describes a [[plugin]] that is already activated in the [[context]].*/ + final case class PluginActivated(plugin: AutoPlugin, context: Context) extends PluginEnable + sealed abstract class EnableDeactivated extends PluginEnable + /** Describes a [[plugin]] that cannot be activated in a [[context]] due to [[contradictions]] in requirements. */ + final case class PluginImpossible(plugin: AutoPlugin, context: Context, contradictions: Set[AutoPlugin]) extends EnableDeactivated + + /** Describes the requirements for activating [[plugin]] in [[context]]. + * @param context The base natures, exclusions, and ultimately activated plugins + * @param blockingExcludes Existing exclusions that prevent [[plugin]] from being activated and must be dropped + * @param enablingNatures [[Nature]]s that are not currently enabled, but need to be enabled for [[plugin]] to activate + * @param extraEnabledPlugins Plugins that will be enabled as a result of [[plugin]] activating, but are not required for [[plugin]] to activate + * @param willRemove Plugins that will be deactivated as a result of [[plugin]] activating + * @param deactivate Describes plugins that must be deactivated for [[plugin]] to activate. 
These require an explicit exclusion or dropping a transitive [[Nature]].*/ + final case class PluginRequirements(plugin: AutoPlugin, context: Context, blockingExcludes: Set[AutoPlugin], enablingNatures: Set[Nature], extraEnabledPlugins: Set[AutoPlugin], willRemove: Set[AutoPlugin], deactivate: List[DeactivatePlugin]) extends EnableDeactivated + + /** Describes a [[plugin]] that must be removed in order to activate another plugin in some context. + * The [[plugin]] can always be directly, explicitly excluded. + * @param removeOneOf If non-empty, removing one of these [[Nature]]s will deactivate [[plugin]] without affecting the other plugin. If empty, a direct exclusion is required. + * @param newlySelected If false, this plugin was selected in the original context. */ + final case class DeactivatePlugin(plugin: AutoPlugin, removeOneOf: Set[Nature], newlySelected: Boolean) + + /** Determines how to enable [[plugin]] in [[context]]. */ + def pluginEnable(context: Context, plugin: AutoPlugin): PluginEnable = + if(context.enabled.contains(plugin)) + PluginActivated(plugin, context) + else + enableDeactivated(context, plugin) + + private[this] def enableDeactivated(context: Context, plugin: AutoPlugin): PluginEnable = + { + // deconstruct the context + val initialModel = context.enabled.toSet + val initial = flatten(context.initial) + val initialNatures = natures(initial) + val initialExcludes = excludes(initial) + + val minModel = minimalModel(plugin) + + /* example 1 + A :- B, not C + C :- D, E + initial: B, D, E + propose: drop D or E + + initial: B, not A + propose: drop 'not A' + + example 2 + A :- B, not C + C :- B + initial: + propose: B, exclude C + */ + + // `plugin` will only be activated when all of these natures are activated + // Deactivating any one of these would deactivate `plugin`. 
+ val minRequiredNatures = natures(minModel) + + // `plugin` will only be activated when all of these plugins are activated + // Deactivating any one of these would deactivate `plugin`. + val minRequiredPlugins = minModel.collect{ case a: AutoPlugin => a }.toSet + + // The presence of any one of these plugins would deactivate `plugin` + val minAbsentPlugins = excludes(minModel).toSet + + // Plugins that must be both activated and deactivated for `plugin` to activate. + // A non-empty list here cannot be satisfied and is an error. + val contradictions = minAbsentPlugins & minRequiredPlugins + + if(contradictions.nonEmpty) + PluginImpossible(plugin, context, contradictions) + else + { + // Natures that the user has to add to the currently selected natures in order to enable `plugin`. + val addToExistingNatures = minRequiredNatures -- initialNatures + + // Plugins that are currently excluded that need to be allowed. + val blockingExcludes = initialExcludes & minRequiredPlugins + + // The model that results when the minimal natures are enabled and the minimal plugins are excluded. + // This can include more plugins than just `minRequiredPlugins` because the natures required for `plugin` + // might activate other plugins as well. + val modelForMin = context.compile(and(includeAll(minRequiredNatures), excludeAll(minAbsentPlugins))) + + val incrementalInputs = and( includeAll(minRequiredNatures ++ initialNatures), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins)) + val incrementalModel = context.compile(incrementalInputs).toSet + + // Plugins that are newly enabled as a result of selecting the natures needed for `plugin`, but aren't strictly required for `plugin`. + // These could be excluded and `plugin` and the user's current plugins would still be activated. + val extraPlugins = incrementalModel.toSet -- minRequiredPlugins -- initialModel + + // Plugins that will no longer be enabled as a result of enabling `plugin`. 
+ val willRemove = initialModel -- incrementalModel + + // Determine the plugins that must be independently deactivated. + // If both A and B must be deactivated, but A transitively depends on B, deactivating B will deactivate A. + // If A must be deactivated, but one of its (transitively) required natures isn't present, it won't be activated. + // So, in either of these cases, A doesn't need to be considered further and won't be included in this set. + val minDeactivate = minAbsentPlugins.filter(p => Natures.satisfied(p.select, incrementalModel, natures(flatten(incrementalInputs)))) + + val deactivate = for(d <- minDeactivate.toList) yield { + // removing any one of these natures will deactivate `d`. TODO: This is not an especially efficient implementation. + val removeToDeactivate = natures(minimalModel(d)) -- minRequiredNatures + val newlySelected = !initialModel(d) + // a. suggest removing a nature in removeOneToDeactivate to deactivate d + // b. suggest excluding `d` to directly deactivate it in any case + // c. note whether d was already activated (in context.enabled) or is newly selected + DeactivatePlugin(d, removeToDeactivate, newlySelected) + } + + PluginRequirements(plugin, context, blockingExcludes, addToExistingNatures, extraPlugins, willRemove, deactivate) + } + } + + private[this] def includeAll[T <: Basic](basic: Set[T]): Natures = And(basic.toList) + private[this] def excludeAll(plugins: Set[AutoPlugin]): Natures = And(plugins map (p => Exclude(p)) toList) + + private[this] def excludes(bs: Seq[Basic]): Set[AutoPlugin] = bs.collect { case Exclude(b) => b }.toSet + private[this] def natures(bs: Seq[Basic]): Set[Nature] = bs.collect { case n: Nature => n }.toSet + + // If there is a model that includes `plugin`, it includes at least what is returned by this method. + // This is the list of natures and plugins that must be included as well as list of plugins that must not be present. 
+ // It might not be valid, such as if there are contradictions or if there are cycles that are unsatisfiable. + // The actual model might be larger, since other plugins might be enabled by the selected natures. + private[this] def minimalModel(plugin: AutoPlugin): Seq[Basic] = Dag.topologicalSortUnchecked(plugin: Basic) { + case _: Exclude | _: Nature => Nil + case ap: AutoPlugin => Natures.flatten(ap.select) + } + + /** String representation of [[PluginEnable]], intended for end users. */ + def explainPluginEnable(ps: PluginEnable): String = + ps match { + case PluginRequirements(plugin, context, blockingExcludes, enablingNatures, extraEnabledPlugins, toBeRemoved, deactivate) => + val parts = + excludedError(false /* TODO */, blockingExcludes.toList) :: + required(enablingNatures.toList) :: + willAdd(plugin, extraEnabledPlugins.toList) :: + willRemove(plugin, toBeRemoved.toList) :: + needToDeactivate(deactivate) :: + Nil + parts.mkString("\n") + case PluginImpossible(plugin, context, contradictions) => pluginImpossible(plugin, contradictions) + case PluginActivated(plugin, context) => s"Plugin ${plugin.label} already activated." + } + + /** Provides a [[Relation]] between plugins and the keys they potentially define. 
+ * Because plugins can define keys in different scopes and keys can be overridden, this is not definitive.*/ + def definedKeys(available: List[AutoPlugin]): Relation[AutoPlugin, AttributeKey[_]] = + { + def extractDefinedKeys(ss: Seq[Setting[_]]): Seq[AttributeKey[_]] = + ss.map(_.key.key) + def allSettings(p: AutoPlugin): Seq[Setting[_]] = p.projectSettings ++ p.buildSettings ++ p.globalSettings + val empty = Relation.empty[AutoPlugin, AttributeKey[_]] + (empty /: available)( (r,p) => r + (p, extractDefinedKeys(allSettings(p))) ) + } + + private[this] def excludedError(transitive: Boolean, dependencies: List[AutoPlugin]): String = + str(dependencies)(excludedPluginError(transitive), excludedPluginsError(transitive)) + + private[this] def excludedPluginError(transitive: Boolean)(dependency: AutoPlugin) = + s"Required ${transitiveString(transitive)}dependency ${dependency.label} was excluded." + private[this] def excludedPluginsError(transitive: Boolean)(dependencies: List[AutoPlugin]) = + s"Required ${transitiveString(transitive)}dependencies were excluded:\n\t${labels(dependencies).mkString("\n\t")}" + private[this] def transitiveString(transitive: Boolean) = + if(transitive) "(transitive) " else "" + + private[this] def required(natures: List[Nature]): String = + str(natures)(requiredNature, requiredNatures) + + private[this] def requiredNature(nature: Nature) = + s"Required nature ${nature.label} not present." 
+ private[this] def requiredNatures(natures: List[Nature]) = + s"Required natures not present:\n\t${natures.map(_.label).mkString("\n\t")}" + + private[this] def str[A](list: List[A])(f: A => String, fs: List[A] => String): String = list match { + case Nil => "" + case single :: Nil => f(single) + case _ => fs(list) + } + + private[this] def willAdd(base: AutoPlugin, plugins: List[AutoPlugin]): String = + str(plugins)(willAddPlugin(base), willAddPlugins(base)) + + private[this] def willAddPlugin(base: AutoPlugin)(plugin: AutoPlugin) = + s"Enabling ${base.label} will also enable ${plugin.label}" + private[this] def willAddPlugins(base: AutoPlugin)(plugins: List[AutoPlugin]) = + s"Enabling ${base.label} will also enable:\n\t${labels(plugins).mkString("\n\t")}" + + private[this] def willRemove(base: AutoPlugin, plugins: List[AutoPlugin]): String = + str(plugins)(willRemovePlugin(base), willRemovePlugins(base)) + + private[this] def willRemovePlugin(base: AutoPlugin)(plugin: AutoPlugin) = + s"Enabling ${base.label} will disable ${plugin.label}" + private[this] def willRemovePlugins(base: AutoPlugin)(plugins: List[AutoPlugin]) = + s"Enabling ${base.label} will disable:\n\t${labels(plugins).mkString("\n\t")}" + + private[this] def labels(plugins: List[AutoPlugin]): List[String] = + plugins.map(_.label) + + private[this] def needToDeactivate(deactivate: List[DeactivatePlugin]): String = + str(deactivate)(deactivate1, deactivateN) + private[this] def deactivateN(plugins: List[DeactivatePlugin]): String = + plugins.map(deactivate1).mkString("These plugins need to be deactivated:\n\t", "\n\t", "") + private[this] def deactivate1(deactivate: DeactivatePlugin): String = + s"Deactivate ${deactivateString(deactivate)}" + private[this] def deactivateString(d: DeactivatePlugin): String = + { + val removeNaturesString: String = + d.removeOneOf.toList match { + case Nil => "" + case x :: Nil => s"or no longer include $x" + case xs => s"or remove one of ${xs.mkString(", ")}" + } + 
s"${d.plugin.label}: directly exclude it${removeNaturesString}" + } + + private[this] def pluginImpossible(plugin: AutoPlugin, contradictions: Set[AutoPlugin]): String = + str(contradictions.toList)(pluginImpossible1(plugin), pluginImpossibleN(plugin)) + + private[this] def pluginImpossible1(plugin: AutoPlugin)(contradiction: AutoPlugin): String = + s"There is no way to enable plugin ${plugin.label}. It (or its dependencies) requires plugin ${contradiction.label} to both be present and absent. Please report the problem to the plugin's author." + private[this] def pluginImpossibleN(plugin: AutoPlugin)(contradictions: List[AutoPlugin]): String = + s"There is no way to enable plugin ${plugin.label}. It (or its dependencies) requires these plugins to be both present and absent:\n\t${labels(contradictions).mkString("\n\t")}\nPlease report the problem to the plugin's author." +} \ No newline at end of file From 2bf127aaf60ac7b18baf7151192138104d7fa1b5 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Thu, 20 Feb 2014 13:28:02 -0500 Subject: [PATCH 069/148] Make BuildDependencies construction a proper function outside of the settings system. 
--- main/src/main/scala/sbt/BuildUtil.scala | 14 ++++++++++++++ main/src/main/scala/sbt/Defaults.scala | 15 ++------------- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/main/src/main/scala/sbt/BuildUtil.scala b/main/src/main/scala/sbt/BuildUtil.scala index c48e721f7..dd963e05d 100644 --- a/main/src/main/scala/sbt/BuildUtil.scala +++ b/main/src/main/scala/sbt/BuildUtil.scala @@ -48,6 +48,20 @@ object BuildUtil new BuildUtil(keyIndex, data, root, Load getRootProject units, getp, configs, aggregates) } + def dependencies(units: Map[URI, LoadedBuildUnit]): BuildDependencies = + { + import collection.mutable.HashMap + val agg = new HashMap[ProjectRef, Seq[ProjectRef]] + val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]] + for(lbu <- units.values; rp <- lbu.defined.values) + { + val ref = ProjectRef(lbu.unit.uri, rp.id) + cp(ref) = rp.dependencies + agg(ref) = rp.aggregate + } + BuildDependencies(cp.toMap, agg.toMap) + } + def checkCycles(units: Map[URI, LoadedBuildUnit]) { def getRef(pref: ProjectRef) = units(pref.build).defined(pref.project) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 5ca5a6356..cd651e21d 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -1254,19 +1254,8 @@ object Classpaths if(useJars) Seq(pkgTask).join else psTask } - def constructBuildDependencies: Initialize[BuildDependencies] = - loadedBuild { lb => - import collection.mutable.HashMap - val agg = new HashMap[ProjectRef, Seq[ProjectRef]] - val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]] - for(lbu <- lb.units.values; rp <- lbu.defined.values) - { - val ref = ProjectRef(lbu.unit.uri, rp.id) - cp(ref) = rp.dependencies - agg(ref) = rp.aggregate - } - BuildDependencies(cp.toMap, agg.toMap) - } + def constructBuildDependencies: Initialize[BuildDependencies] = loadedBuild(lb => BuildUtil.dependencies(lb.units)) + def internalDependencies: 
Initialize[Task[Classpath]] = (thisProjectRef, classpathConfiguration, configuration, settingsData, buildDependencies) flatMap internalDependencies0 def unmanagedDependencies: Initialize[Task[Classpath]] = From 90134b3af094db9a1d1133664df095fd07c8e847 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Thu, 20 Feb 2014 13:51:17 -0500 Subject: [PATCH 070/148] 'plugins' and 'plugin ' commands to inspect plugins * 'plugins' displays the list of plugins available for each build along with the project IDs each is enabled on * 'plugin ' displays information about a specific plugin in the context of the current project - if the plugin is activated on the current project and if so, information about the keys/configurations it provides - how the plugin could be activated if possible * tries to detect when it is run on an aggregating project and adjusts accordingly - indicates if an aggregated project has the plugin activated - indicates to change to the specific project to get the right context This is a rough implementation and needs lots of polishing and deduplicating. The help for the commands needs to be added/expanded. 
--- main/src/main/scala/sbt/BuildStructure.scala | 1 + main/src/main/scala/sbt/CommandStrings.scala | 5 ++ main/src/main/scala/sbt/Main.scala | 16 +++- main/src/main/scala/sbt/NaturesDebug.scala | 89 +++++++++++++++++--- 4 files changed, 97 insertions(+), 14 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 0ae96b923..1fddbf2a0 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -142,6 +142,7 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) BuildUtil.checkCycles(units) def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = for( (uri, unit) <- units.toSeq; (id, proj) <- unit.defined ) yield ProjectRef(uri, id) -> proj def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data) + private[sbt] def autos = GroupedAutoPlugins(units) } final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit]) diff --git a/main/src/main/scala/sbt/CommandStrings.scala b/main/src/main/scala/sbt/CommandStrings.scala index 9baf0e7d1..14cc6fee9 100644 --- a/main/src/main/scala/sbt/CommandStrings.scala +++ b/main/src/main/scala/sbt/CommandStrings.scala @@ -49,6 +49,11 @@ $ShowCommand Evaluates the specified task and display the value returned by the task.""" + val PluginsCommand = "plugins" + val PluginCommand = "plugin" + def pluginsBrief = "Lists currently available plugins." 
+ def pluginsDetailed = pluginsBrief // TODO: expand + val LastCommand = "last" val LastGrepCommand = "last-grep" val ExportCommand = "export" diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index c582426ae..ad5291ec2 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -89,7 +89,7 @@ object BuiltinCommands def ScriptCommands: Seq[Command] = Seq(ignore, exit, Script.command, setLogLevel, early, act, nop) def DefaultCommands: Seq[Command] = Seq(ignore, help, completionsCommand, about, tasks, settingsCommand, loadProject, projects, project, reboot, read, history, set, sessionCommand, inspect, loadProjectImpl, loadFailed, Cross.crossBuild, Cross.switchVersion, - setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, setLogLevel, + setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, setLogLevel, plugin, plugins, ifLast, multi, shell, continuous, eval, alias, append, last, lastGrep, export, boot, nop, call, exit, early, initialize, act) ++ compatCommands def DefaultBootCommands: Seq[String] = LoadProject :: (IfLast + " " + Shell) :: Nil @@ -375,6 +375,20 @@ object BuiltinCommands Help.detailOnly(taskDetail(allTaskAndSettingKeys(s))) else Help.empty + def plugins = Command.command(PluginsCommand, pluginsBrief, pluginsDetailed) { s => + val helpString = NaturesDebug.helpAll(s) + System.out.println(helpString) + s + } + val pluginParser: State => Parser[AutoPlugin] = s => { + val autoPlugins: Map[String, AutoPlugin] = NaturesDebug.autoPluginMap(s) + token(Space) ~> Act.knownIDParser(autoPlugins, "plugin") + } + def plugin = Command(PluginCommand)(pluginParser) { (s, plugin) => + val helpString = NaturesDebug.help(plugin, s) + System.out.println(helpString) + s + } def projects = Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed )(s => projectsParser(s).?) 
{ case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds) diff --git a/main/src/main/scala/sbt/NaturesDebug.scala b/main/src/main/scala/sbt/NaturesDebug.scala index 8b3bc3595..d0e27a9dd 100644 --- a/main/src/main/scala/sbt/NaturesDebug.scala +++ b/main/src/main/scala/sbt/NaturesDebug.scala @@ -3,6 +3,7 @@ package sbt import Def.Setting import Natures._ import NaturesDebug._ + import java.net.URI private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: Map[String, AttributeKey[_]], val provided: Relation[AutoPlugin, AttributeKey[_]]) { @@ -55,7 +56,7 @@ private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: activatedHelp(plugin) else deactivatedHelp(plugin, context) - private[this] def activatedHelp(plugin: AutoPlugin): String = + private def activatedHelp(plugin: AutoPlugin): String = { val prefix = s"${plugin.label} is activated." val keys = provided.forward(plugin) @@ -64,9 +65,9 @@ private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: val confsString = if(configs.isEmpty) "" else s"\nIt defines these configurations: ${multi(configs.map(_.name))}" prefix + keysString + confsString } - private[this] def deactivatedHelp(plugin: AutoPlugin, context: Context): String = + private def deactivatedHelp(plugin: AutoPlugin, context: Context): String = { - val prefix = s"${plugin.label} is not activated." + val prefix = s"${plugin.label} is NOT activated." 
val keys = provided.forward(plugin) val keysString = if(keys.isEmpty) "" else s"\nActivating it may affect these keys: ${multi(keys.toList.map(_.label))}" val configs = plugin.projectConfigurations @@ -80,6 +81,66 @@ private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: private[sbt] object NaturesDebug { + def helpAll(s: State): String = + if(Project.isProjectLoaded(s)) + { + val extracted = Project.extract(s) + import extracted._ + def helpBuild(uri: URI, build: LoadedBuildUnit): String = + { + val pluginStrings = for(plugin <- availableAutoPlugins(build)) yield { + val activatedIn = build.defined.values.toList.filter(_.autoPlugins.contains(plugin)).map(_.id) + val actString = if(activatedIn.nonEmpty) activatedIn.mkString(": enabled in ", ", ", "") else "" // TODO: deal with large builds + s"\n\t${plugin.label}$actString" + } + s"In $uri${pluginStrings.mkString}" + } + val buildStrings = for((uri, build) <- structure.units) yield helpBuild(uri, build) + buildStrings.mkString("\n") + } + else + "No project is currently loaded." 
+ + def autoPluginMap(s: State): Map[String, AutoPlugin] = + { + val extracted = Project.extract(s) + import extracted._ + structure.units.values.toList.flatMap(availableAutoPlugins).map(plugin => (plugin.label, plugin)).toMap + } + private[this] def availableAutoPlugins(build: LoadedBuildUnit): Seq[AutoPlugin] = + build.unit.plugins.detected.autoPlugins.values + + def help(plugin: AutoPlugin, s: State): String = + { + val extracted = Project.extract(s) + import extracted._ + def definesPlugin(p: ResolvedProject): Boolean = p.autoPlugins.contains(plugin) + def projectForRef(ref: ProjectRef): ResolvedProject = get(Keys.thisProject in ref) + val perBuild: Map[URI, Set[AutoPlugin]] = structure.units.mapValues(unit => availableAutoPlugins(unit).toSet) + val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList + lazy val context = Context(currentProject.natures, currentProject.autoPlugins, Natures.compile(pluginsThisBuild), pluginsThisBuild) + lazy val debug = NaturesDebug(context.available) + if(!pluginsThisBuild.contains(plugin)) { + val availableInBuilds: List[URI] = perBuild.toList.filter(_._2(plugin)).map(_._1) + s"Plugin ${plugin.label} is only available in builds:\n\t${availableInBuilds.mkString("\n\t")}\nSwitch to a project in one of those builds using `project` and rerun this command for more information." 
+ } else if(definesPlugin(currentProject)) + debug.activatedHelp(plugin) + else { + val thisAggregated = BuildUtil.dependencies(structure.units).aggregateTransitive.getOrElse(currentRef, Nil) + val definedInAggregated = thisAggregated.filter(ref => definesPlugin(projectForRef(ref))) + if(definedInAggregated.nonEmpty) { + val projectNames = definedInAggregated.map(_.project) // TODO: usually in this build, but could technically require the build to be qualified + s"Plugin ${plugin.label} is not activated on this project, but this project aggregates projects where it is activated:\n\t${projectNames.mkString("\n\t")}" + } else { + val base = debug.deactivatedHelp(plugin, context) + val aggNote = if(thisAggregated.nonEmpty) "Note: This project aggregates other projects and this" else "Note: This" + val common = " information is for this project only." + val helpOther = "To see how to activate this plugin for another project, change to the project using `project ` and rerun this command." + s"$base\n$aggNote$common\n$helpOther" + } + } + } + /** Precomputes information for debugging natures and plugins. 
*/ def apply(available: List[AutoPlugin]): NaturesDebug = { @@ -230,14 +291,16 @@ private[sbt] object NaturesDebug def explainPluginEnable(ps: PluginEnable): String = ps match { case PluginRequirements(plugin, context, blockingExcludes, enablingNatures, extraEnabledPlugins, toBeRemoved, deactivate) => + def indent(str: String) = if(str.isEmpty) "" else s"\t$str" + def note(str: String) = if(str.isEmpty) "" else s"Note: $str" val parts = - excludedError(false /* TODO */, blockingExcludes.toList) :: - required(enablingNatures.toList) :: - willAdd(plugin, extraEnabledPlugins.toList) :: - willRemove(plugin, toBeRemoved.toList) :: - needToDeactivate(deactivate) :: + indent(excludedError(false /* TODO */, blockingExcludes.toList)) :: + indent(required(enablingNatures.toList)) :: + indent(needToDeactivate(deactivate)) :: + note(willAdd(plugin, extraEnabledPlugins.toList)) :: + note(willRemove(plugin, toBeRemoved.toList)) :: Nil - parts.mkString("\n") + parts.filterNot(_.isEmpty).mkString("\n") case PluginImpossible(plugin, context, contradictions) => pluginImpossible(plugin, contradictions) case PluginActivated(plugin, context) => s"Plugin ${plugin.label} already activated." 
} @@ -299,16 +362,16 @@ private[sbt] object NaturesDebug private[this] def needToDeactivate(deactivate: List[DeactivatePlugin]): String = str(deactivate)(deactivate1, deactivateN) private[this] def deactivateN(plugins: List[DeactivatePlugin]): String = - plugins.map(deactivate1).mkString("These plugins need to be deactivated:\n\t", "\n\t", "") + plugins.map(deactivateString).mkString("These plugins need to be deactivated:\n\t", "\n\t", "") private[this] def deactivate1(deactivate: DeactivatePlugin): String = - s"Deactivate ${deactivateString(deactivate)}" + s"Need to deactivate ${deactivateString(deactivate)}" private[this] def deactivateString(d: DeactivatePlugin): String = { val removeNaturesString: String = d.removeOneOf.toList match { case Nil => "" - case x :: Nil => s"or no longer include $x" - case xs => s"or remove one of ${xs.mkString(", ")}" + case x :: Nil => s" or no longer include $x" + case xs => s" or remove one of ${xs.mkString(", ")}" } s"${d.plugin.label}: directly exclude it${removeNaturesString}" } From 2ee2576cb91821b197675738910ef4a934638b61 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Thu, 20 Feb 2014 20:04:13 -0500 Subject: [PATCH 071/148] Fix auto-plugin test from removal of unary_! on Nature but not AutoPlugin. 
--- sbt/src/sbt-test/project/auto-plugins/project/Q.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index db51922cf..e092e0fd1 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -6,7 +6,7 @@ object AI extends AutoImport { lazy val A = Nature("A") lazy val B = Nature("B") - lazy val D = Nature("D") + lazy val E = Nature("E") lazy val q = config("q") lazy val p = config("p").extend(q) @@ -19,6 +19,10 @@ object AI extends AutoImport import AI._ +object D extends AutoPlugin { + def select: Natures = E +} + object Q extends AutoPlugin { def select: Natures = A && B @@ -48,6 +52,7 @@ object Q extends AutoPlugin object R extends AutoPlugin { + // NOTE - Only plugins themselves support exclusions... def select = Q && !D override def projectSettings = Seq( From 0e337b8cbdefcece68604db597117b2c2e0419ed Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 21 Feb 2014 09:26:32 -0500 Subject: [PATCH 072/148] Fix binary-plugin scripted test so it can run more than once. 
--- .../sbt-test/project/binary-plugin/changes/use/plugins.sbt | 2 +- sbt/src/sbt-test/project/binary-plugin/common.sbt | 6 +++--- sbt/src/sbt-test/project/binary-plugin/test | 4 +++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt b/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt index b20bc97c3..795dff137 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt +++ b/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt @@ -1 +1 @@ -addSbtPlugin("org.example" % "demo-plugin" % "3.4") +addSbtPlugin("org.example" % "demo-plugin" % "3.4-SNAPSHOT") diff --git a/sbt/src/sbt-test/project/binary-plugin/common.sbt b/sbt/src/sbt-test/project/binary-plugin/common.sbt index 0cf61c76c..4b30c03d6 100644 --- a/sbt/src/sbt-test/project/binary-plugin/common.sbt +++ b/sbt/src/sbt-test/project/binary-plugin/common.sbt @@ -1,7 +1,7 @@ organization in ThisBuild := "org.example" -version in ThisBuild := "3.4" +// We have to use snapshot because this is publishing to our local ivy cache instead of +// an integration cache, so we're in danger land. +version in ThisBuild := "3.4-SNAPSHOT" -lazy val define = project -lazy val use = project diff --git a/sbt/src/sbt-test/project/binary-plugin/test b/sbt/src/sbt-test/project/binary-plugin/test index ceb4e6a76..169511975 100644 --- a/sbt/src/sbt-test/project/binary-plugin/test +++ b/sbt/src/sbt-test/project/binary-plugin/test @@ -1,10 +1,12 @@ +# First we define the plugin project and publish it $ copy-file changes/define/build.sbt build.sbt $ copy-file changes/define/A.scala A.scala # reload implied > publishLocal +# Now we remove the source code and define a project which uses the build. 
$ delete build.sbt A.scala $ copy-file changes/use/plugins.sbt project/plugins.sbt > reload -> extra/check +> check From ba8a22679a73afd2a2e7b9ef354aeee85f64b676 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 21 Feb 2014 13:19:00 -0500 Subject: [PATCH 073/148] Migrate docs to use bintray for plugins. --- src/sphinx/Community/Bintray-For-Plugins.rst | 3 ++ src/sphinx/Community/Community-Plugins.rst | 34 +------------------- 2 files changed, 4 insertions(+), 33 deletions(-) diff --git a/src/sphinx/Community/Bintray-For-Plugins.rst b/src/sphinx/Community/Bintray-For-Plugins.rst index 35efa331a..ca274a9bc 100644 --- a/src/sphinx/Community/Bintray-For-Plugins.rst +++ b/src/sphinx/Community/Bintray-For-Plugins.rst @@ -88,6 +88,9 @@ Make sure your project has a valid license specified, as well as unique name and Make a release ============== + +*Note: bintray does not support snapshots. We recommend using `git-revisions supplied by the sbt-git plugin `_. + Once your build is configured, open the sbt console in your build and run: .. code-block:: console diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index d13ad1c75..d59374506 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -16,39 +16,7 @@ Community Ivy Repository ======================== `Typesafe `_ has provided a freely available `Ivy Repository `_ for sbt projects to use. -If you would like to publish your project to this Ivy repository, first contact `sbt-repo-admins `_ and request privileges (we have to verify code ownership, rights to publish, etc.). After which, you can deploy your plugins using the following configuration: - -:: - - publishTo := Some(Resolver.url("sbt-plugin-releases", new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)) - - publishMavenStyle := false - -You'll also need to add your credentials somewhere. 
For example, you might use a `~/.sbt/pluginpublish.sbt` file: - -:: - - credentials += Credentials("Artifactory Realm", - "repo.scala-sbt.org", "@user name@", "@my encrypted password@") - -Where `@my encrypted password@` is actually obtained using the following `instructions `_. - -*Note: Your code must abide by the* `repository polices `_. - -To automatically deploy snapshot/release versions of your plugin use the following configuration: - -:: - - publishTo := { - val scalasbt = "http://repo.scala-sbt.org/scalasbt/" - val (name, url) = if (version.value.contains("-SNAPSHOT")) - ("sbt-plugin-snapshots", scalasbt+"sbt-plugin-snapshots") - else - ("sbt-plugin-releases", scalasbt+"sbt-plugin-releases") - Some(Resolver.url(name, new URL(url))(Resolver.ivyStylePatterns)) - } - -*Note: ivy repositories currently don't support Maven-style snapshots.* +This ivy repository is mirrored from the freely available `Bintray service `_. If you'd like to submit your plugin, please follow these instructions: `Bintray For Plugins `_. Available Plugins ================= From 1baa672275dd40e5fd010557dc9777d1e60d5189 Mon Sep 17 00:00:00 2001 From: Charles Feduke Date: Fri, 21 Feb 2014 16:12:34 -0500 Subject: [PATCH 074/148] updated doc to include publishing PGP keys to the key server pool --- src/sphinx/Community/Using-Sonatype.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/sphinx/Community/Using-Sonatype.rst b/src/sphinx/Community/Using-Sonatype.rst index a1f4bff51..62b705ba2 100644 --- a/src/sphinx/Community/Using-Sonatype.rst +++ b/src/sphinx/Community/Using-Sonatype.rst @@ -18,6 +18,19 @@ Follow the instructions for the plugin and you'll have PGP signed artifacts in n artifacts. It can work with the GPG command line tool, but the command line is not needed.* +If your PGP key has not yet been distributed to the keyserver pool, i.e., +you've just generated it, you'll need to publish it. 
You can do so using +the `sbt-pgp `_ plugin: + +:: + + pgp-cmd send-key keyname hkp://pool.sks-keyservers.net/ + +(where keyname is the name, email address used when creating the key or +hexadecimal identifier for the key.) + +If you see no output from sbt-pgp then the key name specified was not found. + Second - Maven Publishing Settings ---------------------------------- From 8c4527317f22ace5aea36567bd7921fc7dd47352 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 21 Feb 2014 22:14:13 +0100 Subject: [PATCH 075/148] SI-7788 Adapt to renamed implict Predef.conforms In 2.11, the implicit version is named `$conforms` so as to avoid accidental shadowing by user code, which renders methods using views and subtype bounds inexplicable unusable. But, SBT intentionally needs to hide it to make the implicits in this file line up. This commit opts-in the the required identifiers from Predef, rather than opting out of conforms. This makes the same code source compatible with 2.10 and 2.11. --- main/actions/src/main/scala/sbt/CacheIvy.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main/actions/src/main/scala/sbt/CacheIvy.scala b/main/actions/src/main/scala/sbt/CacheIvy.scala index ec823bb12..b09ce7f2b 100644 --- a/main/actions/src/main/scala/sbt/CacheIvy.scala +++ b/main/actions/src/main/scala/sbt/CacheIvy.scala @@ -3,7 +3,7 @@ */ package sbt - import Predef.{conforms => _, _} + import Predef.{Map, Set, implicitly} // excludes *both 2.10.x conforms and 2.11.x $conforms in source compatible manner. import FileInfo.{exists, hash} import java.io.File From 0f6fe85d3fab9187f24d1da7845f8ce63e1f4f6a Mon Sep 17 00:00:00 2001 From: Antoine Gourlay Date: Wed, 26 Feb 2014 01:23:32 +0100 Subject: [PATCH 076/148] update doc: javaHome is not File but Option[File]. 
--- src/sphinx/Detailed-Topics/Forking.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sphinx/Detailed-Topics/Forking.rst b/src/sphinx/Detailed-Topics/Forking.rst index 0eb910ee3..4dd424f90 100644 --- a/src/sphinx/Detailed-Topics/Forking.rst +++ b/src/sphinx/Detailed-Topics/Forking.rst @@ -104,7 +104,7 @@ directory: :: - javaHome := file("/path/to/jre/") + javaHome := Some(file("/path/to/jre/")) Note that if this is set globally, it also sets the Java installation used to compile Java sources. You can restrict it to running only by @@ -112,7 +112,7 @@ setting it in the :key:`run` scope: :: - javaHome in run := file("/path/to/jre/") + javaHome in run := Some(file("/path/to/jre/")) As with the other settings, you can specify the configuration to affect only the main or test :key:`run` tasks or just the :key:`test` tasks. From c9208a47195939834c270b10febc03b54731eed1 Mon Sep 17 00:00:00 2001 From: Luca Milanesio Date: Thu, 27 Feb 2014 10:40:42 +0000 Subject: [PATCH 077/148] Completed a full example of forked tests with grouping The previous example of how to fork group of tests in different JVMs was incomplete and not fully working. a) The "testGrouping in Test" is needed, otherwise the grouping is not known to which phase to apply b) Added a more complete Project definition to explain what the curly brackets section refers to and where the settings need to be included The new complete example works out of the box :-) --- src/sphinx/Detailed-Topics/Testing.rst | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/sphinx/Detailed-Topics/Testing.rst b/src/sphinx/Detailed-Topics/Testing.rst index 120c1b740..d33021508 100644 --- a/src/sphinx/Detailed-Topics/Testing.rst +++ b/src/sphinx/Detailed-Topics/Testing.rst @@ -195,14 +195,22 @@ available with :key:`testGrouping` key. 
For example: :: + import sbt._ + import Keys._ import Tests._ + import Defaults._ - { + object ForkTestsBuild extends Build { def groupByFirst(tests: Seq[TestDefinition]) = tests groupBy (_.name(0)) map { case (letter, tests) => new Group(letter.toString, tests, SubProcess(Seq("-Dfirst.letter"+letter))) } toSeq; - testGrouping := groupByFirst( (definedTests in Test).value ) + + lazy val root = Project("root", file("."), settings = defaultSettings ++ Seq( + scalaVersion := "2.10.3", + testGrouping in Test := groupByFirst( (definedTests in Test).value ), + libraryDependencies += "org.scalatest" %% "scalatest" % "2.0" % "test" + )) } The tests in a single group are run sequentially. Control the number From 63583d2544eddc793eb23256850011900e03c42f Mon Sep 17 00:00:00 2001 From: Luca Milanesio Date: Thu, 27 Feb 2014 10:51:06 +0000 Subject: [PATCH 078/148] Removed redundant semicolon --- src/sphinx/Detailed-Topics/Testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sphinx/Detailed-Topics/Testing.rst b/src/sphinx/Detailed-Topics/Testing.rst index d33021508..67704e7a2 100644 --- a/src/sphinx/Detailed-Topics/Testing.rst +++ b/src/sphinx/Detailed-Topics/Testing.rst @@ -204,7 +204,7 @@ available with :key:`testGrouping` key. 
For example: def groupByFirst(tests: Seq[TestDefinition]) = tests groupBy (_.name(0)) map { case (letter, tests) => new Group(letter.toString, tests, SubProcess(Seq("-Dfirst.letter"+letter))) - } toSeq; + } toSeq lazy val root = Project("root", file("."), settings = defaultSettings ++ Seq( scalaVersion := "2.10.3", From bedfb12163da3172f0abee76508648b635a64161 Mon Sep 17 00:00:00 2001 From: Luca Milanesio Date: Fri, 28 Feb 2014 15:18:08 +0000 Subject: [PATCH 079/148] Making example specific to build.sbt (see discussion on https://github.com/sbt/sbt/pull/1139) --- src/sphinx/Detailed-Topics/Testing.rst | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/sphinx/Detailed-Topics/Testing.rst b/src/sphinx/Detailed-Topics/Testing.rst index 67704e7a2..c3ff52fa3 100644 --- a/src/sphinx/Detailed-Topics/Testing.rst +++ b/src/sphinx/Detailed-Topics/Testing.rst @@ -191,26 +191,19 @@ The setting: specifies that all tests will be executed in a single external JVM. See :doc:`Forking` for configuring standard options for forking. More control over how tests are assigned to JVMs and what options to pass to those is -available with :key:`testGrouping` key. For example: +available with :key:`testGrouping` key. For example in build.sbt: :: - import sbt._ - import Keys._ import Tests._ - import Defaults._ - object ForkTestsBuild extends Build { + { def groupByFirst(tests: Seq[TestDefinition]) = tests groupBy (_.name(0)) map { case (letter, tests) => new Group(letter.toString, tests, SubProcess(Seq("-Dfirst.letter"+letter))) } toSeq - lazy val root = Project("root", file("."), settings = defaultSettings ++ Seq( - scalaVersion := "2.10.3", - testGrouping in Test := groupByFirst( (definedTests in Test).value ), - libraryDependencies += "org.scalatest" %% "scalatest" % "2.0" % "test" - )) + testGrouping in Test <<= groupByFirst( (definedTests in Test).value ) } The tests in a single group are run sequentially. 
Control the number From 71c9ec0e553152b5bf7148bd96ca6edb41560e98 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 3 Mar 2014 10:20:15 -0500 Subject: [PATCH 080/148] Adding quick test for #1136. * Just checks to see if + in version numbers are removed from pom files. --- .../make-pom/project/MakePomTest.scala | 14 +++++++++++++- .../sbt-test/dependency-management/make-pom/test | 3 ++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala index 961edec25..342c4cf42 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala +++ b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala @@ -8,8 +8,10 @@ object MakePomTest extends Build readPom <<= makePom map XML.loadFile, TaskKey[Unit]("check-pom") <<= checkPom, TaskKey[Unit]("check-extra") <<= checkExtra, + TaskKey[Unit]("check-version-plus-mapping") <<= checkVersionPlusMapping, resolvers += Resolver.sonatypeRepo("snapshots"), - makePomConfiguration ~= { _.copy(extra = ) } + makePomConfiguration ~= { _.copy(extra = ) }, + libraryDependencies += "com.google.code.findbugs" % "jsr305" % "1.3.+" ) val readPom = TaskKey[Elem]("read-pom") @@ -33,6 +35,16 @@ object MakePomTest extends Build if(extra.isEmpty) error("'" + extraTagName + "' not found in generated pom.xml.") else () } + lazy val checkVersionPlusMapping = (readPom) map { (pomXml) => + var found = false + for { + dep <- pomXml \ "dependencies" \ "dependency" + if (dep \ "artifactId").text == "jsr305" + if (dep \ "version").text contains "+" + } sys.error(s"Found dependency with invalid maven version: $dep") + () + } + lazy val checkPom = (readPom, fullResolvers) map { (pomXML, ivyRepositories) => checkProject(pomXML) withRepositories(pomXML) { repositoriesElement => diff --git a/sbt/src/sbt-test/dependency-management/make-pom/test 
b/sbt/src/sbt-test/dependency-management/make-pom/test index 3d7f79218..4e3cfe973 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom/test +++ b/sbt/src/sbt-test/dependency-management/make-pom/test @@ -1,2 +1,3 @@ > check-pom -> check-extra \ No newline at end of file +> check-extra +> check-version-plus-mapping \ No newline at end of file From 8cd44646b7bac84feef3d37e397a3aa52eb29c15 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 3 Mar 2014 12:10:37 -0500 Subject: [PATCH 081/148] Fix #1136 - Ivy's `+` dependencies not converted to maven style syntax. * Attempt to convert dependencies that end in `+` into maven-style version range * if a failure occurs, just use the original version (could be bad...). --- ivy/src/main/scala/sbt/MakePom.scala | 21 ++++++++++++++++++- .../make-pom/project/MakePomTest.scala | 2 +- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/ivy/src/main/scala/sbt/MakePom.scala b/ivy/src/main/scala/sbt/MakePom.scala index 5e49243d3..4c4b7c8d0 100644 --- a/ivy/src/main/scala/sbt/MakePom.scala +++ b/ivy/src/main/scala/sbt/MakePom.scala @@ -188,7 +188,7 @@ class MakePom(val log: Logger) {mrid.getOrganisation} {mrid.getName} - {mrid.getRevision} + {makeDependencyVersion(mrid.getRevision)} { scopeElem(scope) } { optionalElem(optional) } { classifierElem(classifier) } @@ -197,6 +197,25 @@ class MakePom(val log: Logger) } + + + def makeDependencyVersion(revision: String): String = { + if(revision endsWith "+") try { + // TODO - this is the slowest possible implementation. 
+ val beforePlus = revision.reverse.dropWhile(_ != '.').drop(1).reverse + val lastVersion = beforePlus.reverse.takeWhile(_ != '.').reverse + val lastVersionInt = lastVersion.toInt + val prefixVersion = beforePlus.reverse.dropWhile(_ != '.').drop(1).reverse + s"[$beforePlus, ${prefixVersion}.${lastVersionInt+1})" + } catch { + case e: NumberFormatException => + // TODO - if the version deosn't meet our expectations, maybe we just issue a hard + // error instead of softly ignoring the attempt to rewrite. + //sys.error(s"Could not fix version [$revision] into maven style version") + revision + } else revision + } + @deprecated("No longer used and will be removed.", "0.12.1") def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = { diff --git a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala index 342c4cf42..3f5f893fa 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala +++ b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala @@ -40,7 +40,7 @@ object MakePomTest extends Build for { dep <- pomXml \ "dependencies" \ "dependency" if (dep \ "artifactId").text == "jsr305" - if (dep \ "version").text contains "+" + if (dep \ "version").text != "[1.3, 1.4)" } sys.error(s"Found dependency with invalid maven version: $dep") () } From 21da87c4e7df3904ab5dbdcd5d5bdfa53102e86f Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 4 Mar 2014 11:36:34 -0500 Subject: [PATCH 082/148] Fixes #429 - Add better error messages when credentials are not found. * Create hackery to inspect registered credentials in the IvyCredentialStore. * Create a new authenticator which inserts itself *after* the ivy authenticator. - Will issue an error message detailing host/realm required if credentials are not found. - Also lists out configured Realms with a 'is misspelled' message. 
- Ignores proxy-related authentication errors, for now. --- ivy/src/main/scala/sbt/Ivy.scala | 2 + .../ivyint/ErrorMessageAuthenticator.scala | 128 ++++++++++++++++++ .../sbt/ivyint/IvyCredentialsLookup.scala | 63 +++++++++ 3 files changed, 193 insertions(+) create mode 100644 ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala create mode 100644 ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala diff --git a/ivy/src/main/scala/sbt/Ivy.scala b/ivy/src/main/scala/sbt/Ivy.scala index e1dca53ae..2408992e6 100644 --- a/ivy/src/main/scala/sbt/Ivy.scala +++ b/ivy/src/main/scala/sbt/Ivy.scala @@ -99,6 +99,8 @@ final class IvySbt(val configuration: IvyConfiguration) def withIvy[T](log: MessageLogger)(f: Ivy => T): T = withDefaultLogger(log) { + // See #429 - We always insert a helper authenticator here which lets us get more useful authentication errors. + ivyint.ErrorMessageAuthenticator.install() ivy.pushContext() ivy.getLoggerEngine.pushLogger(log) try { f(ivy) } diff --git a/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala b/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala new file mode 100644 index 000000000..3d0d174d8 --- /dev/null +++ b/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala @@ -0,0 +1,128 @@ +package sbt +package ivyint + +import java.lang.reflect.Field +import java.lang.reflect.Method +import java.net.Authenticator +import java.net.PasswordAuthentication +import org.apache.ivy.util.Credentials +import org.apache.ivy.util.Message +import org.apache.ivy.util.url.IvyAuthenticator +import org.apache.ivy.util.url.CredentialsStore + +/** + * Helper to install an Authenticator that works with the IvyAuthenticator to provide better error messages when + * credentials don't line up. 
+ */ +object ErrorMessageAuthenticator { + private var securityWarningLogged = false + + private def originalAuthenticator: Option[Authenticator] = { + try { + val f = classOf[Authenticator].getDeclaredField("theAuthenticator"); + f.setAccessible(true); + Option(f.get(null).asInstanceOf[Authenticator]) + } catch { + // TODO - Catch more specific errors. + case t: Throwable => + Message.debug("Error occurred while getting the original authenticator: " + t.getMessage) + None + } + } + + private lazy val ivyOriginalField = { + val field = classOf[IvyAuthenticator].getDeclaredField("original") + field.setAccessible(true) + field + } + // Attempts to get the original authenticator form the ivy class or returns null. + private def installIntoIvy(ivy: IvyAuthenticator): Option[Authenticator] = { + // Here we install ourselves as the IvyAuthenticator's default so we get called AFTER Ivy has a chance to run. + def installIntoIvyImpl(original: Option[Authenticator]): Unit = { + val newOriginal = new ErrorMessageAuthenticator(original) + ivyOriginalField.set(ivy, newOriginal) + } + + try Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match { + case Some(alreadyThere: ErrorMessageAuthenticator) => // We're already installed, no need to do the work again. + case originalOpt => installIntoIvyImpl(originalOpt) + } catch { + case t: Throwable => + Message.debug("Error occurred will trying to install debug messages into Ivy Authentication" + t.getMessage) + } + Some(ivy) + } + + /** Installs the error message authenticator so we have nicer error messages when using java's URL for downloading. */ + def install() { + // Actually installs the error message authenticator. + def doInstall(original: Option[Authenticator]): Unit = + try Authenticator.setDefault(new ErrorMessageAuthenticator(original)) + catch { + case e: SecurityException if !securityWarningLogged => + securityWarningLogged = true; + Message.warn("Not enough permissions to set the ErorrMessageAuthenticator. 
" + + "Helpful debug messages disabled!"); + } + // We will try to use the original authenticator as backup authenticator. + // Since there is no getter available, so try to use some reflection to + // obtain it. If that doesn't work, assume there is no original authenticator + def doInstallIfIvy(original: Option[Authenticator]): Unit = + original match { + case Some(installed: ErrorMessageAuthenticator) => // Ignore, we're already installed + case Some(ivy: IvyAuthenticator) => installIntoIvy(ivy) + case original => doInstall(original) + } + doInstallIfIvy(originalAuthenticator) + } +} +/** + * An authenticator which just delegates to a previous authenticator and issues *nice* + * error messages on failure to find credentials. + * + * Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to + * install this one at some point and eventually ivy will capture it and use it. + */ +private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticator]) extends Authenticator { + + protected override def getPasswordAuthentication(): PasswordAuthentication = { + // We're guaranteed to only get here if Ivy's authentication fails + if (!isProxyAuthentication) { + val host = getRequestingHost + // TODO - levenshtein distance "did you mean" message. + Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].") + val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty) + if(!configuredRealms.isEmpty) { + Message.error(s" Is one of these realms mispelled for host [${host}]:") + configuredRealms foreach { realm => + Message.error(s" * ${realm}") + } + } + } + // TODO - Maybe we should work on a helpful proxy message... + + // TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it. + // or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be). 
+ + // Grabs the authentication that would have been provided had we not been installed... + def originalAuthentication: Option[PasswordAuthentication] = { + Authenticator.setDefault(original.getOrElse(null)) + try Option(Authenticator.requestPasswordAuthentication( + getRequestingHost, + getRequestingSite, + getRequestingPort, + getRequestingProtocol, + getRequestingPrompt, + getRequestingScheme)) + finally Authenticator.setDefault(this) + } + originalAuthentication.getOrElse(null) + } + + /** Returns true if this authentication if for a proxy and not for an HTTP server. + * We want to display different error messages, depending. + */ + private def isProxyAuthentication: Boolean = + getRequestorType == Authenticator.RequestorType.PROXY + +} \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala b/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala new file mode 100644 index 000000000..365ffe698 --- /dev/null +++ b/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala @@ -0,0 +1,63 @@ +package sbt +package ivyint + +import org.apache.ivy.util.url.CredentialsStore +import collection.JavaConverters._ + +/** A key used to store credentials in the ivy credentials store. */ +private[sbt] sealed trait CredentialKey +/** Represents a key in the ivy credentials store that is only specific to a host. */ +private[sbt] case class Host(name: String) extends CredentialKey +/** Represents a key in the ivy credentials store that is keyed to both a host and a "realm". */ +private[sbt] case class Realm(host: String, realm: String) extends CredentialKey + +/** + * Helper mechanism to improve credential related error messages. + * + * This evil class exposes to us the necessary information to warn on credential failure and offer + * spelling/typo suggestions. + */ +private[sbt] object IvyCredentialsLookup { + + /** Helper extractor for Ivy's key-value store of credentials. 
*/ + private object KeySplit { + def unapply(key: String): Option[(String,String)] = { + key.indexOf('@') match { + case -1 => None + case n => Some(key.take(n) -> key.drop(n+1)) + } + } + } + + /** Here we cheat runtime private so we can look in the credentials store. + * + * TODO - Don't bomb at class load time... + */ + private val credKeyringField = { + val tmp = classOf[CredentialsStore].getDeclaredField("KEYRING") + tmp.setAccessible(true) + tmp + } + + /** All the keys for credentials in the ivy configuration store. */ + def keyringKeys: Set[CredentialKey] = { + val map = credKeyringField.get(null).asInstanceOf[java.util.HashMap[String, Any]] + // make a clone of the set... + (map.keySet.asScala.map { + case KeySplit(realm, host) => Realm(host, realm) + case host => Host(host) + })(collection.breakOut) + } + + /** + * A mapping of host -> realms in the ivy credentials store. + */ + def realmsForHost: Map[String, Set[String]] = + keyringKeys collect { + case x: Realm => x + } groupBy { realm => + realm.host + } mapValues { realms => + realms map (_.realm) + } +} \ No newline at end of file From 5486daf7006462be13b82d6ae225e816f038c9ff Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 5 Mar 2014 19:14:21 -0500 Subject: [PATCH 083/148] Pull in @cunei's suggestion for a more complete Ivy->Maven dynamic revision convertor. We attempt to convert these constructs to maven: * 1.+ * ]1,2] * [2,3[ * 1+ - Albeit this one is a heuristic for accuracy. This should help ivy users which prefer the nicer 1.2.+ syntax. Also adds tests/improves exisitng tests. 
--- ivy/src/main/scala/sbt/MakePom.scala | 43 +++++++++++++------ ivy/src/test/scala/MakePomTest.scala | 29 +++++++++++++ .../make-pom/project/MakePomTest.scala | 3 +- 3 files changed, 62 insertions(+), 13 deletions(-) create mode 100644 ivy/src/test/scala/MakePomTest.scala diff --git a/ivy/src/main/scala/sbt/MakePom.scala b/ivy/src/main/scala/sbt/MakePom.scala index 4c4b7c8d0..16ec28333 100644 --- a/ivy/src/main/scala/sbt/MakePom.scala +++ b/ivy/src/main/scala/sbt/MakePom.scala @@ -200,20 +200,39 @@ class MakePom(val log: Logger) def makeDependencyVersion(revision: String): String = { - if(revision endsWith "+") try { - // TODO - this is the slowest possible implementation. - val beforePlus = revision.reverse.dropWhile(_ != '.').drop(1).reverse - val lastVersion = beforePlus.reverse.takeWhile(_ != '.').reverse + def plusRange(s:String, shift:Int = 0) = { + def pow(i:Int):Int = if (i>0) 10 * pow(i-1) else 1 + val (prefixVersion, lastVersion) = (s+"0"*shift).reverse.split("\\.",2) match { + case Array(revLast,revRest) => + ( revRest.reverse + ".", revLast.reverse ) + case Array(revLast) => ("", revLast.reverse) + } val lastVersionInt = lastVersion.toInt - val prefixVersion = beforePlus.reverse.dropWhile(_ != '.').drop(1).reverse - s"[$beforePlus, ${prefixVersion}.${lastVersionInt+1})" + s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt+pow(shift)})" + } + val startSym=Set(']','[','(') + val stopSym=Set(']','[',')') + try { + if (revision endsWith ".+") { + plusRange(revision.substring(0,revision.length-2)) + } else if (revision endsWith "+") { + val base = revision.take(revision.length-1) + // This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so + // we assume version ranges never go beyond 5 siginificant digits. 
+ (0 to 5).map(plusRange(base,_)).mkString(",") + } else if (startSym(revision(0)) && stopSym(revision(revision.length-1))) { + val start = revision(0) + val stop = revision(revision.length-1) + val mid = revision.substring(1,revision.length-1) + (if (start == ']') "(" else start) + mid + (if (stop == '[') ")" else stop) + } else revision } catch { - case e: NumberFormatException => - // TODO - if the version deosn't meet our expectations, maybe we just issue a hard - // error instead of softly ignoring the attempt to rewrite. - //sys.error(s"Could not fix version [$revision] into maven style version") - revision - } else revision + case e: NumberFormatException => + // TODO - if the version doesn't meet our expectations, maybe we just issue a hard + // error instead of softly ignoring the attempt to rewrite. + //sys.error(s"Could not fix version [$revision] into maven style version") + revision + } } @deprecated("No longer used and will be removed.", "0.12.1") diff --git a/ivy/src/test/scala/MakePomTest.scala b/ivy/src/test/scala/MakePomTest.scala new file mode 100644 index 000000000..1341b207d --- /dev/null +++ b/ivy/src/test/scala/MakePomTest.scala @@ -0,0 +1,29 @@ +package sbt + +import java.io.File +import org.specs2._ +import mutable.Specification + +object MakePomTest extends Specification +{ + val mp = new MakePom(ConsoleLogger()) + import mp.{makeDependencyVersion=>v} + "MakePom makeDependencyVersion" should { + "Handle .+ in versions" in { + v("1.+") must_== "[1,2)" + v("1.2.3.4.+") must_== "[1.2.3.4,1.2.3.5)" + v("12.31.42.+") must_== "[12.31.42,12.31.43)" + } + /* TODO - do we care about this case? 
+ * 1+ --> [1,2),[10,20),[100,200),[1000,2000),[10000,20000),[100000,200000) + */ + "Handle ]* bracket in version ranges" in { + v("]1,3]") must_== "(1,3]" + v("]1.1,1.3]") must_== "(1.1,1.3]" + } + "Handle *[ bracket in version ranges" in { + v("[1,3[") must_== "[1,3)" + v("[1.1,1.3[") must_== "[1.1,1.3)" + } + } +} diff --git a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala index 3f5f893fa..8c26070cd 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala +++ b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala @@ -40,7 +40,8 @@ object MakePomTest extends Build for { dep <- pomXml \ "dependencies" \ "dependency" if (dep \ "artifactId").text == "jsr305" - if (dep \ "version").text != "[1.3, 1.4)" + // TODO - Ignore space here. + if (dep \ "version").text != "[1.3,1.4)" } sys.error(s"Found dependency with invalid maven version: $dep") () } From 32d28ab85b2057d535cb992be33fe2b0da8253f1 Mon Sep 17 00:00:00 2001 From: Mark Harrah Date: Thu, 6 Mar 2014 07:39:06 -0500 Subject: [PATCH 084/148] Override certain SecurityManager methods to avoid filesystem performance hit. --- run/src/main/scala/sbt/TrapExit.scala | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/run/src/main/scala/sbt/TrapExit.scala b/run/src/main/scala/sbt/TrapExit.scala index c61df5ab7..cf9eb87bd 100644 --- a/run/src/main/scala/sbt/TrapExit.scala +++ b/run/src/main/scala/sbt/TrapExit.scala @@ -413,6 +413,15 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM private def isRealExit(element: StackTraceElement): Boolean = element.getClassName == "java.lang.Runtime" && element.getMethodName == "exit" + // These are overridden to do nothing because there is a substantial filesystem performance penalty + // when there is a SecurityManager defined. 
The default implementations of these construct a + // FilePermission, and its initialization involves canonicalization, which is expensive. + override def checkRead(file: String) {} + override def checkRead(file: String, context: AnyRef) {} + override def checkWrite(file: String) {} + override def checkDelete(file: String) {} + override def checkExec(cmd: String) {} + override def checkPermission(perm: Permission) { if(delegateManager ne null) From 0a8d844071b675c496649892e5c17cbfd7e215b5 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Thu, 6 Mar 2014 10:10:38 -0500 Subject: [PATCH 085/148] Fix merge conflicts in pulling doc fixes from 0.13.1 into 0.13 branch. --- src/sphinx/Community/Community-Plugins.rst | 39 ++++++++++--------- src/sphinx/Detailed-Topics/Advanced-Index.rst | 2 + src/sphinx/Detailed-Topics/Launcher.rst | 2 +- src/sphinx/Howto/generatefiles.rst | 6 +-- testing/agent/src/main/java/sbt/ForkMain.java | 1 - 5 files changed, 26 insertions(+), 24 deletions(-) diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index d59374506..4c40ce10a 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -86,6 +86,25 @@ One jar plugins - sbt-onejar (Packages your project using One-JARâ„¢): https://github.com/sbt/sbt-onejar +Frontend development plugins +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- coffeescripted-sbt: https://github.com/softprops/coffeescripted-sbt +- less-sbt (for less-1.3.0): https://github.com/softprops/less-sbt +- sbt-less-plugin (it uses less-1.3.0): + https://github.com/btd/sbt-less-plugin +- sbt-emberjs: https://github.com/stefri/sbt-emberjs +- sbt-closure: https://github.com/eltimn/sbt-closure +- sbt-yui-compressor: https://github.com/indrajitr/sbt-yui-compressor +- sbt-requirejs: https://github.com/scalatra/sbt-requirejs +- sbt-vaadin-plugin: https://github.com/henrikerola/sbt-vaadin-plugin + +Game development plugins 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- sbt-lwjgl-plugin (Light Weight Java Game Library): https://github.com/philcali/sbt-lwjgl-plugin +- sbt-scage-plugin (Scala Game Engine): https://github.com/mvallerie/sbt-scage-plugin + Release plugins ~~~~~~~~~~~~~~~ @@ -114,24 +133,6 @@ Release plugins - xitrum-package (collects dependency .jar files for standalone Scala programs): https://github.com/ngocdaothanh/xitrum-package -Frontend development plugins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- coffeescripted-sbt: https://github.com/softprops/coffeescripted-sbt -- less-sbt (for less-1.3.0): https://github.com/softprops/less-sbt -- sbt-less-plugin (it uses less-1.3.0): - https://github.com/btd/sbt-less-plugin -- sbt-emberjs: https://github.com/stefri/sbt-emberjs -- sbt-closure: https://github.com/eltimn/sbt-closure -- sbt-yui-compressor: https://github.com/indrajitr/sbt-yui-compressor -- sbt-requirejs: https://github.com/scalatra/sbt-requirejs -- sbt-vaadin-plugin: https://github.com/henrikerola/sbt-vaadin-plugin - -Game development plugins -~~~~~~~~~~~~~~~~~~~~~~~~ - -- sbt-lwjgl-plugin (Light Weight Java Game Library): https://github.com/philcali/sbt-lwjgl-plugin -- sbt-scage-plugin (Scala Game Engine): https://github.com/mvallerie/sbt-scage-plugin System plugins ~~~~~~~~~~~~~~ @@ -253,7 +254,7 @@ Utility plugins Code coverage plugins ~~~~~~~~~~~~~~~~~~~~~ -- sbt-scct: https://github.com/sqality/sbt-scct +- sbt-scct: https://github.com/dvc94ch/sbt-scct - sbt-scoverage: https://github.com/scoverage/sbt-scoverage - jacoco4sbt: https://github.com/sbt/jacoco4sbt - xsbt-coveralls-plugin: https://github.com/theon/xsbt-coveralls-plugin diff --git a/src/sphinx/Detailed-Topics/Advanced-Index.rst b/src/sphinx/Detailed-Topics/Advanced-Index.rst index 884a96292..95d00810e 100644 --- a/src/sphinx/Detailed-Topics/Advanced-Index.rst +++ b/src/sphinx/Detailed-Topics/Advanced-Index.rst @@ -9,6 +9,8 @@ Before reading anything in here, you will need the information in the .. 
toctree:: :maxdepth: 2 + + Launcher Scripts TaskInputs Understanding-incremental-recompilation diff --git a/src/sphinx/Detailed-Topics/Launcher.rst b/src/sphinx/Detailed-Topics/Launcher.rst index eced0102b..3f3a78836 100644 --- a/src/sphinx/Detailed-Topics/Launcher.rst +++ b/src/sphinx/Detailed-Topics/Launcher.rst @@ -2,4 +2,4 @@ Sbt Launcher ============ -This docuemntation has been moved to :doc:`The Launcher section `. +This documentation has been moved to :doc:`The Launcher section `. \ No newline at end of file diff --git a/src/sphinx/Howto/generatefiles.rst b/src/sphinx/Howto/generatefiles.rst index a8a1c3ec3..f90806598 100644 --- a/src/sphinx/Howto/generatefiles.rst +++ b/src/sphinx/Howto/generatefiles.rst @@ -29,7 +29,7 @@ As a specific example, the following generates a hello world source file: :: - sourceGenerators in Compile += Def.task { + sourceGenerators in Compile <+= Def.task { val file = (sourceManaged in Compile).value / "demo" / "Test.scala" IO.write(file, """object Test extends App { println("Hi") }""") Seq(file) @@ -44,7 +44,7 @@ By default, generated sources are not included in the packaged source artifact. :title: Generate resources :type: setting - resourceGenerators in Compile += + resourceGenerators in Compile += .taskValue A resource generation task should generate resources in a subdirectory of :key:`resourceManaged` and return a sequence of files generated. The key to add the task to is called :key:`resourceGenerators`. Because we want to add the unexecuted task, we use `taskValue` instead of the usual `value`. It should be scoped according to whether the generated files are main (`Compile`) or test (`Test`) resources. 
This basic structure looks like: @@ -56,7 +56,7 @@ For example, assuming a method `def makeSomeResources(base: File): Seq[File]`, :: - resourceGenerators in Compile += Def.task { + resourceGenerators in Compile <+= Def.task { makeSomeResources( (resourceManaged in Compile).value / "demo") }.taskValue diff --git a/testing/agent/src/main/java/sbt/ForkMain.java b/testing/agent/src/main/java/sbt/ForkMain.java index a56783fcd..32cbb62ef 100755 --- a/testing/agent/src/main/java/sbt/ForkMain.java +++ b/testing/agent/src/main/java/sbt/ForkMain.java @@ -115,7 +115,6 @@ public class ForkMain { final ObjectOutputStream os = new ObjectOutputStream(socket.getOutputStream()); // Must flush the header that the constructor writes, otherwise the ObjectInputStream on the other end may block indefinitely os.flush(); - try { new Run().run(is, os); } finally { From 3017bfcd07466a2c3aa9bc5fe4760929db8ae0ed Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 7 Mar 2014 17:48:31 +0100 Subject: [PATCH 086/148] Fix task macro's handling of Symbol owners in .value The qualifier of the `.value` call may contain `DefTree`s (e.g. vals, defs) or `Function` trees. When we snip them out of the tree and graft them into a new context, we must also call `changeOwner`, so that the symbol owner structure and the tree structure are coherent. Failure to do so resulted in a crash in the compiler backend. 
Fixes #1150 --- .../sbt-test/project/setting-macro/build.sbt | 9 ++++++++ .../main/scala/sbt/appmacro/ContextUtil.scala | 22 ++++++++++--------- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/sbt/src/sbt-test/project/setting-macro/build.sbt b/sbt/src/sbt-test/project/setting-macro/build.sbt index d48af8bc9..add6ef28e 100644 --- a/sbt/src/sbt-test/project/setting-macro/build.sbt +++ b/sbt/src/sbt-test/project/setting-macro/build.sbt @@ -15,3 +15,12 @@ demo := { val (n, s) = parser.parsed s * n } + +// Tests for correct Symbol owner structure in the lifted qualifiers of +// the `.value` macro within a task macro. (#1150) +val touchIfChanged = taskKey[Unit]("") + +touchIfChanged := { + val foo = (sourceDirectory in Compile).apply(base => base).value.get + () +} diff --git a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala index c0c849fab..389fd33f8 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala @@ -226,17 +226,19 @@ final class ContextUtil[C <: Context](val ctx: C) object appTransformer extends Transformer { override def transform(tree: Tree): Tree = - tree match - { - case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => subWrapper(nme.decoded, targ.tpe, qual, tree) match { - case Converted.Success(t, finalTx) => finalTx(t) - case Converted.Failure(p,m) => ctx.abort(p, m) - case _: Converted.NotApplicable[_] => super.transform(tree) - } + tree match { + case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => + changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150 + subWrapper(nme.decoded, targ.tpe, qual, tree) match { + case Converted.Success(t, finalTx) => finalTx(t) + case Converted.Failure(p,m) => ctx.abort(p, m) + case _: Converted.NotApplicable[_] => super.transform(tree) + } case _ => super.transform(tree) } } - 
- appTransformer.transform(t) + appTransformer.atOwner(initialOwner) { + appTransformer.transform(t) + } } -} \ No newline at end of file +} From a4b288f35be527d7d10d6323d665ad9cd8719cf0 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 7 Mar 2014 22:25:29 -0500 Subject: [PATCH 087/148] Revert "Fix task macro's handling of Symbol owners in .value" This reverts commit 3017bfcd07466a2c3aa9bc5fe4760929db8ae0ed. This was causing sbt to be unable to compile. Reverting temporarily until we have a shot at a full fix. --- .../sbt-test/project/setting-macro/build.sbt | 9 -------- .../main/scala/sbt/appmacro/ContextUtil.scala | 22 +++++++++---------- 2 files changed, 10 insertions(+), 21 deletions(-) diff --git a/sbt/src/sbt-test/project/setting-macro/build.sbt b/sbt/src/sbt-test/project/setting-macro/build.sbt index add6ef28e..d48af8bc9 100644 --- a/sbt/src/sbt-test/project/setting-macro/build.sbt +++ b/sbt/src/sbt-test/project/setting-macro/build.sbt @@ -15,12 +15,3 @@ demo := { val (n, s) = parser.parsed s * n } - -// Tests for correct Symbol owner structure in the lifted qualifiers of -// the `.value` macro within a task macro. 
(#1150) -val touchIfChanged = taskKey[Unit]("") - -touchIfChanged := { - val foo = (sourceDirectory in Compile).apply(base => base).value.get - () -} diff --git a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala index 389fd33f8..c0c849fab 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala @@ -226,19 +226,17 @@ final class ContextUtil[C <: Context](val ctx: C) object appTransformer extends Transformer { override def transform(tree: Tree): Tree = - tree match { - case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => - changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150 - subWrapper(nme.decoded, targ.tpe, qual, tree) match { - case Converted.Success(t, finalTx) => finalTx(t) - case Converted.Failure(p,m) => ctx.abort(p, m) - case _: Converted.NotApplicable[_] => super.transform(tree) - } + tree match + { + case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => subWrapper(nme.decoded, targ.tpe, qual, tree) match { + case Converted.Success(t, finalTx) => finalTx(t) + case Converted.Failure(p,m) => ctx.abort(p, m) + case _: Converted.NotApplicable[_] => super.transform(tree) + } case _ => super.transform(tree) } } - appTransformer.atOwner(initialOwner) { - appTransformer.transform(t) - } + + appTransformer.transform(t) } -} +} \ No newline at end of file From dff52ce11e33ce3daab74c5799d4b2ec08a70024 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 8 Mar 2014 09:30:23 -0500 Subject: [PATCH 088/148] Removing deprecated "re-publish release artifacts" behavior from tests. In sbt 1.0 republishing released artifacts will be removed, causing these tests to fail. We migrate to SNAPSHOT version now, to prevent suprises. 
--- .../cache-resolver/changes/def/Build.scala | 2 +- .../dependency-management/cache-resolver/changes/use/build.sbt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala index d9aec4fe0..6ac5b8a06 100644 --- a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala +++ b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala @@ -5,7 +5,7 @@ object B extends Build { override def settings = super.settings ++ Seq( organization := "org.example", - version := "2.0" + version := "2.0-SNAPSHOT" ) lazy val root = proj("root", ".") aggregate(a,b) diff --git a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt index b458ab994..943590924 100644 --- a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt +++ b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt @@ -4,6 +4,6 @@ organization := "org.example" version := "1.0" -libraryDependencies += "org.example" % "b" % "2.0" +libraryDependencies += "org.example" % "b" % "2.0-SNAPSHOT" ivyPaths <<= ivyPaths in ThisBuild \ No newline at end of file From 56232e20254d542f48558cc199401d14562ba266 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Thu, 20 Feb 2014 20:11:17 -0500 Subject: [PATCH 089/148] First attempt at travis-ci integration for sbt. 
--- .travis.yml | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..98060f5ae --- /dev/null +++ b/.travis.yml @@ -0,0 +1,9 @@ +language: scala +script: + - sbt launcher/test actions/test api/test apply-macro/test cache/test classfile/test classpath/test collections/test command/test compile/test completion/test contorl/test cross/test incremental-compiler/test io/test ivy/test logic/test main/test main-settings/test persist/test relation/test run/test tasks/test test-agent/test tracking/test testing/test + - sbt scripted +jdk: + - openjdk6 +notifications: + email: + - qbranch@typesafe.com From c09c78a9b5cd48463b4d92cb593b817bf27e8cab Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 8 Mar 2014 10:22:03 -0500 Subject: [PATCH 090/148] For now only run integration tests from travis (OoM errors). --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 98060f5ae..57126e4b7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,5 @@ language: scala script: - - sbt launcher/test actions/test api/test apply-macro/test cache/test classfile/test classpath/test collections/test command/test compile/test completion/test contorl/test cross/test incremental-compiler/test io/test ivy/test logic/test main/test main-settings/test persist/test relation/test run/test tasks/test test-agent/test tracking/test testing/test - sbt scripted jdk: - openjdk6 From 4a7f5aa92f280c7bd6bc8aeb48785825d01cc477 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 8 Mar 2014 10:50:33 -0500 Subject: [PATCH 091/148] Split scripted tests into matrix so we run on parallel virtual machines. 
--- .travis.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 57126e4b7..09b114161 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,17 @@ language: scala script: - - sbt scripted + - sbt "scripted $SCRIPTED_TEST" +env: + - SCRIPTED_TEST=actions/* + - SCRIPTED_TEST=api/* + - SCRIPTED_TEST=compiler-project/* + - SCRIPTED_TEST=dependency-management/* + - SCRIPTED_TEST=java/* + - SCRIPTED_TEST=package/* + - SCRIPTED_TEST=reporter/* + - SCRIPTED_TEST=run/* + - SCRIPTED_TEST=source-dependencies/* + - SCRIPTED_TEST=tests/* jdk: - openjdk6 notifications: From 875562f3dbfef38fd6b173705bba99b33eb5b3e0 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sat, 8 Mar 2014 00:24:15 -0500 Subject: [PATCH 092/148] Don't fail publishing on overwrite, but issue a warning. Workaround for #1156. * Creates a new FileRepository that will stil allow local files to be transfered if overwrite is true (non-snapshot module), but will issue a warning about deprecated behavior. * Ensure warning is long enough to annoy people into asking what it's about. --- ivy/src/main/scala/sbt/ConvertResolver.scala | 22 ++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/ivy/src/main/scala/sbt/ConvertResolver.scala b/ivy/src/main/scala/sbt/ConvertResolver.scala index 58ba6d4ac..a93a57011 100644 --- a/ivy/src/main/scala/sbt/ConvertResolver.scala +++ b/ivy/src/main/scala/sbt/ConvertResolver.scala @@ -61,7 +61,13 @@ private object ConvertResolver } case repo: FileRepository => { - val resolver = new FileSystemResolver with DescriptorRequired + val resolver = new FileSystemResolver with DescriptorRequired { + // Workaround for #1156 + // Temporarily in sbt 0.13.x we deprecate overwriting + // in local files for non-changing revisions. + // This will be fully enforced in sbt 1.0. 
+ setRepository(new WarnOnOverwriteFileRepo()) + } resolver.setName(repo.name) initializePatterns(resolver, repo.patterns, settings) import repo.configuration.{isLocal, isTransactional} @@ -135,7 +141,7 @@ private object ConvertResolver /** A custom Ivy URLRepository that returns FileResources for file URLs. * This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache. */ private[this] final class LocalIfFileRepo extends URLRepo { - private[this] val repo = new FileRepo + private[this] val repo = new WarnOnOverwriteFileRepo() override def getResource(source: String) = { val url = new URL(source) if(url.getProtocol == IO.FileScheme) @@ -144,4 +150,16 @@ private object ConvertResolver super.getResource(source) } } + + private[this] final class WarnOnOverwriteFileRepo extends FileRepo() { + override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = { + try super.put(source, destination, overwrite) + catch { + case e: java.io.IOException if e.getMessage.contains("destination already exists") => + import org.apache.ivy.util.Message + Message.warn(s"Attempting to overwrite $destination\n\tThis usage is deprecated and will be removed in sbt 1.0.") + super.put(source, destination, true) + } + } + } } From dbb47b3ce822ff7ec25881dadd71a3b29e202273 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 7 Mar 2014 13:31:02 -0500 Subject: [PATCH 093/148] Updates `last` and `export` commands to read from correct key. Fixes #1155. It seems that somehow during the 0.13.{1 -> 2 } transition, we stopped pointing at the correct key for TaskKeys (either that or task streams are now all associated with the `streams` key). I think this may have been inadvertently caused from several refactorings to enable greater control over the execution of tasks. This points the `last*` methods at the correct key for tasks, fixing both `last ` and `export ` commands. 
--- main/src/main/scala/sbt/Output.scala | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/main/src/main/scala/sbt/Output.scala b/main/src/main/scala/sbt/Output.scala index 06b1fda4d..ed3fd3714 100644 --- a/main/src/main/scala/sbt/Output.scala +++ b/main/src/main/scala/sbt/Output.scala @@ -20,7 +20,7 @@ object Output def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit)(implicit display: Show[ScopedKey[_]]): Unit = last(keys, streams, printLines, None)(display) - def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit, sid: Option[String])(implicit display: Show[ScopedKey[_]]): Unit = + def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit, sid: Option[String])(implicit display: Show[ScopedKey[_]]): Unit = printLines( flatLines(lastLines(keys, streams, sid))(idFun) ) def last(file: File, printLines: Seq[String] => Unit, tailDelim: String = DefaultTail): Unit = @@ -55,7 +55,17 @@ object Output @deprecated("Explicitly provide None for the stream ID.", "0.13.0") def lastLines(key: ScopedKey[_], mgr: Streams): Seq[String] = lastLines(key, mgr, None) - def lastLines(key: ScopedKey[_], mgr: Streams, sid: Option[String]): Seq[String] = mgr.use(key) { s => IO.readLines(s.readText( Project.fillTaskAxis(key), sid )) } + def lastLines(key: ScopedKey[_], mgr: Streams, sid: Option[String]): Seq[String] = + mgr.use(key) { s => + // Workaround for #1155 - Keys.streams are always scoped by the task they're included in + // but are keyed by the Keys.streams key. I think this isn't actually a workaround, but + // is how things are expected to work now. + // You can see where streams are injected using their own key scope in + // EvaluateTask.injectStreams. 
+ val streamScopedKey: ScopedKey[_] = ScopedKey(Project.fillTaskAxis(key).scope, Keys.streams.key) + val tmp = s.readText( streamScopedKey, sid ) + IO.readLines(tmp) + } def tailLines(file: File, tailDelim: String): Seq[String] = headLines(IO.readLines(file).reverse, tailDelim).reverse From 563415aa6a91be23f2bc4c5886552f2d8ca5371c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 7 Mar 2014 17:48:31 +0100 Subject: [PATCH 094/148] Fix task macro's handling of Symbol owners in .value The qualifier of the `.value` call may contain `DefTree`s (e.g. vals, defs) or `Function` trees. When we snip them out of the tree and graft them into a new context, we must also call `changeOwner`, so that the symbol owner structure and the tree structure are coherent. Failure to do so resulted in a crash in the compiler backend. Fixes #1150 --- .../sbt-test/project/setting-macro/build.sbt | 12 ++++++++++ .../main/scala/sbt/appmacro/ContextUtil.scala | 23 +++++++++++-------- 2 files changed, 25 insertions(+), 10 deletions(-) diff --git a/sbt/src/sbt-test/project/setting-macro/build.sbt b/sbt/src/sbt-test/project/setting-macro/build.sbt index d48af8bc9..22b5e4e3c 100644 --- a/sbt/src/sbt-test/project/setting-macro/build.sbt +++ b/sbt/src/sbt-test/project/setting-macro/build.sbt @@ -15,3 +15,15 @@ demo := { val (n, s) = parser.parsed s * n } + +// Tests for correct Symbol owner structure in the lifted qualifiers of +// the `.value` macro within a task macro. 
(#1150) +val key1 = taskKey[Unit]("") + +key1 := { + val foo = (sourceDirectory in Compile).apply(base => base).value.get + testFrameworks.value.flatMap(f => + None.map(_ => f) + ) + () +} diff --git a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala index c0c849fab..fe1baa696 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala @@ -226,17 +226,20 @@ final class ContextUtil[C <: Context](val ctx: C) object appTransformer extends Transformer { override def transform(tree: Tree): Tree = - tree match - { - case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => subWrapper(nme.decoded, targ.tpe, qual, tree) match { - case Converted.Success(t, finalTx) => finalTx(t) - case Converted.Failure(p,m) => ctx.abort(p, m) - case _: Converted.NotApplicable[_] => super.transform(tree) - } + tree match { + case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => + subWrapper(nme.decoded, targ.tpe, qual, tree) match { + case Converted.Success(t, finalTx) => + changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150 + finalTx(t) + case Converted.Failure(p,m) => ctx.abort(p, m) + case _: Converted.NotApplicable[_] => super.transform(tree) + } case _ => super.transform(tree) } } - - appTransformer.transform(t) + appTransformer.atOwner(initialOwner) { + appTransformer.transform(t) + } } -} \ No newline at end of file +} From af7eca108055b5766b2a7cbc354a86de6cac0aaf Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 7 Feb 2014 18:05:00 +0100 Subject: [PATCH 095/148] Fix NPE in task macro accessing q"{...}".symbol.pos We shouldn't assume that the qualifier of a `Select` is a `SymTree`; it may be a `Block`. One place that happens is after the transformation of named/defaults applications. That causes the reported `NullPointerException'. 
In any case, using `qual.symbol.pos` makes no sense here; it yields the position of the definitions *referred to* by `qual`, not the position of `qual` itself. Both problems are easily fixed: use `qual.pos` instead. Fixes #1107 --- sbt/src/sbt-test/project/setting-macro/build.sbt | 5 +++++ util/appmacro/src/main/scala/sbt/appmacro/Instance.scala | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/sbt/src/sbt-test/project/setting-macro/build.sbt b/sbt/src/sbt-test/project/setting-macro/build.sbt index 22b5e4e3c..c07c3c0ad 100644 --- a/sbt/src/sbt-test/project/setting-macro/build.sbt +++ b/sbt/src/sbt-test/project/setting-macro/build.sbt @@ -27,3 +27,8 @@ key1 := { ) () } + +// https://github.com/sbt/sbt/issues/1107 +def appcfgTask(a: String, b: String) = Def.task("") + +TaskKey[Unit]("test") := appcfgTask(b = "", a = "").value diff --git a/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala b/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala index 0de166b67..043ad8731 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala @@ -167,7 +167,7 @@ object Instance def addType(tpe: Type, qual: Tree, selection: Tree): Tree = { qual.foreach(checkQual) - val vd = util.freshValDef(tpe, qual.symbol.pos, functionSym) + val vd = util.freshValDef(tpe, qual.pos, functionSym) inputs ::= new Input(tpe, qual, vd) util.refVal(selection, vd) } From 7f8d21c2f1a54d9180eecc82baaacedbfd7b7790 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 5 Mar 2014 17:56:34 -0500 Subject: [PATCH 096/148] Remove Natures from AutoPlugins feature. * remove the notion of Natures from AutoPlugins. * Update tests to use AutoPlugins with no selection for inclusion. * Rename existing Natures code to Plugins/PluginsDebug.
--- main/src/main/scala/sbt/BuildStructure.scala | 6 +- main/src/main/scala/sbt/Load.scala | 2 +- main/src/main/scala/sbt/Main.scala | 6 +- .../sbt/{Natures.scala => Plugins.scala} | 95 ++++++++-------- ...{NaturesDebug.scala => PluginsDebug.scala} | 102 +++++++++--------- main/src/main/scala/sbt/Project.scala | 50 ++++----- .../sbt-test/project/auto-plugins/build.sbt | 6 +- .../project/auto-plugins/project/Q.scala | 13 ++- .../binary-plugin/changes/define/A.scala | 6 +- 9 files changed, 140 insertions(+), 146 deletions(-) rename main/src/main/scala/sbt/{Natures.scala => Plugins.scala} (69%) rename main/src/main/scala/sbt/{NaturesDebug.scala => PluginsDebug.scala} (82%) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 1fddbf2a0..d63752d87 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -71,7 +71,7 @@ final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, Resolv * In addition to auto-discovered [[Build]]s, this includes any auto-generated default [[Build]]s. * @param projects The list of all [[Project]]s from all [[Build]]s. * These projects have not yet been resolved, but they have had auto-plugins applied. -* In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `natures` +* In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `plugins` * and their `settings` and `configurations` updated as appropriate. * @param buildNames No longer used and will be deprecated once feasible. */ @@ -99,8 +99,8 @@ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoImport /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. 
*/ lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ autoImports.names) - /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] given the defined [[Natures]] for a [[Project]]. */ - lazy val compileNatures: Natures => Seq[AutoPlugin] = Natures.compile(autoPlugins.values.toList) + /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ + lazy val compilePlugins: Plugins => Seq[AutoPlugin] = Plugins.compile(autoPlugins.values.toList) } /** The built and loaded build definition project. diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index 2a00e7329..d6c9eff85 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -463,7 +463,7 @@ object Load loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins) def loadForProjects = newProjects map { project => val autoPlugins = - try plugins.detected.compileNatures(project.natures) + try plugins.detected.compilePlugins(project.plugins) catch { case e: AutoPluginException => throw translateAutoPluginException(e, project) } val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins) diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index ad5291ec2..c44bd5a1b 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -376,16 +376,16 @@ object BuiltinCommands else Help.empty def plugins = Command.command(PluginsCommand, pluginsBrief, pluginsDetailed) { s => - val helpString = NaturesDebug.helpAll(s) + val helpString = PluginsDebug.helpAll(s) System.out.println(helpString) s } val pluginParser: State => Parser[AutoPlugin] = s => { - val autoPlugins: Map[String, AutoPlugin] = NaturesDebug.autoPluginMap(s) + val autoPlugins: Map[String, AutoPlugin] = PluginsDebug.autoPluginMap(s) token(Space) ~> 
Act.knownIDParser(autoPlugins, "plugin") } def plugin = Command(PluginCommand)(pluginParser) { (s, plugin) => - val helpString = NaturesDebug.help(plugin, s) + val helpString = PluginsDebug.help(plugin, s) System.out.println(helpString) s } diff --git a/main/src/main/scala/sbt/Natures.scala b/main/src/main/scala/sbt/Plugins.scala similarity index 69% rename from main/src/main/scala/sbt/Natures.scala rename to main/src/main/scala/sbt/Plugins.scala index 06b0a0e2f..5e814d082 100644 --- a/main/src/main/scala/sbt/Natures.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -8,7 +8,7 @@ TODO: import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated} import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} import Def.Setting - import Natures._ + import Plugins._ /** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ trait AutoImport @@ -18,11 +18,11 @@ An AutoPlugin defines a group of settings and the conditions where the settings The `select` method defines the conditions and a method like `projectSettings` defines the settings to add. Steps for plugin authors: -1. Determine the [[Nature]]s that, when present (or absent), activate the AutoPlugin. +1. Determine the [[AutoPlugins]]s that, when present (or absent), activate the AutoPlugin. 2. Determine the settings/configurations to automatically inject when activated. For example, the following will automatically add the settings in `projectSettings` - to a project that has both the `Web` and `Javascript` natures enabled. + to a project that has both the `Web` and `Javascript` plugins enabled. object MyPlugin extends AutoPlugin { def select = Web && Javascript @@ -30,28 +30,28 @@ For example, the following will automatically add the settings in `projectSettin } Steps for users: -1. Add dependencies on plugins as usual with addSbtPlugin -2. 
Add Natures to Projects, which will automatically select the plugin settings to add for those Projects. +1. Add dependencies on plugins in `project/plugins.sbt` as usual with `addSbtPlugin` +2. Add key plugins to Projects, which will automatically select the plugin + dependent plugin settings to add for those Projects. 3. Exclude plugins, if desired. -For example, given natures Web and Javascript (perhaps provided by plugins added with addSbtPlugin), +For example, given plugins Web and Javascript (perhaps provided by plugins added with addSbtPlugin), - .natures( Web && Javascript ) + .plugins( Web && Javascript ) will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines - .natures( Web && Javascript && !MyPlugin) + .plugins( Web && Javascript && !MyPlugin) then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added. */ -abstract class AutoPlugin extends Natures.Basic +abstract class AutoPlugin extends Plugins.Basic { - /** This AutoPlugin will be activated for a project when the [[Natures]] matcher returned by this method matches that project's natures - * AND the user does not explicitly exclude the Nature returned by `provides`. + /** This AutoPlugin will be activated for a project when the [[Plugins]] matcher returned by this method matches that project's plugins + * AND the user does not explicitly exclude the Plugin returned by `provides`. * * For example, if this method returns `Web && Javascript`, this plugin instance will only be added - * if the `Web` and `Javascript` natures are enabled. */ - def select: Natures + * if the `Web` and `Javascript` plugins are enabled. 
*/ + def select: Plugins val label: String = getClass.getName.stripSuffix("$") @@ -84,26 +84,19 @@ final class AutoPluginException private(val message: String, val origin: Option[ object AutoPluginException { def apply(msg: String): AutoPluginException = new AutoPluginException(msg, None) - def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Natures.translateMessage(origin), Some(origin)) + def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Plugins.translateMessage(origin), Some(origin)) } -/** An expression that matches `Nature`s. */ -sealed trait Natures { - def && (o: Basic): Natures +/** An expression that matches `AutoPlugin`s. */ +sealed trait Plugins { + def && (o: Basic): Plugins } -/** Represents a feature or conceptual group of settings. -* `label` is the unique ID for this nature. */ -final case class Nature(label: String) extends Basic { - /** Constructs a Natures matcher that excludes this Nature. */ - override def toString = label -} - -object Natures +object Plugins { - /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[Nature]]s. + /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[AutoPlugin]]s. 
* The [[AutoPlugin]]s are topologically sorted so that a selected [[AutoPlugin]] comes before its selecting [[AutoPlugin]].*/ - def compile(defined: List[AutoPlugin]): Natures => Seq[AutoPlugin] = + def compile(defined: List[AutoPlugin]): Plugins => Seq[AutoPlugin] = if(defined.isEmpty) Types.const(Nil) else @@ -112,8 +105,8 @@ object Natures val byAtomMap = byAtom.toMap if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) val clauses = Clauses( defined.map(d => asClause(d)) ) - requestedNatures => - Logic.reduce(clauses, flattenConvert(requestedNatures).toSet) match { + requestedPlugins => + Logic.reduce(clauses, flattenConvert(requestedPlugins).toSet) match { case Left(problem) => throw AutoPluginException(problem) case Right(results) => // results includes the originally requested (positive) atoms, @@ -123,8 +116,8 @@ object Natures } private[sbt] def translateMessage(e: LogicException) = e match { - case ic: InitialContradictions => s"Contradiction in selected natures. These natures were both included and excluded: ${literalsString(ic.literals.toSeq)}" - case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required natures are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" + case ic: InitialContradictions => s"Contradiction in selected plugins. These plguins were both included and excluded: ${literalsString(ic.literals.toSeq)}" + case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. 
The problematic cycle is: ${literalsString(cn.cycle)}" } private[this] def literalsString(lits: Seq[Literal]): String = @@ -135,34 +128,36 @@ object Natures val dupStrings = for( (atom, dups) <- dupsByAtom if dups.size > 1 ) yield s"${atom.label} by ${dups.mkString(", ")}" val (ns, nl) = if(dupStrings.size > 1) ("s", "\n\t") else ("", " ") - val message = s"Nature$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" + val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" throw AutoPluginException(message) } - /** [[Natures]] instance that doesn't require any [[Nature]]s. */ - def empty: Natures = Empty - private[sbt] final object Empty extends Natures { - def &&(o: Basic): Natures = o + /** [[Plugins]] instance that doesn't require any [[Plugins]]s. */ + def empty: Plugins = Empty + private[sbt] final object Empty extends Plugins { + def &&(o: Basic): Plugins = o override def toString = "" } - /** An included or excluded Nature. TODO: better name than Basic. */ - sealed abstract class Basic extends Natures { - def &&(o: Basic): Natures = And(this :: o :: Nil) + /** An included or excluded Nature/Plugin. TODO: better name than Basic. Also, can we dump + * this class. 
+ */ + sealed abstract class Basic extends Plugins { + def &&(o: Basic): Plugins = And(this :: o :: Nil) } private[sbt] final case class Exclude(n: AutoPlugin) extends Basic { override def toString = s"!$n" } - private[sbt] final case class And(natures: List[Basic]) extends Natures { - def &&(o: Basic): Natures = And(o :: natures) - override def toString = natures.mkString(", ") + private[sbt] final case class And(plugins: List[Basic]) extends Plugins { + def &&(o: Basic): Plugins = And(o :: plugins) + override def toString = plugins.mkString(", ") } - private[sbt] def and(a: Natures, b: Natures) = b match { + private[sbt] def and(a: Plugins, b: Plugins) = b match { case Empty => a case And(ns) => (a /: ns)(_ && _) case b: Basic => a && b } - private[sbt] def remove(a: Natures, del: Set[Basic]): Natures = a match { + private[sbt] def remove(a: Plugins, del: Set[Basic]): Plugins = a match { case b: Basic => if(del(b)) Empty else b case Empty => Empty case And(ns) => @@ -170,38 +165,36 @@ object Natures if(removed.isEmpty) Empty else And(removed) } - /** Defines a clause for `ap` such that the [[Nature]] provided by `ap` is the head and the selector for `ap` is the body. */ + /** Defines a clause for `ap` such that the [[AutPlugin]] provided by `ap` is the head and the selector for `ap` is the body. 
*/ private[sbt] def asClause(ap: AutoPlugin): Clause = Clause( convert(ap.select), Set(Atom(ap.label)) ) - private[this] def flattenConvert(n: Natures): Seq[Literal] = n match { + private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match { case And(ns) => convertAll(ns) case b: Basic => convertBasic(b) :: Nil case Empty => Nil } - private[sbt] def flatten(n: Natures): Seq[Basic] = n match { + private[sbt] def flatten(n: Plugins): Seq[Basic] = n match { case And(ns) => ns case b: Basic => b :: Nil case Empty => Nil } - private[this] def convert(n: Natures): Formula = n match { + private[this] def convert(n: Plugins): Formula = n match { case And(ns) => convertAll(ns).reduce[Formula](_ && _) case b: Basic => convertBasic(b) case Empty => Formula.True } private[this] def convertBasic(b: Basic): Literal = b match { case Exclude(n) => !convertBasic(n) - case Nature(s) => Atom(s) case a: AutoPlugin => Atom(a.label) } private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic /** True if the select clause `n` is satisifed by `model`. 
*/ - def satisfied(n: Natures, model: Set[AutoPlugin], natures: Set[Nature]): Boolean = + def satisfied(n: Plugins, model: Set[AutoPlugin]): Boolean = flatten(n) forall { case Exclude(a) => !model(a) - case n: Nature => natures(n) case ap: AutoPlugin => model(ap) } } \ No newline at end of file diff --git a/main/src/main/scala/sbt/NaturesDebug.scala b/main/src/main/scala/sbt/PluginsDebug.scala similarity index 82% rename from main/src/main/scala/sbt/NaturesDebug.scala rename to main/src/main/scala/sbt/PluginsDebug.scala index d0e27a9dd..e130b2c8b 100644 --- a/main/src/main/scala/sbt/NaturesDebug.scala +++ b/main/src/main/scala/sbt/PluginsDebug.scala @@ -1,11 +1,11 @@ package sbt import Def.Setting - import Natures._ - import NaturesDebug._ + import Plugins._ + import PluginsDebug._ import java.net.URI -private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: Map[String, AttributeKey[_]], val provided: Relation[AutoPlugin, AttributeKey[_]]) +private[sbt] class PluginsDebug(val available: List[AutoPlugin], val nameToKey: Map[String, AttributeKey[_]], val provided: Relation[AutoPlugin, AttributeKey[_]]) { /** The set of [[AutoPlugin]]s that might define a key named `keyName`. * Because plugins can define keys in different scopes, this should only be used as a guideline. 
*/ @@ -79,7 +79,7 @@ private[sbt] class NaturesDebug(val available: List[AutoPlugin], val nameToKey: private[this] def multi(strs: Seq[String]): String = strs.mkString(if(strs.size > 4) "\n\t" else ", ") } -private[sbt] object NaturesDebug +private[sbt] object PluginsDebug { def helpAll(s: State): String = if(Project.isProjectLoaded(s)) @@ -118,8 +118,8 @@ private[sbt] object NaturesDebug def projectForRef(ref: ProjectRef): ResolvedProject = get(Keys.thisProject in ref) val perBuild: Map[URI, Set[AutoPlugin]] = structure.units.mapValues(unit => availableAutoPlugins(unit).toSet) val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList - lazy val context = Context(currentProject.natures, currentProject.autoPlugins, Natures.compile(pluginsThisBuild), pluginsThisBuild) - lazy val debug = NaturesDebug(context.available) + lazy val context = Context(currentProject.plugins, currentProject.autoPlugins, Plugins.compile(pluginsThisBuild), pluginsThisBuild) + lazy val debug = PluginsDebug(context.available) if(!pluginsThisBuild.contains(plugin)) { val availableInBuilds: List[URI] = perBuild.toList.filter(_._2(plugin)).map(_._1) s"Plugin ${plugin.label} is only available in builds:\n\t${availableInBuilds.mkString("\n\t")}\nSwitch to a project in one of those builds using `project` and rerun this command for more information." @@ -141,20 +141,20 @@ private[sbt] object NaturesDebug } } - /** Precomputes information for debugging natures and plugins. */ - def apply(available: List[AutoPlugin]): NaturesDebug = + /** Precomputes information for debugging plugins. */ + def apply(available: List[AutoPlugin]): PluginsDebug = { val keyR = definedKeys(available) val nameToKey: Map[String, AttributeKey[_]] = keyR._2s.toList.map(key => (key.label, key)).toMap - new NaturesDebug(available, nameToKey, keyR) + new PluginsDebug(available, nameToKey, keyR) } /** The context for debugging a plugin (de)activation. - * @param initial The initially defined [[Nature]]s. 
+ * @param initial The initially defined [[AutoPlugin]]s. * @param enabled The resulting model. * @param compile The function used to compute the model. * @param available All [[AutoPlugin]]s available for consideration. */ - final case class Context(initial: Natures, enabled: Seq[AutoPlugin], compile: Natures => Seq[AutoPlugin], available: List[AutoPlugin]) + final case class Context(initial: Plugins, enabled: Seq[AutoPlugin], compile: Plugins => Seq[AutoPlugin], available: List[AutoPlugin]) /** Describes the steps to activate a plugin in some context. */ sealed abstract class PluginEnable @@ -165,19 +165,19 @@ private[sbt] object NaturesDebug final case class PluginImpossible(plugin: AutoPlugin, context: Context, contradictions: Set[AutoPlugin]) extends EnableDeactivated /** Describes the requirements for activating [[plugin]] in [[context]]. - * @param context The base natures, exclusions, and ultimately activated plugins + * @param context The base plguins, exclusions, and ultimately activated plugins * @param blockingExcludes Existing exclusions that prevent [[plugin]] from being activated and must be dropped - * @param enablingNatures [[Nature]]s that are not currently enabled, but need to be enabled for [[plugin]] to activate + * @param enablingPlguins [[AutoPlugin]]s that are not currently enabled, but need to be enabled for [[plugin]] to activate * @param extraEnabledPlugins Plugins that will be enabled as a result of [[plugin]] activating, but are not required for [[plugin]] to activate * @param willRemove Plugins that will be deactivated as a result of [[plugin]] activating - * @param deactivate Describes plugins that must be deactivated for [[plugin]] to activate. 
These require an explicit exclusion or dropping a transitive [[Nature]].*/ - final case class PluginRequirements(plugin: AutoPlugin, context: Context, blockingExcludes: Set[AutoPlugin], enablingNatures: Set[Nature], extraEnabledPlugins: Set[AutoPlugin], willRemove: Set[AutoPlugin], deactivate: List[DeactivatePlugin]) extends EnableDeactivated + * @param deactivate Describes plugins that must be deactivated for [[plugin]] to activate. These require an explicit exclusion or dropping a transitive [[AutoPlugin]].*/ + final case class PluginRequirements(plugin: AutoPlugin, context: Context, blockingExcludes: Set[AutoPlugin], enablingPlugins: Set[AutoPlugin], extraEnabledPlugins: Set[AutoPlugin], willRemove: Set[AutoPlugin], deactivate: List[DeactivatePlugin]) extends EnableDeactivated /** Describes a [[plugin]] that must be removed in order to activate another plugin in some context. * The [[plugin]] can always be directly, explicitly excluded. - * @param removeOneOf If non-empty, removing one of these [[Nature]]s will deactivate [[plugin]] without affecting the other plugin. If empty, a direct exclusion is required. + * @param removeOneOf If non-empty, removing one of these [[AutoPlugin]]s will deactivate [[plugin]] without affecting the other plugin. If empty, a direct exclusion is required. * @param newlySelected If false, this plugin was selected in the original context. */ - final case class DeactivatePlugin(plugin: AutoPlugin, removeOneOf: Set[Nature], newlySelected: Boolean) + final case class DeactivatePlugin(plugin: AutoPlugin, removeOneOf: Set[AutoPlugin], newlySelected: Boolean) /** Determines how to enable [[plugin]] in [[context]]. 
*/ def pluginEnable(context: Context, plugin: AutoPlugin): PluginEnable = @@ -191,7 +191,7 @@ private[sbt] object NaturesDebug // deconstruct the context val initialModel = context.enabled.toSet val initial = flatten(context.initial) - val initialNatures = natures(initial) + val initialPlugins = plugins(initial) val initialExcludes = excludes(initial) val minModel = minimalModel(plugin) @@ -212,13 +212,9 @@ private[sbt] object NaturesDebug propose: B, exclude C */ - // `plugin` will only be activated when all of these natures are activated - // Deactivating any one of these would deactivate `plugin`. - val minRequiredNatures = natures(minModel) - // `plugin` will only be activated when all of these plugins are activated // Deactivating any one of these would deactivate `plugin`. - val minRequiredPlugins = minModel.collect{ case a: AutoPlugin => a }.toSet + val minRequiredPlugins = plugins(minModel) // The presence of any one of these plugins would deactivate `plugin` val minAbsentPlugins = excludes(minModel).toSet @@ -231,21 +227,21 @@ private[sbt] object NaturesDebug PluginImpossible(plugin, context, contradictions) else { - // Natures that the user has to add to the currently selected natures in order to enable `plugin`. - val addToExistingNatures = minRequiredNatures -- initialNatures + // Plguins that the user has to add to the currently selected plugins in order to enable `plugin`. + val addToExistingPlugins = minRequiredPlugins -- initialPlugins // Plugins that are currently excluded that need to be allowed. val blockingExcludes = initialExcludes & minRequiredPlugins - // The model that results when the minimal natures are enabled and the minimal plugins are excluded. - // This can include more plugins than just `minRequiredPlugins` because the natures required for `plugin` + // The model that results when the minimal plugins are enabled and the minimal plugins are excluded. 
+ // This can include more plugins than just `minRequiredPlugins` because the plguins required for `plugin` // might activate other plugins as well. - val modelForMin = context.compile(and(includeAll(minRequiredNatures), excludeAll(minAbsentPlugins))) + val modelForMin = context.compile(and(includeAll(minRequiredPlugins), excludeAll(minAbsentPlugins))) - val incrementalInputs = and( includeAll(minRequiredNatures ++ initialNatures), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins)) + val incrementalInputs = and( includeAll(minRequiredPlugins ++ initialPlugins), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins)) val incrementalModel = context.compile(incrementalInputs).toSet - // Plugins that are newly enabled as a result of selecting the natures needed for `plugin`, but aren't strictly required for `plugin`. + // Plugins that are newly enabled as a result of selecting the plugins needed for `plugin`, but aren't strictly required for `plugin`. // These could be excluded and `plugin` and the user's current plugins would still be activated. val extraPlugins = incrementalModel.toSet -- minRequiredPlugins -- initialModel @@ -254,48 +250,48 @@ private[sbt] object NaturesDebug // Determine the plugins that must be independently deactivated. // If both A and B must be deactivated, but A transitively depends on B, deactivating B will deactivate A. - // If A must be deactivated, but one if its (transitively) required natures isn't present, it won't be activated. + // If A must be deactivated, but one if its (transitively) required plugins isn't present, it won't be activated. // So, in either of these cases, A doesn't need to be considered further and won't be included in this set. 
- val minDeactivate = minAbsentPlugins.filter(p => Natures.satisfied(p.select, incrementalModel, natures(flatten(incrementalInputs)))) + val minDeactivate = minAbsentPlugins.filter(p => Plugins.satisfied(p.select, incrementalModel)) val deactivate = for(d <- minDeactivate.toList) yield { - // removing any one of these natures will deactivate `d`. TODO: This is not an especially efficient implementation. - val removeToDeactivate = natures(minimalModel(d)) -- minRequiredNatures + // removing any one of these plugins will deactivate `d`. TODO: This is not an especially efficient implementation. + val removeToDeactivate = plugins(minimalModel(d)) -- minRequiredPlugins val newlySelected = !initialModel(d) - // a. suggest removing a nature in removeOneToDeactivate to deactivate d + // a. suggest removing a plugin in removeOneToDeactivate to deactivate d // b. suggest excluding `d` to directly deactivate it in any case // c. note whether d was already activated (in context.enabled) or is newly selected DeactivatePlugin(d, removeToDeactivate, newlySelected) } - PluginRequirements(plugin, context, blockingExcludes, addToExistingNatures, extraPlugins, willRemove, deactivate) + PluginRequirements(plugin, context, blockingExcludes, addToExistingPlugins, extraPlugins, willRemove, deactivate) } } - private[this] def includeAll[T <: Basic](basic: Set[T]): Natures = And(basic.toList) - private[this] def excludeAll(plugins: Set[AutoPlugin]): Natures = And(plugins map (p => Exclude(p)) toList) + private[this] def includeAll[T <: Basic](basic: Set[T]): Plugins = And(basic.toList) + private[this] def excludeAll(plugins: Set[AutoPlugin]): Plugins = And(plugins map (p => Exclude(p)) toList) private[this] def excludes(bs: Seq[Basic]): Set[AutoPlugin] = bs.collect { case Exclude(b) => b }.toSet - private[this] def natures(bs: Seq[Basic]): Set[Nature] = bs.collect { case n: Nature => n }.toSet + private[this] def plugins(bs: Seq[Basic]): Set[AutoPlugin] = bs.collect { case n: AutoPlugin => 
n }.toSet // If there is a model that includes `plugin`, it includes at least what is returned by this method. - // This is the list of natures and plugins that must be included as well as list of plugins that must not be present. + // This is the list of plugins that must be included as well as list of plugins that must not be present. // It might not be valid, such as if there are contradictions or if there are cycles that are unsatisfiable. - // The actual model might be larger, since other plugins might be enabled by the selected natures. + // The actual model might be larger, since other plugins might be enabled by the selected plugins. private[this] def minimalModel(plugin: AutoPlugin): Seq[Basic] = Dag.topologicalSortUnchecked(plugin: Basic) { - case _: Exclude | _: Nature => Nil - case ap: AutoPlugin => Natures.flatten(ap.select) + case _: Exclude => Nil + case ap: AutoPlugin => Plugins.flatten(ap.select) } /** String representation of [[PluginEnable]], intended for end users. */ def explainPluginEnable(ps: PluginEnable): String = ps match { - case PluginRequirements(plugin, context, blockingExcludes, enablingNatures, extraEnabledPlugins, toBeRemoved, deactivate) => + case PluginRequirements(plugin, context, blockingExcludes, enablingPlugins, extraEnabledPlugins, toBeRemoved, deactivate) => def indent(str: String) = if(str.isEmpty) "" else s"\t$str" def note(str: String) = if(str.isEmpty) "" else s"Note: $str" val parts = indent(excludedError(false /* TODO */, blockingExcludes.toList)) :: - indent(required(enablingNatures.toList)) :: + indent(required(enablingPlugins.toList)) :: indent(needToDeactivate(deactivate)) :: note(willAdd(plugin, extraEnabledPlugins.toList)) :: note(willRemove(plugin, toBeRemoved.toList)) :: @@ -326,13 +322,13 @@ private[sbt] object NaturesDebug private[this] def transitiveString(transitive: Boolean) = if(transitive) "(transitive) " else "" - private[this] def required(natures: List[Nature]): String = - str(natures)(requiredNature, 
requiredNatures) + private[this] def required(plugins: List[AutoPlugin]): String = + str(plugins)(requiredPlugin, requiredPlugins) - private[this] def requiredNature(nature: Nature) = - s"Required nature ${nature.label} not present." - private[this] def requiredNatures(natures: List[Nature]) = - s"Required natures not present:\n\t${natures.map(_.label).mkString("\n\t")}" + private[this] def requiredPlugin(plugin: AutoPlugin) = + s"Required plugin ${plugin.label} not present." + private[this] def requiredPlugins(plugins: List[AutoPlugin]) = + s"Required plugins not present:\n\t${plugins.map(_.label).mkString("\n\t")}" private[this] def str[A](list: List[A])(f: A => String, fs: List[A] => String): String = list match { case Nil => "" @@ -367,13 +363,13 @@ private[sbt] object NaturesDebug s"Need to deactivate ${deactivateString(deactivate)}" private[this] def deactivateString(d: DeactivatePlugin): String = { - val removeNaturesString: String = + val removePluginsString: String = d.removeOneOf.toList match { case Nil => "" case x :: Nil => s" or no longer include $x" case xs => s" or remove one of ${xs.mkString(", ")}" } - s"${d.plugin.label}: directly exclude it${removeNaturesString}" + s"${d.plugin.label}: directly exclude it${removePluginsString}" } private[this] def pluginImpossible(plugin: AutoPlugin, contradictions: Set[AutoPlugin]): String = diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index 647013bed..a44598e94 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -50,9 +50,9 @@ sealed trait ProjectDefinition[PR <: ProjectReference] /** Configures the sources of automatically appended settings.*/ def auto: AddSettings - /** The [[Natures]] associated with this project. - A [[Nature]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ - def natures: Natures + /** The defined [[Plugins]] associated with this project. 
+ A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ + def plugins: Plugins /** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */ private[sbt] def autoPlugins: Seq[AutoPlugin] @@ -68,18 +68,18 @@ sealed trait ProjectDefinition[PR <: ProjectReference] val dep = ifNonEmpty("dependencies", dependencies) val conf = ifNonEmpty("configurations", configurations) val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.label)) - val fields = s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"natures: List($natures)" :: autos) + val fields = s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"plugins: List($plugins)" :: autos) s"Project(${fields.mkString(", ")})" } private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] = if(ts.isEmpty) Nil else s"$label: $ts" :: Nil } sealed trait Project extends ProjectDefinition[ProjectReference] { - // TODO: add parameters for natures and autoPlugins in 0.14.0 (not reasonable to do in a binary compatible way in 0.13) + // TODO: add parameters for plugins in 0.14.0 (not reasonable to do in a binary compatible way in 0.13) def copy(id: String = id, base: File = base, aggregate: => Seq[ProjectReference] = aggregate, dependencies: => Seq[ClasspathDep[ProjectReference]] = dependencies, delegates: => Seq[ProjectReference] = delegates, settings: => Seq[Setting[_]] = settings, configurations: Seq[Configuration] = configurations, auto: AddSettings = auto): Project = - unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, natures, autoPlugins) + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, plugins, autoPlugins) def resolve(resolveRef: ProjectReference => ProjectRef): ResolvedProject = { @@ -87,7 +87,7 @@ sealed trait Project extends 
ProjectDefinition[ProjectReference] def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep def resolveDep(d: ClasspathDep[ProjectReference]) = ResolvedClasspathDependency(resolveRef(d.project), d.configuration) resolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), - settings, configurations, auto, natures, autoPlugins) + settings, configurations, auto, plugins, autoPlugins) } def resolveBuild(resolveRef: ProjectReference => ProjectReference): Project = { @@ -95,7 +95,7 @@ sealed trait Project extends ProjectDefinition[ProjectReference] def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep def resolveDep(d: ClasspathDep[ProjectReference]) = ClasspathDependency(resolveRef(d.project), d.configuration) unresolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), - settings, configurations, auto, natures, autoPlugins) + settings, configurations, auto, plugins, autoPlugins) } /** Applies the given functions to this Project. @@ -136,27 +136,27 @@ sealed trait Project extends ProjectDefinition[ProjectReference] * Any configured .sbt files are removed from this project's list.*/ def setSbtFiles(files: File*): Project = copy(auto = AddSettings.append( AddSettings.clearSbtFiles(auto), AddSettings.sbtFiles(files: _*)) ) - /** Sets the [[Nature]]s of this project. - A [[Nature]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ - def addNatures(ns: Nature*): Project = setNatures(Natures.and(natures, Natures.And(ns.toList))) + /** Sets the [[AutoPlugin]]s of this project. + A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. 
*/ + def addPlugins(ns: AutoPlugin*): Project = setPlugins(Plugins.and(plugins, Plugins.And(ns.toList))) /** Disable the given plugins on this project. */ - def disablePlugins(plugins: AutoPlugin*): Project = - setNatures(Natures.and(natures, Natures.And(plugins.map(p => Natures.Exclude(p)).toList))) + def disablePlugins(ps: AutoPlugin*): Project = + setPlugins(Plugins.and(plugins, Plugins.And(ps.map(p => Plugins.Exclude(p)).toList))) - private[this] def setNatures(ns: Natures): Project = { - // TODO: for 0.14.0, use copy when it has the additional `natures` parameter + private[this] def setPlugins(ns: Plugins): Project = { + // TODO: for 0.14.0, use copy when it has the additional `plugins` parameter unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, ns, autoPlugins) } /** Definitively set the [[AutoPlugin]]s for this project. */ private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = { // TODO: for 0.14.0, use copy when it has the additional `autoPlugins` parameter - unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, natures, autos) + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, plugins, autos) } } sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] { - /** The [[AutoPlugin]]s enabled for this project as computed from [[natures]].*/ + /** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]].*/ def autoPlugins: Seq[AutoPlugin] } @@ -192,7 +192,7 @@ object Project extends ProjectExtra private abstract class ProjectDef[PR <: ProjectReference](val id: String, val base: File, aggregate0: => Seq[PR], dependencies0: => Seq[ClasspathDep[PR]], delegates0: => Seq[PR], settings0: => Seq[Def.Setting[_]], val configurations: Seq[Configuration], val auto: AddSettings, - val natures: Natures, val 
autoPlugins: Seq[AutoPlugin]) extends ProjectDefinition[PR] + val plugins: Plugins, val autoPlugins: Seq[AutoPlugin]) extends ProjectDefinition[PR] { lazy val aggregate = aggregate0 lazy val dependencies = dependencies0 @@ -202,11 +202,11 @@ object Project extends ProjectExtra Dag.topologicalSort(configurations)(_.extendsConfigs) // checks for cyclic references here instead of having to do it in Scope.delegates } - // TODO: add parameter for natures in 0.14.0 + // TODO: add parameter for plugins in 0.14.0 def apply(id: String, base: File, aggregate: => Seq[ProjectReference] = Nil, dependencies: => Seq[ClasspathDep[ProjectReference]] = Nil, delegates: => Seq[ProjectReference] = Nil, settings: => Seq[Def.Setting[_]] = defaultSettings, configurations: Seq[Configuration] = Configurations.default, auto: AddSettings = AddSettings.allDefaults): Project = - unresolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Natures.empty, Nil) + unresolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Plugins.empty, Nil) /** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not.*/ def validProjectID(id: String): Option[String] = DefaultParsers.parse(id, DefaultParsers.ID).left.toOption @@ -228,19 +228,19 @@ object Project extends ProjectExtra @deprecated("Will be removed.", "0.13.2") def resolved(id: String, base: File, aggregate: => Seq[ProjectRef], dependencies: => Seq[ResolvedClasspathDependency], delegates: => Seq[ProjectRef], settings: Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings): ResolvedProject = - resolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Natures.empty, Nil) + resolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Plugins.empty, Nil) private def resolved(id: String, base: File, aggregate: => Seq[ProjectRef], dependencies: => Seq[ClasspathDep[ProjectRef]], 
delegates: => Seq[ProjectRef], settings: Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings, - natures: Natures, autoPlugins: Seq[AutoPlugin]): ResolvedProject = - new ProjectDef[ProjectRef](id, base, aggregate, dependencies, delegates, settings, configurations, auto, natures, autoPlugins) with ResolvedProject + plugins: Plugins, autoPlugins: Seq[AutoPlugin]): ResolvedProject = + new ProjectDef[ProjectRef](id, base, aggregate, dependencies, delegates, settings, configurations, auto, plugins, autoPlugins) with ResolvedProject private def unresolved(id: String, base: File, aggregate: => Seq[ProjectReference], dependencies: => Seq[ClasspathDep[ProjectReference]], delegates: => Seq[ProjectReference], settings: => Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings, - natures: Natures, autoPlugins: Seq[AutoPlugin]): Project = + plugins: Plugins, autoPlugins: Seq[AutoPlugin]): Project = { validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg)) - new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto, natures, autoPlugins) with Project + new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto, plugins, autoPlugins) with Project } def defaultSettings: Seq[Def.Setting[_]] = Defaults.defaultSettings diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt index f48a1f0e5..57e1394c8 100644 --- a/sbt/src/sbt-test/project/auto-plugins/build.sbt +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -1,11 +1,11 @@ // excludePlugins(C) will prevent C, and thus D, from being auto-added -lazy val a = project.addNatures(A, B).disablePlugins(Q) +lazy val a = project.addPlugins(A, B).disablePlugins(Q) // without B, C is not added -lazy val b = project.addNatures(A) +lazy val b = project.addPlugins(A) // with both A and B, C is selected, which in turn 
selects D -lazy val c = project.addNatures(A, B) +lazy val c = project.addPlugins(A, B) // with no natures defined, nothing is auto-added lazy val d = project diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index e092e0fd1..c6dea7ba8 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -4,9 +4,12 @@ object AI extends AutoImport { - lazy val A = Nature("A") - lazy val B = Nature("B") - lazy val E = Nature("E") + trait EmptyAutoPlugin extends AutoPlugin { + def select = Plugins.empty + } + object A extends EmptyAutoPlugin + object B extends EmptyAutoPlugin + object E extends EmptyAutoPlugin lazy val q = config("q") lazy val p = config("p").extend(q) @@ -20,12 +23,12 @@ object AI extends AutoImport import AI._ object D extends AutoPlugin { - def select: Natures = E + def select: Plugins = E } object Q extends AutoPlugin { - def select: Natures = A && B + def select: Plugins = A && B override def projectConfigurations: Seq[Configuration] = p :: diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index c38558d4f..a9f71c928 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -3,7 +3,9 @@ import Keys._ object C extends AutoImport { - lazy val bN = Nature("B") + object bN extends AutoPlugin { + def select = Plugins.empty + } lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") } @@ -17,5 +19,5 @@ object A extends AutoPlugin { } object B extends Build { - lazy val extra = project.addNatures(bN) + lazy val extra = project.addPlugins(bN) } From ac9391066b572321f8b183b96367dc476bff5d32 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 5 Mar 2014 17:59:29 -0500 Subject: [PATCH 097/148] Allow Build.scala 
project settings to be ordered via AddSettings. * Create new AddSettings.ProjectSettings that can be used in the Addsettings order. * Update Load.scala to correctly abide by AddSettings orderings. --- main/src/main/scala/sbt/AddSettings.scala | 14 +++++++++++++- main/src/main/scala/sbt/Load.scala | 16 ++++++++-------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/main/src/main/scala/sbt/AddSettings.scala b/main/src/main/scala/sbt/AddSettings.scala index 2d698b874..ab90c8d8f 100644 --- a/main/src/main/scala/sbt/AddSettings.scala +++ b/main/src/main/scala/sbt/AddSettings.scala @@ -14,10 +14,21 @@ object AddSettings private[sbt] final class Plugins(val include: Plugin => Boolean) extends AddSettings private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings + // Settings created with the Project().settings() commands in build.scala files. + private[sbt] final object ProjectSettings extends AddSettings /** Adds all settings from a plugin to a project. */ val allPlugins: AddSettings = plugins(const(true)) + /** Adds all settings from autoplugins. */ + val autoPlugins: AddSettings = plugins(_.isInstanceOf[AutoPlugin]) + + /** Settings specified in Build.scala `Project` constructors. */ + val projectSettings: AddSettings = ProjectSettings + + /** All plugins that aren't auto plugins. */ + val nonAutoPlugins: AddSettings = plugins(!_.isInstanceOf[AutoPlugin]) + /** Allows the plugins whose names match the `names` filter to automatically add settings to a project. */ def plugins(include: Plugin => Boolean): AddSettings = new Plugins(include) @@ -33,7 +44,8 @@ object AddSettings /** Includes settings automatically*/ def seq(autos: AddSettings*): AddSettings = new Sequence(autos) - val allDefaults: AddSettings = seq(userSettings, allPlugins, defaultSbtFiles) + /** The default inclusion of settings. 
*/ + val allDefaults: AddSettings = seq(autoPlugins, projectSettings, userSettings, nonAutoPlugins, defaultSbtFiles) /** Combines two automatic setting configurations. */ def append(a: AddSettings, b: AddSettings): AddSettings = (a,b) match { diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index d6c9eff85..b040ee0ae 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -459,20 +459,19 @@ object Load private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile]): Seq[Project] = { - def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin]): LoadedSbtFile = - loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins) + def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin], projectSettings: Seq[Setting[_]]): LoadedSbtFile = + loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins, projectSettings) def loadForProjects = newProjects map { project => val autoPlugins = try plugins.detected.compilePlugins(project.plugins) catch { case e: AutoPluginException => throw translateAutoPluginException(e, project) } val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) - val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins) - val newSettings = (project.settings: Seq[Setting[_]]) ++ loadedSbtFiles.settings + val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins, project.settings) // add the automatically selected settings, record the selected AutoPlugins, and register the automatically selected configurations - val transformed = project.copy(settings = newSettings).setAutoPlugins(autoPlugins).overrideConfigs(autoConfigs : _*) + val transformed = project.copy(settings = 
loadedSbtFiles.settings).setAutoPlugins(autoPlugins).overrideConfigs(autoConfigs : _*) (transformed, loadedSbtFiles.projects) } - def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase, Nil).projects + def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase, Nil, Nil).projects val (nextProjects, loadedProjects) = if(newProjects.isEmpty) // load the .sbt files in the root directory to look for Projects (defaultLoad, acc) @@ -489,7 +488,7 @@ object Load private[this] def translateAutoPluginException(e: AutoPluginException, project: Project): AutoPluginException = e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n") - private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin]): LoadedSbtFile = + private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin], projectSettings: Seq[Setting[_]]): LoadedSbtFile = { lazy val defaultSbtFiles = configurationSources(projectBase) def settings(ss: Seq[Setting[_]]) = new LoadedSbtFile(ss, Nil, Nil) @@ -506,7 +505,7 @@ object Load def loadSettingsFile(src: File): LoadedSbtFile = EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), loadedPlugins.detected.imports, 0)(loader) - import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,Sequence} + import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,Sequence, ProjectSettings} def pluginSettings(f: Plugins) = { val included = loadedPlugins.detected.plugins.values.filter(f.include) // don't apply the filter to AutoPlugins, only Plugins val oldStyle = included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings) @@ -514,6 +513,7 @@ object Load oldStyle ++ 
autoStyle } def expand(auto: AddSettings): LoadedSbtFile = auto match { + case ProjectSettings => settings(projectSettings) case User => settings(injectSettings.projectLoaded(loader)) case sf: SbtFiles => loadSettings( sf.files.map(f => IO.resolve(projectBase, f))) case sf: DefaultSbtFiles => loadSettings( defaultSbtFiles.filter(sf.include)) From a44a14f2c838745a1ab7c741dafccdd8aa3faa88 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 5 Mar 2014 18:03:00 -0500 Subject: [PATCH 098/148] AutoPlugins appropriately participate in AddSettings. * Add new AutoPlugins type to AddSettings. * Ensure any Plugins filter doesn't just automatically always add autoplugins every time. * Load.scala can now adjust AutoPlugins ordering Note: Adjusting autoplugin ordering is dangerous BUT doing a glob of "put autoplugin settings here" is generally ok. --- main/src/main/scala/sbt/AddSettings.scala | 11 ++++++----- main/src/main/scala/sbt/Load.scala | 12 ++++++++---- main/src/main/scala/sbt/Plugins.scala | 23 ++++++++++++++++++++++- 3 files changed, 36 insertions(+), 10 deletions(-) diff --git a/main/src/main/scala/sbt/AddSettings.scala b/main/src/main/scala/sbt/AddSettings.scala index ab90c8d8f..ceb0bc751 100644 --- a/main/src/main/scala/sbt/AddSettings.scala +++ b/main/src/main/scala/sbt/AddSettings.scala @@ -12,22 +12,23 @@ object AddSettings private[sbt] final class Sequence(val sequence: Seq[AddSettings]) extends AddSettings private[sbt] final object User extends AddSettings private[sbt] final class Plugins(val include: Plugin => Boolean) extends AddSettings + private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings // Settings created with the Project().settings() commands in build.scala files. 
private[sbt] final object ProjectSettings extends AddSettings - /** Adds all settings from a plugin to a project. */ - val allPlugins: AddSettings = plugins(const(true)) - /** Adds all settings from autoplugins. */ - val autoPlugins: AddSettings = plugins(_.isInstanceOf[AutoPlugin]) + val autoPlugins: AddSettings = new AutoPlugins(const(true)) /** Settings specified in Build.scala `Project` constructors. */ val projectSettings: AddSettings = ProjectSettings /** All plugins that aren't auto plugins. */ - val nonAutoPlugins: AddSettings = plugins(!_.isInstanceOf[AutoPlugin]) + val nonAutoPlugins: AddSettings = plugins(const(true)) + + /** Adds all settings from a plugin to a project. */ + val allPlugins: AddSettings = seq(autoPlugins, nonAutoPlugins) /** Allows the plugins whose names match the `names` filter to automatically add settings to a project. */ def plugins(include: Plugin => Boolean): AddSettings = new Plugins(include) diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index b040ee0ae..a275c907a 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -505,19 +505,23 @@ object Load def loadSettingsFile(src: File): LoadedSbtFile = EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), loadedPlugins.detected.imports, 0)(loader) - import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,Sequence, ProjectSettings} + import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,AutoPlugins,Sequence, ProjectSettings} def pluginSettings(f: Plugins) = { val included = loadedPlugins.detected.plugins.values.filter(f.include) // don't apply the filter to AutoPlugins, only Plugins - val oldStyle = included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings) - val autoStyle = autoPlugins.flatMap(_.projectSettings) - oldStyle ++ autoStyle + included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings) } + // Filter the AutoPlugin settings we included based on 
which ones are + // intended in the AddSettings.AutoPlugins filter. + def autoPluginSettings(f: AutoPlugins) = + autoPlugins.filter(f.include).flatMap(_.projectSettings) + def expand(auto: AddSettings): LoadedSbtFile = auto match { case ProjectSettings => settings(projectSettings) case User => settings(injectSettings.projectLoaded(loader)) case sf: SbtFiles => loadSettings( sf.files.map(f => IO.resolve(projectBase, f))) case sf: DefaultSbtFiles => loadSettings( defaultSbtFiles.filter(sf.include)) case p: Plugins => settings(pluginSettings(p)) + case p: AutoPlugins => settings(autoPluginSettings(p)) case q: Sequence => (LoadedSbtFile.empty /: q.sequence) { (b,add) => b.merge( expand(add) ) } } expand(auto) diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 5e814d082..a7ada9b6e 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -72,6 +72,24 @@ abstract class AutoPlugin extends Plugins.Basic // TODO?: def commands: Seq[Command] def unary_! : Exclude = Exclude(this) + + + /** If this plugin requries itself to be included, it means we're actually a nature, + * not a normal plugin. The user must specifically enable this plugin + * but other plugins can rely on its existence. + */ + final def isRoot: Boolean = + this match { + case _: RootAutoPlugin => true + case _ => false + } +} +/** + * A root AutoPlugin is a plugin which must be explicitly enabled by users in their `setPlugins` method + * on a project. However, RootAutoPlugins represent the "root" of a tree of dependent auto-plugins. + */ +abstract class RootAutoPlugin extends AutoPlugin { + final def select: Plugins = this } /** An error that occurs when auto-plugins aren't configured properly. 
@@ -104,7 +122,10 @@ object Plugins val byAtom = defined.map(x => (Atom(x.label), x)) val byAtomMap = byAtom.toMap if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) - val clauses = Clauses( defined.map(d => asClause(d)) ) + // Ignore clauses for plugins that just require themselves be specified. + // Avoids the requirement for pure Nature strings *and* possible + // circular dependencies in the logic. + val clauses = Clauses( defined.filterNot(_.isRoot).map(d => asClause(d)) ) requestedPlugins => Logic.reduce(clauses, flattenConvert(requestedPlugins).toSet) match { case Left(problem) => throw AutoPluginException(problem) From 3576baa76c8d0ac5e559e7a69ae4b89f94bbe057 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 5 Mar 2014 17:43:49 -0500 Subject: [PATCH 099/148] Split Defaults.scala into three plugins: Global,Ivy,Jvm. * GlobalPlugin has defaults for controlling parallelism on tasks, basic command stuff. * IvyModule has the configuration for resolving/publishing modules to ivy, assuming each project is a single module. * JvmModule has the configuration for compiling/running/testing/packaging Java/Scala projects. 
--- main/src/main/scala/sbt/Build.scala | 11 +- main/src/main/scala/sbt/Defaults.scala | 195 +++++++++++------- main/src/main/scala/sbt/PluginDiscovery.scala | 10 +- main/src/main/scala/sbt/Project.scala | 6 +- .../main/scala/sbt/plugins/GlobalModule.scala | 19 ++ .../main/scala/sbt/plugins/IvyModule.scala | 24 +++ .../main/scala/sbt/plugins/JvmModule.scala | 35 ++++ 7 files changed, 218 insertions(+), 82 deletions(-) create mode 100644 main/src/main/scala/sbt/plugins/GlobalModule.scala create mode 100644 main/src/main/scala/sbt/plugins/IvyModule.scala create mode 100644 main/src/main/scala/sbt/plugins/JvmModule.scala diff --git a/main/src/main/scala/sbt/Build.scala b/main/src/main/scala/sbt/Build.scala index 7bcb704ec..030e54dfb 100644 --- a/main/src/main/scala/sbt/Build.scala +++ b/main/src/main/scala/sbt/Build.scala @@ -12,6 +12,7 @@ trait Build { def projectDefinitions(baseDirectory: File): Seq[Project] = projects def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq + // TODO: Should we grab the build core setting shere or in a plugin? def settings: Seq[Setting[_]] = Defaults.buildCore def buildLoaders: Seq[BuildLoader.Components] = Nil /** Explicitly defines the root project. @@ -46,8 +47,16 @@ object Build @deprecated("Explicitly specify the ID", "0.13.0") def defaultProject(base: File): Project = defaultProject(defaultID(base), base) def defaultProject(id: String, base: File): Project = Project(id, base).settings( + // TODO - Can we move this somewhere else? ordering of settings is causing this to get borked. // if the user has overridden the name, use the normal organization that is derived from the name. 
- organization <<= (thisProject, organization, name) { (p, o, n) => if(p.id == n) "default" else o } + organization := { + val overridden = thisProject.value.id == name.value + organization.?.value match { + case Some(o) if !overridden => o + case _ => "default" + } + //(thisProject, organization, name) { (p, o, n) => if(p.id == n) "default" else o } + } ) def defaultAggregatedProject(id: String, base: File, agg: Seq[ProjectRef]): Project = defaultProject(id, base).aggregate(agg : _*) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 9657c3644..e63ba2ed3 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -56,94 +56,107 @@ object Defaults extends BuildCommon def thisBuildCore: Seq[Setting[_]] = inScope(GlobalScope.copy(project = Select(ThisBuild)))(Seq( managedDirectory := baseDirectory.value / "lib_managed" )) + @deprecated("0.13.2", "Use AutoPlugins and globalSbtCore instead.") lazy val globalCore: Seq[Setting[_]] = globalDefaults(defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq( + excludeFilter :== HiddenFileFilter + ) ++ globalIvyCore ++ globalJvmCore) ++ globalSbtCore + + private[sbt] lazy val globalJvmCore: Seq[Setting[_]] = + Seq( compilerCache := state.value get Keys.stateCompilerCache getOrElse compiler.CompilerCache.fresh, - crossVersion :== CrossVersion.Disabled, + sourcesInBase :== true, + autoAPIMappings := false, + apiMappings := Map.empty, + autoScalaLibrary :== true, + managedScalaInstance :== true, + definesClass :== FileValueCache(Locate.definesClass _ ).get, + traceLevel in run :== 0, + traceLevel in runMain :== 0, + traceLevel in console :== Int.MaxValue, + traceLevel in consoleProject :== Int.MaxValue, + autoCompilerPlugins :== true, + scalaHome :== None, + apiURL := None, + javaHome :== None, + testForkedParallel :== false, + javaOptions :== Nil, + sbtPlugin :== false, + crossPaths :== true, + 
sourcePositionMappers :== Nil, + artifactClassifier in packageSrc :== Some(SourceClassifier), + artifactClassifier in packageDoc :== Some(DocClassifier), + includeFilter :== NothingFilter, + includeFilter in unmanagedSources :== "*.java" | "*.scala", + includeFilter in unmanagedJars :== "*.jar" | "*.so" | "*.dll" | "*.jnilib" | "*.zip", + includeFilter in unmanagedResources :== AllPassFilter + ) + + private[sbt] lazy val globalIvyCore: Seq[Setting[_]] = + Seq( + internalConfigurationMap :== Configurations.internalMap _, + credentials :== Nil, + exportJars :== false, + retrieveManaged :== false, scalaOrganization :== ScalaArtifacts.Organization, + sbtResolver := { if(sbtVersion.value endsWith "-SNAPSHOT") Classpaths.typesafeSnapshots else Classpaths.typesafeReleases }, + crossVersion :== CrossVersion.Disabled, buildDependencies <<= Classpaths.constructBuildDependencies, + version :== "0.1-SNAPSHOT", + classpathTypes :== Set("jar", "bundle") ++ CustomPomParser.JarPackagings, + artifactClassifier :== None, + checksums := Classpaths.bootChecksums(appConfiguration.value), + conflictManager := ConflictManager.default, + pomExtra :== NodeSeq.Empty, + pomPostProcess :== idFun, + pomAllRepositories :== false, + pomIncludeRepository :== Classpaths.defaultRepositoryFilter + ) + + /** Core non-plugin settings for sbt builds. These *must* be on every build or the sbt engine will fail to run at all. */ + private[sbt] lazy val globalSbtCore: Seq[Setting[_]] = globalDefaults(Seq( + outputStrategy :== None, // TODO - This might belong elsewhere. 
+ buildStructure := Project.structure(state.value), + settingsData := buildStructure.value.data, + trapExit :== true, + connectInput :== false, + cancelable :== false, + envVars :== Map.empty, + sbtVersion := appConfiguration.value.provider.id.version, + sbtBinaryVersion := binarySbtVersion(sbtVersion.value), + watchingMessage := Watched.defaultWatchingMessage, + triggeredMessage := Watched.defaultTriggeredMessage, + onLoad := idFun[State], + onUnload := idFun[State], + onUnload := { s => try onUnload.value(s) finally IO.delete(taskTemporaryDirectory.value) }, + extraLoggers :== { _ => Nil }, + watchSources :== Nil, + skip :== false, taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir }, onComplete := { val dir = taskTemporaryDirectory.value; () => {IO.delete(dir); IO.createDirectory(dir) }}, Previous.cache <<= Previous.cacheSetting, Previous.references :== new Previous.References, concurrentRestrictions <<= defaultRestrictions, parallelExecution :== true, - sbtVersion := appConfiguration.value.provider.id.version, - sbtBinaryVersion := binarySbtVersion(sbtVersion.value), - sbtResolver := { if(sbtVersion.value endsWith "-SNAPSHOT") Classpaths.typesafeSnapshots else Classpaths.typesafeReleases }, pollInterval :== 500, logBuffered :== false, - connectInput :== false, - cancelable :== false, - envVars :== Map.empty, - sourcesInBase :== true, - autoAPIMappings := false, - apiMappings := Map.empty, - autoScalaLibrary :== true, - managedScalaInstance :== true, - onLoad := idFun[State], - onUnload := idFun[State], - onUnload := { s => try onUnload.value(s) finally IO.delete(taskTemporaryDirectory.value) }, - watchingMessage := Watched.defaultWatchingMessage, - triggeredMessage := Watched.defaultTriggeredMessage, - definesClass :== FileValueCache(Locate.definesClass _ ).get, - trapExit :== true, - traceLevel in run :== 0, - traceLevel in runMain :== 0, - traceLevel in console :== Int.MaxValue, - traceLevel in consoleProject :== 
Int.MaxValue, - autoCompilerPlugins :== true, - internalConfigurationMap :== Configurations.internalMap _, - initialize :== {}, - credentials :== Nil, - scalaHome :== None, - apiURL := None, - javaHome :== None, - extraLoggers :== { _ => Nil }, - skip :== false, - watchSources :== Nil, - version :== "0.1-SNAPSHOT", - outputStrategy :== None, - exportJars :== false, - fork :== false, - testForkedParallel :== false, - javaOptions :== Nil, - sbtPlugin :== false, - crossPaths :== true, - classpathTypes :== Set("jar", "bundle") ++ CustomPomParser.JarPackagings, - aggregate :== true, - maxErrors :== 100, - sourcePositionMappers :== Nil, + commands :== Nil, + showSuccess :== true, showTiming :== true, timingFormat :== Aggregation.defaultFormat, - showSuccess :== true, - commands :== Nil, - retrieveManaged :== false, - buildStructure := Project.structure(state.value), - settingsData := buildStructure.value.data, - artifactClassifier :== None, - artifactClassifier in packageSrc :== Some(SourceClassifier), - artifactClassifier in packageDoc :== Some(DocClassifier), - checksums := Classpaths.bootChecksums(appConfiguration.value), - conflictManager := ConflictManager.default, - pomExtra :== NodeSeq.Empty, - pomPostProcess :== idFun, - pomAllRepositories :== false, - includeFilter :== NothingFilter, - includeFilter in unmanagedSources :== "*.java" | "*.scala", - includeFilter in unmanagedJars :== "*.jar" | "*.so" | "*.dll" | "*.jnilib" | "*.zip", - includeFilter in unmanagedResources :== AllPassFilter, - excludeFilter :== HiddenFileFilter, - pomIncludeRepository :== Classpaths.defaultRepositoryFilter + aggregate :== true, + maxErrors :== 100, + fork :== false, + initialize :== {} )) def defaultTestTasks(key: Scoped): Seq[Setting[_]] = inTask(key)(Seq( tags := Seq(Tags.Test -> 1), logBuffered := true )) + // TODO: This should be on the new default settings for a project. 
def projectCore: Seq[Setting[_]] = Seq( name := thisProject.value.id, logManager := LogManager.defaults(extraLoggers.value, StandardMain.console), - onLoadMessage <<= onLoadMessage or (name, thisProjectRef)("Set current project to " + _ + " (in build " + _.build +")"), - runnerTask + onLoadMessage <<= onLoadMessage or (name, thisProjectRef)("Set current project to " + _ + " (in build " + _.build +")") ) def paths = Seq( baseDirectory := thisProject.value.base, @@ -852,6 +865,7 @@ object Defaults extends BuildCommon lazy val disableAggregation = Defaults.globalDefaults( noAggregation map disableAggregate ) def disableAggregate(k: Scoped) = aggregate in k :== false + lazy val runnerSettings: Seq[Setting[_]] = Seq(runnerTask) lazy val baseTasks: Seq[Setting[_]] = projectTasks ++ packageBase lazy val baseClasspaths: Seq[Setting[_]] = Classpaths.publishSettings ++ Classpaths.baseSettings @@ -865,7 +879,12 @@ object Defaults extends BuildCommon // settings that are not specific to a configuration - lazy val projectBaseSettings: Seq[Setting[_]] = projectCore ++ paths ++ baseClasspaths ++ baseTasks ++ compileBase ++ disableAggregation + @deprecated("0.13.2", "Settings now split into AutoPlugins.") + lazy val projectBaseSettings: Seq[Setting[_]] = projectCore ++ runnerSettings ++ paths ++ baseClasspaths ++ baseTasks ++ compileBase ++ disableAggregation + + // These are project level settings that MUST be on every project. 
+ lazy val coreDefaultSettings: Seq[Setting[_]] = projectCore ++ disableAggregation + @deprecated("0.13.2", "Default settings split into `coreDefaultSettings` and IvyModule/JvmModule plugins.") lazy val defaultSettings: Seq[Setting[_]] = projectBaseSettings ++ defaultConfigs } object Classpaths @@ -935,9 +954,14 @@ object Classpaths publishArtifact in Test:== false )) - val publishSettings: Seq[Setting[_]] = publishGlobalDefaults ++ Seq( - artifacts <<= artifactDefs(defaultArtifactTasks), - packagedArtifacts <<= packaged(defaultArtifactTasks), + val jvmPublishSettings: Seq[Setting[_]] = Seq( + artifacts <<= artifactDefs(defaultArtifactTasks), + packagedArtifacts <<= packaged(defaultArtifactTasks) + ) + + val ivyPublishSettings: Seq[Setting[_]] = publishGlobalDefaults ++ Seq( + artifacts :== Nil, + packagedArtifacts :== Map.empty, makePom := { val config = makePomConfiguration.value; IvyActions.makePom(ivyModule.value, config, streams.value.log); config.file }, packagedArtifact in makePom := (artifact in makePom value, makePom value), deliver <<= deliverTask(deliverConfiguration), @@ -946,6 +970,8 @@ object Classpaths publishLocal <<= publishTask(publishLocalConfiguration, deliverLocal), publishM2 <<= publishTask(publishM2Configuration, deliverLocal) ) + @deprecated("0.13.2", "This has been split into jvmIvySettings and ivyPublishSettings.") + val publishSettings: Seq[Setting[_]] = jvmPublishSettings ++ ivyPublishSettings private[this] def baseGlobalDefaults = Defaults.globalDefaults(Seq( conflictWarning :== ConflictWarning.default("global"), @@ -976,7 +1002,7 @@ object Classpaths } )) - val baseSettings: Seq[Setting[_]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq( + val ivyBaseSettings: Seq[Setting[_]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq( conflictWarning := conflictWarning.value.copy(label = Reference.display(thisProjectRef.value)), unmanagedBase := baseDirectory.value / "lib", normalizedName := Project.normalizeModuleID(name.value), @@ 
-1007,14 +1033,11 @@ object Classpaths otherResolvers := Resolver.publishMavenLocal :: publishTo.value.toList, projectResolver <<= projectResolverTask, projectDependencies <<= projectDependenciesTask, - libraryDependencies ++= autoLibraryDependency(autoScalaLibrary.value && !scalaHome.value.isDefined && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, scalaVersion.value), + // TODO - Is this the appropriate split? Ivy defines this simply as + // just project + library, while the JVM plugin will define it as + // having the additional sbtPlugin + autoScala magikz. allDependencies := { - val base = projectDependencies.value ++ libraryDependencies.value - val pluginAdjust = if(sbtPlugin.value) sbtDependency.value.copy(configurations = Some(Provided.name)) +: base else base - if(scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value) - pluginAdjust - else - ScalaArtifacts.toolDependencies(scalaOrganization.value, scalaVersion.value) ++ pluginAdjust + projectDependencies.value ++ libraryDependencies.value }, ivyScala <<= ivyScala or (scalaHome, scalaVersion in update, scalaBinaryVersion in update, scalaOrganization) { (sh,fv,bv,so) => Some(new IvyScala(fv, bv, Nil, filterImplicit = false, checkExplicit = true, overrideScalaVersion = false, scalaOrganization = so)) @@ -1054,6 +1077,22 @@ object Classpaths } } tag(Tags.Update, Tags.Network) ) + + val jvmBaseSettings: Seq[Setting[_]] = Seq( + libraryDependencies ++= autoLibraryDependency(autoScalaLibrary.value && !scalaHome.value.isDefined && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, scalaVersion.value), + // Override the default to handle mixing in the sbtPlugin + scala dependencies. 
+ allDependencies := { + val base = projectDependencies.value ++ libraryDependencies.value + val pluginAdjust = if(sbtPlugin.value) sbtDependency.value.copy(configurations = Some(Provided.name)) +: base else base + if(scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value) + pluginAdjust + else + ScalaArtifacts.toolDependencies(scalaOrganization.value, scalaVersion.value) ++ pluginAdjust + } + ) + @deprecated("0.13.2", "Split into ivyBaseSettings and jvmBaseSettings.") + val baseSettings: Seq[Setting[_]] = ivyBaseSettings ++ jvmBaseSettings + def warnResolversConflict(ress: Seq[Resolver], log: Logger) { val resset = ress.toSet for ((name, r) <- resset groupBy (_.name) if r.size > 1) { diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala index 0d49e6fd7..ae945f78a 100644 --- a/main/src/main/scala/sbt/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -28,7 +28,15 @@ object PluginDiscovery def discover[T](resource: String)(implicit mf: reflect.ClassManifest[T]) = binarySourceModules[T](data, loader, resource) import Paths._ - new DetectedPlugins(discover[Plugin](Plugins), discover[AutoImport](AutoImports), discover[AutoPlugin](AutoPlugins), discover[Build](Builds)) + // TODO - Fix this once we can autodetect AutoPlugins defined by sbt itself. + val defaultAutoPlugins = Seq( + "sbt.plugins.IvyModule" -> sbt.plugins.IvyModule, + "sbt.plugins.JvmModule" -> sbt.plugins.JvmModule, + "sbt.plugins.GlobalModule" -> sbt.plugins.GlobalModule + ) + val detectedAutoPugins = discover[AutoPlugin](AutoPlugins) + val allAutoPlugins = new DetectedModules(defaultAutoPlugins ++ detectedAutoPugins.modules) + new DetectedPlugins(discover[Plugin](Plugins), discover[AutoImport](AutoImports), allAutoPlugins, discover[Build](Builds)) } /** Discovers the sbt-plugin-related top-level modules from the provided source `analysis`. 
*/ diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index a44598e94..7604c9d27 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -203,10 +203,11 @@ object Project extends ProjectExtra } // TODO: add parameter for plugins in 0.14.0 + // TODO: Modify default settings to be the core settings, and automatically add the IvyModule + JvmPlugins. def apply(id: String, base: File, aggregate: => Seq[ProjectReference] = Nil, dependencies: => Seq[ClasspathDep[ProjectReference]] = Nil, - delegates: => Seq[ProjectReference] = Nil, settings: => Seq[Def.Setting[_]] = defaultSettings, configurations: Seq[Configuration] = Configurations.default, + delegates: => Seq[ProjectReference] = Nil, settings: => Seq[Def.Setting[_]] = Nil, configurations: Seq[Configuration] = Nil, auto: AddSettings = AddSettings.allDefaults): Project = - unresolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Plugins.empty, Nil) + unresolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Plugins.empty, Nil) // Note: JvmModule/IvyModule auto included... 
/** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not.*/ def validProjectID(id: String): Option[String] = DefaultParsers.parse(id, DefaultParsers.ID).left.toOption @@ -243,6 +244,7 @@ object Project extends ProjectExtra new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto, plugins, autoPlugins) with Project } + @deprecated("0.13.2", "Use Defaults.coreDefaultSettings instead, combined with AutoPlugins.") def defaultSettings: Seq[Def.Setting[_]] = Defaults.defaultSettings final class Constructor(p: ProjectReference) { diff --git a/main/src/main/scala/sbt/plugins/GlobalModule.scala b/main/src/main/scala/sbt/plugins/GlobalModule.scala new file mode 100644 index 000000000..570cbc80f --- /dev/null +++ b/main/src/main/scala/sbt/plugins/GlobalModule.scala @@ -0,0 +1,19 @@ +package sbt +package plugins + +import Def.Setting + +/** + * Plugin for core sbt-isms. + * + * Can control task-level paralleism, logging, etc. + */ +object GlobalModule extends AutoPlugin { + // We must be explicitly enabled + def select = Plugins.empty + + override lazy val projectSettings: Seq[Setting[_]] = + Defaults.coreDefaultSettings + override lazy val globalSettings: Seq[Setting[_]] = + Defaults.globalSbtCore +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/plugins/IvyModule.scala b/main/src/main/scala/sbt/plugins/IvyModule.scala new file mode 100644 index 000000000..6ce0d9a9d --- /dev/null +++ b/main/src/main/scala/sbt/plugins/IvyModule.scala @@ -0,0 +1,24 @@ +package sbt +package plugins + +import Def.Setting + +/** + * Plugin that enables resolving artifacts via ivy. 
+ * + * Core Tasks + * - `update` + * - `makePom` + * - `publish` + * - `artifacts` + * - `publishedArtifacts` + */ +object IvyModule extends AutoPlugin { + // We must be explicitly enabled + def select = GlobalModule + + override lazy val projectSettings: Seq[Setting[_]] = + Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings + override lazy val globalSettings: Seq[Setting[_]] = + Defaults.globalIvyCore +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/plugins/JvmModule.scala b/main/src/main/scala/sbt/plugins/JvmModule.scala new file mode 100644 index 000000000..6dd95d9c0 --- /dev/null +++ b/main/src/main/scala/sbt/plugins/JvmModule.scala @@ -0,0 +1,35 @@ +package sbt +package plugins + +import Def.Setting + +/** A plugin representing the ability to build a JVM project. + * + * Core tasks/keys: + * - `run` + * - `test` + * - `compile` + * - `fullClasspath` + * Core configurations + * - `Test` + * - `Compile` + */ +object JvmModule extends AutoPlugin { + // We must be explicitly enabled + def select = IvyModule + + override lazy val projectSettings: Seq[Setting[_]] = + Defaults.runnerSettings ++ + Defaults.paths ++ + Classpaths.jvmPublishSettings ++ + Classpaths.jvmBaseSettings ++ + Defaults.projectTasks ++ + Defaults.packageBase ++ + Defaults.compileBase ++ + Defaults.defaultConfigs + override lazy val globalSettings: Seq[Setting[_]] = + Defaults.globalJvmCore + + override def projectConfigurations: Seq[Configuration] = + Configurations.default +} \ No newline at end of file From 01bb7ce2fdb3cb73742ca4988e4a2414a48b8ff2 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 5 Mar 2014 22:59:30 -0500 Subject: [PATCH 100/148] Remove defaultSettings usage from the docs. Given the addition/promotion of AutoPlugins, remove the references to the previously necessary Defaults.defaultSettings method. 
--- .../Examples/Full-Configuration-Example.rst | 2 +- src/sphinx/Getting-Started/Full-Def.rst | 2 +- src/sphinx/faq.rst | 18 +++++++++++------- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/sphinx/Examples/Full-Configuration-Example.rst b/src/sphinx/Examples/Full-Configuration-Example.rst index c84bb5a49..132dad7e0 100644 --- a/src/sphinx/Examples/Full-Configuration-Example.rst +++ b/src/sphinx/Examples/Full-Configuration-Example.rst @@ -16,7 +16,7 @@ into multiple files. val buildVersion = "2.0.29" val buildScalaVersion = "2.9.0-1" - val buildSettings = Defaults.defaultSettings ++ Seq ( + val buildSettings = Seq ( organization := buildOrganization, version := buildVersion, scalaVersion := buildScalaVersion, diff --git a/src/sphinx/Getting-Started/Full-Def.rst b/src/sphinx/Getting-Started/Full-Def.rst index 5c104e2b4..0d1ac25bd 100644 --- a/src/sphinx/Getting-Started/Full-Def.rst +++ b/src/sphinx/Getting-Started/Full-Def.rst @@ -113,7 +113,7 @@ The following two files illustrate. First, if your project is in lazy val root = Project(id = "hello", base = file("."), - settings = Project.defaultSettings ++ Seq(sampleKeyB := "B: in the root project settings in Build.scala")) + settings = Seq(sampleKeyB := "B: in the root project settings in Build.scala")) } Now, create `hello/build.sbt` as follows: diff --git a/src/sphinx/faq.rst b/src/sphinx/faq.rst index 29738dab8..4ce8645f8 100644 --- a/src/sphinx/faq.rst +++ b/src/sphinx/faq.rst @@ -444,24 +444,28 @@ before it is initialized with an empty sequence. settings = Seq( libraryDependencies += "commons-io" % "commons-io" % "1.4" % "test" ) - ) + ).disablePlugins(plugins.IvyModule) } -To correct this, include the default settings, which includes -`libraryDependencies := Seq()`. +To correct this, include the IvyModule plugin settings, which includes +`libraryDependencies := Seq()`. So, we just drop the explicit disabling. 
:: - settings = Defaults.defaultSettings ++ Seq( - libraryDependencies += "commons-io" % "commons-io" % "1.4" % "test" - ) + object MyBuild extends Build { + val root = Project(id = "root", base = file("."), + settings = Seq( + libraryDependencies += "commons-io" % "commons-io" % "1.4" % "test" + ) + ) + } A more subtle variation of this error occurs when using :doc:`scoped settings `. :: // error: Reference to uninitialized setting - settings = Defaults.defaultSettings ++ Seq( + settings = Seq( libraryDependencies += "commons-io" % "commons-io" % "1.2" % "test", fullClasspath := fullClasspath.value.filterNot(_.data.name.contains("commons-io")) ) From ea8c0b32a71d2d63a55809bd98666c5cf1bd05c4 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Thu, 6 Mar 2014 13:57:07 -0500 Subject: [PATCH 101/148] Update documentation for AutoPlugins. * Add notes about AutoPlugins vs. RootPlugins in Plugins section * Modify best practices section to recomend using AutoPlugins. * Modify Using-Plugins section in getting started to denote auto plugins. --- .../Extending/Plugins-Best-Practices.rst | 18 ++--- src/sphinx/Extending/Plugins.rst | 67 +++++++++++++++---- src/sphinx/Getting-Started/Using-Plugins.rst | 37 ++++++++-- 3 files changed, 92 insertions(+), 30 deletions(-) diff --git a/src/sphinx/Extending/Plugins-Best-Practices.rst b/src/sphinx/Extending/Plugins-Best-Practices.rst index 6b4d80564..c7adb5655 100644 --- a/src/sphinx/Extending/Plugins-Best-Practices.rst +++ b/src/sphinx/Extending/Plugins-Best-Practices.rst @@ -22,20 +22,12 @@ Don't use default package Users who have their build files in some package will not be able to use your plugin if it's defined in default (no-name) package. -Avoid overriding `settings` ------------------------------ +Avoid older `sbt.Plugin` mechanism +---------------------------------- -sbt will automatically load your plugin's `settings` into the build. -Overriding `val settings` should only be done by plugins intending to -provide commands. 
Regular plugins defining tasks and settings should -provide a sequence named after the plugin like so: - -:: - - val obfuscateSettings = Seq(...) - -This allows build user to choose which subproject the plugin would be -used. See later section for how the settings should be scoped. +sbt has deprecated the old `sbt.Plugin` mechanism in favor of `sbt.AutoPlugin`. +The new mechanism features a set of user-level controls and dependency declarations +that clean up a lot of long-standing issues with plugins. Reuse existing keys ------------------- diff --git a/src/sphinx/Extending/Plugins.rst b/src/sphinx/Extending/Plugins.rst index 18cae5ee6..3d510a8ba 100644 --- a/src/sphinx/Extending/Plugins.rst +++ b/src/sphinx/Extending/Plugins.rst @@ -176,6 +176,10 @@ It is recommended to explicitly specify the commit or tag by appending it to the lazy val assemblyPlugin = uri("git://github.com/sbt/sbt-assembly#0.9.1") +One caveat to using this method is that the local sbt will try to run the remote plugin's build. It +is quite possible that the plugin's own build uses a different sbt version, as many plugins cross-publish for +several sbt versions. As such, it is recommended to stick with binary artifacts when possible. + 2) Use the library ~~~~~~~~~~~~~~~~~~ @@ -221,22 +225,25 @@ To make a plugin, create a project and configure `sbtPlugin` to `true`. Then, write the plugin code and publish your project to a repository. The plugin can be used as described in the previous section. -A plugin can implement `sbt.Plugin`. The contents of a Plugin -singleton, declared like `object MyPlugin extends Plugin`, are +A plugin can implement `sbt.AutoImport`. The contents of an AutoImport +singleton, declared like `object MyPlugin extends AutoImport`, are wildcard imported in `set`, `eval`, and `.sbt` files. Typically, this is used to provide new keys (SettingKey, TaskKey, or InputKey) or core methods without requiring an import or qualification.
-In addition, a `Plugin` can implement `projectSettings`, `buildSettings`, and `globalSettings` as appropriate. -The Plugin's `projectSettings` is automatically appended to each project's settings. +In addition, a plugin can implement the `AutoPlugin` class. This has additional features, such as + +* Specifying plugin dependencies. +* Specifying `projectSettings`, `buildSettings`, and `globalSettings` as appropriate. + +The AutoPlugin's `projectSettings` is automatically appended to each project's settings, when its dependencies also exist on that project. +The `select` method defines the conditions by which this plugin's settings are automatically imported. The `buildSettings` is appended to each build's settings (that is, `in ThisBuild`). The `globalSettings` is appended once to the global settings (`in Global`). These allow a plugin to automatically provide new functionality or new defaults. One main use of this feature is to globally add commands, such as for IDE plugins. Use `globalSettings` to define the default value of a setting. -These automatic features should be used judiciously because the automatic activation generally reduces control for the build author (the user of the plugin). -Some control is returned to them via `Project.autoSettings`, which changes how automatically added settings are added and in what order. Example Plugin -------------- @@ -258,16 +265,18 @@ An example of a typical plugin: :: import sbt._ - object MyPlugin extends Plugin + object MyPlugin extends AutoPlugin { + // Only enable this plugin for projects which are JvmModules.
+ def select = sbt.plugins.JvmModule + // configuration points, like the built in `version`, `libraryDependencies`, or `compile` // by implementing Plugin, these are automatically imported in a user's `build.sbt` val newTask = taskKey[Unit]("A new task.") val newSetting = settingKey[String]("A new setting.") - // a group of settings ready to be added to a Project - // to automatically add them, do - val newSettings = Seq( + // a group of settings that are automatically added to projects. + val projectSettings = Seq( newSetting := "test", newTask := println(newSetting.value) ) @@ -289,7 +298,17 @@ A build definition that uses the plugin might look like: newSetting := "example" -Example command plugin + +Root Plugins +------------ + +Some plugins should always be explicitly enabled on projects. Sbt calls these "RootPlugins", i.e. plugins +that are "root" nodes in the plugin depdendency graph. To define a root plugin, just extend the `sbt.RootPlugin` +interface. This interface is exactly like the `AutoPlugin` interface except that a `select` method is not +needed. + + +Example command root plugin ---------------------- A basic plugin that adds commands looks like: @@ -310,9 +329,9 @@ A basic plugin that adds commands looks like: import sbt._ import Keys._ - object MyPlugin extends Plugin + object MyPlugin extends RootPlugin { - override lazy val settings = Seq(commands += myCommand) + override lazy val projectSettings = Seq(commands += myCommand) lazy val myCommand = Command.command("hello") { (state: State) => @@ -327,6 +346,28 @@ included in one plugin (for example, use `commands ++= Seq(a,b)`). See :doc:`Commands` for defining more useful commands, including ones that accept arguments and affect the execution state. +For a user to consume this plugin, it requires an explicit include via the `Project` instance. +Here's what their local sbt will look like. 
+ +`build.sbt` + +:: + + val root = Project("example-plugin-usage", file(".")).setPlugins(MyPlugin) + + +The `setPlugins` method allows projects to explicitly define the `RootPlugin`s they wish to consume. +`AutoPlugin`s are automatically added to the project as appropriate. + +Projects can also exclude any type of plugin using the `disablePlugins` method. For example, if +we wish to remove the JvmModule settings (`compile`,`test`,`run`), we modify our `build.sbt` as +follows: + +:: + + val root = Project("example-plugin-usage", file(".")).setPlugins(MyPlugin).disablePlugins(plugins.JvmModule) + + Global plugins example ---------------------- diff --git a/src/sphinx/Getting-Started/Using-Plugins.rst b/src/sphinx/Getting-Started/Using-Plugins.rst index 5dfc05db9..57dcc0a1d 100644 --- a/src/sphinx/Getting-Started/Using-Plugins.rst +++ b/src/sphinx/Getting-Started/Using-Plugins.rst @@ -34,8 +34,36 @@ Adding settings for a plugin ---------------------------- A plugin can declare that its settings be automatically added, in which case you don't have to do anything to add them. -However, plugins often avoid this because you wouldn't control which projects in a :doc:`multi-project build ` would use the plugin. -The plugin documentation will indicate how to configure it, but typically it involves adding the base settings for the plugin and customizing as necessary. + +As of sbt 0.13.2, there is a new :doc:`auto-plugins <../DetailedTopics/AutoPlugins>` feature that enables plugins +to automatically, and safely, ensure their settings and dependencies are on a project. Most plugins should have +their default settings automatically, however some may require explicit enablement. + +If you're using a plugin that requires explicit enablement, then you have to add the following to your +`build.sbt` :: + + lazy val util = project.setPlugins(ThePluginIWant) + +Most plugins document whether they need to be explicitly enabled.
If you're curious which plugins are enabled +for a given project, just run the `plugins` command on the sbt console. + +For example :: + + > plugins + In file:/home/jsuereth/projects/sbt/test-ivy-issues/ + sbt.plugins.IvyModule: enabled in test-ivy-issues + sbt.plugins.JvmModule: enabled in test-ivy-issues + sbt.plugins.GlobalModule: enabled in test-ivy-issues + + +Here, the plugins output is showing that the sbt default plugins are all enabled. Sbt's default settings are provided via three plugins: + +1. GlobalModule: Provides the core parallelism controls for tasks. +2. IvyModule: Provides the mechanisms to publish/resolve modules. +3. JvmModule: Provides the mechanisms to compile/test/run/package Java/Scala projects. + + +However, older plugins often required settings to be added explicitly, so that :doc:`multi-project build ` could have different types of projects. The plugin documentation will indicate how to configure it, but typically for older plugins this involves adding the base settings for the plugin and customizing as necessary. For example, for the sbt-site plugin, add :: @@ -91,9 +119,10 @@ To create an sbt plugin, 1. Create a new project for the plugin. 2. Set `sbtPlugin := true` for the project in `build.sbt`. This adds a dependency on sbt and will detect and record Plugins that you define. - 3. (optional) Define an `object` that extends `Plugin`. The contents of this object will be automatically imported in `.sbt` files, so ensure it only contains important API definitions and types. + 3. Define an `object` that extends `AutoPlugin` or `RootPlugin`. The contents of this object will be automatically imported in `.sbt` files, so ensure it only contains important API definitions and types. 4. Define any custom tasks or settings (see the next section :doc:`Custom-Settings`). - 5. Collect the default settings to apply to a project in a list for the user to add.
Optionally override one or more of Plugin's methods to have settings automatically added to user projects. + 5. Collect the default settings to apply to a project in a list for the user to add. Optionally override one or more of `AutoPlugin`'s methods to have settings automatically added to user projects. + 6. (Optional) For non-root plugins, declare dependencies on other plugins by overriding the `select` method. 6. Publish the project. There is a :doc:`community repository ` available for open source plugins. For more details, including ways of developing plugins, see :doc:`/Extending/Plugins`. From 548b38c7f82232d86928dede7989a96a2197e12f Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 7 Mar 2014 08:52:05 -0500 Subject: [PATCH 102/148] Add note about not exposing fine-grained autoplugin inclusion controls. AddSettings should only expose coarse-grained features of AutoPlugins or else the Logic we use to ensure safe addition completely breaks down. Leaving it in the code as an escape hatch if we get desperate, but we need an alternative for controlling ordering later. --- main/src/main/scala/sbt/AddSettings.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/main/src/main/scala/sbt/AddSettings.scala b/main/src/main/scala/sbt/AddSettings.scala index ceb0bc751..9677af329 100644 --- a/main/src/main/scala/sbt/AddSettings.scala +++ b/main/src/main/scala/sbt/AddSettings.scala @@ -19,7 +19,12 @@ object AddSettings private[sbt] final object ProjectSettings extends AddSettings /** Adds all settings from autoplugins. */ - val autoPlugins: AddSettings = new AutoPlugins(const(true)) + val autoPlugins: AddSettings = new AutoPlugins(const(true)) // Note: We do not expose fine-grained autoplugins because + // it's dangerous to control at that level right now. + // Leaving the hook in place in case we need to expose + // it, but most likely it will remain locked out + // for users with an alternative ordering feature + // in place.
/** Settings specified in Build.scala `Project` constructors. */ val projectSettings: AddSettings = ProjectSettings From 893794a8ba988ce263e71b51e5e0de9e99d7a5be Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 10 Mar 2014 10:44:38 -0400 Subject: [PATCH 103/148] Fix overwrite default to true in deprecated method calls. We cannot break existing users, but we can deprecate the improper usage. This is part #2 of the workaround for #1156. This ensures that users will stop using the legacy methods after 0.13.2 is out. --- main/src/main/scala/sbt/Defaults.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 9657c3644..4f5b8b5c0 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -1208,8 +1208,9 @@ object Classpaths def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) = new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging) + @deprecated("0.13.2", "Previous semantics allowed overwriting cached files, which was unsafe. 
Please specify overwrite parameter.") def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging.Value): PublishConfiguration = - publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = false) + publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = true) def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly, overwrite: Boolean = false) = new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging, overwrite) From 041bc4bf01d049e30ac249c62f2039b8f224cedc Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 10 Mar 2014 12:45:35 -0400 Subject: [PATCH 104/148] Add documentation for AddSettigns and load ordering. * Add Architecture setting. * Cover how settings are loaded and ordered * Show basic controls on setting ordreing. --- .../Architecture/Setting-Initialization.rst | 97 ++++++++++++++++++ src/sphinx/Architecture/index.rst | 13 +++ .../settings-initialization-load-ordering.png | Bin 0 -> 83259 bytes src/sphinx/Detailed-Topics/index.rst | 1 + 4 files changed, 111 insertions(+) create mode 100644 src/sphinx/Architecture/Setting-Initialization.rst create mode 100644 src/sphinx/Architecture/index.rst create mode 100644 src/sphinx/Architecture/settings-initialization-load-ordering.png diff --git a/src/sphinx/Architecture/Setting-Initialization.rst b/src/sphinx/Architecture/Setting-Initialization.rst new file mode 100644 index 000000000..d636e598c --- /dev/null +++ b/src/sphinx/Architecture/Setting-Initialization.rst @@ -0,0 +1,97 @@ +====================== +Setting Initialization +====================== + +This page outlines the mechanisms by which sbt loads settings for a particular build, including the hooks where +users can control the ordering of everything. 
+ +As stated elsewhere, sbt constructs its initialization graph and task graph via ``Setting[_]`` objects. A setting +is something which can take the values stored at other Keys in the build state, and generates a new value for +a particular build key. Sbt converts all registered ``Setting[_]`` objects into a giant linear sequence and +*compiles* them into the a task graph. This task graph is then used to execute your build. + +All of sbt's loading semantics are contained within the `Load.scala <../../sxr/sbt/Load.scala.html>` file. It is approximately the following: + +.. image:: settings-initialization-load-ordering.png + +The blue circles represent actions happening when sbt loads a project. We can see that sbt performs the following actions in load: + +1. Compile the user-level project (``~/.sbt//``) + a. Load any plugins defined by this project (``~/.sbt//plugins/*.sbt`` and ``~/.sbt//plugins/project/*.scala``) + b. Load all settings defined (``~/.sbt//*.sbt`` and ``~/.sbt//plugins/*.scala``) +2. Compile the current project (``/*.sbt``) +4. All local configurations (``build.sbt``) + + + +Controlling Initialization +========================== + +The order which sbt uses to load settings is configurable at a *project* level. This means that we can't control +the order of settings added to Build/Global namespace, but we can control how each project loads, e.g. plugins and ``.sbt`` files. +To do so, use the ``AddSettings`` class :: + + + import sbt._ + import Keys._ + + import AddSettings._ + + object MyOwnOrder extends Build { + // here we load config from a txt file. + lazy val root = project.in(file(".")).autoSettings( autoPlugins, projectSettings, sbtFiles(file("silly.txt")) ) + } + +In the above project, we've modified the order of settings to be: + +1. All AutoPlugin settings. +2. All settings defined in the ``project/Build.scala`` file (shown above). +3. All settings found in the ``silly.txt`` file. 
+ +What we've excluded: + +* All settings from the user directory (``~/.sbt/``) +* All ``*.sbt`` settings. + +The AddSettings object provides the following "groups" of settings you can use for ordering: + +``autoPlugins`` + All the ordered settings of plugins after they've gone through dependency resolution +``projectSettings`` + The full sequence of settings defined directly in ``project/*.scala`` builds. +``sbtFiles(*)`` + Specifies the exact setting DSL files to include (files must use the ``.sbt`` file format) +``userSettings`` + All the settings defined in the user directory ``~/.sbt//``. +``defaultSbtFiles`` + Include all local ``*.sbt`` file settings. + + +*Note: Be very careful when reordering settings. It's easy to accidentally remove core functionality.* \ No newline at end of file diff --git a/src/sphinx/Architecture/index.rst b/src/sphinx/Architecture/index.rst new file mode 100644 index 000000000..d20bce232 --- /dev/null +++ b/src/sphinx/Architecture/index.rst @@ -0,0 +1,13 @@ +============== + Architecture +============== + +This is the fledgeling set of documentation about the Architecture of sbt. This will cover all the core components of +sbt as well as the general notion of how they all work together. This documentation is suitable for those who wish to +have a deeper understanding of sbt's core, but already understand the fundamentals of ``Setting[_]``, ``Task[_]`` and +constructing builds. + +.. 
toctree:: + :maxdepth: 2 + + Setting-Initialization \ No newline at end of file diff --git a/src/sphinx/Architecture/settings-initialization-load-ordering.png b/src/sphinx/Architecture/settings-initialization-load-ordering.png new file mode 100644 index 0000000000000000000000000000000000000000..82055d7d7221bf9577ed747456c8ea1c75de33ac GIT binary patch literal 83259 zcmZ^~c{r4B*gpP@vG0|A8)c2`d)6otC1uT?Ez8(rCWaAGwh?9DN{D1%GZ(%{xS~QewlmGzGXy3m1005vk03aG5 zCk6jv<;Pk90BAt_<_!bH_*y+plHnq9ukOWI54E@;jo`;8d3Ro4{P0Bl`K{v8Bpiii zl=Xo1bA@y zsv<{3Ti4w2Y&dXzE;+n%Z|z`I%75eFta!D-VRzi+^Rg|zc;<_0hF~18_W$qC2d@!q zp-6n^A#UjxUf{paJ#cS!zZSpqj^a7re}DPSI2RU~+JB@LFcF;@-}$T;(W?Mh=2pMs z)qZd`^s!JzAvLaYi6J$)g$?Z7##KF5*iY>KV$X!?=uu=@C$|5&GYQx zU!Z-r)}biF;bBL(+3=%ueCN1|V|ex!jrR40ok7=dxfmlk(Y9(O7oDlmqCn3OKzqMD0T^8VLvc@^b^OZ^c7ddkJUeaXHz z0toioituH;w8s6DubmHRk9PCsr_VNcVqi~M6n#fwCspA$4qFT1p17U>(N{5u zyj~s=a6AgMJ&`r|JD)79oyi@4TsXBBj4vw0Z(N(@^j<5O!6?mr@atFdcV>^Z4E)i& z|66Q|VW#A`VSm@X=vCv+<~(7%J;SMdBj7BYFeujetw)=2@?}T4f zbcm-!!>U`IFzd#ouvr`dy6l*ya8TWjx&F&9S_5-nA+Y?0oytEuEgV^1r_w#Sld|M? 
z?>4xxMApfG3HZCKK*HrxnXFZNpC^LpcSEL1xP_wcY(n2Fb;dTsp& z#U?AZs^QdVjL1cSZMzjK5!A!8-HDZK?AabAI3D}%znqT@voGR#w=gleWAVF4E@Q#w z85KCxf4lI(`BIif>BlfrvtQOL_>bfmJgeiqK=-y*@A-bw29*Y6E0oanH1(%e#=htA zTAvlUQ8%uJb_VtPfcQ=&EmeOrfmu2op2RjdDjuTOjHxPp5UESTIhXw%v=vhUR7+g*Z+Bo%lmM@rl{z& zo)(v6z7YQ2+y_S;TncW^>`w^dEW`09N;OFG2kqJ6V)m|T$wuu$?Stn-6aGQ>w`2>x zB~{;DZM>k0U8>rz%nZ;2u4%^&XRU&S4CAIN>wDdYMRJ;mWI5@2yh5YZW+#NoU&cUE*>Ts}F3nj$AR zWiP8KWc!9hST%WO2J>RUtMe~X1lqQggTD1_$Mts$$WD*A0OoijN}y`h%OmKM*WdMA zLg=v{IlgjCTzoG0tipQ(0!c2s7R52I$YqkKCXw+|skW8qv349WF5|QR=J2Sh6*+Dp zg~%s-I&kh|z)*$xP<^rcEBBh+f(0dnNau|}0@>=%cDc!^s{V2_Bfj==TYoMwX@#Op zQxEOOe|nV$9e~-8JWd=xMmxo$mMtZf-JX2ZQ7;pj z`9b$KS8b%G^lqoh!#)+BVD;1X4F9d->phBGGF}KTr68y7t?bT`oxS>i_3(JSQokoh zi|rxt>_nietyP=Lj4)z|LSZ>*{2it>RQAZ*VVUH#HdsxyFgm6v?>vb7dh)tm=Nq>d zVuoq6>yU^3Veoan#H7lIYOPw1J${tI?KlHG;OfzO;CuF`)D0p&t*^#lBCU57aGV~9 z0g~F)K6C1X{Z3}iNOhH~kumvGUuDEmz%x)GJJydK3uIsFY=idNVjG<@yn^abL9_Mt zf!7vlN9ms^)vsw?mR!B$?J&Q9tv5~61@GVknOFDw6AIx^RktQoD)i)kz2OZq#tE-t zH2bcrDItF&J880jmshtmeZESgLcy7J0@O)F=!Uy0-T^;9;f&=5^ zudbW5n@iEJmw)l?5uUR$^BJa?Sq15o@WeA-5Ut-Wb^NI(Vtx^GI88M?Ad@^pT~WazHk5j)+s?Wz>gu zi2MxU6g#0nd_F2(=BkS@k62!!$34ok*>4YW9Fhx<-hIby60+}6+tkQ(h5n)m`P{KNOhqF!g1sV9~c^ zNuIYzyvom)x1%+OPv=}LkgGo7Ry{enH1FbG%M<9kEJiFUX-c8gGBaHx<8ZYkt3dh! 
zbLef%H?4SH?H}a0x{RO?Q-Ln)jrp9w#Ju>26#GbECR%i&Usx z8zZvyGdYa_?{(@${-G~t8WQ0!MP}2afuvy!F=fBM4b!P5-nU6OmDtR#i8nM|rvR$1 zm>4)$p;j(&n?{kzdv!Cg8hm$DYUFU~W}kaaa_7Vw@^Ov1HlnwcNZfm5OlUym$suFj zzx79Z!Q{!-!hHvwZ0A#rm9pp^lM6BUPUb+Shj;6Xil+A{FQqpfHnXAvwBro&0qSex z*K2ShE{8->RR)+DI`tQDPBWT7avieUx_&BdJt2k=)VT(s`C8r2O8 z{(`(i%R-rPqe)6kG0vF(lP-P3NvexFX7f-m@jDp34P*eUX$|>;3ydN8j60DZ~`SAfD{v)NE&)E1J#phqIl<>-!5&2w93D`-R zk~iu6k+`aDPx7HRqt{E^SuKVC0o!fQagN1dKyD(w5~c4}+ag`5X-Trk;37%_JiJ`{ z>&>M|%k`Dhx&%t{zk%`ADpymnF27E>yYg`jjCSfhMo0VloopyeI=<E&J)zhnkk(O4~b<6yJ_<(u7Ts(-<4AcbAiwvPV@%ytMKe@k@bDznF z9UGPv_JU|$yC{z`sq&O1?7Bg`;Ms5dnd=*kP^V;{0I{Y z^7E0RUOzH~ciI(BGL?3)Do&}sdK{=b7|im|?j^rkgg%^qa2NVJEc2|OySmD>$)TM~=;dyvgJt40phFK)79<_d0m zdN3W5*^nc%dBw)vFuTTc{8Z*!lqnFf!D*#i)L)SwANMqH%;+P$uX^b;HE!GHIA{gW z95cT0DtfOq^K7zx;;?%8ZTp0B3C5l!7%4d40`r+FHKMhG9L3ET#(kX%i_pE&R)u?VBSuTJWI-LWPn9!aU z-i}B>xfk6yic~cjapS-d9;R;S9YU?4`)abbg!=K%dO?a~iJlbZe-lD9+ddAL9kN`) z20;u~CC0^C*m7;WL_ZL%3UQ0qfj zRjODD`k*YZlx{v()d@ub89k)8)aYy3^%xVanviL|)OVmj=DpIWa@>BpEju#mm-+1U zW2uKq5wo|`w&|~p?8i}9uRRUhps=7_8Agf^kwh#gL7_BTq#lF?PsN@75V$S(jQdN~ zZMw}G1{TiT;)nTCfa~RM;6o#);7-#JAa71d0oWgjj8ywjq9ONpj}AYNaO`lr`w?mi z{eqJzVPA*NdL2*d(8WLfhiz7? 
z)pWS1<#Y5mK}N?KSK*G5ww4jx=~XJrv+xMk3 z+rKO=KFkJ#*U*AGi3lxkLirL8%4$Yf!e>YS7v||5-OYxmq20C~WRJX%pT%kv5 zk`<8M3|#MQ?5e{wJ#Fxu&1s8VgO}iSn+ir z!{1)VOt1vG2v~)PLPeB9*7nY6k60(mJBp_-7WXQVNB7*W%N)#M{6@JuvMPiD*Mm2A z^P=G){8*QYOV2+x!7?8!(Es|Rsu`jbG*bk4II3sCRoAvYF;(rb z(@A=J_Ek`Qp~0D(6-jJA$uPstEF8WV5u!nlg}Xd%_kypbj6VJOf(Qc^4yD)x77ZC} z=5@)`?vE()Pq5(#M^8-%H@BkTavrd?UV~}LyRtY?G5C?7gT4D$UyNKd zS`880Y@4T2_3G4L<_=9-E*v#vd->Bo_nFu-#Ss^gyE{Z2&H6$K`c=tM$MY#8nu2cG z)8usuAvLj)2dGNraF`^wQOsK3V18jSNCu1e^aEk#17weWzXI96Kco^aGjqMg`+>EO zjSsK8tX0RlI9WC;YC-400Z?K^Pqo6E#tOt>IVJ+Qtd&~V{6PBPSr1f~Rub8^1i42Q z--aF^fN~Itzx_PMWfYxoP9OgE7x{C!_z^IQ{jyX~vn>l5uIOq++@cUzLWD?!+z>w2 z=zD_T$oNqNY_dA0rz1SJ!;&6NBv?hZGmUQdHe--QEcBI1CT&k_e^UYXgumO5(0tsW z1-vyvO11*LEvbRjyh$>%Mw&W;+D*teqCd(?xhXVyD4!4X2~AHT$+EXOe3qaOwBrsX zfJ>6Pf*veeuB{)b1tHd3o}Jx0dr)iC?VJKH34nuZc_NHCK%AfnZBFln&_a?3rBlT$ zcxIr^Yg+3K^x8QD!bmH4)h7NR#$7H9CEeT~fC|pPj#4Uqqb$qPLuM5?{lxKzW0xnO zZ#?&<@lbGe?`|*IdmweOFlB7M0hp~gp|-=<77>XTF9&S15xJ`Xv=|-#yBA{I4q9J# z%d`(>7~L!!@7emqWBBteZ$1^^nlVa$vokrGh^_>Z*Cbie{Xhd((3p0A!Rxuk$3A0{ zakBeD@>=O1iQt!q4uB*S!NE3AUu?Vr@3cz$FJDf)5>G z+~01oCy+y58JzM-kE-=r4{X^FQgVQYrE`iT>pGEjh}fZ4ow)bFL%>y&Tv6N=S!zD$Tto{01u<33 z!ra%6`$B{}y^gEu6AeO<%K*8znQR6^xh9@rX=t8mEe@1nred!dSW}Ykj4W3^MF%@? 
zOEW(r+Xq1+S7qA6NB(NqFrPtExTn&x1TOLKfr9?{@#DqNS0iJNwPWFD8wIG)8IsqM_p~I0MGx`njdvo61FUBU`e*F8I(T;G=n-XI&4F_~-TinY9q_j0RgNa+3% zNx6L6p;I^QBGfu&C(&nX`8nUKW8Y(;u~U{vh_zQ9_)moX!3b&WvL-Qp`4bU5!&S99 zHI;;^$ju`nJkCY0p%~d#o7>}UV8^%;1B`A(wRkwX@L==xMcP)I&4`@hR8+u=Ed+G` zEoTmOp=87LT(x|@1te(5C9_Y=f)H2NtL#C_#%dB?E?*z8ru@_eE^HE$6J4K^luJw8 zhQzATFXMk-#~D$iU=8~yYP)1cPOfk?jHrapsz-UY&(kQjlTNSgp`O)~kO8}0+lHhO zkWM(%wvrb+nU!CX2tXO8B36HYLREtdu%KTXGF$M29y^Ksec18+@~mc}d(GjuvL8v-w(e z{94sv>B^*Ax3Xy?;k~42GlC$0G<{a08hXcD1=jJ)%;Z{RtSvgh7rbFHIh?yB_X!-~ z?|OTJMoEtbtPa~s%@x_bS$v}el1iL+9kGamU<cu1-GJ2$xl-|K3`a6ei7Fx1g0j7ZYuue5ic}P_1A|vVh2CMH|z9a zumA6fyV=ezWb4u9-+0X|wfI*l=5r~zMyE~qU*3P>l^YC?cplb$rIV5!IpaIe?UGua z@U1=j{1urhuyz}k8oWJUo12EmT{s@IFa9nbo^BXVX|#d z+aziFltp2TM^C;J+tAp5YQcDQ!Sq}wE7Wxb3yw~|p&cu8`m_CC?(O|7xrdIY&_ampa> zYa9>4RIlxWX+$s7@rmi(yS?sLjCmt>j7XX#P3>72Yx*-)%4q`1Hx}i}gB(}0@BW>| zEiRoj9FTizKy4F;7M}faa1K{!~Ddftf88$;Vk8(PWt!`AN@>|0E zDsIo1HX!`hJV@h#%fGl4wECNrGLTPONReRUgE5S1>kz}*9H2Z`SKTQz?aC}^y&XiX ziVMmOeKTO4^TL0Zd*h%{|0pMhxDl+((l>=fJS}qX(IH?EXxC->xj9 zTEJ@GGyr6IBJ@C1dE8VY<1)DO4O9i(0gr7Y>^8Rv|0-C8pJhL{0@cnth-AUS@>WU* z8x4$tEJ1hf-NlLyyD6#n!Y|9@=L^dxZn*C%?-}iGY~6M_08@bfBbLbkP-5*mpN3(Y zrX5^=*mocH1W~Y5eJ!G~-qYp52Q4+8_@}OVjmMfq^~<_-+^>o@v-VWWf<_$RDd$x0 zZw+!w{`j3NRO?C18<6=miS8bcD8%pCu=H<&kQuXqu(o(mqm{)IU)lf5zP3~%4@0*R z(a652p0<3emapr71Y@CxcVBtgq|}|jb`X{Pddgz?mUTHhhLG4oD<>mxrCBct8#8Gdmb)8`6nt!I3h z?rX-9ai+xxR4iLofaTwrf|I>Y`q*`WLV(YOE?!IbtrO#L+|G;Az8$b~R0+0-_>#l$ zxr+=-=>n2Me@Dz0yl~~ZK-c2!IE?^7n(Lcbi~ZL+7xf7BE?m*)WYJ=xorP@khH=pZ zgIEJ$_iMS%dQsyw{h@{j+eL++yB_Hb^HU)HS6SD-`_jwM%h&z~L{%16*QQ>hBB}dt|9Icl zv0aC@y*5U;9Dgmph?&hRu9da)-TRH$c_Bi<{cz6M+EhhTUm8eUZFzdQ_H#9U)wj*m z5mwpe#x;>x`B`7|YhW;RO0)jye;cG-z%R}p^~`8E;iiHZjJ`s#E8doaglplFR_4V_ zaxLzUf9mB#F^Vs5iM^ii)Bfrl$N6~;&yF14I=Z6`(+26 z0pemkboUhz9kDf|wNHI3l|pC6rXt8Q&tqm=Vaph?+SvMUQ2w|SO-?hyF*^W+WmR=FoFHws$9j2Qlj9xVKE z!`Y=oxq`0=*|UUz-z6NrkG1{ZCFK`Z^guQx?}q))^XCca!)ZQJf3AZ(S=UHKRuA^? zj)T}ae|3|++pa7^@&?ElgTsMj^zYyP*#}sSP8nlHY~)Is-Aa$O?! 
z=Og@o{^R|R*h&o&08yTMJ1&B3_FU_CLmf`!?~dy=FgJn%`vgAdJbz7|EB=z)vy(Y) zEo)#Q&?a_jZBYl0O2zlgq+1=SAFm}CYV~SdUE3_Sy4Th#lh2DTUu|{C7H;1_p>!ns zd-l$0u%)1O%Y*l@n= zJaOtnwI&@7dJM#A88I=(N?CykvcQQu@Q+QGt+oz0z0x>kKt&3dgQsEVK#oi zdN$lU_Q*-0=G4|dqcM~QS_CMRTwIFy*^Ba-+9I|a3vaA=ozN2LH=%?f%||p3E&=_# z_=CAw*+8X)(0hZ^KSaDQZj@%~MQJH)4ck`{O5x{Z?8Nt_?_BYzUA1ssC(Fm%8Isk1 z!fPEHTD&EV>PXWtCAO)nkDJ^6+70B3U?&}9I3!^E}dN#bqNtxtnnD7dQSLnb8mf2-wSGzu9WZ;Ue_0%ZGFLJ4^}L>}+3 zhFN3%i@a%wp^iCE{)egPPLMNf2dh{DsPyCeSeF2^R5i->dkRNUi18uq&a)(#5NZ*N z;XNX{jiWvqw{3JsdoLilAX2OBxUV&lSEp~fl|+YE;W;qwHFYBWBZoM1z|2)@(>7=# zK{io|)PbULiLFEoIp&&|dHS>rX;;|8cr+%fC;%Ve?=RJRlt~pm(iq5cHdsuL#y@RM zs-^w&s@+^J#Hf46UeVi5pVWp{ynS$eL8`sFcO>bdo|~d5_Lq9*D5lgDmQ2?8aok() z)KD@{mhKPcWDWb@Vd=UX$nvMr0iKk5GwvA^nsA#a|FrfC6sJatTZIZwKY)G#9R#*0 zy&QNRCeW(+SRebvbq|x%h`X2M?@E$PT|w|A%5&nnsdkNn&V%fuCl|Enu}0Q5MEr&E z#9p(cqnahlmsO3mQ{{eM^m=A|?VLpa!Ey`xy+*2fBkGGMge_I?S!js`v(k@-_QZx; zeG_Ja#MeQ#jx4Ei6^9gYoETp`{H_Ws`;2iVdx1WW}HhV@wW-9_rx#?#m0LlAX5OUo14YGL_ z^`JrZfL|D-3{Npz)rKcuKmtB=;KeeKgzPI&@nc34EMLG!qbbZ(D9CB*Cs{5}oGP$- zFEGk%Pl#4Ty}m`na`Ui_W1rjEa^Itm#_gopHXSl+(XaY_-JG;=_qs$4-CjA7BQ^tR zF}m7WjKepI{*~!L==zB~B=@3Ee6D)j7c!h0)9f7kyLSuyhtvHaRfJ4iYd_e6*lGdh zQHf*fkr4In5I!HdKdw-X3bQ9}XccUNS{+h>CS|jO&2><0=GyXzbz>di`mX69LIx6` zpFw1S7(Ma!ipRA*_e)ZdY^yLIuXy~#=APf93)kGZ1~lcxSQPqIHhoFa8e#$ak1AX; zhl`Z8RupY#gPOBiN@oeo=Sn+hwHkEx8HkAPeB>R`+cEx}c1_CEU&8(<>dkgEVchki zv<~RuinL~Y9`pFGUtvz64vI4p?A&teMcafZDTM{Yzn#8Qm3_NhN>jg;zrA2ndGBsM{8TSm<+5$2gbNJPhvyuwDVNPKa zYYP8G^CuNhEwtB{klwI$ouiDx&#-4b68+NDY!$Ynu=TqdUE*ANc zb=B!0CE0M^a+xDuD`kTgu@y;H+O)Z{fK+ikl^TgFzh5px@IDLok>)GvAEuK zu1A>ym!OkDTJ@*%J#g)s68=T-E!w|kIZ%b2^EH1%MMmmsYzLpm9Uf-d&F#XdW{V4t3;ON2a~B0*>THz6y`PBSutqbyHD$`OEWpR)N{+ z^%cwM`;|dT2E)ULhi34TsS12f6=AY&>6=~Jxu(Us*Ir~YKswq(wcVoh)Yolwm3>ow zqs71Psn>|hySl*n$(=VJ%AnR}ddxMIKQ$QQ%HC_Jbj&J8nY&#Bv|saX*X$4j50knI z(v3oTCBj`v-5*k;G#ZUAg>N!loLebq*3V!u9@+0L4e;%oj7XXdpXh=u3EH&C`CV}f`$|wzS z9c_=sRV7?n2s=Zl?@?IE+RlRiNwCe~lOL78GN*|Dx&gHx960Ne*CgvMxjRcGL-jo~ 
zx@5EJt=n$F#i?ddt8?uWvy~Uol^oGX+tVbiCGWARPw@x#8OiQYhawG$5f{+48C3M? zQ;-ZDx6Xr1hgTRwoZO^pe{PU=;8q=uG%YXlpUPpJ{6I~G(m&CNbO2~>OG0qSf!P!rK(*mi?Bb4T*nbePH z5b)&Oj!(zE5++*3kf2|eV=85WTNYoscJi1#=xU^mYVEII*9NNKRfSC6ZukcQ-Bx5t zXP>#5=dD!_T}E#H15hXdrJ#kxK2NI{b~2EfDK9*!;;G@o?xi^~^ICR+8l_CbeDKp( z%-%Yyp9oZ{XK%SHqoP)up3WV5q}N!Q%N4;!N_lP<<>w!um}1UEkIo#(+86lsV!TM& zK_94PwvafW{}C`Cf8}(v&GIt)?kt(f6^1dg8$qZ?+_B5S4}ftp!M??68~EYs{$Unl zRg))L90j2~ac*35$+CEIZw(Q(xl+N0>XBE)k2a`0Ik;7|DZ7P@jw~Hyo@K!BS+m5% zw>$kPNUoE%&fz=^@oXq*t_r@^&3%9pm2RjYcyVx996FB(bI74={Kub?TLq~w3_6~r zEU~*Zj9lS@yD-}pIw6}}vpIL?D!UH~RsZE}p?XPLpUw+>Rz5CFUS}%5xp~LbU-x<+ zrEi>x|21R1HGE=jT;xvPfq0$W+*%))eo`mm^69<#Tv6w1^#T$c${N8nnD9-4&WcUG zD?&;s(J8FsnwTscZf=T9`kPsQ(@7S)MTW##$jHbvzS=@~rQTyt4ebd$OmVrifBA!v zO{e6H@49mAg3wIu;oO2lM;NS@R|Ry0Q8DeZiCg2?#~Rtos(4)n%Vc%k??ODTMibo$ zP5!yVyTI6=zL=lO7$3*z9-1nJLBuDUQTG=uvbx0lOOZpVM7{*<=iYG_Q-3*{+vHD4 z%mqtsmgazl?6z~mE1)+l!c&pLVMqD)f(XE}OqCD5U!+u5n7&xF*V}@{t4zRNl0JRs zmL3W^eN1m+x=gcCyU26YCW{>YYeb1NxO1UPiO1B~trFg5Ux2Hz$owb>eyr50 z|7>4cpPJ4dZaJ*E%A*Eci|_oq0}q}?E{2mkO=ncLT;`4~?|vY+U|U8VGkeFVQ%B+R zbP3<-d6oa%1?d&BceAz`K1trSGJoU$9=DH#?F>#~j_WT2^eWduD>l|^ia$)UyFkU~ z@U=i-+CqM&D#}3#tZ+40_=%o#Jf_5UDuRkv=U;-KH(&kclkI>#+Xf4h?Q_*zJte&&qSRQrmeF=y;@z2m+T55$AUdp08_bh)9VsyzWSs0iSFwKk6YSOeB$Y2y$gTHV3q^Xy$x+9DM1W$@xJ>+<1oy+HuyS&0 zwbSMCXsO1$-%V8bHKV5dpbS)ofi8E}5zfPxAvL4?4%?_+@ z4CvnlbVsFZo2e$#w@XB&az|eMy)p%~(Vo-ORrHPtSFd1^+q>z{8Iuc{DV?UkytEH` z0a~9v-PyQ>|D-+7=r+)9umKStSODFwTPzzhCw?N)j+O|w0I;fxa<6URQvv<(?*n`C z7Bf{q3w-ur%cDw!@ZS~>P=)<@?(Z2&n*{OZMXM>vJh` z5e9Rzk*tu)mm`xw54|k=&K6rUb=A>>>XFD8l)UQpg(90C7Kx=_pv>>QTHH%^a&TV7 zx;BCy<%b$kPVKwz(gVEX95|#v*Rxv_%LwO&n5A`7{=bE~$~eL!H}E+TB9>QgywV+( z)BNFcMf9HhF9ujBodf*Vu;wq@Pk#W8C13*MY3=-V)24HVVqQOF;0&P_*r5ShTT%4nxai3kl z)|o?6{@k*U*h-$C7~eYD+<#FQFALGY{W!!01{QtpWnAX){@vMW8#9*{6> zwSDviFTPQsk;y`5D@?piI-)$cL)N9BH+~oR*QHTFtv~cz=IDm>lg-&Zld=`iro<1t z?(7o0nPUI-yi0|JtA6?x{B}Jx4)=jB_}s!?#(8^{0wCSmycS-Yx3+z5l)?n-pkq&H z>rd1b#bhQnSdS*tm@uc?3kb;$>@AfWU 
zMIeeqsL*tvMf=!`6kXak&7bV1aoTd-J3}aerTD@MlhAX<7Z)E=N3=JPUB_Mpu({FS z_U_G(iAWm{_x+V+QT)vE_}ozUP@TL=ngy?iP1t(G2u%bJGm)G3O97?c$ZMDWmN|=8 z0~yNORxUT(-sG}4$w~bQ+I@JEVmpOKZyRS>8~N}`r*z@V|GYLH1ES_ot(Sf+vB#G5PC?nV^_CO}(A*L(%#D8i1%c(wqn0`UxH1clzg zZo-slU1Gy5oaxQKD7nS78qW37JY$1L@7Sb-JKnJL_4fhsCndq+B0PQ5J~1YfZEy4-cX|0L5Dw_VkZD!d=yjwBX(En8oIsWH zh!b9%ZjxHiAbD&nr`La)ah@&5;?locB}8{&3)_snAtjpeiO3kVsFs3m(zc*O$4k`D zwf#-9FkiO-YcC55v#Q7U5YmuaRi=uFfM5|ysRNwDkMz!P8J3(9#-lPO4->*@ag@0b zkb734cR^THD|yrT@b@yO9~s&yTvl9_%nu|4c<~<=>5nuK(#tw#A+;26 z4gd1-j_9#l%w(Bn_qwHsU~K2%+;{EFM>SvjH_YE*+HTITZI_AxESr^Smbh=jYclTn zR%(+NXi zz4VpRbWuuau1E1lRvnQI!wHn;lMv>P`Rxv21b9?F$j{PgGE0@xn`(wg&SfZ%2j5x@XI^~iLsXBc_kOQ ztQ6f5_tXPFk4wDkb0SIFjPWXLv`SiyAd`kuyi@1W>tkhl{&>=9{(}|5*6D1u+A{vp z)#Tk438vrdN00V>9x1YLO+Ai$@S2QT&5xw{iImlAk*d6CoquQZnU*k5HTLzaF@L*TNT?3TcGP`%82)@be%9g>y_q%I^hKt6m8baAF z%La7>>K3hK_Y2|tBcR{>%c^8t{L{4LU@RMcS>5Apk?$wB+_JBaiO#35oiYP^w%Lz; zAH|gYJ5h!)DFoGBvWj?7DZ`U;ezHT`-x{9<&)0v#yP_W!+%cg`h~Gk3@$)B$Sgx!Pw= z{fZ?0-$3L>Si&pDpHTIZWq9pMVUX@bo1g{sb;VsPD!kT9aohDLs%-}gRI0*+NBagLyFMfkOmIl8We z07bOpUoJ(;lzs9U+oK1fjm|a!OJfh~h;isE%a@B~;7S$tn5*}Oea6AoD-O`*xL>EI zJhLvp<=!v9y5@V7E`@(i?A79Xa&Vbg1tu+7JnG17mDv-So)~Z+W}pGA_v~99EH>FS zoNNznw8~9Si!LZo2QN)3EwM?$PIqjr8hb%EI-+hwV|=nBLaLxuODmic8r!Qc+Mkd% z7NA27-zjRc+)E0W4xVjuN20tZ zdS8H)?!lbNQn<=^FA3h4fJsA|%CWdWyHk*KzkR z_u2Vve}1Q{LdJ}oUcui&r1&$SVV2%zl2&Ld2{fYfR&Rd)d0XHIbwb@)S@}= z|Ed||H^;G5%Jy$*aX1k*AFsiDEu>6uHaYGXW(9awnQ6TspQP<4VHE4i`- zR+1lRNDnd_1_Xjst;Mzq0wxYt%9~uY6#ACzcdD(xUQfK+MIX@ZsGU1AX-(ep8M5~n zNGd&QCpvQ_E7Z{QlqVHJ_%U#axwymX9^YuN4@1vYnwqgKdU@e6_hiQYH+}7!9X4CGRVlzIPMBs5!kMvcBOIy* zu!)t>Ig$E1t@0*bCuZv}f(XxM6cb5uJ!;Mz%$PT!dRpxpX(1bdY&A zh8dBAzsi3M$YT|7GJht-xtDUukDOU^)>+@puWKzr^+nf^H^Wf7(I7!l%>oUpT=Ls( zvr#y2x4|}o5%W5k%cvzN;CxM;gayA;!0yF}3H3bsTVCxvybzHTnqqVbU}wk)PHf`l zw!=VlvDph1Wm9|3tZK>a!Za#lc|!^~ba5t3=cU-?sf5M|W*H&uOVuiP~@aWr@eO{QKvsN3@P`t2b2b>ply* zr9hPN_Es${=Da*1%GjH!lveq(OQi)7cj+#wJ5yqJ zfz5x}d`{PP-YVGF6bSZsWpE^JAY$hC>HB^~R(5l!AU1XgU|U4mxahR)c!jL&=QGXU 
zcSB#i_+r;M61b%t{Qe)eq2(+sF2v7J^$1{b&qZoT+?X$>?MOCXnHpU81(^ZOBMop)p@|E8w2pe++`^SMP5GqrMWfr7Ox#% zEiCA?_vM!=UypAMp zVb!{CZ@@kXG<>$x^SJ*2KBKOHJGbS)@s^`6W4 zKFQk$!0X8oLBZ!Fu}3ub05ibCD4P$8OC zdHI+rCR&cU-$$M)DzFp5xJ%+( zGYFul8{M4!mlb;O*2Qwci$@qKVhuwFYkUge5JleXa9Ln%D!bD)cR(FQomCj&Ve&Cw z@m5Jsw`*(uv4>+L+<(pZ!#&}lTbDmdvs7F$2}SsU8!?+1Se{japu?RRwZe_$%c zT17=P+cE79YR!{oO-CJx+)S|@Ai*~qWir`KP~am?m^?SmBLyATA;36HiqxcJKex;$)d~A z8fRKL{!Q<#S$VgG;)JPqA?{T}v^LF5Tkrw$O(Oj@tr{uStzfa{XpLD=I?JQTAt@WH z76y7~$jme;#Pc%$Twqy9YakmPpJV%`w`=bj_`^~CA>Kso^$Gf}B9rwxXyK++yD(c> z`{$Eb`1!WD_BzyEXsIkYiTF%3bpbfixhWyArYEm35{>VIiuU#JWLDBJZV_aayTY9N zf5Cy>V8;7Ayv!Sqlf0b)T`qSmj}%fILW3ZEWYd927BOdg*~ZSk)RTe-8}{u~50{|Q zmiA{7b6=wrq`P&h%x$(A!to|1(kK$wU)e!C+%)LFZ9=E&*0$SBBy&Tp27PUtDwiJJ zZLGQXXR6=-ubDmW!*2uch2aO4;$wHD)Y`%gsgwA=j-d|t=An;KVmz}NDjylOh@K&- ztqF;-k5yIKfXZXD-NgWw`%`ZN=hG`?!f8ec54==nyd&h^bkzl35Qw2_q8n%#T-|7| zebT%CWwlA{*3xXI1xyi$h4lIWsw}(T9|D=0+{6EJ2GAye0d*jCU&pVXfkr3rpZUCgL#;8n9=Co7rdg3z@=h8vqQ5g@V^Tu#^@lw=vN=gdS z$Y=6bk6V4gxwVIox5%tT!+mXD`agFyj5~5h+(Yg2C4eLajH%h~Xm*-@=H_GwiRk{_gzVY;q z#q5Xfw*$VQ(!Tc4?SwXr6M04{Hn(2gTKV&CS$}$(b!w*~$@#AB(yBkGR&{RhOA{e= zE$}^av!r?~0Y@R=LAR4fHYwLv zDF3DEy+!_h+%}^fPNtrOJRQ48kBv|Img7GL*lilJVt zi2ReO1ZGqHD5)vN{Kv50s9)q84~G+J4P^%RMRNm%`aKzZ!S+ab!GIJz*JS(qWknOK^^)S) z>k+x~J${UG$+m^_WoW{}J|qc48OK+eE`5>>Pfjq&SjsS=L%Ykeb@d`pPC`{S`O<}aMFYx--#?FjR(8@79GX@+F?uk{J}kWC73qb(~j>8k_ElLL^f(+{ky8o z#`+W~ax$3#+F|c?efgiH*zP4+gxz<6@3kk0DL8Tb!PH+uTNid*%1Bmy-J%pCi;MXn zdGc~osSc<8pj?emK}?d~bL>*W?FsEH)uXI87Ufb8rb#5aaMsU<{8=fZ!heeKJNZV^EQd9ani_bhZgeq-ozZ7}8msZe2X!e0RxTw+F_9O3 zziz(it!9WlwXE(rPR@xiJF3A2^Vit;jJFoypDfiiq)sNgYlj<(2PcMw&;!;)t!E&$ zc4;g1qlz(1UMq9tnfCb<2V9&I-E}hYvG*DtH+vRb>6iF&1>QVTTukS5?aOJc;xoel z4#|6#?zHfEW9nOu$@H7I{p*f38S`^~e~v`jLcIM^`A5Ta>6`EU;` z4Nd0H_-=E$SFfCXJU%9dyUmLgV@Mk0dPsc~itCNk0)KYACv)&=mg|m)AHBJiHKKjz zcX#_fe%8w~g#P_2H^mZd9qpgKG~YC4?B5G@W49;4{{%X}KTb69(wbuW0~IZY0`q7-BzeFGbfMmsbJHIX zqL*~&PRyq}$;FACY1;i0g`#46*9>pDhp{hrcVKgP1~H0k{H_hdNmBZ|`*<^K3c4x3r3g7{ZU7JykZxBHAIAZp^bg 
z+i||u)-puO*&$_jT!(JPehJgMa-)1IF|GV`XPzR-(?(YvpW~PP#n@#T_Y{))))3vs zYv1Jj>RoRajp^6JXWIFu-tA$bG0bsVc5*OKNZ%}R-Z8vb1b0#M23LUF;t#=I$Z(@r z7pP-rDT31Y{Mqj{M}#X$(MjEIO+zORY3?>{o^(lG*_F1X`ov?kWhu>HZ*c%PX7Vok z;u82U9Pg@-BroxwWjRM))tJshe3>*pT4V#N!sf(wUQg_KIM@6|>_FRH3ui0T%-qjg z$`re*9KDo==I*`0|6o$NLp4JFLOTsa9&LZG%QPZ$mq?UfX-Oyp4d7+9N8z9$g+nx zD44o2U%Y$$b3+Y~od0fC5RjC!v;Tbe@+;h{`I<#|Z*~P330~Q@LujUa$sTUtmb+mx6+x${L%=H5x#o6xLNh=LsgZyn5PoE*P zB3ce9Sf0592%*-I7`Txt?Y?yF$>c>JQWkS&H@hb(0RvuSfTjMCtR12`-{;0F7PGnJ z32JI9okkB3O?|ielmXZ!oVIf?o}m-><(%YRs*R_H*~;M?+@qZt{*5r$AlzToj=Oe5 zegIueUbiYB{ULfkEELr(R0frnep;mHwewf#-h9hdVW7hC(Y#VP!O-oR?<;=!ziN}F zzAlfktTfd{ur>af*U*Yw6-m-OUx$z4E$0EH1V=Drj&$OfJ1`T45g|F2q$#xRWs`-d z`yD=uj}6o({ZOTY?19h(t*sa~zYnh~02}(>{PsJVGtPXCKwItzNo8F<4PJ#iamgUF z9bzkP35B(^#sx^l{plvzt$8NV$52v{PXR#TUe_fKuZ0!ZIrekE2^qG*PpPr(A53+eKymQ2liI5JaeEuA=3|UYELdO6=^X1&52Wq8>gY4?U*h^=-b$Uy9 z&}Qx|REn%#dlj)c-9~8eE8&<7;O@uFybOU&ueShg1H*{9dQ#CyhpM@YcOoIeJ~2x| zxe#8aALz2;Q^;XSFvCP+XJut2+SI_;4Ay!E zirtri6C(dSXFt(^wts}q!vpQa$G0q`ufdogbrKl@qE+G(bR^P{Q3d+F1OnK|m$g@u zRgh7p;eOqxz^FU&hOS+kJz}wkhBtsns5HF$t3n<0E$JyNj9!4m_ zb1NRl(|@UmW=)GLmQbeW)yUP*QW~e}7M=KqQ0Mk!7%G}) zD8mPYGEV)mKAAq#Uf<>;g%_+?pUPuYy{ajdNuE1_z!<#c-?xxw(uir)ssgXR(=0J^25 z6YEu>WvZnJ(2nmjM%`rj-T=9$`cHDkU5-uSLUTO+gl*Nr&t;jURi+iKxp|Wnawf@X zmhrCO$;K27B8OT@P2Jn&XL-K;qbIp9)|K><6Fp_{A;QfMI*%U2WoNY~2{AF2Kd0(U&o&RvmDu>x)S48ENWg7XoFrl(5I5uIWpq0dNtm0&st=@ z1p;zhT%TX%qqrU{2!9SDdsiVr_^ZtPnJfGu1wVUbeh<2*qScv&X4fek&qwN2F!n02 z98uRBd*szbO2jT_NnRh&e0}R!|8C)Lw_y+}ZU0Wt0JOKx^oqhjP%4>pHSpXhJ*h1J zo}AjVWdGX~?3r7k%S5uY|M`*dhdkA(P9@7549I&AK^x;^g+F&nLLxrL-}Cqy@bwEX zVOazQFt;S_R&`0N8D{I;Lwg@kdbb<;J0zDaahM0ulAPEeBxe8ny?CECj~DBPndtyp zEj$0ac?igA=O?3@2J=LGL$~pc!}77X8dI}t9vaVq3g!Zwly6OFwdbuEyI<4!WAC62 zHD}rxLHst)(z@%oHO!VF`#14KyH@SeNM_H7}b|5 z#j5-KlWVZ^!RhG%-=~Crw+v*I8f#7u6xJL;sWlbh61A4WZ7f@z89l>$j+x0P2Rz8Z zUK8A!z9}{-fky6%>mAX54uifl$0098-tbq&9JfLy3B|3yPIC(xdE6^`yI}ex0Sm>O zx>O_3wHYcCQKV_?;`5&aukgjx!=rwFa&)Z!`v+)kM1E|MDslfuXovm~&?>YNkZ&+0 
zP*<1v@9!Yx>NP()PMUUHDW$Rf zsc~TI(>VRlObD8$l{9eSxHFrE_TwxTivI-xe*BkRCZo<2bWZ&LA9AqoMc--szkgLg zP%{SM9^U}!|004oNBI9c7C4!df|I2p{M;rdw}4$h!g`6H_`kT1DF$}{Uv11Wx)i%o|(#9je-$9wWij-{|mOj;0Y(u1?kT2eHGQe7Ruvp z^77-S-*TQr{}&JZKR1#_7L~TxS}BLoaNqU+MQgxrKzT%~qM~6DhYSBK#evfNH>Pag z5*{K?gX|i^B89^&-GzVdWAy)J`<9hcSC8NP_Y)up?H$@rf*zF@|DBLR;d^b$BSjUt zt`O7k2Nu--4PgvHC}_^A8qY4s7k)cwWgXY#|Gd5EPRiZ?hN(~^i?;q_@hl?kqq?j9 zn>NYV8Va@AU{vyQjcM*DZJ#`RZN%pMsq}cRHTt?_-(_2BJ?uo@uR7csZtW+S#GuPt z@;s|&p4*I>SgS4$AJ@^0>ARnA$=|7Tux4?qrIR{kfcDw1 zt>uLFb)cQ#8Al^e`!wR`7Q(ashKKHR4o|-ma$7a5{N8A>zhKvZD3d#wLs!rxxMA+@ zaiMpWF6Zny9dt91grcu|j=LS3jvgdr|FNzPsMprEcI0Ui>S1FU$lZTynDjWWCd4#i0nw~?Z7JDd0n0dNH4e={(bSob1 zqDB`T{?XwhJ1Ap20>?ee_k- zrT&xZNXF-dES!hFvh+^B#pOmH9?L@>%&b4o2oy`15tv3VBMv{xM#!f-yj!&G`u_w` z<~tm0mhbY2_tVi4i%Ofa-MQd$ok-8?oY+4OXpUitXRp!cVuv$66 zA?`2agO!KJk6!)FH`?JE?qH+fs^BDidCSM%VXTzDi(bN@X-?i~Tvq(1U>CX{e}?wT z7AN>S(zRZW9pbY&`_#X#;%X6ZuY6v2=Y2Y*ff>L<|wN8Ip4tnbS756US083Gy085hciT1#=T!;Kx@1q%BGYR zHgNn6S;^KgXZ)HWL-&@X?JS3lg5{-4TbMqE@qrJy8w+@dGQr9 zA*#nrB~bmzzcgj--`AJkJba*?_OVSu+9(?6>CYr%gZ-rv4Lj6tF;c?lS!P}mp$A_q z9__-hRA;Z_II6*Gs52S+W}pF2Ri~`34txN|Fs+>Cn`Gv}=|SDY-GZg5iQl@a{PJa7 zPZr1T_+vpz@bvAQnu3=Tly?^+iJW3i8=RdaON`N6^W)FeRL2DyN@kOBnwM=Br%W3d zL+H0>yriXBI0zs0oJHV{GrtMT6XvWFCMApAZTf?-FF`zLxCYhH%+($j8`0T?cb*s( zffM#vmQCgOWw=}9DUC`@ zzB@5)b|T;K#+<&Y)jsreE{Vd`Dcmx-IFto1A-j)4sb1^;h__1c!%zWJT<2_G1YrHkCP#$0E zw(f7aNmwreo&F!3n;B5n$4j~~iVq(?C22gJ3XMM4$RT-=Z>|Imu-X*EIsDk~C`0(- z|A+FuK)b(hJs!NMhm6sGv5s|J2jUHGw^Wjgd%`CYz!rMpqI^U9t|Ay|yn1n}ZL}AN zu@qHQZ_`WaHPMgw@E+J8-!~Ey<8zoV{Y7YRrZV?` zWDmp8HAqQjCC+8JZJC^Remo5TusVu#wxZ+ z-N5TkCPnP^ob;Stdo^2~&-N~5+*-)DFG16yW$X6f3 z#q8{FK4Ldsbr%;KwYP3#wcm97#&-5QIHc(OY*yv|HDwZ5CM-QJ2zgQ3Td8a|52rp8 z{3a-fCb$zgCiJr3hxw~aPagRB*7Xc|oNtNripc&_3>YMP*}0F=1#lyJ!l`J6zX^!> zk4TBEM8?|sU9jeL!J)pIi5Cs?m8#X4t~M*S9E%qhpsRT@Sn_}VEQP+Tm7PnMz}z3A zFCuzF&^3I%y>B4i5(@GhKiyAO2E5~1j((C(e=8FMyjOAG}xzA z=WB(UI-u%Bqhv%IkzUiALZHB|RN{gx;I?9CKjM1nbNwA2F8)7OtUkb!G2Rivd&4Rv 
zd^CjdWkIBaY~iEKcfYe1&i&7$x5XtbOY*c7NRK63`S8SM0}zngtcw&2?Gc%A8%mBv zlt}pA#KA4Zg2TInrmNpr^U+vOpFyvFU0cWaB5q)fQs%3~C@8}G*L=IJ$R9~>dmpIU z-J$I|)pTcJL2xvVtxMc@Cy4(E@om%&%qwGP+(rF&AdQx0-H0#j+rMO5U!V=g8~O?E zj10gBz^=l*H`|s#cmLOk2hhi;1i>=xz2xi`hi36%!IL~na+r4h9=R=$eA95=Z%j1I z-ov+ZN_?4xC3?I3s$N5`xRrX{>1(R+Qo;G@Q_!|8}DY7!ui z&r|?=BDX)~(2I=TcQ4U)9O)Mn;Sja_-aNjrT*uduvHE-Mp`N8kUkfv*&hHe!87T{D1 zc7xQFAOMU=9tfl0^NuJtD*7@L3n&Yqs+;GkX$|?D%pnTwLYJANP=)=OXwX%XwG!Kov_}+fBu5E%p<;mHrJw5f1Gx`6>M?zC#T|~q zG2pDlE*Cm;x(=qc9)%T|M!(&+gNMPn|GmCJaxnu48@t1bDZz1orfdh_2EE^c?q4k$ zbs_rCp8^$;wQ>w~3;vnS%zJ z&`JyrG0-&5gkA8bx$y?NelJp|^vq=L=HAVgta&Dx#R2SkZ}A-_PtQn(!}_BTlXD>m zUJlN|^)+7X&BbxuV^r3eCWD4QJ;h2#9f>ESvTHxNw)LNheDspBp61=<3^6-8RpOxL zuaD+Q`FV)O+_VQNj}JqV7T;RRWtBD`E!iUQMwmgxC>#XUdDz}w?BGs+3ym`HYX+Da zI;uBO=P8hgHm+er!+v@$q~-XK`*!il4WNz4V}(r82Tu%lgOqpdb+#O>2lNiLYP7|X ziO)MWn|Q37#K~AF9IeOA)@d|iJmo@K!573v;RY^oqrn~r*!uQ))v1O*`L;c&u>Zb& zhtg*7OSL=GhRI8SHlmCVAgW&!$W|_OwDHluPhj{+BUdYon$fO86g~q*Mf;1>)NKQF*N`?_ad5m_ zsR(h1M$L!XX-DAtB>p?E)PBrRX9r7AcJtMYc@+L{k=3VopYQAD2F)U9oqPZE{tU=- z@%baTjjsHDs-ovvtr&CGUdNvG#Dwh^W4Ob(1CxoY@&OgsMV=SQD>21du_ZNxaTi_b z&@IeIna$!Sospf+3+vc2Q<2x#ZV+klCY*@qy8G`^%%P}p2IlYU6!Aa&YI7^7jB{sy z9lG$DwT%whD{+uh8we-*X6$`*zE09fHy&7>I_@l+GAp+Uau?HOFC+pc`8%HPN|p@= z)6uGqXqKR3X9ja|Cly%+tKakOQeSmzP>O*V7gAu21|5I8@Wp+I4JqZJR$qhn@>hl) zF!|*McCsP43kQ;!(Vf$3O2;lIy6>Yxmqe#2&Yrp<-%;{_-OeyyxuO1<37;Jt;PN3+ zio=?_1SN}ZO1WjAEgDvIt?ibj#le0Qoh#3EbPn>H6@jNfsTJv7SK^?mJ_krBGoWf{ z(uJ3Y2ry0A)RAqrR?}k`s;R|sM7g$M8WC8$ePx~2ir$nb+c*Ds=hES8?hjJXQWQ1O z0D!&&y^%K{Z~|0G#~VUHg$_$yvf{X|yv+AU6ftz~gG)j7#KN&@Z#EssxQfd+>A$N~ zXu_e;W%<=>m@Ni;;59>M+~;vrHuDJiVz z_V7N4DE^90nQO^roKvOl`*|&mXPvZwM%jHdbaRW(cTpR?8B8Nl8<1N0X7FN3mKED2?%%a`*Y9rk*uL88X})OqQ~S;O8QNNjHg4x&^ednuWoxbnd^NR{WuIx}pkCxccpB0{v;Ws%dS@Gq2x&~ipT6QJ z3j}N*4G@uV+MaO8kntr(3@BNwK!PUmsIAUcJ%}MJ<}l~E2*TeTQ>^rk57wFm4b+4I zHWqAflBL>4f4QI7vt{09vHgtUz9i;LDm5ljA?KeS78Jj*l0XG8p+iM*rv^S2zlcuSd9a-MP=xf_q*Y{mn&f$M&xxp^)O7Cf0KNlh*r-q{LNVNv 
z4CpTeU@Dz6dV3P+t1l?ua73#RGi@E_-uWsfyHCT|CHqbGSpNqy8D&tT`5jX?P)U?Z z*cWa2e!dNIt!(QU=8(TQh;Y6@)T$S}=+V%&(hs)y5K8G~UVT+RhJaV%v#SNBQWv1# zaE-Zb*~z*@FTzyUhT}e@EomyFL6MO4X6V#!k_5INmTI1IxJV^vc#Mj$mSw1ae-2Cn z>5`YWes2564c@)3sWCmes`NAtsk6Ar4WDKg^%T>_eXXQWOVO?DL;&&mRg|j1j#gqJB&ijX(2? zCh=|7i}gD{mF-?Qe7a}W<-XYBNVU6M8t-oB@(IUj?&jN^>S^gW=fa|zwH{GCv?Qt*yeS1P2n3&^@&J5h|G$SJm8txLU%ldp;$&} zp8g17lXg&|;BYFb#i9@KsWpO|xhJ{(Gi3}!s}>U)4A2HNJ~yY`p_OU!@RtQ}e)t=7 zj4TcB>qF^m8E}q3A7zgzCQ~@jHi0~t`(){8Qb{mfXtb8tec5W6$dRV_HOf+>KW@4H z&D{6L0wKjL-Rc5zu3awoEiEh`L3lhGJq(kvL!iw5hE6j3jMRI9QFzLz=d}7OiWod* zM)09vN@sCs!C;mmTiv0)UWAk&Auj2u&PE$<2B~bx(h}ByED+IB1O41X?9iJocCG;!UR2toSHtQ4o^cqzFjjn###~Q8E|9= z9t-5{9Ki!ttOyjWzCbwtwo>Bc;7@iPNNwJM8Rz$q>|kwaDqTX^u)kG z?o)e{(I+7OS-j$z!E{gD%G5hJgw5WpQJkNTLtKSPv)l`Pp76SvkjlME*A|(Ch zEDYn4vE*E7bx41}VVn{F@MRNYusA79B)3^{N%s-O@&PDxR z57ZyH;cU5!43k_U5Rf3QJk{rSYhNc8swHd7GT-}0M3rY$ujRoiTVc_t35UTKkh=|+H?TRGGege-y^nZQ`EMALV^79xa*OYzhAYrtsIJX0 z*0EgLN*_$egk6F&sb$~m>okg4K;SyWqY z%A&J(`VwloK3_|ls7;WpU8ERMoilMc=-X*oXrbf;E8*;PHkY_8-i|dMJXl5d&Kn_y z_sW}ociL<5TZadmpsP7C*|Kp#v0drEJaG&e33nOy?up;eA)|U7jwKCwJwy@p(T9=1 zo;fNoFkPK6nWRn#+|TIn2S$6X#EGgF4}x%k#*mMVbV5MYtzJLr(U$j@CKXJ6Eg)BA z(YdFS-ZStJm^6&*Fy7zCgd0sd5w%gCPa4ro)r4d{s80|awy#eoCYxmR_hH=-IBl5& z&dw8?Vi;PFu8k14YN}GvGw?;V0-yB??#H)BeN`G$yW@0HuR*k!S1SFxFR9C;-jzd^Yb5 z%RF_POYWVS81MO$Az8-D-tb6Pd(ovM+1wKCb7V6%7^}j z5Qo2i7d;6~wn<7Qu&dwKr+@koqh9uor2MhK1HUIZ;9f`LIYy=CbORU)+J_$s$P@aF zwt7}M5(IDn_krs}lkVdQ@SsAoqMKyDe{OmYRLHZ+)4Q5Dp(OlLJNCL!@MKSzZ3u&; zX6d!hiPC9a|9G)qwmTKQ`AJs=9xPY|>Z5QtMN9nI#z{i$X}J0XH`;{tXSczlD?mE& zup#}Ep|${oGO|YFb8X?DXDdMBN1++)RW0(NA71n6 zmrA}mEX^?4;gG~JjP^O08bZva?r>~sK~y6AkL}i>ssuF;S_j;S&Gz}LxD%@A`&*B< zE(UC!NbKR>iW}r4iBc^qh=$8P1cUyHyRNfoW7zPN>%!Gph9C(dW@4e?c2JL@Z$N3a z*?C>b zwtgdYTAi{NVWW)J=sY+mF^z{9$~Z@g-PA7+ntzK9^IEFse~~P%S4$Yxm9BE=`7xGT zT*_ycFw$F+n3rC_O5JIpi{f!COGaX=>YVKwUAQw0(zj{XEU4#x^@KHN_b_on+6|hv z6|h~!FfLZ)`mu4&RP(h;U8k?daII`@Js(06l7td0yG z)H>;Jg)Whtc3p7wQE&-hiIT$nSAHXGQ30&8rI^x0V?mdA->coJ5Evn{+%+W>o*ThH 
zn^H>tu+ErmlkS4L;%DW$^=Room{pGzE+mA4f8py}kz#&sPxv4J$`t?N0zGqY^{Z&L zh%&<+W*sD&e?IN8rnXOY_1I+)yz*fIFbCW8bOVz?NFS90@gWp`u`G+d3Hbtz;)Dwk zzUq8qelqN)_Xay9YQ@1P;I}Xqt??T`STJ(q4Mg~;oMugImx`oiLL0EA*Y_H10NMn< z7-<`&tVx-Tz_ZRixVv_uG*#39>LL=kgMS`#w& z(5QMXkaPE>^;fH5`3-744q1;$(7r|4MjDL&kjJ!4;;%xyzXaL6_^=#;kf9q9FT8q( zKJ^tG@QqH=w*1|!1-F>Gd=*me5mhBgiWSsDfx*LL(Pnn}m3V&3o*@TtNXNZDTm_*& zZ+hAp8X_dHo5XWSLP`#+VwVGrEktX!sS6GBPgsA7!K%lsCF0f-F-Z65ClVhB=p#3F3V!go6`Fde`4kLV3=HG&^p0oRL+2!*M*Nh`lOzN zDu2NeD-bt0UQ#0YvXcUV*+w(x@N;t)e*Fl;C>c}-z?K}eJG^CwE$9JKW?afWmMh^Q z2#%t^AO}u-BIW=J<`OXRl(Fj(!S7X-fX_k`*bPHz++|M|$;+(O?ofTvq=w-~yHSx> zh}ah;*tYdc%Zv-)6}?T&Bse-_#Q5Ujtn-&Rx4fww;Z_gcPS#DfAt~~O-~UAQDj~RD z@0@KSz-N?>25y{3w$?a`g`8r=>41JFr{;|POA77{B~bpzWKzbT^Om0!RzkoZCGea5 zH_mWd9(e-Bj?IpKKV2>R@6T4j3(Yv@Zpt`iK;82i4SO7{-q0EcixXT!Cvn<54dYdB z4`Sh96sC6%%6MUP;iki_(lh|uP;yvgv=yfgTGLrLo-i>W4fN2yuU$>E5NbX$1;;v! zQCM@VmyuR9O>w=;E_E}d2$rNS2dplfBmGVYLCk7T_pTP83s2dS?eG-4RpRpN*(5F1 z=GKbtwW5XjvxeyJQ_o|Z$V7gm2XykQd~h6arfpJ5u8)0PmAASc>EueP0%cDgd zR=c;lr!`qt6G?0c+-S>H2MLq%@Jy^+I0fVtUW%E#ffg_@mY-z7#tDciU;WP$V5fYC zMe_~sVe$AOiMLBHDyB5i_lW}-FB!N%ylX||eP@G4_FfsJPCTiDPbr@(qbGncDp^yA z6gIf`eX(VwYPkhtO#9Lh2@Thd+}kKVhG|ZBcI0&~VSW}7lvE|*EITAHffS9?7l)n- zbKp*$P`tK$p;e<1vC0SiXeoBtTx0s)kv&1@7gYES3^(Oe80BEMsRirA!rOWCK+IZdF={nS$)7;{W^*R;tELw!U@Sl@YglcOH?{ z8XT8SB*|FzM@W34OfG&2K=Vge;u^~`m$L*@Aw$r5PH{1Q@ppK$!4TUJ;`I z?_<)Zml$Mo+X&ZBDe}~x8__z{PRO3s>@>B!_+wtxSx*Y}0r7+UERq?hwfVs>HB~}0 zlS&*M!-IEED&^`OlVmwK?5dLkAO1hwi%|EG4U zoA1^~cGvpGT)V)w*VXzXu=A&+gjZn5{U)_Dsyvo8?dDSC+hGiM*=*F+GV+AL&ZP2g z7*bgR;K8ols(Z~=F<u`<3R~Yk| zR6KGtC0Yl`A=?4Anw|{t<*DXFecwWYCG!pwq*Q&cNB!u=YE{KdWj_3!)IXH!SF}|s zO$D^9Ox_ZiSW-R=zaEK}zgf`pJ4v6|@tR*)o=Ws%24&tDk3N1{@BT0(zIl3*N<{$N z(@)h1D@c1e$9`V4+)ZGGi+avFktCR1=qhfi5f1QtxI5F2DhlP$9vjrUe;oKisl-X* zkXR`OsDBB8c4nz&^YkJ;!JSoa4+1PNlV<%UBzl(*=cX^`3~POKdJLE|lKg_v8@a`Q ze{!?%gosdyMCxj9g0)WN{Q6TQGO>8q^EvR?TGW_q?FN@YCFz6Bz(~&(6Lo=#8o}P; zhY{xF;xw~67^O!Yw1?R30SP&Twqd@g7s{^n=|4VzDCSZopj49&82Y3l^F5NmOV?^; 
zGV!0ohM|)Qc;`7&0`pN1}+TP>yedH`*^2;Q?vLT?}eV71;C$WfwruwDV?(XMfhd#;-?ed)&+u$tyNxW3+}i z4~prga04ie5Fjk)iI;j>a7B%_wD(w2mf!7(RLjPxde3!7mcc}GXJtwys_vF$aG_-> zU6pkdDX}@<3#K<#ID0#Xq;>9%>0>~kgyL}_A9oR zW;ljJboE3W58P*t4F0UFhSL%Nn3=rgi`Q60N?-|cXWdWB4=-$Kg*Y!e^; zJ%{*Q$;;5$-p7Dor-$B0nIKC?@ap?RAWSH2Sp1-Y`+<6n863^sR>xuNju11Nx#^#C zR7v?4l&{g5gu2+bzvc;?r4pJ+OY`kc)?OIUZsT|rMXfNZhn%U~Ji6uz%_mcN@ry*0 z@=4`5QZ~C=bC-5zkf@Ex@PiiT{MR}+HXsZ)?ii?nO30KdLFUQu2KU%#C^1HhgHN3& zZqGd0)uoxBB=Y1IG`e_*I$dzHT=y-#2&VF0h}I#X6!7Q8t}COemD=9Ch9S5QcHeF( z6cwhqf8XyUvy6FD(01fVsq#6^!f8K)@56@cQZKdP9jbf7Q+{V(?%?vZdOW+GNw4m24eJHDa|b7SYh1Pu-MA~=dy!^oo-3+@5{3{ZISTpe@0n9O26 zPH?fRO|-`p!A1crO}UAX=a$%}(!w-^WZ6?$ToJSr_dcDbaS$#pua#>I zW9@?PVou96`u$YTSNxCUCivp<A&dRAkG?MvwNDbS-Q0R`^;zSn z@jO2+9a_H5Yb&pCa-lk!zqs3|%xvb=r<%N%d;1il@v9%>)Ech#DuEmG6SWe^cL`R! zQmtD-M4de}@?n`xLhNZvdfDbj&9(L0OYlCH`BpWDS^dj^jsQTQcC?`rOqh$ch(L;_sy!h_@NBli9+59z(-&E}{a(ov%B_ zQgTXronf6A8ce#{J9hMdnvcj3UG9ARbLu(G=3i|V&GsQ$@tClTi#}t^bX#u{TBUK` z_g>VGLF21qfLcd&=drM_P}@VTyfl7E%WqIOJKq|x=4XW)<`HGshre2FYSq4=iMO>g)B$bR8Xc zQ&5-1JaPfvTkBToDdW|KZERsOKY-~zGh9RQmIS{3F;T(UD2+D}5K1xMEl0~5Bc0&k zJ5B?hOxd%?1{6mf{PST#81(M&Ju5c)tR(Y3`ANF5uR3{EhuvMJG}NZUZ|H(f7#PXlBy(Ua^$rp zWu^Mq%Lwnzq8?f?x82X9$mYD`=EK|JmEHPPzu|h{lUtwLE3r}b@iw|qu@cV%F-ysf zI=`#Kv3z0OvOOylT5Emyh$#7x8JUo_wP}|U+k7}HbcH&%RXc&77dzi@zwrWB?>BF( z5T1x71`K>$-R&}p3EM*Y4(5W5Q#p6IRO#K_&wEeB1D4e9JYDF_B*U}U3pt~5v-l9^bcl)1+ zuUyU48=0;xzeom9-^c05Fa#{{^MXAGDfYE`@F zG;Qv)xt$H!Hr!Uad+;Cxq`!;(jG*N+LryM5+>$mAnJbH53Yn)n@7URFkdOM!N?uRT znvIq=*bN_T+ReC+mVCS2Z$&)V>RP=SlqUsJb10>$r^!dlRHnHaiYK_zo*mCCux5C+ zZT*aW?R)$yD?*fK?6ALLw)I-@{6u$Ag1+hs(;!rBC-w2)hgkL`zMUmjF@?3)-`-|a zAxY2(_u=~I!~QYk?lDVYM@rI^xV@PLEf)7F5VV1Hc5^0%yqFkB@>#!JXDnOM6Sqh zyW`Y0$v9{eBN)TQA5?woxM}5J@VswWMVdSKBFXA@#qD^j{+)bvb+x7 zOT8)j==+!4XFB5U#egM-q2`DWSxQ*2pgnRB&l2sb{yX~0Ny)H;o>4Y+g~4S&RKg*} z6*se4?3bnUrh9_h`S=TWCftlX8UQRc<)=kT)%1XsHmlK}||T9dp8U^$#?A!%M~` z{}I@sL9zlASY<@?SLOu@Rx+Pw7Nci&2&{$P#LCQF9E28hJiUL-?TtV%{_G%wspc*2 
z$Wef%N>M@I9`LZBygg=Y<^vaAsWz|so~PzoayEff7qx(ar$=>r-DZ$s@(PZQ$2GfLpnkT)->WOu@0q0f`DfguX5yyj+WI+dmZtN)nWZ~{U znZtBBM}vb&m``H`97xL3YnT<#=p%KYJ#>s#fKwa}g`Hwg5RvkxFXCIJ`gEi09i)b-%y}{T5Z7!)JZO})FE*lq?Ke%9E zGBdK(Yk#J^@CAg|(vV@bAF~#|P~!xb`Ib$)=G}!J@4`20B}%J*oBwaM`M3b%!_q~W zdLAyg4Ps#lRVjCKg2UUD#PP^q_NF)UdZiBR%&Sy()(5o}zfu0m{*~|kJsNw4*$E~b zYV=`DIp#5OKrX?j%@2>RrMtWH4W>3cn=`UfizS{Oc_4wyzg!Rp*R2%>o7Y80z8`Ps z$Y+PERDJTSP%)--Lf47Q``o3}N41(y&y^d$ncrLn9UX(Y4@EPV*@S_Q?X%PIQ+ybD zY6{ZG(lfdfJ2_tP^|1oA8D6bb11xSeqeN9@tJbuX`)b0;F!B79TF+?hP&)78E}+?~ zKY13p?tZwMT*Y2X3Xo|Pk2lb|E=TuT<|(h$sJ*5}UWU1Y3oK&%#c{vmeSJm}0}8%~ zOlp(?E&%T9@;oL;Q`_h_?ltm>6}>`CkakI_J;^i*tKF3oM{obeF8qQPN9fBdD|PPM zX(sU_0$7nr(@oE?+83`I8f4L=WFa=J%P{?v<=bJx(RJRf(KW0N6zxf=)IImYhk@`O z^|C&~fVtw_=*I;wnV@MY**kKQJRa>vBTo;4djB7`-a8zwzU>;G(MB(c-cu00_ff)y zgdovHCy3sn_nxSU-bE5b8=`lE2+^aBZqy;lVD$QJ*L}aw_rAyR{O_>$?ETwkS?5~o zWMN*CM@e>*@Bwc(2iuE*Iq+d*mF2e6%zpTbkAYiTEQOzcdEpWEMw~hSx3k_mrJaFT z^wr_Rr-1^p2)$IZ4|{+vqXmsGE+5|mN*A`0H08CLuFtk5cEIHE0hnyg#LRQ~z&r8zS{13E`nG{@`Kxe<`gvSwvO=G=@G8zC+DeYTU zQsh&9eeGvSI??3irN{CAiHCTueV%52{h+_PD-Ld?@iT6J9;+WmsEEUA4*cXsV-Xqq zTfZ0T4L~`8H+Tgnd0OvTH_ zc63sa@M;2QIDwiCFMG22KzDguM-5^oi4I5N~rh;6)s?FDG5yX8bOUuK>Sk` z#!3Cchx04(6q_f+kK)R|zQ0c0J}CM*u)6z#^yE%G1O(c2Wx}v*y(4djN1%mezpeyG zR{!|QWh3dWYx6Aj9;v=TN*Qfy`>0dLapf%wA{heZluCoejX0mal#^O*#DPn)+NQ>_ zT((5JgQvTajX3gho1D7Req##1co4^@)nrj&k%%NA9L6uJ63Yb~6yko91^wktoSYDO zM(fliwM?mGh%%R)D8v%z>Gg4Hnjs7Y?6|a;8xP@PL*A{JD>AD{bcpx`Y0ZFll5|_D z&TEY2kGTd>0Pn`B)+7r8mKw;yRNA%0r?bh^0| z{BtUp3r2b=<#XD!1j^yL@*l&c7O-KuF7;d^*|A3!7Hpg9%t^7&K|8VLk-EuXJy61W znnM-@j`zmrRSmQcVeG5>2VfYeC14B*0&Nz0I~q&PS=p6;p2K==&a66e^=Ud7{(1X@WUN$ajH zT(7-zn#WG*##4Z&P}{?%y&_5M`hj8-lPac~22(RcA(a2T-5I4If)<(`qREok|^xt|&^Da&Jri<5kyG=Yx#_mzb+}~=MnjfuwZ>c`qwHQDs}%k$_Ye(1&Vo5|#*tvZ!FKdJMR#9&kN5tm!_!n@R!ZaL&rCG6`{ypj5F5`{IE*&4-9eYhj)?6m39o9<@en9se9JF zdZGky$?Oe5ogR~nBoCnF$IZPxad>|VWK?B8m(|-R67mY#;ddwlSEo`d2kChE;Zc4(E@6OlCr=-(01X@8{2l-dbaF*2N8sh zAnUdZ&8B#&roQ|!t!3C%ksiyd<=$139$&2W|3D@W&?|q7Uu?IGnz<;iie4u_7LD4F 
z9&#|%q~E?YeOSD4{xr@r$kkW#gZTiG<3SHK=bKjwsIyyfmHe4a^!56)h96ZMG$!Zc z-T>$y_>%CO;qYH`UgE;x!>pC2AwQPyvBd`To3ejt2{x5C;;JuIUATHk>hLOih3x_f0b$D^x`)3V78DT<*=xveJsO<4= zudWXqsaSpfSJ=ITx2(UgCnT=lJT8GfIMV>^FHf72!#k}n7LJ)|)t(bnhpK_n`cP5h$@kY2a+{8q&Jstg(rHZ}?&L87~0?gRyOygB*|X?IkkWWS06dIk-9vt(w-5 zL&A0$vP^QAyV#AH?{|yoCgIoOlcur1G6rvkRprbD=T4Vaxm{bWt^)&FxQ`xI9?Kh* z_L0DJHfunQF`yS3ut>??p6WY$E1d!QRh96qi6@pn9#z)J0K=29DnoY-nt-cz$|(FF zRIL5&w_W^65Jb7=az^VLXs-&3hhuQ0_LM(a7xB{{k0*0%2*RSv515s0{UuDcEIcnk zAn)vGOEsCy^#b2Ubmkhk(j!uv$*mk?Z03oA8n*kgDdgkAJE64}Kgv`orz0U!~vbSFJyXLRT@zk6BQP^?b=aACKn`PCe9^fcu(S^*uF1DMpRCkT0 z_L*M;H>6z9_{-qHPQ!^YCfAdCpGo#t_7dPs)A=bsZYaryQV9d^_*VcV{=4T1&x2x} zp!ppk_Cnjg`2cbyM`BX+Z0tTmzGl#&i$AVQb4M`N;X@>01|SS(7b$*p^$)@YN3*6JWW>-)W6gDmX5D>~m;(jykmE1oBf*~c6qi*$kI*ZjP#%f+RE8_nh3w7 z53XO>4B@ffG#`DIoQTE*bf~^1O(s)(kT2YyLpO{2`d{k$ny%#D6nN_W!bUlIQde^s z3d_5eZ4-F}!bm|b0KIIB*>#Hpac0Hud2?{Da8go%8thp>L?d>-y&V@DR_tcJVayNgm6J`(CQ+OeKZp0RFPA+R{z9%W3WFSem4U$z&h93~ffYI&)ua|q zn_FzB2QgcSBHgSO(~ClL6?X0i##xpw|GDB592X*m4@O^4dwmlZc~~}@yShp9IXy{@ zLFv98`hJzmj%2S&@y+B$MAB|s%KQ9yZ>~pcIuK+rKbOiTVYSeAK!07-u2&jPq2+wk z=BpOrp>+mHOyd$Ac!S+Q7IbC>+=|qCKh{1~D94%i!by3s2^Z2C^Rl%|@sNjXX!Xco zl>GJ1g?peHC%xFL|ADy5QfmB7i(3Ns-~&FqPJq~dcqQJRUVi&`HJWxH_->dvRxb`9 zRELAXOQV-rw}w+oDASwHjU!sGtIgtO<-ZQuUT@yn1bnsCSOh4p{O5aV`Z)ip2#Yj) z8$~*!?~fcT_ml-R9(d7Ae(C$sws_gFNVY$G8o2ld;p2f+JuesupqYgnoQt8)$vPZ5 zNN(h&$#5zsuFbqKgN99~-@hh6g_^GUqdmCO0@xSi$jcqR zwby6<#7DJulS=QiHE@bZ-<+|^d^n^c; z-pVI8yCf|x26nASj?6{v@|{!6V^ro_cZtv^@}+i|f=8=tpO1yxp2% zi5hdm5;9yYWHmN^PgurSu`tN^vw1PMQ_SZDS%nW-lrIGTd8J&$n3b~X#;|2E-IZNj zI_GjRipqrrV3aENP?z+IKz%kfmv3|YCxVs5|48sRLvOA@S|CA4*%-o}87Xy7e64T+ zd0D#fJ+s>Syjv4r>rQ3$;ADD9M#ptf5p1qpZ}>|Q4onxn!#%Et2PzrIHoX~dQgP8! 
z%J~<4y}0o8#K-o>@`OmCp+R~waEmI={Qn=kV6_25(0#F&58ipEF@v)-DinzWhsS>2 z`+83SpBhB=15NPtqXDxrP1|92h27G@UKvc|Zpu5+t=f@9mHG2Jzf)`K(Z!R5j4!uu zE*)#`=J5^B*IpKa|1+{x8WC+>8dpBa!VrF~s|HOE=X-o1VZL4A0$S`w&Iz}(<>*_N z7D-{5*sdG^DBCx8z6u8zeyz02!-t1sf|y0~wwgk$s>38a8CR*_*jvWvP?K$$>J~Me z`P>_e&JFnP5%cl~Sgo&4^p|y*3!mvzSC2?p7d~wOJ8JDPB##?W<807?S6BO&8s<1@etI?P)u>lM0`vGWGArmJP-`4@?%b%!$OK8T< z1&iE>%*`db8AUEofAyqZW-F)RTP4YtUE@3a#X0+Sh7<)?r06BjbC^0m$ih6p$qpQ~ zsWqFnq3s8Qa&#nDuGv5*%bJOT&vqv?b+m3-;2?i6yJns^=WSF<2>> z!Vfk;0N)88`N&78pP_G4J!%{Q4|()B{r=FpyF;#IzcoJ8@t z-#>59Ms3Tcp%to^j3`J_(K-0D68OojCfL;)1f(^DX5jh{MTJ5q^7|6Qm^G}3<&zy* z`Y=n;Qu#S^C)zz@J|H@M(Tw@}P2d80_c%U@obPv&X}UDt)tFMLe>88LDgjL^@L2HR z3NDW<%z02x3+6#DWGN~YM|g89){ml41A^ehsTu4IxDb;oZ5XxppxK2>KDPnsU^R4b zofqJFqOU+3t}^C%>w39aR(;JuC;QfyVk6!=zinjXTRKJ$ggpC=j7I|M2OW(5GB$$v zFxjvpX4uR>WB^b3L;N@LFq*%w3YHb~9ST*;6h3p~z~?>a-q*G#24T~ZYCG)+i}}#e z|B-6#UD}uJpKrZpdKBnjFhn=dFOx7r&{)JTU}Xl3w7!Wyp1b(q4z3~rjfxqfSV7bv z@t6&-ZZx_{!lh+1X>CsKrU?+sIy?j=g#I~nA;55ngR&dMhqu{8B+voA=3so@)4sUY zZ~M#O5iZmDa*uW6Y40YGM9|w0EYSVML-Tw76$II<%L6AQ0vJg=P>c+|Zlm@v*1QvQ zjK0tQBbxdl|B5ViCRpYBKkhO@B95CFK7DBqXeAX$A1|ar^TF7!E?>bfe&zhN{$C^M zY%@v1(q5ywE|SlJvT@4_sXMDXO{CM@!2*{9mp3#10G{b8;}Ay_BixFv$3490f1{2M zH$+-9@i)C#$3aSraP57*lr~F_c-%67mp8^caFa7&ggh76sxQ$ON zEl(|2Sw<8X^YoWBe7}-;=NT)4UOHs4;X9v=qUc258OP$nQ?Zc(7x*8$$DiNAk&i7# z@zM$h!E)pvcH_+JNIf32MmdleZC$%0+IZ^`1PktyQS&f&4Gh3wB;bxa0Z3vaD@Trx z)*&+MIE*y2{5e>nY_&cCp#ba0gUp&8hb3)O*c}K zK9bWR#f%{}$EML?8r>fX-0EUvuDps>k?$+rQY)2tbxlVmH^Tudw~ru3?83<;{8;lI z54hC?T%f9k++543`g1ZqOA!wJ6HvN8{WpK=#nkqZADAqw718BCZj`AE<`g_%ir(XJ zPib5%mQ+@V$L~Zfxk#FA{=MYCQh+~578L~D4`S?4c?KKeHy{oPtqlbU>_=NuRS}NR zeD}N_#Hbx``B#A-N-nypzIT5SekYtT;fa8XVYE<83>JUVE{%V0S8Dja%mQXXQcqn? 
z985`~WQjr`c6i|hh@_`FL>>tM-c%xYBqvr31B(O_R4Te~En46i8ajW{o&=fy)sHZ> zdKdwEH5@-n>&0{BDJ#>5rzvTJX9XdIAGP5j(yD*91A~k}r-i@3w;S9A?bvQHC3oz5 zqfQDJ^JCN)P%NhJ`iAi>HQXk_3E5qKRBlYCH&u(NbHe^~aHQZHS&Uz?A$)M*NVNH; zxzHY5B?AR*E&Jh})Vsu(9mrdt%x%bo^xnV7bstg?nEcWE{rO)F-cHe+SK+-JAm-2D z06mVdpztdb`APWn-p~<5>^}I^>w<(Rm^79GtBd%}te+U~J?bcBee4iSoQH_!J7^#U z>ZjXWIsO%Jgz)^fglDLaMMLaSMBxM>lZdIWX#_z)@^!^`dbbIB!o=JpC$=1DU^i1l z^H4knf*leM!Ab*u2v}OmhFORBg0`?h(x)3^J#2!)`2GH4DI;Z)Fj=Y`{c`wwBgVJ3 z-btE;MqS%Nn+*J2uWdYmX(R-E0Sx$~1)1rmA@651|PA8WDH)7)5rQ^&G4yUgG z>&nL9|q}b#gL>|$F4_o zWISTzp!x){#k6&*9@Id0T&LSxMt-u#2Z|6B<;EX+WbZg}B-O{EfqpkbX#;RsCstLb zx507pYV``&a-Vd_px`f*{#LY^XTPFe8EYdq0K6aM?uJdI+WjW zjN28^+==mA+H@4ZkZ3EKvz<(qiDcj+$OY{Ml0kFMO$Rb2g>92%VhWx36yl zDBc>grbC2q_#^ZFCqfq8aa_V!Ir=^yUU@)^P9_nYiot<(=5VolqYlQhCCBEnF34Cc{&b%kAC^Cg@;r9UX}%qJlLaxL4_n-idK9Fv%6~H$?wL!&31Yq> zKZ1fxYmY+LHmwrWQAsxd+fH*i2~0C3hib(n18|f*-#ozCEK8X&{;+nk@~5Zl1+Ccf zyaDf;CP4@{VJARn$kMiDiU12!{7+qyxsC~GHpuMG5KVL-dON@cG07H zqv(_U$6Z)!B zFbpfgM^Qi#rw8iL2H55u)Du_~Abmr-gR&2(9HUd_KH3nlKx?d5&|S{2?W4itiF%1S zImsK_jaaEBxp{5ivzduglmPwv{T+iN*`_s|AsdXK)Sif0A0An7+^&*AmK%@?j6&Yeo`b&Cqsh%B6Uyt~$T4E?`i}iBE<)GH zB&4oSwU^WErCZ^=2m4HsB?=GFlAF%~C8;}c>xP5^K6_3U#wNI2#lLDXj=`Ca_8C6H zcN>?c5Y^?k!$B6(?PYT-wrSC;s}6KS;t8J=bJCZ1K+Zn+dEeMe2%NqrkD|_e0BOML zG~Pl?Sy=)tIX!O3!9UeJJpDMXV0X|*-4?vTY6W1|-UsiA(kI%r1e=0S@Y?r_4CF0@X)%iIln`kCoIv*j$sXHi<~z}DIFUL-F0fFO3&gDU#>(Fg%N z^LB{fV=JGPnlvCIXxhTsWBJm4_d5qP;J~TEk;m;$oQ)#7>Oxl|@{8C|m_~xH8&dG~ z9bz}zJb@rxjThl|smTLRoAVI2h(3y#mKP!pOy~}BV!$g1(WBl38Ty0*d_nnlSImp% z@)Je8mY1!hiTB@QUxaj*zck12*#Zls^i87|U4_Gy;UBNFy`@YOoybWCrF=oetfZn{ zaJZPPH*!%v&7toGyxb^3vxFTJygXTSrlL#?BY#-?#@Iec^AVtohE+i7^kVkqtd zv3npR1CPa=hL8d&n;koh}id5~Jc3FIiBqFa%18ifT^ZZ&j0-@8tEAU>=fmr*@U zE1o3t<-FCzecYZmzRn+s{_IxLO zZ5{o!M}_57>b`aU322n}8;^!f3$ENNAu(Bc;S+3fXW)#yPNEUL5 z0bzUkGMgu4BCe@i0OrQ$QFq@OP{x(RY;z9PuNks%?6g%!i@hEwJS}+e7}q@1y8Y3z zxpMG>QM~2=vPO35ER8;Z;RvAQ{WzUW_6ac#Ib|cGws&4E#`KbhBwJSzhE1R2Gq4>W 
zw54Gai+t*4s8q>&ix2Pl?Dt*wbB#g-xRso_2ITWZ60Wz#Jy4@&AP_<q0)fRss^2`^v?66ec2;Y+!o>SQw>?6wv=~Zko(2+N#dOQ0NhUK)| zW9!?|!I@CKaapLo+jvlWuYg?NxxRD+?+n0FNXz7fN0(dE7e325a!(2R@ZrI037o9+ z%p9=GhGiz8ca$K|{81J^KCID3GE(-=C35qwRPDsoz7e^A!V3`NhmoP8=KZDo*l9dO z4IfT3tnpssjriaJUoyO2SE^ppdBz3b$QNYn$ZRg(Y|t*y)Kt%R`Rgp-qjtpzXb2=! zs_NL}X{m9EJRC^I_WuD0x79_357%Zk_NRC1?P+y(HefFWS*ER$k_j+JH0(0is?E{qVaZq<&PiYV3-Tzpv!i)B#d9j_@`zny6paq{mg{~J;g5pO43*0F{jh|}!#PHf zWb$uXEP_PLhrAq3vXi|99;`h`St(eP5KNF9{%vz0T;qbrahPzQwc)t2sK9oC zbsENk!ZKc6Kuyn7f@Sh!=VDG8#B5l+;%WmonP@;XvvJ#%&-q%xVDOTkSK#4Mn#*as=~0PvvbLr);NT9N zJMu!?lA}@2e)v58t5-4CXgBQB zUArkq7{)*paxlNbSqDW3)EcsboN-!~H!`C`Htqz*bIKzf^QnH3;N{MUe{6TrH&pqm z>5(9S?r!%!>u+fMc9jM>^lfCp2!lc zd^fd`%`eVeW@??LHpnRypj%Nc&7PZTfLbHxGwQY+xq=X(Lj zwpI2zeiJna!Gn|dDw)`z%)}}}b{oh`fWt4YXgfognV@-$hoCjEsJBMmm91@e?H%@X zg#4*EsGp@n&-7c~9J=XTk@MEDtJgb4Mz+Ta zW~TTeb9I4P@p9|m6hkOO0tdcLg&j+fCPbgkIG-_JR$m#JdM}xLF1g5+pF)c*`lWvfT-352%xKr5-0$d4_FbpUoz;LQSAkFtJfY&7MX3}IjVR`d(*ATF}8^ga~mVW{1 z0$PX{wUa$>);h#9VozqoB2X9TCcE)m49WZ+B0&~nr#5A4lyggkz8W_bs9yyKfemM~ ztSlB9%w!8eRf4l|nAO>XEGyq$QT?Lr(P3*4()ur{gF;0Z671mEuB;GW0MQcb zFe{}&Ts*mBsdx>ad|&gyJHj!Q(lp3zBbIaOnR5?(J-|f)ux;Y7;>6@iJ89S>|G5-l1+dISe} zuR$YYA%NpT7iM5OJq1I%**W08#(zRuaG@2GrR; zHH(i1r;xyz;UyC%s^*D6AccS+ReBhlW=Lg)=|6$E3ks|V+T`~TK3#6|TJ)gz`-w>) z5O#3`rWc0^HC*B{8&M|m+`24vGPG2Ww^q3AE zUuMlC^+canc8%JqGqF9_;w@XSV@5VpkV(b1JU!D*r+!D5+j@JFh#vt=nXt{>tyX&p_`8c39zHaaV#2RSy4>@VNg_!*&pyp4~!Oj|Wa@YlLzJH|5^z%{q=-bTlpcwO>Ie|vuX9L*g zggifT_UC*qn4x{}Vz3o4aA5}Qyxjpfb(IGlZe!oL=hIJ5VpN;@tn5L(2R#KpeclhX zIDT}Vd*kaPONY5j$=k=f^`1{ zHK{Voe}QRTcyuFX($);&NJe{=Zpj0 zU-UQZ@HPy{Bz}1e`ROUma!Fvv;eCRaszZKy9ptz(I`)<-+}FsVGJSF8{R8uRHh*0P zyMHJ8nv6PRXnQcIf|aiKO`Zo$ggirB^7!CQBYUlSz@Tmh#7DKE;vi4_az}&g;p&9C zo=(I$>^@Ell}5qH-MyzOS&$MI9YyuzdMuv*2HI-AEdVvbqRA}B1Jm+zBZ)p^MGJ1; zTOrP{j+A2fz8t!S0WEuqoBBUHUZD*c8u)#H?bQmI2Im-31p%Hg+%9c><&x?0@*Fv$Drm!WmtGX0J`jxZYM3x;V=O30MmNM 
zWqvTD`u#)rD(#klFVhLy4RZAv^hK0~fgI^k1>Uxxu1gt4%~#@vukcM7>mhNu|6o9)sRbCr5!IfgNx&63hL+6wQr{+8Zg=m9!z&=TW`102|%=H2h%^?GX+tQ1*HdnHio=jz4wd9t}5v{ zrtF)&AF^*s=D?++V~C$Oj)QV27~yZ;zz>|NxPf=sYUQFNAk9sn|_dL5^M9($aenTz2>|C-8lqb zN5{cm_JJ?w!1`9=?~X2A(JFV@0=Qys?3X8M%}z%6-D|`RChmsS$kkJ}J7600J8PI0 zr}?&1<$e)0gZ~vlLBn7SVPW1=c_nu42Ezz>Dk*DwvDSCW#IWKe5(Ajdb;>1@*IpXE*QBp zwYknK#G}z=`b}l^H*VoTF*C$q>Iq!!X=9r3$S@XJYpV&&RyFejUB{=Un<&m?-Tn>I z@$2B$rCZqMY1LTCp_3%5PimP9&agR(@@^|XW9+o&u@V++u%Ly2onyF1W&3@TRe-kd&g}wuH-7if} zi|+RPa3a$V7FI2s%C<8rs{y+$H$opaJ0$&Xu%gk|rRgZ=?1BwWx`$54Gf70UyAD5M zQ6WElOGq_o0%Y$EfTLVB>`i$BP3!H+OZm7x(TTKf`-3%04+YXl@ZTj$SWY}m-<`qr zDp6I5*hNuV;m^N z8WyQE+T8GI8*+T7IvX0yQuQ|2{#RA#;4yn~&WetbJYs`);lGMdI|G~@zN38xfWN(( zCH|sb2ftpy%4fpC_s05S&xY(j*Q&NVl7~<4O#kR@3%Ub4nLAGx>RAo1G9k%#G(K=9 zVF%X0tYFL+aHBB`Z2Z#Y_L<+pqF<+y9at1`t0Mq4xNrG<+9VpXAb7F$Ol!!l`L-fT zo0H;`Ai~a-OKmm$#@Rn!VgFe$3Dw2Fi$8QhR$bqJb7!OdS_7a4P9BnyzxWPW^WPno zKyJ>uV$C>O=_^<6km|kbD@D6kd?7n6My3>>Nju-l&3-!hW@2YHn|Ts6rxBI=vCd-xoM{0SY9lK38N*w!m@3;f4C#v}h-?`++Y7M|0q5pXoioY!fzh$M2 zg{~^WI8e!0#zuiEfuC{qRh=d_<@|R473A$Syzo2j;?yS7k~zCDk{Zi-qfi+tUF7}x zc+Gn(V8Z403{?UwmoZ(Q9PT6@kwmG`3Zc~6c~QX&>KW_75PsmY9Ym$inD0sMGXpf%$aR%d2 z;coMbHm9*=5sv!1y5lSTwx9C&R-^5OM(m%Rb>^SV(2QMr)=}%oH@D1*!d{Ck4ZCt3 zPW?AT{Ow1gNQ(DyP8z6IN&$%dfmen-p#%xZGm_mqs6NUsNV0yeVTpYyPt90!xx|p! 
z+2{!P%#vs*WVlT%a7LT~%Vc>2&000yr>;OyP)IM<&k|Dtgps=%09TfiDKdXE7+-$g z__iBa;8X#zQJC=hUqyc@9RCshF_l%uJboTdc}j-YU*FXAcXu#WTAe(H`lROLK}oj{ zPpq-oW{uV>pnU~O_s7qbL>I%q7?wf$MnO9XaH(su(ei=YCoNyre+(toTZwT`BT|qC zYj%JxMsETLjV}ym_RtPPQlnyHz#^s}E*Y#DvI9j8_6WpXH2e2IUc3T)VpBDV2bbN5 z$6tX2zPQZdblM5_+3(G{*8@?y-4HO%0=_&sRp4w8cG3LdW-zg6#*Nx-u74ebkPj;8xQ6&ihy8;hPbi{fE2j_|fi*PaG8JwzH8SM)Dj#Ol6_&sbQ;(V; ziGR?5;NkiSU`<;A%AJx_)8;tw5I;QH%vp13U!JG5>d}PPi=a}unNnQF(GH~FWQ?m z@4gpZqJQSv$7HoDr)tq#02@{RN8?vQ$^sH^>?%nyeJ6uy6#s&-)N_Rm4AiP; zdubMoXg$d$YF1bOlS02;dCPUDB)QY=eVZM}L99|OFog)<*7g}WI+^3!lzuOWC>-`6 z3``^JK^JnL@lxC{k|r2v+yvb+808-@WG^=e4hO4b4m((V3OrGd~ znP$!a{HhnpSD0TnpwAm;r;b7PV)B<{yeh4 zzs{_lvsu${_Lpe~EcEz`Inn~wu-hKkWmSF5eu-FxZlX_jmzL*tLLmDu2f{8_ptlzx za(jPnYP`ryDC)9-(%P_&-OqfWdfgt_Gx9BoWlbONWBM6nwdZEJVNulcJ^4IUfL@X@ z9wXmKNQ)9(M~y^8P#L6Dt393j3bkrH)fCj^D`Bfrsjf%s@?{C(bt|11yvhJ|oIMzP z7dT#~PCDg)89*uLn{A(J?=1f4KUT8&nXevT6nQyK^R!~!r1Hb8Q;yup(4X@iqA8>8 zxd8iG7i??HSOeq|-?NyTk=O`5qMrxkbgt6OYdF7nJ+){uu!1W@i_G>5#%vHkCQVosA)Y3-h_|yU#fhNoQb-A>1 z5qJ99*n$G~`{uZH(=S44dE*RWxIFCf2;U?LyV>gM(7Lml6INSW>vSVdbxUfivMb4N z>p$W#U;XeqqCWWa~R!nO};(4*twF3iPjB z)Oa#VLyv&)MQsRM;M12#9plo^*L6|EU)(v2J(>>hz|+Nr!}8I6i^MN;^M33-BTL$d z&k+x?O-t?ccJoY#bgh;NX`tJC>4Mg{l{PT>$+OZ4kfrUyw=r)=+|NzdUuwseH%Qng zJHZ5UBpKgin7#RE;qvZZvto(2yWkExcG_vYO<7p$;1x*7P^H& zc(2Ll#b3@_r`BzZyVFQ0lC*f#Rz~2ovJF=CHhKM1y=}RJZ`sB{UCz9g``A#~Ows{! 
zL5rbptMpZUXdlhj>z#Kq7A=piPj;l`{xH3!H~T02#IG&vf%luL zTNYe3&oJ+QdS=b*N!a;7{ssQ9dSDE52IkHAw}SLhJQT_{q)`3Cn~qOBXq(_s31RG6 z&}f~c`Rpg`H8IbZIm70-d4pdii}6QW%rEh>3MU8!_#GM9WrZ(>UwF(cPU@x0q4%0M zeXPjZm~3}E^Qk-QLxUM-q1;kl+*O5l$x&CE5${^gWc9@Y0O;#%SKGh!mx2k zRs7*MAL!<$*G?tpmTcJ@clyT;C|@OCq3T;?HBXH@7Ej*F!xc*Au;d%8YTwheOjW>q zq$2OKW~%ysm7&k+nGZg`eUR&=xv{)IOdl-3c>n3X%Jr?kiPZ~mA!MWfcM)4m2S)|0 zDyccJK|#}|g~4ZUlNgcS!8|;-poR0|wB_o192=~vZvh;vu?(NI3Fhkwk~^81x;NZO zXi%{~;<$Vxu6!Ef2^;liAj5ZznYFCsKH{?V-OP>NZTn*@{${@TwTW5Ev+2cUKZo$s zjjdCj=USqaQBQBDCS9zxfG=2?;?x2vE^Q3Rk(ntS>g1O*kQNu82Szkgj^K=n25O&9 z=b(=2Zqdq$aR>QKZ}*_vtKhsDGS`HoIkd~ZT@J(n^Yd5M=!n?s1$ZfIFK*N8tELzI zc1wU@^K~GD=FnR;OxVca`B1~3~2DWt9$yKL!(*c_0<-8J8&*wpIBlZI!x81?6geZFPSF z^3vxdGrte3#=aB*Rd4C@<(>z`g2YQ}1$33KDs(jKa{A=XIxdY*bi|S-cI=NkYzI=& zjBwksS0aRcrhBT{z4~10|K@+KnK#Rd)0DT}G4Z2m2@g;7VI0BSrpzw3?p6H4RDdn# z^U}OV)Ua2eWP^q2Lb*jN$LfRG)x>pv6EkNN7#MaWrQj;2%~rh$$uF#$xFBR^_{5gB zG?1ct-?ItW+9Te0@1fWbHplPV*z0g?3r&V=MG|F-(A>q7mYs8dht3n+Ub}ipiA26& z)3R&z-~u`C+0W`Pd4cm^&3=CKFlX4gp`$^s|-s^_g9(#oQbvO?2Qyh zeKYTI7obgFZRgakyjr;PF3At>RO*mEy=vGM{nsAG?W*y9u>U;4>Ind~OyBYdKU-Zl zK!;|Y6YI}U3eum zVzH{&D6J9lV3#}FvgPuS(x;Ypj+Bx6tcy4-Sc4F>MD^!<4nRjZXb# zsNCRA^shFztDCP0^q9Y(xfBRLSCQ9wbh|Ed2I_3Ps6+y|bv(BYqiw=i8>iEK!w+1y z08wC(k{xhQ`}fZMEpc*Pt25ifO_#yx6vs&l35g8eD`mDF)?;Pb#U|IlZjg81TpP}W z$%^2%`?JSm{0`Ix4u9VA&&Hp8WwO(pO+D#(6u2RYx=HZjr${=zgmlK=T<{$s{Hw>0 zGyw;UG}>Bg*%6aJG9NoT>xK4u74lmy4EZCFY|T)%5A7x0y2Z1Vz$2W1W4(Jq-9veA zK!$kNi_OPzByf8!Nbb6sA^CLa=F^)$W00hEGPzICI~o+)*;lFRHTEx0pF3vI)6DH| z&n&m@q2J$rJ5|QZ+(clrfex8($svrZzu$UuT?0lSfw5VH!hvQqi|Bg4M*9BNZ0d^_ z{in)uW^#{LH!= zR|_?#k^y#$-ZdQ}_!RBX$?o6uh-9KH0^zU9w|^W^Io>xceUc{o2bWs34}a=~Jh{9f z>UehN!W3zBn8cypzJGl8nqN~0u-AUXGFgilA*WPJ(&6aZjy?5uc&9nHiMB43-?G<# zz3Gug`%i8Emhv-`2Am?$G9nPYYOKP!bWTdo2{ zcgf5P)SXWydGm`;-#El%^Z#`^Tzo?XkmW9G*6v7LB!zkNV(Z9FIVmCjxU4Rt2+a|` zLwTU_63MZwaxUmFi&Uza@-rYo)O5YZL^W#GJ6KF)=WP5lfQuxSkfA!Qb^4s&EVuO_5Z$w?T-#H||pe%cxgxl)lU9xfsxcq#x_|El=t&HQ* 
zy-?NVh9ysz!;_P;Bu4%N4Mei0xEdMI>`#7VKWmrdV|`B8*D}zUPa7sGF#UDH>ga#G z`~TyTDt^{XpcVceYU=)8uZO8DM1P9sZgE_kv84hj0q*14Dy0TOASKBA_}x~|tw{KIe<@_D($stZG_W=q81r@>2&lGF9~J zry5dnGYRXzPw_&&Kn9%_v4wRT$N|#IEADHVE!%~Y&)eMQsb`@(Y|NI?7dfo>d*hY> zH)6^a)cShi`D&~s-5RkBjm!i`aKLg;|KKk&hIp3U87?M^l7q_d^}=Ib zKIzIvMCF;fvs7jTIlV4y0`$U!UVsZ`SE0VldF~qj#;_)P&f>jp0S^;?uJB$Zpnl8% zE#%&`*9h3=mbm4kWg3YVkVRn(_la}Fg>n8%FcN#GHIIbqa5gEMywnVGuh1k(c zJM%LD^GUGC2uRW>#scK&sb<_)fi`8^atInUF9w2kkMtz@|7iN^sHnd8>lsRs6jZvT zTR=KLfG8c(jWCi!ca9<;DxjovNy8Ar(4hz;-3%~v4KRds!+ZJu*84YWvF<%{?m7F} zdq4XrJ{=Lee}DG@a|6wo!d>$opq}Zv?P;K>;E6NSZmCBXcgDnEzdqhOfdVIM=`OtU z*<)jUgTVGMHmKbUO!y71(LXx{&&N!9^` zn~PlU-q|^L+TsG8({oDyvG@YP?raQRw=&aBD@e#3C%Uke$4Bcf=bi6jJQHYEeft9Q zB58)<#8&#|1Em#GIwD2w>(c}S|@WUpt@`%*P!#YacE1C6N3KIh@XB*MBX){8872-7!z(ZR3H) z>!+%Ehkq5bt@y^SZEH_~=okX*KZ>DkdnQ&3tH)coH(8U!TR>n9BMdu13dO)N149`z~ryFRB{<$=?YoFs~N1rOOf+S|uND#-oNQaOWeL zOcw;fwLAO#rjuSLh-dq#cmmP;|&o5vRd-vG*5@i$QmGhIO(r!>ikOXz9=EAQ70&PEUYY(9am^M6e~I&u2a4 z=73hG9!q7KAF_wvaV(0{B6)Q7AQ)0}fwy#rakd{?&czi0*$3<6!bbDM7`?99Us#!` znUGJCktO03Mh)3aJkqq!umt7%zb$IN%xr~C zFoDuJN9CNWE(?%tfP2*c`OeRbaPYM|&a)l+J^73A&Q=Ve_h?%NZukDE93Q9El+16y zCPkFBECV9QF^N1y5fEerUPRaK{Ko#3Gv53CS@2bB2Vdu<`k)}WOJ5oro+${(KT!6K znPcW?+58gzz$dwfHs5;kE*+xE^Ep1`$yPF_s@}7IAsuGuXAHPr>4F~rt){jceM^wx zzW1I=bmQ0`{td`Mv85Lv*6jZi%9)15d}xbOJ)MqdSMy`yq5dcMj3w za`eDVR9*fWu04tHB7STf8Z@O?PUHO-{gU#@FA5Oq`?{3X*JUV`MSSojQl5(MuN{d? 
zKRf-07q4L|V;sQw(hCgsZXU&KRwQ!-ko}uqWMLu`I`3d|V7}jt%M|%;&(#hzn5Hd8 zFbyPIR&R|EJiP|HUrkfPDuy3fQ~l;-9B6rOay?K@XI8DKa}5ldFx5?mxRg=Xngm5b zT~K-7r<=M?Snd{Yb+Q~Gh`n?*W3H^!m}Hv-@NK8$hk%Wy*Y(P(PEvp!>$THWK*X;L zD8N}fIaa$FYq!K~InRh6db`Cgf}{Ris1R6x5?TXXwxsunI+D|AgQPo8d%+z`K_AR0 zwKR$a#fEvhUBnS@9vAfCcYMgxZu@uDKcs{AdFU$&FxBLa4`Oa>2KZPt(tLV852Ar= zWLo$j+klXy%`6?6T*KSe90d+%#bzMyHi~70*V{SqN8!j{5Le2?hzIVLvjs4q;K|5A zj1cjGQM$ZFV2~^g#K=@fB`Io~xW7dc##|=@Q|4~E4dR8$7ze6J z?H_OG0sB@IB2{Z!noDAp` z(Q?D3S^?S?ivr%^fK$r@cD@y5krLVQkMZlprzw86*ol-&d0INXkP41(-$L{`|MNF9 z*DgM>Okt90LQ8Ye(!wnCe>`x@HEv!lH1i-IQty= z?}0?x;Y6j1>C>KNZRS{zOZWr-?usb`iI3!gTPuhn&*w1uz}3#pT$h~RpVFrjHTH#4 zqf*^H;U@m{B=(i``Ib zMqdItMJ@z)@jsrQS!-G%*5pk0&|hqTw7VpwKD>6}RKC8pe;cSy_YJK%0rnCW{AX<& zw3Y9L($c7V5{O|$W81%-NCIn6tXePZ;Vq?_^Sr5u;`et2$HH87aZ&3ER&w z>0iGNGzsJi` zH6)5X_#JwUFM|_URbsU6L+>O>|zNL9=dbz5}Bg`tJmE)V>ZB!9v&Qr|t9Z^skK@c=w0wa}6KB?vZuWGGz4+ ze~zY_dSO#v^RCX3dIlJFjCyUXLwiBoF^&zF#0>lPKssyOPKeAWJumO-bw|26ASZ}N z%x7?FMf;W9*;3quLGxs1T^al3`cL_vIu4A9oLeW{swBD3jv8qQ>Oq4Q;D10m>lWPy z@?Ke2=wkw0#j~}&v0p+h>-RVmeF$Oq+{7ZtCZhN;-%%el%?xZ!Hc_9Q1_W#$@T8V1 zpjt@jgx(Ovv;&p+Q>fWv_$bFLl8}z{nxH8VE8g5^oWxz1c}FyADnGWKQpR3Zm<(`MUNkC9voWz5A*lqrNBKw74ar{Lq@A<*590t&Wa#zhfrA)12OZvQ1*h zAFb^YEO!lb@W=-s`)Z95WQwJohrwo3w5x7^|^t16`24Ic5xRI^#tUlyA%y5`34|Q(1;`^)2fQ}Qs(rEhycIiNwUmd zY;!_>oTQj6fpjcz6LW!N`#jc7r`z61&#xTr?~BBF-7hg=8SYpx6Kp76^r}S->}8r& z?X@6Qf4YQ2yP@{sxB*$qcbVZ?l<2$R27zel4^z~zN}y@2i4naa*h04`3nVfT#cnQ_ z7}0xH)SUdn-ccp*57%y`ch9}SM_w)a0Y9(bewRnXP3mJNp!)G!h9BhId8?-Vfmq55bQaAqOltn$c3EP z#THb|PXx@IUZK8k#5w-m=PqO@E01)na_2Y}r#pic zY>>#mkYSn%ZEwaWuKf(vebTe<)kgfIYP;J^nvM~+6SVMwkyX`+?XHJ`-Zrg)K}Cgp z))LR0hHHvWrU9o$y%pJD{?AN<`(pyH`q>Y_-D~wTAbkxCZyWWzSnEfZ`} zpeOW(ngXgu2a=i>&(a(utb#NHEf17?;qNGue_EDEb|4rr33ambB7f=mKy+1D1rPZXfh7-_ zf6iJa2&UtI0bbxhM9`uX*KZTR5;XT9zllthJZ-|^K7<-=8L$V5&_)$6-yntW(J~&zpuPe?_H~MM#{f8v9SIXhP zxTyE73kQzaR(nalP>ZTPs8`6>LYSNxjYtKB%{Z$I|-`_$soJEdw0fn zkwM8p8WExHKOtM~PVEC*bIqk?O?NXZPlK{fbDNz|H^vj*D7W^#Kone}S+3>J!DkR9 
z9u6_PbsmAGo{lz7Mq*N}Xt>g*E-Oc?;k5oGE=K?{zC=oJ1TdbEx5M!el9WoCJ$92Y z>RLH8@yiTS>TBs3ztQwP!J4Gb9BlFX^JD8IA`C4V`lTPfv0fVX`qi!oN+`Zk{YGQD zWy5{9F7&OpEsGUvbzt=3ZLfQ149+;c&8EoVneop zDS9eDL+HR9_wlF$VWEVW{^W^L5O4T*ZJ22G*pCI_fZjwW%c7dETfbfM-r~3%dX7mw zKh=jNFl2U07Dn86H>LTh=w<0eShY@UmTWXs5Yb!FETsZGTfy%hjJH58Jrz&KiBz+g zC?-C~$LDnpCr-vTqX12^e{;i#X~k{WmJ|Qt8P!Si)dT!u$utY!EZ%~la})B9+wY(L z#L}WPxy6&ik};9tlLiYqFnU217~y*#SZ>tOp8Vo1YEk`a>jkXB#b`7Qk@1x%jbAmC zO}(cv!_d+N$AxCP)+65iSQ$39sz;JH?ybr`-H0~@XRnA(wJbuF>5FQ@D*i}89l2x+ z)9=qjF?4pb5JMt-SpV~K{``2c_L{w*T?dRKqq&p{hpyO4$|E|o8BG^LQva)|~kAk9{z?A1FW5U$QFJOeA#l^wP`_KLx zuLKFMqo8ZLbYa=8rE7ICi#(!Q;imY^A9yDD?1Wzi5=hcDO#;ejUq|E>x)h5}K7dT8 z*#a+cY>HKb;s^#9B#ZH(wbJX$k+o1gJ58L5&pvvv9gcUavMr1u6}FDs+ZlvGUTx$%f*r;E6@&AYQxgrWaS?<8m~9FaC<-c?3}AXD74l|%&^P16>c|P|tvVY? ze?2XOKxd@Ridg1nOMCT$52&|&JdQ3$^INpXb+IN7z>@)67j@be;oZUkV?P)>yE!Sb zX9I*fub->!JZiu?0UI7nQLicS zP({D2!80vb5Zi5PBN+lZfh*wRlJKVLT(4UARN$WXb)a@Ua{PQV^1ZA7tC^$`#Hj>I z=*Y5g&Y|j5RwlA$x2eb4w#Wk&WKww;wqswcoh{cMC$E94urvJRKFy<{h~?N`v; z1G+ro7PazQGe$>M=id>a6t1lKlSC?HMP+>wW?X2&wYQD ze59_C6rg41m-?5hx(x z-NROvj>gym7w_Af>-?pAq?}4|ASL^Ij8%^b;o4wC{951 ziv$xYHfeTmpwI0~cdYz;yH;iPNuyW(fLZp2H$2WYR6=THcIeD&8#O-E4vbn(sqaH` zB4hV2B@Q-NuRMB_90inNkMl`or`+-@eFUV6f*707R@sb9VWSNv=PhDlzR zgQn+JM)K)VOy4rK>KAS)U!X3z_1(V(YJuVJELMtcS81BE`#@Rwl2btA~2faQv5K_5c-+qyUEkm3a- zdExl#6JD{Tn50^^dW@7r6!s#E@~@kaI3ZEbO=K?Vh#A%drn$=IOrh4Zk!MzUn5y1m z%vHvb63-bu{x|&PndfX>T*ZEzYTI+InJ8rgwED>bgj&$~{C${)m-ew#^!Jl8+o+fy zY#%9$-KGMB$ytQ@GU^GIHGzG6%B#Z?$med7bA z4NExZr5YTNyk{& z#kf*_uN>Fh;>&E%-c6|1r*&hs9P)PuJvlY;YxK5BaHrU^G!=u+Q5jQHRg{ZGLm&4q zr_}=NB#>4AZIT`my8DSY9i^G@wC^006X@WPC}*VjUfy@E?CXekeAkQa^cdz2ihOt1 z65-}#LQ?#xg2caSvur+TD|B+Mo#~ zzq{E?H@ouT4yU`B{SI=@*rsKxnz39drz2WA!%9MI*o%`P+7GH!D}G2y8W~r<=9n5( zMGi|w(41rZWUx%AflmZ{?^IxAFW#r80gEiwOFyG0h4lq}AsHX~= z9v}PQlIojY=ijOR?%jHyqxSe8`&i&WNAtIB4kV+$Dfj;A7w%XR>mA+N?=us`9tB3H zKpOg1mYs8iXZ)36Izm^EH%t{Pe^=)3?kpzc+;^!RJUC!6zv=(_7q0 z9H0kFVy|y(>Cr*Vx-w5;n@rQG!IATCO&3D1Z!FxtCI>d!9-ltMe_z~LUYUGbqcs$A 
zaWuVDLt+ZbIjI`OoGxoTOVkcE`ZIcIdUfrZ!XKvs3u5kYG%-%VPq0nVgU?Qw8%LK9 zgb%QHsO}RhVGLe%mSjDs8ek%2T?n$=kYvY~Xw;@#soggUFAK&g-)>PBUpV6?%HE^C z|4g55KvOfQ)$W#XLjPmqFFakQTn-mjQd-FJC9=Prmw|lvN<}rCvg~0dUIEp7#L4FzbWf?@{}H^ zH=yL+$xloEk4GQoquJQKpw?c|BE#L$TaLcM1g{ho8a2k|^3_}5;t)^#DFB}6wpZCW zGuT8Rn%SwIsL`h{cJ0(nqWu3HC>g?euP`ZQe7T`9X{KKv30u3!&MaSj_R5%_c9%mqgz{&q6K7pKXD%4Zy z${kWKU)*LSz2U^4T35w#C?Ck`zBP{NRQxu1qN-}>JC9I;<<985b?{hd3z7HsNGKN^ zYJc*O<0EbKJ;op@sq6B?nJ)3eFcXUf!BrzV1&jN%1w(5#TFmW42QHk1^W<(t1>Xu2 z&t#s6zXzd3)!=Lzh?dx7re4mDPwh=N&eISw5#dsl6Vps3KhlL7`fH)mY&UxU*H)T5$byB%9<*`HS0O`MhMKB{^kkJ?0&WPQkYlfI`8n%waC`wQ0PU&sy@Sme{Vtu zCK-8BgP&zdf45Kw-jWu@`t@`Zd)Hpb5A4`_urtyxi&u4LX10b5Yp3nU|)W1;OkH3WzNHFQ=s#$|#o< z>4+{aH-2M0NL+PNvCnlx9q$dz7F&>fG#rdd=IB@owh4l^d5wlbn`^yBF5%T1f$ zim+T-9jZTTlR~Ds=Mf9W2bslCq}kv5|LHgMn>e4F8ri?dZVR#UnHH84ZUY^6^)sWV z`rVJ3t|H4nTHWeKvo+Y@HOwMwB-@W=khK{$nWcwwADGD(RTNDM@I8{b1&AC!4Sx51 zXQimxSHaAMr+eZ5rmt+osIrMgs`mWv>H%Tz{miaP4p)v_U>!()m;Ou85lIE$@oPw#Fto%KPrD`FshqSeM#JlTR#4r^l(>km9dT-fx%` zHz^uC;ndiWu;)Gu+XVoqI=AyV&bYYYg$C8gR^o!*-r;=Ulf`DHhOwc`@%A;T$kl7mEcJTAAS$mK8=6??N^RWI~UBF<~ zESWW8D~qKQKHDR7fBR|ks2>A9Z0o})=Afstrj8#nQ@_0Bx|M`hRotWjaM43yG;`kMSlb}m$6c`j2gOOb=zAX&aX!S;?aaMJ#y-f3 zw5z#Uw3GYOzAIIm;r59m%QJ1yvbh>6jidbB?({9c*^h#*$el2Snii8jstT{^NJDQ= z0tXGo)$rG<+&(``gZudu-h0gr&+bZ{_%q+n1N8Rx_jD=iX96c8OYv%-aOH%aris}a z);erj+!xYK+21~~3S7zz=`vrCl|AnhVr{+UKioUe(D3p0AaL+51fJU2AXJGkR9Cc`|MQODNl0+*TJb6*Wlg)NN&zIGhGmS2Fk!HJwJrgS|e3kx#nE8)#>-|dF-=a)D z8k|c5SF^WrHxb1FZ=ToSfLQrL+R~PaRnJ8W@hDnYlC8Vc_ig*U2%mYu2=n0UAl8nr zpVmo+2L4<;XEBW4*tE~Yq`(@3cF0Wpz89BHH64nFlJ1F$hoBc4R80zB(ahiey=s=E zL^W=YgVqmm6le`EuHy4)elpDEDhugF{TPz7>Wk`^X-a~=Ag*vs#UborIvgw`&lLiyU2y`}X-+usbZj$C2g z_ERke=15Z5JsG4$+cTn*QXK~V_ z=zO<$F#2}5m3*jVn~js=31pMpHjjE_uK5-=-tWrST-mT4*y6M$HljO743f1k04n98 z=cx+a2WI@vg=L!)(gJ=_Q&jJvRID9%zoa@qT~|wVObZZ;tr{<=@A^eJtB|2r-s_^Q z73ss$L{o~Rzv~sDTAXi;t-QQ_#+3Fd)l!Z_owXY0Z_V@Q`rwl_^YjaLsh_uT6jHx$ zafJwU*~3T-xY`GI90QY{gnhcrj0TBo;my61R=%rP@{wO+;pP(Jfsj)~Jmc%jcK?$I 
zNONI!`}NNa#AETpn?fSsczvN%K%?`8x;bZm(qo0=?8{;b+?<-fEySSKX--A(@7BN1 z#)jI(+-Z15dvWtcz|}X|?#8L{ecUl-jy~_waD72j!0gQe#phk?8!Sixe|Rr#E_M_X zm}EtjJw*9#yRZv>CPltTKw(YjNGj*$oe(|QNj*p3>%dQ+ihtFa7a6K*%wpI zmroj8lC>$0OyNMU*SG)V&c1f$QF3N3_mw3XWcUrljPU!xPc$3Z?BwT9INr~M51%>G zi)PUp#Z-2itWh)<3|GL83rh63%DaYC4L9pnwfdyzZa3ntg2#(2gFB<7NKvIN zGor>^UU2Mdni{y#A=KwuL?CqqskD-}KO*7S@_+$t?0#z?E?|!6=?LGy6y>~Oejd*R z0Tt^NHpymwLWeI7N!`Df`Bs<%q?7-lU?^B(EA^_)Oz9xKg+O!Y;&O3so)oXa>#a})z42O0 zuCm&$3Zu+#d5(v45)L`H7qucq-e0Aleybxd&oC$GFnRgC$*#3uOTJRON1q2zy*ziW zznvKX1R6#GQ+U|8dXTz<@dS?bD=&(h+)L8^xas!mw2QhvFF1vRMOjR4w}NeMrKp&y zZn8ewe})Ns@pTN|F%@&rlHs7VEn(=6djJ}jMCg+h40VYxf3>=fDUa&5dA-~o7pBbI z#M!|^mwYDIx>Mr&>$3OCusuf9UJs#nnXt``>k`L(S+Y|b;%^?EyN=)yDdlry7w(X9 z3z{F)Me&qO9IM8%FZa&!@RlRlE0Vhm3s1HWhmI;yk{F6bpyF~K5=Q%6TaQ^?+HTBr zB5O0dQNqo%whXT+uC8M3uU9B+BiamA(y}d@6mS#MzEyLXeu>5xCMNV%C$Fe>H?3`y zbxzXjLq7PIAtTGb;<8^zEIDG%;75 z^$`!5`!wlf%d+fhxn)RE4&78 z9Qx#2r(G9pl2L66W!V>_p^{ZpSL%3edBsz{cLK^A&lf~ZdS@ptQ3jOdiulGgm5q&# zmDT4%lY{`*XTX@{Os~&T0Tmq*M+uQz<57da1R$6>@JaR2kOHTkfH`aouYa;vlyG8H zp~uH7N!I|Vdu>PxhI=zQzdmz9T6`F5#$ zwF1Lp@TCEeDm?9-quBuEq%DPcNTt@aBgQVTyjsw1@Hq}KC~4gZj{MdAGM=ZA}fxh_Bc zXz6i|=uPUzMdZCzKP(>msjS6fqs8lIXv-yA1Z9hziL1zWW@#kCuT0E!aXOeij7Wx# zo)~6j5{vHE?0@^=bZZj{{KWi+{Eo@PTR?H+S;C%R4N3Po{DmNLq0u5qo)`Djzby2^Bv^CBY)W! 
z2(r~}x8e>_34LgtXQXIRdF)ldo=Z)JN1gLfl{ft6JDCFr#LmtaV%g))3|!|Ubh+^m zpHGuRNm3Rf8~|2hjV~jmK<0(*g{ zeUG_4b2ua-9tI9O^CjH5@In57Wx*9P<{o6iB@GjO-}^`t5Qw2`P%XPigdR9YG{^zT za9G#8$ZomZSOU}N66Ebe6!MHRedU(x0Y6E#H0hbU`UetFB2ZeEnbuoQn3*Sq6t#BzS z`r;mwr*V{o>+A=IW$vOnWY6otk8jO|i#8+xfx%`1JA@AY6n2T4{3*gRM<2T<7 zeXVV;Z1h$dJ9c`s%v5MWIkd)$Tp@$V#6>se1GFgcnYqZ^%n~fTIHhWz*NwlHt-t0B zHLqJxM4&v>eYS(EUDhf}7(P(V1zLCml%C(I3^KTB6Nu-gOnL)~(15I`uZnIZw>i#%`Vbm))?Vmpl6% zw%RYXThCOY?#h)B-rK8CgUG>wL9b2`^h~7^ssP)(Ug&m-&GZQAvoljxaE*}@Ny8uwe-X;kK=?J9FWyM}3$`$qPUH)~`<#Y807^FM3=I9QNg=$`3 z*$(abi(QEQioR18LL@C>6T+-a0UIoR$;om{&4f=Ypu&T|<>wUgE0Y)D6E}YCAb$T)P%`x&fn*ZISeZ*3fVQ!*3~y`$ikOERl5QN zd{`S~m&$ovRvGrty7<EUI%Z-P z_C}D5n1x$`KgM3)>avPTaSK_xLF8lf_vbYtanA>Sy_a~-5#ihF>JK*LLYMKm19z=* zCmB5>bpAp)N_QhgVEhK-Q`45J7z%AppRp^xYbK=v zKh=4S$$+HA*#NqQxa7~1|JbtTZu%T1nqn+tMFmY#R{)M@fU`U8vVy=OjZ%(*o511K z49>4`?{f>x-z&Jr)%6rjoVpyh3e1);>BklpkRE3tlPA1~X4ll}Cu}eZ#pqmRt%5(f zAoXJQ$Vr!8ga~1BmFli}Mu2o?#So1A*%?F796>o-<-!I2~_s#gdwYX$v2WGXC7_dCYte#(=DPG z)ADUmEe|0voQM|Mx$29{wnWppG6RdRD@rlDM`ghmdMP^`q@X(OpZQ0UF{!^699R8@ zhI&!ULg&Me+nRpeD#Dsk6UJ}KMmNpp@-$%y2ZcI?dP5~>Z#TlKzQ%Ege72*e+Qg?l zfxRlGzNtd~^dF;dW$oVskP?CN;`yxArX~L1D8sUHFX?#VoDE}(4eDu%p~BGs$h2?& zoCXVpn@t5?X~rRT0;yP7OP@YRo*|ki{kaVbC6KdkK2YbJOs{eQ_y12%Y4)3tP@mvA zy5%83M|HNF9-mgzcXG_mKJUfw8qRc5EB^GKJG$Yv5^q)O5NN_Cta&SVYlwt1WglAi zcncp?tU2S9AFTZo<#{m~z&`YAkUyioeAs-}e6*yoB`}6fl)FPr;KW$i++42$YT#<| zaq4MDIWBRd?$1(o$KPe!aOGunBY`7G@$(-~ow)U*bF`KYE8E$ne6}PTYyLE^kjaQx zEDh*_a!&6wGx7IVPk8Ik3%fT&+x^bd`2|#GAH9Gz)AYLXG&>*qX+}=`C-tZm&<%U0 z9UC9vWSm%H(Aq{`FrpnD{-u8QOtH8rwZt(zWA;VI19(-y@OkXJJ8mp?eZSQ4>-xX8 z@P8CBTz|F{GKEu&{cf{8rqACDT$O@~rAT}J1OY6FY083L-%g}IlM(VVBmO|Tg@5hX zt!dloF|OX=L$JVqTq~uG?Y$3}WZ!z<8&Su)xOiDqC^!6{P{Ll7MwEF%J4fxlZ`!E} zo}7InYp;nsh-lKnNJVxVFqAz6AbgFVsF?-AWjp*IS;wy^`gP3S(-vjc$n|pdkDLH& z*$!Tn5`SSr5%wJiHjh-x78iPCz}w!+QXfI?`#w}m3vU${EFe7o%lI&CESdK~!7!fj zp>aarMRBk(LsFRs!=fVo6dMYF|Jc`?HEHJ4vvWGI{d`NT|pnjcFCKrRdMwf#I-1g7Ms5ezi1tH|3^5I+6r;u3Ie6iT{(^x3`$615N` 
zub{bZM5It@T`;uQm1@1}7k6b?`qQzl7s|p#Xnhz&1P1RuO{~^{3-XR6EtdQ-q9`Km z72BIg-Urm1VZh81lV+*<(V|7JYGJ? zMGlm-R?3l=NPv6N$Jxrp&w)Wn{LxWE7RtBxLaoZc)fHpcCscAIC22$hjttD>ZYyh) z>pybZU7;Bh_-B!Z_HBvSCRYZpjh@P1 z6L7Q>eMQ&6Kzv*diJ-#v!-w#bgUNcyL`aTE-XUvQ$d zmfl;H*tk$U*QB&IyER5fRwixC&^r+kRtm58?3A~d=Bn6ECh`T)<56$D+uvtOJCSVV z!RW6rL25U~HOOTJlah(!Bpmb)$b!r=Omf~mQ8>>z3LqnTzaSscE9hw^17NT#o792Y z!QNgmrvPS%f&p#jiPSYOck&O%W^;zLB$njoDbnF3<)wptlSex|-j){7N-sk(^Mf=# z=Rx)kM8>z|c)fK)#dK#??!GH0X~_FA#A{!Rm7;XY`_FL91*hkUR^(N7!g6Xk_I|s~ zKe_ftr+*~+FOfwgc;P}bl)FmV$kD*OSUK={dm2csR|8O!5oKrUB|lJQ`Pln`hgaVwSMBqkdp$9+-6o&{g+BULEN%Uf zWFi`Afm}m>H3Q0s5gnDOb#bftKpI;;Nw8&qC1OS?GlS|BkRYsf;!F>9k;sr@N*#5CGed1KQJXtx;1$RRbEh?HJ!AA3hf;Z%Al6!3%hzXS4L#jmPQ} zFo%=8%gr;}cOWGxewzL3nCtzq1HME_O67nzfXsF$-{i9GC*d!A9Fz)Ed4APF`m~&^ z=Cr#V7Tx_>{XD_&na$>53&G&*Ooo+Da8_sBi#2@XLy_a9ROXK}B-np3AS3w%i{V){ z8mm7k%)PwrFM{3KKkgDxm{S{Gm~X~rCMBZb#uR@AAG0N42tutMD$jM9ht?Tzb_!Py z&U01BtCfiSmnffDx8^GUYYtPIV_q;c!B%fkjdNZROY9y@8TsJJ`{JG9nzvi*LRnVO z3FKK@$mLB!=n^SSQ>9h!D|I+CC!Ve(_-z~2ud95^c0w2IG}Y@1b9K+pSCa&vmrrfD zmB2cOF5db!2JBuU2ba0?832__A$z<`_lgiu0q}5U9b`77QshtxvrpUWls?-6XG`Ex z#M$kkeq^2$HT~7ExHj)K#hNy}cOXEq$|vvLNmIDFnHQbcQ~2xVSgi*vMqx$b%IbyXru7dWvfRh}t^mX8x|(2~oL7i;18UC{SYbz)`7CJkkNc5; zCe3-sXjcpzKuYRtfZ%4019sJKtZP1WgDlH8cAH`*S5L3cnsU|Tin_$v^3HIdgT^^l zR&J5fG@s^Q&ixHu_p?R&;@wGgrO|gxY!><^fi&)ayr{?J&F;P3FDeXyRku{XDx|>z zb+f)njhT#YxBq4MIJ5nlfn6DjR(9|MxL!SDMWfaWHv;JgV#F4gOM02!_|r`gT<p&d%*ON$mZ3%Mb8|uNg z-TDl2B<+5;;E_ms1Pfy;IY5=atxU?mHbnTIwYgmvZoETg2~1Z5huRB254?uHXVoP5 z##XG&qAVQ~9Q9qn&jNZx2WRSEZkF<@WqKATb2)3@zNG-df&n1wi6` z7}w!c$Gu8SujT1!x?tU1q0bh?x|oid!Ocw27fYT-6`iiK^WT-3p(zyT2m z6a%vgS5y26MKynPNFuEI<8@coO&j$e=Sl3P(RmG~Otj1bB_E? 
zN9pUbJtF>hz`PfMcA3eqxtCA8JJ|wmq}pGKxoJMOzP@t2O|}R<>%mB89NwupXh5F) za(fZYU-`7dfIj=;=!e7EyH064<>On|o${z3@P4MhwHAg>d& zvW9JLdlkv(pATmjwV^z`WgxHw%$sRTX@qdkCwDrFbiB*E($)a%I_~4D1PCh5^RaHQ z_SF71i^jx;|Pec0YL>Zv_bz{gmA@- ziWfBL7m1Py0QCB3jsdGo`RaZhKm}Jp5{}(Gr6EO~wq}OLdKF*XX6hz#K({{z*AV11 zwWF9W63`Q(SG0b37qXsOtJwiqMvr^;_V@3D`fxSi{{o;Hg7$FyN=9YfD3){W@<9CJvvr4 zG$X}0gyBIIyia&CP-|^6(ELE1(0hllYM{u#a3ZvgEu;6uG`zI7v9$*f>h%5cXj;#Rdagt1Z<O7UCB3%Jg(W2gJxF(XYQj?^Wd@;=yn1}%!DfKtPsuzt0$b)ah_Y@|^ zC}v$EEbUzNgg?$xEt=OjR_VN^mOwk29O4D2aq#iELJmgIh;KXz~WtICxlG2%(q08wD-rUao2Cg*9W0hjQ z$AlG(U__+$g*XcFfYORz`^Wju{V*QEzrBo6I4-G^wE@jt-fsjr^o1)9E>;@{+HaCc zkqGA=Kj4&t5>_RX#anZzzZZ2gWJrXDSSCFT^W6SUir&NmV)Dt5W4exu(*D)2!uDOZ zin+(u5reiHZAw>C9ZN#qZ5$o(W{@C*gNzn2KqGMe28MGdH+SDvjo6fZ*P*5fyLaHy zl*zN(EddmD8L|E~(+hb{k1ZbKjy_mF(HBOMRnS z(WWi^KhaX1Dk9ENl8xb|S3upPgdZq2-OHMt=WdcMVj&grJG8qva}I!1B!}|aEEaw* zh<~6SStUn9H``Cs=MK_m;9Xwm6nyq*B79f*tiauvsNteXVZj%bMyk&0c>YvY;Mat0 z8fzJG+xqN`Z!b<+nI~6A@9!Lyko#u%|z7StSC}^=4|w zp22h>zp=#j>bPg^>9ZhL!H}QVO~{I{aKqB@;-aZraq{E7rU-Im!XUvv8&^ypoYcL` z;1L-Iv16iEUez8S1FuOq6_;L>&F*5GV1F)l50%8_ZPeDkXWMh#UKXT{rUB0q`jw%%72`27mh&9Gn z8O_bJJU^O1`{(C)bNe}(5E-p3)&BeYw>eUNhsQkB%2x*2n`}`my38NcgZzu>p9Ki- z6{JGbqKuz?^EH745Z_FW`jxgsl{Xj8Cb17PBcnPXth?*gP5Pqsv;Mm1YeSDBoHvgj z7-V_>FVF;nJF+eUA_9&I#jJ+NSNEz2lY-*&m7WtBSW5!=|j_i_-Se2 z@snt=f7YE@Zl=35M$D?IP$)WNiaO?>Yz|OM=mp=9x%1)hvirHJ*#dz-4eiA=lxpqp zHh=@OZjxXA@%HgsT7Cuy%=BY>@eUGiKxAwnbwz0wMr*Rv6N5H(wQe zm5z7>rI+7Yrm{^2Z6fcVakKn1Pg7=M{f9D9Y;+tbPda}JAEfR4!=wwRbq1Lgeo>G| zVkr;W0tA$9(BsAy1jm;dx+NKE=~p}2U_X4~`FRngDS5~!ojL`%PexlKdHqIv6z*`g z4|HE0%xO0iI#+5yEi+&}0dSK{^9Ux|t{sQzcUK3<4zFxRBOCkX)%-cWzB5Z$KyAGx zN_T_X3E|>{v@Zt;$bJ&Md+b9iiaFdow!3QjHZSV+ud>`P{oUex*Nk}({4vH3Y6=Fc zOC@0OGsdn!Kw9mMtU%dMDLLqS#MK<3#5)Ci{<{(DPusPMc9wlE{5~?`zT_Jj$KR1v(9s^1D8ZTXUp|mWV~e%nie@EV^`xu$8c#J}9uU*MAnLU=_3qLpIl@GbyK$=g^Euy<;Pdj*M_bf* zKA^54AWL%KZ+)YM>G3g@N@GsWLZ}VxK5K0G#HF`Su4G3LVzYib1-CL~lOiaxwaYkY zto|U$mXb@iy~H=k)vg^L*ed$|8pgA$xSK0Jvc8tn(&!}p49(oKbW?4EPXlq;o&p|u 
zo{H|UOh5yx#>_+7`&G?7uz*|j^8*f!dfU^l@2Plu9*?N|?Whs@*$M73&TuR#_TKc( z`3&KG^f3NS!trTf>VpXwl+5UobQYI}aIU+N8wjH`C{jI4d}a9V{;3ygb6qMvRrQ@#l+H~gu6p?4oS2a)ciE(w zQQI=-w)jbC@h7;Y=nM!zOl-e#Nf)Td=Di=~;rzcuDKW|=iQNt#rL)vhNy47F-U+QR zI`q|g#l!X5F5_du=Y9QHt-#o2JXguId;Lj`=K&u@eF$3&QhBRlgCaT-S!(cHf11>7kT{s#K@P{HT;Vr*<<}_dvU|LiCtc<-?HQ54W4>(X0{~(f;-Pw z_qF8na&2zzMA1u{pxHlsj!JilP#xrJb5sicL=Z^2R__Eg!fh%1R6>#Np~?Omrh6@N zlK;Uwztps!Jh&wtyqB2Nu*yZJ9tX--wU!>{2?(fMHj(0c>HN zu5x@@z7ovf-_m9U-`$eZ@NHXKSGbir+PVpqtCF4F;~IKI4UD=QX`_P^lPxGY)7-gu z!7_ov7P#rISIxyPpWq1FLG8ry z4;!B4&Oa(~=pVd~M{S&a;)s#o`{vL6vra{D<{{v>7@Rer?^78zFSm_JORI$%B$@czMpy-YfZ~Up-jEp8 zKeK~a&aT?I)`d6;?V}t6Y7~=`4~@*KjP| zktp0gp5kvy*_ir2K?*}_vpmw?kVx~o*-xgbmen|Cz%*`$;`r*FG=A@0g%l)}ZXaAZ zYtD3Wy10-z02hyjW=gg2c#PU_zAKAV?D)y6z99bH{{jX6i$K@MX}taK3p7&hOA2iL z(LXFYK=Sn5*O6yy3Q(2Bho;ZeoB!veUfG20%3>vj+IXm2g+5s2zz~DX$fy{~w|(Od zNxDV8RnJ>HSN2{vyQcQt|BQ-T`Yg_VJ0_%TfO1;lT)OGVFWax%qchrrkK;E*2>2&t0CgarWtK&t>V&|xBH4Mg}iPrGni7uqF!uMvC=0gTsUNdRJX%bxul zfKvh9uzF`_E4h zVxYIUb@#tC!NGNHzkipY$0W|~+;tx-_hn7=H$IyYHIqUnEo_f{X1^$ho-M18bWvSe z_BEAqA@tbzth0{l_~8evQR~kaHI(u+0{2c_2wA(`_F!Ig1Zc8 zeQalRDiQ&4GWh^WzoqOkddqrH>+q34oYCK5D}&KKFPSh!6mXaB$}>x0gw)@@iTwH( zEvorv^RWOj`>(MJfkdE$*K;p!CYvR09AZn8Y_$`XC z8bI>2{^f(dgMOyU6FBfAB3RiFz{YO@V#J^MMm#KtbOB%K?3S z+|O#{Jg9L9cuqhS5x$C~LdbCW^b-c#Rc;UP)!(Tg1R?N0dB?A8mGY3T(A53*eG>D{ zKQsQV7urzirZ*m&XW}j}p_g<-8P2Yrchxy1I^?U_RPsEE4uME`4d@@i-W$OJ+_lsD zP1z+l{mcKA|50Qs4lgk@e{ePbd>+}izInvIZSV4o+qqw`m%e@FBw^$lG~HsO-sO@t zs6mqg(gt}E-yY7zn8T$Vf6q`|V^29=lm_(~q1LUet}di-VpW5Zu@@3y@Zz2Skj5Au>dKIIO_m2mv&B!eu7 zf!xagXB6)8-?npJXztgEcet3%J&o0QW$bz`j)7_%AAn=@>R^XUcwrOp&LH}2v~~m6 zfF)1E*o}u*6=>-o`Gt(=MZ?lsUrdYb6)fY0PBNi*zdnv!y#Ce;1^hT+H*@%}u+Gp0 z?>IQ|0zH`LE8dOUUP@2=6GRBhQzS`9IDKV@`zQ=9a)Bw!#mfEy)xpNdwCetKKa+Js#sK9da~Y3sHu3WOCa~VjocR7%v(=KFVHETmO^Cw zhhc(04oyn_JAbokunMlKyLlva)(i($>Kk44=AWjedUr8VO8m~F+CKGUZcQ|PUDB>~L0L95gXjgOOtV?}>Dj{R z9r4ef3->S>@%R+-L{34UJ*JJ`1@j1e3cV!H@tlKE&`YyVcc{A^@vyVvgSNzYfZI&N 
z8@roi;6LvDCx9hLTt4$a{n!=G_jfpnov*gfg(30^dVB477367qS+UHmZ4pTu!G7l{ zk7aLj35OwihXyS@@?VPWy|5791C^9+C85k*a|A2lIE^I(H{-x_hZUrE~a*@$VEaCGZtP$UzqwTkmoZPrpd_;(CTyYt>{$EA)>ud-A?%IQskd zi)F@6JN-1z!;23Z=V;_GveNp0x@UjKrZs|%`yZn?-1W23I6!WzRl%b28ZoJfSp=0D z3+-w)&z=sb=TXa~e496hL*1~8Rwrfn0KdNXuRXbG1r6uBYK#I{Iq{oId}Nt38EHk2 zh$oV=y9H_fw%k|i+a*eiLcx2C@8hlIm{VPbyx{Z?t<_N9WU_O*t5+EIEJ;5vJSB3Z zBrWd)hrLV6_FOyX@$>*R#>%C;TG*>r>IIuDgMPCsHxd z*Oi?077ogn>*w(Ys%{$}qEh14#D14He+fR_TDl#widuGB+W729%6*IlFZ0gV*vz=V z)OVlQKD+ytx1xXi*8DZ_$OYG7K74LIahMI~?ayU!0|0ns670|r<`3tu{c_@|ZiSk2 zknZ>TFl|RDK6a|#O*(zVbhIXWdh>{CoX>V?xEqd3*?M&N;fr{3?(fY{jXg3yueeOgMuO|9g;9KvzP~E4IJq7$1wY7BPS%O_9NOA+$IwgETlZI;lp(np#n+slRv|a zj+kK>eKH4uqIO0aWP=XPIb-bRuNbW#Sfx%g)t=&wbY6YoRje=)wEp!tjd6&-9F9;6 z9;OZo&$qkC^p$Xnoru85n~~@xqhWh`l9+fbe3?P${#mev zBtp_~XCD4LOC!@p(9(V1s5i!su&FO(w(9}eyn}JQB(D~kFJsaSR3M`d$3x595WDaV z=HRECNhLG;Jo*(wOxnZAcDKTu9_ENE)aU#>r}GU4>xnjk3GGx1hj3;?xHJmZS*zG8 zusw+ZLBq&4l^^cm-eFk++O*cf9etJ28sk-`!n&8C^&lbGBTT_^Au^HyFiDgKxj!kdzgcE>KU~F)>)fj9)7Kaa*Jw% zb^LMTXS-=M4PmHEa$k`L zdPboIO6fbvMbrA6L~+$hFZ+jB)|R~8Q?ZaVpTmSzSldvBZXa^p!N^I#ZQ_-b^K&2` zP+S(4-3cG^8oB)5V!>c#q{cDE2cc+pyL67kv%=9O1e)$t9C=(Yz4hpHfH3QY(<^*E zmu0c(|Hz(ZWrwna?>vZCr|T+y56w)oOF9D}LEzHYL&!m@u%#TJ%&W$B;~@Oe&@95q zI5dr+HWR)Qb7jdmFg#<`9zJ>1q}~bp%2xW|y%DU-6unN@kNS3xziWp+rIB>AXYSrO zQ$KnPr7fOX%zQlB>pps5J?=})O*icEaYN6Q5qiWkGygKdo>61I#J*AhXJbB*N1QyA zF)H`G8=)Rx72+3W=-35Xl**~$NNdsK57_Fe% zZ_AMQcSw?h6ik{PVIrQNtEt8y-amV*{=D>>O7f%xk^PY5U0YM$2+H;Cbl6t49cbE|SDLOT|K# z_7)8@k_uJUwaefs?*8Zu3g&JjfS)!q8yJLID$|uI%HFXuuDi}2t&S+OT#Vhpy zro(Pz{gnb&m*TA|l#OAqz#wlbULOmKtl?_Ho(7eW%bG{BwPDSWBvtfYx>`D0iFP^x z+Gh@qno3^t);l(8>KJd#VG#2{RP~K5da?|;XC?i~+8B`L9Bs~eH0sf*z&L&GCCcVE zOTK0d52nJXOkL)w_f&P)F`R&s?LCncans?X?_NYG}tTT-iJG zFPCWRzMvJ2!fH|$Qsh^b!9v__+qeeoc)ROWs`enR{A%YreT$bec+25HSvq%Mn}$Ju zA{&DrODpNw?Kj=YNk4VXgq3H6kJ?3f=0!48u%U+sE*3P6cL#6j;zj4 zm%kQ9t4QV?luwCc*y`fED%M$tFntSIzW^;BZ5LvQD%XCSXa4M_Y{(#bbgyaxUDNC} zOO7eT*xnDpgf1lydF2DyE!s<^nAa{8PxveQoF%mQ!>Zi`W$rSDf9jgdj`a~OfG|0a 
zOf`(gN)~(N@h%V-Me1;J(K%WX52o<;ObI}#?U%!oD=DHu^XGE^Gl)hCmdj1PAAYZ6 zM;_c~Kc7Auzrt_$_3lJhm!y5qpdpbK4VV5;%#4<~32dCLN#r*5_A5BnmmT+xp4#Nh zr!M%Iqiny)WRMi0oMNZ@ocVx5D*#b3xctOR1zRTkwyyzUszS3q6D`5!qew(4!Own9 zeYB^TREN&a0l78AAI;3KWjSKRHA(?3gEBW^4mxXIM2f6=6tgweVA9ts}y=;a>}}GX=gny2iBe)WI&+E*y)ob&9u(Vyi~n z0~``q>s8x&{riT9;%=r%9rUlxuEjaT>C{xyl*kb}@G3hQ`mLTO{bL(JW>BoSpovI=$p9wMo+RB(}%922wP zWt~1lYqD~fuL^aHYnyxMhHrDvk*T@)Z~~uw-HMKlC|ln*RHwJJJFjVV_4cDWq5*5- z!oq^xlF(d%Fl;q=Pj;t-W~Dmu{Sb{fosxco$=~~sCZkJ(P80LBa&U*GJLT)RqiSIwH*ggYfYm0=poL)?yL8RRVdqUO%+#By+%H3x~#w2ih< z&soWyTi1FA8TX>S+k~~&qn))F@imsPyzf70^AIYlr-OSo&{>xG%D5*1{d47G6Po(DqG=F2DO9COoI&ug z74&ZYXo9<(YVC_it#HQTbII3c4T$Ysqhk_0g%2*Z%>m|phJnNZxl+#K_cAn6FJbFx zd<8Iehr%c&6acTP$&DY1-reg)btg(Y%y!xyYO)g-atL#QJ>lEmJ>k;f_TpCIr&>wT zkH(NKaDY2r~%I2p~kT)o_AT!KiC7QRrIU0GYf~IItX14=bZo)#wKAp3uq&~K{Zvj z#B?dl_D8SGjEvNo8Ai;lF8S1p@(qi-~TtzgjI;x~wuWADW-4lEl|Hx|tV$cp zKA61I8?ERu$?JGGn>8eQ-omiVUBzayMR-o!DrO15UYq7AA|f^UcT6kyMmTJ5B!TsjP8Kv3r;kc)KK-*!S}gGQ~|mCiW5KCkYKt*?_-RyY2bv zhwVDndj^>Tro;>$7Hl8v$@Z=rdbF{9p#FaH*QKUuIM8Hf^d|#Ib9gDmMy`}3$KjZ; zHuYIFptzdi?^Yu$^JzJgTmVq9gB(rN2>5>d@WnI7{x0dmSe&N-f0T3)!IvvqXjTc{ zQ4LwT*f~0Q6rHS1kj;4Ul=#s@nk3=_#^y9lf)QgmSMz8&gZeLId;8z0XMo<5Y}c<2qYx!{wpuMaSKn$)R zZwJeT^$ykcaQftby4x)-+2A-$0s>{m8-`&3o0K}!r`z)K8vev8{gcnXzFFE4hui9z z>;<@GsQ=+yocF)Gi)|`&$8-)@db;G*AE!Znjgb)~T~dC751l`7} z;0u4|kGE#)$};@ok8aAXf>G}}kZ~d20gnTvlR}%nuJ*c)!~3TG5ctUHfkrr-uTeaL zz=TPQ0QrJm^XJm)sA*8@^Nx=jA+exrn7Z-=oep>tq4OYb@XL@7@Yyoi4B zy)T9>DX~%V=p*{{KsUZhOF5uw?KsT1SP{Q@d$1Cci=fI4#QOSd9-8gyF7WrgA3!yt zQFTCdBXaM+cm8D_Bz|ekM_h{a%CD$|*J6>lsACK%@HrB#e)MEKuYTBUtt*!)LONnG z@I8CQhL~G1uFw$_ZKN0>_as(ZgOv&@(tzUCSbQ7TlPQ%;rIJqB+j`A9CAf`_$^q9Q zG0EeZE&p35(0i3wvJD`v5TPpW^~4R=jPw;N`2-D&an(YgH4iqaL{8k@_OAw4SZS(s zoGu-f9L$@GZNi$ytj5Z|wXsA1mbO?r$4K;5Pt!tlyA_ZPSh`uGDxm;#d@kvS>MmaL zTGcKYbRCm#>zg)Cz%4PgBfR`w+U4P?Q-4-;I3p+{t}D5W{Sj&u<1VUnS$|q(G~ej& z-}aTURsD#(#4tiJJ!ZJ5tm0(pb4?CVmh=vGae>7Pw|N1QZLC{&UZ?QG{MBExW!rbp 
zMz^OBaH*|tQ5z%?7xru=#w4*dQET{>oUj<@s@wXcdaFOYfko)RbXotd~bGJapRnb{1+X zsA7JVPv?&!QgVSD9at7f?9Dlx<3I_y?GM&iI0ra)NP6(tDjzJ08q+ zk*&Kx8f`4Nh11u7P99Ek?}lPbrP{~qc1-&Hyq_>@Kd50a7M|L~YbIRM{%%{%j7CTb zo7buy6d0506uB1>`c{kO(H6_!?CD<8T2;IAMmx4+gT`%dO;TJ0aR=`t+Ly4(Paq<} z+LW6jmfg(1BF;K6qz1)79b1*%V6Is=6M%>_NezBxHL(T;QB74 z5ei7ponOqmy@)2%l`K&^|1jY?qDiSBwwEGX#f-txicX2^DNw;sr;TM| zKc}PHVWC(a*y44pI4#~Wjkv;A6*Hr#x&^S?PnsgyhdGJc-*goIta2( zbX6=Q)BTx<4YcdU;KYbpOSIj3&^+)@EPB4X>h0(Vu;3~k)OSDHC?UAY!$|w*I>vlq z?H_VeVjS#T#x;mklhSTodeyL8)?xn{U5RetO?_81y_t+UvpwJ~1pY=&N z+OJfd&Q6bnH4_EGAh8dEqC|{qYl)McY`)A$xdJ60e)~zYBO_NoP{tW;iZHNas>30! znjkQ9)Fv_0z{>kJ0z4VTiqz-iu*llz6(EHeS}=D9O6giJKRe8us$aN{aF2_f7iSM# zZA>H`!IleKau!vJeXEIzw3pWca?^xN4EjL{U$ClA1#E*P1RU*RwqbL zi&2Thq7Aa!FlcNgKI?&PF)BO?y0I#5_<(5`)oEk5Ys>4Z!)fFQT=!Xnan`TD#c>c0oiMy ze*WR;3YPQ;I8CX0bx%}EqjjIeXeZ?+LIYTJd9!U|XEe5aSWVc9F=9=JM%~DKjeWh& zoy)T$;ZezOV67%eXOrc-!&F5MP(9OATZvT3mG#96ck>0zlgyLY@Y&Z)ZUvXK>1eGL zt`8=NuS%Ap|Ium<2l6O@UrB==QH{(q(wMrRw?t=+T`|#Te>j?stx*Ub)ddFB#QFwX zaEnf+!6l-&Z;6o?oYHhO|@5 zFHQME+KbAqF*2;;;fSqERTJnBw_+@0T^}+)0@b{p{n1l5jc-Gws=V*A#2Tme^EeWO zsg^O_7eI;~0x1n9I#e5|GHwKfqhj1p=+pN|{ebJ(!kX7AMyt;=^Y{3;-)QBLf1R6O z604WrYOJu2s^kcSAh&f5a|SE)=xpkZpnUm?aku;Xpmp^bcL8OOD zSV>Iw;C4XrUy2VxHfc-6&<%q***HReIuE*X9{4>;6Wgg z^O62Ot2@T4vhfy*bf!vQ+G(QYV3yiN<2pUN-b~TsT;$w~h>j&m3Da*Zd_~w6C70%5 zq_Xbvt}PR_rM`z^$ydK6+{R6h4k5+i(`NI$W8Hay-{~(_RZ86$c%oX448@2Wpp?+SOjTr*aLE7ifCV|mt1H52nZ=rD10`Baj-?x1c1tJp2b&2&SSgWgzZ zPv`d8a0L(PC`Dd10k;&eWU9kV>Tn{2CrkN@X7Dd zRS;8>DtL3<{N)jGp}b=BFBhfQqG=?$%nGhx!XIFxJ@I7i}SKi)Mm5OOB;0q#gm zD#*&rsNcw^qJ*_jZ5St3v~_$~5<5sOXPSW*Jj8#YD3%Mnqr@m<2 zhHU&?V!?`*U9-9eRLE?Btp}Juesnd7(SdmB!`pkNJZP#i06FW07JtxP0H_7WNT+I;bvzEuya8twaRP- zX@*xxseI79>wTy@f3Q~VS#rX>Oi818UElMxwR@_bpbBzJ)b=#)ZXG`Az|)nj*mX3Y zh-BaC4C;X}239-a@KfrERiO{9V~u@uwmuBP5?l4;Sf4B#NCOMl$YXGTN4vk^+EjMo z8GZw~^n^82(9k1%MzGAlEt(Scv+Hza-Y;h#1ZX2f9(@g)jsM^nj9u3*^kPYbpYTY1 zMWSnMe}T&3{^VULzaz(P-+BR>qJ*0f8w3IhAbWI4Az}$SQ6aPf)SE{_9C4-hp}{w4 
zzJFNzd2%7k(u1|u?`YRgcx&e*;d6XQNVtn7;V?+v18gTd^pWS0% zyeI+@34siD1(>yroC~W8za{#Q6=|c}>^Eyr&z7mVEaEMr6>Qvs@_wSy%TnzP-KA!I#6aa>iRJtZNqD={Ts#) z%rrhb?EUADW`%H)`yf|#oNR$Gg07X<^`AtX{^(Zl1M;Q5dDE|out>vqfdcb2QV8Z{ zZ(*F)u=I4anExXoUV5gmm{w#@nfoZk8on>=Q^&9a;CB2Qw;NPKa0+M730Yun7U)e5 z$>&Xa@$z8BEWVRX`X>3P9i8dv`uB^5dJG#OSB)rnmbp!Z65{08%z?$l_xZcLi?p6C zHXLR;Z_016(D34U319HYDU`diyJZ^P0t2rK51rLAe_jPUyG8&fP^05r6cUcBMwdOn zC57yiJy`E(aeUh7ui^wHZfA~1Ty&os61F(?<7(hF#I@>J#` and Tasks-and-Commands Plugins-and-Best-Practices Advanced-Index + /Architecture/index /Launcher/index From af25df1777dd5e64091cdd62983371008cfe5985 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 10 Mar 2014 14:22:56 -0400 Subject: [PATCH 105/148] Fix @retronym's comments * Fill out more details in archiecture section. * Leave sections blank we don't have time to write. * Consolidate on `Sub project` vernacular instead of module. * Add a few examples to make statements concrete. --- src/sphinx/Architecture/Command-Engine.rst | 5 ++ src/sphinx/Architecture/Core-Principles.rst | 70 +++++++++++++++++++ .../Architecture/Setting-Initialization.rst | 42 +++++++++-- src/sphinx/Architecture/Task-Engine.rst | 5 ++ src/sphinx/Architecture/index.rst | 7 +- 5 files changed, 123 insertions(+), 6 deletions(-) create mode 100644 src/sphinx/Architecture/Command-Engine.rst create mode 100644 src/sphinx/Architecture/Core-Principles.rst create mode 100644 src/sphinx/Architecture/Task-Engine.rst diff --git a/src/sphinx/Architecture/Command-Engine.rst b/src/sphinx/Architecture/Command-Engine.rst new file mode 100644 index 000000000..5388dbbae --- /dev/null +++ b/src/sphinx/Architecture/Command-Engine.rst @@ -0,0 +1,5 @@ +================= + Command Engine +================= + +Placeholder for command engine details. 
\ No newline at end of file diff --git a/src/sphinx/Architecture/Core-Principles.rst b/src/sphinx/Architecture/Core-Principles.rst new file mode 100644 index 000000000..2f101c6ee --- /dev/null +++ b/src/sphinx/Architecture/Core-Principles.rst @@ -0,0 +1,70 @@ +================= + Core Principles +================= + +This document details the core principles overarching sbt's design and code style. Sbt's core principles can +be stated quite simply: + +1. Everything should have a ``Type``, enforced as much as is practical. +2. Dependencies should be **explicit**. +3. Once learned, a concept should hold throughout **all** parts of sbt. +4. Parallel is the default. + +With these principles in mind, let's walk through the core design of sbt. + + +Introduction to build state +=========================== +This is the first piece you hit when starting sbt. Sbt's command engine is the means by which +it processes user requests using the build state. The command engine is essentially a means of applying +**state transformations** on the build state, to execute user requests. + +In sbt, commands are functions that take the current build state (``sbt.State``) and produce the next state. In +other words, they are essentially functions of ``sbt.State => sbt.State``. However, in reality, Commands are +actually string processors which take some string input and act on it, returning the next build state. + +The details of the command engine are covered in :doc:`the command engine section `. + +So, the entirety of sbt is driven off the ``sbt.State`` class. Since this class needs to be resilient in the +face of custom code and plugins, it needs a mechanism to store the state from any potential client. In +dynamic languages, this can be done directly on objects. + +A naive approach in Scala is to use a ``Map``. However, this vioaltes tennant #1: Everythign should have a `Type`. +So, sbt defines a new type of map called an ``AttributeMap``. 
An ``AttributeMap`` is a key-value storage mechanism where +keys are both strings *and* expected `Type`s for their value. + +Here is what the typesafe ``AttributeKey`` key looks like :: + + sealed trait AttributeKey[T] { + /** The label is the identifier for the key and is camelCase by convention. */ + def label: String + /** The runtime evidence for `T` */ + def manifest: Manifest[T] + } + +These keys store both a `label` (``string``) and some runtime type information (``manifest``). To put or get something on +the AttributeMap, we first need to construct one of these keys. Let's look at the basic definition of the ``AttributeMap`` :: + + trait AttributeMap { + /** Gets the value of type `T` associated with the key `k` or `None` if no value is associated. + * If a key with the same label but a different type is defined, this method will return `None`. */ + def get[T](k: AttributeKey[T]): Option[T] + + + /** Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`. + * Any mappings for keys with the same label but different types are unaffected. */ + def put[T](k: AttributeKey[T], value: T): AttributeMap + } + + +Now that there's a definition of what build state is, there needs to be a way to dynamically construct it. In sbt, this is +done through the ``Setting[_]`` sequence. + +Introduction to Settings +======================== + +TODO - Discuss ``Setting[_]`` + +TODO - Transition into ``Task[_]`` + +TODO - Transition into ``InputTask[_]`` \ No newline at end of file diff --git a/src/sphinx/Architecture/Setting-Initialization.rst b/src/sphinx/Architecture/Setting-Initialization.rst index d636e598c..e2e54fd47 100644 --- a/src/sphinx/Architecture/Setting-Initialization.rst +++ b/src/sphinx/Architecture/Setting-Initialization.rst @@ -12,6 +12,9 @@ a particular build key. Sbt converts all registered ``Setting[_]`` objects into All of sbt's loading semantics are contained within the `Load.scala <../../sxr/sbt/Load.scala.html>` file. 
It is approximately the following: +.. Note: This image comes from a google drawing: https://docs.google.com/a/typesafe.com/drawings/d/1Aj_IkOaJpRXJNhrVtVJaS8m-YRcKsympVOj3M2sUz7E/edit +.. Feel free to request access to modify as appropriate. + .. image:: settings-initialization-load-ordering.png The blue circles represent actions happening when sbt loads a project. We can see that sbt performs the following actions in load: @@ -26,14 +29,19 @@ The blue circles represent actions happening when sbt loads a project. We can s Each of these loads defines several sequences of settings. The diagram shows the two most important: -* ``buildSettings`` - These are settings defined to be ``in ThisBuild``. They are initialized *once* for the build. +* ``buildSettings`` - These are settings defined to be ``in ThisBuild`` or directly against the ``Build`` object. They are initialized *once* for the build. You can add these, e.g. in ``project/build.scala`` :: object MyBuild extends Build { - override val settings = ... + override val settings = Seq(foo := "hi") } -* ``projectSettings`` - These are settings specific to a project. They are specific to a *particular submodule* in the build. A + or in a ``build.sbt`` file :: + + foo in ThisBuild := "hi" + + +* ``projectSettings`` - These are settings specific to a project. They are specific to a *particular sub project* in the build. A plugin may be contributing its settings to more than on project, in which case the values are duplicated for each project. You add project specific settings, eg. in ``project/build.scala`` :: @@ -94,4 +102,30 @@ The AddSettings object provides the following "groups" of settings you can use f Include all local ``*.sbt`` file settings. -*Note: Be very careful when reordering settings. It's easy to accidentally remove core functionality.* \ No newline at end of file +*Note: Be very careful when reordering settings. 
It's easy to accidentally remove core functionality.* + +For example, let's see what happens if we move the ``build.sbt`` files *before* the ``projectSettings``. + +Let's create an example project the following defintiion: + +`project/build.scala` :: + + object MyTestBuild extends Build { + + val testProject = project.in(file(".")).autoSettings(autoPlugins, defaultSbtFiles, projectSettings).settings( + version := scalaBinaryVersion.value match { + case "2.10" => "1.0-SNAPSHOT" + case v => "1.0-for-${v}-SNAPSHOT" + } + ) + } + +This build defines a version string which appends the scala version if the current scala version is not the in the ``2.10.x`` series. +Now, when issuing a release we want to lock down the version. Most tools assume this can happen by writing a ``version.sbt`` file: + +`version.sbt` :: + + version := "1.0.0" + +However, when we load this new build, we find that the ``version`` in ``version.sbt`` has been **overriden** by the one defined +in ``project/Build.scala`` because of the order we defined for settings, so the new ``version.sbt`` file has no effect. \ No newline at end of file diff --git a/src/sphinx/Architecture/Task-Engine.rst b/src/sphinx/Architecture/Task-Engine.rst new file mode 100644 index 000000000..9d7be5da0 --- /dev/null +++ b/src/sphinx/Architecture/Task-Engine.rst @@ -0,0 +1,5 @@ +================= + Task Engine +================= + +Placeholder for task engine design details. \ No newline at end of file diff --git a/src/sphinx/Architecture/index.rst b/src/sphinx/Architecture/index.rst index d20bce232..ea16cae7a 100644 --- a/src/sphinx/Architecture/index.rst +++ b/src/sphinx/Architecture/index.rst @@ -2,7 +2,7 @@ Architecture ============== -This is the fledgeling set of documentation about the Architecture of sbt. This will cover all the core components of +This is the set of documentation about the Architecture of sbt. This covers all the core components of sbt as well as the general notion of how they all work together. 
This documentation is suitable for those who wish to have a deeper understanding of sbt's core, but already understand the fundamentals of ``Setting[_]``, ``Task[_]`` and constructing builds. @@ -10,4 +10,7 @@ constructing builds. .. toctree:: :maxdepth: 2 - Setting-Initialization \ No newline at end of file + Core-Principles + Setting-Initialization + Task-Engine + Command-Engine From 7413fbe9c097f099b1774a621ae9074ff2acc049 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 10 Mar 2014 16:12:05 -0400 Subject: [PATCH 106/148] Doc fix ups from review. * Fix plugin mispellings * Clarify `select` behavior. --- main/src/main/scala/sbt/Plugins.scala | 2 +- main/src/main/scala/sbt/PluginsDebug.scala | 4 ++-- main/src/main/scala/sbt/Project.scala | 2 +- main/src/main/scala/sbt/plugins/IvyModule.scala | 3 ++- main/src/main/scala/sbt/plugins/JvmModule.scala | 3 ++- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index a7ada9b6e..926defd11 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -85,7 +85,7 @@ abstract class AutoPlugin extends Plugins.Basic } } /** - * A root AutoPlugin is a plugin which must be explicitly enabled by users in their `setPlugins` method + * A root AutoPlugin is a plugin which must be explicitly enabled by users in their `addPlugins` method * on a project. However, RootAutoPlugins represent the "root" of a tree of dependent auto-plugins. 
*/ abstract class RootAutoPlugin extends AutoPlugin { diff --git a/main/src/main/scala/sbt/PluginsDebug.scala b/main/src/main/scala/sbt/PluginsDebug.scala index e130b2c8b..a24546c23 100644 --- a/main/src/main/scala/sbt/PluginsDebug.scala +++ b/main/src/main/scala/sbt/PluginsDebug.scala @@ -165,9 +165,9 @@ private[sbt] object PluginsDebug final case class PluginImpossible(plugin: AutoPlugin, context: Context, contradictions: Set[AutoPlugin]) extends EnableDeactivated /** Describes the requirements for activating [[plugin]] in [[context]]. - * @param context The base plguins, exclusions, and ultimately activated plugins + * @param context The base plugins, exclusions, and ultimately activated plugins * @param blockingExcludes Existing exclusions that prevent [[plugin]] from being activated and must be dropped - * @param enablingPlguins [[AutoPlugin]]s that are not currently enabled, but need to be enabled for [[plugin]] to activate + * @param enablingPlugins [[AutoPlugin]]s that are not currently enabled, but need to be enabled for [[plugin]] to activate * @param extraEnabledPlugins Plugins that will be enabled as a result of [[plugin]] activating, but are not required for [[plugin]] to activate * @param willRemove Plugins that will be deactivated as a result of [[plugin]] activating * @param deactivate Describes plugins that must be deactivated for [[plugin]] to activate. These require an explicit exclusion or dropping a transitive [[AutoPlugin]].*/ diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index 7604c9d27..afcc29826 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -51,7 +51,7 @@ sealed trait ProjectDefinition[PR <: ProjectReference] def auto: AddSettings /** The defined [[Plugins]] associated with this project. - A [[AutoPlguin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. 
*/ + A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ def plugins: Plugins /** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */ diff --git a/main/src/main/scala/sbt/plugins/IvyModule.scala b/main/src/main/scala/sbt/plugins/IvyModule.scala index 6ce0d9a9d..a0e361503 100644 --- a/main/src/main/scala/sbt/plugins/IvyModule.scala +++ b/main/src/main/scala/sbt/plugins/IvyModule.scala @@ -14,7 +14,8 @@ import Def.Setting * - `publishedArtifacts` */ object IvyModule extends AutoPlugin { - // We must be explicitly enabled + // We are automatically included on everything that has the global module, + // which is automatically included on everything. def select = GlobalModule override lazy val projectSettings: Seq[Setting[_]] = diff --git a/main/src/main/scala/sbt/plugins/JvmModule.scala b/main/src/main/scala/sbt/plugins/JvmModule.scala index 6dd95d9c0..0a7219c26 100644 --- a/main/src/main/scala/sbt/plugins/JvmModule.scala +++ b/main/src/main/scala/sbt/plugins/JvmModule.scala @@ -15,7 +15,8 @@ import Def.Setting * - `Compile` */ object JvmModule extends AutoPlugin { - // We must be explicitly enabled + // We are automatically enabled for any IvyModule project. We also require its settings + // for ours to work. def select = IvyModule override lazy val projectSettings: Seq[Setting[_]] = From dc76a8feb3a5baf7545e8c36b79c40f6f7e2c707 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 10 Mar 2014 23:49:09 -0400 Subject: [PATCH 107/148] Fixes missing tests from autoplugins feature. For some reason, autoSettings fixes for tests disappeared. * Fixes setting orderings now that defaults come from autoPlugins * Removes autoPlugin settings where expected. 
--- .../sbt-test/project/auto-settings/project/P.scala | 12 ++++++------ sbt/src/sbt-test/project/auto-settings/test | 2 ++ .../project/delegate_config/project/Build.scala | 8 ++++++-- sbt/src/sbt-test/project/multi/changes/Build1.scala | 3 ++- sbt/src/sbt-test/project/multi/changes/Build2.scala | 3 ++- sbt/src/sbt-test/project/sbt-file-projects/build.sbt | 2 +- .../sbt-file-projects/changes/Restricted.scala | 1 + 7 files changed, 20 insertions(+), 11 deletions(-) diff --git a/sbt/src/sbt-test/project/auto-settings/project/P.scala b/sbt/src/sbt-test/project/auto-settings/project/P.scala index eddc49a76..1705e0482 100644 --- a/sbt/src/sbt-test/project/auto-settings/project/P.scala +++ b/sbt/src/sbt-test/project/auto-settings/project/P.scala @@ -6,22 +6,22 @@ object B extends Build { // version should be from explicit/a.txt - lazy val root = project("root", "1.4") autoSettings( userSettings, sbtFiles(file("explicit/a.txt")) ) + lazy val root = project("root", "1.4") autoSettings( projectSettings,userSettings, sbtFiles(file("explicit/a.txt")) ) // version should be from global/user.sbt - lazy val a = project("a", "1.1") autoSettings( userSettings ) + lazy val a = project("a", "1.1") autoSettings( projectSettings, userSettings ) // version should be the default 0.1-SNAPSHOT - lazy val b = project("b", "0.1-SNAPSHOT") autoSettings() + lazy val b = project("b", "0.1-SNAPSHOT") autoSettings(projectSettings) // version should be from the explicit settings call - lazy val c = project("c", "0.9") settings(version := "0.9") autoSettings() + lazy val c = project("c", "0.9") settings(version := "0.9") autoSettings(projectSettings) // version should be from d/build.sbt - lazy val d = project("d", "1.3") settings(version := "0.9") autoSettings( defaultSbtFiles ) + lazy val d = project("d", "1.3") settings(version := "0.9") autoSettings( projectSettings, defaultSbtFiles ) // version should be from global/user.sbt - lazy val e = project("e", "1.1") settings(version := "0.9") 
autoSettings( defaultSbtFiles, sbtFiles(file("../explicit/a.txt")), userSettings ) + lazy val e = project("e", "1.1") settings(version := "0.9") autoSettings( projectSettings, defaultSbtFiles, sbtFiles(file("../explicit/a.txt")), userSettings ) def project(id: String, expectedVersion: String): Project = Project(id, if(id == "root") file(".") else file(id)) settings( TaskKey[Unit]("check") <<= version map { v => diff --git a/sbt/src/sbt-test/project/auto-settings/test b/sbt/src/sbt-test/project/auto-settings/test index 249b0b2f9..dcccfd271 100644 --- a/sbt/src/sbt-test/project/auto-settings/test +++ b/sbt/src/sbt-test/project/auto-settings/test @@ -1,3 +1,5 @@ +> plugins + > root/check > a/check diff --git a/sbt/src/sbt-test/project/delegate_config/project/Build.scala b/sbt/src/sbt-test/project/delegate_config/project/Build.scala index 9c6cdc35e..dbd97b466 100644 --- a/sbt/src/sbt-test/project/delegate_config/project/Build.scala +++ b/sbt/src/sbt-test/project/delegate_config/project/Build.scala @@ -1,6 +1,7 @@ import sbt._ import complete.DefaultParsers._ import Keys._ +import AddSettings._ object B extends Build { @@ -11,8 +12,11 @@ object B extends Build val sample = SettingKey[Int]("sample") val check = TaskKey[Unit]("check") - lazy val root = Project("root", file("."), settings = Nil) - lazy val sub = Project("sub", file("."), delegates = root :: Nil, configurations = newConfig :: Nil, settings = incSample :: checkTask(4) :: Nil) + lazy val root = Project("root", file("."), settings = Nil).autoSettings() + lazy val sub = Project("sub", file("."), + delegates = root :: Nil, + configurations = newConfig :: Nil, + settings = incSample :: checkTask(4) :: Nil).autoSettings(projectSettings) override lazy val settings = (sample in newConfig := 3) :: checkTask(3) :: diff --git a/sbt/src/sbt-test/project/multi/changes/Build1.scala b/sbt/src/sbt-test/project/multi/changes/Build1.scala index 0e7156c39..8d886bd51 100644 --- 
a/sbt/src/sbt-test/project/multi/changes/Build1.scala +++ b/sbt/src/sbt-test/project/multi/changes/Build1.scala @@ -1,5 +1,6 @@ import sbt._ import Keys.name +import AddSettings._ object TestBuild extends Build { @@ -7,5 +8,5 @@ object TestBuild extends Build proj("a", "."), proj("b", "b") ) - def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ) + def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).autoSettings(projectSettings) } \ No newline at end of file diff --git a/sbt/src/sbt-test/project/multi/changes/Build2.scala b/sbt/src/sbt-test/project/multi/changes/Build2.scala index 5858fa425..2d96cfe5c 100644 --- a/sbt/src/sbt-test/project/multi/changes/Build2.scala +++ b/sbt/src/sbt-test/project/multi/changes/Build2.scala @@ -11,5 +11,6 @@ object SecondBuild extends MakeBuild } trait MakeBuild extends Build { - def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ) + import AddSettings._ + def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).autoSettings(projectSettings, defaultSbtFiles) } \ No newline at end of file diff --git a/sbt/src/sbt-test/project/sbt-file-projects/build.sbt b/sbt/src/sbt-test/project/sbt-file-projects/build.sbt index 0afc94a34..710ef1b89 100644 --- a/sbt/src/sbt-test/project/sbt-file-projects/build.sbt +++ b/sbt/src/sbt-test/project/sbt-file-projects/build.sbt @@ -2,7 +2,7 @@ val a = "a" val f = file("a") val g = taskKey[Unit]("A task in the root project") -val p = Project(a, f).autoSettings(AddSettings.sbtFiles( file("a.sbt") )) +val p = Project(a, f).autoSettings(AddSettings.autoPlugins, AddSettings.sbtFiles( file("a.sbt") )) val b = Project("b", file("b")) diff --git a/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala b/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala index c7a3ee533..46b874741 100644 --- 
a/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala +++ b/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala @@ -3,5 +3,6 @@ import Keys._ object B extends Build { lazy val root = Project("root", file(".")).autoSettings( + AddSettings.autoPlugins, AddSettings.sbtFiles( file("other.sbt") )) // ignore build.sbt } \ No newline at end of file From 905028a6aefa5468ddda782d710f27671c9f92d6 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 11 Mar 2014 10:25:18 -0400 Subject: [PATCH 108/148] Bump version for further 0.13.x development. --- project/Sbt.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 956c368d4..7ea722818 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -14,7 +14,7 @@ object Sbt extends Build override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.2-SNAPSHOT", + version := "0.13.3-SNAPSHOT", publishArtifact in packageDoc := false, scalaVersion := "2.10.3", publishMavenStyle := false, From ee6b674966cf667207bb85cb0646e305e259377a Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 11 Mar 2014 11:44:00 -0400 Subject: [PATCH 109/148] Add introduction to the setting system. --- src/sphinx/Architecture/Core-Principles.rst | 61 +++++++++++++++++- .../Architecture/overview-setting-example.png | Bin 0 -> 29366 bytes 2 files changed, 58 insertions(+), 3 deletions(-) create mode 100644 src/sphinx/Architecture/overview-setting-example.png diff --git a/src/sphinx/Architecture/Core-Principles.rst b/src/sphinx/Architecture/Core-Principles.rst index 2f101c6ee..843828468 100644 --- a/src/sphinx/Architecture/Core-Principles.rst +++ b/src/sphinx/Architecture/Core-Principles.rst @@ -63,8 +63,63 @@ done through the ``Setting[_]`` sequence. 
Introduction to Settings ======================== -TODO - Discuss ``Setting[_]`` +A Setting represents the means of constructing the value of one particular ``AttributeKey[_]`` in the ``AttributeMap`` of build state. A setting consists of two pieces: -TODO - Transition into ``Task[_]`` +1. The ``AttributeKey[T]`` where the value of the setting should be assigned. +2. An ``Intialize[T]`` object which is able to construct the value for this setting. -TODO - Transition into ``InputTask[_]`` \ No newline at end of file +Sbt's intiialization time is basically just taking a sequence of these ``Setting[_]`` objects and running their initialization objects and then storing the value into the ``AttributeMap``. This means overwriting an exisitng value at a key is as easy as appending a +``Setting[_]`` to the end of the sequence which does so. + +Where it gets interesting is that ``Initialize[T]`` can depend on other ``AttributeKey[_]``s in the build state. Each ``Initialize[_]`` +can pull values from any ``AttributeKey[_]`` in the build state's ``AttributeMap`` to compute its value. Sbt ensures a few things +when it comes to ``Initialize[_]`` dependencies: + +1. There can be no circular dependencies +2. If one ``Initialize[_]`` depends on another ``Initialize[_]`` key, then *all* associated ``Initialize[_]`` blocks for that key must + have run before we load the value. + +The above image shows a bit of what's expLet's look at what gets stored for the setting :: + + normalizedName := normalize(name.value) + + + +.. Note: This image comes from a google drawing: https://docs.google.com/a/typesafe.com/drawings/d/1hvE89XVrQiXdSBsgaQgQGTmcO44EBZPg4_0WxKXU7Pw/edit +.. Feel free to request access to modify as appropriate. + +.. image:: overview-setting-example.png + + +Here, a ``Setting[_]`` is constructed that understands it depends on the value in the ``name`` AttributeKey. 
Its intiialize block first grabs the value of the ``name`` key, then runs the function normalize on it to compute its value. + +This represents the core mechanism of how to construct sbt's build state. Conceptually, at some point we have a graph of dependencies +and initialization functions which we can use to construct the first build state. Once this is completed, we can then start to process +user requests. + + + +Introduction to Tasks +===================== + +The next layer in sbt is around these user request, or tasks. When a user configures a build, they are defining a set of repeatable +tasks that they can run on their project. Things like ``compile`` or ``test``. These tasks *also* have a dependency graph, where +e.g. the ``test`` task requires that ``compile`` has run before it can successfully execute. + +Sbt's defines a class ``Task[T]``. The ``T`` type parameter represents the type of data returned by a task. Remember the tenenats of +sbt? "All things have types" and "Dependencies are explicit" both hold true for tasks. Sbt promotes a style of task dependencies that +is closer to functional programming: Return data for your users rather than using shared mutable state. + +Most build tools communciate over the filesystem, and indeed sbt, by necessity, does some of this. However, for stable parallelization it is far better to keep tasks isolated on the filesystem and communicate directly through types. + +Similarly to how a ``Setting[_]`` stores both dependencies and an initialization function, a ``Task[_]`` stores both its +``Task[_]``dependencies and its behavior (a function). + + + + +TODO - More on ``Task[_]`` + +TODO - Transition into ``InputTask[_]``, rehash Command + +TODO - Tansition into Scope. 
\ No newline at end of file diff --git a/src/sphinx/Architecture/overview-setting-example.png b/src/sphinx/Architecture/overview-setting-example.png new file mode 100644 index 0000000000000000000000000000000000000000..f5bbcb2090dae99decd255775bd61f9781214f56 GIT binary patch literal 29366 zcmeEtcTiJZ_ihjkprO5jbP!M!5QKpACQ=0Hy%$61q4%yJC{k2bY+wn418nmO;-m3xdkf8J=XD=-D)F^r}~j{WR8{;yQ)d><{!ChgszZJRoO4E zi(Pp^p_5x=JK;T;qRR2G{i)oSvYKx+ymx44I#0_jtTah1d)c`tNv}e_q zK5xt`yI7%f2*N?_Ykx;3!5B^S_sU0vp8W6Co{J#!->ao%TZz9{o!w(Tf3HF!2JVrP z-K`_|^Cw^D-)})Axm~QUN&fxgzyA9*;U_j@Hy}Q}Xr}9R@#LjD%Vsf)`QLs+zO&y< z8UWeTEy`yG?bhis3!K&#Cd>fsbUWh0gtm@|U3k&J%* zb?U16q3Bj^_0_ejf4bA4{%$1e53f4g&rBs`M|Nz6V!f|%G;yzD&)@NHI@39HCwGPn z^S&f#mlnxnO+4`oJ#V<0rQ}W2dx3j5lX2@o)=>8~cQf=254H19m}wXRek+0QJJ2(h z$k2QSkzY@oAAKY0wDnCMQt5EiP;=G1sA%7(8{P6E1%jq6tL6IE79MtP(*+{LKc^EB z5$YtE$*5$l)FiZ#*>Hl_BjCJ8~tx zF696@8e@d%Y{N9Eyc-Ldr=$c@*GKvKLPHX2mejuF#jk(_oA7#NrA zN3@o=Z-3n^V(f^6XuxE%PQKjVcsu;>_&Y&pA5p;tnL?gP@5+5kBNv+@k*V4)DJsdp zd&gLZ#5a$6DVX!@bCKdGaxZs#WGzffX5)n?bccw z^xN^ayQx!KhtTt^(6S#3B#1TBLRd-tutV+PjDMHw!Y(82i=MwTU=d>1`XI^NimsX_ z17*8sb+7&U!D(mIqgN`)DyfoYBK^s9HyfcJJ71AjyG0U+ z|8$(=AMI^u5!9`lP2I=n#G}s=;N|^EsDe}~?cF`XGlfr5W?r*wxoQ?sms@g$d=E#) zK{s^$oJdjdtjqluDMM4Bh_^nU>6U2NW0o^IL&RU41W8-H?(U%_gy5Gpgdp5WD3h*` zJJex3Ho;3m=(u%eU8G_<@r~8+NF0F~wQB$e&sUO|KGG=fJg4TB^YJp|=WxN_#d2Z&IA2(*OS!80Ly{c4as zI9-(a6B>vMEs~!bT57x1`2ez){eW~Ynt!eSVO=a8-SCb7862apAfH*v>Pg@jG=nE` zhMp=fNFLg0-BzIxVnehh#oFD}5C*Z~hg22EGf@*{zW8e>MI!W!s|38)3ygk8K?ZCe z?Cd`2ant<&0OB&i*z>lAuTwi?cmw@@_r@|2JYxIA{pZyj> zqLL4(V2hC~8xfq|nt#z`^iONy(_ae~zAYk&gFgqlYAgu@5VoQMsv6#;RS4p=axBNy z-su#A3ev$g(T<>;k-8ylM$G5N-0`@5L`X(<-~#!!P58d$XL|_WpFU)0lAY*!q=^(9 zv*4&qPcgMO`A>KKyJ|KG^O{FaT*TNyi>=Rs_L@>`z7D=9#TIy>L7R=4Xm%!ti?R6X$n{H(yIf4DA_mq$JHe1TDWgeRol$J zW)Kn+UStH;cu1n+k$j@&A?78T{E4~CH#+)waL{><5YxIQa*Sxy8=@bOO2EmGQ$#M-UgSq4R zkA{!WM}22t9|pG5|4cB5Tye$oCX!H?XpP%$&N22ioSui2Gj2Z>kue=L>YK14`geYq zV|xC0I#6df!M`yiB!ciiF$8Gj?`KT*g&AC6r^O2RsMGPfB3hE zf}Rnq@wX2dY8ev5-xhx70DI! 
z<^M32KVQqd8eQTG`D^N+5|Fa(-?7D$<6q@ZJI4omf?vjyv*Yfl?oA{=CQVc$Llh?q%FD))UL7>8`E<^GH(4wyWKG^Y|xAT=| z!ULwQ0hTR#^%b_$&C#(s- zde+EH+_0NEquK6}c#ZiHhiJy>!%9-D&*j2W0IjOiZ^0QfvHJP(^(!D|GW>#uoQ-qy zHA9;s^_+&M=D67W=ayjSMH{{}nJYmj8ywyjA8&cjM2ZKj+$S_t*-yRNSGC#7Vt9%0 zUJrbBc4V9Ci(HhlFEn8rhW2$45}79TEQwJ{f&pJUiU%}?*&aXay4tEI(G0GurRs}P zK|0!FLvMMrb`-^_4+C6=BzNMBjpviF?HZn|LRo(6L*+XwX;Y`$zL@c}^CMmHSO)1S zXKXi_wBQu&^i+YF_w0D-8qu^ng*gIP^~Zk=tw_y}2rEsU-i$#HJbLsf*yVX{Bh-Zc z9>xuAviPnq-BRmWCBk2*1zXXM?%>rP$@*qq*!a_xE8#THJKEfBw#Q<#IhM2?KumvJ zL=mxI;E0R5fCSI4x`jRsALaFi$e&!ghIajt-*5Rx@oWS|)>ovBmnImMZx-`UsX5Zu zn-3W+zLoF7S~Ygpse|5`&@;YF240_Hw0ncmwrs~dS4Dl~iQg&phlkG16Q)(ie!&u4 zoC66wrsmWeLFvrhN_yxhjkI2T!k@OFZ)Rn?Q?O3b+Dhh9RwtA_7Aaa#$}s z_s*S;1m9i$291*9=MKLP_l?n|A9Z)dbBbRJuI+sLaf8#~RlnBh!UnE%x4M0Vi2mDw zP$7?46dm5$@Gf%F$`8Lq-)Q&1d^A5ZeB0-hSPLmm;_Lo*UuP?;>~4GW6le*QaJ4lD zN0@8e5PqRst%E7p!mDbkCh1iZ9Lg5jLUw<@?4rE&2oe| zcxk<9>5|5p7W5a*Tli6agb)H-3JSymSq30I%!?=-02;m955aybBmw=2Dko3meQ){t zO?z~r>+P@y0=?h_CBo!G{+W2Odyq*FU|)2uz~Ey~=qLU(JTpPfsx|{I`e`a+r;< zA>t=4j2LIM322INGE8C7#v0o?7QF}!a zE-Ua6J)e2u*zVm+&a^db{#Ev_O&FH};18E+hUlf98$=Yxw{|#Wpq_h!WGHSDrdI~0 z2h56Kxu;EHDI(ur#EJ~AgI_(Fv?%oVTL@ZmJIb$}BbuHI6_qLK&wuW~^Do)Yk{`c_a?UGkD~K!Qs#X=K^bp<-0JDc^Bs}}P zt@FJCaO95cXX1tBeu67u1bm%>%wSqp2wyyDDp6e^J-hC!Hs8K0(xe|^q-DP(M$Q-i zoG-RX|A+aW1Oq!HgrMLTUnj9;2y371gGFBS8%d|VYP|KYe`gP?884ZlW@XB>|q4qa-&Ojwd-|&%g=HSNa-PFouHm?IaR}cT0Zo$Kj^Uh-$y~O>$zaT zV6N-IF-gZ7PtN(s=*j&D>}Qc^p-)#pe4TJQp8Fmo2ne4nt;K!FVWtsNY2##>uK?a} zpzRcie*{KVL6rsb*{s6u?P~+$1^>PE&J&69gM;>Vfz%$W)pYCCJ$Yj4&%}$U18ZsY zrxO?6WfqY^#p@b&k5xMV4EY0k5$%RK77{XFcEZ%A#E8lk6^&EeAgzDR_mTa$7z{v% zKeOf$5tNzemod=sQ~X1PY731CbZEW?Co1-hIOhX{C289z1q2qBfy$wywFGs7U~Uv2 zK~L0dcZL`wS8&GYYKh&C?sL(YtlX7o3gvjgt4|af*H|SH?5+K1nV^0G5MdPl0UJRd zziy8^_tfZirP}Rym=Iwaxky$z`qUg&dNt#mGHJhH8*!D4t00mM3(afvz+&EsSWq*5 z!F`F#78RLxG*IoOXVvji@Yy)gp3Uo8_wZ|ZVk8KHTl z>;x+GjKybAI<~@5d-JEy$2Hr*-Nkgb)XS!*09KWhnM?lA7;NlvkMrD&6I<-**Rcy5 zF>oM9;P3qR7ygO?2lugW$}2s-#-QF0VQ-^F*PcLkW{*CN1h!}6xL 
zMrqToy`?CjwR8GikfMKj?JONt_cKd;kOlJ21AxQS7|=la7rDOlFB1KAay&F}3#*{o zN1aeN6hBWGflv=e7Wk`Bhk5g9 zWr`UnE-a9`RyYwpSc$7B_bfDXdE>vEw|UOe*C;q?LRvLA8R^L|zl#VK`0Wl1)yjUR zSICjOh+ygnPLU@S3$;U?WO$>eVc61m81kh9#`IB~E61-e%X=r}_rFb{*P6p;=q)5) zYZz}P*ZSJ#Y{Q#*XJfS1kd27vAvvFHAIuHJ=+2)Yg&W3|6o_@}1k0Z66Q$XY73*wx zFD!C6k$`BIN`yMyFD`^G6@LxumbY8#mLR^4(m#O@$T1^Q&Nmj#(?tvCMUchS-My8tM=Oy-N6}mRqbGJPM{7n>>0*v)b&f-oS_$MpWFf=o zaW&CJJb;y9srpDO9fr4eozAVxlqA}g1uk%|uyS}~bWUtD@9AJ`$MJB)EXHV?vR>U? zHGaX#Sy2JwR9WPnq&eLb))4{t9ZGs!0l&-crk{i3T&TlN4lIhIgbOiy66Fs#>)G8& z0Od&^TuiyY1-86Cl7QLm6jEO6TzF%=9t;&tJPJ{aO$T+yT*D8w2_&deG$Y+~kn<6Was#)`J{plwdzQwm=un{SXt;U}4O@fF>r{eqi$1jM% zw8~*Ut=kT`XeAu%o1jq4FrRFZQN815CZ`T|T#D_Ud8oeyxf2b38TmV*jvPh~3l8(G z=wNl-oR-up2o;3Lq3AGZ;I+KGJiNe-zY|zQs4diE*}kvGAT{0@Hg07NH;8_Z-fWyO zN9YfPN0+}T8@RJk&yVC9hgVkGmi1V8TmMsUCi1e}UH@@~OZ^(a3Bf|T*Z#2y$?JKU zmTSB`;kXZC4KU^J)L;|rK_r|6=kmMI-%wnmVGT2LH($WL^tW62yd6j~LVWoZC!T0b z1Ohiw2#S!T5q+i7|FBM~?;iuLYNEABK{}SoC`P9wRDY=6svt-dMLWQt-5&@ZZ8F4J zLt1mQ{3s~k<9FpHS7tF1PaD7Yuad#ax4AHpS{M#dhPa|Y`(QEa6dm_6W%1!25M}@l z?K~y>6g?yK-um2_a?5e{8X-hJcMR>{jH2J40&;)?Az9)aV#%5d%+9YTL>Tg^UGLdd z1tJi{<-|1G5mKcU0KZTvso)X#5yM0`JPE=S)CmwW3d28eo=q?jI`%ADe-I6VjYpk4 zQc$<+_IoG+7PfCi7@&UXZB(n2(lI=6FLxIlTQO11MdiWxrpM%73eYXNF!azZ6>Zv> zSE{^14GICXCU@%ZH+9y*ARJUCy&A2?D@zRL{qV?PK_!^?;5PkP7MV5&K|I@QlFxjd zJrtb%^g^8+&9-T^&lxP_Zx4A1&{-gF+dw<%X|}F%&KpT9_iWL}6-0hrK7hgEw>`|! z;gK(U5XQaC3o#4Al$LR;Og8_(n-7hso5dL-9CIgtR#&@ZC+alliJ}R@*>We^ywff0 z^Dw(cD!sQ@!`w#05$eb8xyIysp3Y@Ij;?^H;UT(9O5g_)yi-p(G%(QBs&`LNI|}mFLvEVjubCB! z9IDpl;ge>IQC*MFH{nn_Xg~pR!jS`m5%Ot-YHM*SBeQ$y>&h5;^4lL*@&$$nw?3(S z&VO2i%0r}9&v9&+J|%QCbLjAjH{Q02p&^7^lS4Dqr`mo_s&sl@_E47i$7Awx|IP}k zbMA;Mh9C_R@YiGk)obx~qIAZ+Tp#Y%7XB)L=Ju$hirPP#nsj`qPgsWN59?1aE~(~! 
z?R9#X9UGkcq`7)ZH{M_+VLLFzen2@#X#F#EbT;BnZTaCOm}3Y-h1(d+FSbX;sABlv z8_Sy?X2v|BG(ceF-ojP7Cv&1xylWmjg+t0!P&e}}OV2&e?A-+lD+7mAUvOolq2MEg z>FR`K=e8^bjY$<5vyXQzLvli*_UiPfcdNR|J$L029H573%y8)NFQ$O%|9cG7Dru7wY<)X zsUc&W*?ZV^dc>sC<*E@77ryK{bXfawVwPYbtm=Y(8P(E1XEQ`vGiCLOS#>(E81`4V zN-@%ul`)ja5z6eWPAH8^bQT?du+xhdv^OfqPx&-_XT@1shPW^hW6eMana__XOekRY z8vB8s*T?Q=$3;@eUYi0U&#ziUIx7BWmVdDNYT{KOAtQ{cNmP>8i?5p_tzw&j?tA4M zgU#2`%f9*iouj1TKC?fry5o4w+TW>h$`Vs+_M_;eo+tBn0V$M_yTcU;MvD+l1J6xWI98i-(F}v|(=X6r^|ZD3jZI5e zz$}}1aGIeRM!}`2e1MFm!Elb1rlVel0ZD8pD31`?KbGol2lQ;S@;lR0t%R>u5gy}wlm*3gYW2qwW))t3FFR>h8&YttMIlsw@Ua6!0GP`ol zG|Y2j&n1LF;r{YR&^`Jg@g~D*#eGfGoM4jLJz3(rPt~0eo)J^z{G4OK2Nz~N<$v!c zCWJHzbfLwYg;FDazdu|osC`2rOA=q>Hr%quD1P_Usj%FF708nRCO9`zCE6-C-A+?r zXTJB>t=PgeYCQKpTyrut%}{!Ck<+cC1Tda&!S`7A@d-l@=w8$eM`X6|Jzd9FF?VH& z6C!l7@tj!{Nf6qOq5ld{IQV9I8=s6UL37)_PM*Eu@!%+x!>8DIj&A3fv(pf!`QdJ0 z&iNt(N^bW-{8%rHiG{oG%*i6Uq06_d7yl+#_x`Hz5|QRcQ-?Q z7^N;}z*K9>_CT4vDZWqNJUn%X{Rsnq|6x!&pM|RI_~2_!9yWLxdoyAhm(_Pwyes`H zKvmUtd+G40f$t;*-r6|Dr%Agj!2ad=8Km)BWv^(AJ(}c#nu8X|*nng)hg8+QPH$QU z)|*(~7&^=j7IENg<_^VeTrgcYAf8}q0DRMBRKN}E>jsOocF59M;M#zs=y&w)9%c6& zPfOP3-VEN_0i23tI-KIi^v)ahmkbw=PB)DzFx;&bN#yUsGi!SnF+UQC0Gcd}uX9r6 zAuMR}P2sozSVUWpGYuh6w*>4e+9X~^q0WN7op7Bb8V~dRuPt4n#rUY@~rF8O4suHz}ZXjQ!A)M<14Yc+hb0+97 zlE<-T7wb6+6#Qy3(tEE-C!-ypNU7P@Vcl(Se%gSbfP)@oPUZhd^*qRIJFg-@VQmrn9HH_=#R{KY>R6j$1QLQD!Bb-NF%v?3onMkI_S^7`Z6{7V!}zw$;TWF$XN9;N z=9XkXOeL!wS6;vKJOlw$`=D>3c9AaqI?(`!8NA4wZ&g{UV-_v!`I5gb^2+A<9<7CE z4}3M=Gn-9|-o0YGv?xBY_N|$x5ZC}m{A_iERw0fH5wp3aWOJf$cGPmKA?+C^SusV9 z&N#)L&PWpU&Tg3M{UpDwuhON^`p|4e59wwg)5_>6>xI`x+=OWAVZC&H8duH0_mUx% z1B2}%_fgmNGY>U;^|mRuWnw`vZOSFl3zGIoV^$ez1skSS0tNhvtYS&)n<5V^f;7_! 
z65rQ!us}_qF3{($n+iyo2*8Svnv}XJx+zk0(-pV6J{V3uVd|5!f)CcmsMz=fLvxI# zRIt=G|4@GxlCM<36w%i~)&PQ++(3%!+H6_rN}rX=kZAvb0@5gr%@f+{SAM^(Eq&H- z7Z##vqU*A6df>s+Aa@(6KlI?Ye1+8p5S|vGs}0=pZ!`{RE$CYJY9gO1q|}>0s+3-k z(bQ;(RSp&*<^a4KS&;grW;|jD2Q4;I z%Mi+sApNVbP)@#p{WMj1!!QEgc!H^JnwyVxIbL#2a4F+Pn~7#s?JK~?xdVlY1%wz) zhi!KI!78iq4|yuNDozEWQMy+LRN}%26n@+xRn+K_b0If-O9K9%HzQHuTh&+Sj`EZ% z(g7R5j1P@^psnW>v=#zFdc}R3YlZh~Z}J~a<*Q=$u?i`2+KpW3xo+k8#FU2sc)MO_ zrYJeec8wm+jl4AGFc2U-_=X~(wV)>l*c5PMI(tX3>&q)L$>#A_2_2&_0732f71gsX zCR!juYZmIfLNs;ZXigc6W&Z+|6nyf8U7EL_6OU!d7a80ms1Xq?MvW` zy7+C=bbuY)P0Nt)q3CZQK=rPOb;V0st`z7z7tpVKua$M!^aho>o&=?{xVb)fr&b+F zve{?_8L~NTfaGP&G6F#TWqnx!*Q^xNhwxipSM{}6d21gAnh_H^4nLzVigwkccBFHv zPm$!H1K`JdE~N3|FXN_!D`46`&VS$qvl>Q(w?jasi(BV9=Rb_X`ju@_JuJ7}dF{`G zHLe;&o_36-C{1+VVX7RoofFzpp0*EFfkm{ir~Gb2I1JV>2U&(_R*ZKul!|%WY|ttC zc$>olZ-Pm5CEwQ-!;;r%a^`f@IQS(Zs^7D9s%ae36Gk<&A+eh+S^zY8f%S$q=mLp2 zuw&J@o=qewTMEbrQph$jSRh$3u+X^s)RgE;Ph_k^YpqsZ0$Dls05mvw$dgy*&Tltv zViO-d=n`ge>jmYU*&jL%EMg}p-jysjRH#fumNR#kkn^Tv{K1MYJ&Mba`^OAPkCsDO ztt3a&+%U;qMfwirWD9B?#xIClf5S)I$BEmzLzM;3dP!NsG`x}gt9c5sf*b~~s8iT! 
zv!m=>impThHVPFHeX1#LF~}kdaHaDvy{zf^B1B&j^h=9v@{!{q#Bb~Dludnf|ES>; z03E&lwK_-%(WUzI2?WE>%)NLs>k4yZ5&txh60oik0~_nljOnx0Kjuo!N-a2%SS$>{#8DN?_U3MxTl z?ieM02;W3s>b}h-YOuj;PW}Yo69xIo1uf(Cb#W#>ph@I-MIdH-{e7bQTQJ0hPEPY* z+ccZ2^b&LRk)+O|1VEF?rjV31&FAM5d=?=9n56)VIIHL~OioTV#X1{3>%9R?8k-=H zlg3>?zp?A=>@=+b0G#^;1X4j?G+g{{X+&RC)C zfbf`GKo|KVHvaR}B<|XPsZs<`)0bp4(V=w77e9d7FZR5^B^wh#*O6jXarx7#ZtA5i{ zW>`o|v=~{C;&Xg@ihm&C6@an2^JOnTc%2pAzow(mL&i009z^g#c7EA>-=z?7StZx*Y;B4Pw@?exv9i zecGF6+PlsVw}>o&W&m3O?#2|4IF~T-IycA1+yuI*Oh z-x;n@-{l$_dngX^NC)0l_vI8KU{LK9^UG@l5SMjZmw!aatDaCkkfSWK&sPLWsKWJK z`04~7VC^OMRV9NXn?@=>y#%PA>%0_TT1#L68GIPIVDn&q;f%Y0nNlMG;OPbwz;0e+ zj*$-uBf{rYz>e%3TQpxk!)K>Ve{L%Vs>Dt^hvf!WLHHRFu*IB4%u!$Z)K>1!VWZ2Y z)A$iJE$%DqZ$x8-8zeLLMft#uT*;O0(txQm-NAQMim&;4dU~z~1w(}Z*P=rG6VUY( zsRu(h55Y_Vo#z+cyU)@IP$SDRX=}~VG+CQwho9+o%!4i}nT8E_84xD!!n)bqaSM17 zRSfH|zo~{rHE&qBl#E|9i~lyUcbv zq20@)dm}B3%(&9#Qz&Z8gL5W?z>hQvHJn^$4;X0luv4 z%6taZPwBI*&o3WfGGJACshZHX_SfCKE~WrjMu}|g>X0DzEUB6uboX`ZA6~96 zhj?n@%}Rhkz0H7__s7Pu9lFApJ=KYW;+rt`aPo)dKMR-aC3#9aFj;1voo-OmQx!kI zL+KOipNWYnJ5{H#Em_Q7vN#bvei+EzERc;M$fADB=QCZTe*^0 zLsyAOA<#2UUP%!FKKEgcJNSR7rLIxW?)mJi4S zFrK^9>BlwkN1bB#V9BaDRh;9Kh!r1)UNlfbFhdv*Y&4pmTzJn;Px7xDS-Ho|GRs%} z9@fg(Z~O&Yr@$Lz$$b|*eTusfd_Zyszfsjn2!vKlx)l^SYc1LDo_8PC1_)RcUUQJ7 zD{8!-*HZ?(EIW*w{ee58aMM2R)MR(lohcTQ@@A)BWrp%8-6P~Z4DNl`xL<}mllvQx z_btij+05a`YyU_|wNZ_ZqTepiYr1~8fcT1kryTIK>=tZF**F& zDeu#ujxcao{qK6HlRgkIA6MrE)KYmv zpw1}P8y!hyaKB$pvA%u~ zSz(w?AVhx0O*BFUar1)tD@JJXjpMK;BKOx9+NIWNtu=8#pCShMx3GS_mLr5WQ1~n` z1wzUy`6j=S-{L>szka_WvR!5pZURarLf8wOrZ%|u7!}C?!B#TA7#;y_0Djk0F4}zn1eyAoj<136A z0XtUM3ADPWVl?v)=D@+ROz^BWUBavJ`ugCC&0p`mrQDuYvS5$DD4LZ?9xqKTJwC|I zUL2#MMWk$0zj(hBtNRg%(txb5N$tD}y;1LUV>`pQ8y-H}v{kxOF81$WB!@8_qho&wsT> z3Y88~<-|sQTeAn8E>`2wb3i$A`f;IfK#$~Z?m#p0O%^U70zoFa9LIFDnX=HD zuFsWmp>1j!+dwYWxC7V+k>qY$gV1Y(kp`9h+vAP=oq}dpNUO9A;HlqGfF7WTGxRqi z1U^fJ7`7Jw4DF2cn*~oqMbw*f%Vunwv_kWYq&5WTE>t*2y~0c|#95&gub8c^FhW30 zU|ap#NHVxMJIdCdUiAQ|C}=qOd0vuX`^wAPoBjx9)aC)Ez)f0m1KnP|9pidF@kKj; 
zozva-0vUY~c(SqSPA0PeP#1(a54UPVNSKRKO;N&}=yw70QjswqD% z?h4b}@rpCG8yTWN2-D=y|4jMF%M0M4u&p;nTmfse(^uVjFh4Eq+%vdw;kP2Zb3mFE zwYYJw;n><`Gt)di;SCS6z2RK7!E;}&Y%DyJeH$S zEBN35w!6qscK^4BdiR^#u#mU%p>dtQAsG+eTJ+_ASvr*3(zvWdpr}~m`KNGR)X(V{ zkyBjr`qm0#Qo00*eb+~Bmg*RkKkVjMUsUjV!7Xt9UZp&ROVN5bh$jwiHgL7yq(r$U z<}p}j9U!+$XdOgbh!di!+nn9a6qfCW#Ju-Yj!I$tmE9|Di*rYfi$V0#4$j(<^t<-s zQMv@iy%n$bgm0OwG!zftYegWs(+kHVJQu{kZIywx{>@He3OLD@S$71YIrLXN!E_D4 zC77>7!{ZM@PqcMX5Xrj{vjbAjxcYYT<&pXdROSBlESSmn6gC6HypN0*UyA`~fF657 zoWu2y8L0!0Zb1M-HhgVy4o1X+DSq=|Ow97D(oV`vsZBFCcr6Pufz$u^mukCacU7X$ zxMS|1-%_?io^7*A4YRUUb43a1xAmc=$4xyPBgAaIl2wkhOEs-BulPElAE5UYy!tCt zq88qk%#|HlrACdtE!}wlvv!ttGCy)#4Wtvs)%Re5I^4{u0|W)h<*&AuOi{$_eP)WF ziusc29;@Zk4nqd?@-PTZ9f?JDrXIynM!&tjDRI~&s_t$!ixNHBJ9)K+(Z#&dMguHN zDT>?Op84rlOnnE-kuRpwGr*52MrX|}hIT34*0tyiKpnbvUU&sx@CJ*#2f*PD(RS&| zX~Il8MBHOo5GF^X-scCfT-1+pylO%Ci+6?nVP)xfMS`2;&y}n%Rxyj%I_}rB+{jZa z75;sYemOuA_k9GhJbiccec^BqDi#KS-!84|?l#ubLe0%Y3xHe!$Dz-`Y1f0mFmxQC z3$idL+EI=#X1&h$D;r}GUso0P50~<8!W`4SS*sr!@ZNVIOy2}h>xcV2AeNBKucryZ zc=X652LyDAb>YCF@JXiuRrhm97N83}0Cl^$n37}T?nNJHCq81bA zVH*ptt1&dUYc0sFx!@SfR#aM(IBC6cGcbzlBXT$VBvVe(;@#t^#_f)l!?Yl=`td~= zmq`b}fiI>q<*O0K#}W+JUl){BqSDPKwcCt{pgWy|qe`{P1tp6lS3gC^TAk`lesOA%4C$W)PaY|hp?01$tbR@XrvyNcpD1aX z>B6O4ea;E2pA?tSm@Rx))E&s7NEmbhNW)k4Zri=jLuz9OBUn^C6zPr2Ue};#v|XOL z)0S==xNOl87TwgpmP%zScirQ2jwomz5EB;M1PFS#P@+C{Dv*L~geVV2wsNwS+_&?N zddmfNh-AV^Qn!rbA{P|eoD2KyFH;NkR~5%azPZ$mE<~touajo|8ez9zRPYO4P&IcJ zwp%{8Z?W#6?_hKysx>cXw-enKcQQ8;;^Hyc49Aw&Sx|!o))?#Os|co`nghkrc}L~b zTzb^rK{K>k0@E%&9b)E^561X6jO0u=185!Lw~LG#*@jpJT7`c)lXj8m+5Jt;$mFXi zedP4it(_oN7nTWhlSIVb1@1pJs6pmO0bnh?NKPb)UaX9jyY`w;$<&hVa zIfb^-T=_t4*f5Q*p$s2pfg$w!$QUd#8mOi(t74x`i=^w71#UE3X-_C`vv>wrvuGlW zzua#bJZp=s!1iX7OE~RaD#(SWNgT5MCs7IA zzzlV1vWU+Lx6$_+-*<-|zI+yNlY7N#K>)1Gfi^pGRudgkZxFf`Ul#?*eWTWIk@B;G zEngFC@cyxGrcl8{-kM^pYW(k-g3>d%XB0F-%#10bCnxoFL5%7Emy!LhevhW+Q>NYY zjt(9-b7%#0)QHz{?jS^F|K~>ikxgy^$D~Pb@i&bgNAGFS=JLxJ!!;YWhS?V~*ODo`d#^JcSN9fi&)s+}p<1$9qzN2!S&jb?+o4on&7i@dFAI 
zH&tqbYo**FOe(+{Wdl-Xmyt{ly@;m;kOVgC&wT7XR`lImeaBqSi?Goo0@sSI$_|XI zceUBoMQ?lR_ad;8d65=yaj-#g({fN*RCY|aTn^%9>533|eoO!Tgd^K3G-kE}@rBxM zN=W4wPkq9$C>LBJAjb4jXOBvRb|3r#-UB}nKuRjg@7zrZ4z$kwqB)JE+W)6Cdh)2) zV>a1RwL0H^oE;z9bLo~++%JX$9WEaR1bPmqldV(w4cP?fk!1~K3eXU4y;m!2II%Mw zpcx+ne85#Dc@m(I6My8N9F?j9=Vy2AG3fK)X0xHmgs!UFjrf^!j0%0yguzu)9aw=< zVG49DB{xg#R;{_bNUa?aY)*IPYwJDs;`7g{bVvu}5#K751#AE|Q_x>yP8=3T>u98C zHBEAGNJ?@7RrdhpV(I+poJ-i@w$V*`${vt@DuF@~y>1TJ=c)0jzPk5x`?iM&rR;rz z@m*KgKJ|9-a4$q`A-12|!EE6^=V$$1OSTqhAk+{FQ^r0j9#`%|O~)DP$W3HEn~E~! zi6=m*ns5lrP=^-oqfe4wHw-`HTsVS8#t(d*g}tjcYll?1TO58Tq|&eIcYL-zWeDHM z2TDjeFC()KMROO4^z7?O6sMm9Oa8}i8J=HC1LVg~55Q+#Iao^d2W0c2&4G?3xuwYT zJYh!Bc+L-xp`3N}{tj%}&;S(OgpHA6xMsL~j|NP)yP%!|D60RiQaNZU1%Exb7O(6$ z`zgjJ&2@z9X#vr-$PZ~cTLHj<5-dr#n!R+y{m+3mD(`EG4t!0Y1X}lq-UIX-DJ1ll zPDq80)paYpitrE3rG2{Jv6&9YR6$JYKn75K>p^?F_8$43Mh@d%xe)yH}n6$9iLB+)eY(RMg zWOy`J(GHRtub;If62vMyqw>(p&rQ6RIRgs>SKRnIj~Z9e!|4{eT^(3KCPI@1Aj?lD z!n1<%e0pnnP3(Wz5&W=ntN{)GB1|wyAg$O@nN^-im8+8TcmDnK7{l z83*uxgwnfY!t2rcbd$EcC4C25hNNW^gY6jofJds5+|iTPUduv3o02+Z7JG)^3zzp zs?Gi?Z~n*g5LJe$DQ2tOE@aeI8ur&fAp)pQHo1&;5w#)rH4`+&vH?UwkNA2NW~{Hj zmd1(9<~WfmvdtfSXL@ZWnXHXPXxBQbU@!raJ(w9)1xX)wJWS!_Kpt*?X_!hVMvGa( zm^@XKOUV?)s5ujsC)%mo$n-~JW}St`QT1jz^wpea$aA|6Xor#7THeZ-Xi5L@lw}uz zpH(=4j4?~<;?2!V`U8ww&rSl(XIhzPz_;{6T*NXi+J}DtC9N?MdE%UZ^n(h>!!<6D zzos5{l#@I&3?DO$@WqqiYtx`hrL`Kwi1i;O0o#1g^z+O1-0Pp`Y$*FH!((X=r5|@7 z9AAO?|KlH^fb!!KNeGB6kZ7Phc#ex)o%$j5R^uXH8B^?W8XOvATzUP_17RQv-YPtCP?@s-C5{upCSuTD>A#0jcFmwIXk%8-LH=bt%!cFb>pp0I8;U90UpNwiP z>w}%weZR0?q@(7XDK|0&Sv)Dub+7$=jBgrDo5LmCNyHQ|%A64)X8PxugXY%t8yDyB z(O&6gsp>=OohD*W&C;y7Z_H!6PvcGl5U>12{tA;_ZrL(Mr2&d$KzIJQ_a%(VG@RZ+yX5v73h>G$0fbxgjLg%b zwmV8mD3p4IvfFkz1`%>ve*?3^SC zo%WbeHa2=P2L;55ukAYT#~KeTG?mnMuVEIBksYdSm98VR1&R6_4Zf1Q1(k|UxN1_& z+{5AY6=xiPWoU&><>f1eGXr7}0h5(>pN9VyRH*!*8vtz|F^t!Oyg_ZxY>^xDD;Rtr z7}a_Bxs_<)sGR7GYx1Tv)_2hdP+PY+D-5Ua8D7@z{f71>napg_aA?)34fL1IhI;`p zJ%DhoWtVcNuP}OFwS3iiQphuAHewLr6Jv!~IS3y)?m5Ae2@S6jr<)J-xzzn`*KFnN 
zo-V$B_e(dpOjz%)q-7D$h6Tvd&bOj8NnCcMjyLxLy z>OSE=F<(kRV}%NtAL`$~F*+)WW!hE@Rn;+AgPU#6S)~uvK--|pni&0f(f+8s-|@^L zKXbyl@{D`1OzO3ghn*k)PJvAkUzmT_UGz^CU{BLAEMI$D=?!Fb^{OwN)tt|IGJ;mL5}< z%y@4-uP99Ey+b-Z>Iy)s>*GLe;&}2Lf6xh^V3BcrOMcvogLx&0IgUr(K!i{q>gEgC z2fvB}<^6ugc1X`q!T(J;m^@{{TeAGO;E*;C{=DFyqMNrt@rrqC04tLMkULrk1nh2z z&*M*@(MZ+}z2Z`G0K{vblr?$72-@uzt}HHvC82fHlo8u81bVJK)IGW8T2kYo6oD)r z^K7}T<3Z}rBvtRbqm3QOBmN2$cES$DE?St)6z)+19Iaez*(FN);Kzi3+AoL;Kum}< zRzze$AHZ_$VxUNsf!F;Ko0XAcLYJ`O2;8A?=5apU#0qa+AVxsw2Ux+JTxV0&+ern8t&J33+ z0WeAd`xSvtPa9S#cAQd$`&FC6{Tg#NkyotR$RxLd+1Xm5t!d@)OU(ZkIeA5hrkT#% znJxh>z!)X4#$4mTF~4SkHNlo%QYqAYd=!vOSKdFzV@142%VK+&4n?FrN8aVmy|+@# z-%dNU!O&2uypS4a_-g1tgS}@fHGZ`|vw{@j{P5q}D@_LAh0HgP8@7UC$XJ2eO3wNt zv2S^bjSt(J9=Wxtif6ed0fjp}(<5$w@P&{|1*0_ymw!t=pTMuBBfOYUsQD}3wu9N} zexTt>BlWz#j<6uPv(8wFjpRH%RmDbZVE4bp-)oo0>$Jd*gQqRTGR;d*Rpq1YusNRO z-|I{zMRYC86Vq0kfm1{={@?ceq+f}7vDDXJ7?{Ii5>m_fd!YI{F1#7>Y83~brZ-G) z${(mZA4dh33o=WD_9X=EuEW)_{;At`HHAS2k036rw*Pv4tmf%^X-RMfo&-_he2#hZ zrf<_^50K#<-rxp~Kz+jt#0oVB7;PfyF=Mc3jP7IZXcpb8hVK=TGbpEpnVkE#Hs-$C zKd6{h)AWh%NbH*ICZfKk{o_Yw1mpPnBa*slJ8q473BG1+Nz$$lmLc4~uK+TX5?exv zr@cYIL6M(#fERwH9IivTAmZ*0X!hoSXlEJ^8$15y$-uIYE~XZ3Z(Y~I0&=A$psdKq zr|-&fi2M0|=iN=I_1MRWhI$iP*p<@Cme0a|skfAR)DsHfR|Z5sma#n-Ba*lCRP(!? 
z-9IH}o?=K%sQpQgoa_Mf7*<=0=p8+tqY342e#R4LysI-zB{%uczo7_{UIt^4dQSUi zN;LW$rw(7MsY>OVn|lM5+Bc$l(T~X(tdRN+%^dTk2c1GRd5kuhaH_ko+-R$U?f$9e zUT@ukntC3@;zHIKa74vfnxptKppXv* z8Bl5tE9n7klH@N|XUDy!kWIKNZ~U;)D)mvvZWq&_CRawk@0|08eEu(~T{7rn&L4h2 zL1YJi@&?`Hxqv@r&*3Yyb_vu6+*K;Pk zn(R9}V^dHaz1pyBx)*kcs=>`{8A71q_XiZ8APswi0FCQ9AF!C$eomf?pMBKs`5r^- z`Ju%UbCZbb6TQIBL~b-r9x-mPxzsH9TmaOZhFx%Vt^q|tq+uAO21H=UL75>281}Ws=h)A2y!#K>UtT_N5Npj^ zv##Iu%kw;^kn^oWXhzl_UfNI%&WTunthu41#@TM@FtlPiA-t<1dojJ?#u+NzjYa!5 zsniALD-W{Dn=?aH@42+44xI;CK$^}}zmBNhXKp&ze3Q2o9!tGUq*9FB;gJQLfa`pB z(Jd-lWn4lYbnwb7E7yY5CiFWN)Bn6_c-&Cwq=@Hcy#(><<)Sp0=po3dMw+rVBa6>L z9z1bi$fJzg0W;J0PxHI!0ys@c>^`c% zrC;eJe$%enETo0)g?ReQ9@5aOfWnhh?D_KYgKrb<6&Po+rw|1aLJRwFK9pdi9|X`d z)@n*&@%JNSq1!_0vR_q73W$dju^nZe$BSHm1^#g6y~9~EB!801$sH4FDAd7n>!`<% zGXz;^K~5IWYIwS}Cs0t1odx8F>7v*oa)!?*K5Qn`FH<2(sY>rZR-uhsKh*A=yYWhe zXK=BiIS|a_Pa6*bU65ehQKx<=J3_JDI6RI6gwMMy;jttfRhPh_6}@6rxQjKx?`%Tt zh8%pLqSZy4#NU37vCuI9^}jywj-&Q9c**51iNu;Qdahhi2G&*f%xJ^%xnGRD~Yvz`?=ZwVACa_e{y zn5gV0xZYsZT(#nSo~0A{42*hb|F= zC5@JEB&Cjo1PpS&z7ShMZHPs9Nr6)mgcC3jo+GYi;3?EKKMZ!rSRV&ql9d~M_x!KD z5GRtS&*wHY_!IXIAEEC<^cnO0zAQpT+IM7;?= zg1l?G-|Z~ASC`lGvuCHLWhS-B3_8jl1&w~?T#6%s$8M9I@B-ZiIl(8aF1{6>(>3>hJu-Lw&Ev~7u_A^}2_M*hh>8KjT+|2xSf=vj?8B2ZS1>LDgPhH}7l#i*jBeg3Eeru!o&&># zyej6IC2M?TNeN(4|I4Gd#?fy68|P(o`N7?As98*;k`=(yl}s?BqIWBMR=5ZDt!3tb z2u-N9N0RFph~Ss9=wAWGaQSkEKvpD6a4Fkjfr&;YsAz4ecFwji!sh;ClPOnP9<#}M z@k57SSX*xAYXU>$D(UqA5GPabv)D|4gtqQ!Cq998JHJRrmOiPZyCNe$jbQz>Cc zbs4?%ki5}%y2xoYxI5@5r8Q^`71(1Jn0#1^A$%tdu4~j9#;kVuN1*!8paHI4)TKS< z9@%ApMkAHG%CaM0&$r$f@!(!kQOB-iGuv_RlUEw?_^x47FT0R6%B|NP*uJOD0S%QE zO}i6rXFHM(Q;K^o!0-Excl{-|f3uDXer7x5P`s-p?~lncNo53O>t2a|GwvbTAz5*Q z=+^w0;P}b+;nRBdVcpd}Hyc9HPkZ^0_NeW1L=7utj&aa4A(Q!k&mqmq{luBYpp|>2T$JeL;~8%rC2=1Mqg7zdFKesane9?!TC zeA>FNJJ<|B>8E2@19`I`QfW$}KFGi3T4fWm8~e)@$iaM?aM0uUiJOcsnX7%m4BUZB z+kx?ZqpTmid|IyS)wqtcVNlNcr8rIIQXy4M_7Cc&P-qqb4_YYOy@o2P z^x(tk{kb*mI}J)fB|#0JLu!m>!fDaR>kUO>8JJ1i7FxhuhviBclxOS%qQ`hz9+*SX 
z6#?8v^2});U^3>TG0`Dy&FeE?^CtJ64MK4pd`3;@qv1{=3 z?~TA_PGt|UA83x7gB_TAU<@iJlVqnQL(QsQ2`>p51x=8MhP1rTxcrp8&yytk@X<{8I1 z8kK_RNc3){Hds)(b7B!2wrQB3M*dVp&6`|74Fy#QVvtu0Ekhv@Xs%0X~dPk^U9WGoBz%I2=LMlb2m&ewy^e6Vi>07N|H}6sC_dah z3by(zLI~`ryU00GGzqj3ELr<`@=9P(^1_8tnn^h01}O$m>*ikJYO;O~L1$3d_5IcH zt|#um-Mnf5TY|l=+&cn1nW3YZ3|F8#B)6;(mNPmC2Z|?>vcO4QDA$LdvY>X-OEL9k z%`Z#&$iy{)c0_6v;q4I6ebmT{9T|jISI6Jw=NzG+<5{nuU1jwqKU%RI5gP-SWpZwWnBkb;k|xYDZ;QM&XLax5!f9;v@C(n$736A z7$1l_g&O~XrGiC24DW(D!tP$gWuGTpH_7l3a}9|ex(s@gZT8)=^P5kd3NaPM68=9I z-A3=}ggq&w=QFdJpN*^+OB<%&aUBP!oiBK^XEF)u&Xz7y}1fPa!BIeG%1h$SZc zol#KmX#!RDWS|-I+9eoKOG$_PA?>+I%m^sL$2cv4FPnR-l@-9|Q+m`l7h_MYl}l4~ zNck*qcVmt;WSt+wN3^U`uH}o;BAKaBy^A;&57gBvRLQ-S{3VbCy6#^%zSfA4*GN*6 zRM!c+2PjV1h~ZWcE{7c3F(LahzQDe&df#iT8yLVJXdV`!RkZ_$|K?p#a5YBGU zox+NaXObREMJ3IygVd(e_LJm!e=Wj6alKbI(C@gx7~_2Lm)rb-yk@?~50&1V534(j zzkhj*O3-35SIF_sI^C&d5a?9UgR;Sf^RExFOQMi8rA2Z@tvg`0Z3BB531dIkl27G6 zR*1&vnq_|Wu5>5m;lvT(Gj5$Q22KBeor+|hF7I6er)gqUO2%ciBl8?qlDzW!D=CsC z4*@2&Uo66RRtk9ng#aWz0Ew(ght8(xt43xa zHkXpNXH7_tsgc8f8XedIinr2ix}uq!oTq26`4%@^a$4w(kG^*5)KIo$>hnxh5BZy! 
zvfATLEb~^j$NHN+8(emKESv_fwpHYmYc=Vur8C>6*WxO=TDr9U2zz701vdl^U0b;b*As(*42Hv=#1qo9-sq_I}}^gZukIU9zqq+9~;m-meRO zBhcaN=|rwfcJUHvDarM;*fNMWX5qTGnG>;+%o92;w&O$Y!?OoPf_PLe3lQw-mft_7$au_n*-EQ`Jv#ay#UN4Z5rJ)hJ-%7+ zRnezzqlB+YcA=gH7xOMz`)f@qnSt$`XsFs#Yif#Sm_Uc6#Ay4WMY;U%-CO@Z3PDM1nTZ0!Hv_@ zPj`Oqjn=qfUg{qAz(VdyHzC|?cdq+VHF`ocG%{G<>uC_UqJvBBL4Lhl5CHD&DDvm^ zRh*2^Y8BrZ3bZxi>lg3qdu;~TaeH^hd4bRcU6juIk3wDPZlu@ZAFUe~Xg#xu#idCj z;(4>oiiIs`Y=*s93RtgxH8Eq^IQ|i`p)WP*J>H^8_o?g3b}6!lhsz4+pg10RP(Oao z$n0dtftC;7WAv5&B+>f-G!UpXnf#42(0R(UGPHge9wYS&Ui*dQ4gT)T*pCbz+~mZ1 z?qA9-X_{mG6Fn$KN$T5Zos$Hv=11z+rlCMl4nYP$PmC7sH@GWeP;*;Irk0k>pjdya zU@0nN1`*pBuU&i?g=^Nb-L(ky=4sK3=E`UQXn_lUzEtkM!vf4}@-JC^Qjz}lmqmso zQapJINhU?K&x%(|e<>TaAT=(vGzJKrz_qZd@nn1@c~kvt4+&AQ@K=95z6xrk9F1i> zTQk>91ynMprhk%L#GS3Qlu0X<-E&*ynwi>KG}fb-VKSwt}Ye z{DAbux~ArO6D=Ajb_@wft)nd{)tSZe9bIvfqOqR`vx$nkem}hpSyMCFy|?frt)1{I)HZ4_zZ$ zdkqEu_;or(h=?LvdJ#Wa+wA=}q@gc`gQA5Emr)O@TPlZ_0GcV+CtNn}njouNH`>Nl zeSxQd-80Y2%6kl_ui5m0#l4C_3~cD>yj{l`G=qdyB0` z1io4KQKbd_NECAE-F!yYr?z!7VlchIh0W<;s*5hE9-g1QC-`=n&$-UJPv^IxqBh5q z-s%qjr@bgc{}0JAf+H)EuS*W2T7j5{BpF@&n1FqTtt2;Zh3DHvQMxRIEk}#X^hj`3 zQ0n^piu|KliS*?!W1_hBr1SAYF+TSlpJ|R;*#yK(y3$BYnjKB2l_lrHYZRyBtqIhr zKD8Zrd+t!2JoGWKI{ z2-j?(^VsujrAnwi#DAVo&I+#F>S0k8Pnwq5zZ)@_7Jt*#uwGlIbwkQ)lY>3erO3=A z27e>`(i>+-*=AFv8T9wgvMt(k4KrT}!T%IE?Ac7;IF<~IpN12B=rs}WI_F}6kBdGD zLgUSu1?|qW-m(7tc~0h^&SOJ98*l80e7L!3>*p!up=j1yR`yMBkOr_*s+~Un+&Dq? 
zc}e0>^Qf@dY7*)cASDQtFy;I{n#$r!vOO*o3x6KJp7-Z1`@2QINKKo^)JZM(?F7lh zjhGu{mt|8gQhl?TEhd5F<_-(ZB!T$j$xh>}FoRY4h(Kp)TVbh%4#G;ukW73$^GBha zuR6$ZW*}ubt6=2w7&0hB(V!@;SD@(> z-oW}}$s$Q6U>ft-eYKsMi7QYKEtU&R`gGWJAJW-hY#trS=sd3eMNg#>3?;@~VT&NE zE(jh@w>6D2>;`!uXk(|mp0@9c8yDT)m%zC3Hk$qQ^LOdqg+I8pJ*Y}=ZU9)8m)Xxr zx=q)+waQK??x&ig)%jdo|EhzksJ{yy*Gc^~kMOlLDg$J~d{BR{;1QR>l^y%kLtGpG z9{2W372ZTU>@Ap%bTwIqUI9RcwgDV7RXyX73q7$7C^zhh^17W2d>MZkk^SHS$omyupZ+!j7dC%p`L*&QXmbFE!-gPB-l6!E{cdltGuU4wH*yT?W=`>mJG)xgTnK%1dn z%dJ@X?RmZ5G-~&t#b+UV0n|umy>HHcLA zASvgJbcjWa0DSDKYsBAWDvu%n6ZsfS3EDc9X6I>ZRl%@B!-oFzSl~|bVb&n;<-cpC zo4x==jP-quBsqc4z}7XE8O_zhR_lP!i8D-n77r1(Nnkoc<;RC@!8FwOk2N|3?-r*3 zW7h!kHR^t1xMNPB@ioLt4ZR@Ht6UJ5ma0Nv^0~q-Ujmx*WiE_BXZfERP7L-a=}F1y zFAZUe{~S3&qI)K20>gtSK5-D_?|?3Ts2i4isGAgW;)7t-Ibz0O3|u`zBg2Vn57Xok z;W${iO$ktq;7~@!1t9R?8JY0U3R%Yz#vh1QQ~~$^s6~WgUNXl*_<>{lGghIHz~I}g zGsGFJOwdtX8Y+A&puHo!DN8Z-w6)$Zy| zhj>uY3(cc~XM4bk!=76;rEwc{e=PSlhDP!4bx7L)KnN1OoJz`G_1u7@fYFla9N+lj zZ#&35R$x{JekN~>$y|36d2M?raw+JJ`*-M|KI+(1L}%hbb_-3UiJENlskBRWA*IJ>nm0+XU1@7tUdF z#P;*lpA>}Cy=~dA)wKhp2Wg%m*0y78#b=oDmP9A8O6cl(YrBuhm4*)dsna9T0*}tGSAXutt6ziSs_v~6V28T}^HWP7f?Kq^Ff zyR6=S>k-~~QU78ocwsr;=G()&sIMou^5yUSLR`Go3Xg`QhO@NE&{ZJ+zxgnMmKqC?(A_i8Fz z7L)uGp7y$?kl|HccZEmA-@tzSRbYiZ_ z$NTqS0qVm7-Id3OpJmy2w~v4IawLeA>u{fD%gCbj$p=ake|$ZgOCG!GN7yh}P|!|s znZa-|1k-o23!3d75PCW8OWBp6a3-6`b1=D(CZ6S}JxjOcdgObhKm&@HzCac1qbcDA z0iS>wmh7jtKD96yezHYs_>|L0W?SDGZq+94v+hS}b!F{n(;rLtbmzhBsgd;6?V)sO z8n8jEJPLbwi_D<4zFSw;N;vdKcdz@iSr3+&5`;YKuJ9|>jgX-)A>Q+rT_#|f_<9*r_qXRQ ze)F(PXLSVOqdp_u>*cZ>2)v1|_O$ z-%xI!T_YIhF4oBeHjC=qXd zqKpr$eI@~!4y;De>v5Jn9~WQC!Iz&632yh`NAGUZ&$A1zMtu=$nR-eO$Km(KSN$7Z zHM-KIV5TuioKG7V(tf|Oka9#Hn_o$**bPM0Sz3SEsX{p2p@ZkX7V?7)G$>Av-sQfq zI29J9beXqb;g|91Vh}aFa>wL@`De;CPhlsYTJi~Zla4oI514bs+ADUC7haszyuri1 z$`x>DkJc?{=GhZA2b~InU{E|~pgai`Y;_#3L&4_8r($Kt!Kz+o&XD*+^85Z)MGYo! 
zLPBx^l`V+gqx8&&jLSSbh9z}WhvKxU_HsN6Gv$jb5;I@+d+ICh`v#s?)T8wd&cH@Q zB6bY2S5K!+>w0d+T(GzzF@8<_kIrv6btNAcqJQGczPIZ~7|g3+DzCY{_Hyv5I_54M zdthd?5YrcXAD$KAR`mAtsp%j_)r`+W^&Bjh;cNV0iPnwd4WAoXsF_#OQFlPv|eTas;xtM*EF zqp-e&^>0eiA{e;CAA!|Mzj(~YO25p+6}=H=rZjc6bNsNHY2|+47(&KPDLNM8RAYE^ z;I%LIzTW75shsm`s%$l8cymgRhppNF6FP1`z^Q&(e+*eMdqMu9-5*%$(|gK?JDr`K zQJdI!jo$=y=Kf;w3lxd-TPsoxhr=;3_uwBq8}}L3Je^ifOo_(QH(97wXn7DaL%Cuf zPow@^ij{Ya*>B+^-+L2OZ9MWCStMD`;~eHsD^8aovrW4IlQd5IbaQx@VTKZIpS(Nz zbXHGvSkwBAmwxuqsL=8Tr9MyVG1h7b(T{PgpWepeE;I^jbXoj?9h@p?XMPL|3oBrW zS+|@EK+;oV0!FGoC^HDxPmB9|9!@{-!3(T1jIC}N)*ju@eckno#GO9haLCM8hP562 zR$;mIOSDDe?lWwBJj#g2(0}banU<_h= z^QGulqiGX&^|0A0Ilgv7+vmU|b|0#W?QXO*uznIfpX4gf&nF_7kqXQGhh7s4VlTMH zJ@w2+mQ~ynZ7jifl#=+TY(9T6lmOwARm>+_@>R26h+{YwhjV3t!A5$!cjlv{yFY3V zbN^e2xr+2QMn#*3HY57lzEyo+*TbX`i4!;G#BUxD*3Z`ysw#?6X4P2GK6#7%>=G0ySGf(PMnHFsYCPvh|7 z1==2uuZ=cc5OMtLuds2HT9bvd8PTsX-t<=z7bCiC!t$;syK=o)YRUAkx^(ek)|(A+ zo1l#rGUB=)gqtF{t{3yZAvzVX?_wZV#Z1XL^vhVZI(mZ#%+4;%M=!BuV0 z%Rv^lAYz#LT(zG8=v74u(S4n1KP@9ink5HBT; zOs=E+S$46WJ?5UFj!Zt&YGtr1a_>{=@2NU97XvzE_g>cid*owdK99M!`~?GGa;B z{&=+H*p7d+A!&$gaRmk#R7^C$W1&=$1k>oOc9yd1gjSW?rwFfZ&0oGsePK<4u?#lx9UYULI3tyD*<}Cpsxahu-*O*8#K#;SXDUEuR`*pOMJUlG-n@>pu*9 zfeK$f?vZl4*4~`CFIM%BU$Qw1bBi8HRC8-c*ggEEffA_hz6)>r%DfYVYG7Bs8u1~4E-boj$-`$LQXrkMvcmqY4 zzjV!S-3H~mIo)&mbn|U*^;W-A4QDb)f5gX|qq1i6^77u5^_}<(xb@F*E*b7t&WV*6 zFKB@wFFKxG!Y5(rWj zdE&m?vqzp1MjtodVrQ2@?VC>9 zKe=4NTf{31o3{uq<178(&TTbJ7t{B8T%T_SDuQ@bro&H=dV68@G&Syw7hpmoO`SvB z%2I>!PNbFos1TGVXIT3ye0i-5i%ff(dazLXFC}jn8 zE3}P4aoXjbq*xpw3TAj`Kve{5QNCFl^*=sp3mXN+Kk&~L5?<(|x%`#DF=a3^=xpH7 zi7!0-3;ct)Nr~43A4ovlU!Zt?y}=cyMhQLv0R~(yh|-BfBLhGE_X|xEH$XW7ejgwO z@e&5Z#)D(DK_UUp2Hv0e!r9PG6aC2l^G1-mOlib-4mJr`V)nQJ4i4~xdoh#YuG>{x zl`iq6SfQc+eru{`>NRwfRh7|}vB~u4VYK`2H)H*{C7wfOy8m&)Jk#F~6a%mNzhD3R k1pfC4{Qr9b?dVgOeM+FJ;g!vK=q?^BJb6_8!2IR^0keM$oB#j- literal 0 HcmV?d00001 From 152e04bc05c92d7a3431202d006ec362d530c8b2 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 11 Mar 2014 12:47:42 -0400 
Subject: [PATCH 110/148] Fix spelling issues in architecture document. --- src/sphinx/Architecture/Core-Principles.rst | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/sphinx/Architecture/Core-Principles.rst b/src/sphinx/Architecture/Core-Principles.rst index 843828468..a90a0b73c 100644 --- a/src/sphinx/Architecture/Core-Principles.rst +++ b/src/sphinx/Architecture/Core-Principles.rst @@ -60,15 +60,15 @@ the AttributeMap, we first need to construct one of these keys. Let's look at t Now that there's a definition of what build state is, there needs to be a way to dynamically construct it. In sbt, this is done through the ``Setting[_]`` sequence. -Introduction to Settings -======================== +Settings Architecture +===================== A Setting represents the means of constructing the value of one particular ``AttributeKey[_]`` in the ``AttributeMap`` of build state. A setting consists of two pieces: 1. The ``AttributeKey[T]`` where the value of the setting should be assigned. -2. An ``Intialize[T]`` object which is able to construct the value for this setting. +2. An ``Initialize[T]`` object which is able to construct the value for this setting. -Sbt's intiialization time is basically just taking a sequence of these ``Setting[_]`` objects and running their initialization objects and then storing the value into the ``AttributeMap``. This means overwriting an exisitng value at a key is as easy as appending a +Sbt's initialization time is basically just taking a sequence of these ``Setting[_]`` objects and running their initialization objects and then storing the value into the ``AttributeMap``. This means overwriting an exisitng value at a key is as easy as appending a ``Setting[_]`` to the end of the sequence which does so. Where it gets interesting is that ``Initialize[T]`` can depend on other ``AttributeKey[_]``s in the build state. 
Each ``Initialize[_]`` @@ -79,7 +79,7 @@ when it comes to ``Initialize[_]`` dependencies: 2. If one ``Initialize[_]`` depends on another ``Initialize[_]`` key, then *all* associated ``Initialize[_]`` blocks for that key must have run before we load the value. -The above image shows a bit of what's expLet's look at what gets stored for the setting :: +Let's look at what gets stored for the setting :: normalizedName := normalize(name.value) @@ -91,7 +91,7 @@ The above image shows a bit of what's expLet's look at what gets stored for the .. image:: overview-setting-example.png -Here, a ``Setting[_]`` is constructed that understands it depends on the value in the ``name`` AttributeKey. Its intiialize block first grabs the value of the ``name`` key, then runs the function normalize on it to compute its value. +Here, a ``Setting[_]`` is constructed that understands it depends on the value in the ``name`` AttributeKey. Its initialize block first grabs the value of the ``name`` key, then runs the function normalize on it to compute its value. This represents the core mechanism of how to construct sbt's build state. Conceptually, at some point we have a graph of dependencies and initialization functions which we can use to construct the first build state. Once this is completed, we can then start to process @@ -99,14 +99,14 @@ user requests. -Introduction to Tasks -===================== +Task Architecture +================= The next layer in sbt is around these user request, or tasks. When a user configures a build, they are defining a set of repeatable tasks that they can run on their project. Things like ``compile`` or ``test``. These tasks *also* have a dependency graph, where e.g. the ``test`` task requires that ``compile`` has run before it can successfully execute. -Sbt's defines a class ``Task[T]``. The ``T`` type parameter represents the type of data returned by a task. Remember the tenenats of +Sbt's defines a class ``Task[T]``. 
The ``T`` type parameter represents the type of data returned by a task. Remember the tenets of sbt? "All things have types" and "Dependencies are explicit" both hold true for tasks. Sbt promotes a style of task dependencies that is closer to functional programming: Return data for your users rather than using shared mutable state. From fbffdb6551f2c8b900c4ee2e7003b267f9f99c8b Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 11 Mar 2014 13:18:35 -0400 Subject: [PATCH 111/148] Add changes documentation for 0.13.1->0.13.2 --- src/sphinx/Community/Changes.rst | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/src/sphinx/Community/Changes.rst b/src/sphinx/Community/Changes.rst index 94b42d084..f08a8fa2d 100644 --- a/src/sphinx/Community/Changes.rst +++ b/src/sphinx/Community/Changes.rst @@ -2,6 +2,33 @@ Changes ======= +0.13.1 to 0.13.2 +~~~~~~~~~~~~~~~~ +- Improved the control over included settings in ``Addsettings``. Can now control when settings in ``project/*.scala`` files are included. +- Adding new ``AutoPlugin`` feature, and associated ``plugins`` command. +- Adding new name-hashing feature to incremental compiler. Alters how scala dependencies are tracked, reducing number of recompiles necessary. +- Added the ability to launch servers via the sbt-launcher. +- Added ``.previous`` feature on tasks which can load the pervious value. +- Added an ``all`` command which can run more than tasks in parallel. +- Exposed the 'overwrite' flags from ivy. Added warning if overwriting a release version. +- Improve the error message when credentials are not found in Ivy. +- Improve task macros to handle more scala constructs. +- Fix ``last`` and ``export`` tasks to read from the correct stream. +- Fix issue where ivy's ``.+`` dependency ranges were not correctly translated to maven. +- Override security manager to ignore file permissions (performance issue) +- 2.11 compatibility fixes +- Launcher can now handle ivy's ``.+`` revisions. 
+- SessionSettings now correctly overwrite existing settings. +- Adding a simple Logic system for inclusionary/dependency logic of plugins. +- Improve build hooks for ``LoggerReporter`` and ``TaskProgress``. +- Serialize incremental compiler analysis into text-file format. +- Issue a warning when generating Paths and separate already exists in the path. +- Migrate to Ivy 2.3.0-final. +- Docs: Use bintray as default repository host +- Docs: improved docs on test groups. +- Docs: updated documentation on the Launcher. +- Docs: started architecture document. + 0.13.0 to 0.13.1 ~~~~~~~~~~~~~~~~ From 7c15eb01f388cf87b0d66425c0db45e5f04f6c95 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Wed, 12 Mar 2014 08:50:37 -0400 Subject: [PATCH 112/148] When fragmenting Defaults, we mixed back settings in the wrong order. * packageArtifacts is not cleared by defautlSettings * Added a test for this behavior (this one test should ensure the ordering for most settings is correct.) Fixes #1176 --- main/src/main/scala/sbt/Defaults.scala | 2 +- sbt/src/sbt-test/project/default-settings/build.sbt | 8 ++++++++ sbt/src/sbt-test/project/default-settings/test | 1 + 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 sbt/src/sbt-test/project/default-settings/build.sbt create mode 100644 sbt/src/sbt-test/project/default-settings/test diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index d01745b45..04b73d5ed 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -971,7 +971,7 @@ object Classpaths publishM2 <<= publishTask(publishM2Configuration, deliverLocal) ) @deprecated("0.13.2", "This has been split into jvmIvySettings and ivyPublishSettings.") - val publishSettings: Seq[Setting[_]] = jvmPublishSettings ++ ivyPublishSettings + val publishSettings: Seq[Setting[_]] = ivyPublishSettings ++ jvmPublishSettings private[this] def baseGlobalDefaults = Defaults.globalDefaults(Seq( 
conflictWarning :== ConflictWarning.default("global"), diff --git a/sbt/src/sbt-test/project/default-settings/build.sbt b/sbt/src/sbt-test/project/default-settings/build.sbt new file mode 100644 index 000000000..a00c6c634 --- /dev/null +++ b/sbt/src/sbt-test/project/default-settings/build.sbt @@ -0,0 +1,8 @@ + +val root = Project("root", file("."), settings=Defaults.defaultSettings) + + +TaskKey[Unit]("checkArtifacts", "test") := { + val arts = packagedArtifacts.value + assert(!arts.isEmpty, "Packaged artifacts must not be empty!") +} \ No newline at end of file diff --git a/sbt/src/sbt-test/project/default-settings/test b/sbt/src/sbt-test/project/default-settings/test new file mode 100644 index 000000000..0f165ede1 --- /dev/null +++ b/sbt/src/sbt-test/project/default-settings/test @@ -0,0 +1 @@ +> checkArtifacts \ No newline at end of file From 9f20f04e1656f9732c32a3e3d95217b70ca4e9cf Mon Sep 17 00:00:00 2001 From: James Roper Date: Tue, 18 Mar 2014 19:44:32 +1100 Subject: [PATCH 113/148] Allow end users to add Plugins, not just AutoPlugins This allows plugins to define a Plugins instance that captures both the plugin and its required dependencies. Also fixed up some scaladocs that were wrong. --- main/src/main/scala/sbt/Plugins.scala | 6 +++--- main/src/main/scala/sbt/Project.scala | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 926defd11..1d1b7b977 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -36,11 +36,11 @@ Steps for users: For example, given plugins Web and Javascript (perhaps provided by plugins added with addSbtPlugin), - .plugins( Web && Javascript ) + .addPlugins( Web && Javascript ) will activate `MyPlugin` defined above and have its settings automatically added. 
If the user instead defines - .plugins( Web && Javascript && !MyPlugin) + .addPlugins( Web && Javascript ).disablePlugins(MyPlugin) then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added. */ @@ -186,7 +186,7 @@ object Plugins if(removed.isEmpty) Empty else And(removed) } - /** Defines a clause for `ap` such that the [[AutPlugin]] provided by `ap` is the head and the selector for `ap` is the body. */ + /** Defines a clause for `ap` such that the [[AutoPlugin]] provided by `ap` is the head and the selector for `ap` is the body. */ private[sbt] def asClause(ap: AutoPlugin): Clause = Clause( convert(ap.select), Set(Atom(ap.label)) ) diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index afcc29826..510e3e531 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -138,7 +138,7 @@ sealed trait Project extends ProjectDefinition[ProjectReference] /** Sets the [[AutoPlugin]]s of this project. A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ - def addPlugins(ns: AutoPlugin*): Project = setPlugins(Plugins.and(plugins, Plugins.And(ns.toList))) + def addPlugins(ns: Plugins*): Project = setPlugins(ns.foldLeft(plugins)(Plugins.and)) /** Disable the given plugins on this project. 
*/ def disablePlugins(ps: AutoPlugin*): Project = From 6588ac1b62259956f4c47713e1b93088f9d47d6a Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Wed, 19 Mar 2014 11:04:06 +0000 Subject: [PATCH 114/148] sbtTransformHash -> e:sbtTransformHash --- ivy/src/main/scala/sbt/CustomPomParser.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ivy/src/main/scala/sbt/CustomPomParser.scala b/ivy/src/main/scala/sbt/CustomPomParser.scala index 7023ab8d9..dc56866bb 100644 --- a/ivy/src/main/scala/sbt/CustomPomParser.scala +++ b/ivy/src/main/scala/sbt/CustomPomParser.scala @@ -43,7 +43,7 @@ object CustomPomParser val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit") val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform) - private[this] val TransformedHashKey = "sbtTransformHash" + private[this] val TransformedHashKey = "e:sbtTransformHash" // A hash of the parameters transformation is based on. // If a descriptor has a different hash, we need to retransform it. private[this] val TransformHash: String = hash((unqualifiedKeys ++ JarPackagings).toSeq.sorted) From 2a98355c64ab37509e4c5b333cbdc5ae51dce5bd Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 7 Feb 2014 22:52:44 +0100 Subject: [PATCH 115/148] Test for missing dependencies of macro arguments Add a test which shows the problem of not properly capturing dependencies of macro arguments. 
--- .../macro-arg-dep/macro-client/Client.scala | 5 ++++ .../macro-arg-dep/macro-client/Foo.scala | 5 ++++ .../macro-client/changes/Foo.scala | 3 ++ .../macro-provider/Provider.scala | 12 ++++++++ .../macro-arg-dep/project/build.scala | 29 +++++++++++++++++++ .../source-dependencies/macro-arg-dep/test | 15 ++++++++++ 6 files changed, 69 insertions(+) create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep/test diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala new file mode 100644 index 000000000..94ad4bcc8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala @@ -0,0 +1,5 @@ +package macro + +object Client { + Provider.printTree(Foo.str) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala new file mode 100644 index 000000000..1908f0673 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala @@ -0,0 +1,5 @@ +package macro + +object Foo { + def str: String = "abc" +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala new file mode 100644 index 000000000..e3deb0f43 --- /dev/null +++ 
b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala @@ -0,0 +1,3 @@ +package macro +object Foo { +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala new file mode 100644 index 000000000..facc4a468 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala @@ -0,0 +1,12 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def printTree(arg: Any) = macro printTreeImpl + def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = { + val argStr = arg.tree.toString + val literalStr = c.universe.Literal(c.universe.Constant(argStr)) + c.Expr[String](literalStr) + } +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala new file mode 100644 index 000000000..a5382240f --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ), + incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test 
b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test new file mode 100644 index 000000000..e6486db5e --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test @@ -0,0 +1,15 @@ +> compile + +# remove `Foo.str` which is an argument to a macro that incremental compiler doesn't see in +# Client.scala because macro has been already expanded + +$ copy-file macro-client/changes/Foo.scala macro-client/Foo.scala + +# we should recompile Foo.scala first and then fail to compile Client.scala due to missing +# `Foo.str`; however recompilation of Client.scala is never triggered due to missing +# dependency +-> macro-client/compile + +> clean + +-> compile From 70fecfe767f5d9a771a546e86238b3d2c06250a4 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Tue, 4 Mar 2014 18:21:44 +0100 Subject: [PATCH 116/148] Record dependencies on macro arguments Macros take arguments as trees and return some other trees; both of them have dependencies but we see trees only after expansion and recorded only those dependencies. This commit solves this problem by looking into the attachments of the trees that are supposed to contain originals of macro expansions and recording dependencies of the macro before its expansion. 
--- .../src/main/scala/xsbt/Compat.scala | 38 ++++++++++++ .../src/main/scala/xsbt/Dependency.scala | 2 + .../main/scala/xsbt/ExtractUsedNames.scala | 62 ++++++++++++------- 3 files changed, 78 insertions(+), 24 deletions(-) diff --git a/compile/interface/src/main/scala/xsbt/Compat.scala b/compile/interface/src/main/scala/xsbt/Compat.scala index 17a1a8f6b..d92ba6e73 100644 --- a/compile/interface/src/main/scala/xsbt/Compat.scala +++ b/compile/interface/src/main/scala/xsbt/Compat.scala @@ -91,4 +91,42 @@ abstract class Compat private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.") private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat + + object MacroExpansionOf { + def unapply(tree: Tree): Option[Tree] = { + + // MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x + object Compat { + class MacroExpansionAttachment(val original: Tree) + + // Trees have no attachments in 2.8.x and 2.9.x + implicit def withAttachments(tree: Tree): WithAttachments = new WithAttachments(tree) + class WithAttachments(val tree: Tree) { + object EmptyAttachments { + def all = Set.empty[Any] + } + val attachments = EmptyAttachments + } + } + import Compat._ + + locally { + // Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all + import global._ // this is where MEA lives in 2.10.x + + // `original` has been renamed to `expandee` in 2.11.x + implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att) + class WithExpandee(att: MacroExpansionAttachment) { + def expandee: Tree = att.original + } + + locally { + import analyzer._ // this is where MEA lives in 2.11.x + tree.attachments.all.collect { + case att: MacroExpansionAttachment => att.expandee + } headOption + } + } + } + } } diff --git a/compile/interface/src/main/scala/xsbt/Dependency.scala 
b/compile/interface/src/main/scala/xsbt/Dependency.scala index e9b482ef9..b8a55c8a9 100644 --- a/compile/interface/src/main/scala/xsbt/Dependency.scala +++ b/compile/interface/src/main/scala/xsbt/Dependency.scala @@ -146,6 +146,8 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile deps.foreach(addDependency) case Template(parents, self, body) => traverseTrees(body) + case MacroExpansionOf(original) => + this.traverse(original) case other => () } super.traverse(tree) diff --git a/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala b/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala index 9f89a3459..6ab01c9eb 100644 --- a/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala +++ b/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala @@ -38,7 +38,7 @@ import scala.tools.nsc._ * The tree walking algorithm walks into TypeTree.original explicitly. * */ -class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) { +class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) extends Compat { import global._ def extract(unit: CompilationUnit): Set[String] = { @@ -53,30 +53,44 @@ class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) { val symbolNameAsString = symbol.name.decode.trim namesBuffer += symbolNameAsString } - def handleTreeNode(node: Tree): Unit = node match { - case _: DefTree | _: Template => () - // turns out that Import node has a TermSymbol associated with it - // I (Grzegorz) tried to understand why it's there and what does it represent but - // that logic was introduced in 2005 without any justification I'll just ignore the - // import node altogether and just process the selectors in the import node - case Import(_, selectors: List[ImportSelector]) => - def usedNameInImportSelector(name: Name): Unit = - if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString - selectors foreach { selector => - 
usedNameInImportSelector(selector.name) - usedNameInImportSelector(selector.rename) - } - // TODO: figure out whether we should process the original tree or walk the type - // the argument for processing the original tree: we process what user wrote - // the argument for processing the type: we catch all transformations that typer applies - // to types but that might be a bad thing because it might expand aliases eagerly which - // not what we need - case t: TypeTree if t.original != null => - t.original.foreach(handleTreeNode) - case t if t.hasSymbol && eligibleAsUsedName(t.symbol) => - addSymbol(t.symbol) - case _ => () + + def handleTreeNode(node: Tree): Unit = { + def handleMacroExpansion(original: Tree): Unit = original.foreach(handleTreeNode) + + def handleClassicTreeNode(node: Tree): Unit = node match { + case _: DefTree | _: Template => () + // turns out that Import node has a TermSymbol associated with it + // I (Grzegorz) tried to understand why it's there and what does it represent but + // that logic was introduced in 2005 without any justification I'll just ignore the + // import node altogether and just process the selectors in the import node + case Import(_, selectors: List[ImportSelector]) => + def usedNameInImportSelector(name: Name): Unit = + if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString + selectors foreach { selector => + usedNameInImportSelector(selector.name) + usedNameInImportSelector(selector.rename) + } + // TODO: figure out whether we should process the original tree or walk the type + // the argument for processing the original tree: we process what user wrote + // the argument for processing the type: we catch all transformations that typer applies + // to types but that might be a bad thing because it might expand aliases eagerly which + // not what we need + case t: TypeTree if t.original != null => + t.original.foreach(handleTreeNode) + case t if t.hasSymbol && eligibleAsUsedName(t.symbol) => + 
addSymbol(t.symbol) + case _ => () + } + + node match { + case MacroExpansionOf(original) => + handleClassicTreeNode(node) + handleMacroExpansion(original) + case _ => + handleClassicTreeNode(node) + } } + tree.foreach(handleTreeNode) namesBuffer.toSet } From b21e47536453f606682e1b24ba2182e5c01995f8 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 19 Mar 2014 22:21:29 +0100 Subject: [PATCH 117/148] Improve unit testing compiler It was not possible to make `ScalaCompilerForUnitTesting` compile several files in different runs, which means that it was not possible to compile and use a macro in a test case, since macros cannot be used in the same compilation run that defines them. This commit allows a test case to provide multiple grouped snippets of code that will be compiled in separate runs. For instance : List(Map(, ), Map()) Here, and will be compiled together, and then will be compiled, and will be able to use symbols defined in or . --- .../xsbt/ScalaCompilerForUnitTesting.scala | 63 ++++++++++++++----- 1 file changed, 46 insertions(+), 17 deletions(-) diff --git a/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala b/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala index 5362b1ca6..cb10d1d53 100644 --- a/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala +++ b/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala @@ -53,15 +53,19 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) { * dependencies between snippets. Source code snippets are identified by symbols. Each symbol should * be associated with one snippet only. * + * Snippets can be grouped to be compiled together in the same compiler run. This is + * useful to compile macros, which cannot be used in the same compilation run that + * defines them. + * * Symbols are used to express extracted dependencies between source code snippets. 
This way we have * file system-independent way of testing dependencies between source code "files". */ - def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = { - val (symbolsForSrcs, rawSrcs) = srcs.unzip - assert(symbolsForSrcs.distinct.size == symbolsForSrcs.size, - s"Duplicate symbols for srcs detected: $symbolsForSrcs") - val (tempSrcFiles, testCallback) = compileSrcs(rawSrcs: _*) - val fileToSymbol = (tempSrcFiles zip symbolsForSrcs).toMap + def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = { + val rawGroupedSrcs = srcs.map(_.values.toList).toList + val symbols = srcs.map(_.keys).flatten + val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs) + val fileToSymbol = (tempSrcFiles zip symbols).toMap + val memberRefFileDeps = testCallback.sourceDependencies collect { // false indicates that those dependencies are not introduced by inheritance case (target, src, false) => (src, target) @@ -82,40 +86,64 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) { // convert all collections to immutable variants multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) } + ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps)) } + def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = { + val symbols = srcs.map(_._1) + assert(symbols.distinct.size == symbols.size, + s"Duplicate symbols for srcs detected: $symbols") + extractDependenciesFromSrcs(List(srcs.toMap)) + } + /** - * Compiles given source code snippets written to a temporary files. Each snippet is + * Compiles given source code snippets written to temporary files. Each snippet is * written to a separate temporary file. * + * Snippets can be grouped to be compiled together in the same compiler run. This is + * useful to compile macros, which cannot be used in the same compilation run that + * defines them. 
+ * * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. */ - private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { + private def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = { withTemporaryDirectory { temp => val analysisCallback = new TestCallback(nameHashing) val classesDir = new File(temp, "classes") classesDir.mkdir() - val compiler = prepareCompiler(classesDir, analysisCallback) - val run = new compiler.Run - val srcFiles = srcs.toSeq.zipWithIndex map { case (src, i) => - val fileName = s"Test_$i.scala" - prepareSrcFile(temp, fileName, src) + + val compiler = prepareCompiler(classesDir, analysisCallback, classesDir.toString) + + val files = for((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield { + val run = new compiler.Run + val srcFiles = compilationUnit.toSeq.zipWithIndex map { case (src, i) => + val fileName = s"Test-$unitId-$i.scala" + prepareSrcFile(temp, fileName, src) + } + val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList + + run.compile(srcFilePaths) + + srcFilePaths.foreach(f => new File(f).delete) + srcFiles } - val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList - run.compile(srcFilePaths) - (srcFiles, analysisCallback) + (files.flatten.toSeq, analysisCallback) } } + private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { + compileSrcs(List(srcs.toList)) + } + private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = { val srcFile = new File(baseDir, fileName) sbt.IO.write(srcFile, src) srcFile } - private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback): CachedCompiler0#Compiler = { + private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = "."): CachedCompiler0#Compiler = { val args = Array.empty[String] object output extends SingleOutput { def outputDirectory: File = outputDir 
@@ -123,6 +151,7 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) { val weakLog = new WeakLog(ConsoleLogger(), ConsoleReporter) val cachedCompiler = new CachedCompiler0(args, output, weakLog, false) val settings = cachedCompiler.settings + settings.classpath.value = classpath settings.usejavacp.value = true val scalaReporter = new ConsoleReporter(settings) val delegatingReporter = DelegatingReporter(settings, ConsoleReporter) From f8bedf4012a5c44cb159abe2c816702767219d0b Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 20 Mar 2014 07:08:33 -0400 Subject: [PATCH 118/148] Unifies AutoPlugin and RootPlugin * Separates def select into def trigger and def requires. * Setting trigger = noTrigger and requires = empty makes a plugin a root. --- main/src/main/scala/sbt/BuildStructure.scala | 2 +- main/src/main/scala/sbt/Load.scala | 9 +- main/src/main/scala/sbt/Plugins.scala | 143 ++++++++++++------ main/src/main/scala/sbt/PluginsDebug.scala | 14 +- .../main/scala/sbt/plugins/GlobalModule.scala | 5 +- .../main/scala/sbt/plugins/IvyModule.scala | 3 +- .../main/scala/sbt/plugins/JvmModule.scala | 3 +- .../sbt-test/project/auto-plugins/build.sbt | 40 ++--- .../project/auto-plugins/project/Q.scala | 28 +++- .../binary-plugin/changes/define/A.scala | 6 +- src/sphinx/Extending/Plugins.rst | 21 +-- src/sphinx/Getting-Started/Using-Plugins.rst | 11 +- 12 files changed, 184 insertions(+), 101 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index d63752d87..615a80771 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -100,7 +100,7 @@ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoImport lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ autoImports.names) /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. 
*/ - lazy val compilePlugins: Plugins => Seq[AutoPlugin] = Plugins.compile(autoPlugins.values.toList) + lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.values.toList) } /** The built and loaded build definition project. diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index a275c907a..4b9edb637 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -415,7 +415,7 @@ object Load val initialProjects = defsScala.flatMap(b => projectsFromBuild(b, normBase)) val memoSettings = new mutable.HashMap[File, LoadedSbtFile] - def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, plugs, () => eval, config.injectSettings, Nil, memoSettings) + def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, plugs, () => eval, config.injectSettings, Nil, memoSettings, config.log) val loadedProjectsRaw = loadProjects(initialProjects) val hasRoot = loadedProjectsRaw.exists(_.base == normBase) || defsScala.exists(_.rootProject.isDefined) val (loadedProjects, defaultBuildIfNone) = @@ -457,13 +457,14 @@ object Load private[this] def projectsFromBuild(b: Build, base: File): Seq[Project] = b.projectDefinitions(base).map(resolveBase(base)) - private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile]): Seq[Project] = + private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, + acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile], log: Logger): Seq[Project] = { def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin], projectSettings: Seq[Setting[_]]): LoadedSbtFile = loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins, projectSettings) def 
loadForProjects = newProjects map { project => val autoPlugins = - try plugins.detected.compilePlugins(project.plugins) + try plugins.detected.deducePlugins(project.plugins, log) catch { case e: AutoPluginException => throw translateAutoPluginException(e, project) } val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins, project.settings) @@ -483,7 +484,7 @@ object Load if(nextProjects.isEmpty) loadedProjects else - loadTransitive(nextProjects, buildBase, plugins, eval, injectSettings, loadedProjects, memoSettings) + loadTransitive(nextProjects, buildBase, plugins, eval, injectSettings, loadedProjects, memoSettings, log) } private[this] def translateAutoPluginException(e: AutoPluginException, project: Project): AutoPluginException = e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n") diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 1d1b7b977..63f83397f 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -9,13 +9,14 @@ TODO: import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} import Def.Setting import Plugins._ + import annotation.tailrec /** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ trait AutoImport /** An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). -The `select` method defines the conditions and a method like `projectSettings` defines the settings to add. +The `requires` and `trigger` method defines the conditions and a method like `projectSettings` defines the settings to add. Steps for plugin authors: 1. Determine the [[AutoPlugins]]s that, when present (or absent), activate the AutoPlugin. 
@@ -25,7 +26,7 @@ For example, the following will automatically add the settings in `projectSettin to a project that has both the `Web` and `Javascript` plugins enabled. object MyPlugin extends AutoPlugin { - def select = Web && Javascript + def requires = Web && Javascript override def projectSettings = Seq(...) } @@ -44,14 +45,20 @@ will activate `MyPlugin` defined above and have its settings automatically added then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added. */ -abstract class AutoPlugin extends Plugins.Basic +abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions { - /** This AutoPlugin will be activated for a project when the [[Plugins]] matcher returned by this method matches that project's plugins - * AND the user does not explicitly exclude the Plugin returned by `provides`. - * - * For example, if this method returns `Web && Javascript`, this plugin instance will only be added - * if the `Web` and `Javascript` plugins are enabled. */ - def select: Plugins + /** Determines whether this AutoPlugin will be activated for this project when the `requires` clause is satisfied. + * + * When this method returns `allRequirements`, and `requires` method returns `Web && Javascript`, this plugin + * instance will be added automatically if the `Web` and `Javascript` plugins are enbled. + * + * When this method returns `noTrigger`, and `requires` method returns `Web && Javascript`, this plugin + * instance will be added only if the build user enables it, but it will automatically add both `Web` and `Javascript`. */ + def trigger: PluginTrigger + + /** This AutoPlugin requires the plugins the [[Plugins]] matcher returned by this method. See [[trigger]]. + */ + def requires: Plugins val label: String = getClass.getName.stripSuffix("$") @@ -74,22 +81,16 @@ abstract class AutoPlugin extends Plugins.Basic def unary_! 
: Exclude = Exclude(this) - /** If this plugin requries itself to be included, it means we're actually a nature, - * not a normal plugin. The user must specifically enable this plugin - * but other plugins can rely on its existence. - */ - final def isRoot: Boolean = - this match { - case _: RootAutoPlugin => true + /** If this plugin does not have any requirements, it means it is actually a root plugin. */ + private[sbt] final def isRoot: Boolean = + requires match { + case Empty => true case _ => false } -} -/** - * A root AutoPlugin is a plugin which must be explicitly enabled by users in their `addPlugins` method - * on a project. However, RootAutoPlugins represent the "root" of a tree of dependent auto-plugins. - */ -abstract class RootAutoPlugin extends AutoPlugin { - final def select: Plugins = this + + /** If this plugin does not have any requirements, it means it is actually a root plugin. */ + private[sbt] final def isAlwaysEnabled: Boolean = + isRoot && (trigger == AllRequirements) } /** An error that occurs when auto-plugins aren't configured properly. @@ -105,37 +106,83 @@ object AutoPluginException def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Plugins.translateMessage(origin), Some(origin)) } +sealed trait PluginTrigger +case object AllRequirements extends PluginTrigger +case object NoTrigger extends PluginTrigger + /** An expression that matches `AutoPlugin`s. */ sealed trait Plugins { def && (o: Basic): Plugins } -object Plugins + +sealed trait PluginsFunctions +{ + /** [[Plugins]] instance that doesn't require any [[Plugins]]s. */ + def empty: Plugins = Plugins.Empty + + /** This plugin is activated when all required plugins are present. */ + def allRequirements: PluginTrigger = AllRequirements + /** This plugin is activated only when it is manually activated. 
*/ + def noTrigger: PluginTrigger = NoTrigger +} + +object Plugins extends PluginsFunctions { /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[AutoPlugin]]s. - * The [[AutoPlugin]]s are topologically sorted so that a selected [[AutoPlugin]] comes before its selecting [[AutoPlugin]].*/ - def compile(defined: List[AutoPlugin]): Plugins => Seq[AutoPlugin] = - if(defined.isEmpty) - Types.const(Nil) + * The [[AutoPlugin]]s are topologically sorted so that a required [[AutoPlugin]] comes before its requiring [[AutoPlugin]].*/ + def deducer(defined0: List[AutoPlugin]): (Plugins, Logger) => Seq[AutoPlugin] = + if(defined0.isEmpty) (_, _) => Nil else { - val byAtom = defined.map(x => (Atom(x.label), x)) + // TODO: defined should return all the plugins + val allReqs = (defined0 flatMap { asRequirements }).toSet + val diff = allReqs diff defined0.toSet + val defined = if (!diff.isEmpty) diff.toList ::: defined0 + else defined0 + + val byAtom = defined map { x => (Atom(x.label), x) } val byAtomMap = byAtom.toMap if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) - // Ignore clauses for plugins that just require themselves be specified. + // Ignore clauses for plugins that does not require anything else. // Avoids the requirement for pure Nature strings *and* possible // circular dependencies in the logic. 
- val clauses = Clauses( defined.filterNot(_.isRoot).map(d => asClause(d)) ) - requestedPlugins => - Logic.reduce(clauses, flattenConvert(requestedPlugins).toSet) match { + val allRequirementsClause = defined.filterNot(_.isRoot).flatMap(d => asRequirementsClauses(d)) + val allEnabledByClause = defined.filterNot(_.isRoot).flatMap(d => asEnabledByClauses(d)) + (requestedPlugins, log) => { + val alwaysEnabled: List[AutoPlugin] = defined.filter(_.isAlwaysEnabled) + val knowlege0: Set[Atom] = ((flatten(requestedPlugins) ++ alwaysEnabled) collect { + case x: AutoPlugin => Atom(x.label) + }).toSet + val clauses = Clauses((allRequirementsClause ::: allEnabledByClause) filterNot { _.head subsetOf knowlege0 }) + log.debug(s"deducing auto plugins based on known facts ${knowlege0.toString} and clauses ${clauses.toString}") + Logic.reduce(clauses, (flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match { case Left(problem) => throw AutoPluginException(problem) - case Right(results) => - // results includes the originally requested (positive) atoms, - // which won't have a corresponding AutoPlugin to map back to - results.ordered.flatMap(a => byAtomMap.get(a).toList) + case Right(results0) => + log.debug(s" :: deduced result: ${results0}") + val plugins = results0.ordered map { a => + byAtomMap.getOrElse(a, throw AutoPluginException(s"${a} was not found in atom map.")) + } + val retval = topologicalSort(plugins, log) + log.debug(s" :: sorted deduced result: ${retval.toString}") + retval } + } } - + private[sbt] def topologicalSort(ns: List[AutoPlugin], log: Logger): List[AutoPlugin] = { + log.debug(s"sorting: ns: ${ns.toString}") + @tailrec def doSort(found0: List[AutoPlugin], notFound0: List[AutoPlugin], limit0: Int): List[AutoPlugin] = { + log.debug(s" :: sorting:: found: ${found0.toString} not found ${notFound0.toString}") + if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically") + else if (notFound0.isEmpty) found0 + else { + val 
(found1, notFound1) = notFound0 partition { n => asRequirements(n).toSet subsetOf found0.toSet } + doSort(found0 ::: found1, notFound1, limit0 - 1) + } + } + val (roots, nonRoots) = ns partition (_.isRoot) + doSort(roots, nonRoots, ns.size * ns.size + 1) + } private[sbt] def translateMessage(e: LogicException) = e match { case ic: InitialContradictions => s"Contradiction in selected plugins. These plguins were both included and excluded: ${literalsString(ic.literals.toSeq)}" case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" @@ -153,8 +200,6 @@ object Plugins throw AutoPluginException(message) } - /** [[Plugins]] instance that doesn't require any [[Plugins]]s. */ - def empty: Plugins = Empty private[sbt] final object Empty extends Plugins { def &&(o: Basic): Plugins = o override def toString = "" @@ -186,10 +231,18 @@ object Plugins if(removed.isEmpty) Empty else And(removed) } - /** Defines a clause for `ap` such that the [[AutoPlugin]] provided by `ap` is the head and the selector for `ap` is the body. */ - private[sbt] def asClause(ap: AutoPlugin): Clause = - Clause( convert(ap.select), Set(Atom(ap.label)) ) - + /** Defines enabled-by clauses for `ap`. */ + private[sbt] def asEnabledByClauses(ap: AutoPlugin): List[Clause] = + // `ap` is the head and the required plugins for `ap` is the body. + if (ap.trigger == AllRequirements) Clause( convert(ap.requires), Set(Atom(ap.label)) ) :: Nil + else Nil + /** Defines requirements clauses for `ap`. */ + private[sbt] def asRequirementsClauses(ap: AutoPlugin): List[Clause] = + // required plugin is the head and `ap` is the body. 
+ asRequirements(ap) map { x => Clause( convert(ap), Set(Atom(x.label)) ) } + private[sbt] def asRequirements(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect { + case x: AutoPlugin => x + } private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match { case And(ns) => convertAll(ns) case b: Basic => convertBasic(b) :: Nil @@ -212,7 +265,7 @@ object Plugins } private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic - /** True if the select clause `n` is satisifed by `model`. */ + /** True if the trigger clause `n` is satisifed by `model`. */ def satisfied(n: Plugins, model: Set[AutoPlugin]): Boolean = flatten(n) forall { case Exclude(a) => !model(a) diff --git a/main/src/main/scala/sbt/PluginsDebug.scala b/main/src/main/scala/sbt/PluginsDebug.scala index a24546c23..7d9b2670a 100644 --- a/main/src/main/scala/sbt/PluginsDebug.scala +++ b/main/src/main/scala/sbt/PluginsDebug.scala @@ -118,7 +118,7 @@ private[sbt] object PluginsDebug def projectForRef(ref: ProjectRef): ResolvedProject = get(Keys.thisProject in ref) val perBuild: Map[URI, Set[AutoPlugin]] = structure.units.mapValues(unit => availableAutoPlugins(unit).toSet) val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList - lazy val context = Context(currentProject.plugins, currentProject.autoPlugins, Plugins.compile(pluginsThisBuild), pluginsThisBuild) + lazy val context = Context(currentProject.plugins, currentProject.autoPlugins, Plugins.deducer(pluginsThisBuild), pluginsThisBuild, s.log) lazy val debug = PluginsDebug(context.available) if(!pluginsThisBuild.contains(plugin)) { val availableInBuilds: List[URI] = perBuild.toList.filter(_._2(plugin)).map(_._1) @@ -152,9 +152,9 @@ private[sbt] object PluginsDebug /** The context for debugging a plugin (de)activation. * @param initial The initially defined [[AutoPlugin]]s. * @param enabled The resulting model. - * @param compile The function used to compute the model. 
+ * @param deducePlugin The function used to compute the model. * @param available All [[AutoPlugin]]s available for consideration. */ - final case class Context(initial: Plugins, enabled: Seq[AutoPlugin], compile: Plugins => Seq[AutoPlugin], available: List[AutoPlugin]) + final case class Context(initial: Plugins, enabled: Seq[AutoPlugin], deducePlugin: (Plugins, Logger) => Seq[AutoPlugin], available: List[AutoPlugin], log: Logger) /** Describes the steps to activate a plugin in some context. */ sealed abstract class PluginEnable @@ -236,10 +236,10 @@ private[sbt] object PluginsDebug // The model that results when the minimal plugins are enabled and the minimal plugins are excluded. // This can include more plugins than just `minRequiredPlugins` because the plguins required for `plugin` // might activate other plugins as well. - val modelForMin = context.compile(and(includeAll(minRequiredPlugins), excludeAll(minAbsentPlugins))) + val modelForMin = context.deducePlugin(and(includeAll(minRequiredPlugins), excludeAll(minAbsentPlugins)), context.log) val incrementalInputs = and( includeAll(minRequiredPlugins ++ initialPlugins), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins)) - val incrementalModel = context.compile(incrementalInputs).toSet + val incrementalModel = context.deducePlugin(incrementalInputs, context.log).toSet // Plugins that are newly enabled as a result of selecting the plugins needed for `plugin`, but aren't strictly required for `plugin`. // These could be excluded and `plugin` and the user's current plugins would still be activated. @@ -252,7 +252,7 @@ private[sbt] object PluginsDebug // If both A and B must be deactivated, but A transitively depends on B, deactivating B will deactivate A. // If A must be deactivated, but one if its (transitively) required plugins isn't present, it won't be activated. // So, in either of these cases, A doesn't need to be considered further and won't be included in this set. 
- val minDeactivate = minAbsentPlugins.filter(p => Plugins.satisfied(p.select, incrementalModel)) + val minDeactivate = minAbsentPlugins.filter(p => Plugins.satisfied(p.requires, incrementalModel)) val deactivate = for(d <- minDeactivate.toList) yield { // removing any one of these plugins will deactivate `d`. TODO: This is not an especially efficient implementation. @@ -280,7 +280,7 @@ private[sbt] object PluginsDebug // The actual model might be larger, since other plugins might be enabled by the selected plugins. private[this] def minimalModel(plugin: AutoPlugin): Seq[Basic] = Dag.topologicalSortUnchecked(plugin: Basic) { case _: Exclude => Nil - case ap: AutoPlugin => Plugins.flatten(ap.select) + case ap: AutoPlugin => Plugins.flatten(ap.requires) :+ plugin } /** String representation of [[PluginEnable]], intended for end users. */ diff --git a/main/src/main/scala/sbt/plugins/GlobalModule.scala b/main/src/main/scala/sbt/plugins/GlobalModule.scala index 570cbc80f..00485a5e0 100644 --- a/main/src/main/scala/sbt/plugins/GlobalModule.scala +++ b/main/src/main/scala/sbt/plugins/GlobalModule.scala @@ -9,8 +9,9 @@ import Def.Setting * Can control task-level paralleism, logging, etc. */ object GlobalModule extends AutoPlugin { - // We must be explicitly enabled - def select = Plugins.empty + // This is included by default + def requires = empty + def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = Defaults.coreDefaultSettings diff --git a/main/src/main/scala/sbt/plugins/IvyModule.scala b/main/src/main/scala/sbt/plugins/IvyModule.scala index a0e361503..0b01f4670 100644 --- a/main/src/main/scala/sbt/plugins/IvyModule.scala +++ b/main/src/main/scala/sbt/plugins/IvyModule.scala @@ -16,7 +16,8 @@ import Def.Setting object IvyModule extends AutoPlugin { // We are automatically included on everything that has the global module, // which is automatically included on everything. 
- def select = GlobalModule + def requires = GlobalModule + def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings diff --git a/main/src/main/scala/sbt/plugins/JvmModule.scala b/main/src/main/scala/sbt/plugins/JvmModule.scala index 0a7219c26..f50fb1e7d 100644 --- a/main/src/main/scala/sbt/plugins/JvmModule.scala +++ b/main/src/main/scala/sbt/plugins/JvmModule.scala @@ -17,7 +17,8 @@ import Def.Setting object JvmModule extends AutoPlugin { // We are automatically enabled for any IvyModule project. We also require its settings // for ours to work. - def select = IvyModule + def requires = IvyModule + def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = Defaults.runnerSettings ++ diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt index 57e1394c8..7039ed235 100644 --- a/sbt/src/sbt-test/project/auto-plugins/build.sbt +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -1,34 +1,38 @@ -// excludePlugins(C) will prevent C, and thus D, from being auto-added -lazy val a = project.addPlugins(A, B).disablePlugins(Q) +// disablePlugins(Q) will prevent R from being auto-added +lazy val projA = project.addPlugins(A, B).disablePlugins(Q) -// without B, C is not added -lazy val b = project.addPlugins(A) +// without B, Q is not added +lazy val projB = project.addPlugins(A) -// with both A and B, C is selected, which in turn selects D -lazy val c = project.addPlugins(A, B) +// with both A and B, Q is selected, which in turn selects R, but not S +lazy val projC = project.addPlugins(A, B) // with no natures defined, nothing is auto-added -lazy val d = project +lazy val projD = project +// with S selected, Q is loaded automatically, which in turn selects R +lazy val projE = project.addPlugins(S) check := { - val ddel = (del in d).?.value // should be None - same(ddel, None, "del in d") - val bdel = (del in 
b).?.value // should be None - same(bdel, None, "del in b") - val adel = (del in a).?.value // should be None - same(adel, None, "del in a") + val adel = (del in projA).?.value // should be None + same(adel, None, "del in projA") + val bdel = (del in projB).?.value // should be None + same(bdel, None, "del in projB") + val ddel = (del in projD).?.value // should be None + same(ddel, None, "del in projD") // val buildValue = (demo in ThisBuild).value same(buildValue, "build 0", "demo in ThisBuild") val globalValue = (demo in Global).value same(globalValue, "global 0", "demo in Global") - val projValue = (demo in c).value - same(projValue, "project c Q R", "demo in c") - val qValue = (del in c in q).value - same(qValue, " Q R", "del in c in q") + val projValue = (demo in projC).value + same(projValue, "project projC Q R", "demo in projC") + val qValue = (del in projC in q).value + same(qValue, " Q R", "del in projC in q") + val optInValue = (del in projE in q).value + same(optInValue, " Q S R", "del in projE in q") } def same[T](actual: T, expected: T, label: String) { assert(actual == expected, s"Expected '$expected' for `$label`, got '$actual'") -} \ No newline at end of file +} diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index c6dea7ba8..84cef307f 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -5,7 +5,8 @@ object AI extends AutoImport { trait EmptyAutoPlugin extends AutoPlugin { - def select = Plugins.empty + def requires = empty + def trigger = noTrigger } object A extends EmptyAutoPlugin object B extends EmptyAutoPlugin @@ -23,12 +24,14 @@ object AI extends AutoImport import AI._ object D extends AutoPlugin { - def select: Plugins = E + def requires: Plugins = E + def trigger = allRequirements } object Q extends AutoPlugin { - def select: Plugins = A && B + def requires: Plugins = A && B + def trigger = 
allRequirements override def projectConfigurations: Seq[Configuration] = p :: @@ -56,12 +59,25 @@ object Q extends AutoPlugin object R extends AutoPlugin { // NOTE - Only plugins themselves support exclusions... - def select = Q && !D + def requires = Q && !D + def trigger = allRequirements override def projectSettings = Seq( - // tests proper ordering: R requires C, so C settings should come first + // tests proper ordering: R requires Q, so Q settings should come first del in q += " R", // tests that configurations are properly registered, enabling delegation from p to q demo += (del in p).value ) -} \ No newline at end of file +} + +// This is an opt-in plugin with a requirement +// Unless explicitly loaded by the build user, this will not be activated. +object S extends AutoPlugin +{ + def requires = Q + def trigger = noTrigger + + override def projectSettings = Seq( + del in q += " S" + ) +} diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index a9f71c928..99cd6d527 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -4,7 +4,8 @@ import Keys._ object C extends AutoImport { object bN extends AutoPlugin { - def select = Plugins.empty + def requires = empty + def trigger = allRequirements } lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") } @@ -12,7 +13,8 @@ object C extends AutoImport { import C._ object A extends AutoPlugin { - override def select = bN + def requires = bN + def trigger = allRequirements override def projectSettings = Seq( check := {} ) diff --git a/src/sphinx/Extending/Plugins.rst b/src/sphinx/Extending/Plugins.rst index 3d510a8ba..77f5da73e 100644 --- a/src/sphinx/Extending/Plugins.rst +++ b/src/sphinx/Extending/Plugins.rst @@ -234,10 +234,12 @@ core methods without requiring an import or qualification. 
In addition, a plugin can implement the `AutoPlugin` class. This has additoinal features, such as * Specifying plugin dependencies. +* Automatically activating itself when all dependencies are present. * Specifying `projectSettings`, `buildSettings`, and `globalSettings` as appropriate. The AutoPlugin's `projectSettings` is automatically appended to each project's settings, when its dependencies also exist on that project -The `select` method defines the conditions by which this plugin's settings are automatically imported. +The `requires` method defines the dependencies to other plugins. +The `trigger` method defines the conditions by which this plugin's settings are automatically activated. The `buildSettings` is appended to each build's settings (that is, `in ThisBuild`). The `globalSettings` is appended once to the global settings (`in Global`). These allow a plugin to automatically provide new functionality or new defaults. @@ -268,8 +270,9 @@ An example of a typical plugin: object MyPlugin extends AutoPlugin { // Only enable this plugin for projects which are JvmModules. - def select = sbt.plugins.JvmModule - + def trigger = allRequirements + def requires = sbt.plugins.JvmModule + // configuration points, like the built in `version`, `libraryDependencies`, or `compile` // by implementing Plugin, these are automatically imported in a user's `build.sbt` val newTask = taskKey[Unit]("A new task.") @@ -302,11 +305,8 @@ A build definition that uses the plugin might look like: Root Plugins ------------ -Some plugins should always be explicitly enabled on projects. Sbt calls these "RootPlugins", i.e. plugins -that are "root" nodes in the plugin depdendency graph. To define a root plugin, just extend the `sbt.RootPlugin` -interface. This interface is exactly like the `AutoPlugin` interface except that a `select` method is not -needed. - +Some plugins should always be explicitly enabled on projects. Sbt calls these root plugins, i.e. 
plugins +that are "root" nodes in the plugin depdendency graph. To define a root plugin, set the `trigger` method to `noTrigger` and the `requires` method to `empty`. Example command root plugin ---------------------- @@ -329,8 +329,11 @@ A basic plugin that adds commands looks like: import sbt._ import Keys._ - object MyPlugin extends RootPlugin + object MyPlugin extends AutoPlugin { + def trigger = noTrigger + def requires = empty + override lazy val projectSettings = Seq(commands += myCommand) lazy val myCommand = diff --git a/src/sphinx/Getting-Started/Using-Plugins.rst b/src/sphinx/Getting-Started/Using-Plugins.rst index 57dcc0a1d..4ab353755 100644 --- a/src/sphinx/Getting-Started/Using-Plugins.rst +++ b/src/sphinx/Getting-Started/Using-Plugins.rst @@ -119,11 +119,12 @@ To create an sbt plugin, 1. Create a new project for the plugin. 2. Set `sbtPlugin := true` for the project in `build.sbt`. This adds a dependency on sbt and will detect and record Plugins that you define. - 3. Define an `object` that extends `AutoPlugin` or `RootPlugin`. The contents of this object will be automatically imported in `.sbt` files, so ensure it only contains important API definitions and types. - 4. Define any custom tasks or settings (see the next section :doc:`Custom-Settings`). - 5. Collect the default settings to apply to a project in a list for the user to add. Optionally override one or more of `AutoPlugin`'s methods to have settings automatically added to user projects. - 6. (Optional) For non-root plguins, declare dependencies on other plugins by overriding the `select` method. - 6. Publish the project. There is a :doc:`community repository ` available for open source plugins. + 3. Define another `object` that extends `AutoImport`. The contents of this object will be automatically imported in `.sbt` files, so ensure it only contains important API definitions and types. + 4. Define an `object` that extends `AutoPlugin`. + 5. 
Declare dependencies on other plugins by defining the `requires` method. + 5. Define any custom tasks or settings (see the next section :doc:`Custom-Settings`). + 6. Collect the default settings to apply to a project in a list for the user to add. Optionally override one or more of `AutoPlugin`'s methods to have settings automatically added to user projects. + 8. Publish the project. There is a :doc:`community repository ` available for open source plugins. For more details, including ways of developing plugins, see :doc:`/Extending/Plugins`. For best practices, see :doc:`/Extending/Plugins-Best-Practices`. From 133ba07eb81f07f5b2acb85446bfbcbb348da4a8 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 19 Mar 2014 22:23:25 +0100 Subject: [PATCH 119/148] Unit test for dependency extraction from macro applications Add a unit test which checks whether we capture dependencies introduced by arguments to macros. Those dependencies are special because macros get expanded during type checking and arguments to macros are not visible during regular tree walk. 
--- .../scala/xsbt/DependencySpecification.scala | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/compile/interface/src/test/scala/xsbt/DependencySpecification.scala b/compile/interface/src/test/scala/xsbt/DependencySpecification.scala index 040ad1d6e..ec2f76ed9 100644 --- a/compile/interface/src/test/scala/xsbt/DependencySpecification.scala +++ b/compile/interface/src/test/scala/xsbt/DependencySpecification.scala @@ -65,6 +65,19 @@ class DependencySpecification extends Specification { inheritance('D) === Set('A, 'C) } + "Extracted source dependencies from macro arguments" in { + val sourceDependencies = extractSourceDependenciesFromMacroArgument + val memberRef = sourceDependencies.memberRef + val inheritance = sourceDependencies.inheritance + + memberRef('A) === Set('B, 'C) + inheritance('A) === Set.empty + memberRef('B) === Set.empty + inheritance('B) === Set.empty + memberRef('C) === Set.empty + inheritance('C) === Set.empty + } + private def extractSourceDependenciesPublic: ExtractedSourceDependencies = { val srcA = "class A" val srcB = "class B extends D[A]" @@ -109,4 +122,25 @@ class DependencySpecification extends Specification { compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD) sourceDependencies } + + private def extractSourceDependenciesFromMacroArgument: ExtractedSourceDependencies = { + val srcA = "class A { println(B.printTree(C.foo)) }" + val srcB = """ + |import scala.language.experimental.macros + |import scala.reflect.macros._ + |object B { + | def printTree(arg: Any) = macro printTreeImpl + | def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = { + | val argStr = arg.tree.toString + | val literalStr = c.universe.Literal(c.universe.Constant(argStr)) + | c.Expr[String](literalStr) + | } + |}""".stripMargin + val srcC = "object C { val foo = 1 }" + + val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) + val sourceDependencies = + 
compilerForTesting.extractDependenciesFromSrcs(List(Map('B -> srcB, 'C -> srcC), Map('A -> srcA))) + sourceDependencies + } } From 04e226bd598122f96fe39132fa4c1d9d8b35f523 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 19 Mar 2014 20:57:20 +0100 Subject: [PATCH 120/148] Add scripted test for nested macros Add test analogous to source-dependencies/macro-arg-dep but check if dependencies of nested macro applications are handled properly. Nested macro applications are tricky because we have to look into original (before macro expansion) trees recursively. This test verifies that. --- .../macro-client/Client.scala | 5 ++++ .../macro-client/Foo.scala | 5 ++++ .../macro-client/changes/Foo.scala | 3 ++ .../macro-provider/Provider.scala | 12 ++++++++ .../macro-arg-dep-nested/project/build.scala | 29 +++++++++++++++++++ .../macro-arg-dep-nested/test | 13 +++++++++ .../source-dependencies/macro-arg-dep/test | 7 ++--- 7 files changed, 69 insertions(+), 5 deletions(-) create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala create mode 100644 sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala new file mode 100644 index 000000000..d80fd559e --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala @@ -0,0 +1,5 @@ +package macro + +object Client { + 
Provider.printTree(Provider.printTree(Foo.str)) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala new file mode 100644 index 000000000..1908f0673 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala @@ -0,0 +1,5 @@ +package macro + +object Foo { + def str: String = "abc" +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala new file mode 100644 index 000000000..e3deb0f43 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala @@ -0,0 +1,3 @@ +package macro +object Foo { +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala new file mode 100644 index 000000000..facc4a468 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala @@ -0,0 +1,12 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def printTree(arg: Any) = macro printTreeImpl + def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = { + val argStr = arg.tree.toString + val literalStr = c.universe.Literal(c.universe.Constant(argStr)) + c.Expr[String](literalStr) + } +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala new file mode 100644 index 000000000..a5382240f --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = 
Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ), + incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test new file mode 100644 index 000000000..231939418 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test @@ -0,0 +1,13 @@ +> compile + +# remove `Foo.str` which is an argument to a macro +# (this macro itself that is an argument to another macro) +$ copy-file macro-client/changes/Foo.scala macro-client/Foo.scala + +# we should recompile Foo.scala first and then fail to compile Client.scala due to missing +# `Foo.str` +-> macro-client/compile + +> clean + +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test index e6486db5e..183aa6c49 100644 --- a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test @@ -1,13 +1,10 @@ > compile -# remove `Foo.str` which is an argument to a macro that incremental compiler doesn't see in -# Client.scala because macro has been already expanded - +# remove `Foo.str` which is an argument to a macro $ copy-file macro-client/changes/Foo.scala macro-client/Foo.scala # we should recompile Foo.scala first and then fail to compile Client.scala due to missing -# `Foo.str`; however 
recompilation of Client.scala is never triggered due to missing -# dependency +# `Foo.str` -> macro-client/compile > clean From 910d39f3a965d898e9a40f6b156e602187260838 Mon Sep 17 00:00:00 2001 From: Sergey Andreev Date: Thu, 20 Mar 2014 17:46:42 -0700 Subject: [PATCH 121/148] fixes #1196 --- main/actions/src/main/scala/sbt/Tests.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/main/actions/src/main/scala/sbt/Tests.scala b/main/actions/src/main/scala/sbt/Tests.scala index d2276cf24..fb7f2cdcb 100644 --- a/main/actions/src/main/scala/sbt/Tests.scala +++ b/main/actions/src/main/scala/sbt/Tests.scala @@ -220,7 +220,9 @@ object Tests def processResults(results: Iterable[(String, SuiteResult)]): Output = Output(overall(results.map(_._2.result)), results.toMap, Iterable.empty) def foldTasks(results: Seq[Task[Output]], parallel: Boolean): Task[Output] = - if (parallel) + if (results.isEmpty) + task { Output(TestResult.Passed, Map.empty, Nil) } + else if (parallel) reduced(results.toIndexedSeq, { case (Output(v1, m1, _), Output(v2, m2, _)) => Output(if (v1.id < v2.id) v2 else v1, m1 ++ m2, Iterable.empty) }) @@ -336,4 +338,4 @@ object Tests } } -final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException \ No newline at end of file +final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException From c7dc499fe49b53a30dc03622e32cd8271200821f Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 20 Mar 2014 21:47:49 -0400 Subject: [PATCH 122/148] Updated comments. 
Typo fix per @jozic --- main/src/main/scala/sbt/Plugins.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 63f83397f..12d771bcc 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -16,17 +16,19 @@ trait AutoImport /** An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). -The `requires` and `trigger` method defines the conditions and a method like `projectSettings` defines the settings to add. +The `requires` and `trigger` methods together define the conditions, and a method like `projectSettings` defines the settings to add. Steps for plugin authors: -1. Determine the [[AutoPlugins]]s that, when present (or absent), activate the AutoPlugin. -2. Determine the settings/configurations to automatically inject when activated. +1. Determine if the AutoPlugin should automatically be activated when all requirements are met, or should be opt-in. +2. Determine the [[AutoPlugins]]s that, when present (or absent), act as the requirements for the AutoPlugin. +3. Determine the settings/configurations to that the AutoPlugin injects when activated. For example, the following will automatically add the settings in `projectSettings` to a project that has both the `Web` and `Javascript` plugins enabled. object MyPlugin extends AutoPlugin { def requires = Web && Javascript + def trigger = allRequirements override def projectSettings = Seq(...) } From e95935a7dbc86936fa73bf24f65cfc9efe1ea288 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 21 Mar 2014 01:06:00 -0400 Subject: [PATCH 123/148] Adds conflict check at the end to enforce exclusion requirements. 
--- main/src/main/scala/sbt/Plugins.scala | 18 +++++++--- .../project/auto-plugins-conflict/build.sbt | 4 +++ .../auto-plugins-conflict/project/Q.scala | 35 +++++++++++++++++++ .../project/auto-plugins-conflict/test | 1 + 4 files changed, 54 insertions(+), 4 deletions(-) create mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt create mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala create mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict/test diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 12d771bcc..efe8f28f5 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -160,12 +160,19 @@ object Plugins extends PluginsFunctions log.debug(s"deducing auto plugins based on known facts ${knowlege0.toString} and clauses ${clauses.toString}") Logic.reduce(clauses, (flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match { case Left(problem) => throw AutoPluginException(problem) - case Right(results0) => - log.debug(s" :: deduced result: ${results0}") - val plugins = results0.ordered map { a => + case Right(results) => + log.debug(s" :: deduced result: ${results}") + val selectedAtoms: List[Atom] = results.ordered + val selectedPlugins = selectedAtoms map { a => byAtomMap.getOrElse(a, throw AutoPluginException(s"${a} was not found in atom map.")) } - val retval = topologicalSort(plugins, log) + val forbidden: Set[AutoPlugin] = (selectedPlugins flatMap { Plugins.asExclusions }).toSet + val c = selectedPlugins.toSet & forbidden + if (!c.isEmpty) { + val listString = (c map {_.label}).mkString(", ") + throw AutoPluginException(s"Contradiction in selected plugins. 
These plguins were both included and excluded: ${listString}") + } + val retval = topologicalSort(selectedPlugins, log) log.debug(s" :: sorted deduced result: ${retval.toString}") retval } @@ -245,6 +252,9 @@ object Plugins extends PluginsFunctions private[sbt] def asRequirements(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect { case x: AutoPlugin => x } + private[sbt] def asExclusions(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect { + case Exclude(x) => x + } private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match { case And(ns) => convertAll(ns) case b: Basic => convertBasic(b) :: Nil diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt b/sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt new file mode 100644 index 000000000..aede25923 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt @@ -0,0 +1,4 @@ +// with S selected, Q is loaded automatically, which in turn selects R +lazy val projA = project.addPlugins(S) + +check := () diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala new file mode 100644 index 000000000..405900f11 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala @@ -0,0 +1,35 @@ + import sbt._ + +object AI extends AutoImport +{ + trait EmptyAutoPlugin extends AutoPlugin { + def requires = empty + def trigger = noTrigger + } + object A extends EmptyAutoPlugin + object B extends EmptyAutoPlugin + + lazy val check = settingKey[Unit]("Verifies settings are as they should be.") +} + + import AI._ + +object Q extends AutoPlugin +{ + def requires: Plugins = A && B + def trigger = allRequirements +} + +object R extends AutoPlugin +{ + def requires = Q + def trigger = allRequirements +} + +// This is an opt-in plugin with a requirement +// Unless explicitly loaded by the build user, this will not be activated. 
+object S extends AutoPlugin +{ + def requires = Q && !R + def trigger = noTrigger +} diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict/test b/sbt/src/sbt-test/project/auto-plugins-conflict/test new file mode 100644 index 000000000..3c2c89325 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins-conflict/test @@ -0,0 +1 @@ +-> check From 49faf8a7528f646fbfa93a7ca21d10f28e1edffe Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 21 Mar 2014 17:15:33 -0400 Subject: [PATCH 124/148] Removes Changes entry on AutoPlugin and AddSettings --- src/sphinx/Community/Changes.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/sphinx/Community/Changes.rst b/src/sphinx/Community/Changes.rst index f08a8fa2d..07e518a3e 100644 --- a/src/sphinx/Community/Changes.rst +++ b/src/sphinx/Community/Changes.rst @@ -4,8 +4,6 @@ Changes 0.13.1 to 0.13.2 ~~~~~~~~~~~~~~~~ -- Improved the control over included settings in ``Addsettings``. Can now control when settings in ``project/*.scala`` files are included. -- Adding new ``AutoPlugin`` feature, and associated ``plugins`` command. - Adding new name-hashing feature to incremental compiler. Alters how scala dependencies are tracked, reducing number of recompiles necessary. - Added the ability to launch servers via the sbt-launcher. - Added ``.previous`` feature on tasks which can load the pervious value. 
From d692191c24997f346eba1dde858770f89a463f51 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 21 Mar 2014 22:45:32 -0400 Subject: [PATCH 125/148] More plugins conflict test --- .../project/auto-plugins-conflict2/build.sbt | 7 +++ .../auto-plugins-conflict2/project/Q.scala | 58 +++++++++++++++++++ .../project/auto-plugins-conflict2/test | 1 + 3 files changed, 66 insertions(+) create mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt create mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala create mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict2/test diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt b/sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt new file mode 100644 index 000000000..36b8bc100 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt @@ -0,0 +1,7 @@ +// with S selected, Q is loaded automatically, which in turn selects R +lazy val projA = project.addPlugins(S) + +// S and T have direct conflicts of dependent plugins. 
+lazy val projB = project.addPlugins(S, T) + +check := () diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala new file mode 100644 index 000000000..b2acd024a --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala @@ -0,0 +1,58 @@ + import sbt._ + +object AI extends AutoImport +{ + trait EmptyAutoPlugin extends AutoPlugin { + def requires = empty + def trigger = noTrigger + } + object A extends EmptyAutoPlugin { + val a = settingKey[String]("") + override def projectSettings = Seq(a := "a") + } + object B extends EmptyAutoPlugin { + val b = settingKey[String]("") + override def projectSettings = Seq(b := "b") + } + + lazy val check = settingKey[Unit]("Verifies settings are as they should be.") +} + + import AI._ + +object Q extends AutoPlugin +{ + def requires: Plugins = A && B + def trigger = allRequirements + val q = settingKey[String]("") + override def projectSettings = Seq(q := "q") +} + +object R extends AutoPlugin +{ + def requires = Q + def trigger = allRequirements + val r = settingKey[String]("") + override def projectSettings = Seq(r := "r") +} + +// This is an opt-in plugin with a requirement +// Unless explicitly loaded by the build user, this will not be activated. +object S extends AutoPlugin +{ + def requires = Q && !R + def trigger = noTrigger + val s = settingKey[String]("") + override def projectSettings = Seq(s := "s") +} + +// This is an opt-in plugin with a requirement +// Unless explicitly loaded by the build user, this will not be activated. 
+object T extends AutoPlugin +{ + def requires = A && !Q + def trigger = noTrigger + + val t = settingKey[String]("") + override def projectSettings = Seq(t := "T") +} diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict2/test b/sbt/src/sbt-test/project/auto-plugins-conflict2/test new file mode 100644 index 000000000..3c2c89325 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins-conflict2/test @@ -0,0 +1 @@ +-> check From f58fbb8d7d0bcba0e32353094f6fb34ff974d974 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 21 Mar 2014 23:53:06 -0400 Subject: [PATCH 126/148] CustomPomParser compatibility with 0.13.1. #1191 #1192 --- ivy/src/main/scala/sbt/CustomPomParser.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/ivy/src/main/scala/sbt/CustomPomParser.scala b/ivy/src/main/scala/sbt/CustomPomParser.scala index dc56866bb..871c1f07c 100644 --- a/ivy/src/main/scala/sbt/CustomPomParser.scala +++ b/ivy/src/main/scala/sbt/CustomPomParser.scala @@ -57,8 +57,14 @@ object CustomPomParser private[this] def transformedByThisVersion(md: ModuleDescriptor): Boolean = { + val oldTransformedHashKey = "sbtTransformHash" val extraInfo = md.getExtraInfo - extraInfo != null && extraInfo.get(TransformedHashKey) == TransformHash + // sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both + Option(extraInfo).isDefined && + ((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match { + case Some(TransformHash) => true + case _ => false + }) } private[this] def defaultTransformImpl(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = From f43daecee314de082c2deabd5f87d0fb45de8b77 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 22 Mar 2014 02:46:17 -0400 Subject: [PATCH 127/148] Use tabs --- .../auto-plugins-conflict2/project/Q.scala | 62 +++++++++---------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git 
a/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala index b2acd024a..91f29c00d 100644 --- a/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala @@ -1,58 +1,58 @@ - import sbt._ + import sbt._ object AI extends AutoImport { - trait EmptyAutoPlugin extends AutoPlugin { - def requires = empty - def trigger = noTrigger - } - object A extends EmptyAutoPlugin { - val a = settingKey[String]("") - override def projectSettings = Seq(a := "a") - } - object B extends EmptyAutoPlugin { - val b = settingKey[String]("") - override def projectSettings = Seq(b := "b") - } + trait EmptyAutoPlugin extends AutoPlugin { + def requires = empty + def trigger = noTrigger + } + object A extends EmptyAutoPlugin { + val a = settingKey[String]("") + override def projectSettings = Seq(a := "a") + } + object B extends EmptyAutoPlugin { + val b = settingKey[String]("") + override def projectSettings = Seq(b := "b") + } - lazy val check = settingKey[Unit]("Verifies settings are as they should be.") + lazy val check = settingKey[Unit]("Verifies settings are as they should be.") } - import AI._ + import AI._ object Q extends AutoPlugin { - def requires: Plugins = A && B - def trigger = allRequirements - val q = settingKey[String]("") - override def projectSettings = Seq(q := "q") + def requires: Plugins = A && B + def trigger = allRequirements + val q = settingKey[String]("") + override def projectSettings = Seq(q := "q") } object R extends AutoPlugin { - def requires = Q - def trigger = allRequirements - val r = settingKey[String]("") - override def projectSettings = Seq(r := "r") + def requires = Q + def trigger = allRequirements + val r = settingKey[String]("") + override def projectSettings = Seq(r := "r") } // This is an opt-in plugin with a requirement // Unless explicitly loaded by the build user, this will not be activated. 
object S extends AutoPlugin { - def requires = Q && !R - def trigger = noTrigger - val s = settingKey[String]("") - override def projectSettings = Seq(s := "s") + def requires = Q && !R + def trigger = noTrigger + val s = settingKey[String]("") + override def projectSettings = Seq(s := "s") } // This is an opt-in plugin with a requirement // Unless explicitly loaded by the build user, this will not be activated. object T extends AutoPlugin { - def requires = A && !Q - def trigger = noTrigger + def requires = A && !Q + def trigger = noTrigger - val t = settingKey[String]("") - override def projectSettings = Seq(t := "T") + val t = settingKey[String]("") + override def projectSettings = Seq(t := "T") } From 5b1c33dd6e818c08a80822532be8ecb1c5d00d9e Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 22 Mar 2014 02:47:11 -0400 Subject: [PATCH 128/148] Added conflict report and unit tests --- main/src/main/scala/sbt/Plugins.scala | 32 +++++++-- main/src/test/scala/PluginsTest.scala | 96 +++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 4 deletions(-) create mode 100644 main/src/test/scala/PluginsTest.scala diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index efe8f28f5..cb7bcac6e 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -64,6 +64,8 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions val label: String = getClass.getName.stripSuffix("$") + override def toString: String = label + /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ def projectConfigurations: Seq[Configuration] = Nil @@ -169,8 +171,7 @@ object Plugins extends PluginsFunctions val forbidden: Set[AutoPlugin] = (selectedPlugins flatMap { Plugins.asExclusions }).toSet val c = selectedPlugins.toSet & forbidden if (!c.isEmpty) { - val listString = (c map {_.label}).mkString(", ") - throw AutoPluginException(s"Contradiction in selected plugins. 
These plguins were both included and excluded: ${listString}") + exlusionConflictError(requestedPlugins, selectedPlugins, c.toSeq sortBy {_.label}) } val retval = topologicalSort(selectedPlugins, log) log.debug(s" :: sorted deduced result: ${retval.toString}") @@ -193,7 +194,7 @@ object Plugins extends PluginsFunctions doSort(roots, nonRoots, ns.size * ns.size + 1) } private[sbt] def translateMessage(e: LogicException) = e match { - case ic: InitialContradictions => s"Contradiction in selected plugins. These plguins were both included and excluded: ${literalsString(ic.literals.toSeq)}" + case ic: InitialContradictions => s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(ic.literals.toSeq)}" case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. 
The problematic cycle is: ${literalsString(cn.cycle)}" } @@ -208,6 +209,29 @@ object Plugins extends PluginsFunctions val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" throw AutoPluginException(message) } + private[this] def exlusionConflictError(requested: Plugins, selected: Seq[AutoPlugin], conflicting: Seq[AutoPlugin]) { + def listConflicts(ns: Seq[AutoPlugin]) = (ns map { c => + val reasons = (if (flatten(requested) contains c) List("requested") + else Nil) ++ + (if (c.requires != empty && c.trigger == allRequirements) List(s"enabled by ${c.requires.toString}") + else Nil) ++ + { + val reqs = selected filter { x => asRequirements(x) contains c } + if (!reqs.isEmpty) List(s"""required by ${reqs.mkString(", ")}""") + else Nil + } ++ + { + val exs = selected filter { x => asExclusions(x) contains c } + if (!exs.isEmpty) List(s"""excluded by ${exs.mkString(", ")}""") + else Nil + } + s""" - conflict: ${c.label} is ${reasons.mkString("; ")}""" + }).mkString("\n") + throw AutoPluginException(s"""Contradiction in enabled plugins: + - requested: ${requested.toString} + - enabled: ${selected.mkString(", ")} +${listConflicts(conflicting)}""") + } private[sbt] final object Empty extends Plugins { def &&(o: Basic): Plugins = o @@ -225,7 +249,7 @@ object Plugins extends PluginsFunctions } private[sbt] final case class And(plugins: List[Basic]) extends Plugins { def &&(o: Basic): Plugins = And(o :: plugins) - override def toString = plugins.mkString(", ") + override def toString = plugins.mkString(" && ") } private[sbt] def and(a: Plugins, b: Plugins) = b match { case Empty => a diff --git a/main/src/test/scala/PluginsTest.scala b/main/src/test/scala/PluginsTest.scala new file mode 100644 index 000000000..a558854f1 --- /dev/null +++ b/main/src/test/scala/PluginsTest.scala @@ -0,0 +1,96 @@ +package sbt + +import java.io.File +import org.specs2._ +import mutable.Specification + +object PluginsTest extends Specification +{ + import AI._ + 
+ + "Auto plugin" should { + "enable plugins with trigger=allRequirements AND requirements met" in { + deducePlugin(A && B, log) must contain(Q) + } + "enable transitive plugins with trigger=allRequirements AND requirements met" in { + deducePlugin(A && B, log) must contain(R) + } + "order enabled plugins after required plugins" in { + val ns = deducePlugin(A && B, log) + ( (ns indexOf Q) must beGreaterThan(ns indexOf A) ) and + ( (ns indexOf Q) must beGreaterThan(ns indexOf B) ) and + ( (ns indexOf R) must beGreaterThan(ns indexOf A) ) and + ( (ns indexOf R) must beGreaterThan(ns indexOf B) ) and + ( (ns indexOf R) must beGreaterThan(ns indexOf Q) ) + } + "not enable plugins with trigger=allRequirements but conflicting requirements" in { + deducePlugin(A && B, log) must not contain(S) + } + "enable plugins that are required by the requested plugins" in { + val ns = deducePlugin(Q, log) + (ns must contain(A)) and + (ns must contain(B)) + } + "throw an AutoPluginException on conflicting requirements" in { + deducePlugin(S, log) must throwAn[AutoPluginException](message = """Contradiction in enabled plugins: + - requested: sbt.AI\$S + - enabled: sbt.AI\$S, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B, sbt.AI\$A + - conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$S""") + } + "generates a detailed report on conflicting requirements" in { + deducePlugin(T && U, log) must throwAn[AutoPluginException](message = """Contradiction in enabled plugins: + - requested: sbt.AI\$T && sbt.AI\$U + - enabled: sbt.AI\$U, sbt.AI\$T, sbt.AI\$A, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B + - conflict: sbt.AI\$Q is enabled by sbt.AI\$A && sbt.AI\$B; required by sbt.AI\$T, sbt.AI\$R; excluded by sbt.AI\$U + - conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$T""") + } + } +} + +object AI +{ + lazy val allPlugins: List[AutoPlugin] = List(A, B, Q, R, S, T, U) + lazy val deducePlugin = Plugins.deducer(allPlugins) + lazy val log = Logger.Null + + trait EmptyAutoPlugin extends AutoPlugin { + 
def requires = empty + def trigger = noTrigger + } + object A extends EmptyAutoPlugin + object B extends EmptyAutoPlugin + + object Q extends AutoPlugin + { + def requires: Plugins = A && B + def trigger = allRequirements + } + + object R extends AutoPlugin + { + def requires = Q + def trigger = allRequirements + } + + object S extends AutoPlugin + { + def requires = Q && !R + def trigger = allRequirements + } + + // This is an opt-in plugin with a requirement + // Unless explicitly loaded by the build user, this will not be activated. + object T extends AutoPlugin + { + def requires = Q && !R + def trigger = noTrigger + } + + // This is an opt-in plugin with a requirement + // Unless explicitly loaded by the build user, this will not be activated. + object U extends AutoPlugin + { + def requires = A && !Q + def trigger = noTrigger + } +} From d697c10950af977cf20ff8772972ed86130286be Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Mon, 24 Mar 2014 18:59:14 -0400 Subject: [PATCH 129/148] Hide unary_! 
operator from API --- main/src/main/scala/sbt/Plugins.scala | 2 +- .../project/auto-plugins-conflict/build.sbt | 4 -- .../auto-plugins-conflict/project/Q.scala | 35 ----------- .../project/auto-plugins-conflict/test | 1 - .../project/auto-plugins-conflict2/build.sbt | 7 --- .../auto-plugins-conflict2/project/Q.scala | 58 ------------------- .../project/auto-plugins-conflict2/test | 1 - .../project/auto-plugins/project/Q.scala | 2 +- 8 files changed, 2 insertions(+), 108 deletions(-) delete mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt delete mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala delete mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict/test delete mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt delete mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala delete mode 100644 sbt/src/sbt-test/project/auto-plugins-conflict2/test diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index cb7bcac6e..925fb6916 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -82,7 +82,7 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions // TODO?: def commands: Seq[Command] - def unary_! : Exclude = Exclude(this) + private[sbt] def unary_! : Exclude = Exclude(this) /** If this plugin does not have any requirements, it means it is actually a root plugin. 
*/ diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt b/sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt deleted file mode 100644 index aede25923..000000000 --- a/sbt/src/sbt-test/project/auto-plugins-conflict/build.sbt +++ /dev/null @@ -1,4 +0,0 @@ -// with S selected, Q is loaded automatically, which in turn selects R -lazy val projA = project.addPlugins(S) - -check := () diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala deleted file mode 100644 index 405900f11..000000000 --- a/sbt/src/sbt-test/project/auto-plugins-conflict/project/Q.scala +++ /dev/null @@ -1,35 +0,0 @@ - import sbt._ - -object AI extends AutoImport -{ - trait EmptyAutoPlugin extends AutoPlugin { - def requires = empty - def trigger = noTrigger - } - object A extends EmptyAutoPlugin - object B extends EmptyAutoPlugin - - lazy val check = settingKey[Unit]("Verifies settings are as they should be.") -} - - import AI._ - -object Q extends AutoPlugin -{ - def requires: Plugins = A && B - def trigger = allRequirements -} - -object R extends AutoPlugin -{ - def requires = Q - def trigger = allRequirements -} - -// This is an opt-in plugin with a requirement -// Unless explicitly loaded by the build user, this will not be activated. 
-object S extends AutoPlugin -{ - def requires = Q && !R - def trigger = noTrigger -} diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict/test b/sbt/src/sbt-test/project/auto-plugins-conflict/test deleted file mode 100644 index 3c2c89325..000000000 --- a/sbt/src/sbt-test/project/auto-plugins-conflict/test +++ /dev/null @@ -1 +0,0 @@ --> check diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt b/sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt deleted file mode 100644 index 36b8bc100..000000000 --- a/sbt/src/sbt-test/project/auto-plugins-conflict2/build.sbt +++ /dev/null @@ -1,7 +0,0 @@ -// with S selected, Q is loaded automatically, which in turn selects R -lazy val projA = project.addPlugins(S) - -// S and T have direct conflicts of dependent plugins. -lazy val projB = project.addPlugins(S, T) - -check := () diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala deleted file mode 100644 index 91f29c00d..000000000 --- a/sbt/src/sbt-test/project/auto-plugins-conflict2/project/Q.scala +++ /dev/null @@ -1,58 +0,0 @@ - import sbt._ - -object AI extends AutoImport -{ - trait EmptyAutoPlugin extends AutoPlugin { - def requires = empty - def trigger = noTrigger - } - object A extends EmptyAutoPlugin { - val a = settingKey[String]("") - override def projectSettings = Seq(a := "a") - } - object B extends EmptyAutoPlugin { - val b = settingKey[String]("") - override def projectSettings = Seq(b := "b") - } - - lazy val check = settingKey[Unit]("Verifies settings are as they should be.") -} - - import AI._ - -object Q extends AutoPlugin -{ - def requires: Plugins = A && B - def trigger = allRequirements - val q = settingKey[String]("") - override def projectSettings = Seq(q := "q") -} - -object R extends AutoPlugin -{ - def requires = Q - def trigger = allRequirements - val r = settingKey[String]("") - override def projectSettings = Seq(r := "r") -} - 
-// This is an opt-in plugin with a requirement -// Unless explicitly loaded by the build user, this will not be activated. -object S extends AutoPlugin -{ - def requires = Q && !R - def trigger = noTrigger - val s = settingKey[String]("") - override def projectSettings = Seq(s := "s") -} - -// This is an opt-in plugin with a requirement -// Unless explicitly loaded by the build user, this will not be activated. -object T extends AutoPlugin -{ - def requires = A && !Q - def trigger = noTrigger - - val t = settingKey[String]("") - override def projectSettings = Seq(t := "T") -} diff --git a/sbt/src/sbt-test/project/auto-plugins-conflict2/test b/sbt/src/sbt-test/project/auto-plugins-conflict2/test deleted file mode 100644 index 3c2c89325..000000000 --- a/sbt/src/sbt-test/project/auto-plugins-conflict2/test +++ /dev/null @@ -1 +0,0 @@ --> check diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index 84cef307f..74d8b5f6d 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -59,7 +59,7 @@ object Q extends AutoPlugin object R extends AutoPlugin { // NOTE - Only plugins themselves support exclusions... - def requires = Q && !D + def requires = Q def trigger = allRequirements override def projectSettings = Seq( From 2c654b2d903c92fcf7dd746cd1ff15ec7b6aa81f Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 20 Mar 2014 21:15:40 -0400 Subject: [PATCH 130/148] Unifies AutoPlugin and AutoImport. Fixes #1188 * AutoImport trait is subsumed by def autoImport method under AutoPlugin class. * When def autoImport is overridden by a lazy val or a val, *.sbt automatically imports autoImport._. 
--- main/src/main/scala/sbt/BuildStructure.scala | 14 ++++++++++---- main/src/main/scala/sbt/Keys.scala | 2 +- main/src/main/scala/sbt/Main.scala | 2 +- main/src/main/scala/sbt/PluginDiscovery.scala | 16 ++++++++-------- main/src/main/scala/sbt/Plugins.scala | 19 ++++++++++++++++--- main/src/main/scala/sbt/PluginsDebug.scala | 2 +- .../sbt-test/project/auto-plugins/build.sbt | 2 ++ .../project/auto-plugins/project/Q.scala | 11 +++++++++-- .../binary-plugin/changes/define/A.scala | 14 ++++++++++---- 9 files changed, 58 insertions(+), 24 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 615a80771..dcc0e0b58 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -89,16 +89,22 @@ final class DetectedModules[T](val modules: Seq[(String, T)]) def values: Seq[T] = modules.map(_._2) } +/** Auto-detected auto plugin. */ +case class DetectedAutoPlugin(val name: String, val value: AutoPlugin, val hasStableAutoImport: Boolean) + /** Auto-discovered modules for the build definition project. These include modules defined in build definition sources * as well as modules in binary dependencies. * * @param builds The [[Build]]s detected in the build definition. This does not include the default [[Build]] that sbt creates if none is defined. */ -final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoImports: DetectedModules[AutoImport], val autoPlugins: DetectedModules[AutoPlugin], val builds: DetectedModules[Build]) +final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugins: Seq[DetectedAutoPlugin], val builds: DetectedModules[Build]) { /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. 
*/ - lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ autoImports.names) - + lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ + (autoPlugins flatMap { case DetectedAutoPlugin(name, ap, hasAutoImport) => + if (hasAutoImport) Some(name + ".autoImport") + else None + })) /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.values.toList) } @@ -115,7 +121,7 @@ final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader @deprecated("Use the primary constructor.", "0.13.2") def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = this(base, pluginData, loader, - new DetectedPlugins(new DetectedModules(pluginNames zip plugins), new DetectedModules(Nil), new DetectedModules(Nil), new DetectedModules(Nil)) + new DetectedPlugins(new DetectedModules(pluginNames zip plugins), Nil, new DetectedModules(Nil)) ) @deprecated("Use detected.plugins.values.", "0.13.2") diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index ceb7813ed..53ccdfc97 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -131,7 +131,7 @@ object Keys val crossVersion = SettingKey[CrossVersion]("cross-version", "Configures handling of the Scala version when cross-building.", CSetting) val classpathOptions = SettingKey[ClasspathOptions]("classpath-options", "Configures handling of Scala classpaths.", DSetting) val definedSbtPlugins = TaskKey[Set[String]]("defined-sbt-plugins", "The set of names of Plugin implementations defined by this project.", CTask) - val discoveredSbtPlugins = TaskKey[PluginDiscovery.DiscoveredNames]("discovered-sbt-plugins", "The names of sbt plugin-related modules (modules that extend Build, Plugin, AutoImport, AutoPlugin) defined by 
this project.", CTask) + val discoveredSbtPlugins = TaskKey[PluginDiscovery.DiscoveredNames]("discovered-sbt-plugins", "The names of sbt plugin-related modules (modules that extend Build, Plugin, AutoPlugin) defined by this project.", CTask) val sbtPlugin = SettingKey[Boolean]("sbt-plugin", "If true, enables adding sbt as a dependency and auto-generation of the plugin descriptor file.", BMinusSetting) val printWarnings = TaskKey[Unit]("print-warnings", "Shows warnings from compilation, including ones that weren't printed initially.", BPlusTask) val fileInputOptions = SettingKey[Seq[String]]("file-input-options", "Options that take file input, which may invalidate the cache.", CSetting) diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index c44bd5a1b..7cd85867d 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -125,7 +125,7 @@ object BuiltinCommands def aboutPlugins(e: Extracted): String = { - def list(b: BuildUnit) = b.plugins.detected.autoPlugins.values.map(_.label) ++ b.plugins.detected.plugins.names + def list(b: BuildUnit) = b.plugins.detected.autoPlugins.map(_.value.label) ++ b.plugins.detected.plugins.names val allPluginNames = e.structure.units.values.flatMap(u => list(u.unit)).toSeq.distinct if(allPluginNames.isEmpty) "" else allPluginNames.mkString("Available Plugins: ", ", ", "") } diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala index ae945f78a..b28fed112 100644 --- a/main/src/main/scala/sbt/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -15,12 +15,11 @@ object PluginDiscovery final val AutoPlugins = "sbt/sbt.autoplugins" final val Plugins = "sbt/sbt.plugins" final val Builds = "sbt/sbt.builds" - final val AutoImports = "sbt/sbt.autoimports" } - /** Names of top-level modules that subclass sbt plugin-related classes: [[Plugin]], [[AutoImport]], [[AutoPlugin]], and [[Build]]. 
*/ - final class DiscoveredNames(val plugins: Seq[String], val autoImports: Seq[String], val autoPlugins: Seq[String], val builds: Seq[String]) + /** Names of top-level modules that subclass sbt plugin-related classes: [[Plugin]], [[AutoPlugin]], and [[Build]]. */ + final class DiscoveredNames(val plugins: Seq[String], val autoPlugins: Seq[String], val builds: Seq[String]) - def emptyDiscoveredNames: DiscoveredNames = new DiscoveredNames(Nil, Nil, Nil, Nil) + def emptyDiscoveredNames: DiscoveredNames = new DiscoveredNames(Nil, Nil, Nil) /** Discovers and loads the sbt-plugin-related top-level modules from the classpath and source analysis in `data` and using the provided class `loader`. */ def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = @@ -35,8 +34,10 @@ object PluginDiscovery "sbt.plugins.GlobalModule" -> sbt.plugins.GlobalModule ) val detectedAutoPugins = discover[AutoPlugin](AutoPlugins) - val allAutoPlugins = new DetectedModules(defaultAutoPlugins ++ detectedAutoPugins.modules) - new DetectedPlugins(discover[Plugin](Plugins), discover[AutoImport](AutoImports), allAutoPlugins, discover[Build](Builds)) + val allAutoPlugins = (defaultAutoPlugins ++ detectedAutoPugins.modules) map { case (name, value) => + DetectedAutoPlugin(name, value, sbt.Plugins.hasStableAutoImport(value, loader)) + } + new DetectedPlugins(discover[Plugin](Plugins), allAutoPlugins, discover[Build](Builds)) } /** Discovers the sbt-plugin-related top-level modules from the provided source `analysis`. 
*/ @@ -44,7 +45,7 @@ object PluginDiscovery { def discover[T](implicit mf: reflect.ClassManifest[T]): Seq[String] = sourceModuleNames(analysis, mf.erasure.getName) - new DiscoveredNames(discover[Plugin], discover[AutoImport], discover[AutoPlugin], discover[Build]) + new DiscoveredNames(discover[Plugin], discover[AutoPlugin], discover[Build]) } // TODO: for 0.14.0, consider consolidating into a single file, which would make the classpath search 4x faster @@ -56,7 +57,6 @@ object PluginDiscovery writeDescriptor(names.plugins, dir, Plugins) :: writeDescriptor(names.autoPlugins, dir, AutoPlugins) :: writeDescriptor(names.builds, dir, Builds) :: - writeDescriptor(names.autoImports, dir, AutoImports) :: Nil files.flatMap(_.toList) } diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 925fb6916..3da012b1e 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -11,9 +11,6 @@ TODO: import Plugins._ import annotation.tailrec -/** Marks a top-level object so that sbt will wildcard import it for .sbt files, `consoleProject`, and `set`. */ -trait AutoImport - /** An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). The `requires` and `trigger` methods together define the conditions, and a method like `projectSettings` defines the settings to add. @@ -66,6 +63,10 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions override def toString: String = label + /** When this method is overridden with a val or a lazy val, `autoImport._` is automatically + * imported to *.sbt scripts. 
*/ + def autoImport: Any = () + /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ def projectConfigurations: Seq[Configuration] = Nil @@ -307,4 +308,16 @@ ${listConflicts(conflicting)}""") case Exclude(a) => !model(a) case ap: AutoPlugin => model(ap) } + + private[sbt] def hasStableAutoImport(ap: AutoPlugin, loader: ClassLoader): Boolean = { + import reflect.runtime.{universe => ru} + import util.control.Exception.catching + val m = ru.runtimeMirror(loader) + val im = m.reflect(ap) + val fmOpt = catching(classOf[ScalaReflectionException]) opt { + val autoImportSym = im.symbol.asType.toType.declaration(ru.newTermName("autoImport")).asTerm + im.reflectField(autoImportSym) + } + fmOpt.isDefined + } } \ No newline at end of file diff --git a/main/src/main/scala/sbt/PluginsDebug.scala b/main/src/main/scala/sbt/PluginsDebug.scala index 7d9b2670a..bace7b52b 100644 --- a/main/src/main/scala/sbt/PluginsDebug.scala +++ b/main/src/main/scala/sbt/PluginsDebug.scala @@ -108,7 +108,7 @@ private[sbt] object PluginsDebug structure.units.values.toList.flatMap(availableAutoPlugins).map(plugin => (plugin.label, plugin)).toMap } private[this] def availableAutoPlugins(build: LoadedBuildUnit): Seq[AutoPlugin] = - build.unit.plugins.detected.autoPlugins.values + build.unit.plugins.detected.autoPlugins map {_.value} def help(plugin: AutoPlugin, s: State): String = { diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt index 7039ed235..dbb06d44c 100644 --- a/sbt/src/sbt-test/project/auto-plugins/build.sbt +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -1,3 +1,5 @@ +import sbttest.{Q} + // disablePlugins(Q) will prevent R from being auto-added lazy val projA = project.addPlugins(A, B).disablePlugins(Q) diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index 74d8b5f6d..bb190091e 100644 --- 
a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -1,8 +1,10 @@ +package sbttest // you need package http://stackoverflow.com/questions/9822008/ + import sbt._ import sbt.Keys.{name, resolvedScoped} import java.util.concurrent.atomic.{AtomicInteger => AInt} -object AI extends AutoImport +object Imports { trait EmptyAutoPlugin extends AutoPlugin { def requires = empty @@ -21,7 +23,12 @@ object AI extends AutoImport lazy val check = settingKey[Unit]("Verifies settings are as they should be.") } - import AI._ +object X extends AutoPlugin { + override lazy val autoImport = Imports + def select = Plugins.empty +} + + import Imports._ object D extends AutoPlugin { def requires: Plugins = E diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index 99cd6d527..88065f085 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -1,16 +1,22 @@ +package sbttest // you need package http://stackoverflow.com/questions/9822008/ + import sbt._ import Keys._ - -object C extends AutoImport { +object Imports { object bN extends AutoPlugin { def requires = empty def trigger = allRequirements } - lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") + lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") } - import C._ +object C extends AutoPlugin { + override lazy val autoImport = Imports + def select = Plugins.empty +} + + import Imports._ object A extends AutoPlugin { def requires = bN From cc80d216ab42584d1115582252e4c6b73a45d1f9 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Mon, 24 Mar 2014 19:25:17 -0400 Subject: [PATCH 131/148] Adjusting to 0.13 changes --- main/src/main/scala/sbt/BuildStructure.scala | 2 +- 
sbt/src/sbt-test/project/auto-plugins/build.sbt | 2 +- sbt/src/sbt-test/project/auto-plugins/project/Q.scala | 3 ++- sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala | 3 ++- src/main/conscript/sbt/launchconfig | 2 +- src/main/conscript/scalas/launchconfig | 2 +- src/main/conscript/screpl/launchconfig | 2 +- 7 files changed, 9 insertions(+), 7 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index dcc0e0b58..f5c3200c7 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -106,7 +106,7 @@ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugin else None })) /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ - lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.values.toList) + lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map {_.value}) } /** The built and loaded build definition project. 
diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt index dbb06d44c..de4f386e3 100644 --- a/sbt/src/sbt-test/project/auto-plugins/build.sbt +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -1,4 +1,4 @@ -import sbttest.{Q} +import sbttest.{Q, S} // disablePlugins(Q) will prevent R from being auto-added lazy val projA = project.addPlugins(A, B).disablePlugins(Q) diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index bb190091e..044d7ab4d 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -25,7 +25,8 @@ object Imports object X extends AutoPlugin { override lazy val autoImport = Imports - def select = Plugins.empty + def requires = Plugins.empty + def trigger = noTrigger } import Imports._ diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index 88065f085..b33a8e8d2 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -13,7 +13,8 @@ object Imports { object C extends AutoPlugin { override lazy val autoImport = Imports - def select = Plugins.empty + def requires = empty + def trigger = noTrigger } import Imports._ diff --git a/src/main/conscript/sbt/launchconfig b/src/main/conscript/sbt/launchconfig index 788738650..754d63c95 100644 --- a/src/main/conscript/sbt/launchconfig +++ b/src/main/conscript/sbt/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.2-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.3-SNAPSHOT]} class: sbt.xMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} diff --git a/src/main/conscript/scalas/launchconfig 
b/src/main/conscript/scalas/launchconfig index 75c4138ed..5710eac15 100644 --- a/src/main/conscript/scalas/launchconfig +++ b/src/main/conscript/scalas/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.2-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.3-SNAPSHOT]} class: sbt.ScriptMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} diff --git a/src/main/conscript/screpl/launchconfig b/src/main/conscript/screpl/launchconfig index 18fbfa911..c0e1d473a 100644 --- a/src/main/conscript/screpl/launchconfig +++ b/src/main/conscript/screpl/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.2-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.3-SNAPSHOT]} class: sbt.ConsoleMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} From ff77d0b0f2794b4b1f4e095a5f6b6037b2aedcca Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Mon, 24 Mar 2014 23:04:22 -0400 Subject: [PATCH 132/148] Remove def autoImport: Any from AutoPlugin. --- main/src/main/scala/sbt/BuildStructure.scala | 3 ++- main/src/main/scala/sbt/PluginDiscovery.scala | 2 +- main/src/main/scala/sbt/Plugins.scala | 26 +++++++++++-------- .../project/auto-plugins/project/Q.scala | 2 +- .../binary-plugin/changes/define/A.scala | 18 ++++++------- 5 files changed, 27 insertions(+), 24 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index f5c3200c7..da24a2444 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -90,7 +90,7 @@ final class DetectedModules[T](val modules: Seq[(String, T)]) } /** Auto-detected auto plugin. 
*/ -case class DetectedAutoPlugin(val name: String, val value: AutoPlugin, val hasStableAutoImport: Boolean) +case class DetectedAutoPlugin(val name: String, val value: AutoPlugin, val hasAutoImport: Boolean) /** Auto-discovered modules for the build definition project. These include modules defined in build definition sources * as well as modules in binary dependencies. @@ -105,6 +105,7 @@ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugin if (hasAutoImport) Some(name + ".autoImport") else None })) + /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map {_.value}) } diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala index b28fed112..54939d6b9 100644 --- a/main/src/main/scala/sbt/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -35,7 +35,7 @@ object PluginDiscovery ) val detectedAutoPugins = discover[AutoPlugin](AutoPlugins) val allAutoPlugins = (defaultAutoPlugins ++ detectedAutoPugins.modules) map { case (name, value) => - DetectedAutoPlugin(name, value, sbt.Plugins.hasStableAutoImport(value, loader)) + DetectedAutoPlugin(name, value, sbt.Plugins.hasAutoImportGetter(value, loader)) } new DetectedPlugins(discover[Plugin](Plugins), allAutoPlugins, discover[Build](Builds)) } diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 3da012b1e..9451940c3 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -19,14 +19,19 @@ Steps for plugin authors: 1. Determine if the AutoPlugin should automatically be activated when all requirements are met, or should be opt-in. 2. Determine the [[AutoPlugins]]s that, when present (or absent), act as the requirements for the AutoPlugin. 3. 
Determine the settings/configurations to that the AutoPlugin injects when activated. +4. Determine the keys and other names to be automatically imported to *.sbt scripts. For example, the following will automatically add the settings in `projectSettings` to a project that has both the `Web` and `Javascript` plugins enabled. - object MyPlugin extends AutoPlugin { + object Plugin extends sbt.AutoPlugin { def requires = Web && Javascript def trigger = allRequirements override def projectSettings = Seq(...) + + object autoImport { + lazy val obfuscate = taskKey[Seq[File]]("Obfuscates the source.") + } } Steps for users: @@ -43,6 +48,7 @@ will activate `MyPlugin` defined above and have its settings automatically added .addPlugins( Web && Javascript ).disablePlugins(MyPlugin) then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added. + */ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions { @@ -63,10 +69,6 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions override def toString: String = label - /** When this method is overridden with a val or a lazy val, `autoImport._` is automatically - * imported to *.sbt scripts. 
*/ - def autoImport: Any = () - /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ def projectConfigurations: Seq[Configuration] = Nil @@ -309,15 +311,17 @@ ${listConflicts(conflicting)}""") case ap: AutoPlugin => model(ap) } - private[sbt] def hasStableAutoImport(ap: AutoPlugin, loader: ClassLoader): Boolean = { + private[sbt] def hasAutoImportGetter(ap: AutoPlugin, loader: ClassLoader): Boolean = { import reflect.runtime.{universe => ru} import util.control.Exception.catching val m = ru.runtimeMirror(loader) val im = m.reflect(ap) - val fmOpt = catching(classOf[ScalaReflectionException]) opt { - val autoImportSym = im.symbol.asType.toType.declaration(ru.newTermName("autoImport")).asTerm - im.reflectField(autoImportSym) + val hasGetterOpt = catching(classOf[ScalaReflectionException]) opt { + im.symbol.asType.toType.declaration(ru.newTermName("autoImport")) match { + case ru.NoSymbol => false + case sym => sym.asTerm.isGetter + } } - fmOpt.isDefined + hasGetterOpt getOrElse false } -} \ No newline at end of file +} diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index 044d7ab4d..5eb6f792c 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -24,7 +24,7 @@ object Imports } object X extends AutoPlugin { - override lazy val autoImport = Imports + val autoImport = Imports def requires = Plugins.empty def trigger = noTrigger } diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index b33a8e8d2..7906da1f2 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -3,21 +3,19 @@ package sbttest // you need package http://stackoverflow.com/questions/9822008/ import sbt._ import Keys._ -object Imports { - object bN 
extends AutoPlugin { - def requires = empty - def trigger = allRequirements - } - lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") -} - object C extends AutoPlugin { - override lazy val autoImport = Imports + object autoImport { + object bN extends AutoPlugin { + def requires = empty + def trigger = allRequirements + } + lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") + } def requires = empty def trigger = noTrigger } - import Imports._ + import C.autoImport._ object A extends AutoPlugin { def requires = bN From 575e65796214e638571697c9cb97578dbab4b89b Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Tue, 25 Mar 2014 00:20:11 -0400 Subject: [PATCH 133/148] Default AutoPlugin to an empty root plugin --- main/src/main/scala/sbt/Plugins.scala | 8 +- .../main/scala/sbt/plugins/GlobalModule.scala | 3 +- .../main/scala/sbt/plugins/IvyModule.scala | 4 +- .../main/scala/sbt/plugins/JvmModule.scala | 4 +- .../project/auto-plugins/project/Q.scala | 28 +++--- .../binary-plugin/changes/define/A.scala | 9 +- src/sphinx/Extending/Plugins.rst | 87 +++++++++++-------- 7 files changed, 72 insertions(+), 71 deletions(-) diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 9451940c3..63795bd6d 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -25,8 +25,8 @@ For example, the following will automatically add the settings in `projectSettin to a project that has both the `Web` and `Javascript` plugins enabled. object Plugin extends sbt.AutoPlugin { - def requires = Web && Javascript - def trigger = allRequirements + override def requires = Web && Javascript + override def trigger = allRequirements override def projectSettings = Seq(...) 
object autoImport { @@ -59,11 +59,11 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions * * When this method returns `noTrigger`, and `requires` method returns `Web && Javascript`, this plugin * instance will be added only if the build user enables it, but it will automatically add both `Web` and `Javascript`. */ - def trigger: PluginTrigger + def trigger: PluginTrigger = noTrigger /** This AutoPlugin requires the plugins the [[Plugins]] matcher returned by this method. See [[trigger]]. */ - def requires: Plugins + def requires: Plugins = empty val label: String = getClass.getName.stripSuffix("$") diff --git a/main/src/main/scala/sbt/plugins/GlobalModule.scala b/main/src/main/scala/sbt/plugins/GlobalModule.scala index 00485a5e0..9ee80889e 100644 --- a/main/src/main/scala/sbt/plugins/GlobalModule.scala +++ b/main/src/main/scala/sbt/plugins/GlobalModule.scala @@ -10,8 +10,7 @@ import Def.Setting */ object GlobalModule extends AutoPlugin { // This is included by default - def requires = empty - def trigger = allRequirements + override def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = Defaults.coreDefaultSettings diff --git a/main/src/main/scala/sbt/plugins/IvyModule.scala b/main/src/main/scala/sbt/plugins/IvyModule.scala index 0b01f4670..58202c852 100644 --- a/main/src/main/scala/sbt/plugins/IvyModule.scala +++ b/main/src/main/scala/sbt/plugins/IvyModule.scala @@ -16,8 +16,8 @@ import Def.Setting object IvyModule extends AutoPlugin { // We are automatically included on everything that has the global module, // which is automatically included on everything. 
- def requires = GlobalModule - def trigger = allRequirements + override def requires = GlobalModule + override def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings diff --git a/main/src/main/scala/sbt/plugins/JvmModule.scala b/main/src/main/scala/sbt/plugins/JvmModule.scala index f50fb1e7d..9cd1840ce 100644 --- a/main/src/main/scala/sbt/plugins/JvmModule.scala +++ b/main/src/main/scala/sbt/plugins/JvmModule.scala @@ -17,8 +17,8 @@ import Def.Setting object JvmModule extends AutoPlugin { // We are automatically enabled for any IvyModule project. We also require its settings // for ours to work. - def requires = IvyModule - def trigger = allRequirements + override def requires = IvyModule + override def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = Defaults.runnerSettings ++ diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala index 5eb6f792c..f135d444b 100644 --- a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -6,13 +6,9 @@ package sbttest // you need package http://stackoverflow.com/questions/9822008/ object Imports { - trait EmptyAutoPlugin extends AutoPlugin { - def requires = empty - def trigger = noTrigger - } - object A extends EmptyAutoPlugin - object B extends EmptyAutoPlugin - object E extends EmptyAutoPlugin + object A extends AutoPlugin + object B extends AutoPlugin + object E extends AutoPlugin lazy val q = config("q") lazy val p = config("p").extend(q) @@ -25,21 +21,19 @@ object Imports object X extends AutoPlugin { val autoImport = Imports - def requires = Plugins.empty - def trigger = noTrigger } import Imports._ object D extends AutoPlugin { - def requires: Plugins = E - def trigger = allRequirements + override def requires: Plugins = E + override def trigger = allRequirements } object Q 
extends AutoPlugin { - def requires: Plugins = A && B - def trigger = allRequirements + override def requires: Plugins = A && B + override def trigger = allRequirements override def projectConfigurations: Seq[Configuration] = p :: @@ -67,8 +61,8 @@ object Q extends AutoPlugin object R extends AutoPlugin { // NOTE - Only plugins themselves support exclusions... - def requires = Q - def trigger = allRequirements + override def requires = Q + override def trigger = allRequirements override def projectSettings = Seq( // tests proper ordering: R requires Q, so Q settings should come first @@ -82,8 +76,8 @@ object R extends AutoPlugin // Unless explicitly loaded by the build user, this will not be activated. object S extends AutoPlugin { - def requires = Q - def trigger = noTrigger + override def requires = Q + override def trigger = noTrigger override def projectSettings = Seq( del in q += " S" diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala index 7906da1f2..dde89c439 100644 --- a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -6,20 +6,17 @@ import Keys._ object C extends AutoPlugin { object autoImport { object bN extends AutoPlugin { - def requires = empty - def trigger = allRequirements + override def trigger = allRequirements } lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") } - def requires = empty - def trigger = noTrigger } import C.autoImport._ object A extends AutoPlugin { - def requires = bN - def trigger = allRequirements + override def requires = bN + override def trigger = allRequirements override def projectSettings = Seq( check := {} ) diff --git a/src/sphinx/Extending/Plugins.rst b/src/sphinx/Extending/Plugins.rst index 77f5da73e..4b155b254 100644 --- a/src/sphinx/Extending/Plugins.rst +++ b/src/sphinx/Extending/Plugins.rst @@ -225,19 
+225,17 @@ To make a plugin, create a project and configure `sbtPlugin` to `true`. Then, write the plugin code and publish your project to a repository. The plugin can be used as described in the previous section. -A plugin can implement `sbt.AutoImpot`. The contents of an AutoImport -singleton, declared like `object MyPlugin extends AutoImport`, are -wildcard imported in `set`, `eval`, and `.sbt` files. Typically, -this is used to provide new keys (SettingKey, TaskKey, or InputKey) or -core methods without requiring an import or qualification. - -In addition, a plugin can implement the `AutoPlugin` class. This has additoinal features, such as - +* Automatically importing selective names to `.sbt` files. * Specifying plugin dependencies. * Automatically activating itself when all dependencies are present. * Specifying `projectSettings`, `buildSettings`, and `globalSettings` as appropriate. -The AutoPlugin's `projectSettings` is automatically appended to each project's settings, when its dependencies also exist on that project +When an AutoPlugin provides a stable field such as `val` or `object` named `autoImport`, +the contents of the field are wildcard imported in in `set`, `eval`, and `.sbt` files. Typically, +this is used to provide new keys (SettingKey, TaskKey, or InputKey) or +core methods without requiring an import or qualification. + +The AutoPlugin's `projectSettings` is automatically appended to each project's settings, when its dependencies also exist on that project. The `requires` method defines the dependencies to other plugins. The `trigger` method defines the conditions by which this plugin's settings are automatically activated. The `buildSettings` is appended to each build's settings (that is, `in ThisBuild`). @@ -246,7 +244,6 @@ These allow a plugin to automatically provide new functionality or new defaults. One main use of this feature is to globally add commands, such as for IDE plugins. 
Use `globalSettings` to define the default value of a setting. - Example Plugin -------------- @@ -258,34 +255,51 @@ An example of a typical plugin: sbtPlugin := true - name := "example-plugin" + name := "sbt-obfuscate" organization := "org.example" -`MyPlugin.scala`: +`Plugin.scala`: :: + package sbtobfuscate + import sbt._ - object MyPlugin extends AutoPlugin + + object Plugin extends AutoPlugin { - // Only enable this plugin for projects which are JvmModules. - def trigger = allRequirements - def requires = sbt.plugins.JvmModule + // by definging autoImport, these are automatically imported into user's `*.sbt` + object autoImport + { + // configuration points, like the built in `version`, `libraryDependencies`, or `compile` + val obfuscate = taskKey[Seq[File]]("Obfuscates files.") + val obfuscateLiterals = settingKey[Boolean]("Obfuscate literals.") + + // default values for the tasks and settings + lazy val baseObfuscateSettings: Seq[sbt.Def.Setting[_]] = Seq( + obfuscate := { + Obfuscate(sources.value, (obfuscateLiterals in obfuscate).value) + }, + obfuscateLiterals in obfuscate := false + ) + } + + import autoImport._ + override def requires = sbt.plugins.JvmModule + + // This plugin is automatically enabled for projects which are JvmModules. + override def trigger = allRequirements - // configuration points, like the built in `version`, `libraryDependencies`, or `compile` - // by implementing Plugin, these are automatically imported in a user's `build.sbt` - val newTask = taskKey[Unit]("A new task.") - val newSetting = settingKey[String]("A new setting.") - // a group of settings that are automatically added to projects. 
- val projectSettings = Seq( - newSetting := "test", - newTask := println(newSetting.value) - ) + override val projectSettings = + inConfig(Compile)(baseObfucscateSettings) ++ + inConfig(Test)(baseObfuscateSettings) + } - // alternatively, by overriding `settings`, they could be automatically added to a Project - // override val settings = Seq(...) + object Obfuscate + { + def apply(sources: Seq[File]): Seq[File] := sources } Usage example @@ -293,20 +307,18 @@ Usage example A build definition that uses the plugin might look like: -`build.sbt` +`obfuscate.sbt` :: - MyPlugin.newSettings - - newSetting := "example" + obfuscateLiterals in obfuscate := true Root Plugins ------------ Some plugins should always be explicitly enabled on projects. Sbt calls these root plugins, i.e. plugins -that are "root" nodes in the plugin depdendency graph. To define a root plugin, set the `trigger` method to `noTrigger` and the `requires` method to `empty`. +that are "root" nodes in the plugin depdendency graph. `AutoPlugin` by default defines a root plugin. 
Example command root plugin ---------------------- @@ -319,21 +331,20 @@ A basic plugin that adds commands looks like: sbtPlugin := true - name := "example-plugin" + name := "sbt-sample" organization := "org.example" -`MyPlugin.scala` +`Plugin.scala` :: + package sbtsample + import sbt._ import Keys._ - object MyPlugin extends AutoPlugin + object Plugin extends AutoPlugin { - def trigger = noTrigger - def requires = empty - override lazy val projectSettings = Seq(commands += myCommand) lazy val myCommand = From 3f2f12f5bb35a71c622a68941e2aef5773704e57 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Tue, 25 Mar 2014 01:49:08 -0400 Subject: [PATCH 134/148] Fix PluginsTest --- main/src/test/scala/PluginsTest.scala | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/main/src/test/scala/PluginsTest.scala b/main/src/test/scala/PluginsTest.scala index a558854f1..eed8a1143 100644 --- a/main/src/test/scala/PluginsTest.scala +++ b/main/src/test/scala/PluginsTest.scala @@ -53,44 +53,38 @@ object AI lazy val deducePlugin = Plugins.deducer(allPlugins) lazy val log = Logger.Null - trait EmptyAutoPlugin extends AutoPlugin { - def requires = empty - def trigger = noTrigger - } - object A extends EmptyAutoPlugin - object B extends EmptyAutoPlugin + object A extends AutoPlugin + object B extends AutoPlugin object Q extends AutoPlugin { - def requires: Plugins = A && B - def trigger = allRequirements + override def requires: Plugins = A && B + override def trigger = allRequirements } object R extends AutoPlugin { - def requires = Q - def trigger = allRequirements + override def requires = Q + override def trigger = allRequirements } object S extends AutoPlugin { - def requires = Q && !R - def trigger = allRequirements + override def requires = Q && !R + override def trigger = allRequirements } // This is an opt-in plugin with a requirement // Unless explicitly loaded by the build user, this will not be activated. 
object T extends AutoPlugin { - def requires = Q && !R - def trigger = noTrigger + override def requires = Q && !R } // This is an opt-in plugin with a requirement // Unless explicitly loaded by the build user, this will not be activated. object U extends AutoPlugin { - def requires = A && !Q - def trigger = noTrigger + override def requires = A && !Q } } From 66ada09f0645b1c86b1f6402b2301407f929c119 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Tue, 25 Mar 2014 01:49:33 -0400 Subject: [PATCH 135/148] 0.13.5-M1 --- project/Sbt.scala | 2 +- src/main/conscript/sbt/launchconfig | 3 +-- src/main/conscript/scalas/launchconfig | 3 +-- src/main/conscript/screpl/launchconfig | 3 +-- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 7ea722818..c226a3cd3 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -14,7 +14,7 @@ object Sbt extends Build override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.3-SNAPSHOT", + version := "0.13.5-M1", publishArtifact in packageDoc := false, scalaVersion := "2.10.3", publishMavenStyle := false, diff --git a/src/main/conscript/sbt/launchconfig b/src/main/conscript/sbt/launchconfig index 754d63c95..18c257d2d 100644 --- a/src/main/conscript/sbt/launchconfig +++ b/src/main/conscript/sbt/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.3-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.5-M1]} class: sbt.xMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,7 +13,6 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, 
[organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/scalas/launchconfig b/src/main/conscript/scalas/launchconfig index 5710eac15..1bfecae6f 100644 --- a/src/main/conscript/scalas/launchconfig +++ b/src/main/conscript/scalas/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.3-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.5-M1]} class: sbt.ScriptMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,7 +13,6 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/screpl/launchconfig b/src/main/conscript/screpl/launchconfig index c0e1d473a..e76744f32 100644 --- a/src/main/conscript/screpl/launchconfig +++ b/src/main/conscript/screpl/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.3-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.5-M1]} class: sbt.ConsoleMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,7 +13,6 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] From 1debc48568083a37e8d0f3d4989ff066ac3fecdd Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Tue, 25 Mar 2014 15:49:07 -0400 Subject: [PATCH 
136/148] Rename GlobalModule -> CorePlugin --- main/src/main/scala/sbt/PluginDiscovery.scala | 6 +++--- .../{GlobalModule.scala => CorePlugin.scala} | 2 +- .../plugins/{IvyModule.scala => IvyPlugin.scala} | 8 ++++---- .../plugins/{JvmModule.scala => JvmPlugin.scala} | 4 ++-- src/sphinx/Getting-Started/Using-Plugins.rst | 14 +++++++------- 5 files changed, 17 insertions(+), 17 deletions(-) rename main/src/main/scala/sbt/plugins/{GlobalModule.scala => CorePlugin.scala} (90%) rename main/src/main/scala/sbt/plugins/{IvyModule.scala => IvyPlugin.scala} (87%) rename main/src/main/scala/sbt/plugins/{JvmModule.scala => JvmPlugin.scala} (92%) diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala index 54939d6b9..e504cd264 100644 --- a/main/src/main/scala/sbt/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -29,9 +29,9 @@ object PluginDiscovery import Paths._ // TODO - Fix this once we can autodetect AutoPlugins defined by sbt itself. 
val defaultAutoPlugins = Seq( - "sbt.plugins.IvyModule" -> sbt.plugins.IvyModule, - "sbt.plugins.JvmModule" -> sbt.plugins.JvmModule, - "sbt.plugins.GlobalModule" -> sbt.plugins.GlobalModule + "sbt.plugins.IvyPlugin" -> sbt.plugins.IvyPlugin, + "sbt.plugins.JvmPlugin" -> sbt.plugins.JvmPlugin, + "sbt.plugins.CorePlugin" -> sbt.plugins.CorePlugin ) val detectedAutoPugins = discover[AutoPlugin](AutoPlugins) val allAutoPlugins = (defaultAutoPlugins ++ detectedAutoPugins.modules) map { case (name, value) => diff --git a/main/src/main/scala/sbt/plugins/GlobalModule.scala b/main/src/main/scala/sbt/plugins/CorePlugin.scala similarity index 90% rename from main/src/main/scala/sbt/plugins/GlobalModule.scala rename to main/src/main/scala/sbt/plugins/CorePlugin.scala index 9ee80889e..9702046c2 100644 --- a/main/src/main/scala/sbt/plugins/GlobalModule.scala +++ b/main/src/main/scala/sbt/plugins/CorePlugin.scala @@ -8,7 +8,7 @@ import Def.Setting * * Can control task-level paralleism, logging, etc. */ -object GlobalModule extends AutoPlugin { +object CorePlugin extends AutoPlugin { // This is included by default override def trigger = allRequirements diff --git a/main/src/main/scala/sbt/plugins/IvyModule.scala b/main/src/main/scala/sbt/plugins/IvyPlugin.scala similarity index 87% rename from main/src/main/scala/sbt/plugins/IvyModule.scala rename to main/src/main/scala/sbt/plugins/IvyPlugin.scala index 58202c852..43239bdbf 100644 --- a/main/src/main/scala/sbt/plugins/IvyModule.scala +++ b/main/src/main/scala/sbt/plugins/IvyPlugin.scala @@ -13,14 +13,14 @@ import Def.Setting * - `artifacts` * - `publishedArtifacts` */ -object IvyModule extends AutoPlugin { +object IvyPlugin extends AutoPlugin { // We are automatically included on everything that has the global module, // which is automatically included on everything. 
- override def requires = GlobalModule + override def requires = CorePlugin override def trigger = allRequirements - + override lazy val projectSettings: Seq[Setting[_]] = Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings override lazy val globalSettings: Seq[Setting[_]] = Defaults.globalIvyCore -} \ No newline at end of file +} diff --git a/main/src/main/scala/sbt/plugins/JvmModule.scala b/main/src/main/scala/sbt/plugins/JvmPlugin.scala similarity index 92% rename from main/src/main/scala/sbt/plugins/JvmModule.scala rename to main/src/main/scala/sbt/plugins/JvmPlugin.scala index 9cd1840ce..e3c20056a 100644 --- a/main/src/main/scala/sbt/plugins/JvmModule.scala +++ b/main/src/main/scala/sbt/plugins/JvmPlugin.scala @@ -14,10 +14,10 @@ import Def.Setting * - `Test` * - `Compile` */ -object JvmModule extends AutoPlugin { +object JvmPlugin extends AutoPlugin { // We are automatically enabled for any IvyModule project. We also require its settings // for ours to work. - override def requires = IvyModule + override def requires = IvyPlugin override def trigger = allRequirements override lazy val projectSettings: Seq[Setting[_]] = diff --git a/src/sphinx/Getting-Started/Using-Plugins.rst b/src/sphinx/Getting-Started/Using-Plugins.rst index 4ab353755..edde8e0d1 100644 --- a/src/sphinx/Getting-Started/Using-Plugins.rst +++ b/src/sphinx/Getting-Started/Using-Plugins.rst @@ -35,7 +35,7 @@ Adding settings for a plugin A plugin can declare that its settings be automatically added, in which case you don't have to do anything to add them. -As of sbt 0.13.2, there is a new :doc:`auto-plugins <../DetailedTopics/AutoPlugins>` feature that enables plugins +As of sbt 0.13.5, there is a new :doc:`auto-plugins <../DetailedTopics/AutoPlugins>` feature that enables plugins to automatically, and safely, ensure their settings and dependencies are on a project. Most plugins should have their default settings automatically, however some may require explicit enablement. 
@@ -51,16 +51,16 @@ For example :: > plugins In file:/home/jsuereth/projects/sbt/test-ivy-issues/ - sbt.plugins.IvyModule: enabled in test-ivy-issues - sbt.plugins.JvmModule: enabled in test-ivy-issues - sbt.plugins.GlobalModule: enabled in test-ivy-issues + sbt.plugins.IvyPlugin: enabled in test-ivy-issues + sbt.plugins.JvmPlugin: enabled in test-ivy-issues + sbt.plugins.CorePlugin: enabled in test-ivy-issues Here, the plugins output is showing that the sbt default plugins are all enabled. Sbt's default settings are provided via three plugins: -1. GlobalModule: Provides the core parallelism controls for tasks -2. IvyModule: Provides the mechanisms to publish/resolve modules. -3. JvmModule: Provides the mechanisms to compile/test/run/package Java/Scala projects. +1. CorePlugin: Provides the core parallelism controls for tasks +2. IvyPlugin: Provides the mechanisms to publish/resolve modules. +3. JvmPlugin: Provides the mechanisms to compile/test/run/package Java/Scala projects. However, older plugins often required settings to be added explicitly, so that :doc:`multi-project build ` could have different types of projects. The plugin documentation will indicate how to configure it, but typically for older plugins this involves adding the base settings for the plugin and customizing as necessary. From 607e061a6a2f681f643dd098a5f3a27fe6dd5729 Mon Sep 17 00:00:00 2001 From: Ismael Juma Date: Tue, 25 Mar 2014 22:08:04 +0000 Subject: [PATCH 137/148] Bump Scala version to 2.10.4.
--- project/Sbt.scala | 2 +- src/sphinx/Community/Changes.rst | 4 ++++ src/sphinx/Getting-Started/Basic-Def.rst | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index c226a3cd3..3fa63dbd9 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -16,7 +16,7 @@ object Sbt extends Build organization := "org.scala-sbt", version := "0.13.5-M1", publishArtifact in packageDoc := false, - scalaVersion := "2.10.3", + scalaVersion := "2.10.4", publishMavenStyle := false, componentID := None, crossPaths := false, diff --git a/src/sphinx/Community/Changes.rst b/src/sphinx/Community/Changes.rst index 07e518a3e..600a28f68 100644 --- a/src/sphinx/Community/Changes.rst +++ b/src/sphinx/Community/Changes.rst @@ -2,6 +2,10 @@ Changes ======= +0.13.2 to 0.13.5 +~~~~~~~~~~~~~~~~ +- The Scala version for sbt and sbt plugins is now 2.10.4. This is a compatible version bump. + 0.13.1 to 0.13.2 ~~~~~~~~~~~~~~~~ - Adding new name-hashing feature to incremental compiler. Alters how scala dependencies are tracked, reducing number of recompiles necessary. diff --git a/src/sphinx/Getting-Started/Basic-Def.rst b/src/sphinx/Getting-Started/Basic-Def.rst index e1cb36b7a..a89248f76 100644 --- a/src/sphinx/Getting-Started/Basic-Def.rst +++ b/src/sphinx/Getting-Started/Basic-Def.rst @@ -72,7 +72,7 @@ Here's an example: version := "1.0" - scalaVersion := "2.10.3" + scalaVersion := "2.10.4" Each `Setting` is defined with a Scala expression. The expressions in `build.sbt` are independent of one another, and From 4f969491d0f846bd8ccf0264b7577becef1e2782 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Wed, 26 Mar 2014 11:35:50 -0400 Subject: [PATCH 138/148] Deprecate autoSettings, Rename ProjectSettings => BuildScalaFiles * autoSettings is renamed to settingSets. Since this was part of 0.13.0 we need to deprecate it first. 
--- main/src/main/scala/sbt/AddSettings.scala | 7 +++---- main/src/main/scala/sbt/Load.scala | 8 ++++---- main/src/main/scala/sbt/Project.scala | 5 ++++- .../sbt-test/project/auto-settings/project/P.scala | 12 ++++++------ .../project/delegate_config/project/Build.scala | 4 ++-- sbt/src/sbt-test/project/multi/changes/Build1.scala | 4 ++-- sbt/src/sbt-test/project/multi/changes/Build2.scala | 4 ++-- sbt/src/sbt-test/project/sbt-file-projects/build.sbt | 2 +- .../sbt-file-projects/changes/Restricted.scala | 4 ++-- src/sphinx/Architecture/Setting-Initialization.rst | 10 +++++----- 10 files changed, 31 insertions(+), 29 deletions(-) diff --git a/main/src/main/scala/sbt/AddSettings.scala b/main/src/main/scala/sbt/AddSettings.scala index 9677af329..9d1a2ac80 100644 --- a/main/src/main/scala/sbt/AddSettings.scala +++ b/main/src/main/scala/sbt/AddSettings.scala @@ -15,8 +15,7 @@ object AddSettings private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings - // Settings created with the Project().settings() commands in build.scala files. - private[sbt] final object ProjectSettings extends AddSettings + private[sbt] final object BuildScalaFiles extends AddSettings /** Adds all settings from autoplugins. */ val autoPlugins: AddSettings = new AutoPlugins(const(true)) // Note: We do not expose fine-grained autoplugins because @@ -27,7 +26,7 @@ object AddSettings // in place. /** Settings specified in Build.scala `Project` constructors. */ - val projectSettings: AddSettings = ProjectSettings + val buildScalaFiles: AddSettings = BuildScalaFiles /** All plugins that aren't auto plugins. */ val nonAutoPlugins: AddSettings = plugins(const(true)) @@ -51,7 +50,7 @@ object AddSettings def seq(autos: AddSettings*): AddSettings = new Sequence(autos) /** The default inclusion of settings. 
*/ - val allDefaults: AddSettings = seq(autoPlugins, projectSettings, userSettings, nonAutoPlugins, defaultSbtFiles) + val allDefaults: AddSettings = seq(autoPlugins, buildScalaFiles, userSettings, nonAutoPlugins, defaultSbtFiles) /** Combines two automatic setting configurations. */ def append(a: AddSettings, b: AddSettings): AddSettings = (a,b) match { diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index 4b9edb637..a45dec8dd 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -489,7 +489,7 @@ object Load private[this] def translateAutoPluginException(e: AutoPluginException, project: Project): AutoPluginException = e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n") - private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin], projectSettings: Seq[Setting[_]]): LoadedSbtFile = + private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin], buildScalaFiles: Seq[Setting[_]]): LoadedSbtFile = { lazy val defaultSbtFiles = configurationSources(projectBase) def settings(ss: Seq[Setting[_]]) = new LoadedSbtFile(ss, Nil, Nil) @@ -506,7 +506,7 @@ object Load def loadSettingsFile(src: File): LoadedSbtFile = EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), loadedPlugins.detected.imports, 0)(loader) - import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,AutoPlugins,Sequence, ProjectSettings} + import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,AutoPlugins,Sequence,BuildScalaFiles} def pluginSettings(f: Plugins) = { val included = loadedPlugins.detected.plugins.values.filter(f.include) // don't 
apply the filter to AutoPlugins, only Plugins included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings) @@ -517,7 +517,7 @@ object Load autoPlugins.filter(f.include).flatMap(_.projectSettings) def expand(auto: AddSettings): LoadedSbtFile = auto match { - case ProjectSettings => settings(projectSettings) + case BuildScalaFiles => settings(buildScalaFiles) case User => settings(injectSettings.projectLoaded(loader)) case sf: SbtFiles => loadSettings( sf.files.map(f => IO.resolve(projectBase, f))) case sf: DefaultSbtFiles => loadSettings( defaultSbtFiles.filter(sf.include)) @@ -743,4 +743,4 @@ final case class LoadBuildConfiguration(stagingDirectory: File, classpath: Seq[A lazy val globalPluginNames = if(classpath.isEmpty) Nil else Load.getPluginNames(classpath, pluginManagement.initialLoader) } -final class IncompatiblePluginsException(msg: String, cause: Throwable) extends Exception(msg, cause) \ No newline at end of file +final class IncompatiblePluginsException(msg: String, cause: Throwable) extends Exception(msg, cause) diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index 510e3e531..faea25e13 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -125,8 +125,11 @@ sealed trait Project extends ProjectDefinition[ProjectReference] /** Appends settings to the current settings sequence for this project. */ def settings(ss: Setting[_]*): Project = copy(settings = (settings: Seq[Setting[_]]) ++ ss) + @deprecated("Use settingSets method.", "0.13.5") + def autoSettings(select: AddSettings*): Project = settingSets(select.toSeq: _*) + /** Configures how settings from other sources, such as .sbt files, are appended to the explicitly specified settings for this project. 
*/ - def autoSettings(select: AddSettings*): Project = copy(auto = AddSettings.seq(select : _*)) + def settingSets(select: AddSettings*): Project = copy(auto = AddSettings.seq(select : _*)) /** Adds a list of .sbt files whose settings will be appended to the settings of this project. * They will be appended after the explicit settings and already defined automatic settings sources. */ diff --git a/sbt/src/sbt-test/project/auto-settings/project/P.scala b/sbt/src/sbt-test/project/auto-settings/project/P.scala index 1705e0482..3f1433d6d 100644 --- a/sbt/src/sbt-test/project/auto-settings/project/P.scala +++ b/sbt/src/sbt-test/project/auto-settings/project/P.scala @@ -6,22 +6,22 @@ object B extends Build { // version should be from explicit/a.txt - lazy val root = project("root", "1.4") autoSettings( projectSettings,userSettings, sbtFiles(file("explicit/a.txt")) ) + lazy val root = project("root", "1.4") settingSets( buildScalaFiles, userSettings, sbtFiles(file("explicit/a.txt")) ) // version should be from global/user.sbt - lazy val a = project("a", "1.1") autoSettings( projectSettings, userSettings ) + lazy val a = project("a", "1.1") settingSets( buildScalaFiles, userSettings ) // version should be the default 0.1-SNAPSHOT - lazy val b = project("b", "0.1-SNAPSHOT") autoSettings(projectSettings) + lazy val b = project("b", "0.1-SNAPSHOT") settingSets(buildScalaFiles) // version should be from the explicit settings call - lazy val c = project("c", "0.9") settings(version := "0.9") autoSettings(projectSettings) + lazy val c = project("c", "0.9") settings(version := "0.9") settingSets(buildScalaFiles) // version should be from d/build.sbt - lazy val d = project("d", "1.3") settings(version := "0.9") autoSettings( projectSettings, defaultSbtFiles ) + lazy val d = project("d", "1.3") settings(version := "0.9") settingSets( buildScalaFiles, defaultSbtFiles ) // version should be from global/user.sbt - lazy val e = project("e", "1.1") settings(version := "0.9") 
autoSettings( projectSettings, defaultSbtFiles, sbtFiles(file("../explicit/a.txt")), userSettings ) + lazy val e = project("e", "1.1") settings(version := "0.9") settingSets( buildScalaFiles, defaultSbtFiles, sbtFiles(file("../explicit/a.txt")), userSettings ) def project(id: String, expectedVersion: String): Project = Project(id, if(id == "root") file(".") else file(id)) settings( TaskKey[Unit]("check") <<= version map { v => diff --git a/sbt/src/sbt-test/project/delegate_config/project/Build.scala b/sbt/src/sbt-test/project/delegate_config/project/Build.scala index dbd97b466..25318261b 100644 --- a/sbt/src/sbt-test/project/delegate_config/project/Build.scala +++ b/sbt/src/sbt-test/project/delegate_config/project/Build.scala @@ -12,11 +12,11 @@ object B extends Build val sample = SettingKey[Int]("sample") val check = TaskKey[Unit]("check") - lazy val root = Project("root", file("."), settings = Nil).autoSettings() + lazy val root = Project("root", file("."), settings = Nil).settingSets() lazy val sub = Project("sub", file("."), delegates = root :: Nil, configurations = newConfig :: Nil, - settings = incSample :: checkTask(4) :: Nil).autoSettings(projectSettings) + settings = incSample :: checkTask(4) :: Nil).settingSets(buildScalaFiles) override lazy val settings = (sample in newConfig := 3) :: checkTask(3) :: diff --git a/sbt/src/sbt-test/project/multi/changes/Build1.scala b/sbt/src/sbt-test/project/multi/changes/Build1.scala index 8d886bd51..1de02bfcc 100644 --- a/sbt/src/sbt-test/project/multi/changes/Build1.scala +++ b/sbt/src/sbt-test/project/multi/changes/Build1.scala @@ -8,5 +8,5 @@ object TestBuild extends Build proj("a", "."), proj("b", "b") ) - def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).autoSettings(projectSettings) -} \ No newline at end of file + def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).settingSets(buildScalaFiles) +} diff --git 
a/sbt/src/sbt-test/project/multi/changes/Build2.scala b/sbt/src/sbt-test/project/multi/changes/Build2.scala index 2d96cfe5c..27c2314b5 100644 --- a/sbt/src/sbt-test/project/multi/changes/Build2.scala +++ b/sbt/src/sbt-test/project/multi/changes/Build2.scala @@ -12,5 +12,5 @@ object SecondBuild extends MakeBuild trait MakeBuild extends Build { import AddSettings._ - def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).autoSettings(projectSettings, defaultSbtFiles) -} \ No newline at end of file + def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).settingSets(buildScalaFiles, defaultSbtFiles) +} diff --git a/sbt/src/sbt-test/project/sbt-file-projects/build.sbt b/sbt/src/sbt-test/project/sbt-file-projects/build.sbt index 710ef1b89..500a32076 100644 --- a/sbt/src/sbt-test/project/sbt-file-projects/build.sbt +++ b/sbt/src/sbt-test/project/sbt-file-projects/build.sbt @@ -2,7 +2,7 @@ val a = "a" val f = file("a") val g = taskKey[Unit]("A task in the root project") -val p = Project(a, f).autoSettings(AddSettings.autoPlugins, AddSettings.sbtFiles( file("a.sbt") )) +val p = Project(a, f).settingSets(AddSettings.autoPlugins, AddSettings.sbtFiles( file("a.sbt") )) val b = Project("b", file("b")) diff --git a/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala b/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala index 46b874741..12f45d9ec 100644 --- a/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala +++ b/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala @@ -2,7 +2,7 @@ import sbt._ import Keys._ object B extends Build { - lazy val root = Project("root", file(".")).autoSettings( + lazy val root = Project("root", file(".")).settingSets( AddSettings.autoPlugins, AddSettings.sbtFiles( file("other.sbt") )) // ignore build.sbt -} \ No newline at end of file +} diff --git a/src/sphinx/Architecture/Setting-Initialization.rst 
b/src/sphinx/Architecture/Setting-Initialization.rst index e2e54fd47..fdf54e33a 100644 --- a/src/sphinx/Architecture/Setting-Initialization.rst +++ b/src/sphinx/Architecture/Setting-Initialization.rst @@ -74,7 +74,7 @@ To do so, use the ``AddSettings`` class :: object MyOwnOrder extends Build { // here we load config from a txt file. - lazy val root = project.in(file(".")).autoSettings( autoPlugins, projectSettings, sbtFiles(file("silly.txt")) ) + lazy val root = project.in(file(".")).settingSets( autoPlugins, buildScalaFiles, sbtFiles(file("silly.txt")) ) } In the above project, we've modified the order of settings to be: @@ -92,7 +92,7 @@ The AddSettings object provides the following "groups" of settings you can use f ``autoPlugins`` All the ordered settings of plugins after they've gone through dependency resolution -``projectSettings`` +``buildScalaFiles`` The full sequence of settings defined directly in ``project/*.scala`` builds. ``sbtFiles(*)`` Specifies the exact setting DSL files to include (files must use the ``.sbt`` file format) @@ -104,7 +104,7 @@ The AddSettings object provides the following "groups" of settings you can use f *Note: Be very careful when reordering settings. It's easy to accidentally remove core functionality.* -For example, let's see what happens if we move the ``build.sbt`` files *before* the ``projectSettings``. +For example, let's see what happens if we move the ``build.sbt`` files *before* the ``buildScalaFile``. 
Let's create an example project with the following definition: @@ -112,7 +112,7 @@ Let's create an example project with the following definition: object MyTestBuild extends Build { - val testProject = project.in(file(".")).autoSettings(autoPlugins, defaultSbtFiles, projectSettings).settings( + val testProject = project.in(file(".")).settingSets(autoPlugins, defaultSbtFiles, buildScalaFiles).settings( version := scalaBinaryVersion.value match { case "2.10" => "1.0-SNAPSHOT" case v => s"1.0-for-${v}-SNAPSHOT" @@ -128,4 +128,4 @@ Now, when issuing a release we want to lock down the version. Most tools assume version := "1.0.0" However, when we load this new build, we find that the ``version`` in ``version.sbt`` has been **overridden** by the one defined -in ``project/Build.scala`` because of the order we defined for settings, so the new ``version.sbt`` file has no effect. \ No newline at end of file +in ``project/Build.scala`` because of the order we defined for settings, so the new ``version.sbt`` file has no effect.
From f6d1044c4b0ed81ed899e73341d940f4bee37d91 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Wed, 26 Mar 2014 14:33:24 -0400 Subject: [PATCH 139/148] 0.13.5-SNAPSHOT --- project/Sbt.scala | 2 +- src/main/conscript/sbt/launchconfig | 1 + src/main/conscript/scalas/launchconfig | 1 + src/main/conscript/screpl/launchconfig | 1 + 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 3fa63dbd9..537d4881d 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -14,7 +14,7 @@ object Sbt extends Build override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.5-M1", + version := "0.13.5-SNAPSHOT", publishArtifact in packageDoc := false, scalaVersion := "2.10.4", publishMavenStyle := false, diff --git a/src/main/conscript/sbt/launchconfig b/src/main/conscript/sbt/launchconfig index 18c257d2d..fefc16c1a 100644 --- a/src/main/conscript/sbt/launchconfig +++ b/src/main/conscript/sbt/launchconfig @@ -13,6 +13,7 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/scalas/launchconfig b/src/main/conscript/scalas/launchconfig index 1bfecae6f..4efc15c1f 100644 --- a/src/main/conscript/scalas/launchconfig +++ b/src/main/conscript/scalas/launchconfig @@ -13,6 +13,7 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, 
[organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/screpl/launchconfig b/src/main/conscript/screpl/launchconfig index e76744f32..b8ab1ad17 100644 --- a/src/main/conscript/screpl/launchconfig +++ b/src/main/conscript/screpl/launchconfig @@ -13,6 +13,7 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] From 5f7e68c7ddce3a03ffe07de3c2168835dc6ebfc8 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 27 Mar 2014 17:36:40 -0400 Subject: [PATCH 140/148] Auto plugin names are imported with or without autoImport. Fixes #1217 --- main/src/main/scala/sbt/BuildStructure.scala | 3 ++- main/src/main/scala/sbt/BuildUtil.scala | 11 +++++++++-- sbt/src/sbt-test/project/auto-plugins/build.sbt | 2 -- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index da24a2444..be6bad658 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -104,7 +104,8 @@ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugin (autoPlugins flatMap { case DetectedAutoPlugin(name, ap, hasAutoImport) => if (hasAutoImport) Some(name + ".autoImport") else None - })) + })) ++ + BuildUtil.importNamesRoot(autoPlugins map { _.name }) /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. 
*/ lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map {_.value}) diff --git a/main/src/main/scala/sbt/BuildUtil.scala b/main/src/main/scala/sbt/BuildUtil.scala index dd963e05d..db0d31f8e 100644 --- a/main/src/main/scala/sbt/BuildUtil.scala +++ b/main/src/main/scala/sbt/BuildUtil.scala @@ -80,9 +80,16 @@ object BuildUtil @deprecated("Use getImports(Seq[String]).", "0.13.2") def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = getImports(pluginNames ++ buildNames) + /** `import sbt._, Keys._`, and wildcard import `._` for all names. */ def getImports(names: Seq[String]): Seq[String] = baseImports ++ importAllRoot(names) - def importAll(values: Seq[String]): Seq[String] = if(values.isEmpty) Nil else values.map( _ + "._" ).mkString("import ", ", ", "") :: Nil + /** Import just the names. */ + def importNames(names: Seq[String]): Seq[String] = if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil + /** Prepend `_root_` and import just the names. */ + def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName) + + /** Wildcard import `._` for all values. */ + def importAll(values: Seq[String]): Seq[String] = importNames(values map { _ + "._" }) def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName) def rootedName(s: String): String = if(s contains '.') "_root_." 
+ s else s @@ -98,4 +105,4 @@ object BuildUtil (ref, agg) Relation.empty ++ depPairs } -} \ No newline at end of file +} diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt index de4f386e3..7039ed235 100644 --- a/sbt/src/sbt-test/project/auto-plugins/build.sbt +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -1,5 +1,3 @@ -import sbttest.{Q, S} - // disablePlugins(Q) will prevent R from being auto-added lazy val projA = project.addPlugins(A, B).disablePlugins(Q) From a4a6aa0bc305bb38faa9c09adf24b6f899bc25a1 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 27 Mar 2014 23:28:53 -0400 Subject: [PATCH 141/148] 0.13.5-M2 --- project/Sbt.scala | 2 +- src/main/conscript/sbt/launchconfig | 3 +-- src/main/conscript/scalas/launchconfig | 3 +-- src/main/conscript/screpl/launchconfig | 3 +-- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index 537d4881d..ebb9ba299 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -14,7 +14,7 @@ object Sbt extends Build override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.5-SNAPSHOT", + version := "0.13.5-M2", publishArtifact in packageDoc := false, scalaVersion := "2.10.4", publishMavenStyle := false, diff --git a/src/main/conscript/sbt/launchconfig b/src/main/conscript/sbt/launchconfig index fefc16c1a..d4fa5eead 100644 --- a/src/main/conscript/sbt/launchconfig +++ b/src/main/conscript/sbt/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.5-M1]} + version: ${sbt.version-read(sbt.version)[0.13.5-M2]} class: sbt.xMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,7 +13,6 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, 
[organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/scalas/launchconfig b/src/main/conscript/scalas/launchconfig index 4efc15c1f..67d249d13 100644 --- a/src/main/conscript/scalas/launchconfig +++ b/src/main/conscript/scalas/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.5-M1]} + version: ${sbt.version-read(sbt.version)[0.13.5-M2]} class: sbt.ScriptMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,7 +13,6 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/screpl/launchconfig b/src/main/conscript/screpl/launchconfig index b8ab1ad17..bb259ad7c 100644 --- a/src/main/conscript/screpl/launchconfig +++ b/src/main/conscript/screpl/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.5-M1]} + version: ${sbt.version-read(sbt.version)[0.13.5-M2]} class: sbt.ConsoleMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,7 +13,6 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central 
[boot] From 02ad34442fdc5edbf45893f8b3ee8207d0b94307 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 29 Mar 2014 13:41:28 -0400 Subject: [PATCH 142/148] back to 0.13.5-SNAPSHOT --- project/Sbt.scala | 2 +- src/main/conscript/sbt/launchconfig | 3 ++- src/main/conscript/scalas/launchconfig | 3 ++- src/main/conscript/screpl/launchconfig | 3 ++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/project/Sbt.scala b/project/Sbt.scala index ebb9ba299..537d4881d 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -14,7 +14,7 @@ object Sbt extends Build override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.5-M2", + version := "0.13.5-SNAPSHOT", publishArtifact in packageDoc := false, scalaVersion := "2.10.4", publishMavenStyle := false, diff --git a/src/main/conscript/sbt/launchconfig b/src/main/conscript/sbt/launchconfig index d4fa5eead..ff85bfccf 100644 --- a/src/main/conscript/sbt/launchconfig +++ b/src/main/conscript/sbt/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.5-M2]} + version: ${sbt.version-read(sbt.version)[0.13.5-SNAPSHOT]} class: sbt.xMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,6 +13,7 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/scalas/launchconfig b/src/main/conscript/scalas/launchconfig index 67d249d13..91882ac37 100644 --- a/src/main/conscript/scalas/launchconfig +++ b/src/main/conscript/scalas/launchconfig @@ -4,7 +4,7 @@ [app] org: 
${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.5-M2]} + version: ${sbt.version-read(sbt.version)[0.13.5-SNAPSHOT]} class: sbt.ScriptMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,6 +13,7 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] diff --git a/src/main/conscript/screpl/launchconfig b/src/main/conscript/screpl/launchconfig index bb259ad7c..b4bbf8354 100644 --- a/src/main/conscript/screpl/launchconfig +++ b/src/main/conscript/screpl/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.5-M2]} + version: ${sbt.version-read(sbt.version)[0.13.5-SNAPSHOT]} class: sbt.ConsoleMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} @@ -13,6 +13,7 @@ [repositories] local typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-snapshots: http://repo.typesafe.com/typesafe/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly maven-central [boot] From b5b07348f0189491ae6be3c2f57ddfbd04665535 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Sun, 16 Mar 2014 16:35:39 +0100 Subject: [PATCH 143/148] Add a pending test for sbt/sbt#1142 Add a scripted test documents the current behavior of incremental compiler when it comes to handling of inherited macros. A whitespace change to a file that inherits a macro triggers recompilation of all files that depend (by composition or inheritance) on that file. 
--- .../inherited-macros/changes/Client.scala | 7 +++++ .../inherited-macros/macro-client/build.sbt | 9 ++++++ .../macro-client/src/main/scala/Client.scala | 7 +++++ .../macro-client/src/main/scala/Foo.scala | 5 ++++ .../src/main/scala/Provider.scala | 7 +++++ .../inherited-macros/pending | 12 ++++++++ .../inherited-macros/project/build.scala | 29 +++++++++++++++++++ 7 files changed, 76 insertions(+) create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/pending create mode 100644 sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala new file mode 100644 index 000000000..19633db64 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala @@ -0,0 +1,7 @@ +package macro + +object Client { + object RealClient extends Provider { + // Some comment... 
+ } +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt new file mode 100644 index 000000000..75588e23c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt @@ -0,0 +1,9 @@ +// Check that a file has not been recompiled during last compilation +InputKey[Unit]("check-not-recompiled") <<= inputTask { (argTask: TaskKey[Seq[String]]) => + (argTask, compile in Compile) map { (args: Seq[String], a: sbt.inc.Analysis) => + assert(args.size == 1) + val fileCompilation = a.apis.internal.collect { case (file, src) if file.name.endsWith(args(0)) => src.compilation }.head + val lastCompilation = a.compilations.allCompilations.last + assert(fileCompilation.startTime != lastCompilation.startTime, "File has been recompiled during last compilation.") + } +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala new file mode 100644 index 000000000..6351461a7 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala @@ -0,0 +1,7 @@ +package macro + +object Client { + object RealClient extends Provider { + + } +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala new file mode 100644 index 000000000..be7a40427 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala @@ -0,0 +1,5 @@ +package macro + +object Foo { + val c = Client.RealClient +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala 
b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala new file mode 100644 index 000000000..14523f149 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala @@ -0,0 +1,7 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +abstract class Provider { + def notImplementedMacro = macro ??? +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/pending b/sbt/src/sbt-test/source-dependencies/inherited-macros/pending new file mode 100644 index 000000000..9a6e7dfcf --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/pending @@ -0,0 +1,12 @@ +> macro-provider/compile + +> macro-client/compile + +# Introduce a comment in Client, which inherits a macro from Provider +$ copy-file changes/Client.scala macro-client/src/main/scala/Client.scala + +> macro-client/compile + +# Object Foo depends on Client via composition, thus a whitespace change to +# Client shouldn't trigger its recompilation +> check-not-recompiled Foo.scala diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala new file mode 100644 index 000000000..27a684ef8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ )//, + //incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) 
+ + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} From 5a40641cc158e370026df263fcdd124f09c6c574 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Sun, 16 Mar 2014 17:09:02 +0100 Subject: [PATCH 144/148] Classes that only inherit a macro don't have a macro Prior to this commit, a class that inherited a macro from another class was considered by incremental compiler as having a macro. Now, only classes that explicitly define a macro are considered as having a macro. This influences decision whether to invalidate (recompile) dependencies of a file that inherits a macro upon a whitespace change. From now on, we don't invalidate dependencies in such case which results in much better incremental compiler experience when macros are being involved. Check #1142 for detailed discussion. The change to the behavior is reflected by marking the source-dependencies/inherited-macros test as passing. The source-dependencies/macro test covers the case of defining the macro directly in source file. Therefore we know that the desired behavior of invalidating dependencies of macros is preserved. Fixes #1142 --- compile/api/src/main/scala/xsbt/api/APIUtil.scala | 8 ++++++++ .../inherited-macros/{pending => test} | 0 2 files changed, 8 insertions(+) rename sbt/src/sbt-test/source-dependencies/inherited-macros/{pending => test} (100%) diff --git a/compile/api/src/main/scala/xsbt/api/APIUtil.scala b/compile/api/src/main/scala/xsbt/api/APIUtil.scala index 96892f3d8..50d287fe4 100644 --- a/compile/api/src/main/scala/xsbt/api/APIUtil.scala +++ b/compile/api/src/main/scala/xsbt/api/APIUtil.scala @@ -29,6 +29,14 @@ object APIUtil { var hasMacro = false + // Don't visit inherited definitions since we consider that a class + // that inherits a macro does not have a macro. 
+ override def visitStructure0(structure: Structure) + { + visitTypes(structure.parents) + visitDefinitions(structure.declared) + } + override def visitModifiers(m: Modifiers) { hasMacro ||= m.isMacro diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/pending b/sbt/src/sbt-test/source-dependencies/inherited-macros/test similarity index 100% rename from sbt/src/sbt-test/source-dependencies/inherited-macros/pending rename to sbt/src/sbt-test/source-dependencies/inherited-macros/test From c4f6217a7071390b51849060a83d993d2c993be7 Mon Sep 17 00:00:00 2001 From: Dan Sanduleac Date: Fri, 4 Apr 2014 10:04:11 +0100 Subject: [PATCH 145/148] Fix URITests to work under Windows --- launch/src/test/scala/URITests.scala | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/launch/src/test/scala/URITests.scala b/launch/src/test/scala/URITests.scala index 42c2dc2bd..1fc18b949 100644 --- a/launch/src/test/scala/URITests.scala +++ b/launch/src/test/scala/URITests.scala @@ -8,14 +8,23 @@ import java.net.URI object URITests extends Properties("URI Tests") { + // Need a platform-specific root, otherwise URI will not be absolute (e.g. if we use a "/a/b/c" path in Windows) + // Note: + // If I use "C:" instead of "/C:", then isAbsolute == true for the resulting URI, but resolve is broken: + // e.g. 
scala> new URI("file", "c:/a/b'/has spaces", null).resolve("a") broken + // res0: java.net.URI = a + // scala> new URI("file", "/c:/a/b'/has spaces", null).resolve("a") working + // res1: java.net.URI = file:/c:/a/b'/a + val Root = if (xsbt.boot.Pre.isWindows) "/C:/" else "/" + val FileProtocol = "file" property("directoryURI adds trailing slash") = secure { - val dirURI = directoryURI(new File("/a/b/c")) - val directURI = filePathURI("/a/b/c/") + val dirURI = directoryURI(new File(Root + "a/b/c")) + val directURI = filePathURI(Root + "a/b/c/") dirURI == directURI } property("directoryURI preserves trailing slash") = secure { - directoryURI(new File("/a/b/c/")) == filePathURI("/a/b/c/") + directoryURI(new File(Root + "a/b/c/")) == filePathURI(Root + "a/b/c/") } property("filePathURI encodes spaces") = secure { @@ -33,18 +42,18 @@ object URITests extends Properties("URI Tests") } property("filePathURI and File.toURI agree for absolute file") = secure { - val s = "/a/b'/has spaces" + val s = Root + "a/b'/has spaces" val viaPath = filePathURI(s) - val viaFile = (new File(s)).toURI + val viaFile = new File(s).toURI s"via path: $viaPath" |: s"via file: $viaFile" |: (viaPath == viaFile) } property("filePathURI supports URIs") = secure { - val s = "file:///is/a/uri/with%20spaces" - val decoded = "/is/a/uri/with spaces" - val encoded = "/is/a/uri/with%20spaces" + val s = s"file://${Root}is/a/uri/with%20spaces" + val decoded = Root + "is/a/uri/with spaces" + val encoded = Root + "is/a/uri/with%20spaces" val fpURI = filePathURI(s) val directURI = new URI(s) s"filePathURI: $fpURI" |: From 60a457d0833d63a67d7c0bbf0aced3305a65dab5 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 4 Apr 2014 16:38:44 -0400 Subject: [PATCH 146/148] Ensure that if artifact is published, we overwrite default checksums. Fixes # 1233 * Add a new "shim" for RepositoryResolvers that modifies the "put" method so that it will ignore the overwrite flag for checksums. 
--- ivy/src/main/scala/sbt/ConvertResolver.scala | 77 +++++++++++++++++++- 1 file changed, 74 insertions(+), 3 deletions(-) diff --git a/ivy/src/main/scala/sbt/ConvertResolver.scala b/ivy/src/main/scala/sbt/ConvertResolver.scala index a93a57011..ab1cf95ce 100644 --- a/ivy/src/main/scala/sbt/ConvertResolver.scala +++ b/ivy/src/main/scala/sbt/ConvertResolver.scala @@ -10,13 +10,84 @@ import core.module.id.ModuleRevisionId import core.module.descriptor.DependencyDescriptor import core.resolve.ResolveData import core.settings.IvySettings -import plugins.resolver.{BasicResolver, DependencyResolver, IBiblioResolver} +import plugins.resolver.{BasicResolver, DependencyResolver, IBiblioResolver, RepositoryResolver} import plugins.resolver.{AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver} import plugins.repository.url.{URLRepository => URLRepo} import plugins.repository.file.{FileRepository => FileRepo, FileResource} +import java.io.File +import org.apache.ivy.util.ChecksumHelper +import org.apache.ivy.core.module.descriptor.{Artifact=>IArtifact} + private object ConvertResolver { + /** This class contains all the reflective lookups used in the + * checksum-friendly URL publishing shim. 
+ */ + private object ChecksumFriendlyURLResolver { + import java.lang.reflect.AccessibleObject + private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] = + try { + val cls = classOf[RepositoryResolver] + val thing = f(cls) + thing.setAccessible(true) + Some(thing) + } catch { + case e: java.lang.ReflectiveOperationException => None + } + private val signerNameField: Option[java.lang.reflect.Field] = + reflectiveLookup(_.getDeclaredField("signerName")) + private val putChecksumMethod: Option[java.lang.reflect.Method] = + reflectiveLookup(_.getDeclaredMethod("putChecksum", + classOf[IArtifact], classOf[File], classOf[String], + classOf[Boolean], classOf[String])) + private val putSignatureMethod: Option[java.lang.reflect.Method] = + reflectiveLookup(_.getDeclaredMethod("putSignature", + classOf[IArtifact], classOf[File], classOf[String], + classOf[Boolean])) + } + /** + * The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories + * will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore + * if we succeed in publishing an artifact, we need to just blast the checksums in place. + * This acts as a "shim" on RepositoryResolvers so that we can hook our methods into + * both the IBiblioResolver + URLResolver without having to duplicate the code in two + * places. However, this does mean our use of reflection is awesome. + * + * TODO - See about contributing back to ivy. + */ + private trait ChecksumFriendlyURLResolver extends RepositoryResolver { + import ChecksumFriendlyURLResolver._ + private def signerName: String = signerNameField match { + case Some(field) => field.get(this).asInstanceOf[String] + case None => null + } + override protected def put(artifact: IArtifact, src: File, dest: String, overwrite: Boolean): Unit = { + // verify the checksum algorithms before uploading artifacts! 
+ val checksums = getChecksumAlgorithms() + val repository = getRepository() + for { + checksum <- checksums + if !ChecksumHelper.isKnownAlgorithm(checksum) + } throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum) + repository.put(artifact, src, dest, overwrite); + // Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so + // we need to overwrite what it has. + for (checksum <- checksums) { + putChecksumMethod match { + case Some(method) => method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum) + case None => // TODO - issue warning? + } + } + if (signerName != null) { + putSignatureMethod match { + case None => () + case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean) + } + } + } + } + /** Converts the given sbt resolver into an Ivy resolver..*/ def apply(r: Resolver, settings: IvySettings, log: Logger) = { @@ -25,7 +96,7 @@ private object ConvertResolver case repo: MavenRepository => { val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern)) - final class PluginCapableResolver extends IBiblioResolver with DescriptorRequired { + final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired { def setPatterns() { // done this way for access to protected methods. setArtifactPatterns(pattern) setIvyPatterns(pattern) @@ -77,7 +148,7 @@ private object ConvertResolver } case repo: URLRepository => { - val resolver = new URLResolver with DescriptorRequired + val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired resolver.setName(repo.name) initializePatterns(resolver, repo.patterns, settings) resolver From e8dd19cb92b3c3befa00d02c899629eeaf279153 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Fri, 4 Apr 2014 20:25:37 -0400 Subject: [PATCH 147/148] Remove JDK7 features from Resolver shim. 
--- ivy/src/main/scala/sbt/ConvertResolver.scala | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/ivy/src/main/scala/sbt/ConvertResolver.scala b/ivy/src/main/scala/sbt/ConvertResolver.scala index ab1cf95ce..74c5c119c 100644 --- a/ivy/src/main/scala/sbt/ConvertResolver.scala +++ b/ivy/src/main/scala/sbt/ConvertResolver.scala @@ -25,15 +25,22 @@ private object ConvertResolver * checksum-friendly URL publishing shim. */ private object ChecksumFriendlyURLResolver { - import java.lang.reflect.AccessibleObject + // TODO - When we dump JDK6 support we can remove this hackery + // import java.lang.reflect.AccessibleObject + type AccessibleObject = { + def setAccessible(value: Boolean): Unit + } private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] = try { val cls = classOf[RepositoryResolver] val thing = f(cls) + import scala.language.reflectiveCalls thing.setAccessible(true) Some(thing) } catch { - case e: java.lang.ReflectiveOperationException => None + case (_: java.lang.NoSuchFieldException) | + (_: java.lang.SecurityException) | + (_: java.lang.NoSuchMethodException) => None } private val signerNameField: Option[java.lang.reflect.Field] = reflectiveLookup(_.getDeclaredField("signerName")) From d8ee16c600b54105b270a90c26fa37daab412e6f Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 5 Apr 2014 01:51:08 -0700 Subject: [PATCH 148/148] Roll back sbt/sbt@482f99 (aggressive deletion of cache). Fixes #1091. * sbt 0.12.3 introduced sbt/sbt@482f99 to fix #637 and #641. * The underlying issue of #641 was an incorrect classifier, and it was fixed in sbt/sbt@718fa9 for #683 and shipped as sbt 0.13.0. 
--- ivy/src/main/scala/sbt/Ivy.scala | 36 -------------------------------- 1 file changed, 36 deletions(-) diff --git a/ivy/src/main/scala/sbt/Ivy.scala b/ivy/src/main/scala/sbt/Ivy.scala index 2408992e6..a6519c6bc 100644 --- a/ivy/src/main/scala/sbt/Ivy.scala +++ b/ivy/src/main/scala/sbt/Ivy.scala @@ -7,7 +7,6 @@ import Resolver.PluginPattern import java.io.File import java.net.URI -import java.text.ParseException import java.util.concurrent.Callable import java.util.{Collection, Collections => CS} import CS.singleton @@ -24,9 +23,7 @@ import core.settings.IvySettings import plugins.latest.LatestRevisionStrategy import plugins.matcher.PatternMatcher import plugins.parser.m2.PomModuleDescriptorParser -import plugins.repository.ResourceDownloader import plugins.resolver.{ChainResolver, DependencyResolver} -import plugins.resolver.util.ResolvedResource import util.{Message, MessageLogger} import util.extendable.ExtendableItem @@ -358,41 +355,8 @@ private object IvySbt case pr: ProjectResolver => true case _ => false } - /** This is overridden to delete outofdate artifacts of changing modules that are not listed in the metadata. - * This occurs for artifacts with classifiers, for example. */ - @throws(classOf[ParseException]) - override def cacheModuleDescriptor(resolver: DependencyResolver, mdRef: ResolvedResource, dd: DependencyDescriptor, moduleArtifact: IArtifact, downloader: ResourceDownloader, options: CacheMetadataOptions): ResolvedModuleRevision = - { - val rmrRaw = super.cacheModuleDescriptor(null, mdRef, dd, moduleArtifact, downloader, options) - val rmr = resetArtifactResolver(rmrRaw) - val mrid = moduleArtifact.getModuleRevisionId - def shouldClear(): Boolean = rmr != null && - ( (rmr.getReport != null && rmr.getReport.isSearched && isChanging(dd, mrid)) || - isProjectResolver(rmr.getResolver) ) - // only handle changing modules whose metadata actually changed. - // Typically, the publication date in the metadata has to change to get here. 
- if(shouldClear()) { - // this is the locally cached metadata as originally retrieved (e.g. the pom) - val original = rmr.getReport.getOriginalLocalFile - if(original != null) { - // delete all files in subdirectories that are older than the original metadata file's publication date - // The publication date is used because the metadata will be redownloaded for changing files, - // so the last modified time changes, but the publication date doesn't - val pubDate = rmrRaw.getPublicationDate - val lm = if(pubDate eq null) original.lastModified else pubDate.getTime - val indirectFiles = PathFinder(original.getParentFile).*(DirectoryFilter).**(-DirectoryFilter).get.toList - val older = indirectFiles.filter(f => f.lastModified < lm).toList - Message.verbose("Deleting additional old artifacts from cache for changed module " + mrid + older.mkString(":\n\t", "\n\t", "")) - IO.delete(older) - } - } - rmr - } // ignore the original resolver wherever possible to avoid issues like #704 override def saveResolvers(descriptor: ModuleDescriptor, metadataResolverName: String, artifactResolverName: String) {} - - def isChanging(dd: DependencyDescriptor, requestedRevisionId: ModuleRevisionId): Boolean = - !localOnly && (dd.isChanging || requestedRevisionId.getRevision.contains("-SNAPSHOT")) } manager.setArtifactPattern(PluginPattern + manager.getArtifactPattern) manager.setDataFilePattern(PluginPattern + manager.getDataFilePattern)