diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..09b114161 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,19 @@ +language: scala +script: + - sbt "scripted $SCRIPTED_TEST" +env: + - SCRIPTED_TEST=actions/* + - SCRIPTED_TEST=api/* + - SCRIPTED_TEST=compiler-project/* + - SCRIPTED_TEST=dependency-management/* + - SCRIPTED_TEST=java/* + - SCRIPTED_TEST=package/* + - SCRIPTED_TEST=reporter/* + - SCRIPTED_TEST=run/* + - SCRIPTED_TEST=source-dependencies/* + - SCRIPTED_TEST=tests/* +jdk: + - openjdk6 +notifications: + email: + - qbranch@typesafe.com diff --git a/compile/api/src/main/scala/xsbt/api/APIUtil.scala b/compile/api/src/main/scala/xsbt/api/APIUtil.scala index 96892f3d8..50d287fe4 100644 --- a/compile/api/src/main/scala/xsbt/api/APIUtil.scala +++ b/compile/api/src/main/scala/xsbt/api/APIUtil.scala @@ -29,6 +29,14 @@ object APIUtil { var hasMacro = false + // Don't visit inherited definitions since we consider that a class + // that inherits a macro does not have a macro. 
+ override def visitStructure0(structure: Structure) + { + visitTypes(structure.parents) + visitDefinitions(structure.declared) + } + override def visitModifiers(m: Modifiers) { hasMacro ||= m.isMacro diff --git a/compile/inc/src/main/scala/sbt/CompileSetup.scala b/compile/inc/src/main/scala/sbt/CompileSetup.scala index c96cee680..11ecc6805 100644 --- a/compile/inc/src/main/scala/sbt/CompileSetup.scala +++ b/compile/inc/src/main/scala/sbt/CompileSetup.scala @@ -11,7 +11,13 @@ package sbt // because complexity(Equiv[Seq[String]]) > complexity(Equiv[CompileSetup]) // (6 > 4) final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String]) -final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String, val order: CompileOrder) +final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String, + val order: CompileOrder, val nameHashing: Boolean) { + @deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2") + def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = { + this(output, options, compilerVersion, order, false) + } +} object CompileSetup { @@ -21,18 +27,21 @@ object CompileSetup equivOutput.equiv(a.output, b.output) && equivOpts.equiv(a.options, b.options) && equivComp.equiv(a.compilerVersion, b.compilerVersion) && - a.order == b.order // equivOrder.equiv(a.order, b.order) + a.order == b.order && // equivOrder.equiv(a.order, b.order) + a.nameHashing == b.nameHashing } implicit val equivFile: Equiv[File] = new Equiv[File] { def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile } implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] { + implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => og.sourceDirectory) def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match { case (m1: MultipleOutput, m2: 
MultipleOutput) => - m1.outputGroups zip (m2.outputGroups) forall { - case (a,b) => + (m1.outputGroups.length == m2.outputGroups.length) && + (m1.outputGroups.sorted zip m2.outputGroups.sorted forall { + case (a,b) => equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory) - } + }) case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory) case _ => false } @@ -40,12 +49,12 @@ object CompileSetup implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] { def equiv(a: CompileOptions, b: CompileOptions) = (a.options sameElements b.options) && - (a.javacOptions sameElements b.javacOptions) + (a.javacOptions sameElements b.javacOptions) } implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] { def equiv(a: String, b: String) = a == b } - + implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] { def equiv(a: CompileOrder, b: CompileOrder) = a == b } diff --git a/compile/inc/src/main/scala/sbt/inc/Analysis.scala b/compile/inc/src/main/scala/sbt/inc/Analysis.scala index 212b6fc6d..aaa63918d 100644 --- a/compile/inc/src/main/scala/sbt/inc/Analysis.scala +++ b/compile/inc/src/main/scala/sbt/inc/Analysis.scala @@ -56,6 +56,8 @@ trait Analysis object Analysis { lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty) + private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty, + Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty) /** Merge multiple analysis objects into one. Deps will be internalized as needed. 
*/ def merge(analyses: Traversable[Analysis]): Analysis = { diff --git a/compile/inc/src/main/scala/sbt/inc/Changes.scala b/compile/inc/src/main/scala/sbt/inc/Changes.scala index 3fce46738..94bb1ec18 100644 --- a/compile/inc/src/main/scala/sbt/inc/Changes.scala +++ b/compile/inc/src/main/scala/sbt/inc/Changes.scala @@ -6,6 +6,8 @@ package inc import xsbt.api.NameChanges import java.io.File +import xsbti.api.{_internalOnly_NameHashes => NameHashes} +import xsbti.api.{_internalOnly_NameHash => NameHash} final case class InitialChanges(internalSrc: Changes[File], removedProducts: Set[File], binaryDeps: Set[File], external: APIChanges[String]) final class APIChanges[T](val apiChanges: Iterable[APIChange[T]]) @@ -20,8 +22,39 @@ sealed abstract class APIChange[T](val modified: T) * api has changed. The reason is that there's no way to determine if changes to macros implementation * are affecting its users or not. Therefore we err on the side of caution. */ -case class APIChangeDueToMacroDefinition[T](modified0: T) extends APIChange(modified0) -case class SourceAPIChange[T](modified0: T) extends APIChange(modified0) +final case class APIChangeDueToMacroDefinition[T](modified0: T) extends APIChange(modified0) +final case class SourceAPIChange[T](modified0: T) extends APIChange(modified0) +/** + * An APIChange that carries information about modified names. + * + * This class is used only when name hashing algorithm is enabled. + */ +final case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) extends APIChange(modified0) + +/** + * ModifiedNames are determined by comparing name hashes in two versions of an API representation. + * + * Note that we distinguish between sets of regular (non-implicit) and implicit modified names. + * This distinction is needed because the name hashing algorithm makes different decisions based + * on whether modified name is implicit or not. 
Implicit names are much more difficult to handle + * due to difficulty of reasoning about the implicit scope. + */ +final case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) { + override def toString: String = + s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})" +} +object ModifiedNames { + def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = { + val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet) + val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet) + ModifiedNames(modifiedRegularNames, modifiedImplicitNames) + } + private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = { + val differentNameHashes = (xs union ys) diff (xs intersect ys) + differentNameHashes.map(_.name) + } +} + trait Changes[A] { diff --git a/compile/inc/src/main/scala/sbt/inc/Compile.scala b/compile/inc/src/main/scala/sbt/inc/Compile.scala index a5b56a5c5..925e8fd3d 100644 --- a/compile/inc/src/main/scala/sbt/inc/Compile.scala +++ b/compile/inc/src/main/scala/sbt/inc/Compile.scala @@ -146,10 +146,18 @@ private final class AnalysisCallback(internalMap: File => Option[File], external classToSource.put(module, source) } + // empty value used when name hashing algorithm is disabled + private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty) + def api(sourceFile: File, source: SourceAPI) { import xsbt.api.{APIUtil, HashAPI} if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile - publicNameHashes(sourceFile) = (new NameHashing).nameHashes(source) + publicNameHashes(sourceFile) = { + if (nameHashing) + (new NameHashing).nameHashes(source) + else + emptyNameHashes + } val shouldMinimize = !Incremental.apiDebug(options) val savedSource = if (shouldMinimize) APIUtil.minimize(source) else 
source apis(sourceFile) = (HashAPI(source), savedSource) @@ -157,9 +165,9 @@ private final class AnalysisCallback(internalMap: File => Option[File], external def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name) - def nameHashing: Boolean = false // TODO: define the flag in IncOptions which controls this + def nameHashing: Boolean = options.nameHashing - def get: Analysis = addUsedNames( addCompilation( addExternals( addBinaries( addProducts( addSources(Analysis.Empty) ) ) ) ) ) + def get: Analysis = addUsedNames( addCompilation( addExternals( addBinaries( addProducts( addSources(Analysis.empty(nameHashing = nameHashing)) ) ) ) ) ) def addProducts(base: Analysis): Analysis = addAll(base, classes) { case (a, src, (prod, name)) => a.addProduct(src, prod, current product prod, name ) } def addBinaries(base: Analysis): Analysis = addAll(base, binaryDeps)( (a, src, bin) => a.addBinaryDep(src, bin, binaryClassName(bin), current binary bin) ) def addSources(base: Analysis): Analysis = diff --git a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala index 7077d2291..0e634aa4f 100644 --- a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala +++ b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala @@ -51,57 +51,76 @@ final class IncOptions( * Determines whether incremental compiler should recompile all dependencies of a file * that contains a macro definition. */ - val recompileOnMacroDef: Boolean + val recompileOnMacroDef: Boolean, + /** + * Determines whether incremental compiler uses the new algorithm known as name hashing. + * + * This flag is disabled by default so incremental compiler's behavior is the same as in sbt 0.13.0. + * + * IMPLEMENTATION NOTE: + * Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm: + * + * 1. New dependency source tracking is used. See `sbt.inc.Relations` for details. + * 2. 
Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well. + * 3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details. + * + */ + val nameHashing: Boolean ) extends Product with Serializable { /** * Secondary constructor introduced to make IncOptions to be binary compatible with version that didn't have - * `recompileOnMacroDef` filed defined. + * `recompileOnMacroDef` and `nameHashing` fields defined. */ def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = { this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault) + apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault) } def withTransitiveStep(transitiveStep: Int): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withRelationsDebug(relationsDebug: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withApiDebug(apiDebug: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, 
relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } def withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } + + def withNameHashing(nameHashing: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } //- EXPANDED CASE CLASS METHOD BEGIN -// @@ -112,14 +131,14 @@ final class IncOptions( apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory, newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, 
apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") override def productPrefix: String = "IncOptions" @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def productArity: Int = 7 + def productArity: Int = 9 @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") def productElement(x$1: Int): Any = x$1 match { @@ -130,6 +149,8 @@ final class IncOptions( case 4 => IncOptions.this.apiDiffContextSize case 5 => IncOptions.this.apiDumpDirectory case 6 => IncOptions.this.newClassfileManager + case 7 => IncOptions.this.recompileOnMacroDef + case 8 => IncOptions.this.nameHashing case _ => throw new IndexOutOfBoundsException(x$1.toString()) } @@ -149,7 +170,9 @@ final class IncOptions( acc = Statics.mix(acc, apiDiffContextSize) acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory)) acc = Statics.mix(acc, Statics.anyHash(newClassfileManager)) - Statics.finalizeHash(acc, 7) + acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237) + acc = Statics.mix(acc, if (nameHashing) 1231 else 1237) + Statics.finalizeHash(acc, 9) } override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this) @@ -160,7 +183,8 @@ final class IncOptions( transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction && relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug && apiDiffContextSize == IncOptions$1.apiDiffContextSize && apiDumpDirectory == IncOptions$1.apiDumpDirectory && - newClassfileManager == IncOptions$1.newClassfileManager + newClassfileManager == IncOptions$1.newClassfileManager && + recompileOnMacroDef == IncOptions$1.recompileOnMacroDef && nameHashing == IncOptions$1.nameHashing })) } //- EXPANDED CASE CLASS METHOD END -// @@ 
-168,6 +192,7 @@ final class IncOptions( object IncOptions extends Serializable { private val recompileOnMacroDefDefault: Boolean = true + private val nameHashingDefault: Boolean = false val Default = IncOptions( // 1. recompile changed sources // 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2). @@ -179,7 +204,8 @@ object IncOptions extends Serializable { apiDiffContextSize = 5, apiDumpDirectory = None, newClassfileManager = ClassfileManager.deleteImmediately, - recompileOnMacroDef = recompileOnMacroDefDefault + recompileOnMacroDef = recompileOnMacroDefDefault, + nameHashing = nameHashingDefault ) //- EXPANDED CASE CLASS METHOD BEGIN -// final override def toString(): String = "IncOptions" @@ -192,9 +218,10 @@ object IncOptions extends Serializable { } def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], - newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean): IncOptions = { + newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean, + nameHashing: Boolean): IncOptions = { new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef) + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) } @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") def unapply(x$0: IncOptions): Option[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = { @@ -217,7 +244,8 @@ object IncOptions extends Serializable { private val apiDebugKey = "apiDebug" private val apiDumpDirectoryKey = "apiDumpDirectory" private val apiDiffContextSizeKey = "apiDiffContextSize" - private val recompileOnMacroDefKey = "recompileOnMacroDefKey" + private val recompileOnMacroDefKey = "recompileOnMacroDef" + private 
val nameHashingKey = "nameHashing" def fromStringMap(m: java.util.Map[String, String]): IncOptions = { // all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API @@ -251,9 +279,13 @@ object IncOptions extends Serializable { val k = recompileOnMacroDefKey if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef } + def getNameHashing: Boolean = { + val k = nameHashingKey + if (m.containsKey(k)) m.get(k).toBoolean else Default.nameHashing + } new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize, - getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef) + getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing) } def toStringMap(o: IncOptions): java.util.Map[String, String] = { @@ -265,6 +297,7 @@ object IncOptions extends Serializable { o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString)) m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString) m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString) + m.put(nameHashingKey, o.nameHashing.toString) m } } diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index 74ad517ac..f64c284c5 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -21,20 +21,44 @@ object Incremental log: Logger, options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = { - val initialChanges = changedInitial(entry, sources, previous, current, forEntry, options, log) + val incremental: IncrementalCommon = + if (!options.nameHashing) + new IncrementalDefaultImpl(log, options) + else + new IncrementalNameHashing(log, options) + val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) val binaryChanges = new DependencyChanges { val modifiedBinaries 
= initialChanges.binaryDeps.toArray val modifiedClasses = initialChanges.external.allModified.toArray def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty } - val initialInv = invalidateInitial(previous.relations, initialChanges, log) + val initialInv = incremental.invalidateInitial(previous.relations, initialChanges) log.debug("All initially invalidated sources: " + initialInv + "\n") val analysis = manageClassfiles(options) { classfileManager => - cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1, log, options) + incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1) } (!initialInv.isEmpty, analysis) } + // the name of system property that was meant to enable debugging mode of incremental compiler but + // it ended up being used just to enable debugging of relations. That's why if you migrate to new + // API for configuring incremental compiler (IncOptions) it's enough to control value of `relationsDebug` + // flag to achieve the same effect as using `incDebugProp`. 
+ @deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2") + val incDebugProp = "xsbt.inc.debug" + + private[inc] val apiDebugProp = "xsbt.api.debug" + private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) + + private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis = + prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately()) + + private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis = + { + classfileManager.delete( invalidatedSrcs.flatMap(previous.relations.products) ) + previous -- invalidatedSrcs + } + private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T = { val classfileManager = options.newClassfileManager() @@ -46,10 +70,12 @@ object Incremental result } - val incDebugProp = "xsbt.inc.debug" - private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(incDebugProp) - val apiDebugProp = "xsbt.api.debug" - def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) +} + + +private abstract class IncrementalCommon(log: Logger, options: IncOptions) { + + private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp) // setting the related system property to true will skip checking that the class name // still comes from the same classpath entry. 
This can workaround bugs in classpath construction, @@ -58,16 +84,16 @@ object Incremental // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success // TODO: full external name changes, scopeInvalidations - @tailrec def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, - doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int, log: Logger, options: IncOptions): Analysis = + @tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, + doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis = if(invalidatedRaw.isEmpty) previous else { def debug(s: => String) = if (incDebug(options)) log.debug(s) else () val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations) - val invalidated = expand(withPackageObjects, allSources, log, options) - val pruned = prune(invalidated, previous, classfileManager) + val invalidated = expand(withPackageObjects, allSources) + val pruned = Incremental.prune(invalidated, previous, classfileManager) debug("********* Pruned: \n" + pruned.relations + "\n*********") val fresh = doCompile(invalidated, binaryChanges) @@ -76,18 +102,18 @@ object Incremental val merged = pruned ++ fresh//.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis) debug("********* Merged: \n" + merged.relations + "\n*********") - val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _, log, options) + val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _) debug("\nChanges:\n" + incChanges) val transitiveStep = options.transitiveStep - val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, 
invalidated, cycleNum >= transitiveStep, log) - cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum+1, log, options) + val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep) + cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum+1) } private[this] def emptyChanges: DependencyChanges = new DependencyChanges { val modifiedBinaries = new Array[File](0) val modifiedClasses = new Array[String](0) def isEmpty = true } - private[this] def expand(invalidated: Set[File], all: Set[File], log: Logger, options: IncOptions): Set[File] = { + private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = { val recompileAllFraction = options.recompileAllFraction if(invalidated.size > all.size * recompileAllFraction) { log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction*100.0) + "% of all sources") @@ -96,10 +122,7 @@ object Incremental else invalidated } - // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error - // This might be too conservative: we probably only need package objects for packages of invalidated sources. - private[this] def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = - invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" } + protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] /** * Logs API changes using debug-level logging. The API are obtained using the APIDiff class. @@ -107,14 +130,15 @@ object Incremental * NOTE: This method creates a new APIDiff instance on every invocation. 
*/ private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source, - newAPIMapping: T => Source, log: Logger, options: IncOptions): Unit = { + newAPIMapping: T => Source): Unit = { val contextSize = options.apiDiffContextSize try { val apiDiff = new APIDiff apiChanges foreach { case APIChangeDueToMacroDefinition(src) => log.debug(s"Public API is considered to be changed because $src contains a macro definition.") - case SourceAPIChange(src) => + case apiChange@(_: SourceAPIChange[T] | _: NamesChange[T]) => + val src = apiChange.modified val oldApi = oldAPIMapping(src) val newApi = newAPIMapping(src) val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize) @@ -138,19 +162,19 @@ object Incremental * providing the API before and after the last step. The functions should return * an empty API if the file did not/does not exist. */ - def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source, log: Logger, options: IncOptions): APIChanges[T] = + def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] = { val oldApis = lastSources.toSeq map oldAPI val newApis = lastSources.toSeq map newAPI - val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi, log, options) } + val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) } - if (apiDebug(options) && apiChanges.nonEmpty) { - logApiChanges(apiChanges, oldAPI, newAPI, log, options) + if (Incremental.apiDebug(options) && apiChanges.nonEmpty) { + logApiChanges(apiChanges, oldAPI, newAPI) } new APIChanges(apiChanges) } - def sameSource[T](src: T, a: Source, b: Source, log: Logger, options: IncOptions): Option[APIChange[T]] = { + def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { // Clients of a modified source 
file (ie, one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled. val hasMacro = a.hasMacro || b.hasMacro if (shortcutSameSource(a, b)) { @@ -158,18 +182,11 @@ object Incremental } else { if (hasMacro && options.recompileOnMacroDef) { Some(APIChangeDueToMacroDefinition(src)) - } else sameAPI(src, a, b, log) + } else sameAPI(src, a, b) } } - def sameAPI[T](src: T, a: Source, b: Source, log: Logger): Option[SourceAPIChange[T]] = { - if (SameAPI(a,b)) - None - else { - val sourceApiChange = SourceAPIChange(src) - Some(sourceApiChange) - } - } + protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep) def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs){ @@ -177,15 +194,15 @@ object Incremental } def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps, - forEntry: File => Option[Analysis], options: IncOptions, log: Logger)(implicit equivS: Equiv[Stamp]): InitialChanges = + forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges = { val previous = previousAnalysis.stamps val previousAPIs = previousAnalysis.apis val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv( previous.internalSource(f), current.internalSource(f) ) ) val removedProducts = previous.allProducts.filter( p => !equivS.equiv( previous.product(p), current.product(p) ) ).toSet - val binaryDepChanges = previous.allBinaries.filter( externalBinaryModified(entry, forEntry, previous, current, log)).toSet - val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry), log, options) + val binaryDepChanges = previous.allBinaries.filter( 
externalBinaryModified(entry, forEntry, previous, current)).toSet + val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry)) InitialChanges(srcChanges, removedProducts, binaryDepChanges, extChanges ) } @@ -199,14 +216,14 @@ object Incremental val (changed, unmodified) = inBoth.partition(existingModified) } - def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean, log: Logger): Set[File] = + def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] = { val dependsOnSrc = previous.usesInternalSrc _ val propagated = if(transitive) - transitiveDependencies(dependsOnSrc, changes.allModified.toSet, log) + transitiveDependencies(dependsOnSrc, changes.allModified.toSet) else - invalidateIntermediate(previous, changes, log) + invalidateIntermediate(previous, changes) val dups = invalidateDuplicates(previous) if(dups.nonEmpty) @@ -227,28 +244,27 @@ object Incremental /** Returns the transitive source dependencies of `initial`. * Because the intermediate steps do not pull in cycles, this result includes the initial files * if they are part of a cycle containing newly invalidated files . 
*/ - def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File], log: Logger): Set[File] = + def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] = { - val transitiveWithInitial = transitiveDeps(initial, log)(dependsOnSrc) - val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc, log) + val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc) + val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc) log.debug("Final step, transitive dependencies:\n\t" + transitivePartial) transitivePartial } /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ - def invalidateInitial(previous: Relations, changes: InitialChanges, log: Logger): Set[File] = + def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] = { val srcChanges = changes.internalSrc val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed val byProduct = changes.removedProducts.flatMap(previous.produced) val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary) - val externalModifiedSources = changes.external.allModified.toSet - val byExtSrcDep = invalidateByExternal(previous, externalModifiedSources, log) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations + val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations checkAbsolute(srcChanges.added.toList) log.debug( "\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed + "\nRemoved products: " + changes.removedProducts + - "\nModified external sources: " + externalModifiedSources + + "\nExternal API changes: " + changes.external + "\nModified binary dependencies: " + 
changes.binaryDeps + "\nInitial directly invalidated sources: " + srcDirect + "\n\nSources indirectly invalidated by:" + @@ -273,64 +289,48 @@ object Incremental } } - /** Sources invalidated by `external` sources in other projects according to the previous `relations`. */ - def invalidateByExternal(relations: Relations, external: Set[String], log: Logger): Set[File] = - { - // Propagate public inheritance dependencies transitively. - // This differs from normal because we need the initial crossing from externals to sources in this project. - val externalInheritedR = relations.publicInherited.external - val byExternalInherited = external flatMap externalInheritedR.reverse - val internalInheritedR = relations.publicInherited.internal - val transitiveInherited = transitiveDeps(byExternalInherited, log)(internalInheritedR.reverse _) - - // Get the direct dependencies of all sources transitively invalidated by inheritance - val directA = transitiveInherited flatMap relations.direct.internal.reverse - // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive. - val directB = external flatMap relations.direct.external.reverse - transitiveInherited ++ directA ++ directB + def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = { + (externalAPIChanges.apiChanges.flatMap { externalAPIChange => + invalidateByExternal(relations, externalAPIChange) + }).toSet } + + /** Sources invalidated by `external` sources in other projects according to the previous `relations`. */ + protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] + /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. 
*/ - def invalidateIntermediate(relations: Relations, changes: APIChanges[File], log: Logger): Set[File] = + def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] = { - def reverse(r: Relations.Source) = r.internal.reverse _ - invalidateSources(reverse(relations.direct), reverse(relations.publicInherited), changes, log) + invalidateSources(relations, changes) } /** Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not * included in a cycle with newly invalidated sources. */ - private[this] def invalidateSources(directDeps: File => Set[File], publicInherited: File => Set[File], changes: APIChanges[File], log: Logger): Set[File] = + private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] = { val initial = changes.allModified.toSet - log.debug("Invalidating by inheritance (transitively)...") - val transitiveInherited = transitiveDeps(initial, log)(publicInherited) - log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) - val direct = transitiveInherited flatMap directDeps - log.debug("Invalidated by direct dependency: " + direct) - val all = transitiveInherited ++ direct - includeInitialCond(initial, all, f => directDeps(f) ++ publicInherited(f), log) + val all = (changes.apiChanges flatMap { change => + invalidateSource(relations, change) + }).toSet + includeInitialCond(initial, all, allDeps(relations)) } + protected def allDeps(relations: Relations): File => Set[File] + + protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] + /** Conditionally include initial sources that are dependencies of newly invalidated sources. ** Initial sources included in this step can be because of a cycle, but not always. 
*/ - private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File], log: Logger): Set[File] = + private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] = { val newInv = currentInvalidations -- initial log.debug("New invalidations:\n\t" + newInv) - val transitiveOfNew = transitiveDeps(newInv, log)(allDeps) + val transitiveOfNew = transitiveDeps(newInv)(allDeps) val initialDependsOnNew = transitiveOfNew & initial log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew) newInv ++ initialDependsOnNew } - def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis = - prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately()) - - def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis = - { - classfileManager.delete( invalidatedSrcs.flatMap(previous.relations.products) ) - previous -- invalidatedSrcs - } - - def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps, log: Logger)(implicit equivS: Equiv[Stamp]): File => Boolean = + def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean = dependsOn => { def inv(reason: String): Boolean = { @@ -382,7 +382,7 @@ object Incremental def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource def orTrue(o: Option[Boolean]): Boolean = o getOrElse true - private[this] def transitiveDeps[T](nodes: Iterable[T], log: Logger)(dependencies: T => Iterable[T]): Set[T] = + protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = { val xs = new collection.mutable.HashSet[T] def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to)) 
@@ -442,3 +442,133 @@ object Incremental def properSubPkg(testParent: Seq[String], testSub: Seq[String]) = testParent.length < testSub.length && testSub.startsWith(testParent) def pkgs(api: Source) = names(api :: Nil).map(pkg)*/ } + +private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { + + // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error + // This might be too conservative: we probably only need package objects for packages of invalidated sources. + override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = + invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" } + + override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = { + if (SameAPI(a,b)) + None + else { + val sourceApiChange = SourceAPIChange(src) + Some(sourceApiChange) + } + } + + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified + // Propagate public inheritance dependencies transitively. + // This differs from normal because we need the initial crossing from externals to sources in this project. + val externalInheritedR = relations.publicInherited.external + val byExternalInherited = externalInheritedR.reverse(modified) + val internalInheritedR = relations.publicInherited.internal + val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) + + // Get the direct dependencies of all sources transitively invalidated by inheritance + val directA = transitiveInherited flatMap relations.direct.internal.reverse + // Get the sources that directly depend on externals. 
This includes non-inheritance dependencies and is not transitive. + val directB = relations.direct.external.reverse(modified) + transitiveInherited ++ directA ++ directB + } + + override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + def reverse(r: Relations.Source) = r.internal.reverse _ + val directDeps: File => Set[File] = reverse(relations.direct) + val publicInherited: File => Set[File] = reverse(relations.publicInherited) + log.debug("Invalidating by inheritance (transitively)...") + val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) + log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) + val direct = transitiveInherited flatMap directDeps + log.debug("Invalidated by direct dependency: " + direct) + transitiveInherited ++ direct + } + + override protected def allDeps(relations: Relations): File => Set[File] = + f => relations.direct.internal.reverse(f) + +} + +/** + * Implementation of incremental algorithm known as "name hashing". It differs from the default implementation + * by applying pruning (filter) of member reference dependencies based on used and modified simple names. + * + * See MemberReferenceInvalidationStrategy for some more information. + */ +private final class IncrementalNameHashing(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { + + private val memberRefInvalidator = new MemberRefInvalidator(log) + + // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error + // This might be too conservative: we probably only need package objects for packages of invalidated sources. 
+ override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = + invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" } + + override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { + if (SameAPI(a,b)) + None + else { + val aNameHashes = a._internalOnly_nameHashes + val bNameHashes = b._internalOnly_nameHashes + val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes) + val apiChange = NamesChange(src, modifiedNames) + Some(apiChange) + } + } + + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified + val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange) + log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.") + // Propagate inheritance dependencies transitively. + // This differs from normal because we need the initial crossing from externals to sources in this project. 
+ val externalInheritanceR = relations.inheritance.external + val byExternalInheritance = externalInheritanceR.reverse(modified) + log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).") + val transitiveInheritance = byExternalInheritance flatMap { file => + invalidateByInheritance(relations, file) + } + val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal, + relations.names, externalAPIChange) + val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external, + relations.names, externalAPIChange) + + // Get the member reference dependencies of all sources transitively invalidated by inheritance + log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.") + val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal + // Get the sources that depend on externals by member reference. + // This includes non-inheritance dependencies and is not transitive. 
+ log.debug(s"Getting sources that directly depend on (external) $modified.") + val memberRefB = memberRefInvalidationExternal(modified) + transitiveInheritance ++ memberRefA ++ memberRefB + } + + private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = { + val inheritanceDeps = relations.inheritance.internal.reverse _ + log.debug(s"Invalidating (transitively) by inheritance from $modified...") + val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps) + log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance) + transitiveInheritance + } + + override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + log.debug(s"Invalidating ${change.modified}...") + val transitiveInheritance = invalidateByInheritance(relations, change.modified) + val reasonForInvalidation = memberRefInvalidator.invalidationReason(change) + log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.") + val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal, + relations.names, change) + val memberRef = transitiveInheritance flatMap memberRefInvalidation + val all = transitiveInheritance ++ memberRef + all + } + + override protected def allDeps(relations: Relations): File => Set[File] = + f => relations.memberRef.internal.reverse(f) + +} diff --git a/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala b/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala new file mode 100644 index 000000000..22537c78d --- /dev/null +++ b/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala @@ -0,0 +1,124 @@ +package sbt.inc + +import sbt.Relation +import java.io.File +import sbt.Logger +import xsbt.api.APIUtil + +/** + * Implements various strategies for invalidating dependencies introduced by member reference. 
+ * + * The strategy is represented as function T => Set[File] where T is a source file that other + * source files depend on. When you apply that function to given element `src` you get set of + * files that depend on `src` by member reference and should be invalidated due to api change + * that was passed to a method constructing that function. There are two questions that arise: + * + * 1. Why is signature T => Set[File] and not T => Set[T] or File => Set[File]? + * 2. Why would we apply that function to any other `src` that then one that got modified + * and the modification is described by APIChange? + * + * Let's address the second question with the following example of source code structure: + * + * // A.scala + * class A + * + * // B.scala + * class B extends A + * + * // C.scala + * class C { def foo(a: A) = ??? } + * + * // D.scala + * class D { def bar(b: B) = ??? } + * + * Member reference dependencies on A.scala are B.scala, C.scala. When the api of A changes + * then we would consider B and C for invalidation. However, B is also a dependency by inheritance + * so we always invalidate it. The api change to A is relevant when B is considered (because + * of how inheritance works) so we would invalidate B by inheritance and then we would like to + * invalidate member reference dependencies of B as well. In other words, we have a function + * because we want to apply it (with the same api change in mind) to all src files invalidated + * by inheritance of the originally modified file. + * + * The first question is a bit more straightforward to answer. We always invalidate internal + * source files (in given project) that are represented as File but they might depend either on + * internal source files (then T=File) or they can depend on external class name (then T=String). + * + * The specific invalidation strategy is determined based on APIChange that describes a change to api + * of a single source file. 
+ * + * For example, if we get APIChangeDueToMacroDefinition then we invalidate all member reference + * dependencies unconditionally. On the other hand, if api change is due to modified name hashes + * of regular members then we'll invalidate sources that use those names. + */ +private[inc] class MemberRefInvalidator(log: Logger) { + def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]): + T => Set[File] = apiChange match { + case _: APIChangeDueToMacroDefinition[_] => + new InvalidateUnconditionally(memberRef) + case NamesChange(_, modifiedNames) if !modifiedNames.implicitNames.isEmpty => + new InvalidateUnconditionally(memberRef) + case NamesChange(modifiedSrcFile, modifiedNames) => + new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames) + case _: SourceAPIChange[_] => + sys.error(wrongAPIChangeMsg) + } + + def invalidationReason(apiChange: APIChange[_]): String = apiChange match { + case APIChangeDueToMacroDefinition(modifiedSrcFile) => + s"The $modifiedSrcFile source file declares a macro." + case NamesChange(modifiedSrcFile, modifiedNames) if !modifiedNames.implicitNames.isEmpty => + s"""|The $modifiedSrcFile source file has the following implicit definitions changed: + |\t${modifiedNames.implicitNames.mkString(", ")}.""".stripMargin + case NamesChange(modifiedSrcFile, modifiedNames) => + s"""|The $modifiedSrcFile source file has the following regular definitions changed: + |\t${modifiedNames.regularNames.mkString(", ")}.""".stripMargin + case _: SourceAPIChange[_] => + sys.error(wrongAPIChangeMsg) + } + + private val wrongAPIChangeMsg = + "MemberReferenceInvalidator.get should be called when name hashing is enabled " + + "and in that case we shouldn't have SourceAPIChange as an api change." 
+ + private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) { + def apply(from: T): Set[File] = { + val invalidated = memberRef.reverse(from) + if (!invalidated.isEmpty) + log.debug(s"The following member ref dependencies of $from are invalidated:\n" + + formatInvalidated(invalidated)) + invalidated + } + private def formatInvalidated(invalidated: Set[File]): String = { + val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath) + sortedFiles.map(file => "\t"+file).mkString("\n") + } + } + + private class NameHashFilteredInvalidator[T]( + usedNames: Relation[File, String], + memberRef: Relation[File, T], + modifiedNames: Set[String]) extends (T => Set[File]) { + + def apply(to: T): Set[File] = { + val dependent = memberRef.reverse(to) + filteredDependencies(dependent) + } + private def filteredDependencies(dependent: Set[File]): Set[File] = { + dependent.filter { + case from if APIUtil.isScalaSourceName(from.getName) => + val usedNamesInDependent = usedNames.forward(from) + val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent + if (modifiedAndUsedNames.isEmpty) { + log.debug(s"None of the modified names appears in $from. 
This dependency is not being considered for invalidation.") + false + } else { + log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames") + true + } + case from => + log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from") + true + } + } + } +} diff --git a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala index 5fc1bafba..2c711d14f 100644 --- a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala +++ b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala @@ -41,7 +41,8 @@ class AggressiveCompile(cacheFile: File) skip: Boolean = false, incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = { - val setup = new CompileSetup(output, new CompileOptions(options, javacOptions), compiler.scalaInstance.actualVersion, compileOrder) + val setup = new CompileSetup(output, new CompileOptions(options, javacOptions), + compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing) compile1(sources, classpath, setup, progress, store, analysisMap, definesClass, compiler, javac, reporter, skip, cache, incrementalCompilerOptions) } @@ -61,7 +62,7 @@ class AggressiveCompile(cacheFile: File) cache: GlobalsCache, incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = { - val (previousAnalysis, previousSetup) = extract(store.get()) + val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions) if(skip) previousAnalysis else { @@ -144,6 +145,12 @@ class AggressiveCompile(cacheFile: File) val sourcesSet = sources.toSet val analysis = previousSetup match { + case Some(previous) if previous.nameHashing != currentSetup.nameHashing => + // if the value of `nameHashing` flag has changed we have to throw away + // previous Analysis completely and start with empty Analysis object + // that supports the 
particular value of the `nameHashing` flag. + // Otherwise we'll be getting UnsupportedOperationExceptions + Analysis.empty(currentSetup.nameHashing) case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis case _ => Incremental.prune(sourcesSet, previousAnalysis) } @@ -169,11 +176,11 @@ class AggressiveCompile(cacheFile: File) if(!combined.isEmpty) log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "...")) } - private def extract(previous: Option[(Analysis, CompileSetup)]): (Analysis, Option[CompileSetup]) = + private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) = previous match { case Some((an, setup)) => (an, Some(setup)) - case None => (Analysis.Empty, None) + case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None) } def javaOnly(f: File) = f.getName.endsWith(".java") diff --git a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala index 8600f6a70..5028c7996 100644 --- a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala +++ b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala @@ -36,9 +36,22 @@ object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] def readCache(file: File): Maybe[(Analysis, CompileSetup)] = try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() } + @deprecated("Use overloaded variant which takes `IncOptions` as parameter.", "0.13.2") def readAnalysis(file: File): Analysis = try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty } + def readAnalysis(file: File, incOptions: IncOptions): Analysis = + try { readCacheUncaught(file)._1 } catch { + case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing) + } + def readCacheUncaught(file: File): (Analysis, 
CompileSetup) = - Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) } + Using.fileReader(IO.utf8)(file) { reader => + try { + TextAnalysisFormat.read(reader) + } catch { + case ex: sbt.inc.ReadException => + throw new java.io.IOException(s"Error while reading $file", ex) + } + } } diff --git a/compile/interface/src/main/scala/xsbt/Compat.scala b/compile/interface/src/main/scala/xsbt/Compat.scala index 17a1a8f6b..d92ba6e73 100644 --- a/compile/interface/src/main/scala/xsbt/Compat.scala +++ b/compile/interface/src/main/scala/xsbt/Compat.scala @@ -91,4 +91,42 @@ abstract class Compat private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.") private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat + + object MacroExpansionOf { + def unapply(tree: Tree): Option[Tree] = { + + // MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x + object Compat { + class MacroExpansionAttachment(val original: Tree) + + // Trees have no attachments in 2.8.x and 2.9.x + implicit def withAttachments(tree: Tree): WithAttachments = new WithAttachments(tree) + class WithAttachments(val tree: Tree) { + object EmptyAttachments { + def all = Set.empty[Any] + } + val attachments = EmptyAttachments + } + } + import Compat._ + + locally { + // Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all + import global._ // this is where MEA lives in 2.10.x + + // `original` has been renamed to `expandee` in 2.11.x + implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att) + class WithExpandee(att: MacroExpansionAttachment) { + def expandee: Tree = att.original + } + + locally { + import analyzer._ // this is where MEA lives in 2.11.x + tree.attachments.all.collect { + case att: MacroExpansionAttachment => att.expandee + } headOption + } + } + } + } } diff --git 
a/compile/interface/src/main/scala/xsbt/Dependency.scala b/compile/interface/src/main/scala/xsbt/Dependency.scala index e9b482ef9..b8a55c8a9 100644 --- a/compile/interface/src/main/scala/xsbt/Dependency.scala +++ b/compile/interface/src/main/scala/xsbt/Dependency.scala @@ -146,6 +146,8 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile deps.foreach(addDependency) case Template(parents, self, body) => traverseTrees(body) + case MacroExpansionOf(original) => + this.traverse(original) case other => () } super.traverse(tree) diff --git a/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala b/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala index 9f89a3459..6ab01c9eb 100644 --- a/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala +++ b/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala @@ -38,7 +38,7 @@ import scala.tools.nsc._ * The tree walking algorithm walks into TypeTree.original explicitly. * */ -class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) { +class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) extends Compat { import global._ def extract(unit: CompilationUnit): Set[String] = { @@ -53,30 +53,44 @@ class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) { val symbolNameAsString = symbol.name.decode.trim namesBuffer += symbolNameAsString } - def handleTreeNode(node: Tree): Unit = node match { - case _: DefTree | _: Template => () - // turns out that Import node has a TermSymbol associated with it - // I (Grzegorz) tried to understand why it's there and what does it represent but - // that logic was introduced in 2005 without any justification I'll just ignore the - // import node altogether and just process the selectors in the import node - case Import(_, selectors: List[ImportSelector]) => - def usedNameInImportSelector(name: Name): Unit = - if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString - 
selectors foreach { selector => - usedNameInImportSelector(selector.name) - usedNameInImportSelector(selector.rename) - } - // TODO: figure out whether we should process the original tree or walk the type - // the argument for processing the original tree: we process what user wrote - // the argument for processing the type: we catch all transformations that typer applies - // to types but that might be a bad thing because it might expand aliases eagerly which - // not what we need - case t: TypeTree if t.original != null => - t.original.foreach(handleTreeNode) - case t if t.hasSymbol && eligibleAsUsedName(t.symbol) => - addSymbol(t.symbol) - case _ => () + + def handleTreeNode(node: Tree): Unit = { + def handleMacroExpansion(original: Tree): Unit = original.foreach(handleTreeNode) + + def handleClassicTreeNode(node: Tree): Unit = node match { + case _: DefTree | _: Template => () + // turns out that Import node has a TermSymbol associated with it + // I (Grzegorz) tried to understand why it's there and what does it represent but + // that logic was introduced in 2005 without any justification I'll just ignore the + // import node altogether and just process the selectors in the import node + case Import(_, selectors: List[ImportSelector]) => + def usedNameInImportSelector(name: Name): Unit = + if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString + selectors foreach { selector => + usedNameInImportSelector(selector.name) + usedNameInImportSelector(selector.rename) + } + // TODO: figure out whether we should process the original tree or walk the type + // the argument for processing the original tree: we process what user wrote + // the argument for processing the type: we catch all transformations that typer applies + // to types but that might be a bad thing because it might expand aliases eagerly which + // not what we need + case t: TypeTree if t.original != null => + t.original.foreach(handleTreeNode) + case t if t.hasSymbol && 
eligibleAsUsedName(t.symbol) => + addSymbol(t.symbol) + case _ => () + } + + node match { + case MacroExpansionOf(original) => + handleClassicTreeNode(node) + handleMacroExpansion(original) + case _ => + handleClassicTreeNode(node) + } } + tree.foreach(handleTreeNode) namesBuffer.toSet } diff --git a/compile/interface/src/test/scala/xsbt/DependencySpecification.scala b/compile/interface/src/test/scala/xsbt/DependencySpecification.scala index 040ad1d6e..ec2f76ed9 100644 --- a/compile/interface/src/test/scala/xsbt/DependencySpecification.scala +++ b/compile/interface/src/test/scala/xsbt/DependencySpecification.scala @@ -65,6 +65,19 @@ class DependencySpecification extends Specification { inheritance('D) === Set('A, 'C) } + "Extracted source dependencies from macro arguments" in { + val sourceDependencies = extractSourceDependenciesFromMacroArgument + val memberRef = sourceDependencies.memberRef + val inheritance = sourceDependencies.inheritance + + memberRef('A) === Set('B, 'C) + inheritance('A) === Set.empty + memberRef('B) === Set.empty + inheritance('B) === Set.empty + memberRef('C) === Set.empty + inheritance('C) === Set.empty + } + private def extractSourceDependenciesPublic: ExtractedSourceDependencies = { val srcA = "class A" val srcB = "class B extends D[A]" @@ -109,4 +122,25 @@ class DependencySpecification extends Specification { compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD) sourceDependencies } + + private def extractSourceDependenciesFromMacroArgument: ExtractedSourceDependencies = { + val srcA = "class A { println(B.printTree(C.foo)) }" + val srcB = """ + |import scala.language.experimental.macros + |import scala.reflect.macros._ + |object B { + | def printTree(arg: Any) = macro printTreeImpl + | def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = { + | val argStr = arg.tree.toString + | val literalStr = c.universe.Literal(c.universe.Constant(argStr)) + | c.Expr[String](literalStr) + | } + 
|}""".stripMargin + val srcC = "object C { val foo = 1 }" + + val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true) + val sourceDependencies = + compilerForTesting.extractDependenciesFromSrcs(List(Map('B -> srcB, 'C -> srcC), Map('A -> srcA))) + sourceDependencies + } } diff --git a/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala b/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala index 5362b1ca6..cb10d1d53 100644 --- a/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala +++ b/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala @@ -53,15 +53,19 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) { * dependencies between snippets. Source code snippets are identified by symbols. Each symbol should * be associated with one snippet only. * + * Snippets can be grouped to be compiled together in the same compiler run. This is + * useful to compile macros, which cannot be used in the same compilation run that + * defines them. + * * Symbols are used to express extracted dependencies between source code snippets. This way we have * file system-independent way of testing dependencies between source code "files". 
*/ - def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = { - val (symbolsForSrcs, rawSrcs) = srcs.unzip - assert(symbolsForSrcs.distinct.size == symbolsForSrcs.size, - s"Duplicate symbols for srcs detected: $symbolsForSrcs") - val (tempSrcFiles, testCallback) = compileSrcs(rawSrcs: _*) - val fileToSymbol = (tempSrcFiles zip symbolsForSrcs).toMap + def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = { + val rawGroupedSrcs = srcs.map(_.values.toList).toList + val symbols = srcs.map(_.keys).flatten + val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs) + val fileToSymbol = (tempSrcFiles zip symbols).toMap + val memberRefFileDeps = testCallback.sourceDependencies collect { // false indicates that those dependencies are not introduced by inheritance case (target, src, false) => (src, target) @@ -82,40 +86,64 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) { // convert all collections to immutable variants multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty) } + ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps)) } + def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = { + val symbols = srcs.map(_._1) + assert(symbols.distinct.size == symbols.size, + s"Duplicate symbols for srcs detected: $symbols") + extractDependenciesFromSrcs(List(srcs.toMap)) + } + /** - * Compiles given source code snippets written to a temporary files. Each snippet is + * Compiles given source code snippets written to temporary files. Each snippet is * written to a separate temporary file. * + * Snippets can be grouped to be compiled together in the same compiler run. This is + * useful to compile macros, which cannot be used in the same compilation run that + * defines them. + * * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. 
*/ - private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { + private def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = { withTemporaryDirectory { temp => val analysisCallback = new TestCallback(nameHashing) val classesDir = new File(temp, "classes") classesDir.mkdir() - val compiler = prepareCompiler(classesDir, analysisCallback) - val run = new compiler.Run - val srcFiles = srcs.toSeq.zipWithIndex map { case (src, i) => - val fileName = s"Test_$i.scala" - prepareSrcFile(temp, fileName, src) + + val compiler = prepareCompiler(classesDir, analysisCallback, classesDir.toString) + + val files = for((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield { + val run = new compiler.Run + val srcFiles = compilationUnit.toSeq.zipWithIndex map { case (src, i) => + val fileName = s"Test-$unitId-$i.scala" + prepareSrcFile(temp, fileName, src) + } + val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList + + run.compile(srcFilePaths) + + srcFilePaths.foreach(f => new File(f).delete) + srcFiles } - val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList - run.compile(srcFilePaths) - (srcFiles, analysisCallback) + (files.flatten.toSeq, analysisCallback) } } + private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { + compileSrcs(List(srcs.toList)) + } + private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = { val srcFile = new File(baseDir, fileName) sbt.IO.write(srcFile, src) srcFile } - private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback): CachedCompiler0#Compiler = { + private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = "."): CachedCompiler0#Compiler = { val args = Array.empty[String] object output extends SingleOutput { def outputDirectory: File = outputDir @@ -123,6 +151,7 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) { val weakLog = new 
WeakLog(ConsoleLogger(), ConsoleReporter) val cachedCompiler = new CachedCompiler0(args, output, weakLog, false) val settings = cachedCompiler.settings + settings.classpath.value = classpath settings.usejavacp.value = true val scalaReporter = new ConsoleReporter(settings) val delegatingReporter = DelegatingReporter(settings, ConsoleReporter) diff --git a/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala b/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala index 5f2c7b9c6..73b619e0f 100644 --- a/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala +++ b/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala @@ -73,8 +73,8 @@ object AnalysisFormats wrap[Severity, Byte]( _.ordinal.toByte, b => Severity.values.apply(b.toInt) ) - implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder]): Format[CompileSetup] = - asProduct4[CompileSetup, APIOutput, CompileOptions, String, CompileOrder]( (a,b,c,d) => new CompileSetup(a,b,c,d) )(s => (s.output, s.options, s.compilerVersion, s.order))(outputF, optionF, compilerVersion, orderF) + implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): Format[CompileSetup] = + asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]( (a,b,c,d,e) => new CompileSetup(a,b,c,d,e) )(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF) implicit val outputGroupFormat: Format[OutputGroup] = asProduct2((a: File,b: File) => new OutputGroup{def sourceDirectory = a; def outputDirectory = b}) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat) diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala 
b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index 8a754f596..f3e13d23a 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -4,6 +4,7 @@ package inc import java.io._ import sbt.{CompileSetup, Relation} import xsbti.api.{Compilation, Source} +import xsbti.compile.{MultipleOutput, SingleOutput} import javax.xml.bind.DatatypeConverter @@ -55,52 +56,54 @@ object TextAnalysisFormat { implicit val compilationF = xsbt.api.CompilationFormat def write(out: Writer, analysis: Analysis, setup: CompileSetup) { - VersionF.write(out) - // We start with relations because that's the part of greatest interest to external readers, + VersionF.write(out) + // We start with writing compile setup which contains value of the `nameHashing` + // flag that is needed to properly deserialize relations + FormatTimer.time("write setup") { CompileSetupF.write(out, setup) } + // Next we write relations because that's the part of greatest interest to external readers, // who can abort reading early once they're read them. 
FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) } FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) } FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) } FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) } FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) } - FormatTimer.time("write setup") { CompileSetupF.write(out, setup) } out.flush() } def read(in: BufferedReader): (Analysis, CompileSetup) = { - VersionF.read(in) - val relations = FormatTimer.time("read relations") { RelationsF.read(in) } + VersionF.read(in) + val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) } + val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) } val stamps = FormatTimer.time("read stamps") { StampsF.read(in) } val apis = FormatTimer.time("read apis") { APIsF.read(in) } val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) } val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) } - val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) } (Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup) } private[this] object VersionF { - val currentVersion = "2" + val currentVersion = "5" - def write(out: Writer) { - out.write("format version: %s\n".format(currentVersion)) - } + def write(out: Writer) { + out.write("format version: %s\n".format(currentVersion)) + } - private val versionPattern = """format version: (\w+)""".r - def read(in: BufferedReader) { - in.readLine() match { - case versionPattern(version) => validateVersion(version) - case s: String => throw new ReadException("\"format version: \"", s) - case null => throw new EOFException - } - } + private val versionPattern = """format version: (\w+)""".r + def read(in: BufferedReader) { + in.readLine() match { + case versionPattern(version) => validateVersion(version) + 
case s: String => throw new ReadException("\"format version: \"", s) + case null => throw new EOFException + } + } - def validateVersion(version: String) { - // TODO: Support backwards compatibility? - if (version != currentVersion) { - throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion)) - } - } + def validateVersion(version: String) { + // TODO: Support backwards compatibility? + if (version != currentVersion) { + throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion)) + } + } } private[this] object RelationsF { @@ -128,8 +131,8 @@ object TextAnalysisFormat { // We sort for ease of debugging and for more efficient reconstruction when reading. // Note that we don't share code with writeMap. Each is implemented more efficiently // than the shared code would be, and the difference is measurable on large analyses. - rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => - val kStr = k.toString + rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => + val kStr = k.toString vs.toSeq.sorted foreach { v => out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n") } @@ -137,8 +140,8 @@ object TextAnalysisFormat { } val nameHashing = relations.nameHashing - writeRelation(Headers.srcProd, relations.srcProd) - writeRelation(Headers.binaryDep, relations.binaryDep) + writeRelation(Headers.srcProd, relations.srcProd) + writeRelation(Headers.binaryDep, relations.binaryDep) val direct = if (nameHashing) Relations.emptySource else relations.direct val publicInherited = if (nameHashing) @@ -160,11 +163,11 @@ object TextAnalysisFormat { writeRelation(Headers.inheritanceInternalDep, inheritance.internal) writeRelation(Headers.inheritanceExternalDep, inheritance.external) - writeRelation(Headers.classes, relations.classes) + writeRelation(Headers.classes, relations.classes) 
writeRelation(Headers.usedNames, names) } - def read(in: BufferedReader): Relations = { + def read(in: BufferedReader, nameHashing: Boolean): Relations = { def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = { val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator // Reconstruct the forward map. This is more efficient than Relation.empty ++ items. @@ -188,19 +191,19 @@ object TextAnalysisFormat { def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) }) def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String]) - val srcProd = readFileRelation(Headers.srcProd) - val binaryDep = readFileRelation(Headers.binaryDep) + val srcProd = readFileRelation(Headers.srcProd) + val binaryDep = readFileRelation(Headers.binaryDep) import sbt.inc.Relations.{Source, SourceDependencies, makeSourceDependencies, emptySource, makeSource, emptySourceDependencies} val directSrcDeps: Source = { - val internalSrcDep = readFileRelation(Headers.directSrcDep) - val externalDep = readStringRelation(Headers.directExternalDep) + val internalSrcDep = readFileRelation(Headers.directSrcDep) + val externalDep = readStringRelation(Headers.directExternalDep) makeSource(internalSrcDep, externalDep) } val publicInheritedSrcDeps: Source = { val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI) - val externalDepPI = readStringRelation(Headers.externalDepPI) + val externalDepPI = readStringRelation(Headers.externalDepPI) makeSource(internalSrcDepPI, externalDepPI) } val memberRefSrcDeps: SourceDependencies = { @@ -215,17 +218,18 @@ object TextAnalysisFormat { } // we don't check for emptiness of publicInherited/inheritance relations because // we assume that invariant that says they are subsets of direct/memberRef holds - assert((directSrcDeps == emptySource) || (memberRefSrcDeps == emptySourceDependencies), - "One mechanism is supported for tracking source dependencies at the 
time") - val nameHashing = memberRefSrcDeps != emptySourceDependencies - val classes = readStringRelation(Headers.classes) + assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), + "When name hashing is disabled the `memberRef` relation should be empty.") + assert(!nameHashing || (directSrcDeps == emptySource), + "When name hashing is enabled the `direct` relation should be empty.") + val classes = readStringRelation(Headers.classes) val names = readStringRelation(Headers.usedNames) if (nameHashing) Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) else { - assert(names.all.isEmpty, s"When `nameHashing` is disabled `names` relation " + - "should be empty: $names") + assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " + + s"should be empty: $names") Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) } } @@ -250,9 +254,9 @@ object TextAnalysisFormat { def read(in: BufferedReader): Stamps = { def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v) - val products = doReadMap(Headers.products, Stamp.fromString) - val sources = doReadMap(Headers.sources, Stamp.fromString) - val binaries = doReadMap(Headers.binaries, Stamp.fromString) + val products = doReadMap(Headers.products, Stamp.fromString) + val sources = doReadMap(Headers.sources, Stamp.fromString) + val binaries = doReadMap(Headers.binaries, Stamp.fromString) val classNames = doReadMap(Headers.classNames, identity[String]) Stamps(products, sources, binaries, classNames) @@ -260,10 +264,10 @@ object TextAnalysisFormat { } private[this] object APIsF { - object Headers { - val internal = "internal apis" - val external = "external apis" - } + object Headers { + val internal = "internal apis" + val external = "external apis" + } val stringToSource = ObjectStringifier.stringToObj[Source] _ val sourceToString = ObjectStringifier.objToString[Source] _ @@ -286,9 
+290,9 @@ object TextAnalysisFormat { } private[this] object SourceInfosF { - object Headers { - val infos = "source infos" - } + object Headers { + val infos = "source infos" + } val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _ val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _ @@ -298,31 +302,83 @@ object TextAnalysisFormat { } private[this] object CompilationsF { - object Headers { - val compilations = "compilations" - } + object Headers { + val compilations = "compilations" + } val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _ val compilationToString = ObjectStringifier.objToString[Compilation] _ def write(out: Writer, compilations: Compilations) { - def toMapEntry(x: (Compilation, Int)): (String, Compilation) = "%03d".format(x._2) -> x._1 - writeMap(out)(Headers.compilations, compilations.allCompilations.zipWithIndex.map(toMapEntry).toMap, compilationToString, inlineVals=false) + writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString) } - def read(in: BufferedReader): Compilations = - Compilations.make(readMap(in)(Headers.compilations, identity[String], stringToCompilation).values.toSeq) + + def read(in: BufferedReader): Compilations = Compilations.make( + readSeq[Compilation](in)(Headers.compilations, stringToCompilation)) } private[this] object CompileSetupF { - object Headers { - val setup = "compile setup" - } + object Headers { + val outputMode = "output mode" + val outputDir = "output directories" + val compileOptions = "compile options" + val javacOptions = "javac options" + val compilerVersion = "compiler version" + val compileOrder = "compile order" + val nameHashing = "name hashing" + } - val stringToSetup = ObjectStringifier.stringToObj[CompileSetup] _ - val setupToString = ObjectStringifier.objToString[CompileSetup] _ + private[this] val singleOutputMode = "single" + private[this] val multipleOutputMode = "multiple" + private[this] val singleOutputKey = new 
File("output dir") - def write(out: Writer, setup: CompileSetup) { writeMap(out)(Headers.setup, Map("1" -> setup), setupToString, inlineVals=false)} - def read(in: BufferedReader): CompileSetup = readMap(in)(Headers.setup, identity[String], stringToSetup).head._2 + def write(out: Writer, setup: CompileSetup) { + val (mode, outputAsMap) = setup.output match { + case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory)) + case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap) + } + + writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String]) + writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath }) + writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String]) + writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String]) + writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String]) + writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String]) + writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, (b: Boolean) => b.toString) + } + + def read(in: BufferedReader): CompileSetup = { + def s2f(s: String) = new File(s) + def s2b(s: String): Boolean = s.toBoolean + val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption + val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f) + val compileOptions = readSeq(in)(Headers.compileOptions, identity[String]) + val javacOptions = readSeq(in)(Headers.javacOptions, identity[String]) + val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head + val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head + val nameHashing = readSeq(in)(Headers.nameHashing, s2b).head + + val output = outputDirMode match { + case Some(s) => s match { + case `singleOutputMode` => new SingleOutput { + val outputDirectory = outputAsMap(singleOutputKey) + } + case `multipleOutputMode` => 
new MultipleOutput { + val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map { + case (src: File, out: File) => new MultipleOutput.OutputGroup { + val sourceDirectory = src + val outputDirectory = out + } + } + } + case str: String => throw new ReadException("Unrecognized output mode: " + str) + } + case None => throw new ReadException("No output mode specified") + } + + new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion, + xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing) + } } private[this] object ObjectStringifier { @@ -348,8 +404,8 @@ object TextAnalysisFormat { } private[this] def expectHeader(in: BufferedReader, expectedHeader: String) { - val header = in.readLine() - if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header) + val header = in.readLine() + if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header) } private[this] def writeSize(out: Writer, n: Int) { @@ -361,10 +417,23 @@ object TextAnalysisFormat { in.readLine() match { case itemsPattern(nStr) => Integer.parseInt(nStr) case s: String => throw new ReadException("\" items\"", s) - case null => throw new EOFException + case null => throw new EOFException } } + private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String) { + // We write sequences as idx -> element maps, for uniformity with maps/relations. + def n = s.length + val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1 + val fmtStr = "%%0%dd".format(numDigits) + // We only use this for relatively short seqs, so creating this extra map won't be a performance hit. 
+ val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap + writeMap(out)(header, m, t2s) + } + + private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] = + (readPairs(in)(expectedHeader, identity[String], s2t) map(_._2)).toSeq + private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean=true)(implicit ord: Ordering[K]) { writeHeader(out, header) writeSize(out, m.size) @@ -379,7 +448,7 @@ object TextAnalysisFormat { private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = { def toPair(s: String): (K, V) = { - if (s == null) throw new EOFException + if (s == null) throw new EOFException val p = s.indexOf(" -> ") val k = s2k(s.substring(0, p)) // Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob. @@ -387,8 +456,8 @@ object TextAnalysisFormat { (k, v) } expectHeader(in, expectedHeader) - val n = readSize(in) - for (i <- 0 until n) yield toPair(in.readLine()) + val n = readSize(in) + for (i <- 0 until n) yield toPair(in.readLine()) } private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = { diff --git a/ivy/src/main/scala/sbt/ConvertResolver.scala b/ivy/src/main/scala/sbt/ConvertResolver.scala index 58ba6d4ac..74c5c119c 100644 --- a/ivy/src/main/scala/sbt/ConvertResolver.scala +++ b/ivy/src/main/scala/sbt/ConvertResolver.scala @@ -10,13 +10,91 @@ import core.module.id.ModuleRevisionId import core.module.descriptor.DependencyDescriptor import core.resolve.ResolveData import core.settings.IvySettings -import plugins.resolver.{BasicResolver, DependencyResolver, IBiblioResolver} +import plugins.resolver.{BasicResolver, DependencyResolver, IBiblioResolver, RepositoryResolver} import plugins.resolver.{AbstractPatternsBasedResolver, 
AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver} import plugins.repository.url.{URLRepository => URLRepo} import plugins.repository.file.{FileRepository => FileRepo, FileResource} +import java.io.File +import org.apache.ivy.util.ChecksumHelper +import org.apache.ivy.core.module.descriptor.{Artifact=>IArtifact} + private object ConvertResolver { + /** This class contains all the reflective lookups used in the + * checksum-friendly URL publishing shim. + */ + private object ChecksumFriendlyURLResolver { + // TODO - When we dump JDK6 support we can remove this hackery + // import java.lang.reflect.AccessibleObject + type AccessibleObject = { + def setAccessible(value: Boolean): Unit + } + private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] = + try { + val cls = classOf[RepositoryResolver] + val thing = f(cls) + import scala.language.reflectiveCalls + thing.setAccessible(true) + Some(thing) + } catch { + case (_: java.lang.NoSuchFieldException) | + (_: java.lang.SecurityException) | + (_: java.lang.NoSuchMethodException) => None + } + private val signerNameField: Option[java.lang.reflect.Field] = + reflectiveLookup(_.getDeclaredField("signerName")) + private val putChecksumMethod: Option[java.lang.reflect.Method] = + reflectiveLookup(_.getDeclaredMethod("putChecksum", + classOf[IArtifact], classOf[File], classOf[String], + classOf[Boolean], classOf[String])) + private val putSignatureMethod: Option[java.lang.reflect.Method] = + reflectiveLookup(_.getDeclaredMethod("putSignature", + classOf[IArtifact], classOf[File], classOf[String], + classOf[Boolean])) + } + /** + * The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories + * will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore + * if we succeed in publishing an artifact, we need to just blast the checksums in place. 
+ * This acts as a "shim" on RepositoryResolvers so that we can hook our methods into + * both the IBiblioResolver + URLResolver without having to duplicate the code in two + * places. However, this does mean our use of reflection is awesome. + * + * TODO - See about contributing back to ivy. + */ + private trait ChecksumFriendlyURLResolver extends RepositoryResolver { + import ChecksumFriendlyURLResolver._ + private def signerName: String = signerNameField match { + case Some(field) => field.get(this).asInstanceOf[String] + case None => null + } + override protected def put(artifact: IArtifact, src: File, dest: String, overwrite: Boolean): Unit = { + // verify the checksum algorithms before uploading artifacts! + val checksums = getChecksumAlgorithms() + val repository = getRepository() + for { + checksum <- checksums + if !ChecksumHelper.isKnownAlgorithm(checksum) + } throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum) + repository.put(artifact, src, dest, overwrite); + // Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so + // we need to overwrite what it has. + for (checksum <- checksums) { + putChecksumMethod match { + case Some(method) => method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum) + case None => // TODO - issue warning? 
+ } + } + if (signerName != null) { + putSignatureMethod match { + case None => () + case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean) + } + } + } + } + /** Converts the given sbt resolver into an Ivy resolver..*/ def apply(r: Resolver, settings: IvySettings, log: Logger) = { @@ -25,7 +103,7 @@ private object ConvertResolver case repo: MavenRepository => { val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern)) - final class PluginCapableResolver extends IBiblioResolver with DescriptorRequired { + final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired { def setPatterns() { // done this way for access to protected methods. setArtifactPatterns(pattern) setIvyPatterns(pattern) @@ -61,7 +139,13 @@ private object ConvertResolver } case repo: FileRepository => { - val resolver = new FileSystemResolver with DescriptorRequired + val resolver = new FileSystemResolver with DescriptorRequired { + // Workaround for #1156 + // Temporarily in sbt 0.13.x we deprecate overwriting + // in local files for non-changing revisions. + // This will be fully enforced in sbt 1.0. + setRepository(new WarnOnOverwriteFileRepo()) + } resolver.setName(repo.name) initializePatterns(resolver, repo.patterns, settings) import repo.configuration.{isLocal, isTransactional} @@ -71,7 +155,7 @@ private object ConvertResolver } case repo: URLRepository => { - val resolver = new URLResolver with DescriptorRequired + val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired resolver.setName(repo.name) initializePatterns(resolver, repo.patterns, settings) resolver @@ -135,7 +219,7 @@ private object ConvertResolver /** A custom Ivy URLRepository that returns FileResources for file URLs. * This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache. 
*/ private[this] final class LocalIfFileRepo extends URLRepo { - private[this] val repo = new FileRepo + private[this] val repo = new WarnOnOverwriteFileRepo() override def getResource(source: String) = { val url = new URL(source) if(url.getProtocol == IO.FileScheme) @@ -144,4 +228,16 @@ private object ConvertResolver super.getResource(source) } } + + private[this] final class WarnOnOverwriteFileRepo extends FileRepo() { + override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = { + try super.put(source, destination, overwrite) + catch { + case e: java.io.IOException if e.getMessage.contains("destination already exists") => + import org.apache.ivy.util.Message + Message.warn(s"Attempting to overwrite $destination\n\tThis usage is deprecated and will be removed in sbt 1.0.") + super.put(source, destination, true) + } + } + } } diff --git a/ivy/src/main/scala/sbt/CustomPomParser.scala b/ivy/src/main/scala/sbt/CustomPomParser.scala index 7023ab8d9..871c1f07c 100644 --- a/ivy/src/main/scala/sbt/CustomPomParser.scala +++ b/ivy/src/main/scala/sbt/CustomPomParser.scala @@ -43,7 +43,7 @@ object CustomPomParser val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit") val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform) - private[this] val TransformedHashKey = "sbtTransformHash" + private[this] val TransformedHashKey = "e:sbtTransformHash" // A hash of the parameters transformation is based on. // If a descriptor has a different hash, we need to retransform it. 
private[this] val TransformHash: String = hash((unqualifiedKeys ++ JarPackagings).toSeq.sorted) @@ -57,8 +57,14 @@ object CustomPomParser private[this] def transformedByThisVersion(md: ModuleDescriptor): Boolean = { + val oldTransformedHashKey = "sbtTransformHash" val extraInfo = md.getExtraInfo - extraInfo != null && extraInfo.get(TransformedHashKey) == TransformHash + // sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both + Option(extraInfo).isDefined && + ((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match { + case Some(TransformHash) => true + case _ => false + }) } private[this] def defaultTransformImpl(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = diff --git a/ivy/src/main/scala/sbt/Ivy.scala b/ivy/src/main/scala/sbt/Ivy.scala index e1dca53ae..a6519c6bc 100644 --- a/ivy/src/main/scala/sbt/Ivy.scala +++ b/ivy/src/main/scala/sbt/Ivy.scala @@ -7,7 +7,6 @@ import Resolver.PluginPattern import java.io.File import java.net.URI -import java.text.ParseException import java.util.concurrent.Callable import java.util.{Collection, Collections => CS} import CS.singleton @@ -24,9 +23,7 @@ import core.settings.IvySettings import plugins.latest.LatestRevisionStrategy import plugins.matcher.PatternMatcher import plugins.parser.m2.PomModuleDescriptorParser -import plugins.repository.ResourceDownloader import plugins.resolver.{ChainResolver, DependencyResolver} -import plugins.resolver.util.ResolvedResource import util.{Message, MessageLogger} import util.extendable.ExtendableItem @@ -99,6 +96,8 @@ final class IvySbt(val configuration: IvyConfiguration) def withIvy[T](log: MessageLogger)(f: Ivy => T): T = withDefaultLogger(log) { + // See #429 - We always insert a helper authenticator here which lets us get more useful authentication errors. 
+ ivyint.ErrorMessageAuthenticator.install() ivy.pushContext() ivy.getLoggerEngine.pushLogger(log) try { f(ivy) } @@ -356,41 +355,8 @@ private object IvySbt case pr: ProjectResolver => true case _ => false } - /** This is overridden to delete outofdate artifacts of changing modules that are not listed in the metadata. - * This occurs for artifacts with classifiers, for example. */ - @throws(classOf[ParseException]) - override def cacheModuleDescriptor(resolver: DependencyResolver, mdRef: ResolvedResource, dd: DependencyDescriptor, moduleArtifact: IArtifact, downloader: ResourceDownloader, options: CacheMetadataOptions): ResolvedModuleRevision = - { - val rmrRaw = super.cacheModuleDescriptor(null, mdRef, dd, moduleArtifact, downloader, options) - val rmr = resetArtifactResolver(rmrRaw) - val mrid = moduleArtifact.getModuleRevisionId - def shouldClear(): Boolean = rmr != null && - ( (rmr.getReport != null && rmr.getReport.isSearched && isChanging(dd, mrid)) || - isProjectResolver(rmr.getResolver) ) - // only handle changing modules whose metadata actually changed. - // Typically, the publication date in the metadata has to change to get here. - if(shouldClear()) { - // this is the locally cached metadata as originally retrieved (e.g. 
the pom) - val original = rmr.getReport.getOriginalLocalFile - if(original != null) { - // delete all files in subdirectories that are older than the original metadata file's publication date - // The publication date is used because the metadata will be redownloaded for changing files, - // so the last modified time changes, but the publication date doesn't - val pubDate = rmrRaw.getPublicationDate - val lm = if(pubDate eq null) original.lastModified else pubDate.getTime - val indirectFiles = PathFinder(original.getParentFile).*(DirectoryFilter).**(-DirectoryFilter).get.toList - val older = indirectFiles.filter(f => f.lastModified < lm).toList - Message.verbose("Deleting additional old artifacts from cache for changed module " + mrid + older.mkString(":\n\t", "\n\t", "")) - IO.delete(older) - } - } - rmr - } // ignore the original resolver wherever possible to avoid issues like #704 override def saveResolvers(descriptor: ModuleDescriptor, metadataResolverName: String, artifactResolverName: String) {} - - def isChanging(dd: DependencyDescriptor, requestedRevisionId: ModuleRevisionId): Boolean = - !localOnly && (dd.isChanging || requestedRevisionId.getRevision.contains("-SNAPSHOT")) } manager.setArtifactPattern(PluginPattern + manager.getArtifactPattern) manager.setDataFilePattern(PluginPattern + manager.getDataFilePattern) diff --git a/ivy/src/main/scala/sbt/IvyActions.scala b/ivy/src/main/scala/sbt/IvyActions.scala index f3ac22c82..0ad3376d3 100644 --- a/ivy/src/main/scala/sbt/IvyActions.scala +++ b/ivy/src/main/scala/sbt/IvyActions.scala @@ -16,7 +16,11 @@ import core.resolve.ResolveOptions import plugins.resolver.{BasicResolver, DependencyResolver} final class DeliverConfiguration(val deliverIvyPattern: String, val status: String, val configurations: Option[Seq[Configuration]], val logging: UpdateLogging.Value) -final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: Map[Artifact, File], val checksums: Seq[String], 
val logging: UpdateLogging.Value) +final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: Map[Artifact, File], val checksums: Seq[String], val logging: UpdateLogging.Value, + val overwrite: Boolean) { + def this(ivyFile: Option[File], resolverName: String, artifacts: Map[Artifact, File], checksums: Seq[String], logging: UpdateLogging.Value) = + this(ivyFile, resolverName, artifacts, checksums, logging, false) +} final class UpdateConfiguration(val retrieve: Option[RetrieveConfiguration], val missingOk: Boolean, val logging: UpdateLogging.Value) final class RetrieveConfiguration(val retrieveDirectory: File, val outputPattern: String) @@ -86,11 +90,11 @@ object IvyActions import configuration._ module.withModule(log) { case (ivy, md, default) => val resolver = ivy.getSettings.getResolver(resolverName) - if(resolver eq null) error("Undefined resolver '" + resolverName + "'") + if(resolver eq null) sys.error("Undefined resolver '" + resolverName + "'") val ivyArtifact = ivyFile map { file => (MDArtifact.newIvyArtifact(md), file) } val cross = crossVersionMap(module.moduleSettings) - val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toList - withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = true) } + val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toSeq + withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = overwrite) } } } private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Seq[String])(act: => T): T = diff --git a/ivy/src/main/scala/sbt/MakePom.scala b/ivy/src/main/scala/sbt/MakePom.scala index 5e49243d3..16ec28333 100644 --- a/ivy/src/main/scala/sbt/MakePom.scala +++ b/ivy/src/main/scala/sbt/MakePom.scala @@ -188,7 +188,7 @@ class MakePom(val log: Logger) {mrid.getOrganisation} {mrid.getName} - {mrid.getRevision} + {makeDependencyVersion(mrid.getRevision)} { scopeElem(scope) } { optionalElem(optional) } { classifierElem(classifier) 
} @@ -197,6 +197,44 @@ class MakePom(val log: Logger) } + + + def makeDependencyVersion(revision: String): String = { + def plusRange(s:String, shift:Int = 0) = { + def pow(i:Int):Int = if (i>0) 10 * pow(i-1) else 1 + val (prefixVersion, lastVersion) = (s+"0"*shift).reverse.split("\\.",2) match { + case Array(revLast,revRest) => + ( revRest.reverse + ".", revLast.reverse ) + case Array(revLast) => ("", revLast.reverse) + } + val lastVersionInt = lastVersion.toInt + s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt+pow(shift)})" + } + val startSym=Set(']','[','(') + val stopSym=Set(']','[',')') + try { + if (revision endsWith ".+") { + plusRange(revision.substring(0,revision.length-2)) + } else if (revision endsWith "+") { + val base = revision.take(revision.length-1) + // This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so + // we assume version ranges never go beyond 5 siginificant digits. + (0 to 5).map(plusRange(base,_)).mkString(",") + } else if (startSym(revision(0)) && stopSym(revision(revision.length-1))) { + val start = revision(0) + val stop = revision(revision.length-1) + val mid = revision.substring(1,revision.length-1) + (if (start == ']') "(" else start) + mid + (if (stop == '[') ")" else stop) + } else revision + } catch { + case e: NumberFormatException => + // TODO - if the version doesn't meet our expectations, maybe we just issue a hard + // error instead of softly ignoring the attempt to rewrite. 
+ //sys.error(s"Could not fix version [$revision] into maven style version") + revision + } + } + @deprecated("No longer used and will be removed.", "0.12.1") def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = { diff --git a/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala b/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala new file mode 100644 index 000000000..3d0d174d8 --- /dev/null +++ b/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala @@ -0,0 +1,128 @@ +package sbt +package ivyint + +import java.lang.reflect.Field +import java.lang.reflect.Method +import java.net.Authenticator +import java.net.PasswordAuthentication +import org.apache.ivy.util.Credentials +import org.apache.ivy.util.Message +import org.apache.ivy.util.url.IvyAuthenticator +import org.apache.ivy.util.url.CredentialsStore + +/** + * Helper to install an Authenticator that works with the IvyAuthenticator to provide better error messages when + * credentials don't line up. + */ +object ErrorMessageAuthenticator { + private var securityWarningLogged = false + + private def originalAuthenticator: Option[Authenticator] = { + try { + val f = classOf[Authenticator].getDeclaredField("theAuthenticator"); + f.setAccessible(true); + Option(f.get(null).asInstanceOf[Authenticator]) + } catch { + // TODO - Catch more specific errors. + case t: Throwable => + Message.debug("Error occurred while getting the original authenticator: " + t.getMessage) + None + } + } + + private lazy val ivyOriginalField = { + val field = classOf[IvyAuthenticator].getDeclaredField("original") + field.setAccessible(true) + field + } + // Attempts to get the original authenticator form the ivy class or returns null. + private def installIntoIvy(ivy: IvyAuthenticator): Option[Authenticator] = { + // Here we install ourselves as the IvyAuthenticator's default so we get called AFTER Ivy has a chance to run. 
+ def installIntoIvyImpl(original: Option[Authenticator]): Unit = { + val newOriginal = new ErrorMessageAuthenticator(original) + ivyOriginalField.set(ivy, newOriginal) + } + + try Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match { + case Some(alreadyThere: ErrorMessageAuthenticator) => // We're already installed, no need to do the work again. + case originalOpt => installIntoIvyImpl(originalOpt) + } catch { + case t: Throwable => + Message.debug("Error occurred will trying to install debug messages into Ivy Authentication" + t.getMessage) + } + Some(ivy) + } + + /** Installs the error message authenticator so we have nicer error messages when using java's URL for downloading. */ + def install() { + // Actually installs the error message authenticator. + def doInstall(original: Option[Authenticator]): Unit = + try Authenticator.setDefault(new ErrorMessageAuthenticator(original)) + catch { + case e: SecurityException if !securityWarningLogged => + securityWarningLogged = true; + Message.warn("Not enough permissions to set the ErorrMessageAuthenticator. " + + "Helpful debug messages disabled!"); + } + // We will try to use the original authenticator as backup authenticator. + // Since there is no getter available, so try to use some reflection to + // obtain it. If that doesn't work, assume there is no original authenticator + def doInstallIfIvy(original: Option[Authenticator]): Unit = + original match { + case Some(installed: ErrorMessageAuthenticator) => // Ignore, we're already installed + case Some(ivy: IvyAuthenticator) => installIntoIvy(ivy) + case original => doInstall(original) + } + doInstallIfIvy(originalAuthenticator) + } +} +/** + * An authenticator which just delegates to a previous authenticator and issues *nice* + * error messages on failure to find credentials. 
+ * + * Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to + * install this one at some point and eventually ivy will capture it and use it. + */ +private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticator]) extends Authenticator { + + protected override def getPasswordAuthentication(): PasswordAuthentication = { + // We're guaranteed to only get here if Ivy's authentication fails + if (!isProxyAuthentication) { + val host = getRequestingHost + // TODO - levenshtein distance "did you mean" message. + Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].") + val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty) + if(!configuredRealms.isEmpty) { + Message.error(s" Is one of these realms mispelled for host [${host}]:") + configuredRealms foreach { realm => + Message.error(s" * ${realm}") + } + } + } + // TODO - Maybe we should work on a helpful proxy message... + + // TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it. + // or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be). + + // Grabs the authentication that would have been provided had we not been installed... + def originalAuthentication: Option[PasswordAuthentication] = { + Authenticator.setDefault(original.getOrElse(null)) + try Option(Authenticator.requestPasswordAuthentication( + getRequestingHost, + getRequestingSite, + getRequestingPort, + getRequestingProtocol, + getRequestingPrompt, + getRequestingScheme)) + finally Authenticator.setDefault(this) + } + originalAuthentication.getOrElse(null) + } + + /** Returns true if this authentication if for a proxy and not for an HTTP server. + * We want to display different error messages, depending. 
+ */ + private def isProxyAuthentication: Boolean = + getRequestorType == Authenticator.RequestorType.PROXY + +} \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala b/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala new file mode 100644 index 000000000..365ffe698 --- /dev/null +++ b/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala @@ -0,0 +1,63 @@ +package sbt +package ivyint + +import org.apache.ivy.util.url.CredentialsStore +import collection.JavaConverters._ + +/** A key used to store credentials in the ivy credentials store. */ +private[sbt] sealed trait CredentialKey +/** Represents a key in the ivy credentials store that is only specific to a host. */ +private[sbt] case class Host(name: String) extends CredentialKey +/** Represents a key in the ivy credentials store that is keyed to both a host and a "realm". */ +private[sbt] case class Realm(host: String, realm: String) extends CredentialKey + +/** + * Helper mechanism to improve credential related error messages. + * + * This evil class exposes to us the necessary information to warn on credential failure and offer + * spelling/typo suggestions. + */ +private[sbt] object IvyCredentialsLookup { + + /** Helper extractor for Ivy's key-value store of credentials. */ + private object KeySplit { + def unapply(key: String): Option[(String,String)] = { + key.indexOf('@') match { + case -1 => None + case n => Some(key.take(n) -> key.drop(n+1)) + } + } + } + + /** Here we cheat runtime private so we can look in the credentials store. + * + * TODO - Don't bomb at class load time... + */ + private val credKeyringField = { + val tmp = classOf[CredentialsStore].getDeclaredField("KEYRING") + tmp.setAccessible(true) + tmp + } + + /** All the keys for credentials in the ivy configuration store. */ + def keyringKeys: Set[CredentialKey] = { + val map = credKeyringField.get(null).asInstanceOf[java.util.HashMap[String, Any]] + // make a clone of the set... 
+ (map.keySet.asScala.map { + case KeySplit(realm, host) => Realm(host, realm) + case host => Host(host) + })(collection.breakOut) + } + + /** + * A mapping of host -> realms in the ivy credentials store. + */ + def realmsForHost: Map[String, Set[String]] = + keyringKeys collect { + case x: Realm => x + } groupBy { realm => + realm.host + } mapValues { realms => + realms map (_.realm) + } +} \ No newline at end of file diff --git a/ivy/src/test/scala/MakePomTest.scala b/ivy/src/test/scala/MakePomTest.scala new file mode 100644 index 000000000..1341b207d --- /dev/null +++ b/ivy/src/test/scala/MakePomTest.scala @@ -0,0 +1,29 @@ +package sbt + +import java.io.File +import org.specs2._ +import mutable.Specification + +object MakePomTest extends Specification +{ + val mp = new MakePom(ConsoleLogger()) + import mp.{makeDependencyVersion=>v} + "MakePom makeDependencyVersion" should { + "Handle .+ in versions" in { + v("1.+") must_== "[1,2)" + v("1.2.3.4.+") must_== "[1.2.3.4,1.2.3.5)" + v("12.31.42.+") must_== "[12.31.42,12.31.43)" + } + /* TODO - do we care about this case? + * 1+ --> [1,2),[10,20),[100,200),[1000,2000),[10000,20000),[100000,200000) + */ + "Handle ]* bracket in version ranges" in { + v("]1,3]") must_== "(1,3]" + v("]1.1,1.3]") must_== "(1.1,1.3]" + } + "Handle *[ bracket in version ranges" in { + v("[1,3[") must_== "[1,3)" + v("[1.1,1.3[") must_== "[1.1,1.3)" + } + } +} diff --git a/launch/interface/src/main/java/xsbti/AppMain.java b/launch/interface/src/main/java/xsbti/AppMain.java index ffd1e4c36..b24e02212 100644 --- a/launch/interface/src/main/java/xsbti/AppMain.java +++ b/launch/interface/src/main/java/xsbti/AppMain.java @@ -1,6 +1,25 @@ package xsbti; +/** + * The main entry interface for launching applications. Classes which implement this interface + * can be launched via the sbt launcher. 
+ * + * In addition, classes can be adapted into this interface by the launcher if they have a static method + * matching one of these signatures: + * + * - public static void main(String[] args) + * - public static int main(String[] args) + * - public static xsbti.Exit main(String[] args) + * + */ public interface AppMain { + /** Run the application and return the result. + * + * @param configuration The configuration used to run the application. Includes arguments and access to launcher features. + * @return + * The result of running this app. + * Note: the result can be things like "Please reboot this application". + */ public MainResult run(AppConfiguration configuration); } \ No newline at end of file diff --git a/launch/interface/src/main/java/xsbti/AppProvider.java b/launch/interface/src/main/java/xsbti/AppProvider.java index 24744c83c..ab3914210 100644 --- a/launch/interface/src/main/java/xsbti/AppProvider.java +++ b/launch/interface/src/main/java/xsbti/AppProvider.java @@ -3,9 +3,10 @@ package xsbti; import java.io.File; /** - * This represents an interface that can generate applications. + * This represents an interface that can generate applications or servers. * - * An application is somethign which will run and return an exit value. + * This provider grants access to launcher related features associated with + * the id. */ public interface AppProvider { @@ -33,6 +34,8 @@ public interface AppProvider * It is NOT guaranteed that newMain().getClass() == mainClass(). * The sbt launcher can wrap generic static main methods. In this case, there will be a wrapper class, * and you must use the `entryPoint` method. + * @throws IncompatibleClassChangeError if the configuration used for this Application does not + * represent a launched application. 
*/ public AppMain newMain(); diff --git a/launch/interface/src/main/java/xsbti/Exit.java b/launch/interface/src/main/java/xsbti/Exit.java index f88c8c591..3363fce39 100644 --- a/launch/interface/src/main/java/xsbti/Exit.java +++ b/launch/interface/src/main/java/xsbti/Exit.java @@ -1,7 +1,9 @@ package xsbti; -/** A launched application returns an instance of this class in order to communicate to the launcher -* that the application is completely finished and the launcher should exit with the given exit code.*/ +/** + * A launched application returns an instance of this class in order to communicate to the launcher + * that the application finished and the launcher should exit with the given exit code. + */ public interface Exit extends MainResult { public int code(); diff --git a/launch/interface/src/main/java/xsbti/MainResult.java b/launch/interface/src/main/java/xsbti/MainResult.java index e81aede2d..b6f27a680 100644 --- a/launch/interface/src/main/java/xsbti/MainResult.java +++ b/launch/interface/src/main/java/xsbti/MainResult.java @@ -1,8 +1,12 @@ package xsbti; -/** A launched application should return an instance of this from its 'run' method -* to communicate to the launcher what should be done now that the application -* has competed. This interface should be treated as 'sealed', with Exit and Reboot the only -* direct subtypes. -*/ +/** + * A launched application should return an instance of this from its 'run' method + * to communicate to the launcher what should be done now that the application + * has completed. This interface should be treated as 'sealed', with Exit and Reboot the only + * direct subtypes. 
+ * + * @see xsbti.Exit + * @see xsbti.Reboot + */ public interface MainResult {} \ No newline at end of file diff --git a/launch/interface/src/main/java/xsbti/Reboot.java b/launch/interface/src/main/java/xsbti/Reboot.java index cb978c32a..0d6136a53 100644 --- a/launch/interface/src/main/java/xsbti/Reboot.java +++ b/launch/interface/src/main/java/xsbti/Reboot.java @@ -2,9 +2,11 @@ package xsbti; import java.io.File; -/** A launched application returns an instance of this class in order to communicate to the launcher -* that the application should be restarted. Different versions of the application and Scala can be used. -* The application can be given different arguments and a new working directory as well.*/ +/** + * A launched application returns an instance of this class in order to communicate to the launcher + * that the application should be restarted. Different versions of the application and Scala can be used. + * The application can be given different arguments as well as a new working directory. + */ public interface Reboot extends MainResult { public String[] arguments(); diff --git a/launch/interface/src/main/java/xsbti/Server.java b/launch/interface/src/main/java/xsbti/Server.java new file mode 100644 index 000000000..d4eca176d --- /dev/null +++ b/launch/interface/src/main/java/xsbti/Server.java @@ -0,0 +1,36 @@ +package xsbti; + +/** A running server. + * + * A class implementing this must: + * + * 1. Expose an HTTP port that clients can connect to, returned via the uri method. + * 2. Accept HTTP HEAD requests against the returned URI. These are used as "ping" messages to ensure + * a server is still alive, when new clients connect. + * 3. Create a new thread to execute its service + * 4. Block the calling thread until the server is shutdown via awaitTermination() + */ +public interface Server { + /** + * @return + * A URI denoting the Port which clients can connect to. 
+ * + * Note: we use a URI so that the server can bind to different IP addresses (even a public one) if desired. + * Note: To verify that a server is "up", the sbt launcher will attempt to connect to + * this URI's address and port with a socket. If the connection is accepted, the server is assumed to + * be working. + */ + public java.net.URI uri(); + /** + * This should block the calling thread until the server is shutdown. + * + * @return + * The result that should occur from the server. + * Can be: + * - xsbti.Exit: Shutdown this launch + * - xsbti.Reboot: Restart the server + * + * + */ + public xsbti.MainResult awaitTermination(); +} \ No newline at end of file diff --git a/launch/interface/src/main/java/xsbti/ServerMain.java b/launch/interface/src/main/java/xsbti/ServerMain.java new file mode 100644 index 000000000..da3c8ce2b --- /dev/null +++ b/launch/interface/src/main/java/xsbti/ServerMain.java @@ -0,0 +1,17 @@ +package xsbti; + +/** The main entry point for a launched service. This allows applciations + * to instantiate server instances. + */ +public interface ServerMain { + /** + * This method should launch one or more thread(s) which run the service. After the service has + * been started, it should return the port/URI it is listening for connections on. + * + * @param configuration + * The configuration used to launch this service. + * @return + * A running server. 
+ */ + public Server start(AppConfiguration configuration); +} \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Boot.scala b/launch/src/main/scala/xsbt/boot/Boot.scala index 06ee1ba82..665407cff 100644 --- a/launch/src/main/scala/xsbt/boot/Boot.scala +++ b/launch/src/main/scala/xsbt/boot/Boot.scala @@ -5,36 +5,48 @@ import java.io.File + // The entry point to the launcher object Boot { def main(args: Array[String]) { - args match { - case Array("--version") => - println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion) - case _ => - System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM - System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks - System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise - CheckProxy() - run(args) - } + val config = parseArgs(args) + // If we havne't exited, we set up some hooks and launch + System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM + System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks + System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise + CheckProxy() + run(config) } + def parseArgs(args: Array[String]): LauncherArguments = { + @annotation.tailrec + def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments = + args match { + case "--version" :: rest => + println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion) + exit(1) + case "--locate" :: rest => parse(rest, true, remaining) + case next :: rest => parse(rest, isLocate, next :: remaining) + case Nil => new LauncherArguments(remaining.reverse, isLocate) + } + parse(args.toList, false, Nil) + } + // this arrangement is because Scala does not always properly optimize away // the tail recursion in a catch statement - final def run(args: 
Array[String]): Unit = runImpl(args) match { + final def run(args: LauncherArguments): Unit = runImpl(args) match { case Some(newArgs) => run(newArgs) case None => () } - private def runImpl(args: Array[String]): Option[Array[String]] = + private def runImpl(args: LauncherArguments): Option[LauncherArguments] = try - Launch(args.toList) map exit + Launch(args) map exit catch { case b: BootException => errorAndExit(b.toString) case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage) - case r: xsbti.FullReload => Some(r.arguments) + case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false)) case e: Throwable => e.printStackTrace errorAndExit(Pre.prefixError(e.toString)) diff --git a/launch/src/main/scala/xsbt/boot/Configuration.scala b/launch/src/main/scala/xsbt/boot/Configuration.scala index e9464406a..4028e89cf 100644 --- a/launch/src/main/scala/xsbt/boot/Configuration.scala +++ b/launch/src/main/scala/xsbt/boot/Configuration.scala @@ -10,21 +10,34 @@ import java.util.regex.Pattern import scala.collection.immutable.List import annotation.tailrec +object ConfigurationStorageState extends Enumeration { + val PropertiesFile = value("properties-file") + val SerializedFile = value("serialized-file") +} + object Configuration { + import ConfigurationStorageState._ final val SysPropPrefix = "-D" def parse(file: URL, baseDirectory: File) = Using( new InputStreamReader(file.openStream, "utf8") )( (new ConfigurationParser).apply ) - @tailrec def find(args: List[String], baseDirectory: File): (URL, List[String]) = + + /** + * Finds the configuration location. + * + * Note: Configuration may be previously serialized by a launcher. 
+ */ + @tailrec def find(args: List[String], baseDirectory: File): (URL, List[String], ConfigurationStorageState.Value) = args match { - case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail) + case head :: tail if head.startsWith("@load:") => (directConfiguration(head.substring(6), baseDirectory), tail, SerializedFile) + case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail, PropertiesFile) case head :: tail if head.startsWith(SysPropPrefix) => setProperty(head stripPrefix SysPropPrefix) find(tail, baseDirectory) case _ => val propertyConfigured = System.getProperty("sbt.boot.properties") val url = if(propertyConfigured == null) configurationOnClasspath else configurationFromFile(propertyConfigured, baseDirectory) - (url , args) + (url, args, PropertiesFile) } def setProperty(head: String) { @@ -108,7 +121,7 @@ object Configuration // We have to hard code them here in order to use them to determine the location of sbt.boot.properties itself def guessSbtVersion: Option[String] = { - val props = ResolveValues.readProperties(new File(DefaultBuildProperties)) + val props = Pre.readProperties(new File(DefaultBuildProperties)) Option(props.getProperty(SbtVersionProperty)) } diff --git a/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala b/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala index 8b1252e4a..659573550 100644 --- a/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala +++ b/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala @@ -78,11 +78,14 @@ class ConfigurationParser val (logging, m5) = processSection(m4, "log", getLogging) val (properties, m6) = processSection(m5, "app-properties", getAppProperties) val ((ivyHome, checksums, isOverrideRepos, rConfigFile), m7) = processSection(m6, "ivy", getIvy) - check(m7, "section") + val (serverOptions, m8) = processSection(m7, "server", getServer) + check(m8, "section") val classifiers = 
Classifiers(scalaClassifiers, appClassifiers) val repositories = rConfigFile map readRepositoriesConfig getOrElse defaultRepositories val ivyOptions = IvyOptions(ivyHome, classifiers, repositories, checksums, isOverrideRepos) - new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties) + + // TODO - Read server properties... + new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties, serverOptions) } def getScala(m: LabelMap) = { @@ -178,6 +181,16 @@ class ConfigurationParser val app = new Application(org, name, rev, main, components, LaunchCrossVersion(crossVersioned), classpathExtra) (app, classifiers) } + def getServer(m: LabelMap): (Option[ServerConfiguration]) = + { + val (lock, m1) = optfile(m, "lock") + // TODO - JVM args + val (args, m2) = optfile(m1, "jvmargs") + val (props, m3) = optfile(m2, "jvmprops") + lock map { file => + ServerConfiguration(file, args, props) + } + } def getRepositories(m: LabelMap): List[Repository.Repository] = { import Repository.{Ivy, Maven, Predefined} diff --git a/launch/src/main/scala/xsbt/boot/Create.scala b/launch/src/main/scala/xsbt/boot/Create.scala index b22cd2324..17e549781 100644 --- a/launch/src/main/scala/xsbt/boot/Create.scala +++ b/launch/src/main/scala/xsbt/boot/Create.scala @@ -33,17 +33,11 @@ object Initialize def fill(file: File, spec: List[AppProperty]): Unit = process(file, spec, selectFill) def process(file: File, appProperties: List[AppProperty], select: AppProperty => Option[PropertyInit]) { - val properties = new Properties - if(file.exists) - Using(new FileInputStream(file))( properties.load ) + val properties = readProperties(file) val uninitialized = for(property <- appProperties; init <- select(property) if properties.getProperty(property.name) == null) yield initialize(properties, property.name, init) - if(!uninitialized.isEmpty) - { - file.getParentFile.mkdirs() - Using(new FileOutputStream(file))( out => properties.store(out, "") ) - } + 
if(!uninitialized.isEmpty) writeProperties(properties, file, "") } def initialize(properties: Properties, name: String, init: PropertyInit) { diff --git a/launch/src/main/scala/xsbt/boot/Enumeration.scala b/launch/src/main/scala/xsbt/boot/Enumeration.scala index 3e5a6f89d..e65309f2a 100644 --- a/launch/src/main/scala/xsbt/boot/Enumeration.scala +++ b/launch/src/main/scala/xsbt/boot/Enumeration.scala @@ -6,7 +6,7 @@ package xsbt.boot import Pre._ import scala.collection.immutable.List -class Enumeration +class Enumeration extends Serializable { def elements: List[Value] = members private lazy val members: List[Value] = @@ -25,6 +25,6 @@ class Enumeration } def value(s: String) = new Value(s, 0) def value(s: String, i: Int) = new Value(s, i) - final class Value(override val toString: String, val id: Int) + final class Value(override val toString: String, val id: Int) extends Serializable def toValue(s: String): Value = elements.find(_.toString == s).getOrElse(error("Expected one of " + elements.mkString(",") + " (got: " + s + ")")) } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Launch.scala b/launch/src/main/scala/xsbt/boot/Launch.scala index 4604ce2ef..688a769ee 100644 --- a/launch/src/main/scala/xsbt/boot/Launch.scala +++ b/launch/src/main/scala/xsbt/boot/Launch.scala @@ -6,20 +6,64 @@ package xsbt.boot import Pre._ import BootConfiguration.{CompilerModuleName, JAnsiVersion, LibraryModuleName} import java.io.File -import java.net.{URL, URLClassLoader} +import java.net.{URL, URLClassLoader, URI} import java.util.concurrent.Callable import scala.collection.immutable.List import scala.annotation.tailrec +import ConfigurationStorageState._ + +class LauncherArguments(val args: List[String], val isLocate: Boolean) object Launch { - def apply(arguments: List[String]): Option[Int] = apply( (new File("")).getAbsoluteFile , arguments ) + def apply(arguments: LauncherArguments): Option[Int] = apply( (new File("")).getAbsoluteFile , arguments ) - 
def apply(currentDirectory: File, arguments: List[String]): Option[Int] = { - val (configLocation, newArguments) = Configuration.find(arguments, currentDirectory) - val config = parseAndInitializeConfig(configLocation, currentDirectory) - launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArguments)) + def apply(currentDirectory: File, arguments: LauncherArguments): Option[Int] = { + val (configLocation, newArgs2, state) = Configuration.find(arguments.args, currentDirectory) + val config = state match { + case SerializedFile => LaunchConfiguration.restore(configLocation) + case PropertiesFile => parseAndInitializeConfig(configLocation, currentDirectory) + } + if(arguments.isLocate) { + if(!newArgs2.isEmpty) { + // TODO - Print the arguments without exploding proguard size. + System.err.println("Warning: --locate option ignores arguments.") + } + locate(currentDirectory, config) + } else { + // First check to see if there are java system properties we need to set. Then launch the application. + updateProperties(config) + launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArgs2)) + } } + /** Locate a server, print where it is, and exit. */ + def locate(currentDirectory: File, config: LaunchConfiguration): Option[Int] = { + config.serverConfig match { + case Some(_) => + val uri = ServerLocator.locate(currentDirectory, config) + System.out.println(uri.toASCIIString) + Some(0) + case None => sys.error(s"${config.app.groupID}-${config.app.main} is not configured as a server.") + } + } + /** Some hackery to allow sys.props to be configured via a file. If this launch config has + * a valid file configured, we load the properties and and apply them to this jvm. 
+ */ + def updateProperties(config: LaunchConfiguration): Unit = { + config.serverConfig match { + case Some(config) => + config.jvmPropsFile match { + case Some(file) if file.exists => + try setSystemProperties(readProperties(file)) + catch { + case e: Exception => throw new RuntimeException(s"Unable to load server properties file: ${file}", e) + } + case _ => + } + case None => + } + } + /** Parses the configuration *and* runs the initialization code that will remove variable references. */ def parseAndInitializeConfig(configLocation: URL, currentDirectory: File): LaunchConfiguration = { @@ -84,6 +128,10 @@ object Launch Thread.currentThread.setContextClassLoader(loader) try { eval } finally { Thread.currentThread.setContextClassLoader(oldLoader) } } + + // Cache of classes for lookup later. + val ServerMainClass = classOf[xsbti.ServerMain] + val AppMainClass = classOf[xsbti.AppMain] } final class RunConfiguration(val scalaVersion: Option[String], val app: xsbti.ApplicationID, val workingDirectory: File, val arguments: List[String]) @@ -152,28 +200,34 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i @tailrec private[this] final def getAppProvider0(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider = { val app = appModule(id, explicitScalaVersion, true, "app") - val baseDirs = (base: File) => appBaseDirs(base, id) + /** Replace the version of an ApplicationID with the given one, if set. 
*/ + def resolveId(appVersion: Option[String], id: xsbti.ApplicationID) = appVersion map { v => + import id._ + AppID(groupID(), name(), v, mainClass(), mainComponents(), crossVersionedValue(), classpathExtra()) + } getOrElse id + val baseDirs = (resolvedVersion: Option[String]) => (base: File) => appBaseDirs(base, resolveId(resolvedVersion, id)) def retrieve() = { - val sv = update(app, "") + val (appv, sv) = update(app, "") val scalaVersion = strictOr(explicitScalaVersion, sv) - new RetrievedModule(true, app, sv, baseDirs(scalaHome(ScalaOrg, scalaVersion))) + new RetrievedModule(true, app, sv, appv, baseDirs(appv)(scalaHome(ScalaOrg, scalaVersion))) } val retrievedApp = if(forceAppUpdate) retrieve() else - existing(app, ScalaOrg, explicitScalaVersion, baseDirs) getOrElse retrieve() + existing(app, ScalaOrg, explicitScalaVersion, baseDirs(None)) getOrElse retrieve() val scalaVersion = getOrError(strictOr(explicitScalaVersion, retrievedApp.detectedScalaVersion), "No Scala version specified or detected") val scalaProvider = getScala(scalaVersion, "(for " + id.name + ")") + val resolvedId = resolveId(retrievedApp.resolvedAppVersion, id) - val (missing, appProvider) = checkedAppProvider(id, retrievedApp, scalaProvider) + val (missing, appProvider) = checkedAppProvider(resolvedId, retrievedApp, scalaProvider) if(missing.isEmpty) appProvider else if(retrievedApp.fresh) app.retrieveCorrupt(missing) else - getAppProvider0(id, explicitScalaVersion, true) + getAppProvider0(resolvedId, explicitScalaVersion, true) } def scalaHome(scalaOrg: String, scalaVersion: Option[String]): File = new File(bootDirectory, baseDirectoryName(scalaOrg, scalaVersion)) def appHome(id: xsbti.ApplicationID, scalaVersion: Option[String]): File = appDirectory(scalaHome(ScalaOrg, scalaVersion), id) @@ -200,7 +254,7 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i try Some(provider(mod)) catch { case e: Exception => None } } getOrElse { - val scalaVersion = 
update(scalaM, reason) + val (_, scalaVersion) = update(scalaM, reason) provider( new RetrievedModule(true, scalaM, scalaVersion, baseDirs) ) } } @@ -240,27 +294,32 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i (scalaHome, libDirectory) } - def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider = new xsbti.AppProvider - { + def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider = + new xsbti.AppProvider { + import Launch.{ServerMainClass,AppMainClass} val scalaProvider = scalaProvider0 val id = appID def mainClasspath = app.fullClasspath lazy val loader = app.createLoader(scalaProvider.loader) + // TODO - For some reason we can't call this from vanilla scala. We get a + // no such method exception UNLESS we're in the same project. lazy val entryPoint: Class[T] forSome { type T } = { val c = Class.forName(id.mainClass, true, loader) if(classOf[xsbti.AppMain].isAssignableFrom(c)) c else if(PlainApplication.isPlainApplication(c)) c - else sys.error(s"Class: ${c} is not an instance of xsbti.AppMain nor does it have one of these static methods:\n"+ - " * void main(String[] args)\n * int main(String[] args)\n * xsbti.Exit main(String[] args)") + else if(ServerApplication.isServerApplication(c)) c + else sys.error(s"${c} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have one of these static methods:\n"+ + " * void main(String[] args)\n * int main(String[] args)\n * xsbti.Exit main(String[] args)\n") } // Deprecated API. Remove when we can. 
- def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(classOf[xsbti.AppMain]) + def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(AppMainClass) def newMain(): xsbti.AppMain = { - if(PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint) - else mainClass.newInstance + if(ServerApplication.isServerApplication(entryPoint)) ServerApplication(this) + else if(PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint) + else if(AppMainClass.isAssignableFrom(entryPoint)) mainClass.newInstance + else throw new IncompatibleClassChangeError(s"Main class ${entryPoint.getName} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have a valid `main` method.") } - lazy val components = componentProvider(appHome) } def componentProvider(appHome: File) = new ComponentProvider(appHome, lockBoot) @@ -290,10 +349,11 @@ class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val i failLabel = "Scala " + version, extraClasspath = array() ) - def update(mm: ModuleDefinition, reason: String): Option[String] = + /** Returns the resolved appVersion (if this was an App), as well as the scalaVersion. 
*/ + def update(mm: ModuleDefinition, reason: String): (Option[String], Option[String]) = { val result = ( new Update(mm.configuration) )(mm.target, reason) - if(result.success) result.scalaVersion else mm.retrieveFailed + if(result.success) result.appVersion -> result.scalaVersion else mm.retrieveFailed } } object Launcher diff --git a/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala b/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala index f8ccd1782..be1f0fc4a 100644 --- a/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala +++ b/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala @@ -9,27 +9,46 @@ import java.net.URL import scala.collection.immutable.List //TODO: use copy constructor, check size change -final case class LaunchConfiguration(scalaVersion: Value[String], ivyConfiguration: IvyOptions, app: Application, boot: BootSetup, logging: Logging, appProperties: List[AppProperty]) +final case class LaunchConfiguration(scalaVersion: Value[String], ivyConfiguration: IvyOptions, app: Application, boot: BootSetup, logging: Logging, appProperties: List[AppProperty], serverConfig: Option[ServerConfiguration]) { + def isServer: Boolean = serverConfig.isDefined def getScalaVersion = { val sv = Value.get(scalaVersion) if(sv == "auto") None else Some(sv) } - def withScalaVersion(newScalaVersion: String) = LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration, app, boot, logging, appProperties) - def withApp(app: Application) = LaunchConfiguration(scalaVersion, ivyConfiguration, app, boot, logging, appProperties) - def withAppVersion(newAppVersion: String) = LaunchConfiguration(scalaVersion, ivyConfiguration, app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties) + def withScalaVersion(newScalaVersion: String) = LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration, app, boot, logging, appProperties, serverConfig) + def withApp(app: Application) = LaunchConfiguration(scalaVersion, 
ivyConfiguration, app, boot, logging, appProperties, serverConfig) + def withAppVersion(newAppVersion: String) = LaunchConfiguration(scalaVersion, ivyConfiguration, app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) // TODO: withExplicit def withVersions(newScalaVersion: String, newAppVersion: String, classifiers0: Classifiers) = - LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration.copy(classifiers = classifiers0), app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties) + LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration.copy(classifiers = classifiers0), app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) - def map(f: File => File) = LaunchConfiguration(scalaVersion, ivyConfiguration.map(f), app.map(f), boot.map(f), logging, appProperties) + def map(f: File => File) = LaunchConfiguration(scalaVersion, ivyConfiguration.map(f), app.map(f), boot.map(f), logging, appProperties, serverConfig.map(_ map f)) +} +object LaunchConfiguration { + // Saves a launch configuration into a file. This is only safe if it is loaded by the *same* launcher version. + def save(config: LaunchConfiguration, f: File): Unit = { + val out = new java.io.ObjectOutputStream(new java.io.FileOutputStream(f)) + try out.writeObject(config) + finally out.close() + } + // Restores a launch configuration from a file. This is only safe if it is loaded by the *same* launcher version. 
+ def restore(url: URL): LaunchConfiguration = { + val in = new java.io.ObjectInputStream(url.openConnection.getInputStream) + try in.readObject.asInstanceOf[LaunchConfiguration] + finally in.close() + } +} +final case class ServerConfiguration(lockFile: File, jvmArgs: Option[File], jvmPropsFile: Option[File]) { + def map(f: File => File) = + ServerConfiguration(f(lockFile), jvmArgs map f, jvmPropsFile map f) } final case class IvyOptions(ivyHome: Option[File], classifiers: Classifiers, repositories: List[Repository.Repository], checksums: List[String], isOverrideRepositories: Boolean) { def map(f: File => File) = IvyOptions(ivyHome.map(f), classifiers, repositories, checksums, isOverrideRepositories) } -sealed trait Value[T] +sealed trait Value[T] extends Serializable final class Explicit[T](val value: T) extends Value[T] { override def toString = value.toString } @@ -130,7 +149,7 @@ sealed trait PropertyInit final class SetProperty(val value: String) extends PropertyInit final class PromptProperty(val label: String, val default: Option[String]) extends PropertyInit -final class Logging(level: LogLevel.Value) +final class Logging(level: LogLevel.Value) extends Serializable { def log(s: => String, at: LogLevel.Value) = if(level.id <= at.id) stream(at).println("[" + at + "] " + s) def debug(s: => String) = log(s, LogLevel.Debug) diff --git a/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala b/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala index c5903d415..800247743 100644 --- a/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala +++ b/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala @@ -13,8 +13,12 @@ final class ModuleDefinition(val configuration: UpdateConfiguration, val extraCl private def versionString: String = target match { case _: UpdateScala => configuration.getScalaVersion; case a: UpdateApp => Value.get(a.id.version) } } -final class RetrievedModule(val fresh: Boolean, val definition: ModuleDefinition, val detectedScalaVersion: 
Option[String], val baseDirectories: List[File]) +final class RetrievedModule(val fresh: Boolean, val definition: ModuleDefinition, val detectedScalaVersion: Option[String], val resolvedAppVersion: Option[String], val baseDirectories: List[File]) { + /** Use this constructor only when the module exists already, or when its version is not dynamic (so its resolved version would be the same) */ + def this(fresh: Boolean, definition: ModuleDefinition, detectedScalaVersion: Option[String], baseDirectories: List[File]) = + this(fresh, definition, detectedScalaVersion, None, baseDirectories) + lazy val classpath: Array[File] = getJars(baseDirectories) lazy val fullClasspath: Array[File] = concat(classpath, definition.extraClasspath) diff --git a/launch/src/main/scala/xsbt/boot/Pre.scala b/launch/src/main/scala/xsbt/boot/Pre.scala index 05a9585d1..26b83aee9 100644 --- a/launch/src/main/scala/xsbt/boot/Pre.scala +++ b/launch/src/main/scala/xsbt/boot/Pre.scala @@ -70,6 +70,10 @@ object Pre classes.toList.filter(classMissing) } def toURLs(files: Array[File]): Array[URL] = files.map(_.toURI.toURL) + def toFile(url: URL): File = + try { new File(url.toURI) } + catch { case _: java.net.URISyntaxException => new File(url.getPath) } + def delete(f: File) { @@ -82,4 +86,25 @@ object Pre } final val isWindows: Boolean = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows") final val isCygwin: Boolean = isWindows && java.lang.Boolean.getBoolean("sbt.cygwin") + + import java.util.Properties + import java.io.{FileInputStream,FileOutputStream} + private[boot] def readProperties(propertiesFile: File) = + { + val properties = new Properties + if(propertiesFile.exists) + Using( new FileInputStream(propertiesFile) )( properties.load ) + properties + } + private[boot] def writeProperties(properties: Properties, file: File, msg: String): Unit = { + file.getParentFile.mkdirs() + Using(new FileOutputStream(file))( out => properties.store(out, msg) ) + } + private[boot] 
def setSystemProperties(properties: Properties): Unit = { + val nameItr = properties.stringPropertyNames.iterator + while(nameItr.hasNext) { + val propName = nameItr.next + System.setProperty(propName, properties.getProperty(propName)) + } + } } diff --git a/launch/src/main/scala/xsbt/boot/ResolveValues.scala b/launch/src/main/scala/xsbt/boot/ResolveValues.scala index b04cdb949..952d9d970 100644 --- a/launch/src/main/scala/xsbt/boot/ResolveValues.scala +++ b/launch/src/main/scala/xsbt/boot/ResolveValues.scala @@ -12,16 +12,9 @@ object ResolveValues def apply(conf: LaunchConfiguration): LaunchConfiguration = (new ResolveValues(conf))() private def trim(s: String) = if(s eq null) None else notEmpty(s.trim) private def notEmpty(s: String) = if(isEmpty(s)) None else Some(s) - private[boot] def readProperties(propertiesFile: File) = - { - val properties = new Properties - if(propertiesFile.exists) - Using( new FileInputStream(propertiesFile) )( properties.load ) - properties - } } -import ResolveValues.{readProperties, trim} +import ResolveValues.{trim} final class ResolveValues(conf: LaunchConfiguration) { private def propertiesFile = conf.boot.properties diff --git a/launch/src/main/scala/xsbt/boot/ServerApplication.scala b/launch/src/main/scala/xsbt/boot/ServerApplication.scala new file mode 100644 index 000000000..3f592b151 --- /dev/null +++ b/launch/src/main/scala/xsbt/boot/ServerApplication.scala @@ -0,0 +1,200 @@ +package xsbt +package boot + +import java.io.File +import scala.util.control.NonFatal +import java.net.URI +import java.io.IOException +import Pre._ +import scala.annotation.tailrec + +/** A wrapper around 'raw' static methods to meet the sbt application interface. 
*/ +class ServerApplication private (provider: xsbti.AppProvider) extends xsbti.AppMain { + import ServerApplication._ + + override def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = { + val serverMain = provider.entryPoint.asSubclass(ServerMainClass).newInstance + val server = serverMain.start(configuration) + System.out.println(s"${SERVER_SYNCH_TEXT}${server.uri}") + server.awaitTermination() + } +} +/** An object that lets us detect compatible "plain" applications and launch them reflectively. */ +object ServerApplication { + val SERVER_SYNCH_TEXT = "[SERVER-URI]" + val ServerMainClass = classOf[xsbti.ServerMain] + // TODO - We should also adapt friendly static methods into servers, perhaps... + // We could even structurally type things that have a uri + awaitTermination method... + def isServerApplication(clazz: Class[_]): Boolean = + ServerMainClass.isAssignableFrom(clazz) + def apply(provider: xsbti.AppProvider): xsbti.AppMain = + new ServerApplication(provider) + +} +object ServerLocator { + // TODO - Probably want to drop this to reduce classfile size + private def locked[U](file: File)(f: => U): U = { + Locks(file, new java.util.concurrent.Callable[U] { + def call(): U = f + }) + } + // We use the lock file they give us to write the server info. However, + // it seems we cannot both use the server info file for locking *and* + // read from it successfully. Locking seems to blank the file. SO, we create + // another file near the info file to lock.a + def makeLockFile(f: File): File = + new File(f.getParentFile, s"${f.getName}.lock") + // Launch the process and read the port... + def locate(currentDirectory: File, config: LaunchConfiguration): URI = + config.serverConfig match { + case None => sys.error("No server lock file configured. 
Cannot locate server.") + case Some(sc) => locked(makeLockFile(sc.lockFile)) { + readProperties(sc.lockFile) match { + case Some(uri) if isReachable(uri) => uri + case _ => + val uri = ServerLauncher.startServer(currentDirectory, config) + writeProperties(sc.lockFile, uri) + uri + } + } + } + + private val SERVER_URI_PROPERTY = "server.uri" + def readProperties(f: File): Option[java.net.URI] = { + try { + val props = Pre.readProperties(f) + props.getProperty(SERVER_URI_PROPERTY) match { + case null => None + case uri => Some(new java.net.URI(uri)) + } + } catch { + case e: IOException => None + } + } + def writeProperties(f: File, uri: URI): Unit = { + val props = new java.util.Properties + props.setProperty(SERVER_URI_PROPERTY, uri.toASCIIString) + val output = new java.io.FileOutputStream(f) + val df = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mmZ") + df.setTimeZone(java.util.TimeZone.getTimeZone("UTC")) + Pre.writeProperties(props, f, s"Server Startup at ${df.format(new java.util.Date)}") + } + + def isReachable(uri: java.net.URI): Boolean = + try { + // TODO - For now we assume if we can connect, it means + // that the server is working... + val socket = new java.net.Socket(uri.getHost, uri.getPort) + try socket.isConnected + finally socket.close() + } catch { + case e: IOException => false + } +} +/** A helper class that dumps incoming values into a print stream. */ +class StreamDumper(in: java.io.BufferedReader, out: java.io.PrintStream) extends Thread { + // Don't block the application for this thread. 
+ setDaemon(true) + private val running = new java.util.concurrent.atomic.AtomicBoolean(true) + override def run(): Unit = { + def read(): Unit = if(running.get) in.readLine match { + case null => () + case line => + out.println(line) + read() + } + read() + out.close() + } + + def close(): Unit = running.set(false) +} +object ServerLauncher { + import ServerApplication.SERVER_SYNCH_TEXT + def startServer(currentDirectory: File, config: LaunchConfiguration): URI = { + val serverConfig = config.serverConfig match { + case Some(c) => c + case None => throw new RuntimeException("Logic Failure: Attempting to start a server that isn't configured to be a server. Please report a bug.") + } + val launchConfig = java.io.File.createTempFile("sbtlaunch", "config") + launchConfig.deleteOnExit() + LaunchConfiguration.save(config, launchConfig) + val jvmArgs: List[String] = serverConfig.jvmArgs map readLines match { + case Some(args) => args + case None => Nil + } + val cmd: List[String] = + ("java" :: jvmArgs) ++ + ("-jar" :: defaultLauncherLookup.getCanonicalPath :: s"@load:${launchConfig.toURI.toURL.toString}" :: Nil) + launchProcessAndGetUri(cmd, currentDirectory) + } + + // Here we try to isolate all the stupidity of dealing with Java processes. + def launchProcessAndGetUri(cmd: List[String], cwd: File): URI = { + // TODO - Handle windows path stupidity in arguments. + val pb = new java.lang.ProcessBuilder() + pb.command(cmd:_*) + pb.directory(cwd) + val process = pb.start() + // First we need to grab all the input streams, and close the ones we don't care about. + process.getOutputStream.close() + val stderr = process.getErrorStream + val stdout = process.getInputStream + // Now we start dumping out errors. + val errorDumper = new StreamDumper(new java.io.BufferedReader(new java.io.InputStreamReader(stderr)), System.err) + errorDumper.start() + // Now we look for the URI synch value, and then make sure we close the output files. 
+ try readUntilSynch(new java.io.BufferedReader(new java.io.InputStreamReader(stdout))) match { + case Some(uri) => uri + case _ => sys.error("Failed to start server!") + } finally { + errorDumper.close() + stdout.close() + stderr.close() + } + } + + object ServerUriLine { + def unapply(in: String): Option[URI] = + if(in startsWith SERVER_SYNCH_TEXT) { + Some(new URI(in.substring(SERVER_SYNCH_TEXT.size))) + } else None + } + /** Reads an input steam until it hits the server synch text and server URI. */ + def readUntilSynch(in: java.io.BufferedReader): Option[URI] = { + @tailrec + def read(): Option[URI] = in.readLine match { + case null => None + case ServerUriLine(uri) => Some(uri) + case line => read() + } + try read() + finally in.close() + } + /** Reads all the lines in a file. If it doesn't exist, returns an empty list. Forces UTF-8 strings. */ + def readLines(f: File): List[String] = + if(!f.exists) Nil else { + val reader = new java.io.BufferedReader(new java.io.InputStreamReader(new java.io.FileInputStream(f), "UTF-8")) + @tailrec + def read(current: List[String]): List[String] = + reader.readLine match { + case null => current.reverse + case line => read(line :: current) + } + try read(Nil) + finally reader.close() + } + + def defaultLauncherLookup: File = + try { + val classInLauncher = classOf[AppConfiguration] + val fileOpt = for { + domain <- Option(classInLauncher.getProtectionDomain) + source <- Option(domain.getCodeSource) + location = source.getLocation + } yield toFile(location) + fileOpt.getOrElse(throw new RuntimeException("Could not inspect protection domain or code source")) + } catch { + case e: Throwable => throw new RuntimeException("Unable to find sbt-launch.jar.", e) + } +} \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Update.scala b/launch/src/main/scala/xsbt/boot/Update.scala index 92e1bec92..cbf0fb020 100644 --- a/launch/src/main/scala/xsbt/boot/Update.scala +++ b/launch/src/main/scala/xsbt/boot/Update.scala 
@@ -39,7 +39,10 @@ final class UpdateConfiguration(val bootDirectory: File, val ivyHome: Option[Fil def getScalaVersion = scalaVersion match { case Some(sv) => sv; case None => "" } } -final class UpdateResult(val success: Boolean, val scalaVersion: Option[String]) +final class UpdateResult(val success: Boolean, val scalaVersion: Option[String], val appVersion: Option[String]) { + @deprecated("0.13.2", "Please use the other constructor providing appVersion.") + def this(success: Boolean, scalaVersion: Option[String]) = this(success, scalaVersion, None) +} /** Ensures that the Scala and application jars exist for the given versions or else downloads them.*/ final class Update(config: UpdateConfiguration) @@ -55,7 +58,7 @@ final class Update(config: UpdateConfiguration) val optionProps = Option(System.getProperty("sbt.boot.credentials")) orElse Option(System.getenv("SBT_CREDENTIALS")) map ( path => - ResolveValues.readProperties(new File(path)) + Pre.readProperties(new File(path)) ) optionProps match { case Some(props) => extractCredentials("realm","host","user","password")(props) @@ -109,7 +112,7 @@ final class Update(config: UpdateConfiguration) e.printStackTrace(logWriter) log(e.toString) System.out.println(" (see " + logFile + " for complete log)") - new UpdateResult(false, None) + new UpdateResult(false, None, None) } finally { @@ -127,15 +130,16 @@ final class Update(config: UpdateConfiguration) moduleID.setLastModified(System.currentTimeMillis) moduleID.addConfiguration(new IvyConfiguration(DefaultIvyConfiguration, PUBLIC, "", new Array(0), true, null)) // add dependencies based on which target needs updating - target match + val dep = target match { case u: UpdateScala => val scalaVersion = getScalaVersion addDependency(moduleID, scalaOrg, CompilerModuleName, scalaVersion, "default;optional(default)", u.classifiers) - addDependency(moduleID, scalaOrg, LibraryModuleName, scalaVersion, "default", u.classifiers) + val ddesc = addDependency(moduleID, scalaOrg, 
LibraryModuleName, scalaVersion, "default", u.classifiers) excludeJUnit(moduleID) val scalaOrgString = if (scalaOrg != ScalaOrg) " " + scalaOrg else "" System.out.println("Getting" + scalaOrgString + " Scala " + scalaVersion + " " + reason + "...") + ddesc.getDependencyId case u: UpdateApp => val app = u.id val resolvedName = (app.crossVersioned, scalaVersion) match { @@ -143,24 +147,31 @@ final class Update(config: UpdateConfiguration) case (xsbti.CrossValue.Binary, Some(sv)) => app.name + "_" + CrossVersionUtil.binaryScalaVersion(sv) case _ => app.name } - addDependency(moduleID, app.groupID, resolvedName, app.getVersion, "default(compile)", u.classifiers) + val ddesc = addDependency(moduleID, app.groupID, resolvedName, app.getVersion, "default(compile)", u.classifiers) System.out.println("Getting " + app.groupID + " " + resolvedName + " " + app.getVersion + " " + reason + "...") + ddesc.getDependencyId } - update(moduleID, target) + update(moduleID, target, dep) } /** Runs the resolve and retrieve for the given moduleID, which has had its dependencies added already. */ - private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget): UpdateResult = + private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget, dep: ModuleId): UpdateResult = { val eventManager = new EventManager - val autoScalaVersion = resolve(eventManager, moduleID) + val (autoScalaVersion, depVersion) = resolve(eventManager, moduleID, dep) + // Fix up target.id with the depVersion that we know for sure is resolved (not dynamic) -- this way, `retrieve` + // will put them in the right version directory. 
+ val target1 = (depVersion, target) match { + case (Some(dv), u: UpdateApp) => import u._; new UpdateApp(id.copy(version = new Explicit(dv)), classifiers, tpe) + case _ => target + } setScalaVariable(settings, autoScalaVersion) - retrieve(eventManager, moduleID, target, autoScalaVersion) - new UpdateResult(true, autoScalaVersion) + retrieve(eventManager, moduleID, target1, autoScalaVersion) + new UpdateResult(true, autoScalaVersion, depVersion) } private def createID(organization: String, name: String, revision: String) = ModuleRevisionId.newInstance(organization, name, revision) /** Adds the given dependency to the default configuration of 'moduleID'. */ - private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String, classifiers: List[String]) + private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String, classifiers: List[String]) = { val dep = new DefaultDependencyDescriptor(moduleID, createID(organization, name, revision), false, false, true) for(c <- conf.split(";")) @@ -168,6 +179,7 @@ final class Update(config: UpdateConfiguration) for(classifier <- classifiers) addClassifier(dep, name, classifier) moduleID.addDependency(dep) + dep } private def addClassifier(dep: DefaultDependencyDescriptor, name: String, classifier: String) { @@ -186,8 +198,9 @@ final class Update(config: UpdateConfiguration) rule.addConfiguration(DefaultIvyConfiguration) rule } - // returns the version of any Scala dependency - private def resolve(eventManager: EventManager, module: ModuleDescriptor): Option[String] = + val scalaLibraryId = ModuleId.newInstance(ScalaOrg, LibraryModuleName) + // Returns the version of the scala library, as well as `dep` (a dependency of `module`) after it's been resolved + private def resolve(eventManager: EventManager, module: ModuleDescriptor, dep: ModuleId): (Option[String], Option[String]) = { val resolveOptions = new 
ResolveOptions // this reduces the substantial logging done by Ivy, including the progress dots when downloading artifacts @@ -203,18 +216,18 @@ final class Update(config: UpdateConfiguration) System.out.println(seen.toArray.mkString(System.getProperty("line.separator"))) error("Error retrieving required libraries") } - scalaDependencyVersion(resolveReport).headOption + val modules = moduleRevisionIDs(resolveReport) + extractVersion(modules, scalaLibraryId) -> extractVersion(modules, dep) } - private[this] def scalaDependencyVersion(report: ResolveReport): List[String] = + private[this] def extractVersion(modules: Seq[ModuleRevisionId], dep: ModuleId): Option[String] = { - val modules = report.getConfigurations.toList flatMap { config => - report.getConfigurationReport(config).getModuleRevisionIds.toArray - } - modules flatMap { - case module: ModuleRevisionId if module.getOrganisation == ScalaOrg && module.getName == LibraryModuleName => - module.getRevision :: Nil - case _ => Nil - } + modules collectFirst { case m if m.getModuleId.equals(dep) => m.getRevision } + } + private[this] def moduleRevisionIDs(report: ResolveReport): Seq[ModuleRevisionId] = + { + import collection.JavaConverters._ + import org.apache.ivy.core.resolve.IvyNode + report.getDependencies.asInstanceOf[java.util.List[IvyNode]].asScala map (_.getResolvedId) } /** Exceptions are logged to the update log file. 
*/ @@ -244,7 +257,8 @@ final class Update(config: UpdateConfiguration) val filter = (a: IArtifact) => retrieveType(a.getType) && a.getExtraAttribute("classifier") == null && extraFilter(a) retrieveOptions.setArtifactFilter(new ArtifactFilter(filter)) val scalaV = strictOr(scalaVersion, autoScalaVersion) - retrieveEngine.retrieve(module.getModuleRevisionId, baseDirectoryName(scalaOrg, scalaV) + "/" + pattern, retrieveOptions) + retrieveOptions.setDestArtifactPattern(baseDirectoryName(scalaOrg, scalaV) + "/" + pattern) + retrieveEngine.retrieve(module.getModuleRevisionId, retrieveOptions) } private[this] def notCoreScala(a: IArtifact) = a.getName match { case LibraryModuleName | CompilerModuleName => false diff --git a/launch/src/test/scala/ServerLocatorTest.scala b/launch/src/test/scala/ServerLocatorTest.scala new file mode 100644 index 000000000..7e0b30c36 --- /dev/null +++ b/launch/src/test/scala/ServerLocatorTest.scala @@ -0,0 +1,53 @@ +package xsbt.boot + +import java.io.{File,InputStream} +import java.net.URL +import java.util.Properties +import xsbti._ +import org.specs2._ +import mutable.Specification +import LaunchTest._ +import sbt.IO.{createDirectory, touch,withTemporaryDirectory} +import java.net.URI + +object ServerLocatorTest extends Specification +{ + "ServerLocator" should { + // TODO - Maybe use scalacheck to randomnly generate URIs + "read and write server URI properties" in { + withTemporaryDirectory { dir => + val propFile = new File(dir, "server.properties") + val expected = new java.net.URI("http://localhost:8080") + ServerLocator.writeProperties(propFile, expected) + ServerLocator.readProperties(propFile) must equalTo(Some(expected)) + } + } + "detect listening ports" in { + val serverSocket = new java.net.ServerSocket(0) + object serverThread extends Thread { + override def run(): Unit = { + // Accept one connection. 
+ val result = serverSocket.accept() + result.close() + serverSocket.close() + } + } + serverThread.start() + val uri = new java.net.URI(s"http://${serverSocket.getInetAddress.getHostAddress}:${serverSocket.getLocalPort}") + ServerLocator.isReachable(uri) must beTrue + } + } + "ServerLauncher" should { + "detect start URI from reader" in { + val expected = new java.net.URI("http://localhost:8080") + val input = s"""|Some random text + |to start the server + |${ServerApplication.SERVER_SYNCH_TEXT}${expected.toASCIIString} + |Some more output.""".stripMargin + val inputStream = new java.io.BufferedReader(new java.io.StringReader(input)) + val result = try ServerLauncher.readUntilSynch(inputStream) + finally inputStream.close() + result must equalTo(Some(expected)) + } + } +} \ No newline at end of file diff --git a/launch/src/test/scala/URITests.scala b/launch/src/test/scala/URITests.scala index 42c2dc2bd..1fc18b949 100644 --- a/launch/src/test/scala/URITests.scala +++ b/launch/src/test/scala/URITests.scala @@ -8,14 +8,23 @@ import java.net.URI object URITests extends Properties("URI Tests") { + // Need a platform-specific root, otherwise URI will not be absolute (e.g. if we use a "/a/b/c" path in Windows) + // Note: + // If I use "C:" instead of "/C:", then isAbsolute == true for the resulting URI, but resolve is broken: + // e.g. 
scala> new URI("file", "c:/a/b'/has spaces", null).resolve("a") broken + // res0: java.net.URI = a + // scala> new URI("file", "/c:/a/b'/has spaces", null).resolve("a") working + // res1: java.net.URI = file:/c:/a/b'/a + val Root = if (xsbt.boot.Pre.isWindows) "/C:/" else "/" + val FileProtocol = "file" property("directoryURI adds trailing slash") = secure { - val dirURI = directoryURI(new File("/a/b/c")) - val directURI = filePathURI("/a/b/c/") + val dirURI = directoryURI(new File(Root + "a/b/c")) + val directURI = filePathURI(Root + "a/b/c/") dirURI == directURI } property("directoryURI preserves trailing slash") = secure { - directoryURI(new File("/a/b/c/")) == filePathURI("/a/b/c/") + directoryURI(new File(Root + "a/b/c/")) == filePathURI(Root + "a/b/c/") } property("filePathURI encodes spaces") = secure { @@ -33,18 +42,18 @@ object URITests extends Properties("URI Tests") } property("filePathURI and File.toURI agree for absolute file") = secure { - val s = "/a/b'/has spaces" + val s = Root + "a/b'/has spaces" val viaPath = filePathURI(s) - val viaFile = (new File(s)).toURI + val viaFile = new File(s).toURI s"via path: $viaPath" |: s"via file: $viaFile" |: (viaPath == viaFile) } property("filePathURI supports URIs") = secure { - val s = "file:///is/a/uri/with%20spaces" - val decoded = "/is/a/uri/with spaces" - val encoded = "/is/a/uri/with%20spaces" + val s = s"file://${Root}is/a/uri/with%20spaces" + val decoded = Root + "is/a/uri/with spaces" + val encoded = Root + "is/a/uri/with%20spaces" val fpURI = filePathURI(s) val directURI = new URI(s) s"filePathURI: $fpURI" |: diff --git a/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala b/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala new file mode 100644 index 000000000..930e565a9 --- /dev/null +++ b/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala @@ -0,0 +1,74 @@ +/** These are packaged and published locally and the resulting artifact is used to test the launcher.*/ 
+package xsbt.boot.test + +import java.net.Socket +import java.net.SocketTimeoutException + +class EchoServer extends xsbti.ServerMain +{ + def start(configuration: xsbti.AppConfiguration): xsbti.Server = + { + object server extends xsbti.Server { + // TODO - Start a server. + val serverSocket = new java.net.ServerSocket(0) + val port = serverSocket.getLocalPort + val addr = serverSocket.getInetAddress.getHostAddress + override val uri =new java.net.URI(s"http://${addr}:${port}") + // Check for stop every second. + serverSocket.setSoTimeout(1000) + object serverThread extends Thread { + private val running = new java.util.concurrent.atomic.AtomicBoolean(true) + override def run(): Unit = { + while(running.get) try { + val clientSocket = serverSocket.accept() + // Handle client connections + object clientSocketThread extends Thread { + override def run(): Unit = { + echoTo(clientSocket) + } + } + clientSocketThread.start() + } catch { + case e: SocketTimeoutException => // Ignore + } + } + // Simple mechanism to dump input to output. + private def echoTo(socket: Socket): Unit = { + val input = new java.io.BufferedReader(new java.io.InputStreamReader(socket.getInputStream)) + val output = new java.io.BufferedWriter(new java.io.OutputStreamWriter(socket.getOutputStream)) + import scala.util.control.Breaks._ + try { + // Lame way to break out. 
+ breakable { + def read(): Unit = input.readLine match { + case null => () + case "kill" => + running.set(false) + serverSocket.close() + break() + case line => + output.write(line) + output.flush() + read() + } + read() + } + } finally { + output.close() + input.close() + socket.close() + } + } + } + // Start the thread immediately + serverThread.start() + override def awaitTermination(): xsbti.MainResult = { + serverThread.join() + new Exit(0) + } + } + server + } + + +} \ No newline at end of file diff --git a/main/actions/src/main/scala/sbt/CacheIvy.scala b/main/actions/src/main/scala/sbt/CacheIvy.scala index ec823bb12..b09ce7f2b 100644 --- a/main/actions/src/main/scala/sbt/CacheIvy.scala +++ b/main/actions/src/main/scala/sbt/CacheIvy.scala @@ -3,7 +3,7 @@ */ package sbt - import Predef.{conforms => _, _} + import Predef.{Map, Set, implicitly} // excludes *both 2.10.x conforms and 2.11.x $conforms in source compatible manner. import FileInfo.{exists, hash} import java.io.File diff --git a/main/actions/src/main/scala/sbt/Compiler.scala b/main/actions/src/main/scala/sbt/Compiler.scala index 0d0a33c49..459636f8e 100644 --- a/main/actions/src/main/scala/sbt/Compiler.scala +++ b/main/actions/src/main/scala/sbt/Compiler.scala @@ -58,16 +58,21 @@ object Compiler val provider = ComponentCompiler.interfaceProvider(componentManager) new AnalyzingCompiler(instance, provider, cpOptions, log) } - - def apply(in: Inputs, log: Logger): Analysis = + def apply(in: Inputs, log: Logger): Analysis = { - import in.compilers._ - import in.config._ - import in.incSetup._ - + import in.compilers._ + import in.config._ + import in.incSetup._ + apply(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper)) + } + def apply(in: Inputs, log: Logger, reporter: xsbti.Reporter): Analysis = + { + import in.compilers._ + import in.config._ + import in.incSetup._ val agg = new AggressiveCompile(cacheFile) agg(scalac, javac, sources, classpath, CompileOutput(classesDirectory), cache, 
None, options, javacOptions, - analysisMap, definesClass, new LoggerReporter(maxErrors, log, sourcePositionMapper), order, skip, incOptions)(log) + analysisMap, definesClass, reporter, order, skip, incOptions)(log) } private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) = diff --git a/main/actions/src/main/scala/sbt/Tests.scala b/main/actions/src/main/scala/sbt/Tests.scala index d2276cf24..fb7f2cdcb 100644 --- a/main/actions/src/main/scala/sbt/Tests.scala +++ b/main/actions/src/main/scala/sbt/Tests.scala @@ -220,7 +220,9 @@ object Tests def processResults(results: Iterable[(String, SuiteResult)]): Output = Output(overall(results.map(_._2.result)), results.toMap, Iterable.empty) def foldTasks(results: Seq[Task[Output]], parallel: Boolean): Task[Output] = - if (parallel) + if (results.isEmpty) + task { Output(TestResult.Passed, Map.empty, Nil) } + else if (parallel) reduced(results.toIndexedSeq, { case (Output(v1, m1, _), Output(v2, m2, _)) => Output(if (v1.id < v2.id) v2 else v1, m1 ++ m2, Iterable.empty) }) @@ -336,4 +338,4 @@ object Tests } } -final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException \ No newline at end of file +final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException diff --git a/main/actions/src/main/scala/sbt/compiler/Eval.scala b/main/actions/src/main/scala/sbt/compiler/Eval.scala index 505897ff6..1fb4f6bb4 100644 --- a/main/actions/src/main/scala/sbt/compiler/Eval.scala +++ b/main/actions/src/main/scala/sbt/compiler/Eval.scala @@ -87,7 +87,7 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl)) new EvalResult(i.extra, value, i.generated, i.enclosingModule) } - def evalDefinitions(definitions: Seq[(String,Range)], imports: EvalImports, srcName: String, valTypes: Seq[String]): EvalDefinitions = + def 
evalDefinitions(definitions: Seq[(String,scala.Range)], imports: EvalImports, srcName: String, valTypes: Seq[String]): EvalDefinitions = { require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.") val ev = new EvalType[Seq[String]] { @@ -349,7 +349,7 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se } /** Constructs a CompilationUnit for each definition, which can be used to independently parse the definition into a Tree. * Additionally, a CompilationUnit for the combined definitions is constructed for use by combined compilation after parsing. */ - private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String,Range)]): (CompilationUnit, Seq[CompilationUnit]) = + private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String,scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = { def fragmentUnit(content: String, lineMap: Array[Int]) = new CompilationUnit(fragmentSourceFile(srcName, content, lineMap)) diff --git a/main/command/src/main/scala/sbt/BasicCommandStrings.scala b/main/command/src/main/scala/sbt/BasicCommandStrings.scala index 3c8a38f14..237db8d7d 100644 --- a/main/command/src/main/scala/sbt/BasicCommandStrings.scala +++ b/main/command/src/main/scala/sbt/BasicCommandStrings.scala @@ -19,7 +19,7 @@ object BasicCommandStrings /** The command name to terminate the program.*/ val TerminateAction: String = Exit - def helpBrief = (HelpCommand, "Displays this help message or prints detailed help on requested commands (run 'help ').") + def helpBrief = (HelpCommand, s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand ').") def helpDetailed = HelpCommand + """ Prints a help summary. 
diff --git a/main/settings/src/main/scala/sbt/Structure.scala b/main/settings/src/main/scala/sbt/Structure.scala index 8c023c876..bd23f3f4d 100644 --- a/main/settings/src/main/scala/sbt/Structure.scala +++ b/main/settings/src/main/scala/sbt/Structure.scala @@ -27,7 +27,7 @@ sealed trait ScopedTaskable[T] extends Scoped { /** Identifies a setting. It consists of three parts: the scope, the name, and the type of a value associated with this key. * The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type AttributeKey[T]. +* The name and the type are represented by a value of type `AttributeKey[T]`. * Instances are constructed using the companion object. */ sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] with Scoped.ScopingSetting[SettingKey[T]] with Scoped.DefinableSetting[T] { @@ -52,7 +52,7 @@ sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitiali /** Identifies a task. It consists of three parts: the scope, the name, and the type of the value computed by a task associated with this key. * The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type AttributeKey[Task[T]]. +* The name and the type are represented by a value of type `AttributeKey[Task[T]]`. * Instances are constructed using the companion object. */ sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[Task[T]] with Scoped.ScopingSetting[TaskKey[T]] with Scoped.DefinableTask[T] { @@ -76,7 +76,7 @@ sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[ /** Identifies an input task. An input task parses input and produces a task to run. * It consists of three parts: the scope, the name, and the type of the value produced by an input task associated with this key. * The scope is represented by a value of type Scope. 
-* The name and the type are represented by a value of type AttributeKey[InputTask[T]]. +* The name and the type are represented by a value of type `AttributeKey[InputTask[T]]`. * Instances are constructed using the companion object. */ sealed trait InputKey[T] extends Scoped with KeyedInitialize[InputTask[T]] with Scoped.ScopingSetting[InputKey[T]] with Scoped.DefinableSetting[InputTask[T]] { @@ -95,6 +95,21 @@ object Scoped implicit def taskScopedToKey[T](s: TaskKey[T]): ScopedKey[Task[T]] = ScopedKey(s.scope, s.key) implicit def inputScopedToKey[T](s: InputKey[T]): ScopedKey[InputTask[T]] = ScopedKey(s.scope, s.key) + /** + * Mixin trait for adding convenience vocabulary associated with specifiying the [[Scope]] of a setting. + * Allows specification of the Scope or part of the [[Scope]] of a setting being referenced. + * @example + * {{{ + * name in Global := "hello Global scope" + * + * name in (Compile, packageBin) := "hello Compile scope packageBin" + * + * name in Compile := "hello Compile scope" + + * name.in(Compile).:=("hello ugly syntax") + * }}} + * + */ sealed trait ScopingSetting[Result] { def in(s: Scope): Result @@ -113,18 +128,50 @@ object Scoped def scopedInput[T](s: Scope, k: AttributeKey[InputTask[T]]): InputKey[T] = new InputKey[T] { val scope = s; val key = k } def scopedTask[T](s: Scope, k: AttributeKey[Task[T]]): TaskKey[T] = new TaskKey[T] { val scope = s; val key = k } + /** + * Mixin trait for adding convenience vocabulary associated with applying a setting to a configuration item. + */ sealed trait DefinableSetting[S] { def scopedKey: ScopedKey[S] private[sbt] final def :==(app: S): Setting[S] = macro std.TaskMacro.settingAssignPure[S] + + /** Binds a single value to this. A new [Def.Setting] is defined using the value(s) of `app`. + * @param app value to bind to this key + * @return setting binding this key to the given value. 
+ */ final def <<= (app: Initialize[S]): Setting[S] = macro std.TaskMacro.settingAssignPosition[S] + + /** Internally used function for setting a value along with the `.sbt` file location where it is defined. */ final def set (app: Initialize[S], source: SourcePosition): Setting[S] = setting(scopedKey, app, source) + + /** From the given [[Settings]], extract the value bound to this key. */ final def get(settings: Settings[Scope]): Option[S] = settings.get(scopedKey.scope, scopedKey.key) + + /** Creates an [[Def.Initialize]] with value [[scala.None]] if there was no previous definition of this key, + * and `[[scala.Some]](value)` if a definition exists. Useful for when you want to use the ''existence'' of + * one setting in order to define another setting. + * @return currently bound value wrapped in `Initialize[Some[T]]`, or `Initialize[None]` if unbound. */ final def ? : Initialize[Option[S]] = Def.optional(scopedKey)(idFun) + + /** Creates an [[Def.Initialize]] with value bound to this key, or returns `i` parameter if unbound. + * @param i value to return if this setting doesn't have a value. + * @return currently bound setting value, or `i` if unbound. + */ final def or[T >: S](i: Initialize[T]): Initialize[T] = (this.?, i)(_ getOrElse _ ) + + /** Like [[?]], but with a call-by-name parameter rather than an existing [[Def.Initialize]]. + * Useful when you want to have a value computed when no value is bound to this key. + * @param or by-name expression evaluated when a value is needed. + * @return currently bound setting value, or the result of `or` if unbound. + */ final def ??[T >: S](or: => T): Initialize[T] = Def.optional(scopedKey)(_ getOrElse or ) } + + /** + * Wraps an [[sbt.Def.Initialize]] instance to provide `map` and `flatMap` symantics. 
+ */ final class RichInitialize[S](init: Initialize[S]) { def map[T](f: S => T): Initialize[Task[T]] = init(s => mktask(f(s)) ) diff --git a/main/settings/src/main/scala/sbt/std/InputWrapper.scala b/main/settings/src/main/scala/sbt/std/InputWrapper.scala index 400dbc8f8..8d8fe6182 100644 --- a/main/settings/src/main/scala/sbt/std/InputWrapper.scala +++ b/main/settings/src/main/scala/sbt/std/InputWrapper.scala @@ -59,6 +59,9 @@ object InputWrapper private[std] def wrapPrevious[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[Option[T]] = wrapImpl[Option[T],InputWrapper.type](c, InputWrapper, WrapPreviousName)(ts, pos) + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ + /** Wraps an arbitrary Tree in a call to the `.` method of this module for later processing by an enclosing macro. * The resulting Tree is the manually constructed version of: * @@ -67,6 +70,7 @@ object InputWrapper def wrapImpl[T: c.WeakTypeTag, S <: AnyRef with Singleton](c: Context, s: S, wrapName: String)(ts: c.Expr[Any], pos: c.Position)(implicit it: c.TypeTag[s.type]): c.Expr[T] = { import c.universe.{Apply=>ApplyTree,_} + import compat._ val util = new ContextUtil[c.type](c) val iw = util.singleton(s) val tpe = c.weakTypeOf[T] @@ -75,8 +79,16 @@ object InputWrapper sel.setPos(pos) // need to set the position on Select, because that is where the compileTimeOnly check looks val tree = ApplyTree(TypeApply(sel, TypeTree(tpe) :: Nil), ts.tree :: Nil) tree.setPos(ts.tree.pos) - tree.setType(tpe) - c.Expr[T](tree) + // JZ: I'm not sure why we need to do this. Presumably a caller is wrapping this tree in a + // typed tree *before* handing the whole thing back to the macro engine. One must never splice + // untyped trees under typed trees, as the type checker doesn't descend if `tree.tpe == null`. 
+ // + // #1031 The previous attempt to fix this just set the type on `tree`, which worked in cases when the + // call to `.value` was inside a the task macro and eliminated before the end of the typer phase. + // But, if a "naked" call to `.value` left the typer, the superaccessors phase would freak out when + // if hit the untyped trees, before we could get to refchecks and the desired @compileTimeOnly warning. + val typedTree = c.typeCheck(tree) + c.Expr[T](typedTree) } def valueMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[T] = diff --git a/main/settings/src/main/scala/sbt/std/TaskMacro.scala b/main/settings/src/main/scala/sbt/std/TaskMacro.scala index b4789247c..a15f8f5d6 100644 --- a/main/settings/src/main/scala/sbt/std/TaskMacro.scala +++ b/main/settings/src/main/scala/sbt/std/TaskMacro.scala @@ -281,9 +281,13 @@ object TaskMacro private[this] def iTaskMacro[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = Instance.contImpl[T,Id](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t), Instance.idTransform) + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? 
}; import HasCompat._ + private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = { import c.universe.{Apply=>ApplyTree,_} + import compat._ val tag = implicitly[c.WeakTypeTag[T]] val util = ContextUtil[c.type](c) diff --git a/main/src/main/scala/sbt/AddSettings.scala b/main/src/main/scala/sbt/AddSettings.scala index 2d698b874..9d1a2ac80 100644 --- a/main/src/main/scala/sbt/AddSettings.scala +++ b/main/src/main/scala/sbt/AddSettings.scala @@ -12,11 +12,27 @@ object AddSettings private[sbt] final class Sequence(val sequence: Seq[AddSettings]) extends AddSettings private[sbt] final object User extends AddSettings private[sbt] final class Plugins(val include: Plugin => Boolean) extends AddSettings + private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings + private[sbt] final object BuildScalaFiles extends AddSettings + + /** Adds all settings from autoplugins. */ + val autoPlugins: AddSettings = new AutoPlugins(const(true)) // Note: We do not expose fine-grained autoplugins because + // it's dangerous to control at that level right now. + // Leaving the hook in place in case we need to expose + // it, but most likely it will remain locked out + // for users with an alternative ordering feature + // in place. + + /** Settings specified in Build.scala `Project` constructors. */ + val buildScalaFiles: AddSettings = BuildScalaFiles + + /** All plugins that aren't auto plugins. */ + val nonAutoPlugins: AddSettings = plugins(const(true)) /** Adds all settings from a plugin to a project. 
*/ - val allPlugins: AddSettings = plugins(const(true)) + val allPlugins: AddSettings = seq(autoPlugins, nonAutoPlugins) /** Allows the plugins whose names match the `names` filter to automatically add settings to a project. */ def plugins(include: Plugin => Boolean): AddSettings = new Plugins(include) @@ -33,7 +49,8 @@ object AddSettings /** Includes settings automatically*/ def seq(autos: AddSettings*): AddSettings = new Sequence(autos) - val allDefaults: AddSettings = seq(userSettings, allPlugins, defaultSbtFiles) + /** The default inclusion of settings. */ + val allDefaults: AddSettings = seq(autoPlugins, buildScalaFiles, userSettings, nonAutoPlugins, defaultSbtFiles) /** Combines two automatic setting configurations. */ def append(a: AddSettings, b: AddSettings): AddSettings = (a,b) match { diff --git a/main/src/main/scala/sbt/Aggregation.scala b/main/src/main/scala/sbt/Aggregation.scala index 81a9d5494..57b7c8c4f 100644 --- a/main/src/main/scala/sbt/Aggregation.scala +++ b/main/src/main/scala/sbt/Aggregation.scala @@ -59,7 +59,7 @@ final object Aggregation import extracted.structure val toRun = ts map { case KeyValue(k,t) => t.map(v => KeyValue(k,v)) } join; val roots = ts map { case KeyValue(k,_) => k } - val config = extractedConfig(extracted, structure) + val config = extractedConfig(extracted, structure, s) val start = System.currentTimeMillis val (newS, result) = withStreams(structure, s){ str => @@ -211,4 +211,4 @@ final object Aggregation @deprecated("Use BuildUtil.aggregationRelation", "0.13.0") def relation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = BuildUtil.aggregationRelation(units) -} \ No newline at end of file +} diff --git a/main/src/main/scala/sbt/Build.scala b/main/src/main/scala/sbt/Build.scala index 0501f9259..030e54dfb 100644 --- a/main/src/main/scala/sbt/Build.scala +++ b/main/src/main/scala/sbt/Build.scala @@ -12,12 +12,14 @@ trait Build { def projectDefinitions(baseDirectory: File): Seq[Project] = projects 
def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq + // TODO: Should we grab the build core setting shere or in a plugin? def settings: Seq[Setting[_]] = Defaults.buildCore def buildLoaders: Seq[BuildLoader.Components] = Nil /** Explicitly defines the root project. * If None, the root project is the first project in the build's root directory or just the first project if none are in the root directory.*/ def rootProject: Option[Project] = None } +// TODO 0.14.0: decide if Plugin should be deprecated in favor of AutoPlugin trait Plugin { @deprecated("Override projectSettings or buildSettings instead.", "0.12.0") @@ -45,8 +47,16 @@ object Build @deprecated("Explicitly specify the ID", "0.13.0") def defaultProject(base: File): Project = defaultProject(defaultID(base), base) def defaultProject(id: String, base: File): Project = Project(id, base).settings( + // TODO - Can we move this somewhere else? ordering of settings is causing this to get borked. // if the user has overridden the name, use the normal organization that is derived from the name. - organization <<= (thisProject, organization, name) { (p, o, n) => if(p.id == n) "default" else o } + organization := { + val overridden = thisProject.value.id == name.value + organization.?.value match { + case Some(o) if !overridden => o + case _ => "default" + } + //(thisProject, organization, name) { (p, o, n) => if(p.id == n) "default" else o } + } ) def defaultAggregatedProject(id: String, base: File, agg: Seq[ProjectRef]): Project = defaultProject(id, base).aggregate(agg : _*) diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index 9db86a90f..be6bad658 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -30,23 +30,116 @@ final class StructureIndex( val keyIndex: KeyIndex, val aggregateKeyIndex: KeyIndex ) + +/** A resolved build unit. 
(`ResolvedBuildUnit` would be a better name to distinguish it from the loaded, but unresolved `BuildUnit`.) +* @param unit The loaded, but unresolved [[BuildUnit]] this was resolved from. +* @param defined The definitive map from project IDs to resolved projects. +* These projects have had [[Reference]]s resolved and [[AutoPlugin]]s evaluated. +* @param rootProjects The list of project IDs for the projects considered roots of this build. +* The first root project is used as the default in several situations where a project is not otherwise selected. +*/ final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, ResolvedProject], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase { assert(!rootProjects.isEmpty, "No root projects defined for build unit " + unit) + /** The project to use as the default when one is not otherwise selected. + * [[LocalRootProject]] resolves to this from within the same build.*/ val root = rootProjects.head + + /** The base directory of the build unit (not the build definition).*/ def localBase = unit.localBase + + /** The classpath to use when compiling against this build unit's publicly visible code. + * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ def classpath: Seq[File] = unit.definitions.target ++ unit.plugins.classpath + + /** The class loader to use for this build unit's publicly visible code. + * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ def loader = unit.definitions.loader + + /** The imports to use for .sbt files, `consoleProject` and other contexts that use code from the build definition. 
*/ def imports = BuildUtil.getImports(unit) override def toString = unit.toString } +// TODO: figure out how to deprecate and drop buildNames +/** The built and loaded build definition, including loaded but unresolved [[Project]]s, for a build unit (for a single URI). +* +* @param base The base directory of the build definition, typically `/project/`. +* @param loader The ClassLoader containing all classes and plugins for the build definition project. +* Note that this does not include classes for .sbt files. +* @param builds The list of [[Build]]s for the build unit. +* In addition to auto-discovered [[Build]]s, this includes any auto-generated default [[Build]]s. +* @param projects The list of all [[Project]]s from all [[Build]]s. +* These projects have not yet been resolved, but they have had auto-plugins applied. +* In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `plugins` +* and their `settings` and `configurations` updated as appropriate. +* @param buildNames No longer used and will be deprecated once feasible. +*/ final class LoadedDefinitions(val base: File, val target: Seq[File], val loader: ClassLoader, val builds: Seq[Build], val projects: Seq[Project], val buildNames: Seq[String]) -final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val plugins: Seq[Plugin], val pluginNames: Seq[String]) + +/** Auto-detected top-level modules (as in `object X`) of type `T` paired with their source names. */ +final class DetectedModules[T](val modules: Seq[(String, T)]) { + /** The source names of the modules. This is "X" in `object X`, as opposed to the implementation class name "X$". + * The names are returned in a stable order such that `names zip values` pairs a name with the actual module. */ + def names: Seq[String] = modules.map(_._1) + + /** The singleton value of the module. 
+ * The values are returned in a stable order such that `names zip values` pairs a name with the actual module. */ + def values: Seq[T] = modules.map(_._2) +} + +/** Auto-detected auto plugin. */ +case class DetectedAutoPlugin(val name: String, val value: AutoPlugin, val hasAutoImport: Boolean) + +/** Auto-discovered modules for the build definition project. These include modules defined in build definition sources +* as well as modules in binary dependencies. +* +* @param builds The [[Build]]s detected in the build definition. This does not include the default [[Build]] that sbt creates if none is defined. +*/ +final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugins: Seq[DetectedAutoPlugin], val builds: DetectedModules[Build]) +{ + /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. */ + lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ + (autoPlugins flatMap { case DetectedAutoPlugin(name, ap, hasAutoImport) => + if (hasAutoImport) Some(name + ".autoImport") + else None + })) ++ + BuildUtil.importNamesRoot(autoPlugins map { _.name }) + + /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ + lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map {_.value}) +} + +/** The built and loaded build definition project. +* @param base The base directory for the build definition project (not the base of the project itself). +* @param pluginData Evaluated tasks/settings from the build definition for later use. +* This is necessary because the build definition project is discarded. +* @param loader The class loader for the build definition project, notably excluding classes used for .sbt files. +* @param detected Auto-detected modules in the build definition. 
+*/ +final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) +{ + @deprecated("Use the primary constructor.", "0.13.2") + def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = + this(base, pluginData, loader, + new DetectedPlugins(new DetectedModules(pluginNames zip plugins), Nil, new DetectedModules(Nil)) + ) + + @deprecated("Use detected.plugins.values.", "0.13.2") + val plugins: Seq[Plugin] = detected.plugins.values + @deprecated("Use detected.plugins.names.", "0.13.2") + val pluginNames: Seq[String] = detected.plugins.names + def fullClasspath: Seq[Attributed[File]] = pluginData.classpath def classpath = data(fullClasspath) + } +/** The loaded, but unresolved build unit. +* @param uri The uniquely identifying URI for the build. +* @param localBase The working location of the build on the filesystem. +* For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build. 
+*/ final class BuildUnit(val uri: URI, val localBase: File, val definitions: LoadedDefinitions, val plugins: LoadedPlugins) { override def toString = if(uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase +")") @@ -57,6 +150,8 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) BuildUtil.checkCycles(units) def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = for( (uri, unit) <- units.toSeq; (id, proj) <- unit.defined ) yield ProjectRef(uri, id) -> proj def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data) + + private[sbt] def autos = GroupedAutoPlugins(units) } final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit]) sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] } diff --git a/main/src/main/scala/sbt/BuildUtil.scala b/main/src/main/scala/sbt/BuildUtil.scala index df57581bd..db0d31f8e 100644 --- a/main/src/main/scala/sbt/BuildUtil.scala +++ b/main/src/main/scala/sbt/BuildUtil.scala @@ -35,7 +35,7 @@ final class BuildUtil[Proj]( case _ => None } - val configurationsForAxis: Option[ResolvedReference] => Seq[String] = + val configurationsForAxis: Option[ResolvedReference] => Seq[String] = refOpt => configurations(projectForAxis(refOpt)).map(_.name) } object BuildUtil @@ -48,6 +48,20 @@ object BuildUtil new BuildUtil(keyIndex, data, root, Load getRootProject units, getp, configs, aggregates) } + def dependencies(units: Map[URI, LoadedBuildUnit]): BuildDependencies = + { + import collection.mutable.HashMap + val agg = new HashMap[ProjectRef, Seq[ProjectRef]] + val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]] + for(lbu <- units.values; rp <- lbu.defined.values) + { + val ref = ProjectRef(lbu.unit.uri, rp.id) + cp(ref) = rp.dependencies + agg(ref) = rp.aggregate + } + BuildDependencies(cp.toMap, agg.toMap) + } + def checkCycles(units: Map[URI, LoadedBuildUnit]) { 
def getRef(pref: ProjectRef) = units(pref.build).defined(pref.project) @@ -60,9 +74,22 @@ object BuildUtil } } def baseImports: Seq[String] = "import sbt._, Keys._" :: Nil - def getImports(unit: BuildUnit): Seq[String] = getImports(unit.plugins.pluginNames, unit.definitions.buildNames) - def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = baseImports ++ importAllRoot(pluginNames ++ buildNames) - def importAll(values: Seq[String]): Seq[String] = if(values.isEmpty) Nil else values.map( _ + "._" ).mkString("import ", ", ", "") :: Nil + + def getImports(unit: BuildUnit): Seq[String] = unit.plugins.detected.imports + + @deprecated("Use getImports(Seq[String]).", "0.13.2") + def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = getImports(pluginNames ++ buildNames) + + /** `import sbt._, Keys._`, and wildcard import `._` for all names. */ + def getImports(names: Seq[String]): Seq[String] = baseImports ++ importAllRoot(names) + + /** Import just the names. */ + def importNames(names: Seq[String]): Seq[String] = if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil + /** Prepend `_root_` and import just the names. */ + def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName) + + /** Wildcard import `._` for all values. */ + def importAll(values: Seq[String]): Seq[String] = importNames(values map { _ + "._" }) def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName) def rootedName(s: String): String = if(s contains '.') "_root_." 
+ s else s @@ -78,4 +105,4 @@ object BuildUtil (ref, agg) Relation.empty ++ depPairs } -} \ No newline at end of file +} diff --git a/main/src/main/scala/sbt/CommandStrings.scala b/main/src/main/scala/sbt/CommandStrings.scala index 073480c31..aa2123b82 100644 --- a/main/src/main/scala/sbt/CommandStrings.scala +++ b/main/src/main/scala/sbt/CommandStrings.scala @@ -38,7 +38,7 @@ s"""$multiTaskSyntax def multiTaskBrief = """Executes all of the specified tasks concurrently.""" - def showHelp = Help(ShowCommand, (ShowCommand + " ", actBrief), actDetailed) + def showHelp = Help(ShowCommand, (s"$ShowCommand ", showBrief), showDetailed) def showBrief = "Displays the result of evaluating the setting or task associated with 'key'." def showDetailed = s"""$ShowCommand @@ -49,6 +49,11 @@ $ShowCommand Evaluates the specified task and display the value returned by the task.""" + val PluginsCommand = "plugins" + val PluginCommand = "plugin" + def pluginsBrief = "Lists currently available plugins." + def pluginsDetailed = pluginsBrief // TODO: expand + val LastCommand = "last" val LastGrepCommand = "last-grep" val ExportCommand = "export" @@ -167,7 +172,7 @@ Syntax summary Displays the main % try onUnload.value(s) finally IO.delete(taskTemporaryDirectory.value) }, + extraLoggers :== { _ => Nil }, + watchSources :== Nil, + skip :== false, taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir }, onComplete := { val dir = taskTemporaryDirectory.value; () => {IO.delete(dir); IO.createDirectory(dir) }}, Previous.cache <<= Previous.cacheSetting, Previous.references :== new Previous.References, concurrentRestrictions <<= defaultRestrictions, parallelExecution :== true, - sbtVersion := appConfiguration.value.provider.id.version, - sbtBinaryVersion := binarySbtVersion(sbtVersion.value), - sbtResolver := { if(sbtVersion.value endsWith "-SNAPSHOT") Classpaths.typesafeSnapshots else Classpaths.typesafeReleases }, pollInterval :== 500, logBuffered :== 
false, - connectInput :== false, - cancelable :== false, - envVars :== Map.empty, - sourcesInBase :== true, - autoAPIMappings := false, - apiMappings := Map.empty, - autoScalaLibrary :== true, - managedScalaInstance :== true, - onLoad := idFun[State], - onUnload := idFun[State], - onUnload := { s => try onUnload.value(s) finally IO.delete(taskTemporaryDirectory.value) }, - watchingMessage := Watched.defaultWatchingMessage, - triggeredMessage := Watched.defaultTriggeredMessage, - definesClass :== FileValueCache(Locate.definesClass _ ).get, - trapExit :== true, - traceLevel in run :== 0, - traceLevel in runMain :== 0, - traceLevel in console :== Int.MaxValue, - traceLevel in consoleProject :== Int.MaxValue, - autoCompilerPlugins :== true, - internalConfigurationMap :== Configurations.internalMap _, - initialize :== {}, - credentials :== Nil, - scalaHome :== None, - apiURL := None, - javaHome :== None, - extraLoggers :== { _ => Nil }, - skip :== false, - watchSources :== Nil, - version :== "0.1-SNAPSHOT", - outputStrategy :== None, - exportJars :== false, - fork :== false, - testForkedParallel :== false, - javaOptions :== Nil, - sbtPlugin :== false, - crossPaths :== true, - classpathTypes :== Set("jar", "bundle") ++ CustomPomParser.JarPackagings, - aggregate :== true, - maxErrors :== 100, - sourcePositionMappers :== Nil, + commands :== Nil, + showSuccess :== true, showTiming :== true, timingFormat :== Aggregation.defaultFormat, - showSuccess :== true, - commands :== Nil, - retrieveManaged :== false, - buildStructure := Project.structure(state.value), - settingsData := buildStructure.value.data, - artifactClassifier :== None, - artifactClassifier in packageSrc :== Some(SourceClassifier), - artifactClassifier in packageDoc :== Some(DocClassifier), - checksums := Classpaths.bootChecksums(appConfiguration.value), - conflictManager := ConflictManager.default, - pomExtra :== NodeSeq.Empty, - pomPostProcess :== idFun, - pomAllRepositories :== false, - includeFilter :== 
NothingFilter, - includeFilter in unmanagedSources :== "*.java" | "*.scala", - includeFilter in unmanagedJars :== "*.jar" | "*.so" | "*.dll" | "*.jnilib" | "*.zip", - includeFilter in unmanagedResources :== AllPassFilter, - excludeFilter :== HiddenFileFilter, - pomIncludeRepository :== Classpaths.defaultRepositoryFilter + aggregate :== true, + maxErrors :== 100, + fork :== false, + initialize :== {} )) def defaultTestTasks(key: Scoped): Seq[Setting[_]] = inTask(key)(Seq( tags := Seq(Tags.Test -> 1), logBuffered := true )) + // TODO: This should be on the new default settings for a project. def projectCore: Seq[Setting[_]] = Seq( name := thisProject.value.id, logManager := LogManager.defaults(extraLoggers.value, StandardMain.console), - onLoadMessage <<= onLoadMessage or (name, thisProjectRef)("Set current project to " + _ + " (in build " + _.build +")"), - runnerTask + onLoadMessage <<= onLoadMessage or (name, thisProjectRef)("Set current project to " + _ + " (in build " + _.build +")") ) def paths = Seq( baseDirectory := thisProject.value.base, @@ -179,7 +192,7 @@ object Defaults extends BuildCommon unmanagedResources <<= collectFiles(unmanagedResourceDirectories, includeFilter in unmanagedResources, excludeFilter in unmanagedResources), watchSources in ConfigGlobal ++= unmanagedResources.value, resourceGenerators :== Nil, - resourceGenerators <+= (definedSbtPlugins, resourceManaged) map writePluginsDescriptor, + resourceGenerators <+= (discoveredSbtPlugins, resourceManaged) map PluginDiscovery.writeDescriptors, managedResources <<= generate(resourceGenerators), resources <<= Classpaths.concat(managedResources, unmanagedResources) ) @@ -233,6 +246,7 @@ object Defaults extends BuildCommon consoleQuick <<= consoleQuickTask, discoveredMainClasses <<= compile map discoverMainClasses storeAs discoveredMainClasses triggeredBy compile, definedSbtPlugins <<= discoverPlugins, + discoveredSbtPlugins <<= discoverSbtPluginNames, inTask(run)(runnerTask :: Nil).head, 
selectMainClass := mainClass.value orElse selectRunMain(discoveredMainClasses.value), mainClass in run := (selectMainClass in run).value, @@ -732,13 +746,16 @@ object Defaults extends BuildCommon @deprecated("Use inTask(compile)(compileInputsSettings)", "0.13.0") def compileTaskSettings: Seq[Setting[_]] = inTask(compile)(compileInputsSettings) - def compileTask: Initialize[Task[inc.Analysis]] = Def.task { compileTaskImpl(streams.value, (compileInputs in compile).value) } - private[this] def compileTaskImpl(s: TaskStreams, ci: Compiler.Inputs): inc.Analysis = + def compileTask: Initialize[Task[inc.Analysis]] = Def.task { compileTaskImpl(streams.value, (compileInputs in compile).value, (compilerReporter in compile).value) } + private[this] def compileTaskImpl(s: TaskStreams, ci: Compiler.Inputs, reporter: Option[xsbti.Reporter]): inc.Analysis = { lazy val x = s.text(ExportStream) def onArgs(cs: Compiler.Compilers) = cs.copy(scalac = cs.scalac.onArgs(exported(x, "scalac")), javac = cs.javac.onArgs(exported(x, "javac"))) val i = ci.copy(compilers = onArgs(ci.compilers)) - try Compiler(i,s.log) + try reporter match { + case Some(reporter) => Compiler(i, s.log, reporter) + case None => Compiler(i, s.log) + } finally x.close() // workaround for #937 } def compileIncSetupTask = @@ -749,7 +766,8 @@ object Defaults extends BuildCommon Seq(compileInputs := { val cp = classDirectory.value +: data(dependencyClasspath.value) Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value, maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log) - }) + }, + compilerReporter := None) def printWarningsTask: Initialize[Task[Unit]] = (streams, compile, maxErrors, sourcePositionMappers) map { (s, analysis, max, spms) => @@ -760,27 +778,21 @@ object Defaults extends BuildCommon def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID = 
m.extra(CustomPomParser.SbtVersionKey -> sbtV, CustomPomParser.ScalaVersionKey -> scalaV).copy(crossVersion = CrossVersion.Disabled) + + @deprecated("Use PluginDiscovery.writeDescriptor.", "0.13.2") def writePluginsDescriptor(plugins: Set[String], dir: File): Seq[File] = - { - val descriptor: File = dir / "sbt" / "sbt.plugins" - if(plugins.isEmpty) - { - IO.delete(descriptor) - Nil - } - else - { - IO.writeLines(descriptor, plugins.toSeq.sorted) - descriptor :: Nil - } + PluginDiscovery.writeDescriptor(plugins.toSeq, dir, PluginDiscovery.Paths.Plugins).toList + + def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.task { + if(sbtPlugin.value) PluginDiscovery.discoverSourceAll(compile.value) else PluginDiscovery.emptyDiscoveredNames } + + @deprecated("Use discoverSbtPluginNames.", "0.13.2") def discoverPlugins: Initialize[Task[Set[String]]] = (compile, sbtPlugin, streams) map { (analysis, isPlugin, s) => if(isPlugin) discoverSbtPlugins(analysis, s.log) else Set.empty } + + @deprecated("Use PluginDiscovery.sourceModuleNames[Plugin].", "0.13.2") def discoverSbtPlugins(analysis: inc.Analysis, log: Logger): Set[String] = - { - val pluginClass = classOf[Plugin].getName - val discovery = Discovery(Set(pluginClass), Set.empty)( Tests allDefs analysis ) - discovery collect { case (df, disc) if (disc.baseClasses contains pluginClass) && disc.isModule => df.name } toSet; - } + PluginDiscovery.sourceModuleNames(analysis, classOf[Plugin].getName).toSet def copyResourcesTask = (classDirectory, resources, resourceDirectories, streams) map { (target, resrcs, dirs, s) => @@ -853,6 +865,7 @@ object Defaults extends BuildCommon lazy val disableAggregation = Defaults.globalDefaults( noAggregation map disableAggregate ) def disableAggregate(k: Scoped) = aggregate in k :== false + lazy val runnerSettings: Seq[Setting[_]] = Seq(runnerTask) lazy val baseTasks: Seq[Setting[_]] = projectTasks ++ packageBase lazy val baseClasspaths: Seq[Setting[_]] = 
Classpaths.publishSettings ++ Classpaths.baseSettings @@ -866,7 +879,12 @@ object Defaults extends BuildCommon // settings that are not specific to a configuration - lazy val projectBaseSettings: Seq[Setting[_]] = projectCore ++ paths ++ baseClasspaths ++ baseTasks ++ compileBase ++ disableAggregation + @deprecated("Settings now split into AutoPlugins.", "0.13.2") + lazy val projectBaseSettings: Seq[Setting[_]] = projectCore ++ runnerSettings ++ paths ++ baseClasspaths ++ baseTasks ++ compileBase ++ disableAggregation + + // These are project level settings that MUST be on every project. + lazy val coreDefaultSettings: Seq[Setting[_]] = projectCore ++ disableAggregation + + @deprecated("Default settings split into `coreDefaultSettings` and IvyModule/JvmModule plugins.", "0.13.2") lazy val defaultSettings: Seq[Setting[_]] = projectBaseSettings ++ defaultConfigs } object Classpaths @@ -936,9 +954,14 @@ object Classpaths publishArtifact in Test:== false )) - val publishSettings: Seq[Setting[_]] = publishGlobalDefaults ++ Seq( - artifacts <<= artifactDefs(defaultArtifactTasks), - packagedArtifacts <<= packaged(defaultArtifactTasks), + val jvmPublishSettings: Seq[Setting[_]] = Seq( + artifacts <<= artifactDefs(defaultArtifactTasks), + packagedArtifacts <<= packaged(defaultArtifactTasks) + ) + + val ivyPublishSettings: Seq[Setting[_]] = publishGlobalDefaults ++ Seq( + artifacts :== Nil, + packagedArtifacts :== Map.empty, makePom := { val config = makePomConfiguration.value; IvyActions.makePom(ivyModule.value, config, streams.value.log); config.file }, packagedArtifact in makePom := (artifact in makePom value, makePom value), deliver <<= deliverTask(deliverConfiguration), @@ -947,6 +970,8 @@ object Classpaths publishLocal <<= publishTask(publishLocalConfiguration, deliverLocal), publishM2 <<= publishTask(publishM2Configuration, deliverLocal) ) + @deprecated("This has been split into ivyPublishSettings and jvmPublishSettings.", "0.13.2") + val publishSettings: Seq[Setting[_]] =
ivyPublishSettings ++ jvmPublishSettings private[this] def baseGlobalDefaults = Defaults.globalDefaults(Seq( conflictWarning :== ConflictWarning.default("global"), @@ -977,7 +1002,7 @@ object Classpaths } )) - val baseSettings: Seq[Setting[_]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq( + val ivyBaseSettings: Seq[Setting[_]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq( conflictWarning := conflictWarning.value.copy(label = Reference.display(thisProjectRef.value)), unmanagedBase := baseDirectory.value / "lib", normalizedName := Project.normalizeModuleID(name.value), @@ -1008,14 +1033,11 @@ object Classpaths otherResolvers := Resolver.publishMavenLocal :: publishTo.value.toList, projectResolver <<= projectResolverTask, projectDependencies <<= projectDependenciesTask, - libraryDependencies ++= autoLibraryDependency(autoScalaLibrary.value && !scalaHome.value.isDefined && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, scalaVersion.value), + // TODO - Is this the appropriate split? Ivy defines this simply as + // just project + library, while the JVM plugin will define it as + // having the additional sbtPlugin + autoScala magikz. 
allDependencies := { - val base = projectDependencies.value ++ libraryDependencies.value - val pluginAdjust = if(sbtPlugin.value) sbtDependency.value.copy(configurations = Some(Provided.name)) +: base else base - if(scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value) - pluginAdjust - else - ScalaArtifacts.toolDependencies(scalaOrganization.value, scalaVersion.value) ++ pluginAdjust + projectDependencies.value ++ libraryDependencies.value }, ivyScala <<= ivyScala or (scalaHome, scalaVersion in update, scalaBinaryVersion in update, scalaOrganization) { (sh,fv,bv,so) => Some(new IvyScala(fv, bv, Nil, filterImplicit = false, checkExplicit = true, overrideScalaVersion = false, scalaOrganization = so)) @@ -1039,9 +1061,9 @@ object Classpaths makePomConfiguration := new MakePomConfiguration(artifactPath in makePom value, projectInfo.value, None, pomExtra.value, pomPostProcess.value, pomIncludeRepository.value, pomAllRepositories.value), deliverLocalConfiguration := deliverConfig(crossTarget.value, status = if (isSnapshot.value) "integration" else "release", logging = ivyLoggingLevel.value ), deliverConfiguration <<= deliverLocalConfiguration, - publishConfiguration := publishConfig(packagedArtifacts.in(publish).value, if(publishMavenStyle.value) None else Some(deliver.value), resolverName = getPublishTo(publishTo.value).name, checksums = checksums.in(publish).value, logging = ivyLoggingLevel.value), - publishLocalConfiguration := publishConfig(packagedArtifacts.in(publishLocal).value, Some(deliverLocal.value), checksums.in(publishLocal).value, logging = ivyLoggingLevel.value ), - publishM2Configuration := publishConfig(packagedArtifacts.in(publishM2).value, None, resolverName = Resolver.publishMavenLocal.name, checksums = checksums.in(publishM2).value, logging = ivyLoggingLevel.value), + publishConfiguration := publishConfig(packagedArtifacts.in(publish).value, if(publishMavenStyle.value) None else Some(deliver.value), resolverName = 
getPublishTo(publishTo.value).name, checksums = checksums.in(publish).value, logging = ivyLoggingLevel.value, overwrite = isSnapshot.value), + publishLocalConfiguration := publishConfig(packagedArtifacts.in(publishLocal).value, Some(deliverLocal.value), checksums.in(publishLocal).value, logging = ivyLoggingLevel.value, overwrite = isSnapshot.value), + publishM2Configuration := publishConfig(packagedArtifacts.in(publishM2).value, None, resolverName = Resolver.publishMavenLocal.name, checksums = checksums.in(publishM2).value, logging = ivyLoggingLevel.value, overwrite = isSnapshot.value), ivySbt <<= ivySbt0, ivyModule := { val is = ivySbt.value; new is.Module(moduleSettings.value) }, transitiveUpdate <<= transitiveUpdateTask, @@ -1055,6 +1077,22 @@ object Classpaths } } tag(Tags.Update, Tags.Network) ) + + val jvmBaseSettings: Seq[Setting[_]] = Seq( + libraryDependencies ++= autoLibraryDependency(autoScalaLibrary.value && !scalaHome.value.isDefined && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, scalaVersion.value), + // Override the default to handle mixing in the sbtPlugin + scala dependencies. 
+ allDependencies := { + val base = projectDependencies.value ++ libraryDependencies.value + val pluginAdjust = if(sbtPlugin.value) sbtDependency.value.copy(configurations = Some(Provided.name)) +: base else base + if(scalaHome.value.isDefined || ivyScala.value.isEmpty || !managedScalaInstance.value) + pluginAdjust + else + ScalaArtifacts.toolDependencies(scalaOrganization.value, scalaVersion.value) ++ pluginAdjust + } + ) + @deprecated("Split into ivyBaseSettings and jvmBaseSettings.", "0.13.2") + val baseSettings: Seq[Setting[_]] = ivyBaseSettings ++ jvmBaseSettings + def warnResolversConflict(ress: Seq[Resolver], log: Logger) { val resset = ress.toSet for ((name, r) <- resset groupBy (_.name) if r.size > 1) { @@ -1209,8 +1247,12 @@ def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) = new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging) - def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly) = - new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging) + @deprecated("0.13.2", "Previous semantics allowed overwriting cached files, which was unsafe. 
Please specify overwrite parameter.") + def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging.Value): PublishConfiguration = + publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = true) + def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging.Value = UpdateLogging.DownloadOnly, overwrite: Boolean = false) = + new PublishConfiguration(ivyFile, resolverName, artifacts, checksums, logging, overwrite) + def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath @@ -1255,19 +1297,8 @@ object Classpaths if(useJars) Seq(pkgTask).join else psTask } - def constructBuildDependencies: Initialize[BuildDependencies] = - loadedBuild { lb => - import collection.mutable.HashMap - val agg = new HashMap[ProjectRef, Seq[ProjectRef]] - val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]] - for(lbu <- lb.units.values; rp <- lbu.defined.values) - { - val ref = ProjectRef(lbu.unit.uri, rp.id) - cp(ref) = rp.dependencies - agg(ref) = rp.aggregate - } - BuildDependencies(cp.toMap, agg.toMap) - } + def constructBuildDependencies: Initialize[BuildDependencies] = loadedBuild(lb => BuildUtil.dependencies(lb.units)) + def internalDependencies: Initialize[Task[Classpath]] = (thisProjectRef, classpathConfiguration, configuration, settingsData, buildDependencies) flatMap internalDependencies0 def unmanagedDependencies: Initialize[Task[Classpath]] = diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala index 66192feb2..4c8fe8756 100644 --- a/main/src/main/scala/sbt/EvaluateTask.scala +++ b/main/src/main/scala/sbt/EvaluateTask.scala @@ -44,18 +44,25 @@ object EvaluateTask def defaultConfig(state: State): EvaluateConfig = { val extracted = Project.extract(state) - 
defaultConfig(extracted, extracted.structure) + extractedConfig(extracted, extracted.structure, state) } @deprecated("Use extractedConfig.", "0.13.0") def defaultConfig(extracted: Extracted, structure: BuildStructure) = - EvaluateConfig(false, restrictions(extracted, structure), progress = executeProgress(extracted, structure)) + EvaluateConfig(false, restrictions(extracted, structure), progress = defaultProgress) + @deprecated("Use other extractedConfig", "0.13.2") def extractedConfig(extracted: Extracted, structure: BuildStructure): EvaluateConfig = { val workers = restrictions(extracted, structure) val canCancel = cancelable(extracted, structure) - val progress = executeProgress(extracted, structure) + EvaluateConfig(cancelable = canCancel, restrictions = workers, progress = defaultProgress) + } + def extractedConfig(extracted: Extracted, structure: BuildStructure, state: State): EvaluateConfig = + { + val workers = restrictions(extracted, structure) + val canCancel = cancelable(extracted, structure) + val progress = executeProgress(extracted, structure, state) EvaluateConfig(cancelable = canCancel, restrictions = workers, progress = progress) } @@ -78,8 +85,11 @@ object EvaluateTask def cancelable(extracted: Extracted, structure: BuildStructure): Boolean = getSetting(Keys.cancelable, false, extracted, structure) - private[sbt] def executeProgress(extracted: Extracted, structure: BuildStructure): ExecuteProgress[Task] = - getSetting(Keys.executeProgress, new Keys.TaskProgress(defaultProgress), extracted, structure).progress + private[sbt] def executeProgress(extracted: Extracted, structure: BuildStructure, state: State): ExecuteProgress[Task] = { + import Types.const + val maker: State => Keys.TaskProgress = getSetting(Keys.executeProgress, const(new Keys.TaskProgress(defaultProgress)), extracted, structure) + maker(state).progress + } def getSetting[T](key: SettingKey[T], default: T, extracted: Extracted, structure: BuildStructure): T = key in 
extracted.currentRef get structure.data getOrElse default @@ -94,7 +104,7 @@ object EvaluateTask { val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root)) val pluginKey = pluginData - val config = extractedConfig(Project.extract(state), pluginDef) + val config = extractedConfig(Project.extract(state), pluginDef, state) val evaluated = apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config) val (newS, result) = evaluated getOrElse sys.error("Plugin data does not exist for plugin definition at " + pluginDef.root) Project.runUnloadHooks(newS) // discard states diff --git a/main/src/main/scala/sbt/GroupedAutoPlugins.scala b/main/src/main/scala/sbt/GroupedAutoPlugins.scala new file mode 100644 index 000000000..d020ad31e --- /dev/null +++ b/main/src/main/scala/sbt/GroupedAutoPlugins.scala @@ -0,0 +1,20 @@ +package sbt + + import Def.Setting + import java.net.URI + +private[sbt] final class GroupedAutoPlugins(val all: Seq[AutoPlugin], val byBuild: Map[URI, Seq[AutoPlugin]]) +{ + def globalSettings: Seq[Setting[_]] = all.flatMap(_.globalSettings) + def buildSettings(uri: URI): Seq[Setting[_]] = byBuild.getOrElse(uri, Nil).flatMap(_.buildSettings) +} + +private[sbt] object GroupedAutoPlugins +{ + private[sbt] def apply(units: Map[URI, LoadedBuildUnit]): GroupedAutoPlugins = + { + val byBuild: Map[URI, Seq[AutoPlugin]] = units.mapValues(unit => unit.defined.values.flatMap(_.autoPlugins).toSeq.distinct).toMap + val all: Seq[AutoPlugin] = byBuild.values.toSeq.flatten.distinct + new GroupedAutoPlugins(all, byBuild) + } +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index c0ffdbd6d..53ccdfc97 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -131,6 +131,7 @@ object Keys val crossVersion = SettingKey[CrossVersion]("cross-version", "Configures handling of the Scala version when cross-building.", CSetting) val 
classpathOptions = SettingKey[ClasspathOptions]("classpath-options", "Configures handling of Scala classpaths.", DSetting) val definedSbtPlugins = TaskKey[Set[String]]("defined-sbt-plugins", "The set of names of Plugin implementations defined by this project.", CTask) + val discoveredSbtPlugins = TaskKey[PluginDiscovery.DiscoveredNames]("discovered-sbt-plugins", "The names of sbt plugin-related modules (modules that extend Build, Plugin, AutoPlugin) defined by this project.", CTask) val sbtPlugin = SettingKey[Boolean]("sbt-plugin", "If true, enables adding sbt as a dependency and auto-generation of the plugin descriptor file.", BMinusSetting) val printWarnings = TaskKey[Unit]("print-warnings", "Shows warnings from compilation, including ones that weren't printed initially.", BPlusTask) val fileInputOptions = SettingKey[Seq[String]]("file-input-options", "Options that take file input, which may invalidate the cache.", CSetting) @@ -344,7 +345,10 @@ object Keys // wrapper to work around SI-2915 private[sbt] final class TaskProgress(val progress: ExecuteProgress[Task]) - private[sbt] val executeProgress = SettingKey[TaskProgress]("executeProgress", "Experimental task execution listener.", DTask) + private[sbt] val executeProgress = SettingKey[State => TaskProgress]("executeProgress", "Experimental task execution listener.", DTask) + + // Experimental in sbt 0.13.2 to enable grabbing semantic compile failures. 
+ private[sbt] val compilerReporter = TaskKey[Option[xsbti.Reporter]]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask) val triggeredBy = Def.triggeredBy val runBefore = Def.runBefore diff --git a/main/src/main/scala/sbt/Load.scala b/main/src/main/scala/sbt/Load.scala index 8b7f3465a..a45dec8dd 100755 --- a/main/src/main/scala/sbt/Load.scala +++ b/main/src/main/scala/sbt/Load.scala @@ -6,19 +6,16 @@ package sbt import java.io.File import java.net.{URI,URL} import compiler.{Eval,EvalImports} - import xsbt.api.{Discovered,Discovery} - import xsbti.compile.CompileOrder import classpath.ClasspathUtilities import scala.annotation.tailrec import collection.mutable - import Compiler.{Compilers,Inputs} + import Compiler.Compilers import inc.{FileValueCache, Locate} import Project.{inScope,makeSettings} import Def.{isDummy, ScopedKey, ScopeLocal, Setting} import Keys.{appConfiguration, baseDirectory, configuration, fullResolvers, fullClasspath, pluginData, streams, thisProject, thisProjectRef, update} import Keys.{exportedProducts, loadedBuild, onLoadMessage, resolvedScoped, sbtPlugin, scalacOptions, taskDefinitionKey} import tools.nsc.reporters.ConsoleReporter - import Build.analyzed import Attributed.data import Scope.{GlobalScope, ThisScope} import Types.const @@ -180,7 +177,7 @@ object Load val keys = Index.allKeys(settings) val attributeKeys = Index.attributeKeys(data) ++ keys.map(_.key) val scopedKeys = keys ++ data.allKeys( (s,k) => ScopedKey(s,k)) - val projectsMap = projects.mapValues(_.defined.keySet) + val projectsMap = projects.mapValues(_.defined.keySet).toMap val keyIndex = KeyIndex(scopedKeys, projectsMap) val aggIndex = KeyIndex.aggregate(scopedKeys, extra(keyIndex), projectsMap) new sbt.StructureIndex(Index.stringToKeyMap(attributeKeys), Index.taskToKeyMap(data), Index.triggers(data), keyIndex, aggIndex) @@ -201,10 +198,10 @@ object Load { ((loadedBuild in GlobalScope :== loaded) +: 
transformProjectOnly(loaded.root, rootProject, injectSettings.global)) ++ - inScope(GlobalScope)( pluginGlobalSettings(loaded) ) ++ + inScope(GlobalScope)( pluginGlobalSettings(loaded) ++ loaded.autos.globalSettings ) ++ loaded.units.toSeq.flatMap { case (uri, build) => - val plugins = build.unit.plugins.plugins - val pluginBuildSettings = plugins.flatMap(_.buildSettings) + val plugins = build.unit.plugins.detected.plugins.values + val pluginBuildSettings = plugins.flatMap(_.buildSettings) ++ loaded.autos.buildSettings(uri) val pluginNotThis = plugins.flatMap(_.settings) filterNot isProjectThis val projectSettings = build.defined flatMap { case (id, project) => val ref = ProjectRef(uri, id) @@ -220,9 +217,10 @@ object Load buildSettings ++ projectSettings } } + @deprecated("Does not account for AutoPlugins and will be made private.", "0.13.2") def pluginGlobalSettings(loaded: sbt.LoadedBuild): Seq[Setting[_]] = loaded.units.toSeq flatMap { case (_, build) => - build.unit.plugins.plugins flatMap { _.globalSettings } + build.unit.plugins.detected.plugins.values flatMap { _.globalSettings } } @deprecated("No longer used.", "0.13.0") @@ -368,10 +366,11 @@ object Load def resolveProjects(loaded: sbt.PartBuild): sbt.LoadedBuild = { val rootProject = getRootProject(loaded.units) - new sbt.LoadedBuild(loaded.root, loaded.units map { case (uri, unit) => + val units = loaded.units map { case (uri, unit) => IO.assertAbsolute(uri) (uri, resolveProjects(uri, unit, rootProject)) - }) + } + new sbt.LoadedBuild(loaded.root, units) } def resolveProjects(uri: URI, unit: sbt.PartBuildUnit, rootProject: URI => String): sbt.LoadedBuildUnit = { @@ -399,10 +398,10 @@ object Load def getBuild[T](map: Map[URI, T], uri: URI): T = map.getOrElse(uri, noBuild(uri)) - def emptyBuild(uri: URI) = sys.error("No root project defined for build unit '" + uri + "'") - def noBuild(uri: URI) = sys.error("Build unit '" + uri + "' not defined.") - def noProject(uri: URI, id: String) = sys.error("No 
project '" + id + "' defined in '" + uri + "'.") - def noConfiguration(uri: URI, id: String, conf: String) = sys.error("No configuration '" + conf + "' defined in project '" + id + "' in '" + uri +"'") + def emptyBuild(uri: URI) = sys.error(s"No root project defined for build unit '$uri'") + def noBuild(uri: URI) = sys.error(s"Build unit '$uri' not defined.") + def noProject(uri: URI, id: String) = sys.error(s"No project '$id' defined in '$uri'.") + def noConfiguration(uri: URI, id: String, conf: String) = sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'") def loadUnit(uri: URI, localBase: File, s: State, config: sbt.LoadBuildConfiguration): sbt.BuildUnit = { @@ -410,15 +409,13 @@ object Load val defDir = projectStandard(normBase) val plugs = plugins(defDir, s, config.copy(pluginManagement = config.pluginManagement.forPlugin)) - val defNames = analyzed(plugs.fullClasspath) flatMap findDefinitions - val defsScala = if(defNames.isEmpty) Nil else loadDefinitions(plugs.loader, defNames) - val imports = BuildUtil.getImports(plugs.pluginNames, defNames) + val defsScala = plugs.detected.builds.values lazy val eval = mkEval(plugs.classpath, defDir, plugs.pluginData.scalacOptions) val initialProjects = defsScala.flatMap(b => projectsFromBuild(b, normBase)) val memoSettings = new mutable.HashMap[File, LoadedSbtFile] - def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, imports, plugs, () => eval, config.injectSettings, Nil, memoSettings) + def loadProjects(ps: Seq[Project]) = loadTransitive(ps, normBase, plugs, () => eval, config.injectSettings, Nil, memoSettings, config.log) val loadedProjectsRaw = loadProjects(initialProjects) val hasRoot = loadedProjectsRaw.exists(_.base == normBase) || defsScala.exists(_.rootProject.isDefined) val (loadedProjects, defaultBuildIfNone) = @@ -434,7 +431,7 @@ object Load } val defs = if(defsScala.isEmpty) defaultBuildIfNone :: Nil else defsScala - val loadedDefs = new sbt.LoadedDefinitions(defDir, Nil, 
plugs.loader, defs, loadedProjects, defNames) + val loadedDefs = new sbt.LoadedDefinitions(defDir, Nil, plugs.loader, defs, loadedProjects, plugs.detected.builds.names) new sbt.BuildUnit(uri, normBase, loadedDefs, plugs) } @@ -460,16 +457,22 @@ object Load private[this] def projectsFromBuild(b: Build, base: File): Seq[Project] = b.projectDefinitions(base).map(resolveBase(base)) - private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, imports: Seq[String], plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile]): Seq[Project] = + private[this] def loadTransitive(newProjects: Seq[Project], buildBase: File, plugins: sbt.LoadedPlugins, eval: () => Eval, injectSettings: InjectSettings, + acc: Seq[Project], memoSettings: mutable.Map[File, LoadedSbtFile], log: Logger): Seq[Project] = { - def loadSbtFiles(auto: AddSettings, base: File): LoadedSbtFile = - loadSettings(auto, base, imports, plugins, eval, injectSettings, memoSettings) + def loadSbtFiles(auto: AddSettings, base: File, autoPlugins: Seq[AutoPlugin], projectSettings: Seq[Setting[_]]): LoadedSbtFile = + loadSettings(auto, base, plugins, eval, injectSettings, memoSettings, autoPlugins, projectSettings) def loadForProjects = newProjects map { project => - val loadedSbtFiles = loadSbtFiles(project.auto, project.base) - val transformed = project.copy(settings = (project.settings: Seq[Setting[_]]) ++ loadedSbtFiles.settings) + val autoPlugins = + try plugins.detected.deducePlugins(project.plugins, log) + catch { case e: AutoPluginException => throw translateAutoPluginException(e, project) } + val autoConfigs = autoPlugins.flatMap(_.projectConfigurations) + val loadedSbtFiles = loadSbtFiles(project.auto, project.base, autoPlugins, project.settings) + // add the automatically selected settings, record the selected AutoPlugins, and register the automatically selected configurations + val transformed = 
project.copy(settings = loadedSbtFiles.settings).setAutoPlugins(autoPlugins).overrideConfigs(autoConfigs : _*) (transformed, loadedSbtFiles.projects) } - def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase).projects + def defaultLoad = loadSbtFiles(AddSettings.defaultSbtFiles, buildBase, Nil, Nil).projects val (nextProjects, loadedProjects) = if(newProjects.isEmpty) // load the .sbt files in the root directory to look for Projects (defaultLoad, acc) @@ -481,10 +484,12 @@ object Load if(nextProjects.isEmpty) loadedProjects else - loadTransitive(nextProjects, buildBase, imports, plugins, eval, injectSettings, loadedProjects, memoSettings) + loadTransitive(nextProjects, buildBase, plugins, eval, injectSettings, loadedProjects, memoSettings, log) } + private[this] def translateAutoPluginException(e: AutoPluginException, project: Project): AutoPluginException = + e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n") - private[this] def loadSettings(auto: AddSettings, projectBase: File, buildImports: Seq[String], loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile]): LoadedSbtFile = + private[this] def loadSettings(auto: AddSettings, projectBase: File, loadedPlugins: sbt.LoadedPlugins, eval: ()=>Eval, injectSettings: InjectSettings, memoSettings: mutable.Map[File, LoadedSbtFile], autoPlugins: Seq[AutoPlugin], buildScalaFiles: Seq[Setting[_]]): LoadedSbtFile = { lazy val defaultSbtFiles = configurationSources(projectBase) def settings(ss: Seq[Setting[_]]) = new LoadedSbtFile(ss, Nil, Nil) @@ -499,14 +504,25 @@ object Load lf } def loadSettingsFile(src: File): LoadedSbtFile = - EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), buildImports, 0)(loader) + EvaluateConfigurations.evaluateSbtFile(eval(), src, IO.readLines(src), loadedPlugins.detected.imports, 0)(loader) + + import 
AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,AutoPlugins,Sequence,BuildScalaFiles} + def pluginSettings(f: Plugins) = { + val included = loadedPlugins.detected.plugins.values.filter(f.include) // don't apply the filter to AutoPlugins, only Plugins + included.flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings) + } + // Filter the AutoPlugin settings we included based on which ones are + // intended in the AddSettings.AutoPlugins filter. + def autoPluginSettings(f: AutoPlugins) = + autoPlugins.filter(f.include).flatMap(_.projectSettings) - import AddSettings.{User,SbtFiles,DefaultSbtFiles,Plugins,Sequence} def expand(auto: AddSettings): LoadedSbtFile = auto match { + case BuildScalaFiles => settings(buildScalaFiles) case User => settings(injectSettings.projectLoaded(loader)) case sf: SbtFiles => loadSettings( sf.files.map(f => IO.resolve(projectBase, f))) case sf: DefaultSbtFiles => loadSettings( defaultSbtFiles.filter(sf.include)) - case f: Plugins => settings(loadedPlugins.plugins.filter(f.include).flatMap(p => p.settings.filter(isProjectThis) ++ p.projectSettings)) + case p: Plugins => settings(pluginSettings(p)) + case p: AutoPlugins => settings(autoPluginSettings(p)) case q: Sequence => (LoadedSbtFile.empty /: q.sequence) { (b,add) => b.merge( expand(add) ) } } expand(auto) @@ -599,67 +615,46 @@ object Load config.evalPluginDef(pluginDef, pluginState) } + @deprecated("Use ModuleUtilities.getCheckedObjects[Build].", "0.13.2") def loadDefinitions(loader: ClassLoader, defs: Seq[String]): Seq[Build] = defs map { definition => loadDefinition(loader, definition) } + + @deprecated("Use ModuleUtilities.getCheckedObject[Build].", "0.13.2") def loadDefinition(loader: ClassLoader, definition: String): Build = ModuleUtilities.getObject(definition, loader).asInstanceOf[Build] def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): sbt.LoadedPlugins = - { - val (pluginNames, plugins) = if(data.classpath.isEmpty) (Nil, Nil) else { - val names = 
getPluginNames(data.classpath, loader) - val loaded = - try loadPlugins(loader, names) - catch { - case e: ExceptionInInitializerError => - val cause = e.getCause - if(cause eq null) throw e else throw cause - case e: LinkageError => incompatiblePlugins(data, e) - } - (names, loaded) - } - new sbt.LoadedPlugins(dir, data, loader, plugins, pluginNames) - } - private[this] def incompatiblePlugins(data: PluginData, t: LinkageError): Nothing = - { - val evicted = data.report.toList.flatMap(_.configurations.flatMap(_.evicted)) - val evictedModules = evicted map { id => (id.organization, id.name) } distinct ; - val evictedStrings = evictedModules map { case (o,n) => o + ":" + n } - val msgBase = "Binary incompatibility in plugins detected." - val msgExtra = if(evictedStrings.isEmpty) "" else "\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString("\n\t") - throw new IncompatiblePluginsException(msgBase + msgExtra, t) - } + new sbt.LoadedPlugins(dir, data, loader, PluginDiscovery.discoverAll(data, loader)) + + @deprecated("Replaced by the more general PluginDiscovery.binarySourceModuleNames and will be made private.", "0.13.2") def getPluginNames(classpath: Seq[Attributed[File]], loader: ClassLoader): Seq[String] = - ( binaryPlugins(data(classpath), loader) ++ (analyzed(classpath) flatMap findPlugins) ).distinct + PluginDiscovery.binarySourceModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins, classOf[Plugin].getName) + @deprecated("Use PluginDiscovery.binaryModuleNames.", "0.13.2") def binaryPlugins(classpath: Seq[File], loader: ClassLoader): Seq[String] = - { - import collection.JavaConversions._ - loader.getResources("sbt/sbt.plugins").toSeq.filter(onClasspath(classpath)) flatMap { u => - IO.readLinesURL(u).map( _.trim).filter(!_.isEmpty) - } - } + PluginDiscovery.binaryModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins) + + @deprecated("Use PluginDiscovery.onClasspath", "0.13.2") def onClasspath(classpath: 
Seq[File])(url: URL): Boolean = - IO.urlAsFile(url) exists (classpath.contains _) + PluginDiscovery.onClasspath(classpath)(url) + @deprecated("Use ModuleUtilities.getCheckedObjects[Plugin].", "0.13.2") def loadPlugins(loader: ClassLoader, pluginNames: Seq[String]): Seq[Plugin] = - pluginNames.map(pluginName => loadPlugin(pluginName, loader)) + ModuleUtilities.getCheckedObjects[Plugin](pluginNames, loader).map(_._2) + @deprecated("Use ModuleUtilities.getCheckedObject[Plugin].", "0.13.2") def loadPlugin(pluginName: String, loader: ClassLoader): Plugin = - ModuleUtilities.getObject(pluginName, loader).asInstanceOf[Plugin] + ModuleUtilities.getCheckedObject[Plugin](pluginName, loader) + @deprecated("No longer used.", "0.13.2") def findPlugins(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Plugin") + + @deprecated("No longer used.", "0.13.2") def findDefinitions(analysis: inc.Analysis): Seq[String] = discover(analysis, "sbt.Build") + + @deprecated("Use PluginDiscovery.sourceModuleNames", "0.13.2") def discover(analysis: inc.Analysis, subclasses: String*): Seq[String] = - { - val subclassSet = subclasses.toSet - val ds = Discovery(subclassSet, Set.empty)(Tests.allDefs(analysis)) - ds.flatMap { - case (definition, Discovered(subs,_,_,true)) => - if((subs & subclassSet).isEmpty) Nil else definition.name :: Nil - case _ => Nil - } - } + PluginDiscovery.sourceModuleNames(analysis, subclasses : _*) def initialSession(structure: sbt.BuildStructure, rootEval: () => Eval, s: State): SessionSettings = { val session = s get Keys.sessionSettings @@ -748,4 +743,4 @@ final case class LoadBuildConfiguration(stagingDirectory: File, classpath: Seq[A lazy val globalPluginNames = if(classpath.isEmpty) Nil else Load.getPluginNames(classpath, pluginManagement.initialLoader) } -final class IncompatiblePluginsException(msg: String, cause: Throwable) extends Exception(msg, cause) \ No newline at end of file +final class IncompatiblePluginsException(msg: String, cause: 
Throwable) extends Exception(msg, cause) diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index 295ffca33..7cd85867d 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -89,7 +89,7 @@ object BuiltinCommands def ScriptCommands: Seq[Command] = Seq(ignore, exit, Script.command, setLogLevel, early, act, nop) def DefaultCommands: Seq[Command] = Seq(ignore, help, completionsCommand, about, tasks, settingsCommand, loadProject, projects, project, reboot, read, history, set, sessionCommand, inspect, loadProjectImpl, loadFailed, Cross.crossBuild, Cross.switchVersion, - setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, setLogLevel, + setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, setLogLevel, plugin, plugins, ifLast, multi, shell, continuous, eval, alias, append, last, lastGrep, export, boot, nop, call, exit, early, initialize, act) ++ compatCommands def DefaultBootCommands: Seq[String] = LoadProject :: (IfLast + " " + Shell) :: Nil @@ -125,7 +125,8 @@ object BuiltinCommands def aboutPlugins(e: Extracted): String = { - val allPluginNames = e.structure.units.values.flatMap(_.unit.plugins.pluginNames).toSeq.distinct + def list(b: BuildUnit) = b.plugins.detected.autoPlugins.map(_.value.label) ++ b.plugins.detected.plugins.names + val allPluginNames = e.structure.units.values.flatMap(u => list(u.unit)).toSeq.distinct if(allPluginNames.isEmpty) "" else allPluginNames.mkString("Available Plugins: ", ", ", "") } def aboutScala(s: State, e: Extracted): String = @@ -374,6 +375,20 @@ object BuiltinCommands Help.detailOnly(taskDetail(allTaskAndSettingKeys(s))) else Help.empty + def plugins = Command.command(PluginsCommand, pluginsBrief, pluginsDetailed) { s => + val helpString = PluginsDebug.helpAll(s) + System.out.println(helpString) + s + } + val pluginParser: State => Parser[AutoPlugin] = s => { + val autoPlugins: Map[String, AutoPlugin] = PluginsDebug.autoPluginMap(s) + token(Space) ~> 
Act.knownIDParser(autoPlugins, "plugin") + } + def plugin = Command(PluginCommand)(pluginParser) { (s, plugin) => + val helpString = PluginsDebug.help(plugin, s) + System.out.println(helpString) + s + } def projects = Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed )(s => projectsParser(s).?) { case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds) diff --git a/main/src/main/scala/sbt/Output.scala b/main/src/main/scala/sbt/Output.scala index 06b1fda4d..ed3fd3714 100644 --- a/main/src/main/scala/sbt/Output.scala +++ b/main/src/main/scala/sbt/Output.scala @@ -20,7 +20,7 @@ object Output def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit)(implicit display: Show[ScopedKey[_]]): Unit = last(keys, streams, printLines, None)(display) - def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit, sid: Option[String])(implicit display: Show[ScopedKey[_]]): Unit = + def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit, sid: Option[String])(implicit display: Show[ScopedKey[_]]): Unit = printLines( flatLines(lastLines(keys, streams, sid))(idFun) ) def last(file: File, printLines: Seq[String] => Unit, tailDelim: String = DefaultTail): Unit = @@ -55,7 +55,17 @@ object Output @deprecated("Explicitly provide None for the stream ID.", "0.13.0") def lastLines(key: ScopedKey[_], mgr: Streams): Seq[String] = lastLines(key, mgr, None) - def lastLines(key: ScopedKey[_], mgr: Streams, sid: Option[String]): Seq[String] = mgr.use(key) { s => IO.readLines(s.readText( Project.fillTaskAxis(key), sid )) } + def lastLines(key: ScopedKey[_], mgr: Streams, sid: Option[String]): Seq[String] = + mgr.use(key) { s => + // Workaround for #1155 - Keys.streams are always scoped by the task they're included in + // but are keyed by the Keys.streams key. I think this isn't actually a workaround, but + // is how things are expected to work now. 
+ // You can see where streams are injected using their own key scope in + // EvaluateTask.injectStreams. + val streamScopedKey: ScopedKey[_] = ScopedKey(Project.fillTaskAxis(key).scope, Keys.streams.key) + val tmp = s.readText( streamScopedKey, sid ) + IO.readLines(tmp) + } def tailLines(file: File, tailDelim: String): Seq[String] = headLines(IO.readLines(file).reverse, tailDelim).reverse diff --git a/main/src/main/scala/sbt/PluginDiscovery.scala b/main/src/main/scala/sbt/PluginDiscovery.scala new file mode 100644 index 000000000..e504cd264 --- /dev/null +++ b/main/src/main/scala/sbt/PluginDiscovery.scala @@ -0,0 +1,143 @@ +package sbt + + import java.io.File + import java.net.URL + import Attributed.data + import Build.analyzed + import xsbt.api.{Discovered,Discovery} + +object PluginDiscovery +{ + /** Relative paths of resources that list top-level modules that are available. + * Normally, the classes for those modules will be in the same classpath entry as the resource. */ + object Paths + { + final val AutoPlugins = "sbt/sbt.autoplugins" + final val Plugins = "sbt/sbt.plugins" + final val Builds = "sbt/sbt.builds" + } + /** Names of top-level modules that subclass sbt plugin-related classes: [[Plugin]], [[AutoPlugin]], and [[Build]]. */ + final class DiscoveredNames(val plugins: Seq[String], val autoPlugins: Seq[String], val builds: Seq[String]) + + def emptyDiscoveredNames: DiscoveredNames = new DiscoveredNames(Nil, Nil, Nil) + + /** Discovers and loads the sbt-plugin-related top-level modules from the classpath and source analysis in `data` and using the provided class `loader`. */ + def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = + { + def discover[T](resource: String)(implicit mf: reflect.ClassManifest[T]) = + binarySourceModules[T](data, loader, resource) + import Paths._ + // TODO - Fix this once we can autodetect AutoPlugins defined by sbt itself. 
+ val defaultAutoPlugins = Seq( + "sbt.plugins.IvyPlugin" -> sbt.plugins.IvyPlugin, + "sbt.plugins.JvmPlugin" -> sbt.plugins.JvmPlugin, + "sbt.plugins.CorePlugin" -> sbt.plugins.CorePlugin + ) + val detectedAutoPugins = discover[AutoPlugin](AutoPlugins) + val allAutoPlugins = (defaultAutoPlugins ++ detectedAutoPugins.modules) map { case (name, value) => + DetectedAutoPlugin(name, value, sbt.Plugins.hasAutoImportGetter(value, loader)) + } + new DetectedPlugins(discover[Plugin](Plugins), allAutoPlugins, discover[Build](Builds)) + } + + /** Discovers the sbt-plugin-related top-level modules from the provided source `analysis`. */ + def discoverSourceAll(analysis: inc.Analysis): DiscoveredNames = + { + def discover[T](implicit mf: reflect.ClassManifest[T]): Seq[String] = + sourceModuleNames(analysis, mf.erasure.getName) + new DiscoveredNames(discover[Plugin], discover[AutoPlugin], discover[Build]) + } + + // TODO: for 0.14.0, consider consolidating into a single file, which would make the classpath search 4x faster + /** Writes discovered module `names` to zero or more files in `dir` as per [[writeDescriptor]] and returns the list of files written. */ + def writeDescriptors(names: DiscoveredNames, dir: File): Seq[File] = + { + import Paths._ + val files = + writeDescriptor(names.plugins, dir, Plugins) :: + writeDescriptor(names.autoPlugins, dir, AutoPlugins) :: + writeDescriptor(names.builds, dir, Builds) :: + Nil + files.flatMap(_.toList) + } + + /** Stores the module `names` in `dir / path`, one per line, unless `names` is empty and then the file is deleted and `None` returned. 
*/ + def writeDescriptor(names: Seq[String], dir: File, path: String): Option[File] = + { + val descriptor: File = new File(dir, path) + if(names.isEmpty) + { + IO.delete(descriptor) + None + } + else + { + IO.writeLines(descriptor, names.distinct.sorted) + Some(descriptor) + } + } + + /** Discovers the names of top-level modules listed in resources named `resourceName` as per [[binaryModuleNames]] or + * available as analyzed source and extending from any of `subclasses` as per [[sourceModuleNames]]. */ + def binarySourceModuleNames(classpath: Seq[Attributed[File]], loader: ClassLoader, resourceName: String, subclasses: String*): Seq[String] = + ( + binaryModuleNames(data(classpath), loader, resourceName) ++ + (analyzed(classpath) flatMap ( a => sourceModuleNames(a, subclasses : _*) )) + ).distinct + + /** Discovers top-level modules in `analysis` that inherit from any of `subclasses`. */ + def sourceModuleNames(analysis: inc.Analysis, subclasses: String*): Seq[String] = + { + val subclassSet = subclasses.toSet + val ds = Discovery(subclassSet, Set.empty)(Tests.allDefs(analysis)) + ds.flatMap { + case (definition, Discovered(subs,_,_,true)) => + if((subs & subclassSet).isEmpty) Nil else definition.name :: Nil + case _ => Nil + } + } + + /** Obtains the list of modules identified in all resource files `resourceName` from `loader` that are on `classpath`. + * `classpath` and `loader` are both required to ensure that `loader` + * doesn't bring in any resources outside of the intended `classpath`, such as from parent loaders. 
*/ + def binaryModuleNames(classpath: Seq[File], loader: ClassLoader, resourceName: String): Seq[String] = + { + import collection.JavaConversions._ + loader.getResources(resourceName).toSeq.filter(onClasspath(classpath)) flatMap { u => + IO.readLinesURL(u).map( _.trim).filter(!_.isEmpty) + } + } + + /** Returns `true` if `url` is an entry in `classpath`.*/ + def onClasspath(classpath: Seq[File])(url: URL): Boolean = + IO.urlAsFile(url) exists (classpath.contains _) + + private[sbt] def binarySourceModules[T](data: PluginData, loader: ClassLoader, resourceName: String)(implicit mf: reflect.ClassManifest[T]): DetectedModules[T] = + { + val classpath = data.classpath + val namesAndValues = if(classpath.isEmpty) Nil else { + val names = binarySourceModuleNames(classpath, loader, resourceName, mf.erasure.getName) + loadModules[T](data, names, loader) + } + new DetectedModules(namesAndValues) + } + + private[this] def loadModules[T: ClassManifest](data: PluginData, names: Seq[String], loader: ClassLoader): Seq[(String,T)] = + try ModuleUtilities.getCheckedObjects[T](names, loader) + catch { + case e: ExceptionInInitializerError => + val cause = e.getCause + if(cause eq null) throw e else throw cause + case e: LinkageError => incompatiblePlugins(data, e) + } + + private[this] def incompatiblePlugins(data: PluginData, t: LinkageError): Nothing = + { + val evicted = data.report.toList.flatMap(_.configurations.flatMap(_.evicted)) + val evictedModules = evicted map { id => (id.organization, id.name) } distinct ; + val evictedStrings = evictedModules map { case (o,n) => o + ":" + n } + val msgBase = "Binary incompatibility in plugins detected." 
+ val msgExtra = if(evictedStrings.isEmpty) "" else "\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString("\n\t") + throw new IncompatiblePluginsException(msgBase + msgExtra, t) + } +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala new file mode 100644 index 000000000..63795bd6d --- /dev/null +++ b/main/src/main/scala/sbt/Plugins.scala @@ -0,0 +1,327 @@ +package sbt +/* +TODO: +- index all available AutoPlugins to get the tasks that will be added +- error message when a task doesn't exist that it would be provided by plugin x, enabled by natures y,z, blocked by a, b +*/ + + import logic.{Atom, Clause, Clauses, Formula, Literal, Logic, Negated} + import Logic.{CyclicNegation, InitialContradictions, InitialOverlap, LogicException} + import Def.Setting + import Plugins._ + import annotation.tailrec + +/** +An AutoPlugin defines a group of settings and the conditions where the settings are automatically added to a build (called "activation"). +The `requires` and `trigger` methods together define the conditions, and a method like `projectSettings` defines the settings to add. + +Steps for plugin authors: +1. Determine if the AutoPlugin should automatically be activated when all requirements are met, or should be opt-in. +2. Determine the [[AutoPlugins]]s that, when present (or absent), act as the requirements for the AutoPlugin. +3. Determine the settings/configurations to that the AutoPlugin injects when activated. +4. Determine the keys and other names to be automatically imported to *.sbt scripts. + +For example, the following will automatically add the settings in `projectSettings` + to a project that has both the `Web` and `Javascript` plugins enabled. + + object Plugin extends sbt.AutoPlugin { + override def requires = Web && Javascript + override def trigger = allRequirements + override def projectSettings = Seq(...) 
+ + object autoImport { + lazy val obfuscate = taskKey[Seq[File]]("Obfuscates the source.") + } + } + +Steps for users: +1. Add dependencies on plugins in `project/plugins.sbt` as usual with `addSbtPlugin` +2. Add key plugins to Projects, which will automatically select the plugin + dependent plugin settings to add for those Projects. +3. Exclude plugins, if desired. + +For example, given plugins Web and Javascript (perhaps provided by plugins added with addSbtPlugin), + + .addPlugins( Web && Javascript ) + +will activate `MyPlugin` defined above and have its settings automatically added. If the user instead defines + + .addPlugins( Web && Javascript ).disablePlugins(MyPlugin) + +then the `MyPlugin` settings (and anything that activates only when `MyPlugin` is activated) will not be added. + +*/ +abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions +{ + /** Determines whether this AutoPlugin will be activated for this project when the `requires` clause is satisfied. + * + * When this method returns `allRequirements`, and `requires` method returns `Web && Javascript`, this plugin + * instance will be added automatically if the `Web` and `Javascript` plugins are enbled. + * + * When this method returns `noTrigger`, and `requires` method returns `Web && Javascript`, this plugin + * instance will be added only if the build user enables it, but it will automatically add both `Web` and `Javascript`. */ + def trigger: PluginTrigger = noTrigger + + /** This AutoPlugin requires the plugins the [[Plugins]] matcher returned by this method. See [[trigger]]. + */ + def requires: Plugins = empty + + val label: String = getClass.getName.stripSuffix("$") + + override def toString: String = label + + /** The [[Configuration]]s to add to each project that activates this AutoPlugin.*/ + def projectConfigurations: Seq[Configuration] = Nil + + /** The [[Setting]]s to add in the scope of each project that activates this AutoPlugin. 
*/ + def projectSettings: Seq[Setting[_]] = Nil + + /** The [[Setting]]s to add to the build scope for each project that activates this AutoPlugin. + * The settings returned here are guaranteed to be added to a given build scope only once + * regardless of how many projects for that build activate this AutoPlugin. */ + def buildSettings: Seq[Setting[_]] = Nil + + /** The [[Setting]]s to add to the global scope exactly once if any project activates this AutoPlugin. */ + def globalSettings: Seq[Setting[_]] = Nil + + // TODO?: def commands: Seq[Command] + + private[sbt] def unary_! : Exclude = Exclude(this) + + + /** If this plugin does not have any requirements, it means it is actually a root plugin. */ + private[sbt] final def isRoot: Boolean = + requires match { + case Empty => true + case _ => false + } + + /** If this plugin does not have any requirements, it means it is actually a root plugin. */ + private[sbt] final def isAlwaysEnabled: Boolean = + isRoot && (trigger == AllRequirements) +} + +/** An error that occurs when auto-plugins aren't configured properly. +* It translates the error from the underlying logic system to be targeted at end users. */ +final class AutoPluginException private(val message: String, val origin: Option[LogicException]) extends RuntimeException(message) +{ + /** Prepends `p` to the error message derived from `origin`. */ + def withPrefix(p: String) = new AutoPluginException(p + message, origin) +} +object AutoPluginException +{ + def apply(msg: String): AutoPluginException = new AutoPluginException(msg, None) + def apply(origin: LogicException): AutoPluginException = new AutoPluginException(Plugins.translateMessage(origin), Some(origin)) +} + +sealed trait PluginTrigger +case object AllRequirements extends PluginTrigger +case object NoTrigger extends PluginTrigger + +/** An expression that matches `AutoPlugin`s. 
*/ +sealed trait Plugins { + def && (o: Basic): Plugins +} + + +sealed trait PluginsFunctions +{ + /** [[Plugins]] instance that doesn't require any [[Plugins]]s. */ + def empty: Plugins = Plugins.Empty + + /** This plugin is activated when all required plugins are present. */ + def allRequirements: PluginTrigger = AllRequirements + /** This plugin is activated only when it is manually activated. */ + def noTrigger: PluginTrigger = NoTrigger +} + +object Plugins extends PluginsFunctions +{ + /** Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[AutoPlugin]]s. + * The [[AutoPlugin]]s are topologically sorted so that a required [[AutoPlugin]] comes before its requiring [[AutoPlugin]].*/ + def deducer(defined0: List[AutoPlugin]): (Plugins, Logger) => Seq[AutoPlugin] = + if(defined0.isEmpty) (_, _) => Nil + else + { + // TODO: defined should return all the plugins + val allReqs = (defined0 flatMap { asRequirements }).toSet + val diff = allReqs diff defined0.toSet + val defined = if (!diff.isEmpty) diff.toList ::: defined0 + else defined0 + + val byAtom = defined map { x => (Atom(x.label), x) } + val byAtomMap = byAtom.toMap + if(byAtom.size != byAtomMap.size) duplicateProvidesError(byAtom) + // Ignore clauses for plugins that does not require anything else. + // Avoids the requirement for pure Nature strings *and* possible + // circular dependencies in the logic. 
+ val allRequirementsClause = defined.filterNot(_.isRoot).flatMap(d => asRequirementsClauses(d)) + val allEnabledByClause = defined.filterNot(_.isRoot).flatMap(d => asEnabledByClauses(d)) + (requestedPlugins, log) => { + val alwaysEnabled: List[AutoPlugin] = defined.filter(_.isAlwaysEnabled) + val knowlege0: Set[Atom] = ((flatten(requestedPlugins) ++ alwaysEnabled) collect { + case x: AutoPlugin => Atom(x.label) + }).toSet + val clauses = Clauses((allRequirementsClause ::: allEnabledByClause) filterNot { _.head subsetOf knowlege0 }) + log.debug(s"deducing auto plugins based on known facts ${knowlege0.toString} and clauses ${clauses.toString}") + Logic.reduce(clauses, (flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match { + case Left(problem) => throw AutoPluginException(problem) + case Right(results) => + log.debug(s" :: deduced result: ${results}") + val selectedAtoms: List[Atom] = results.ordered + val selectedPlugins = selectedAtoms map { a => + byAtomMap.getOrElse(a, throw AutoPluginException(s"${a} was not found in atom map.")) + } + val forbidden: Set[AutoPlugin] = (selectedPlugins flatMap { Plugins.asExclusions }).toSet + val c = selectedPlugins.toSet & forbidden + if (!c.isEmpty) { + exlusionConflictError(requestedPlugins, selectedPlugins, c.toSeq sortBy {_.label}) + } + val retval = topologicalSort(selectedPlugins, log) + log.debug(s" :: sorted deduced result: ${retval.toString}") + retval + } + } + } + private[sbt] def topologicalSort(ns: List[AutoPlugin], log: Logger): List[AutoPlugin] = { + log.debug(s"sorting: ns: ${ns.toString}") + @tailrec def doSort(found0: List[AutoPlugin], notFound0: List[AutoPlugin], limit0: Int): List[AutoPlugin] = { + log.debug(s" :: sorting:: found: ${found0.toString} not found ${notFound0.toString}") + if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically") + else if (notFound0.isEmpty) found0 + else { + val (found1, notFound1) = notFound0 partition { n => 
asRequirements(n).toSet subsetOf found0.toSet } + doSort(found0 ::: found1, notFound1, limit0 - 1) + } + } + val (roots, nonRoots) = ns partition (_.isRoot) + doSort(roots, nonRoots, ns.size * ns.size + 1) + } + private[sbt] def translateMessage(e: LogicException) = e match { + case ic: InitialContradictions => s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(ic.literals.toSeq)}" + case io: InitialOverlap => s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisifed. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" + case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}" + } + private[this] def literalsString(lits: Seq[Literal]): String = + lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString(", ") + + private[this] def duplicateProvidesError(byAtom: Seq[(Atom, AutoPlugin)]) { + val dupsByAtom = byAtom.groupBy(_._1).mapValues(_.map(_._2)) + val dupStrings = for( (atom, dups) <- dupsByAtom if dups.size > 1 ) yield + s"${atom.label} by ${dups.mkString(", ")}" + val (ns, nl) = if(dupStrings.size > 1) ("s", "\n\t") else ("", " ") + val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" + throw AutoPluginException(message) + } + private[this] def exlusionConflictError(requested: Plugins, selected: Seq[AutoPlugin], conflicting: Seq[AutoPlugin]) { + def listConflicts(ns: Seq[AutoPlugin]) = (ns map { c => + val reasons = (if (flatten(requested) contains c) List("requested") + else Nil) ++ + (if (c.requires != empty && c.trigger == allRequirements) List(s"enabled by ${c.requires.toString}") + else Nil) ++ + { + val reqs = selected filter { x => asRequirements(x) contains c } + if (!reqs.isEmpty) List(s"""required by ${reqs.mkString(", ")}""") + else Nil + } ++ + { + val exs = selected filter { x => asExclusions(x) contains c } + 
if (!exs.isEmpty) List(s"""excluded by ${exs.mkString(", ")}""") + else Nil + } + s""" - conflict: ${c.label} is ${reasons.mkString("; ")}""" + }).mkString("\n") + throw AutoPluginException(s"""Contradiction in enabled plugins: + - requested: ${requested.toString} + - enabled: ${selected.mkString(", ")} +${listConflicts(conflicting)}""") + } + + private[sbt] final object Empty extends Plugins { + def &&(o: Basic): Plugins = o + override def toString = "" + } + + /** An included or excluded Nature/Plugin. TODO: better name than Basic. Also, can we dump + * this class. + */ + sealed abstract class Basic extends Plugins { + def &&(o: Basic): Plugins = And(this :: o :: Nil) + } + private[sbt] final case class Exclude(n: AutoPlugin) extends Basic { + override def toString = s"!$n" + } + private[sbt] final case class And(plugins: List[Basic]) extends Plugins { + def &&(o: Basic): Plugins = And(o :: plugins) + override def toString = plugins.mkString(" && ") + } + private[sbt] def and(a: Plugins, b: Plugins) = b match { + case Empty => a + case And(ns) => (a /: ns)(_ && _) + case b: Basic => a && b + } + private[sbt] def remove(a: Plugins, del: Set[Basic]): Plugins = a match { + case b: Basic => if(del(b)) Empty else b + case Empty => Empty + case And(ns) => + val removed = ns.filterNot(del) + if(removed.isEmpty) Empty else And(removed) + } + + /** Defines enabled-by clauses for `ap`. */ + private[sbt] def asEnabledByClauses(ap: AutoPlugin): List[Clause] = + // `ap` is the head and the required plugins for `ap` is the body. + if (ap.trigger == AllRequirements) Clause( convert(ap.requires), Set(Atom(ap.label)) ) :: Nil + else Nil + /** Defines requirements clauses for `ap`. */ + private[sbt] def asRequirementsClauses(ap: AutoPlugin): List[Clause] = + // required plugin is the head and `ap` is the body. 
+ asRequirements(ap) map { x => Clause( convert(ap), Set(Atom(x.label)) ) } + private[sbt] def asRequirements(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect { + case x: AutoPlugin => x + } + private[sbt] def asExclusions(ap: AutoPlugin): List[AutoPlugin] = flatten(ap.requires).toList collect { + case Exclude(x) => x + } + private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match { + case And(ns) => convertAll(ns) + case b: Basic => convertBasic(b) :: Nil + case Empty => Nil + } + private[sbt] def flatten(n: Plugins): Seq[Basic] = n match { + case And(ns) => ns + case b: Basic => b :: Nil + case Empty => Nil + } + + private[this] def convert(n: Plugins): Formula = n match { + case And(ns) => convertAll(ns).reduce[Formula](_ && _) + case b: Basic => convertBasic(b) + case Empty => Formula.True + } + private[this] def convertBasic(b: Basic): Literal = b match { + case Exclude(n) => !convertBasic(n) + case a: AutoPlugin => Atom(a.label) + } + private[this] def convertAll(ns: Seq[Basic]): Seq[Literal] = ns map convertBasic + + /** True if the trigger clause `n` is satisfied by `model`. 
*/ + def satisfied(n: Plugins, model: Set[AutoPlugin]): Boolean = + flatten(n) forall { + case Exclude(a) => !model(a) + case ap: AutoPlugin => model(ap) + } + + private[sbt] def hasAutoImportGetter(ap: AutoPlugin, loader: ClassLoader): Boolean = { + import reflect.runtime.{universe => ru} + import util.control.Exception.catching + val m = ru.runtimeMirror(loader) + val im = m.reflect(ap) + val hasGetterOpt = catching(classOf[ScalaReflectionException]) opt { + im.symbol.asType.toType.declaration(ru.newTermName("autoImport")) match { + case ru.NoSymbol => false + case sym => sym.asTerm.isGetter + } + } + hasGetterOpt getOrElse false + } +} diff --git a/main/src/main/scala/sbt/PluginsDebug.scala b/main/src/main/scala/sbt/PluginsDebug.scala new file mode 100644 index 000000000..bace7b52b --- /dev/null +++ b/main/src/main/scala/sbt/PluginsDebug.scala @@ -0,0 +1,382 @@ +package sbt + + import Def.Setting + import Plugins._ + import PluginsDebug._ + import java.net.URI + +private[sbt] class PluginsDebug(val available: List[AutoPlugin], val nameToKey: Map[String, AttributeKey[_]], val provided: Relation[AutoPlugin, AttributeKey[_]]) +{ + /** The set of [[AutoPlugin]]s that might define a key named `keyName`. + * Because plugins can define keys in different scopes, this should only be used as a guideline. */ + def providers(keyName: String): Set[AutoPlugin] = nameToKey.get(keyName) match { + case None => Set.empty + case Some(key) => provided.reverse(key) + } + /** Describes alternative approaches for defining key [[keyName]] in [[context]].*/ + def toEnable(keyName: String, context: Context): List[PluginEnable] = + providers(keyName).toList.map(plugin => pluginEnable(context, plugin)) + + /** Provides text to suggest how [[notFoundKey]] can be defined in [[context]]. 
*/ + def debug(notFoundKey: String, context: Context): String = + { + val (activated, deactivated) = Util.separate(toEnable(notFoundKey, context)) { + case pa: PluginActivated => Left(pa) + case pd: EnableDeactivated => Right(pd) + } + val activePrefix = if(activated.nonEmpty) s"Some already activated plugins define $notFoundKey: ${activated.mkString(", ")}\n" else "" + activePrefix + debugDeactivated(notFoundKey, deactivated) + } + private[this] def debugDeactivated(notFoundKey: String, deactivated: Seq[EnableDeactivated]): String = + { + val (impossible, possible) = Util.separate(deactivated) { + case pi: PluginImpossible => Left(pi) + case pr: PluginRequirements => Right(pr) + } + if(possible.nonEmpty) { + val explained = possible.map(explainPluginEnable) + val possibleString = + if(explained.size > 1) explained.zipWithIndex.map{case (s,i) => s"$i. $s"}.mkString(s"Multiple plugins are available that can provide $notFoundKey:\n", "\n", "") + else s"$notFoundKey is provided by an available (but not activated) plugin:\n${explained.mkString}" + def impossiblePlugins = impossible.map(_.plugin.label).mkString(", ") + val imPostfix = if(impossible.isEmpty) "" else s"\n\nThere are other available plugins that provide $notFoundKey, but they are impossible to add: $impossiblePlugins" + possibleString + imPostfix + } + else if(impossible.isEmpty) + s"No available plugin provides key $notFoundKey." 
+ else { + val explanations = impossible.map(explainPluginEnable) + explanations.mkString(s"Plugins are available that could provide $notFoundKey, but they are impossible to add:\n\t", "\n\t", "") + } + } + + /** Text that suggests how to activate [[plugin]] in [[context]] if possible and if it is not already activated.*/ + def help(plugin: AutoPlugin, context: Context): String = + if(context.enabled.contains(plugin)) + activatedHelp(plugin) + else + deactivatedHelp(plugin, context) + private def activatedHelp(plugin: AutoPlugin): String = + { + val prefix = s"${plugin.label} is activated." + val keys = provided.forward(plugin) + val keysString = if(keys.isEmpty) "" else s"\nIt may affect these keys: ${multi(keys.toList.map(_.label))}" + val configs = plugin.projectConfigurations + val confsString = if(configs.isEmpty) "" else s"\nIt defines these configurations: ${multi(configs.map(_.name))}" + prefix + keysString + confsString + } + private def deactivatedHelp(plugin: AutoPlugin, context: Context): String = + { + val prefix = s"${plugin.label} is NOT activated." 
+ val keys = provided.forward(plugin) + val keysString = if(keys.isEmpty) "" else s"\nActivating it may affect these keys: ${multi(keys.toList.map(_.label))}" + val configs = plugin.projectConfigurations + val confsString = if(configs.isEmpty) "" else s"\nActivating it will define these configurations: ${multi(configs.map(_.name))}" + val toActivate = explainPluginEnable(pluginEnable(context, plugin)) + s"$prefix$keysString$confsString\n$toActivate" + } + + private[this] def multi(strs: Seq[String]): String = strs.mkString(if(strs.size > 4) "\n\t" else ", ") +} + +private[sbt] object PluginsDebug +{ + def helpAll(s: State): String = + if(Project.isProjectLoaded(s)) + { + val extracted = Project.extract(s) + import extracted._ + def helpBuild(uri: URI, build: LoadedBuildUnit): String = + { + val pluginStrings = for(plugin <- availableAutoPlugins(build)) yield { + val activatedIn = build.defined.values.toList.filter(_.autoPlugins.contains(plugin)).map(_.id) + val actString = if(activatedIn.nonEmpty) activatedIn.mkString(": enabled in ", ", ", "") else "" // TODO: deal with large builds + s"\n\t${plugin.label}$actString" + } + s"In $uri${pluginStrings.mkString}" + } + val buildStrings = for((uri, build) <- structure.units) yield helpBuild(uri, build) + buildStrings.mkString("\n") + } + else + "No project is currently loaded." 
+ + def autoPluginMap(s: State): Map[String, AutoPlugin] = + { + val extracted = Project.extract(s) + import extracted._ + structure.units.values.toList.flatMap(availableAutoPlugins).map(plugin => (plugin.label, plugin)).toMap + } + private[this] def availableAutoPlugins(build: LoadedBuildUnit): Seq[AutoPlugin] = + build.unit.plugins.detected.autoPlugins map {_.value} + + def help(plugin: AutoPlugin, s: State): String = + { + val extracted = Project.extract(s) + import extracted._ + def definesPlugin(p: ResolvedProject): Boolean = p.autoPlugins.contains(plugin) + def projectForRef(ref: ProjectRef): ResolvedProject = get(Keys.thisProject in ref) + val perBuild: Map[URI, Set[AutoPlugin]] = structure.units.mapValues(unit => availableAutoPlugins(unit).toSet) + val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList + lazy val context = Context(currentProject.plugins, currentProject.autoPlugins, Plugins.deducer(pluginsThisBuild), pluginsThisBuild, s.log) + lazy val debug = PluginsDebug(context.available) + if(!pluginsThisBuild.contains(plugin)) { + val availableInBuilds: List[URI] = perBuild.toList.filter(_._2(plugin)).map(_._1) + s"Plugin ${plugin.label} is only available in builds:\n\t${availableInBuilds.mkString("\n\t")}\nSwitch to a project in one of those builds using `project` and rerun this command for more information." 
+ } else if(definesPlugin(currentProject)) + debug.activatedHelp(plugin) + else { + val thisAggregated = BuildUtil.dependencies(structure.units).aggregateTransitive.getOrElse(currentRef, Nil) + val definedInAggregated = thisAggregated.filter(ref => definesPlugin(projectForRef(ref))) + if(definedInAggregated.nonEmpty) { + val projectNames = definedInAggregated.map(_.project) // TODO: usually in this build, but could technically require the build to be qualified + s"Plugin ${plugin.label} is not activated on this project, but this project aggregates projects where it is activated:\n\t${projectNames.mkString("\n\t")}" + } else { + val base = debug.deactivatedHelp(plugin, context) + val aggNote = if(thisAggregated.nonEmpty) "Note: This project aggregates other projects and this" else "Note: This" + val common = " information is for this project only." + val helpOther = "To see how to activate this plugin for another project, change to the project using `project ` and rerun this command." + s"$base\n$aggNote$common\n$helpOther" + } + } + } + + /** Precomputes information for debugging plugins. */ + def apply(available: List[AutoPlugin]): PluginsDebug = + { + val keyR = definedKeys(available) + val nameToKey: Map[String, AttributeKey[_]] = keyR._2s.toList.map(key => (key.label, key)).toMap + new PluginsDebug(available, nameToKey, keyR) + } + + /** The context for debugging a plugin (de)activation. + * @param initial The initially defined [[AutoPlugin]]s. + * @param enabled The resulting model. + * @param deducePlugin The function used to compute the model. + * @param available All [[AutoPlugin]]s available for consideration. */ + final case class Context(initial: Plugins, enabled: Seq[AutoPlugin], deducePlugin: (Plugins, Logger) => Seq[AutoPlugin], available: List[AutoPlugin], log: Logger) + + /** Describes the steps to activate a plugin in some context. 
*/ + sealed abstract class PluginEnable + /** Describes a [[plugin]] that is already activated in the [[context]].*/ + final case class PluginActivated(plugin: AutoPlugin, context: Context) extends PluginEnable + sealed abstract class EnableDeactivated extends PluginEnable + /** Describes a [[plugin]] that cannot be activated in a [[context]] due to [[contradictions]] in requirements. */ + final case class PluginImpossible(plugin: AutoPlugin, context: Context, contradictions: Set[AutoPlugin]) extends EnableDeactivated + + /** Describes the requirements for activating [[plugin]] in [[context]]. + * @param context The base plugins, exclusions, and ultimately activated plugins + * @param blockingExcludes Existing exclusions that prevent [[plugin]] from being activated and must be dropped + * @param enablingPlugins [[AutoPlugin]]s that are not currently enabled, but need to be enabled for [[plugin]] to activate + * @param extraEnabledPlugins Plugins that will be enabled as a result of [[plugin]] activating, but are not required for [[plugin]] to activate + * @param willRemove Plugins that will be deactivated as a result of [[plugin]] activating + * @param deactivate Describes plugins that must be deactivated for [[plugin]] to activate. These require an explicit exclusion or dropping a transitive [[AutoPlugin]].*/ + final case class PluginRequirements(plugin: AutoPlugin, context: Context, blockingExcludes: Set[AutoPlugin], enablingPlugins: Set[AutoPlugin], extraEnabledPlugins: Set[AutoPlugin], willRemove: Set[AutoPlugin], deactivate: List[DeactivatePlugin]) extends EnableDeactivated + + /** Describes a [[plugin]] that must be removed in order to activate another plugin in some context. + * The [[plugin]] can always be directly, explicitly excluded. + * @param removeOneOf If non-empty, removing one of these [[AutoPlugin]]s will deactivate [[plugin]] without affecting the other plugin. If empty, a direct exclusion is required. 
+ * @param newlySelected If false, this plugin was selected in the original context. */ + final case class DeactivatePlugin(plugin: AutoPlugin, removeOneOf: Set[AutoPlugin], newlySelected: Boolean) + + /** Determines how to enable [[plugin]] in [[context]]. */ + def pluginEnable(context: Context, plugin: AutoPlugin): PluginEnable = + if(context.enabled.contains(plugin)) + PluginActivated(plugin, context) + else + enableDeactivated(context, plugin) + + private[this] def enableDeactivated(context: Context, plugin: AutoPlugin): PluginEnable = + { + // deconstruct the context + val initialModel = context.enabled.toSet + val initial = flatten(context.initial) + val initialPlugins = plugins(initial) + val initialExcludes = excludes(initial) + + val minModel = minimalModel(plugin) + + /* example 1 + A :- B, not C + C :- D, E + initial: B, D, E + propose: drop D or E + + initial: B, not A + propose: drop 'not A' + + example 2 + A :- B, not C + C :- B + initial: + propose: B, exclude C + */ + + // `plugin` will only be activated when all of these plugins are activated + // Deactivating any one of these would deactivate `plugin`. + val minRequiredPlugins = plugins(minModel) + + // The presence of any one of these plugins would deactivate `plugin` + val minAbsentPlugins = excludes(minModel).toSet + + // Plugins that must be both activated and deactivated for `plugin` to activate. + // A non-empty list here cannot be satisfied and is an error. + val contradictions = minAbsentPlugins & minRequiredPlugins + + if(contradictions.nonEmpty) + PluginImpossible(plugin, context, contradictions) + else + { + // Plugins that the user has to add to the currently selected plugins in order to enable `plugin`. + val addToExistingPlugins = minRequiredPlugins -- initialPlugins + + // Plugins that are currently excluded that need to be allowed. 
+ val blockingExcludes = initialExcludes & minRequiredPlugins + + // The model that results when the minimal plugins are enabled and the minimal plugins are excluded. + // This can include more plugins than just `minRequiredPlugins` because the plugins required for `plugin` + // might activate other plugins as well. + val modelForMin = context.deducePlugin(and(includeAll(minRequiredPlugins), excludeAll(minAbsentPlugins)), context.log) + + val incrementalInputs = and( includeAll(minRequiredPlugins ++ initialPlugins), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins)) + val incrementalModel = context.deducePlugin(incrementalInputs, context.log).toSet + + // Plugins that are newly enabled as a result of selecting the plugins needed for `plugin`, but aren't strictly required for `plugin`. + // These could be excluded and `plugin` and the user's current plugins would still be activated. + val extraPlugins = incrementalModel.toSet -- minRequiredPlugins -- initialModel + + // Plugins that will no longer be enabled as a result of enabling `plugin`. + val willRemove = initialModel -- incrementalModel + + // Determine the plugins that must be independently deactivated. + // If both A and B must be deactivated, but A transitively depends on B, deactivating B will deactivate A. + // If A must be deactivated, but one of its (transitively) required plugins isn't present, it won't be activated. + // So, in either of these cases, A doesn't need to be considered further and won't be included in this set. + val minDeactivate = minAbsentPlugins.filter(p => Plugins.satisfied(p.requires, incrementalModel)) + + val deactivate = for(d <- minDeactivate.toList) yield { + // removing any one of these plugins will deactivate `d`. TODO: This is not an especially efficient implementation. + val removeToDeactivate = plugins(minimalModel(d)) -- minRequiredPlugins + val newlySelected = !initialModel(d) + // a. 
suggest removing a plugin in removeOneToDeactivate to deactivate d + // b. suggest excluding `d` to directly deactivate it in any case + // c. note whether d was already activated (in context.enabled) or is newly selected + DeactivatePlugin(d, removeToDeactivate, newlySelected) + } + + PluginRequirements(plugin, context, blockingExcludes, addToExistingPlugins, extraPlugins, willRemove, deactivate) + } + } + + private[this] def includeAll[T <: Basic](basic: Set[T]): Plugins = And(basic.toList) + private[this] def excludeAll(plugins: Set[AutoPlugin]): Plugins = And(plugins map (p => Exclude(p)) toList) + + private[this] def excludes(bs: Seq[Basic]): Set[AutoPlugin] = bs.collect { case Exclude(b) => b }.toSet + private[this] def plugins(bs: Seq[Basic]): Set[AutoPlugin] = bs.collect { case n: AutoPlugin => n }.toSet + + // If there is a model that includes `plugin`, it includes at least what is returned by this method. + // This is the list of plugins that must be included as well as list of plugins that must not be present. + // It might not be valid, such as if there are contradictions or if there are cycles that are unsatisfiable. + // The actual model might be larger, since other plugins might be enabled by the selected plugins. + private[this] def minimalModel(plugin: AutoPlugin): Seq[Basic] = Dag.topologicalSortUnchecked(plugin: Basic) { + case _: Exclude => Nil + case ap: AutoPlugin => Plugins.flatten(ap.requires) :+ plugin + } + + /** String representation of [[PluginEnable]], intended for end users. 
*/ + def explainPluginEnable(ps: PluginEnable): String = + ps match { + case PluginRequirements(plugin, context, blockingExcludes, enablingPlugins, extraEnabledPlugins, toBeRemoved, deactivate) => + def indent(str: String) = if(str.isEmpty) "" else s"\t$str" + def note(str: String) = if(str.isEmpty) "" else s"Note: $str" + val parts = + indent(excludedError(false /* TODO */, blockingExcludes.toList)) :: + indent(required(enablingPlugins.toList)) :: + indent(needToDeactivate(deactivate)) :: + note(willAdd(plugin, extraEnabledPlugins.toList)) :: + note(willRemove(plugin, toBeRemoved.toList)) :: + Nil + parts.filterNot(_.isEmpty).mkString("\n") + case PluginImpossible(plugin, context, contradictions) => pluginImpossible(plugin, contradictions) + case PluginActivated(plugin, context) => s"Plugin ${plugin.label} already activated." + } + + /** Provides a [[Relation]] between plugins and the keys they potentially define. + * Because plugins can define keys in different scopes and keys can be overridden, this is not definitive.*/ + def definedKeys(available: List[AutoPlugin]): Relation[AutoPlugin, AttributeKey[_]] = + { + def extractDefinedKeys(ss: Seq[Setting[_]]): Seq[AttributeKey[_]] = + ss.map(_.key.key) + def allSettings(p: AutoPlugin): Seq[Setting[_]] = p.projectSettings ++ p.buildSettings ++ p.globalSettings + val empty = Relation.empty[AutoPlugin, AttributeKey[_]] + (empty /: available)( (r,p) => r + (p, extractDefinedKeys(allSettings(p))) ) + } + + private[this] def excludedError(transitive: Boolean, dependencies: List[AutoPlugin]): String = + str(dependencies)(excludedPluginError(transitive), excludedPluginsError(transitive)) + + private[this] def excludedPluginError(transitive: Boolean)(dependency: AutoPlugin) = + s"Required ${transitiveString(transitive)}dependency ${dependency.label} was excluded." 
+ private[this] def excludedPluginsError(transitive: Boolean)(dependencies: List[AutoPlugin]) = + s"Required ${transitiveString(transitive)}dependencies were excluded:\n\t${labels(dependencies).mkString("\n\t")}" + private[this] def transitiveString(transitive: Boolean) = + if(transitive) "(transitive) " else "" + + private[this] def required(plugins: List[AutoPlugin]): String = + str(plugins)(requiredPlugin, requiredPlugins) + + private[this] def requiredPlugin(plugin: AutoPlugin) = + s"Required plugin ${plugin.label} not present." + private[this] def requiredPlugins(plugins: List[AutoPlugin]) = + s"Required plugins not present:\n\t${plugins.map(_.label).mkString("\n\t")}" + + private[this] def str[A](list: List[A])(f: A => String, fs: List[A] => String): String = list match { + case Nil => "" + case single :: Nil => f(single) + case _ => fs(list) + } + + private[this] def willAdd(base: AutoPlugin, plugins: List[AutoPlugin]): String = + str(plugins)(willAddPlugin(base), willAddPlugins(base)) + + private[this] def willAddPlugin(base: AutoPlugin)(plugin: AutoPlugin) = + s"Enabling ${base.label} will also enable ${plugin.label}" + private[this] def willAddPlugins(base: AutoPlugin)(plugins: List[AutoPlugin]) = + s"Enabling ${base.label} will also enable:\n\t${labels(plugins).mkString("\n\t")}" + + private[this] def willRemove(base: AutoPlugin, plugins: List[AutoPlugin]): String = + str(plugins)(willRemovePlugin(base), willRemovePlugins(base)) + + private[this] def willRemovePlugin(base: AutoPlugin)(plugin: AutoPlugin) = + s"Enabling ${base.label} will disable ${plugin.label}" + private[this] def willRemovePlugins(base: AutoPlugin)(plugins: List[AutoPlugin]) = + s"Enabling ${base.label} will disable:\n\t${labels(plugins).mkString("\n\t")}" + + private[this] def labels(plugins: List[AutoPlugin]): List[String] = + plugins.map(_.label) + + private[this] def needToDeactivate(deactivate: List[DeactivatePlugin]): String = + str(deactivate)(deactivate1, deactivateN) + 
private[this] def deactivateN(plugins: List[DeactivatePlugin]): String = + plugins.map(deactivateString).mkString("These plugins need to be deactivated:\n\t", "\n\t", "") + private[this] def deactivate1(deactivate: DeactivatePlugin): String = + s"Need to deactivate ${deactivateString(deactivate)}" + private[this] def deactivateString(d: DeactivatePlugin): String = + { + val removePluginsString: String = + d.removeOneOf.toList match { + case Nil => "" + case x :: Nil => s" or no longer include $x" + case xs => s" or remove one of ${xs.mkString(", ")}" + } + s"${d.plugin.label}: directly exclude it${removePluginsString}" + } + + private[this] def pluginImpossible(plugin: AutoPlugin, contradictions: Set[AutoPlugin]): String = + str(contradictions.toList)(pluginImpossible1(plugin), pluginImpossibleN(plugin)) + + private[this] def pluginImpossible1(plugin: AutoPlugin)(contradiction: AutoPlugin): String = + s"There is no way to enable plugin ${plugin.label}. It (or its dependencies) requires plugin ${contradiction.label} to both be present and absent. Please report the problem to the plugin's author." + private[this] def pluginImpossibleN(plugin: AutoPlugin)(contradictions: List[AutoPlugin]): String = + s"There is no way to enable plugin ${plugin.label}. It (or its dependencies) requires these plugins to be both present and absent:\n\t${labels(contradictions).mkString("\n\t")}\nPlease report the problem to the plugin's author." +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index db0705299..faea25e13 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -50,33 +50,52 @@ sealed trait ProjectDefinition[PR <: ProjectReference] /** Configures the sources of automatically appended settings.*/ def auto: AddSettings + /** The defined [[Plugins]] associated with this project. 
+ A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ + def plugins: Plugins + + /** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */ + private[sbt] def autoPlugins: Seq[AutoPlugin] + override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode override final def equals(o: Any) = o match { case p: ProjectDefinition[_] => p.getClass == this.getClass && p.id == id && p.base == base case _ => false } - override def toString = "Project(id: " + id + ", base: " + base + ", aggregate: " + aggregate + ", dependencies: " + dependencies + ", configurations: " + configurations + ")" + override def toString = + { + val agg = ifNonEmpty("aggregate", aggregate) + val dep = ifNonEmpty("dependencies", dependencies) + val conf = ifNonEmpty("configurations", configurations) + val autos = ifNonEmpty("autoPlugins", autoPlugins.map(_.label)) + val fields = s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"plugins: List($plugins)" :: autos) + s"Project(${fields.mkString(", ")})" + } + private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] = if(ts.isEmpty) Nil else s"$label: $ts" :: Nil } sealed trait Project extends ProjectDefinition[ProjectReference] { + // TODO: add parameters for plugins in 0.14.0 (not reasonable to do in a binary compatible way in 0.13) def copy(id: String = id, base: File = base, aggregate: => Seq[ProjectReference] = aggregate, dependencies: => Seq[ClasspathDep[ProjectReference]] = dependencies, delegates: => Seq[ProjectReference] = delegates, settings: => Seq[Setting[_]] = settings, configurations: Seq[Configuration] = configurations, auto: AddSettings = auto): Project = - Project(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto) + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, 
settings, configurations, auto, plugins, autoPlugins) def resolve(resolveRef: ProjectReference => ProjectRef): ResolvedProject = { def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep def resolveDep(d: ClasspathDep[ProjectReference]) = ResolvedClasspathDependency(resolveRef(d.project), d.configuration) - resolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), settings, configurations, auto) + resolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), + settings, configurations, auto, plugins, autoPlugins) } def resolveBuild(resolveRef: ProjectReference => ProjectReference): Project = { def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep def resolveDep(d: ClasspathDep[ProjectReference]) = ClasspathDependency(resolveRef(d.project), d.configuration) - apply(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), settings, configurations, auto) + unresolved(id, base, aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies), delegates = resolveRefs(delegates), + settings, configurations, auto, plugins, autoPlugins) } /** Applies the given functions to this Project. @@ -106,8 +125,11 @@ sealed trait Project extends ProjectDefinition[ProjectReference] /** Appends settings to the current settings sequence for this project. 
*/ def settings(ss: Setting[_]*): Project = copy(settings = (settings: Seq[Setting[_]]) ++ ss) + @deprecated("Use settingSets method.", "0.13.5") + def autoSettings(select: AddSettings*): Project = settingSets(select.toSeq: _*) + /** Configures how settings from other sources, such as .sbt files, are appended to the explicitly specified settings for this project. */ - def autoSettings(select: AddSettings*): Project = copy(auto = AddSettings.seq(select : _*)) + def settingSets(select: AddSettings*): Project = copy(auto = AddSettings.seq(select : _*)) /** Adds a list of .sbt files whose settings will be appended to the settings of this project. * They will be appended after the explicit settings and already defined automatic settings sources. */ @@ -116,8 +138,30 @@ sealed trait Project extends ProjectDefinition[ProjectReference] /** Sets the list of .sbt files to parse for settings to be appended to this project's settings. * Any configured .sbt files are removed from this project's list.*/ def setSbtFiles(files: File*): Project = copy(auto = AddSettings.append( AddSettings.clearSbtFiles(auto), AddSettings.sbtFiles(files: _*)) ) + + /** Sets the [[AutoPlugin]]s of this project. + A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to add to a project. */ + def addPlugins(ns: Plugins*): Project = setPlugins(ns.foldLeft(plugins)(Plugins.and)) + + /** Disable the given plugins on this project. */ + def disablePlugins(ps: AutoPlugin*): Project = + setPlugins(Plugins.and(plugins, Plugins.And(ps.map(p => Plugins.Exclude(p)).toList))) + + private[this] def setPlugins(ns: Plugins): Project = { + // TODO: for 0.14.0, use copy when it has the additional `plugins` parameter + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, ns, autoPlugins) + } + + /** Definitively set the [[AutoPlugin]]s for this project. 
*/ + private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = { + // TODO: for 0.14.0, use copy when it has the additional `autoPlugins` parameter + unresolved(id, base, aggregate = aggregate, dependencies = dependencies, delegates = delegates, settings, configurations, auto, plugins, autos) + } +} +sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] { + /** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]].*/ + def autoPlugins: Seq[AutoPlugin] } -sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] sealed trait ClasspathDep[PR <: ProjectReference] { def project: PR; def configuration: Option[String] } final case class ResolvedClasspathDependency(project: ProjectRef, configuration: Option[String]) extends ClasspathDep[ProjectRef] @@ -150,23 +194,23 @@ object Project extends ProjectExtra Def.showRelativeKey( ProjectRef(loaded.root, loaded.units(loaded.root).rootProjects.head), loaded.allProjectRefs.size > 1, keyNameColor) private abstract class ProjectDef[PR <: ProjectReference](val id: String, val base: File, aggregate0: => Seq[PR], dependencies0: => Seq[ClasspathDep[PR]], - delegates0: => Seq[PR], settings0: => Seq[Def.Setting[_]], val configurations: Seq[Configuration], val auto: AddSettings) extends ProjectDefinition[PR] + delegates0: => Seq[PR], settings0: => Seq[Def.Setting[_]], val configurations: Seq[Configuration], val auto: AddSettings, + val plugins: Plugins, val autoPlugins: Seq[AutoPlugin]) extends ProjectDefinition[PR] { lazy val aggregate = aggregate0 lazy val dependencies = dependencies0 lazy val delegates = delegates0 lazy val settings = settings0 - + Dag.topologicalSort(configurations)(_.extendsConfigs) // checks for cyclic references here instead of having to do it in Scope.delegates } + // TODO: add parameter for plugins in 0.14.0 + // TODO: Modify default settings to be the core settings, and automatically add the IvyModule + JvmPlugins. 
def apply(id: String, base: File, aggregate: => Seq[ProjectReference] = Nil, dependencies: => Seq[ClasspathDep[ProjectReference]] = Nil, - delegates: => Seq[ProjectReference] = Nil, settings: => Seq[Def.Setting[_]] = defaultSettings, configurations: Seq[Configuration] = Configurations.default, + delegates: => Seq[ProjectReference] = Nil, settings: => Seq[Def.Setting[_]] = Nil, configurations: Seq[Configuration] = Nil, auto: AddSettings = AddSettings.allDefaults): Project = - { - validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg)) - new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto) with Project - } + unresolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Plugins.empty, Nil) // Note: JvmModule/IvyModule auto included... /** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not.*/ def validProjectID(id: String): Option[String] = DefaultParsers.parse(id, DefaultParsers.ID).left.toOption @@ -185,10 +229,25 @@ object Project extends ProjectExtra * This is a best effort implementation, since valid characters are not documented or consistent.*/ def normalizeModuleID(id: String): String = normalizeBase(id) + @deprecated("Will be removed.", "0.13.2") def resolved(id: String, base: File, aggregate: => Seq[ProjectRef], dependencies: => Seq[ResolvedClasspathDependency], delegates: => Seq[ProjectRef], settings: Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings): ResolvedProject = - new ProjectDef[ProjectRef](id, base, aggregate, dependencies, delegates, settings, configurations, auto) with ResolvedProject + resolved(id, base, aggregate, dependencies, delegates, settings, configurations, auto, Plugins.empty, Nil) + private def resolved(id: String, base: File, aggregate: => Seq[ProjectRef], dependencies: => Seq[ClasspathDep[ProjectRef]], + delegates: => Seq[ProjectRef], settings: 
Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings, + plugins: Plugins, autoPlugins: Seq[AutoPlugin]): ResolvedProject = + new ProjectDef[ProjectRef](id, base, aggregate, dependencies, delegates, settings, configurations, auto, plugins, autoPlugins) with ResolvedProject + + private def unresolved(id: String, base: File, aggregate: => Seq[ProjectReference], dependencies: => Seq[ClasspathDep[ProjectReference]], + delegates: => Seq[ProjectReference], settings: => Seq[Def.Setting[_]], configurations: Seq[Configuration], auto: AddSettings, + plugins: Plugins, autoPlugins: Seq[AutoPlugin]): Project = + { + validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg)) + new ProjectDef[ProjectReference](id, base, aggregate, dependencies, delegates, settings, configurations, auto, plugins, autoPlugins) with Project + } + + @deprecated("0.13.2", "Use Defaults.coreDefaultSettings instead, combined with AutoPlugins.") def defaultSettings: Seq[Def.Setting[_]] = Defaults.defaultSettings final class Constructor(p: ProjectReference) { @@ -307,7 +366,7 @@ object Project extends ProjectExtra def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])(implicit display: Show[ScopedKey[_]]): String = { val scoped = ScopedKey(scope,key) - + val data = scopedKeyData(structure, scope, key) map {_.description} getOrElse {"No entry for key."} val description = key.description match { case Some(desc) => "Description:\n\t" + desc + "\n"; case None => "" } @@ -413,7 +472,7 @@ object Project extends ProjectExtra import DefaultParsers._ val loadActionParser = token(Space ~> ("plugins" ^^^ Plugins | "return" ^^^ Return)) ?? 
Current - + val ProjectReturn = AttributeKey[List[File]]("project-return", "Maintains a stack of builds visited using reload.") def projectReturn(s: State): List[File] = getOrNil(s, ProjectReturn) def inPluginProject(s: State): Boolean = projectReturn(s).toList.length > 1 diff --git a/main/src/main/scala/sbt/SessionSettings.scala b/main/src/main/scala/sbt/SessionSettings.scala index caff96b56..81342477c 100755 --- a/main/src/main/scala/sbt/SessionSettings.scala +++ b/main/src/main/scala/sbt/SessionSettings.scala @@ -110,7 +110,7 @@ object SessionSettings val RangePosition(_, r@LineRange(start, end)) = s.pos settings find (_._1.key == s.key) match { case Some(ss@(ns, newLines)) if !ns.init.dependencies.contains(ns.key) => - val shifted = ns withPos RangePosition(path, LineRange(start - offs, start - offs + 1)) + val shifted = ns withPos RangePosition(path, LineRange(start - offs, start - offs + newLines.size)) (offs + end - start - newLines.size, shifted::olds, ss::repl, lineMap + (start -> (end, newLines))) case _ => val shifted = s withPos RangePosition(path, r shift -offs) diff --git a/main/src/main/scala/sbt/plugins/CorePlugin.scala b/main/src/main/scala/sbt/plugins/CorePlugin.scala new file mode 100644 index 000000000..9702046c2 --- /dev/null +++ b/main/src/main/scala/sbt/plugins/CorePlugin.scala @@ -0,0 +1,19 @@ +package sbt +package plugins + +import Def.Setting + +/** + * Plugin for core sbt-isms. + * + * Can control task-level paralleism, logging, etc. 
+ */ +object CorePlugin extends AutoPlugin { + // This is included by default + override def trigger = allRequirements + + override lazy val projectSettings: Seq[Setting[_]] = + Defaults.coreDefaultSettings + override lazy val globalSettings: Seq[Setting[_]] = + Defaults.globalSbtCore +} \ No newline at end of file diff --git a/main/src/main/scala/sbt/plugins/IvyPlugin.scala b/main/src/main/scala/sbt/plugins/IvyPlugin.scala new file mode 100644 index 000000000..43239bdbf --- /dev/null +++ b/main/src/main/scala/sbt/plugins/IvyPlugin.scala @@ -0,0 +1,26 @@ +package sbt +package plugins + +import Def.Setting + +/** + * Plugin that enables resolving artifacts via ivy. + * + * Core Tasks + * - `update` + * - `makePom` + * - `publish` + * - `artifacts` + * - `publishedArtifacts` + */ +object IvyPlugin extends AutoPlugin { + // We are automatically included on everything that has the global module, + // which is automatically included on everything. + override def requires = CorePlugin + override def trigger = allRequirements + + override lazy val projectSettings: Seq[Setting[_]] = + Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings + override lazy val globalSettings: Seq[Setting[_]] = + Defaults.globalIvyCore +} diff --git a/main/src/main/scala/sbt/plugins/JvmPlugin.scala b/main/src/main/scala/sbt/plugins/JvmPlugin.scala new file mode 100644 index 000000000..e3c20056a --- /dev/null +++ b/main/src/main/scala/sbt/plugins/JvmPlugin.scala @@ -0,0 +1,37 @@ +package sbt +package plugins + +import Def.Setting + +/** A plugin representing the ability to build a JVM project. + * + * Core tasks/keys: + * - `run` + * - `test` + * - `compile` + * - `fullClasspath` + * Core configurations + * - `Test` + * - `Compile` + */ +object JvmPlugin extends AutoPlugin { + // We are automatically enabled for any IvyModule project. We also require its settings + // for ours to work. 
+ override def requires = IvyPlugin + override def trigger = allRequirements + + override lazy val projectSettings: Seq[Setting[_]] = + Defaults.runnerSettings ++ + Defaults.paths ++ + Classpaths.jvmPublishSettings ++ + Classpaths.jvmBaseSettings ++ + Defaults.projectTasks ++ + Defaults.packageBase ++ + Defaults.compileBase ++ + Defaults.defaultConfigs + override lazy val globalSettings: Seq[Setting[_]] = + Defaults.globalJvmCore + + override def projectConfigurations: Seq[Configuration] = + Configurations.default +} \ No newline at end of file diff --git a/main/src/test/scala/PluginsTest.scala b/main/src/test/scala/PluginsTest.scala new file mode 100644 index 000000000..eed8a1143 --- /dev/null +++ b/main/src/test/scala/PluginsTest.scala @@ -0,0 +1,90 @@ +package sbt + +import java.io.File +import org.specs2._ +import mutable.Specification + +object PluginsTest extends Specification +{ + import AI._ + + "Auto plugin" should { + "enable plugins with trigger=allRequirements AND requirements met" in { + deducePlugin(A && B, log) must contain(Q) + } + "enable transive plugins with trigger=allRequirements AND requirements met" in { + deducePlugin(A && B, log) must contain(R) + } + "order enable plugins after required plugins" in { + val ns = deducePlugin(A && B, log) + ( (ns indexOf Q) must beGreaterThan(ns indexOf A) ) and + ( (ns indexOf Q) must beGreaterThan(ns indexOf B) ) and + ( (ns indexOf R) must beGreaterThan(ns indexOf A) ) and + ( (ns indexOf R) must beGreaterThan(ns indexOf B) ) and + ( (ns indexOf R) must beGreaterThan(ns indexOf Q) ) + } + "not enable plugins with trigger=allRequirements but conflicting requirements" in { + deducePlugin(A && B, log) must not contain(S) + } + "enable plugins that are required by the requested plugins" in { + val ns = deducePlugin(Q, log) + (ns must contain(A)) and + (ns must contain(B)) + } + "throw an AutoPluginException on conflicting requirements" in { + deducePlugin(S, log) must throwAn[AutoPluginException](message = 
"""Contradiction in enabled plugins: + - requested: sbt.AI\$S + - enabled: sbt.AI\$S, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B, sbt.AI\$A + - conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$S""") + } + "generates a detailed report on conflicting requirements" in { + deducePlugin(T && U, log) must throwAn[AutoPluginException](message = """Contradiction in enabled plugins: + - requested: sbt.AI\$T && sbt.AI\$U + - enabled: sbt.AI\$U, sbt.AI\$T, sbt.AI\$A, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B + - conflict: sbt.AI\$Q is enabled by sbt.AI\$A && sbt.AI\$B; required by sbt.AI\$T, sbt.AI\$R; excluded by sbt.AI\$U + - conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$T""") + } + } +} + +object AI +{ + lazy val allPlugins: List[AutoPlugin] = List(A, B, Q, R, S, T, U) + lazy val deducePlugin = Plugins.deducer(allPlugins) + lazy val log = Logger.Null + + object A extends AutoPlugin + object B extends AutoPlugin + + object Q extends AutoPlugin + { + override def requires: Plugins = A && B + override def trigger = allRequirements + } + + object R extends AutoPlugin + { + override def requires = Q + override def trigger = allRequirements + } + + object S extends AutoPlugin + { + override def requires = Q && !R + override def trigger = allRequirements + } + + // This is an opt-in plugin with a requirement + // Unless explicitly loaded by the build user, this will not be activated. + object T extends AutoPlugin + { + override def requires = Q && !R + } + + // This is an opt-in plugin with a requirement + // Unless explicitly loaded by the build user, this will not be activated. 
+ object U extends AutoPlugin + { + override def requires = A && !Q + } +} diff --git a/project/Sbt.scala b/project/Sbt.scala index 83a85259e..537d4881d 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -14,9 +14,9 @@ object Sbt extends Build override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.2-SNAPSHOT", + version := "0.13.5-SNAPSHOT", publishArtifact in packageDoc := false, - scalaVersion := "2.10.3", + scalaVersion := "2.10.4", publishMavenStyle := false, componentID := None, crossPaths := false, @@ -73,6 +73,8 @@ object Sbt extends Build lazy val datatypeSub = baseProject(utilPath /"datatype", "Datatype Generator") dependsOn(ioSub) // cross versioning lazy val crossSub = baseProject(utilPath / "cross", "Cross") settings(inConfig(Compile)(Transform.crossGenSettings): _*) + // A logic with restricted negation as failure for a unique, stable model + lazy val logicSub = testedBaseProject(utilPath / "logic", "Logic").dependsOn(collectionSub, relationSub) /* **** Intermediate-level Modules **** */ @@ -130,7 +132,7 @@ object Sbt extends Build completeSub, classpathSub, stdTaskSub, processSub) settings( sbinary ) // The main integration project for sbt. It brings all of the subsystems together, configures them, and provides for overriding conventions. 
- lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn(actionsSub, mainSettingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, processSub, runSub, commandSub) settings(scalaXml) + lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn(actionsSub, mainSettingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, logicSub, processSub, runSub, commandSub) settings(scalaXml) // Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object // technically, we need a dependency on all of mainSub's dependencies, but we don't do that since this is strictly an integration project @@ -276,7 +278,7 @@ object Sbt extends Build artifact in (Compile, packageSrc) := Artifact(srcID).copy(configurations = Compile :: Nil).extra("e:component" -> srcID) ) def compilerSettings = Seq( - libraryDependencies <+= scalaVersion( "org.scala-lang" % "scala-compiler" % _ % "test"), + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test"), unmanagedJars in Test <<= (packageSrc in compileInterfaceSub in Compile).map(x => Seq(x).classpath) ) def precompiled(scalav: String): Project = baseProject(compilePath / "interface", "Precompiled " + scalav.replace('.', '_')) dependsOn(interfaceSub) settings(precompiledSettings : _*) settings( diff --git a/project/Util.scala b/project/Util.scala index a5b0cbb4f..125d46b81 100644 --- a/project/Util.scala +++ b/project/Util.scala @@ -172,13 +172,15 @@ object Common lazy val httpclient = lib("commons-httpclient" % "commons-httpclient" % "3.1") lazy val jsch = lib("com.jcraft" % "jsch" % "0.1.46" intransitive() ) lazy val sbinary = libraryDependencies <+= Util.nightly211(n => "org.scala-tools.sbinary" % "sbinary" % "0.4.2" cross(if(n) CrossVersion.full else CrossVersion.binary)) - lazy val scalaCompiler = libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ ) + lazy val scalaCompiler = 
libraryDependencies <+= scalaVersion(sv => "org.scala-lang" % "scala-compiler" % sv) lazy val testInterface = lib("org.scala-sbt" % "test-interface" % "1.0") - def libModular(name: String) = libraryDependencies <++= (scalaVersion, scalaOrganization)( (sv,o) => - if(sv.startsWith("2.11.")) (o % name % sv) :: Nil else Nil - ) - lazy val scalaXml = libModular("scala-xml") - lazy val scalaParsers = libModular("scala-parser-combinators") + private def scala211Module(name: String, moduleVersion: String) = + libraryDependencies <++= (scalaVersion)( scalaVersion => + if (scalaVersion startsWith "2.11.") ("org.scala-lang.modules" %% name % moduleVersion) :: Nil + else Nil + ) + lazy val scalaXml = scala211Module("scala-xml", "1.0.0-RC7") + lazy val scalaParsers = scala211Module("scala-parser-combinators", "1.0.0-RC5") } object Licensed { diff --git a/run/src/main/scala/sbt/TrapExit.scala b/run/src/main/scala/sbt/TrapExit.scala index c61df5ab7..cf9eb87bd 100644 --- a/run/src/main/scala/sbt/TrapExit.scala +++ b/run/src/main/scala/sbt/TrapExit.scala @@ -413,6 +413,15 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM private def isRealExit(element: StackTraceElement): Boolean = element.getClassName == "java.lang.Runtime" && element.getMethodName == "exit" + // These are overridden to do nothing because there is a substantial filesystem performance penalty + // when there is a SecurityManager defined. The default implementations of these construct a + // FilePermission, and its initialization involves canonicalization, which is expensive. 
+ override def checkRead(file: String) {} + override def checkRead(file: String, context: AnyRef) {} + override def checkWrite(file: String) {} + override def checkDelete(file: String) {} + override def checkExec(cmd: String) {} + override def checkPermission(perm: Permission) { if(delegateManager ne null) diff --git a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala index d9aec4fe0..6ac5b8a06 100644 --- a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala +++ b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/def/Build.scala @@ -5,7 +5,7 @@ object B extends Build { override def settings = super.settings ++ Seq( organization := "org.example", - version := "2.0" + version := "2.0-SNAPSHOT" ) lazy val root = proj("root", ".") aggregate(a,b) diff --git a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt index b458ab994..943590924 100644 --- a/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt +++ b/sbt/src/sbt-test/dependency-management/cache-resolver/changes/use/build.sbt @@ -4,6 +4,6 @@ organization := "org.example" version := "1.0" -libraryDependencies += "org.example" % "b" % "2.0" +libraryDependencies += "org.example" % "b" % "2.0-SNAPSHOT" ivyPaths <<= ivyPaths in ThisBuild \ No newline at end of file diff --git a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala index 961edec25..8c26070cd 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala +++ b/sbt/src/sbt-test/dependency-management/make-pom/project/MakePomTest.scala @@ -8,8 +8,10 @@ object MakePomTest extends Build readPom <<= makePom map XML.loadFile, TaskKey[Unit]("check-pom") <<= checkPom, 
TaskKey[Unit]("check-extra") <<= checkExtra, + TaskKey[Unit]("check-version-plus-mapping") <<= checkVersionPlusMapping, resolvers += Resolver.sonatypeRepo("snapshots"), - makePomConfiguration ~= { _.copy(extra = ) } + makePomConfiguration ~= { _.copy(extra = ) }, + libraryDependencies += "com.google.code.findbugs" % "jsr305" % "1.3.+" ) val readPom = TaskKey[Elem]("read-pom") @@ -33,6 +35,17 @@ object MakePomTest extends Build if(extra.isEmpty) error("'" + extraTagName + "' not found in generated pom.xml.") else () } + lazy val checkVersionPlusMapping = (readPom) map { (pomXml) => + var found = false + for { + dep <- pomXml \ "dependencies" \ "dependency" + if (dep \ "artifactId").text == "jsr305" + // TODO - Ignore space here. + if (dep \ "version").text != "[1.3,1.4)" + } sys.error(s"Found dependency with invalid maven version: $dep") + () + } + lazy val checkPom = (readPom, fullResolvers) map { (pomXML, ivyRepositories) => checkProject(pomXML) withRepositories(pomXML) { repositoriesElement => diff --git a/sbt/src/sbt-test/dependency-management/make-pom/test b/sbt/src/sbt-test/dependency-management/make-pom/test index 3d7f79218..4e3cfe973 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom/test +++ b/sbt/src/sbt-test/dependency-management/make-pom/test @@ -1,2 +1,3 @@ > check-pom -> check-extra \ No newline at end of file +> check-extra +> check-version-plus-mapping \ No newline at end of file diff --git a/sbt/src/sbt-test/project/auto-plugins/build.sbt b/sbt/src/sbt-test/project/auto-plugins/build.sbt new file mode 100644 index 000000000..7039ed235 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins/build.sbt @@ -0,0 +1,38 @@ +// disablePlugins(Q) will prevent R from being auto-added +lazy val projA = project.addPlugins(A, B).disablePlugins(Q) + +// without B, Q is not added +lazy val projB = project.addPlugins(A) + +// with both A and B, Q is selected, which in turn selects R, but not S +lazy val projC = project.addPlugins(A, B) + +// with no 
natures defined, nothing is auto-added +lazy val projD = project + +// with S selected, Q is loaded automatically, which in turn selects R +lazy val projE = project.addPlugins(S) + +check := { + val adel = (del in projA).?.value // should be None + same(adel, None, "del in projA") + val bdel = (del in projB).?.value // should be None + same(bdel, None, "del in projB") + val ddel = (del in projD).?.value // should be None + same(ddel, None, "del in projD") +// + val buildValue = (demo in ThisBuild).value + same(buildValue, "build 0", "demo in ThisBuild") + val globalValue = (demo in Global).value + same(globalValue, "global 0", "demo in Global") + val projValue = (demo in projC).value + same(projValue, "project projC Q R", "demo in projC") + val qValue = (del in projC in q).value + same(qValue, " Q R", "del in projC in q") + val optInValue = (del in projE in q).value + same(optInValue, " Q S R", "del in projE in q") +} + +def same[T](actual: T, expected: T, label: String) { + assert(actual == expected, s"Expected '$expected' for `$label`, got '$actual'") +} diff --git a/sbt/src/sbt-test/project/auto-plugins/project/Q.scala b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala new file mode 100644 index 000000000..f135d444b --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins/project/Q.scala @@ -0,0 +1,85 @@ +package sbttest // you need package http://stackoverflow.com/questions/9822008/ + + import sbt._ + import sbt.Keys.{name, resolvedScoped} + import java.util.concurrent.atomic.{AtomicInteger => AInt} + +object Imports +{ + object A extends AutoPlugin + object B extends AutoPlugin + object E extends AutoPlugin + + lazy val q = config("q") + lazy val p = config("p").extend(q) + + lazy val demo = settingKey[String]("A demo setting.") + lazy val del = settingKey[String]("Another demo setting.") + + lazy val check = settingKey[Unit]("Verifies settings are as they should be.") +} + +object X extends AutoPlugin { + val autoImport = Imports +} + + import Imports._ 
+ +object D extends AutoPlugin { + override def requires: Plugins = E + override def trigger = allRequirements +} + +object Q extends AutoPlugin +{ + override def requires: Plugins = A && B + override def trigger = allRequirements + + override def projectConfigurations: Seq[Configuration] = + p :: + q :: + Nil + + override def projectSettings: Seq[Setting[_]] = + (demo := s"project ${name.value}") :: + (del in q := " Q") :: + Nil + + override def buildSettings: Seq[Setting[_]] = + (demo := s"build ${buildCount.getAndIncrement}") :: + Nil + + override def globalSettings: Seq[Setting[_]] = + (demo := s"global ${globalCount.getAndIncrement}") :: + Nil + + // used to ensure the build-level and global settings are only added once + private[this] val buildCount = new AInt(0) + private[this] val globalCount = new AInt(0) +} + +object R extends AutoPlugin +{ + // NOTE - Only plugins themselves support exclusions... + override def requires = Q + override def trigger = allRequirements + + override def projectSettings = Seq( + // tests proper ordering: R requires Q, so Q settings should come first + del in q += " R", + // tests that configurations are properly registered, enabling delegation from p to q + demo += (del in p).value + ) +} + +// This is an opt-in plugin with a requirement +// Unless explicitly loaded by the build user, this will not be activated. 
+object S extends AutoPlugin +{ + override def requires = Q + override def trigger = noTrigger + + override def projectSettings = Seq( + del in q += " S" + ) +} diff --git a/sbt/src/sbt-test/project/auto-plugins/test b/sbt/src/sbt-test/project/auto-plugins/test new file mode 100644 index 000000000..15675b169 --- /dev/null +++ b/sbt/src/sbt-test/project/auto-plugins/test @@ -0,0 +1 @@ +> check diff --git a/sbt/src/sbt-test/project/auto-settings/project/P.scala b/sbt/src/sbt-test/project/auto-settings/project/P.scala index eddc49a76..3f1433d6d 100644 --- a/sbt/src/sbt-test/project/auto-settings/project/P.scala +++ b/sbt/src/sbt-test/project/auto-settings/project/P.scala @@ -6,22 +6,22 @@ object B extends Build { // version should be from explicit/a.txt - lazy val root = project("root", "1.4") autoSettings( userSettings, sbtFiles(file("explicit/a.txt")) ) + lazy val root = project("root", "1.4") settingSets( buildScalaFiles, userSettings, sbtFiles(file("explicit/a.txt")) ) // version should be from global/user.sbt - lazy val a = project("a", "1.1") autoSettings( userSettings ) + lazy val a = project("a", "1.1") settingSets( buildScalaFiles, userSettings ) // version should be the default 0.1-SNAPSHOT - lazy val b = project("b", "0.1-SNAPSHOT") autoSettings() + lazy val b = project("b", "0.1-SNAPSHOT") settingSets(buildScalaFiles) // version should be from the explicit settings call - lazy val c = project("c", "0.9") settings(version := "0.9") autoSettings() + lazy val c = project("c", "0.9") settings(version := "0.9") settingSets(buildScalaFiles) // version should be from d/build.sbt - lazy val d = project("d", "1.3") settings(version := "0.9") autoSettings( defaultSbtFiles ) + lazy val d = project("d", "1.3") settings(version := "0.9") settingSets( buildScalaFiles, defaultSbtFiles ) // version should be from global/user.sbt - lazy val e = project("e", "1.1") settings(version := "0.9") autoSettings( defaultSbtFiles, sbtFiles(file("../explicit/a.txt")), userSettings ) 
+ lazy val e = project("e", "1.1") settings(version := "0.9") settingSets( buildScalaFiles, defaultSbtFiles, sbtFiles(file("../explicit/a.txt")), userSettings ) def project(id: String, expectedVersion: String): Project = Project(id, if(id == "root") file(".") else file(id)) settings( TaskKey[Unit]("check") <<= version map { v => diff --git a/sbt/src/sbt-test/project/auto-settings/test b/sbt/src/sbt-test/project/auto-settings/test index 249b0b2f9..dcccfd271 100644 --- a/sbt/src/sbt-test/project/auto-settings/test +++ b/sbt/src/sbt-test/project/auto-settings/test @@ -1,3 +1,5 @@ +> plugins + > root/check > a/check diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala new file mode 100644 index 000000000..dde89c439 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/A.scala @@ -0,0 +1,27 @@ +package sbttest // you need package http://stackoverflow.com/questions/9822008/ + +import sbt._ +import Keys._ + +object C extends AutoPlugin { + object autoImport { + object bN extends AutoPlugin { + override def trigger = allRequirements + } + lazy val check = taskKey[Unit]("Checks that the AutoPlugin and Build are automatically added.") + } +} + + import C.autoImport._ + +object A extends AutoPlugin { + override def requires = bN + override def trigger = allRequirements + override def projectSettings = Seq( + check := {} + ) +} + +object B extends Build { + lazy val extra = project.addPlugins(bN) +} diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt b/sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt new file mode 100644 index 000000000..f8a8d32b8 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/changes/define/build.sbt @@ -0,0 +1,3 @@ +sbtPlugin := true + +name := "demo-plugin" diff --git a/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt b/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt new 
file mode 100644 index 000000000..795dff137 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/changes/use/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("org.example" % "demo-plugin" % "3.4-SNAPSHOT") diff --git a/sbt/src/sbt-test/project/binary-plugin/common.sbt b/sbt/src/sbt-test/project/binary-plugin/common.sbt new file mode 100644 index 000000000..4b30c03d6 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/common.sbt @@ -0,0 +1,7 @@ +organization in ThisBuild := "org.example" + +// We have to use snapshot because this is publishing to our local ivy cache instead of +// an integration cache, so we're in danger land. +version in ThisBuild := "3.4-SNAPSHOT" + + diff --git a/sbt/src/sbt-test/project/binary-plugin/test b/sbt/src/sbt-test/project/binary-plugin/test new file mode 100644 index 000000000..169511975 --- /dev/null +++ b/sbt/src/sbt-test/project/binary-plugin/test @@ -0,0 +1,12 @@ +# First we define the plugin project and publish it +$ copy-file changes/define/build.sbt build.sbt +$ copy-file changes/define/A.scala A.scala + +# reload implied +> publishLocal + +# Now we remove the source code and define a project which uses the build. 
+$ delete build.sbt A.scala +$ copy-file changes/use/plugins.sbt project/plugins.sbt +> reload +> check diff --git a/sbt/src/sbt-test/project/default-settings/build.sbt b/sbt/src/sbt-test/project/default-settings/build.sbt new file mode 100644 index 000000000..a00c6c634 --- /dev/null +++ b/sbt/src/sbt-test/project/default-settings/build.sbt @@ -0,0 +1,8 @@ + +val root = Project("root", file("."), settings=Defaults.defaultSettings) + + +TaskKey[Unit]("checkArtifacts", "test") := { + val arts = packagedArtifacts.value + assert(!arts.isEmpty, "Packaged artifacts must not be empty!") +} \ No newline at end of file diff --git a/sbt/src/sbt-test/project/default-settings/test b/sbt/src/sbt-test/project/default-settings/test new file mode 100644 index 000000000..0f165ede1 --- /dev/null +++ b/sbt/src/sbt-test/project/default-settings/test @@ -0,0 +1 @@ +> checkArtifacts \ No newline at end of file diff --git a/sbt/src/sbt-test/project/delegate_config/project/Build.scala b/sbt/src/sbt-test/project/delegate_config/project/Build.scala index 9c6cdc35e..25318261b 100644 --- a/sbt/src/sbt-test/project/delegate_config/project/Build.scala +++ b/sbt/src/sbt-test/project/delegate_config/project/Build.scala @@ -1,6 +1,7 @@ import sbt._ import complete.DefaultParsers._ import Keys._ +import AddSettings._ object B extends Build { @@ -11,8 +12,11 @@ object B extends Build val sample = SettingKey[Int]("sample") val check = TaskKey[Unit]("check") - lazy val root = Project("root", file("."), settings = Nil) - lazy val sub = Project("sub", file("."), delegates = root :: Nil, configurations = newConfig :: Nil, settings = incSample :: checkTask(4) :: Nil) + lazy val root = Project("root", file("."), settings = Nil).settingSets() + lazy val sub = Project("sub", file("."), + delegates = root :: Nil, + configurations = newConfig :: Nil, + settings = incSample :: checkTask(4) :: Nil).settingSets(buildScalaFiles) override lazy val settings = (sample in newConfig := 3) :: checkTask(3) :: diff --git 
a/sbt/src/sbt-test/project/multi/changes/Build1.scala b/sbt/src/sbt-test/project/multi/changes/Build1.scala index 0e7156c39..1de02bfcc 100644 --- a/sbt/src/sbt-test/project/multi/changes/Build1.scala +++ b/sbt/src/sbt-test/project/multi/changes/Build1.scala @@ -1,5 +1,6 @@ import sbt._ import Keys.name +import AddSettings._ object TestBuild extends Build { @@ -7,5 +8,5 @@ object TestBuild extends Build proj("a", "."), proj("b", "b") ) - def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ) -} \ No newline at end of file + def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).settingSets(buildScalaFiles) +} diff --git a/sbt/src/sbt-test/project/multi/changes/Build2.scala b/sbt/src/sbt-test/project/multi/changes/Build2.scala index 5858fa425..27c2314b5 100644 --- a/sbt/src/sbt-test/project/multi/changes/Build2.scala +++ b/sbt/src/sbt-test/project/multi/changes/Build2.scala @@ -11,5 +11,6 @@ object SecondBuild extends MakeBuild } trait MakeBuild extends Build { - def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ) -} \ No newline at end of file + import AddSettings._ + def proj(id: String, dir: String) = Project(id, file(dir), settings = Seq( name := id ) ).settingSets(buildScalaFiles, defaultSbtFiles) +} diff --git a/sbt/src/sbt-test/project/sbt-file-projects/build.sbt b/sbt/src/sbt-test/project/sbt-file-projects/build.sbt index 0afc94a34..500a32076 100644 --- a/sbt/src/sbt-test/project/sbt-file-projects/build.sbt +++ b/sbt/src/sbt-test/project/sbt-file-projects/build.sbt @@ -2,7 +2,7 @@ val a = "a" val f = file("a") val g = taskKey[Unit]("A task in the root project") -val p = Project(a, f).autoSettings(AddSettings.sbtFiles( file("a.sbt") )) +val p = Project(a, f).settingSets(AddSettings.autoPlugins, AddSettings.sbtFiles( file("a.sbt") )) val b = Project("b", file("b")) diff --git a/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala 
b/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala index c7a3ee533..12f45d9ec 100644 --- a/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala +++ b/sbt/src/sbt-test/project/sbt-file-projects/changes/Restricted.scala @@ -2,6 +2,7 @@ import sbt._ import Keys._ object B extends Build { - lazy val root = Project("root", file(".")).autoSettings( + lazy val root = Project("root", file(".")).settingSets( + AddSettings.autoPlugins, AddSettings.sbtFiles( file("other.sbt") )) // ignore build.sbt -} \ No newline at end of file +} diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 b/sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 new file mode 100644 index 000000000..6363b1678 --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/build.check.1 @@ -0,0 +1,10 @@ +name := "projectName" + +k1 := { +// +// +} + +k2 := { + println("This is k2") +} diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/build.sbt b/sbt/src/sbt-test/project/session-update-from-cmd/build.sbt new file mode 100644 index 000000000..8bd18ad5c --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/build.sbt @@ -0,0 +1,7 @@ +name := "projectName" + +k1 := {} + +k2 := { + println("This is k2") +} diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala b/sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala new file mode 100644 index 000000000..c7c6c8238 --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/project/build.scala @@ -0,0 +1,25 @@ +import sbt._ +import Keys._ + +object build extends Build { + lazy val k1 = taskKey[Unit]("") + lazy val k2 = taskKey[Unit]("") + + val UpdateK1 = Command.command("UpdateK1") { st: State => + val ex = Project extract st + import ex._ + val session2 = BuiltinCommands.setThis(st, ex, Seq(k1 := {}), """k1 := { + |// + |// + |}""".stripMargin).session + val st1 = BuiltinCommands.reapply(session2, structure, 
st) + // SessionSettings.writeSettings(ex.currentRef, session2, ex.session.original, ex.structure) + SessionSettings.saveAllSettings(st1) + } + + lazy val root = Project("root", file(".")) settings( + commands += UpdateK1 + ) +} + +// vim: set ts=4 sw=4 et: diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/test b/sbt/src/sbt-test/project/session-update-from-cmd/test new file mode 100644 index 000000000..d29ba8270 --- /dev/null +++ b/sbt/src/sbt-test/project/session-update-from-cmd/test @@ -0,0 +1,4 @@ +> UpdateK1 +$ must-mirror build.sbt build.check.1 +> UpdateK1 +$ must-mirror build.sbt build.check.1 diff --git a/sbt/src/sbt-test/project/setting-macro/build.sbt b/sbt/src/sbt-test/project/setting-macro/build.sbt index d48af8bc9..c07c3c0ad 100644 --- a/sbt/src/sbt-test/project/setting-macro/build.sbt +++ b/sbt/src/sbt-test/project/setting-macro/build.sbt @@ -15,3 +15,20 @@ demo := { val (n, s) = parser.parsed s * n } + +// Tests for correct Symbol owner structure in the lifted qualifiers of +// the `.value` macro within a task macro. 
(#1150) +val key1 = taskKey[Unit]("") + +key1 := { + val foo = (sourceDirectory in Compile).apply(base => base).value.get + testFrameworks.value.flatMap(f => + None.map(_ => f) + ) + () +} + +// https://github.com/sbt/sbt/issues/1107 +def appcfgTask(a: String, b: String) = Def.task("") + +TaskKey[Unit]("test") := appcfgTask(b = "", a = "").value diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala new file mode 100644 index 000000000..1d3a976a8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/A.scala @@ -0,0 +1,3 @@ +object A { + def `=` = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala new file mode 100644 index 000000000..7cbd62e1d --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/B.scala @@ -0,0 +1,3 @@ +object B extends App { + println(A.`=`) +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala new file mode 100644 index 000000000..b473714fa --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/changes/A.scala @@ -0,0 +1,3 @@ +object A { + def asdf = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test new file mode 100644 index 000000000..d4d386615 --- /dev/null +++ 
b/sbt/src/sbt-test/source-dependencies/backtick-quoted-names/test @@ -0,0 +1,7 @@ +> compile + +# rename def with symbolic name (`=`) +$ copy-file changes/A.scala A.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala new file mode 100644 index 000000000..f67b6f474 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A1.scala @@ -0,0 +1 @@ +object A { final val x = 1 } diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala new file mode 100644 index 000000000..4f9396f13 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/A2.scala @@ -0,0 +1 @@ +object A { final val x = 2 } diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala new file mode 100644 index 000000000..058527993 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/changes/B.scala @@ -0,0 +1,4 @@ +object B +{ + def main(args: Array[String]) = assert(args(0).toInt == A.x ) +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending new file mode 100644 index 000000000..61df26ef6 --- /dev/null +++ 
b/sbt/src/sbt-test/source-dependencies/constants-name-hashing/pending @@ -0,0 +1,11 @@ +# Tests if source dependencies are tracked properly +# for compile-time constants (like final vals in top-level objects) +# see https://issues.scala-lang.org/browse/SI-7173 for details +# why compile-time constants can be tricky to track due to early inlining + +$ copy-file changes/B.scala B.scala + +$ copy-file changes/A1.scala A.scala +> run 1 +$ copy-file changes/A2.scala A.scala +> run 2 diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala new file mode 100644 index 000000000..a93bbe535 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/A.scala @@ -0,0 +1,3 @@ +package a + +class A diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala new file mode 100644 index 000000000..0489f4a26 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/B.scala @@ -0,0 +1 @@ +import a.A diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala new file mode 100644 index 000000000..2a93cdef5 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/changes/A.scala @@ -0,0 +1 @@ +package a diff --git a/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test 
b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test new file mode 100644 index 000000000..7679ba52c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/import-class-name-hashing/test @@ -0,0 +1,8 @@ +> compile + +# remove class a.A +$ copy-file changes/A.scala A.scala + +# 'import a.A' should now fail in B.scala +# succeeds because scalac doesn't track this dependency +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala new file mode 100644 index 000000000..19633db64 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/changes/Client.scala @@ -0,0 +1,7 @@ +package macro + +object Client { + object RealClient extends Provider { + // Some comment... + } +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt new file mode 100644 index 000000000..75588e23c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/build.sbt @@ -0,0 +1,9 @@ +// Check that a file has not been recompiled during last compilation +InputKey[Unit]("check-not-recompiled") <<= inputTask { (argTask: TaskKey[Seq[String]]) => + (argTask, compile in Compile) map { (args: Seq[String], a: sbt.inc.Analysis) => + assert(args.size == 1) + val fileCompilation = a.apis.internal.collect { case (file, src) if file.name.endsWith(args(0)) => src.compilation }.head + val lastCompilation = a.compilations.allCompilations.last + assert(fileCompilation.startTime != lastCompilation.startTime, "File has been recompiled during last compilation.") + } +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala new file mode 100644 index 
000000000..6351461a7 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Client.scala @@ -0,0 +1,7 @@ +package macro + +object Client { + object RealClient extends Provider { + + } +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala new file mode 100644 index 000000000..be7a40427 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-client/src/main/scala/Foo.scala @@ -0,0 +1,5 @@ +package macro + +object Foo { + val c = Client.RealClient +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala new file mode 100644 index 000000000..14523f149 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/macro-provider/src/main/scala/Provider.scala @@ -0,0 +1,7 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +abstract class Provider { + def notImplementedMacro = macro ??? 
+} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala b/sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala new file mode 100644 index 000000000..27a684ef8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ )//, + //incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/inherited-macros/test b/sbt/src/sbt-test/source-dependencies/inherited-macros/test new file mode 100644 index 000000000..9a6e7dfcf --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/inherited-macros/test @@ -0,0 +1,12 @@ +> macro-provider/compile + +> macro-client/compile + +# Introduce a comment in Client, which inherits a macro from Provider +$ copy-file changes/Client.scala macro-client/src/main/scala/Client.scala + +> macro-client/compile + +# Object Foo depends on Client via composition, thus a whitespace change to +# Client shouldn't trigger its recompilation +> check-not-recompiled Foo.scala diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ 
b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java new file mode 100644 index 000000000..a3a75fefd --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J1.java @@ -0,0 +1,4 @@ +public class J +{ + public static final int x = 3; +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java new file mode 100644 index 000000000..8ff2e24c6 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/J2.java @@ -0,0 +1,4 @@ +public class J +{ + public static final String x = "3"; +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala new file mode 100644 index 000000000..45436972b --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/changes/S.scala @@ -0,0 +1,4 @@ +object S +{ + val y: Int = J.x +} diff --git a/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending new file mode 100644 index 000000000..42890ca74 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/java-static-name-hashing/pending @@ -0,0 +1,24 @@ +# When a Java class is loaded from a class file and not parsed from a source file, scalac reports +# the statics as an object without a file and so the Analyzer must know to look for the +# object's linked class. +# This test verifies this happens. +# The test compiles a Java class with a static field. 
+# It then adds a Scala object that references the static field. Because the object only depends on a +# static member and because the Java source is not included in the compilation (since it didn't change), +# this triggers the special case above. + +# add and compile the Java source +$ copy-file changes/J1.java src/main/java/J.java +> compile + +# add and compile the Scala source +$ copy-file changes/S.scala src/main/scala/S.scala +> compile + +# change the Java source so that a compile error should occur if S.scala is also recompiled (which will happen if the dependency was properly recorded) +$ copy-file changes/J2.java src/main/java/J.java +-> compile + +# verify it should have failed by doing a full recompilation +> clean +-> compile \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala new file mode 100644 index 000000000..d80fd559e --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Client.scala @@ -0,0 +1,5 @@ +package macro + +object Client { + Provider.printTree(Provider.printTree(Foo.str)) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala new file mode 100644 index 000000000..1908f0673 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/Foo.scala @@ -0,0 +1,5 @@ +package macro + +object Foo { + def str: String = "abc" +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala new file mode 100644 index 000000000..e3deb0f43 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-client/changes/Foo.scala @@ -0,0 +1,3 @@ +package macro 
+object Foo { +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala new file mode 100644 index 000000000..facc4a468 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/macro-provider/Provider.scala @@ -0,0 +1,12 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def printTree(arg: Any) = macro printTreeImpl + def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = { + val argStr = arg.tree.toString + val literalStr = c.universe.Literal(c.universe.Constant(argStr)) + c.Expr[String](literalStr) + } +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala new file mode 100644 index 000000000..a5382240f --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ), + incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test new file mode 100644 index 
000000000..231939418 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested/test @@ -0,0 +1,13 @@ +> compile + +# remove `Foo.str` which is an argument to a macro +# (this macro application is itself an argument to another macro) +$ copy-file macro-client/changes/Foo.scala macro-client/Foo.scala + +# we should recompile Foo.scala first and then fail to compile Client.scala due to missing +# `Foo.str` +-> macro-client/compile + +> clean + +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala new file mode 100644 index 000000000..94ad4bcc8 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Client.scala @@ -0,0 +1,5 @@ +package macro + +object Client { + Provider.printTree(Foo.str) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala new file mode 100644 index 000000000..1908f0673 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/Foo.scala @@ -0,0 +1,5 @@ +package macro + +object Foo { + def str: String = "abc" +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala new file mode 100644 index 000000000..e3deb0f43 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-client/changes/Foo.scala @@ -0,0 +1,3 @@ +package macro +object Foo { +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala new file mode 100644 index 000000000..facc4a468 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/macro-provider/Provider.scala @@ -0,0 +1,12 @@ +package macro +import
scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def printTree(arg: Any) = macro printTreeImpl + def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = { + val argStr = arg.tree.toString + val literalStr = c.universe.Literal(c.universe.Constant(argStr)) + c.Expr[String](literalStr) + } +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala new file mode 100644 index 000000000..a5382240f --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ), + incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test new file mode 100644 index 000000000..183aa6c49 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-arg-dep/test @@ -0,0 +1,12 @@ +> compile + +# remove `Foo.str` which is an argument to a macro +$ copy-file macro-client/changes/Foo.scala macro-client/Foo.scala + +# we should recompile Foo.scala first and then fail to compile Client.scala due to missing +# `Foo.str` +-> macro-client/compile + +> clean + +-> compile diff --git 
a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala new file mode 100644 index 000000000..90932d136 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-client/Client.scala @@ -0,0 +1,5 @@ +package macro + +object Client { + Provider.tree(0) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala new file mode 100644 index 000000000..9b6d27676 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/Provider.scala @@ -0,0 +1,8 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def tree(args: Any) = macro treeImpl + def treeImpl(c: Context)(args: c.Expr[Any]) = c.universe.reify(args.splice) +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala new file mode 100644 index 000000000..711989b32 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/macro-provider/changes/Provider.scala @@ -0,0 +1,8 @@ +package macro +import scala.language.experimental.macros +import scala.reflect.macros._ + +object Provider { + def tree(args: Any) = macro treeImpl + def treeImpl(c: Context)(args: c.Expr[Any]) = sys.error("no macro for you!") +} diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending new file mode 100644 index 000000000..b3755d4ee --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/pending @@ -0,0 +1,13 @@ +> compile + +# replace macro with one that throws an error + +$ copy-file macro-provider/changes/Provider.scala 
macro-provider/Provider.scala + +> macro-provider/compile + +-> macro-client/compile + +> clean + +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala new file mode 100644 index 000000000..a5382240f --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/macro-name-hashing/project/build.scala @@ -0,0 +1,29 @@ +import sbt._ +import Keys._ + +object build extends Build { + val defaultSettings = Seq( + libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ), + incOptions := incOptions.value.withNameHashing(true) + ) + + lazy val root = Project( + base = file("."), + id = "macro", + aggregate = Seq(macroProvider, macroClient), + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroProvider = Project( + base = file("macro-provider"), + id = "macro-provider", + settings = Defaults.defaultSettings ++ defaultSettings + ) + + lazy val macroClient = Project( + base = file("macro-client"), + id = "macro-client", + dependencies = Seq(macroProvider), + settings = Defaults.defaultSettings ++ defaultSettings + ) +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala b/sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala new file mode 100644 index 000000000..d91afb5ca --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/A.scala @@ -0,0 +1,8 @@ +object A { + def x = 3 + + def y = { + import B._ + x + } +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala b/sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala new file mode 100644 index 000000000..5e34efa4d --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/B.scala @@ -0,0 +1,3 @@ +object B { +// def x = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt 
b/sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala b/sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala new file mode 100644 index 000000000..4bf188fb2 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/changes/B.scala @@ -0,0 +1,3 @@ +object B { + def x = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/same-file-used-names/test b/sbt/src/sbt-test/source-dependencies/same-file-used-names/test new file mode 100644 index 000000000..781b4aafb --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/same-file-used-names/test @@ -0,0 +1,7 @@ +> compile + +# uncomment definition of `x` that leads to ambiguity error in A +$ copy-file changes/B.scala B.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala new file mode 100644 index 000000000..d17a6e20a --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/A.scala @@ -0,0 +1,3 @@ +object A { + def x: Int = 3 +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala new file mode 100644 index 000000000..635568727 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/B.scala @@ -0,0 +1,4 @@ +object B { + def onX(m: { def x: Int } ) = + m.x +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala new file 
mode 100644 index 000000000..413cd6d63 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/C.scala @@ -0,0 +1,4 @@ +object C { + def main(args: Array[String]) = + println(B.onX(A)) +} \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt new file mode 100644 index 000000000..8a38ef414 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/build.sbt @@ -0,0 +1 @@ +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala new file mode 100644 index 000000000..dc9bbd3c0 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/changes/A.scala @@ -0,0 +1,3 @@ +object A { + def x: Byte = 3 +} diff --git a/sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending new file mode 100644 index 000000000..8c7328ea4 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/struct-name-hashing/pending @@ -0,0 +1,6 @@ +> compile + +# modify A.scala so that it does not conform to the structural type in B.scala +$ copy-file changes/A.scala A.scala + +-> compile \ No newline at end of file diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt new file mode 100644 index 000000000..de908146c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/build.sbt @@ -0,0 +1,38 @@ +logLevel := Level.Debug + +incOptions := incOptions.value.withNameHashing(true) + +// disable sbt's heuristic which recompiles everything in case +// some fraction (e.g.
50%) of files is scheduled to be recompiled +// in this test we want precise information about recompiled files +// which that heuristic would distort +incOptions := incOptions.value.copy(recompileAllFraction = 1.0) + +/* Performs checks related to compilations: + * a) checks in which compilation given set of files was recompiled + * b) checks overall number of compilations performed + */ +TaskKey[Unit]("check-compilations") <<= (compile in Compile, scalaSource in Compile) map { (a: sbt.inc.Analysis, src: java.io.File) => + def relative(f: java.io.File): java.io.File = f.relativeTo(src) getOrElse f + val allCompilations = a.compilations.allCompilations + val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c => + val recompiledFiles = a.apis.internal.collect { + case (file, api) if api.compilation.startTime == c.startTime => relative(file) + } + recompiledFiles.toSet + } + def recompiledFilesInIteration(iteration: Int, fileNames: Set[String]) = { + val files = fileNames.map(new java.io.File(_)) + assert(recompiledFiles(iteration) == files, "%s != %s".format(recompiledFiles(iteration), files)) + } + // Y.scala is compiled only at the beginning as changes to A.scala do not affect it + recompiledFilesInIteration(0, Set("X.scala", "Y.scala")) + // A.scala is changed and recompiled + recompiledFilesInIteration(1, Set("A.scala")) + // change in A.scala causes recompilation of B.scala, C.scala, D.scala which depend transitively + // and by inheritance on A.scala + // X.scala is also recompiled because it depends by member reference on B.scala + // Note that Y.scala is not recompiled because it depends just on X through member reference dependency + recompiledFilesInIteration(2, Set("B.scala", "C.scala", "D.scala")) + assert(allCompilations.size == 3) +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala new file mode 100644 index
000000000..63a2739e1 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/changes/A1.scala @@ -0,0 +1,5 @@ +package test + +class A { + def foo: Int = 23 +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala new file mode 100644 index 000000000..1b0178fd9 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/A.scala @@ -0,0 +1,3 @@ +package test + +class A diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala new file mode 100644 index 000000000..b9913245b --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/B.scala @@ -0,0 +1,3 @@ +package test + +class B extends A diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala new file mode 100644 index 000000000..4ce04f8bf --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/C.scala @@ -0,0 +1,3 @@ +package test + +class C extends B diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala new file mode 100644 index 000000000..eff328ce5 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/D.scala @@ -0,0 +1,3 @@ +package test + +class D extends C diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala new file mode 100644 index 000000000..8c0d9edf8 --- /dev/null +++ 
b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/X.scala @@ -0,0 +1,5 @@ +package test + +class X { + def bar(b: B) = b +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala new file mode 100644 index 000000000..df53c3c5c --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/src/main/scala/Y.scala @@ -0,0 +1,5 @@ +package test + +class Y { + def baz(x: X) = x +} diff --git a/sbt/src/sbt-test/source-dependencies/transitive-memberRef/test b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/test new file mode 100644 index 000000000..395f90229 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/transitive-memberRef/test @@ -0,0 +1,11 @@ +# introduces first compile iteration +> compile +# adds a new method to A which will cause transitive invalidation +# of all source files that inherit from it +# also, all direct dependencies of files that inherit from A will +# be invalidated (in our case that's X.scala) +$ copy-file changes/A1.scala src/main/scala/A.scala +# second iteration +> compile +# check in which compile iteration given source file got recompiled +> check-compilations diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/A.scala b/sbt/src/sbt-test/source-dependencies/type-alias/A.scala new file mode 100644 index 000000000..c0c8794a7 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/A.scala @@ -0,0 +1,4 @@ +object A { + type X = Option[Int] +} + diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/B.scala b/sbt/src/sbt-test/source-dependencies/type-alias/B.scala new file mode 100644 index 000000000..81640ed8d --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/B.scala @@ -0,0 +1,3 @@ +object B { + def y: A.X = Option(3) +} diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/build.sbt 
b/sbt/src/sbt-test/source-dependencies/type-alias/build.sbt new file mode 100644 index 000000000..c5a1099aa --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/build.sbt @@ -0,0 +1,3 @@ +logLevel in compile := Level.Debug + +incOptions := incOptions.value.withNameHashing(true) diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala b/sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala new file mode 100644 index 000000000..53aee1626 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala @@ -0,0 +1,3 @@ +object A { + type X = Int +} diff --git a/sbt/src/sbt-test/source-dependencies/type-alias/test b/sbt/src/sbt-test/source-dependencies/type-alias/test new file mode 100644 index 000000000..f0a7fe8a1 --- /dev/null +++ b/sbt/src/sbt-test/source-dependencies/type-alias/test @@ -0,0 +1,7 @@ +> compile + +# change type alias +$ copy-file changes/A.scala A.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile diff --git a/src/main/conscript/sbt/launchconfig b/src/main/conscript/sbt/launchconfig index 788738650..ff85bfccf 100644 --- a/src/main/conscript/sbt/launchconfig +++ b/src/main/conscript/sbt/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.2-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.5-SNAPSHOT]} class: sbt.xMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} diff --git a/src/main/conscript/scalas/launchconfig b/src/main/conscript/scalas/launchconfig index 75c4138ed..91882ac37 100644 --- a/src/main/conscript/scalas/launchconfig +++ b/src/main/conscript/scalas/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.2-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.5-SNAPSHOT]} class: sbt.ScriptMain components: xsbti,extra cross-versioned: 
${sbt.cross.versioned-false} diff --git a/src/main/conscript/screpl/launchconfig b/src/main/conscript/screpl/launchconfig index 18fbfa911..b4bbf8354 100644 --- a/src/main/conscript/screpl/launchconfig +++ b/src/main/conscript/screpl/launchconfig @@ -4,7 +4,7 @@ [app] org: ${sbt.organization-org.scala-sbt} name: sbt - version: ${sbt.version-read(sbt.version)[0.13.2-SNAPSHOT]} + version: ${sbt.version-read(sbt.version)[0.13.5-SNAPSHOT]} class: sbt.ConsoleMain components: xsbti,extra cross-versioned: ${sbt.cross.versioned-false} diff --git a/src/sphinx/Architecture/Command-Engine.rst b/src/sphinx/Architecture/Command-Engine.rst new file mode 100644 index 000000000..5388dbbae --- /dev/null +++ b/src/sphinx/Architecture/Command-Engine.rst @@ -0,0 +1,5 @@ +================= + Command Engine +================= + +Placeholder for command engine details. \ No newline at end of file diff --git a/src/sphinx/Architecture/Core-Principles.rst b/src/sphinx/Architecture/Core-Principles.rst new file mode 100644 index 000000000..a90a0b73c --- /dev/null +++ b/src/sphinx/Architecture/Core-Principles.rst @@ -0,0 +1,125 @@ +================= + Core Principles +================= + +This document details the core principles overarching sbt's design and code style. Sbt's core principles can +be stated quite simply: + +1. Everything should have a ``Type``, enforced as much as is practical. +2. Dependencies should be **explicit**. +3. Once learned, a concept should hold throughout **all** parts of sbt. +4. Parallel is the default. + +With these principles in mind, let's walk through the core design of sbt. + + +Introduction to build state +=========================== +This is the first piece you hit when starting sbt. Sbt's command engine is the means by which +it processes user requests using the build state. The command engine is essentially a means of applying +**state transformations** on the build state, to execute user requests. 
+ +In sbt, commands are functions that take the current build state (``sbt.State``) and produce the next state. In +other words, they are essentially functions of ``sbt.State => sbt.State``. However, in reality, Commands are +actually string processors which take some string input and act on it, returning the next build state. + +The details of the command engine are covered in :doc:`the command engine section `. + +So, the entirety of sbt is driven off the ``sbt.State`` class. Since this class needs to be resilient in the +face of custom code and plugins, it needs a mechanism to store the state from any potential client. In +dynamic languages, this can be done directly on objects. + +A naive approach in Scala is to use a ``Map``. However, this violates tenet #1: Everything should have a `Type`. +So, sbt defines a new type of map called an ``AttributeMap``. An ``AttributeMap`` is a key-value storage mechanism where +keys are both strings *and* expected `Type`s for their value. + +Here is what the typesafe ``AttributeKey`` key looks like :: + + sealed trait AttributeKey[T] { + /** The label is the identifier for the key and is camelCase by convention. */ + def label: String + /** The runtime evidence for `T` */ + def manifest: Manifest[T] + } + +These keys store both a `label` (``string``) and some runtime type information (``manifest``). To put or get something on +the AttributeMap, we first need to construct one of these keys. Let's look at the basic definition of the ``AttributeMap`` :: + + trait AttributeMap { + /** Gets the value of type `T` associated with the key `k` or `None` if no value is associated. + * If a key with the same label but a different type is defined, this method will return `None`. */ + def get[T](k: AttributeKey[T]): Option[T] + + + /** Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`. + * Any mappings for keys with the same label but different types are unaffected. 
*/ + def put[T](k: AttributeKey[T], value: T): AttributeMap + } + + +Now that there's a definition of what build state is, there needs to be a way to dynamically construct it. In sbt, this is +done through the ``Setting[_]`` sequence. + +Settings Architecture +===================== + +A Setting represents the means of constructing the value of one particular ``AttributeKey[_]`` in the ``AttributeMap`` of build state. A setting consists of two pieces: + +1. The ``AttributeKey[T]`` where the value of the setting should be assigned. +2. An ``Initialize[T]`` object which is able to construct the value for this setting. + +Sbt's initialization time is basically just taking a sequence of these ``Setting[_]`` objects and running their initialization objects and then storing the value into the ``AttributeMap``. This means overwriting an existing value at a key is as easy as appending a +``Setting[_]`` to the end of the sequence which does so. + +Where it gets interesting is that ``Initialize[T]`` can depend on other ``AttributeKey[_]``s in the build state. Each ``Initialize[_]`` +can pull values from any ``AttributeKey[_]`` in the build state's ``AttributeMap`` to compute its value. Sbt ensures a few things +when it comes to ``Initialize[_]`` dependencies: + +1. There can be no circular dependencies +2. If one ``Initialize[_]`` depends on another ``Initialize[_]`` key, then *all* associated ``Initialize[_]`` blocks for that key must + have run before we load the value. + +Let's look at what gets stored for the setting :: + + normalizedName := normalize(name.value) + + + +.. Note: This image comes from a google drawing: https://docs.google.com/a/typesafe.com/drawings/d/1hvE89XVrQiXdSBsgaQgQGTmcO44EBZPg4_0WxKXU7Pw/edit +.. Feel free to request access to modify as appropriate. + +.. image:: overview-setting-example.png + + +Here, a ``Setting[_]`` is constructed that understands it depends on the value in the ``name`` AttributeKey. 
Its initialize block first grabs the value of the ``name`` key, then runs the function normalize on it to compute its value. + +This represents the core mechanism of how to construct sbt's build state. Conceptually, at some point we have a graph of dependencies +and initialization functions which we can use to construct the first build state. Once this is completed, we can then start to process +user requests. + + + +Task Architecture +================= + +The next layer in sbt is around these user requests, or tasks. When a user configures a build, they are defining a set of repeatable +tasks that they can run on their project. Things like ``compile`` or ``test``. These tasks *also* have a dependency graph, where +e.g. the ``test`` task requires that ``compile`` has run before it can successfully execute. + +Sbt defines a class ``Task[T]``. The ``T`` type parameter represents the type of data returned by a task. Remember the tenets of +sbt? "All things have types" and "Dependencies are explicit" both hold true for tasks. Sbt promotes a style of task dependencies that +is closer to functional programming: Return data for your users rather than using shared mutable state. + +Most build tools communicate over the filesystem, and indeed sbt, by necessity, does some of this. However, for stable parallelization it is far better to keep tasks isolated on the filesystem and communicate directly through types. + +Similarly to how a ``Setting[_]`` stores both dependencies and an initialization function, a ``Task[_]`` stores both its +``Task[_]`` dependencies and its behavior (a function). + + + + +TODO - More on ``Task[_]`` + +TODO - Transition into ``InputTask[_]``, rehash Command + +TODO - Transition into Scope. 
\ No newline at end of file diff --git a/src/sphinx/Architecture/Setting-Initialization.rst b/src/sphinx/Architecture/Setting-Initialization.rst new file mode 100644 index 000000000..fdf54e33a --- /dev/null +++ b/src/sphinx/Architecture/Setting-Initialization.rst @@ -0,0 +1,131 @@ +====================== +Setting Initialization +====================== + +This page outlines the mechanisms by which sbt loads settings for a particular build, including the hooks where +users can control the ordering of everything. + +As stated elsewhere, sbt constructs its initialization graph and task graph via ``Setting[_]`` objects. A setting +is something which can take the values stored at other Keys in the build state, and generates a new value for +a particular build key. Sbt converts all registered ``Setting[_]`` objects into a giant linear sequence and +*compiles* them into a task graph. This task graph is then used to execute your build. + +All of sbt's loading semantics are contained within the `Load.scala <../../sxr/sbt/Load.scala.html>` file. It is approximately the following: + +.. Note: This image comes from a google drawing: https://docs.google.com/a/typesafe.com/drawings/d/1Aj_IkOaJpRXJNhrVtVJaS8m-YRcKsympVOj3M2sUz7E/edit +.. Feel free to request access to modify as appropriate. + +.. image:: settings-initialization-load-ordering.png + +The blue circles represent actions happening when sbt loads a project. We can see that sbt performs the following actions in load: + +1. Compile the user-level project (``~/.sbt//``) + a. Load any plugins defined by this project (``~/.sbt//plugins/*.sbt`` and ``~/.sbt//plugins/project/*.scala``) + b. Load all settings defined (``~/.sbt//*.sbt`` and ``~/.sbt//plugins/*.scala``) +2. Compile the current project (``/*.sbt``) +3. All local configurations (``build.sbt``) + + + +Controlling Initialization +========================== + +The order which sbt uses to load settings is configurable at a *project* level. 
This means that we can't control +the order of settings added to Build/Global namespace, but we can control how each project loads, e.g. plugins and ``.sbt`` files. +To do so, use the ``AddSettings`` class :: + + + import sbt._ + import Keys._ + + import AddSettings._ + + object MyOwnOrder extends Build { + // here we load config from a txt file. + lazy val root = project.in(file(".")).settingSets( autoPlugins, buildScalaFiles, sbtFiles(file("silly.txt")) ) + } + +In the above project, we've modified the order of settings to be: + +1. All AutoPlugin settings. +2. All settings defined in the ``project/Build.scala`` file (shown above). +3. All settings found in the ``silly.txt`` file. + +What we've excluded: + +* All settings from the user directory (``~/.sbt/``) +* All ``*.sbt`` settings. + +The AddSettings object provides the following "groups" of settings you can use for ordering: + +``autoPlugins`` + All the ordered settings of plugins after they've gone through dependency resolution +``buildScalaFiles`` + The full sequence of settings defined directly in ``project/*.scala`` builds. +``sbtFiles(*)`` + Specifies the exact setting DSL files to include (files must use the ``.sbt`` file format) +``userSettings`` + All the settings defined in the user directory ``~/.sbt//``. +``defaultSbtFiles`` + Include all local ``*.sbt`` file settings. + + +*Note: Be very careful when reordering settings. It's easy to accidentally remove core functionality.* + +For example, let's see what happens if we move the ``build.sbt`` files *before* the ``buildScalaFile``. 
+ +Let's create an example project with the following definition: + +`project/build.scala` :: + + object MyTestBuild extends Build { + + val testProject = project.in(file(".")).settingSets(autoPlugins, defaultSbtFiles, buildScalaFile).settings( + version := scalaBinaryVersion.value match { + case "2.10" => "1.0-SNAPSHOT" + case v => "1.0-for-${v}-SNAPSHOT" + } + ) + } + +This build defines a version string which appends the scala version if the current scala version is not in the ``2.10.x`` series. +Now, when issuing a release we want to lock down the version. Most tools assume this can happen by writing a ``version.sbt`` file: + +`version.sbt` :: + + version := "1.0.0" + +However, when we load this new build, we find that the ``version`` in ``version.sbt`` has been **overridden** by the one defined +in ``project/Build.scala`` because of the order we defined for settings, so the new ``version.sbt`` file has no effect. diff --git a/src/sphinx/Architecture/Task-Engine.rst b/src/sphinx/Architecture/Task-Engine.rst new file mode 100644 index 000000000..9d7be5da0 --- /dev/null +++ b/src/sphinx/Architecture/Task-Engine.rst @@ -0,0 +1,5 @@ +================= + Task Engine +================= + +Placeholder for task engine design details. \ No newline at end of file diff --git a/src/sphinx/Architecture/index.rst b/src/sphinx/Architecture/index.rst new file mode 100644 index 000000000..ea16cae7a --- /dev/null +++ b/src/sphinx/Architecture/index.rst @@ -0,0 +1,16 @@ +============== + Architecture +============== + +This is the set of documentation about the Architecture of sbt. This covers all the core components of +sbt as well as the general notion of how they all work together. This documentation is suitable for those who wish to +have a deeper understanding of sbt's core, but already understand the fundamentals of ``Setting[_]``, ``Task[_]`` and +constructing builds. + +.. 
toctree:: + :maxdepth: 2 + + Core-Principles + Setting-Initialization + Task-Engine + Command-Engine diff --git a/src/sphinx/Architecture/overview-setting-example.png b/src/sphinx/Architecture/overview-setting-example.png new file mode 100644 index 000000000..f5bbcb209 Binary files /dev/null and b/src/sphinx/Architecture/overview-setting-example.png differ diff --git a/src/sphinx/Architecture/settings-initialization-load-ordering.png b/src/sphinx/Architecture/settings-initialization-load-ordering.png new file mode 100644 index 000000000..82055d7d7 Binary files /dev/null and b/src/sphinx/Architecture/settings-initialization-load-ordering.png differ diff --git a/src/sphinx/Community/Bintray-For-Plugins.rst b/src/sphinx/Community/Bintray-For-Plugins.rst index 35efa331a..ca274a9bc 100644 --- a/src/sphinx/Community/Bintray-For-Plugins.rst +++ b/src/sphinx/Community/Bintray-For-Plugins.rst @@ -88,6 +88,9 @@ Make sure your project has a valid license specified, as well as unique name and Make a release ============== + +*Note: bintray does not support snapshots. We recommend using `git-revisions supplied by the sbt-git plugin `_. + Once your build is configured, open the sbt console in your build and run: .. code-block:: console diff --git a/src/sphinx/Community/Changes.rst b/src/sphinx/Community/Changes.rst index 94b42d084..600a28f68 100644 --- a/src/sphinx/Community/Changes.rst +++ b/src/sphinx/Community/Changes.rst @@ -2,6 +2,35 @@ Changes ======= +0.13.2 to 0.13.5 +~~~~~~~~~~~~~~~~ +- The Scala version for sbt and sbt plugins is now 2.10.4. This is a compatible version bump. + +0.13.1 to 0.13.2 +~~~~~~~~~~~~~~~~ +- Adding new name-hashing feature to incremental compiler. Alters how scala dependencies are tracked, reducing number of recompiles necessary. +- Added the ability to launch servers via the sbt-launcher. +- Added ``.previous`` feature on tasks which can load the previous value. +- Added an ``all`` command which can run more than one task in parallel. 
+- Exposed the 'overwrite' flags from ivy. Added warning if overwriting a release version. +- Improve the error message when credentials are not found in Ivy. +- Improve task macros to handle more scala constructs. +- Fix ``last`` and ``export`` tasks to read from the correct stream. +- Fix issue where ivy's ``.+`` dependency ranges were not correctly translated to maven. +- Override security manager to ignore file permissions (performance issue) +- 2.11 compatibility fixes +- Launcher can now handle ivy's ``.+`` revisions. +- SessionSettings now correctly overwrite existing settings. +- Adding a simple Logic system for inclusionary/dependency logic of plugins. +- Improve build hooks for ``LoggerReporter`` and ``TaskProgress``. +- Serialize incremental compiler analysis into text-file format. +- Issue a warning when generating Paths and separate already exists in the path. +- Migrate to Ivy 2.3.0-final. +- Docs: Use bintray as default repository host +- Docs: improved docs on test groups. +- Docs: updated documentation on the Launcher. +- Docs: started architecture document. + 0.13.0 to 0.13.1 ~~~~~~~~~~~~~~~~ diff --git a/src/sphinx/Community/Community-Plugins.rst b/src/sphinx/Community/Community-Plugins.rst index e7b0b6e5c..4c40ce10a 100644 --- a/src/sphinx/Community/Community-Plugins.rst +++ b/src/sphinx/Community/Community-Plugins.rst @@ -4,9 +4,9 @@ Community Plugins sbt Organization ================ - -The `sbt organization `_ is available for use by any sbt plugin. -Developers who contribute their plugins into the community organization will still retain + +The `sbt organization `_ is available for use by any sbt plugin. +Developers who contribute their plugins into the community organization will still retain control over their repository and its access. The goal of the sbt organization is to organize sbt software into one central location. 
@@ -16,47 +16,15 @@ Community Ivy Repository ======================== `Typesafe `_ has provided a freely available `Ivy Repository `_ for sbt projects to use. -If you would like to publish your project to this Ivy repository, first contact `sbt-repo-admins `_ and request privileges (we have to verify code ownership, rights to publish, etc.). After which, you can deploy your plugins using the following configuration: - -:: - - publishTo := Some(Resolver.url("sbt-plugin-releases", new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)) - - publishMavenStyle := false - -You'll also need to add your credentials somewhere. For example, you might use a `~/.sbt/pluginpublish.sbt` file: - -:: - - credentials += Credentials("Artifactory Realm", - "repo.scala-sbt.org", "@user name@", "@my encrypted password@") - -Where `@my encrypted password@` is actually obtained using the following `instructions `_. - -*Note: Your code must abide by the* `repository polices `_. - -To automatically deploy snapshot/release versions of your plugin use the following configuration: - -:: - - publishTo := { - val scalasbt = "http://repo.scala-sbt.org/scalasbt/" - val (name, url) = if (version.value.contains("-SNAPSHOT")) - ("sbt-plugin-snapshots", scalasbt+"sbt-plugin-snapshots") - else - ("sbt-plugin-releases", scalasbt+"sbt-plugin-releases") - Some(Resolver.url(name, new URL(url))(Resolver.ivyStylePatterns)) - } - -*Note: ivy repositories currently don't support Maven-style snapshots.* +This ivy repository is mirrored from the freely available `Bintray service `_. If you'd like to submit your plugin, please follow these instructions: `Bintray For Plugins `_. Available Plugins ================= Please feel free to `submit a pull request `_ that adds your plugin to the list. 
-Plugins for IDEs: -~~~~~~~~~~~~~~~~~ +Plugins for IDEs +~~~~~~~~~~~~~~~~ - IntelliJ IDEA - sbt Plugin to generate IDEA project configuration: @@ -68,6 +36,8 @@ Plugins for IDEs: - Sublime Text: https://github.com/orrsella/sbt-sublime - Ensime: https://github.com/aemoncannon/ensime-sbt-cmd - sbt-mode for Emacs: https://github.com/hvesalai/sbt-mode +- sbt-ctags (manage library dependency sources for vim, emacs, sublime) + https://github.com/kalmanb/sbt-ctags Web Plugins ~~~~~~~~~~~ @@ -158,6 +128,11 @@ Release plugins https://github.com/sbt/sbt-start-script - sbt-native-packager: https://github.com/sbt/sbt-native-packager +- sbt-sonatype-plugin (releases to Sonatype Nexus repository) + https://github.com/xerial/sbt-sonatype +- xitrum-package (collects dependency .jar files for standalone Scala programs): + https://github.com/ngocdaothanh/xitrum-package + System plugins ~~~~~~~~~~~~~~ @@ -228,6 +203,8 @@ Documentation plugins Textile, to HTML): http://software.clapper.org/sbt-lwm/ - sbt-site (Site generation for SBT): https://github.com/sbt/sbt-site +- Laika (Template-based site generation, Markdown, reStructuredText, + no external tools): http://planet42.github.io/Laika/ - literator-plugin (Converts sources into markdown documents): https://github.com/laughedelic/literator @@ -278,6 +255,7 @@ Code coverage plugins ~~~~~~~~~~~~~~~~~~~~~ - sbt-scct: https://github.com/dvc94ch/sbt-scct +- sbt-scoverage: https://github.com/scoverage/sbt-scoverage - jacoco4sbt: https://github.com/sbt/jacoco4sbt - xsbt-coveralls-plugin: https://github.com/theon/xsbt-coveralls-plugin @@ -300,5 +278,5 @@ OSGi plugin Plugin bundles ~~~~~~~~~~~~~~ -- tl-os-sbt-plugins (Version, Release, and Package Management, Play 2.0 and Git utilities) : +- tl-os-sbt-plugins (Version, Release, and Package Management, Play 2.0 and Git utilities) : https://github.com/trafficland/tl-os-sbt-plugins diff --git a/src/sphinx/Community/Using-Sonatype.rst b/src/sphinx/Community/Using-Sonatype.rst index 
77560258d..62b705ba2 100644 --- a/src/sphinx/Community/Using-Sonatype.rst +++ b/src/sphinx/Community/Using-Sonatype.rst @@ -1,3 +1,6 @@ + + + ======================= Deploying to Sonatype ======================= @@ -15,6 +18,19 @@ Follow the instructions for the plugin and you'll have PGP signed artifacts in n artifacts. It can work with the GPG command line tool, but the command line is not needed.* +If your PGP key has not yet been distributed to the keyserver pool, i.e., +you've just generated it, you'll need to publish it. You can do so using +the `sbt-pgp `_ plugin: + +:: + + pgp-cmd send-key keyname hkp://pool.sks-keyservers.net/ + +(where keyname is the name, email address used when creating the key or +hexadecimal identifier for the key.) + +If you see no output from sbt-pgp then the key name specified was not found. + Second - Maven Publishing Settings ---------------------------------- @@ -161,7 +177,7 @@ In sbt, run `publishSigned` and you should see something like the following: After publishing you have to follow the `Release workflow of nexus `_. -In the future, we hope to provide a Nexus sbt plugin that allows the +`sbt-sonatype plugin `_ allows the release workflow procedures to be performed directly from sbt. *Note: Staged releases allow testing across large projects of diff --git a/src/sphinx/Detailed-Topics/Advanced-Index.rst b/src/sphinx/Detailed-Topics/Advanced-Index.rst index 28928ed7e..95d00810e 100644 --- a/src/sphinx/Detailed-Topics/Advanced-Index.rst +++ b/src/sphinx/Detailed-Topics/Advanced-Index.rst @@ -9,6 +9,7 @@ Before reading anything in here, you will need the information in the .. 
toctree:: :maxdepth: 2 + Launcher Scripts TaskInputs diff --git a/src/sphinx/Detailed-Topics/Forking.rst b/src/sphinx/Detailed-Topics/Forking.rst index 0eb910ee3..4dd424f90 100644 --- a/src/sphinx/Detailed-Topics/Forking.rst +++ b/src/sphinx/Detailed-Topics/Forking.rst @@ -104,7 +104,7 @@ directory: :: - javaHome := file("/path/to/jre/") + javaHome := Some(file("/path/to/jre/")) Note that if this is set globally, it also sets the Java installation used to compile Java sources. You can restrict it to running only by @@ -112,7 +112,7 @@ setting it in the :key:`run` scope: :: - javaHome in run := file("/path/to/jre/") + javaHome in run := Some(file("/path/to/jre/")) As with the other settings, you can specify the configuration to affect only the main or test :key:`run` tasks or just the :key:`test` tasks. diff --git a/src/sphinx/Detailed-Topics/Launcher.rst b/src/sphinx/Detailed-Topics/Launcher.rst index 6573f2348..3f3a78836 100644 --- a/src/sphinx/Detailed-Topics/Launcher.rst +++ b/src/sphinx/Detailed-Topics/Launcher.rst @@ -1,387 +1,5 @@ -====================== -Launcher Specification -====================== +============ +Sbt Launcher +============ -The sbt launcher component is a self-contained jar that boots a Scala -application without Scala or the application already existing on the -system. The only prerequisites are the launcher jar itself, an optional -configuration file, and a java runtime version 1.6 or greater. - -Overview -======== - -A user downloads the launcher jar and creates a script to run it. In -this documentation, the script will be assumed to be called `launch`. 
-For unix, the script would look like: -`java -jar sbt-launcher.jar "$@"` - -The user then downloads the configuration file for the application (call -it `my.app.configuration`) and creates a script to launch it (call it -`myapp`): `launch @my.app.configuration "$@"` - -The user can then launch the application using `myapp arg1 arg2 ...` - -Like the launcher used to distribute `sbt`, the downloaded launcher -jar will retrieve Scala and the application according to the provided -configuration file. The versions may be fixed or read from a different -configuration file (the location of which is also configurable). The -location to which the Scala and application jars are downloaded is -configurable as well. The repositories searched are configurable. -Optional initialization of a properties file on launch is configurable. - -Once the launcher has downloaded the necessary jars, it loads the -application and calls its entry point. The application is passed -information about how it was called: command line arguments, current -working directory, Scala version, and application ID (organization, -name, version). In addition, the application can ask the launcher to -perform operations such as obtaining the Scala jars and a -`ClassLoader` for any version of Scala retrievable from the -repositories specified in the configuration file. It can request that -other applications be downloaded and run. When the application -completes, it can tell the launcher to exit with a specific exit code or -to reload the application with a different version of Scala, a different -version of the application, or different arguments. - -There are some other options for setup, such as putting the -configuration file inside the launcher jar and distributing that as a -single download. The rest of this documentation describes the details of -configuring, writing, distributing, and running the application. 
- -Configuration -------------- - -The launcher may be configured in one of the following ways in -increasing order of precedence: - -- Replace the `/sbt/sbt.boot.properties` file in the jar -- Put a configuration file named `sbt.boot.properties` on the - classpath. Put it in the classpath root without the `/sbt` prefix. -- Specify the location of an alternate configuration on the command - line, either as a path or an absolute URI. This can be done by - either specifying the location as the system property - `sbt.boot.properties` or as the first argument to the launcher - prefixed by `'@'`. The system property has lower precedence. - Resolution of a relative path is first attempted against the current - working directory, then against the user's home directory, and then - against the directory containing the launcher jar. An error is - generated if none of these attempts succeed. - -Syntax -~~~~~~ - -The configuration file is line-based, read as UTF-8 encoded, and defined -by the following grammar. `'nl'` is a newline or end of file and -`'text'` is plain text without newlines or the surrounding delimiters -(such as parentheses or square brackets): - -.. 
productionlist:: - configuration: `scala` `app` `repositories` `boot` `log` `appProperties` - scala: "[" "scala" "]" `nl` `version` `nl` `classifiers` `nl` - app: "[" "app" "]" `nl` `org` `nl` `name` `nl` `version` `nl` `components` `nl` `class` `nl` `crossVersioned` `nl` `resources` `nl` `classifiers` `nl` - repositories: "[" "repositories" "]" `nl` (`repository` `nl`)* - boot: "[" "boot" "]" `nl` `directory` `nl` `bootProperties` `nl` `search` `nl` `promptCreate` `nl` `promptFill` `nl` `quickOption` `nl` - log: "["' "log" "]" `nl` `logLevel` `nl` - appProperties: "[" "app-properties" "]" nl (property nl)* - ivy: "[" "ivy" "]" `nl` `homeDirectory` `nl` `checksums` `nl` `overrideRepos` `nl` `repoConfig` `nl` - directory: "directory" ":" `path` - bootProperties: "properties" ":" `path` - search: "search" ":" ("none" | "nearest" | "root-first" | "only" ) ("," `path`)* - logLevel: "level" ":" ("debug" | "info" | "warn" | "error") - promptCreate: "prompt-create" ":" `label` - promptFill: "prompt-fill" ":" `boolean` - quickOption: "quick-option" ":" `boolean` - version: "version" ":" `versionSpecification` - versionSpecification: `readProperty` | `fixedVersion` - readProperty: "read" "(" `propertyName` ")" "[" `default` "]" - fixedVersion: text - classifiers: "classifiers" ":" text ("," text)* - homeDirectory: "ivy-home" ":" `path` - checksums: "checksums" ":" `checksum` ("," `checksum`)* - overrideRepos: "override-build-repos" ":" `boolean` - repoConfig: "repository-config" ":" `path` - org: "org" ":" text - name: "name" ":" text - class: "class" ":" text - components: "components" ":" `component` ("," `component`)* - crossVersioned: "cross-versioned" ":" ("true" | "false" | "none" | "binary" | "full") - resources: "resources" ":" `path` ("," `path`)* - repository: ( `predefinedRepository` | `customRepository` ) `nl` - predefinedRepository: "local" | "maven-local" | "maven-central" - customRepository: `label` ":" `url` [ ["," `ivyPattern`] ["," `artifactPattern`] [", 
mavenCompatible"] [", bootOnly"]] - property: `label` ":" `propertyDefinition` ("," `propertyDefinition`)* - propertyDefinition: `mode` "=" (`set` | `prompt`) - mode: "quick" | "new" | "fill" - set: "set" "(" value ")" - prompt: "prompt" "(" `label` ")" ("[" `default` "]")? - boolean: "true" | "false" - nl: "\r\n" | "\n" | "\r" - path: text - propertyName: text - label: text - default: text - checksum: text - ivyPattern: text - artifactPattern: text - url: text - component: text - -In addition to the grammar specified here, property values may include -variable substitutions. A variable substitution has one of these forms: - -- `${variable.name}` -- `${variable.name-default}` - -where `variable.name` is the name of a system property. If a system -property by that name exists, the value is substituted. If it does not -exists and a default is specified, the default is substituted after -recursively substituting variables in it. If the system property does -not exist and no default is specified, the original string is not -substituted. - -Example -~~~~~~~ - -The default configuration file for sbt looks like: - -.. 
parsed-literal:: - - [scala] - version: ${sbt.scala.version-auto} - - [app] - org: ${sbt.organization-org.scala-sbt} - name: sbt - version: ${sbt.version-read(sbt.version)[\ |release|\ ]} - class: ${sbt.main.class-sbt.xMain} - components: xsbti,extra - cross-versioned: ${sbt.cross.versioned-false} - - [repositories] - local - typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - maven-central - sonatype-snapshots: https://oss.sonatype.org/content/repositories/snapshots - - [boot] - directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/} - - [ivy] - ivy-home: ${sbt.ivy.home-${user.home}/.ivy2/} - checksums: ${sbt.checksums-sha1,md5} - override-build-repos: ${sbt.override.build.repos-false} - repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories} - -Semantics -~~~~~~~~~ - -The `scala.version` property specifies the version of Scala used to -run the application. If the application is not cross-built, this may be -set to `auto` and it will be auto-detected from the application's -dependencies. If specified, the `scala.classifiers` property defines -classifiers, such as 'sources', of extra Scala artifacts to retrieve. - -The `app.org`, `app.name`, and `app.version` properties specify -the organization, module ID, and version of the application, -respectively. These are used to resolve and retrieve the application -from the repositories listed in `[repositories]`. If -`app.cross-versioned` is `binary`, the resolved module ID is -`{app.name+'_'+CrossVersion.binaryScalaVersion(scala.version)}`. -If `app.cross-versioned` is `true` or `full`, the resolved module ID is -`{app.name+'_'+scala.version}`. The `scala.version` property must be -specified and cannot be `auto` when cross-versioned. The paths given -in `app.resources` are added to the application's classpath. 
If the -path is relative, it is resolved against the application's working -directory. If specified, the `app.classifiers` property defines -classifiers, like 'sources', of extra artifacts to retrieve for the -application. - -Jars are retrieved to the directory given by `boot.directory`. By -default, this is an absolute path that is shared by all launched -instances on the machine. If multiple versions access it simultaneously, -you might see messages like: - -.. code-block:: console - - Waiting for lock on <lock-file> to be available... - -This boot directory may be relative to the current directory instead. In -this case, the launched application will have a separate boot directory -for each directory it is launched in. - -The `boot.properties` property specifies the location of the -properties file to use if `app.version` or `scala.version` is -specified as `read`. The `prompt-create`, `prompt-fill`, and -`quick-option` properties together with the property definitions in -`[app.properties]` can be used to initialize the `boot.properties` -file. - -The `app.class` property specifies the name of the entry point to the -application. An application entry point must be a public class with a -no-argument constructor that implements `xsbti.AppMain`. The -`AppMain` interface specifies the entry method signature 'run'. The -run method is passed an instance of AppConfiguration, which provides -access to the startup environment. `AppConfiguration` also provides an -interface to retrieve other versions of Scala or other applications. -Finally, the return type of the run method is `xsbti.MainResult`, -which has two subtypes: `xsbti.Reboot` and `xsbti.Exit`. To exit -with a specific code, return an instance of `xsbti.Exit` with the -requested code. To restart the application, return an instance of -Reboot. You can change some aspects of the configuration with a reboot, -such as the version of Scala, the application ID, and the arguments. 
- -The `ivy.cache-directory` property provides an alternative location -for the Ivy cache used by the launcher. This does not automatically set -the Ivy cache for the application, but the application is provided this -location through the AppConfiguration instance. The `checksums` -property selects the checksum algorithms (sha1 or md5) that are used to -verify artifacts downloaded by the launcher. `override-build-repos` is -a flag that can inform the application that the repositories configured -for the launcher should be used in the application. If -`repository-config` is defined, the file it specifies should contain a -`[repositories]` section that is used in place of the section in the -original configuration file. - -Execution ---------- - -On startup, the launcher searches for its configuration in the order -described in the Configuration section and then parses it. If either the -Scala version or the application version are specified as 'read', the -launcher determines them in the following manner. The file given by the -'boot.properties' property is read as a Java properties file to obtain -the version. The expected property names are `${app.name}.version` for -the application version (where `${app.name}` is replaced with the -value of the `app.name` property from the boot configuration file) and -`scala.version` for the Scala version. If the properties file does not -exist, the default value provided is used. If no default was provided, -an error is generated. - -Once the final configuration is resolved, the launcher proceeds to -obtain the necessary jars to launch the application. The -`boot.directory` property is used as a base directory to retrieve jars -to. Locking is done on the directory, so it can be shared system-wide. -The launcher retrieves the requested version of Scala to - -.. 
code-block:: console - - ${boot.directory}/${scala.version}/lib/ - -If this directory already exists, the launcher takes a shortcut for -startup performance and assumes that the jars have already been -downloaded. If the directory does not exist, the launcher uses Apache -Ivy to resolve and retrieve the jars. A similar process occurs for the -application itself. It and its dependencies are retrieved to - -.. code-block:: console - - ${boot.directory}/${scala.version}/${app.org}/${app.name}/. - -Once all required code is downloaded, the class loaders are set up. The -launcher creates a class loader for the requested version of Scala. It -then creates a child class loader containing the jars for the requested -'app.components' and with the paths specified in `app.resources`. An -application that does not use components will have all of its jars in -this class loader. - -The main class for the application is then instantiated. It must be a -public class with a public no-argument constructor and must conform to -xsbti.AppMain. The `run` method is invoked and execution passes to the -application. The argument to the 'run' method provides configuration -information and a callback to obtain a class loader for any version of -Scala that can be obtained from a repository in [repositories]. The -return value of the run method determines what is done after the -application executes. It can specify that the launcher should restart -the application or that it should exit with the provided exit code. - -Creating a Launched Application -------------------------------- - -This section shows how to make an application that is launched by this -launcher. First, declare a dependency on the launcher-interface. Do not -declare a dependency on the launcher itself. The launcher interface -consists strictly of Java interfaces in order to avoid binary -incompatibility between the version of Scala used to compile the -launcher and the version used to compile your application. 
The launcher -interface class will be provided by the launcher, so it is only a -compile-time dependency. If you are building with sbt, your dependency -definition would be: - -.. parsed-literal:: - - libraryDependencies += "org.scala-sbt" % "launcher-interface" % "|release|" % "provided" - - resolvers += sbtResolver.value - -Make the entry point to your class implement 'xsbti.AppMain'. An example -that uses some of the information: - -.. code-block:: scala - - package xsbt.test - class Main extends xsbti.AppMain - { - def run(configuration: xsbti.AppConfiguration) = - { - // get the version of Scala used to launch the application - val scalaVersion = configuration.provider.scalaProvider.version - - // Print a message and the arguments to the application - println("Hello world! Running Scala " + scalaVersion) - configuration.arguments.foreach(println) - - // demonstrate the ability to reboot the application into different versions of Scala - // and how to return the code to exit with - scalaVersion match - { - case "2.9.3" => - new xsbti.Reboot { - def arguments = configuration.arguments - def baseDirectory = configuration.baseDirectory - def scalaVersion = "2.10.2" - def app = configuration.provider.id - } - case "2.10.2" => new Exit(1) - case _ => new Exit(0) - } - } - class Exit(val code: Int) extends xsbti.Exit - } - -Next, define a configuration file for the launcher. For the above class, -it might look like: - -.. parsed-literal:: - - [scala] - version: |scalaRelease| - [app] - org: org.scala-sbt - name: xsbt-test - version: |release| - class: xsbt.test.Main - cross-versioned: binary - [repositories] - local - maven-central - [boot] - directory: ${user.home}/.myapp/boot - -Then, `publishLocal` or `+publishLocal` the application to make it -available. - -Running an Application ----------------------- - -As mentioned above, there are a few options to actually run the -application. The first involves providing a modified jar for download. 
-The second two require providing a configuration file for download. - -- Replace the /sbt/sbt.boot.properties file in the launcher jar and - distribute the modified jar. The user would need a script to run - `java -jar your-launcher.jar arg1 arg2 ...`. -- The user downloads the launcher jar and you provide the configuration - file. - - - The user needs to run `java -Dsbt.boot.properties=your.boot.properties -jar launcher.jar`. - - The user already has a script to run the launcher (call it - 'launch'). The user needs to run `launch @your.boot.properties your-arg-1 your-arg-2` +This documentation has been moved to :doc:`The Launcher section `. \ No newline at end of file diff --git a/src/sphinx/Detailed-Topics/Testing.rst b/src/sphinx/Detailed-Topics/Testing.rst index 120c1b740..c3ff52fa3 100644 --- a/src/sphinx/Detailed-Topics/Testing.rst +++ b/src/sphinx/Detailed-Topics/Testing.rst @@ -191,7 +191,7 @@ The setting: specifies that all tests will be executed in a single external JVM. See :doc:`Forking` for configuring standard options for forking. More control over how tests are assigned to JVMs and what options to pass to those is -available with :key:`testGrouping` key. For example: +available with :key:`testGrouping` key. For example in build.sbt: :: @@ -201,8 +201,9 @@ available with :key:`testGrouping` key. For example: def groupByFirst(tests: Seq[TestDefinition]) = tests groupBy (_.name(0)) map { case (letter, tests) => new Group(letter.toString, tests, SubProcess(Seq("-Dfirst.letter"+letter))) - } toSeq; - testGrouping := groupByFirst( (definedTests in Test).value ) + } toSeq + + testGrouping in Test <<= groupByFirst( (definedTests in Test).value ) } The tests in a single group are run sequentially. 
Control the number diff --git a/src/sphinx/Detailed-Topics/index.rst b/src/sphinx/Detailed-Topics/index.rst index f7bebe4fc..498ef5b65 100644 --- a/src/sphinx/Detailed-Topics/index.rst +++ b/src/sphinx/Detailed-Topics/index.rst @@ -19,3 +19,5 @@ Other resources include the :doc:`Examples ` and Tasks-and-Commands Plugins-and-Best-Practices Advanced-Index + /Architecture/index + /Launcher/index diff --git a/src/sphinx/Examples/Full-Configuration-Example.rst b/src/sphinx/Examples/Full-Configuration-Example.rst index c84bb5a49..132dad7e0 100644 --- a/src/sphinx/Examples/Full-Configuration-Example.rst +++ b/src/sphinx/Examples/Full-Configuration-Example.rst @@ -16,7 +16,7 @@ into multiple files. val buildVersion = "2.0.29" val buildScalaVersion = "2.9.0-1" - val buildSettings = Defaults.defaultSettings ++ Seq ( + val buildSettings = Seq ( organization := buildOrganization, version := buildVersion, scalaVersion := buildScalaVersion, diff --git a/src/sphinx/Extending/Plugins-Best-Practices.rst b/src/sphinx/Extending/Plugins-Best-Practices.rst index 4084f5ba1..c7adb5655 100644 --- a/src/sphinx/Extending/Plugins-Best-Practices.rst +++ b/src/sphinx/Extending/Plugins-Best-Practices.rst @@ -16,20 +16,18 @@ Specifically: Here are some current plugin best practices. **NOTE:** Best practices are evolving, so check back frequently. -Avoid overriding `settings` ------------------------------ +Don't use default package +--------------------------- -sbt will automatically load your plugin's `settings` into the build. -Overriding `val settings` should only be done by plugins intending to -provide commands. Regular plugins defining tasks and settings should -provide a sequence named after the plugin like so: +Users who have their build files in some package will not be able to +use your plugin if it's defined in default (no-name) package. -:: +Avoid older `sbt.Plugin` mechanism +---------------------------------- - val obfuscateSettings = Seq(...) 
- -This allows build user to choose which subproject the plugin would be -used. See later section for how the settings should be scoped. +sbt has deprecated the old `sbt.Plugin` mechanism in favor of `sbt.AutoPlugin`. +The new mechanism features a set of user-level controls and dependency declarations +that cleans up a lot of long-standing issues with plugins. Reuse existing keys ------------------- diff --git a/src/sphinx/Extending/Plugins.rst b/src/sphinx/Extending/Plugins.rst index 18cae5ee6..4b155b254 100644 --- a/src/sphinx/Extending/Plugins.rst +++ b/src/sphinx/Extending/Plugins.rst @@ -176,6 +176,10 @@ It is recommended to explicitly specify the commit or tag by appending it to the lazy val assemblyPlugin = uri("git://github.com/sbt/sbt-assembly#0.9.1") +One caveat to using this method is that the local sbt will try to run the remote plugin's build. It +is quite possible that the plugin's own build uses a different sbt version, as many plugins cross-publish for +several sbt versions. As such, it is recommended to stick with binary artifacts when possible. + 2) Use the library ~~~~~~~~~~~~~~~~~~ @@ -221,23 +225,25 @@ To make a plugin, create a project and configure `sbtPlugin` to `true`. Then, write the plugin code and publish your project to a repository. The plugin can be used as described in the previous section. -A plugin can implement `sbt.Plugin`. The contents of a Plugin -singleton, declared like `object MyPlugin extends Plugin`, are -wildcard imported in `set`, `eval`, and `.sbt` files. Typically, +* Automatically importing selective names to `.sbt` files. +* Specifying plugin dependencies. +* Automatically activating itself when all dependencies are present. +* Specifying `projectSettings`, `buildSettings`, and `globalSettings` as appropriate. + +When an AutoPlugin provides a stable field such as `val` or `object` named `autoImport`, +the contents of the field are wildcard imported in in `set`, `eval`, and `.sbt` files. 
Typically, this is used to provide new keys (SettingKey, TaskKey, or InputKey) or core methods without requiring an import or qualification. -In addition, a `Plugin` can implement `projectSettings`, `buildSettings`, and `globalSettings` as appropriate. -The Plugin's `projectSettings` is automatically appended to each project's settings. +The AutoPlugin's `projectSettings` is automatically appended to each project's settings, when its dependencies also exist on that project. +The `requires` method defines the dependencies to other plugins. +The `trigger` method defines the conditions by which this plugin's settings are automatically activated. The `buildSettings` is appended to each build's settings (that is, `in ThisBuild`). The `globalSettings` is appended once to the global settings (`in Global`). These allow a plugin to automatically provide new functionality or new defaults. One main use of this feature is to globally add commands, such as for IDE plugins. Use `globalSettings` to define the default value of a setting. -These automatic features should be used judiciously because the automatic activation generally reduces control for the build author (the user of the plugin). -Some control is returned to them via `Project.autoSettings`, which changes how automatically added settings are added and in what order. 
- Example Plugin -------------- @@ -249,31 +255,51 @@ An example of a typical plugin: sbtPlugin := true - name := "example-plugin" + name := "sbt-obfuscate" organization := "org.example" -`MyPlugin.scala`: +`Plugin.scala`: :: + package sbtobfuscate + import sbt._ - object MyPlugin extends Plugin + + object Plugin extends AutoPlugin { - // configuration points, like the built in `version`, `libraryDependencies`, or `compile` - // by implementing Plugin, these are automatically imported in a user's `build.sbt` - val newTask = taskKey[Unit]("A new task.") - val newSetting = settingKey[String]("A new setting.") + // by definging autoImport, these are automatically imported into user's `*.sbt` + object autoImport + { + // configuration points, like the built in `version`, `libraryDependencies`, or `compile` + val obfuscate = taskKey[Seq[File]]("Obfuscates files.") + val obfuscateLiterals = settingKey[Boolean]("Obfuscate literals.") - // a group of settings ready to be added to a Project - // to automatically add them, do - val newSettings = Seq( - newSetting := "test", - newTask := println(newSetting.value) - ) + // default values for the tasks and settings + lazy val baseObfuscateSettings: Seq[sbt.Def.Setting[_]] = Seq( + obfuscate := { + Obfuscate(sources.value, (obfuscateLiterals in obfuscate).value) + }, + obfuscateLiterals in obfuscate := false + ) + } - // alternatively, by overriding `settings`, they could be automatically added to a Project - // override val settings = Seq(...) + import autoImport._ + override def requires = sbt.plugins.JvmModule + + // This plugin is automatically enabled for projects which are JvmModules. + override def trigger = allRequirements + + // a group of settings that are automatically added to projects. 
+ override val projectSettings = + inConfig(Compile)(baseObfuscateSettings) ++ + inConfig(Test)(baseObfuscateSettings) + } + + object Obfuscate + { + def apply(sources: Seq[File], obfuscateLiterals: Boolean): Seq[File] = sources } Usage example @@ -281,15 +307,20 @@ Usage example A build definition that uses the plugin might look like: -`build.sbt` +`obfuscate.sbt` :: - MyPlugin.newSettings + obfuscateLiterals in obfuscate := true - newSetting := "example" -Example command plugin +Root Plugins +------------ + +Some plugins should always be explicitly enabled on projects. Sbt calls these root plugins, i.e. plugins +that are "root" nodes in the plugin dependency graph. `AutoPlugin` by default defines a root plugin. + +Example command root plugin --------------------------- A basic plugin that adds commands looks like: @@ -300,19 +331,21 @@ A basic plugin that adds commands looks like: sbtPlugin := true - name := "example-plugin" + name := "sbt-sample" organization := "org.example" -`MyPlugin.scala` +`Plugin.scala` :: + package sbtsample + import sbt._ import Keys._ - object MyPlugin extends Plugin + object Plugin extends AutoPlugin { - override lazy val settings = Seq(commands += myCommand) + override lazy val projectSettings = Seq(commands += myCommand) lazy val myCommand = Command.command("hello") { (state: State) => @@ -327,6 +360,28 @@ included in one plugin (for example, use `commands ++= Seq(a,b)`). See :doc:`Commands` for defining more useful commands, including ones that accept arguments and affect the execution state. +For a user to consume this plugin, it requires an explicit include via the `Project` instance. +Here's what their local sbt will look like. + +`build.sbt` + +:: + + val root = Project("example-plugin-usage", file(".")).setPlugins(MyPlugin) + + +The `setPlugins` method allows projects to explicitly define the `RootPlugin`s they wish to consume. +`AutoPlugin`s are automatically added to the project as appropriate. 
+ +Projects can also exclude any type of plugin using the `disablePlugins` method. For example, if +we wish to remove the JvmModule settings (`compile`,`test`,`run`), we modify our `build.sbt` as +follows: + +:: + + val root = Project("example-plugin-usage", file(".")).setPlugins(MyPlugin).disablePlugins(plugins.JvmModule) + + Global plugins example ---------------------- diff --git a/src/sphinx/Getting-Started/Basic-Def.rst b/src/sphinx/Getting-Started/Basic-Def.rst index 422cba303..a89248f76 100644 --- a/src/sphinx/Getting-Started/Basic-Def.rst +++ b/src/sphinx/Getting-Started/Basic-Def.rst @@ -48,7 +48,7 @@ becomes sbt's new map. To create the map, sbt first sorts the list of settings so that all changes to the same key are made together, and values that depend on other keys are processed after the keys they depend on. Then sbt walks -over the sorted list of `Setting`s and applies each one to the map in +over the sorted list of `Setting`\ s and applies each one to the map in turn. Summary: A build definition defines a list of `Setting[T]`, where a @@ -72,13 +72,13 @@ Here's an example: version := "1.0" - scalaVersion := "2.10.3" + scalaVersion := "2.10.4" Each `Setting` is defined with a Scala expression. The expressions in `build.sbt` are independent of one another, and they are expressions, rather than complete Scala statements. These -expressions may be interspersed with `val`s, `lazy val`s, and `def`s. -Top-level `object`s and `class`es are not allowed in `build.sbt`. +expressions may be interspersed with `val`\ s, `lazy val`\ s, and `def`\ s. +Top-level `object`\ s and `class`\ es are not allowed in `build.sbt`. Those should go in the `project/` directory as full Scala source files. On the left, :key:`name`, :key:`version`, and :key:`scalaVersion` are *keys*. 
A diff --git a/src/sphinx/Getting-Started/Full-Def.rst b/src/sphinx/Getting-Started/Full-Def.rst index 5c104e2b4..0d1ac25bd 100644 --- a/src/sphinx/Getting-Started/Full-Def.rst +++ b/src/sphinx/Getting-Started/Full-Def.rst @@ -113,7 +113,7 @@ The following two files illustrate. First, if your project is in lazy val root = Project(id = "hello", base = file("."), - settings = Project.defaultSettings ++ Seq(sampleKeyB := "B: in the root project settings in Build.scala")) + settings = Seq(sampleKeyB := "B: in the root project settings in Build.scala")) } Now, create `hello/build.sbt` as follows: diff --git a/src/sphinx/Getting-Started/Using-Plugins.rst b/src/sphinx/Getting-Started/Using-Plugins.rst index 5dfc05db9..edde8e0d1 100644 --- a/src/sphinx/Getting-Started/Using-Plugins.rst +++ b/src/sphinx/Getting-Started/Using-Plugins.rst @@ -34,8 +34,36 @@ Adding settings for a plugin ---------------------------- A plugin can declare that its settings be automatically added, in which case you don't have to do anything to add them. -However, plugins often avoid this because you wouldn't control which projects in a :doc:`multi-project build ` would use the plugin. -The plugin documentation will indicate how to configure it, but typically it involves adding the base settings for the plugin and customizing as necessary. + +As of sbt 0.13.5, there is a new :doc:`auto-plugins <../DetailedTopics/AutoPlugins>` feature that enables plugins +to automatically, and safely, ensure their settings and dependencies are on a project. Most plugins should have +their default settings automatically, however some may require explicit enablement. + +If you're using a plugin that requires explicit enablement, then you you have to add the following to your +`build.sbt` :: + + lazy val util = project.setPlugins(ThePluginIWant) + +Most plugins document whether they need to explicitly enabled. 
If you're curious which plugins are enabled +for a given project, just run the `plugins` command on the sbt console. + +For example :: + + > plugins + In file:/home/jsuereth/projects/sbt/test-ivy-issues/ + sbt.plugins.IvyPlugin: enabled in test-ivy-issues + sbt.plugins.JvmPlugin: enabled in test-ivy-issues + sbt.plugins.CorePlugin: enabled in test-ivy-issues + + +Here, the plugins output is showing that the sbt default plugins are all enabled. Sbt's default settings are provided via three plugins: + +1. CorePlugin: Provides the core parallelism controls for tasks +2. IvyPlugin: Provides the mechanisms to publish/resolve modules. +3. JvmPlugin: Provides the mechanisms to compile/test/run/package Java/Scala projects. + + +However, older plugins often required settings to be added explictly, so that :doc:`multi-project build ` could have different types of projects. The plugin documentation will indicate how to configure it, but typically for older plugins this involves adding the base settings for the plugin and customizing as necessary. For example, for the sbt-site plugin, add :: @@ -91,10 +119,12 @@ To create an sbt plugin, 1. Create a new project for the plugin. 2. Set `sbtPlugin := true` for the project in `build.sbt`. This adds a dependency on sbt and will detect and record Plugins that you define. - 3. (optional) Define an `object` that extends `Plugin`. The contents of this object will be automatically imported in `.sbt` files, so ensure it only contains important API definitions and types. - 4. Define any custom tasks or settings (see the next section :doc:`Custom-Settings`). - 5. Collect the default settings to apply to a project in a list for the user to add. Optionally override one or more of Plugin's methods to have settings automatically added to user projects. - 6. Publish the project. There is a :doc:`community repository ` available for open source plugins. + 3. Define another `object` that extends `AutoImport`. 
The contents of this object will be automatically imported in `.sbt` files, so ensure it only contains important API definitions and types. + 4. Define an `object` that extends `AutoPlugin`. + 5. Declare dependencies on other plugins by defining the `requires` method. + 5. Define any custom tasks or settings (see the next section :doc:`Custom-Settings`). + 6. Collect the default settings to apply to a project in a list for the user to add. Optionally override one or more of `AutoPlugin`'s methods to have settings automatically added to user projects. + 8. Publish the project. There is a :doc:`community repository ` available for open source plugins. For more details, including ways of developing plugins, see :doc:`/Extending/Plugins`. For best practices, see :doc:`/Extending/Plugins-Best-Practices`. diff --git a/src/sphinx/Howto/generatefiles.rst b/src/sphinx/Howto/generatefiles.rst index a8a1c3ec3..f90806598 100644 --- a/src/sphinx/Howto/generatefiles.rst +++ b/src/sphinx/Howto/generatefiles.rst @@ -29,7 +29,7 @@ As a specific example, the following generates a hello world source file: :: - sourceGenerators in Compile += Def.task { + sourceGenerators in Compile <+= Def.task { val file = (sourceManaged in Compile).value / "demo" / "Test.scala" IO.write(file, """object Test extends App { println("Hi") }""") Seq(file) @@ -44,7 +44,7 @@ By default, generated sources are not included in the packaged source artifact. :title: Generate resources :type: setting - resourceGenerators in Compile += + resourceGenerators in Compile += .taskValue A resource generation task should generate resources in a subdirectory of :key:`resourceManaged` and return a sequence of files generated. The key to add the task to is called :key:`resourceGenerators`. Because we want to add the unexecuted task, we use `taskValue` instead of the usual `value`. It should be scoped according to whether the generated files are main (`Compile`) or test (`Test`) resources. 
This basic structure looks like: @@ -56,7 +56,7 @@ For example, assuming a method `def makeSomeResources(base: File): Seq[File]`, :: - resourceGenerators in Compile += Def.task { + resourceGenerators in Compile <+= Def.task { makeSomeResources( (resourceManaged in Compile).value / "demo") }.taskValue diff --git a/src/sphinx/Howto/runningcommands.rst b/src/sphinx/Howto/runningcommands.rst index bf620f6b6..22554277e 100644 --- a/src/sphinx/Howto/runningcommands.rst +++ b/src/sphinx/Howto/runningcommands.rst @@ -84,5 +84,5 @@ For example, > eval 2+2 4: Int -Variables defined by an `eval` are not visible to subsequent `eval`s, although changes to system properties persist and affect the JVM that is running sbt. +Variables defined by an `eval` are not visible to subsequent `eval`\ s, although changes to system properties persist and affect the JVM that is running sbt. Use the Scala REPL (:key:`console` and related commands) for full support for evaluating Scala code interactively. diff --git a/src/sphinx/Launcher/Architecture.rst b/src/sphinx/Launcher/Architecture.rst new file mode 100644 index 000000000..2e62f84b7 --- /dev/null +++ b/src/sphinx/Launcher/Architecture.rst @@ -0,0 +1,108 @@ +========================= +Sbt Launcher Architecture +========================= + +The sbt launcher is a mechanism whereby modules can be loaded from ivy and +executed within a jvm. It abstracts the mechanism of grabbing and caching jars, +allowing users to focus on what application they want and control its versions. + +The launcher's primary goal is to take configuration for applications, mostly +just ivy coordinates and a main class, and start the application. The +launcher resolves the ivy module, caches the required runtime jars and +starts the application. + +The sbt launcher provides the application with the means to load a different +application when it completes, exit normally, or load additional applications +from inside another. 
+ +The sbt launcher provides these core functions: + +* Module Resolution +* Classloader Caching and Isolation +* File Locking +* Service Discovery and Isolation + +Module Resolution +~~~~~~~~~~~~~~~~~ +The primary purpose of the sbt launcher is to resolve applications and run them. +This is done through the `[app]` configuration section. See :doc:`Configuration` +for more information on how to configure module resolution. + +Module resolution is performed using the Ivy dependency management library. This +library supports loading artifacts from Maven repositories as well. + +Classloader Caching and Isolation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The sbt launcher's classloading structure is different than just starting an +application in the standard java mechanism. Every application loaded +by the launcher is given its own classloader. This classloader is a child +of the Scala classloader used by the application. The Scala classloader can see +all of the `xsbti.*` classes from the launcher itself. + +Here's an example classloader layout from an sbt launched application. + +.. image:: classloaders.png + +In this diagram, three different applications were loaded. Two of these use the +same version of Scala (2.9.2). In this case, sbt can share the same classloader +for these applications. This has the benefit that any JIT optimisations performed +on scala classes can be re-used between applications thanks to the shared +classloader. + + +Caching +~~~~~~~ +The sbt launcher creates a secondary cache on top of Ivy's own cache. This helps +isolate applications from errors resulting from unstable revisions, like +`-SNAPSHOT`. For any launched application, the launcher creates a directory +to store all its jars. Here's an example layout. + +.. 
parsed-literal:: + + ${boot.directory}/ + scala_2.9.2/ + lib/ + + /// + + /// + + scala_2.10.3/ + lib/ + + /// + / + +Locking +~~~~~~~ +In addition to providing a secondary cache, the launcher also provides a mechanism +of safely doing file-based locks. This is used in two places directly by the +launcher: + +1. Locking the boot directory. +2. Ensuring located servers have at most one active process. + +This feature requires a filesystem which supports locking. It is exposed via the +`xsbti.GlobalLock` interface. + +*Note: This is both a thread and file lock. Not only are we limiting access to a single process, but also a single thread within that process.* + +Service Discovery and Isolation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The launcher also provides a mechanism to ensure that only one instance of a +server is running, while dynamically starting it when a client requests. This +is done through the `--locate` flag on the launcher. When the launcher is +started with the `--locate` flag it will do the following: + +1. Lock on the configured server lock file. +2. Read the server properties to find the URI of the previous server. +3. If the port is still listening to connection requests, print this URI + on the command line. +4. If the port is not listening, start a new server and write the URI + on the command line. +5. Release all locks and shutdown. + +The configured `server.lock` file is thus used to prevent multiple servers from +running. Sbt itself uses this to prevent more than one server running on any +given project directory by configuring `server.lock` to be +`${user.dir}/.sbtserver`. 
diff --git a/src/sphinx/Launcher/Configuration.rst b/src/sphinx/Launcher/Configuration.rst new file mode 100644 index 000000000..b110d5411 --- /dev/null +++ b/src/sphinx/Launcher/Configuration.rst @@ -0,0 +1,260 @@ +========================== +Sbt Launcher Configuration +========================== + +The launcher may be configured in one of the following ways in +increasing order of precedence: + +- Replace the `/sbt/sbt.boot.properties` file in the launcher jar +- Put a configuration file named `sbt.boot.properties` on the + classpath. Put it in the classpath root without the `/sbt` prefix. +- Specify the location of an alternate configuration on the command + line, either as a path or an absolute URI. This can be done by + either specifying the location as the system property + `sbt.boot.properties` or as the first argument to the launcher + prefixed by `'@'`. The system property has lower precedence. + Resolution of a relative path is first attempted against the current + working directory, then against the user's home directory, and then + against the directory containing the launcher jar. + +An error is generated if none of these attempts succeed. + +Example +~~~~~~~ + +The default configuration file for sbt as an application looks like: + +.. 
parsed-literal:: + + [scala] + version: ${sbt.scala.version-auto} + + [app] + org: ${sbt.organization-org.scala-sbt} + name: sbt + version: ${sbt.version-read(sbt.version)[\ |release|\ ]} + class: ${sbt.main.class-sbt.xMain} + components: xsbti,extra + cross-versioned: ${sbt.cross.versioned-false} + + [repositories] + local + typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + maven-central + sonatype-snapshots: https://oss.sonatype.org/content/repositories/snapshots + + [boot] + directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/} + + [ivy] + ivy-home: ${sbt.ivy.home-${user.home}/.ivy2/} + checksums: ${sbt.checksums-sha1,md5} + override-build-repos: ${sbt.override.build.repos-false} + repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories} + +Let's look at all the launcher configuration sections in detail: + +1. Scala Configuration +---------------------- +The `[scala]` section is used to configure the version of Scala. +It has the following properties: + +* `version` - The version of scala an application uses, or `auto` if the + application is not cross-versioned. +* `classifiers` - The (optional) list of additional scala artifacts to resolve, + e.g. `sources`. + + +2. Application Identification +----------------------------- +The `[app]` section configures how the launcher will look for your application +using the Ivy dependency manager. It consists of the following properties: + +* `org` - The organization associated with the Ivy module. + (`groupId` in maven vernacular) +* `name` - The name of the Ivy module. (`artifactId` in maven vernacular) +* `version` - The revision of the Ivy module. +* `class` - The name of the "entry point" into the application. An entry + point must be a class which meets one of the following criteria: + - Extends the `xsbti.AppMain` interface. 
+ - Extends the `xsbti.ServerMain` interface. + - Contains a method with the signature `static void main(String[])` + - Contains a method with the signature `static int main(String[])` + - Contains a method with the signature `static xsbti.Exit main(String[])` +* `components` - An optional list of additional components that Ivy should + resolve. +* `cross-versioned` - An optional string denoting how this application is + published. + If `app.cross-versioned` is `binary`, the resolved module ID is + `{app.name+'_'+CrossVersion.binaryScalaVersion(scala.version)}`. + If `app.cross-versioned` is `true` or `full`, the resolved module ID is + `{app.name+'_'+scala.version}`. The `scala.version` property must be + specified and cannot be `auto` when cross-versioned. +* `resources` - An optional list of jar files that should be added to + the application's classpath. +* `classifiers` - An optional list of additional classifiers that should be + resolved with this application, e.g. `sources`. + +3. Repositories Section +----------------------- +The `[repositories]` section configures where and how Ivy will look for +your application. Each line denotes a repository where Ivy will look. + +*Note: This section configures the default location where Ivy will look, but +this can be overridden via user configuration.* + +There are several built-in strings that can be used for common repositories: + +* `local` - the local ivy repository `~/.ivy2/local`. +* `maven-local` - The local maven repository `~/.m2/repository`. +* `maven-central` - The maven central repository `repo.maven.org`. + +Besides built-in repositories, other repositories can be configured using +the following syntax: + +.. parsed-literal:: + name: url(, pattern)(,descriptorOptional)(,skipConsistencyCheck) + +The `name` property is an identifier which Ivy uses to cache modules +resolved from this location. The `name` should be unique across all +repositories. 
+ +The `url` property is the base `url` where Ivy should look for modules. + +The `pattern` property is an optional specification of *how* Ivy should +look for modules. By default, the launcher assumes repositories are in +the maven style format. + +The `skipConsistencyCheck` string is used to tell ivy not to validate checksums +and signatures of files it resolves. + +4. The Boot section +------------------- +The `[boot]` section is used to configure where the sbt launcher will store +its cache and configuration information. It consists of the following properties: + +* `directory` - The directory defined here is used to store all cached JARs + resolved launcher. +* `properties` - (optional) A properties file to use for any `read` variables. + +5. The Ivy section +------------------ +The `[ivy]` section is used to configure the Ivy dependency manager for +resolving applications. It consists of the following properties: + +* `ivy-home` - The home directory for Ivy. This determines where the + `ivy-local` repository is located, and also where the ivy cache is + stored. Defaults to `~/.ivy2` +* `ivy.cache-directory` - provides an alternative location for the Ivy + cache used by the launcher. This does not automatically set the Ivy + cache for the application, but the application is provided this location + through the AppConfiguration instance. +* `checksums` - The comma-separated list of checksums that Ivy should use + to verify artifacts have correctly resolved, e.g. `md5` or `sha1`. +* `override-build-repos` - If this is set, then the `isOverrideRepositories` + method on `xsbti.Launcher` interface will return its value. The use of this + method is application specific, but in the case of sbt denotes that the + configuration of repositories in the launcher should override those used + by any build. Applications should respect this convention if they can. +* `repository-config` - This specifies a configuration location where + ivy repositories can also be configured. 
If this file exists, then its contents + override the `[repositories]` section. + + +6. The Server Section +--------------------- +When using the `--locate` feature of the launcher, this section configures +how a server is started. It consists of the following properties: + +* `lock` - The file that controls access to the running server. This file + will contain the active port used by a server and must be located on + a filesystem that supports locking. +* `jvmargs` - A file that contains line-separated JVM arguments that were + used when starting the server. +* `jvmprops` - The location of a properties file that will define override + properties in the server. All properties defined in this file will + be set as `-D` java properties. + +Variable Substitution +~~~~~~~~~~~~~~~~~~~~~ +Property values may include variable substitutions. A variable substitution has +one of these forms: + +- `${variable.name}` +- `${variable.name-default}` + +where `variable.name` is the name of a system property. If a system +property by that name exists, the value is substituted. If it does not +exist and a default is specified, the default is substituted after +recursively substituting variables in it. If the system property does +not exist and no default is specified, the original string is not +substituted. + +There is also a special variable substitution: + +- `read(property.name)[default]` + +This will look in the file configured by `boot.properties` for a value. If +there is no `boot.properties` file configured, or the property does not exist, +then the default value is chosen. + + + +Syntax +~~~~~~ + +The configuration file is line-based, read as UTF-8 encoded, and defined +by the following grammar. `'nl'` is a newline or end of file and +`'text'` is plain text without newlines or the surrounding delimiters +(such as parentheses or square brackets): + +.. 
productionlist:: + configuration: `scala` `app` `repositories` `boot` `log` `appProperties` + scala: "[" "scala" "]" `nl` `version` `nl` `classifiers` `nl` + app: "[" "app" "]" `nl` `org` `nl` `name` `nl` `version` `nl` `components` `nl` `class` `nl` `crossVersioned` `nl` `resources` `nl` `classifiers` `nl` + repositories: "[" "repositories" "]" `nl` (`repository` `nl`)* + boot: "[" "boot" "]" `nl` `directory` `nl` `bootProperties` `nl` `search` `nl` `promptCreate` `nl` `promptFill` `nl` `quickOption` `nl` + log: "["' "log" "]" `nl` `logLevel` `nl` + appProperties: "[" "app-properties" "]" nl (property nl)* + ivy: "[" "ivy" "]" `nl` `homeDirectory` `nl` `checksums` `nl` `overrideRepos` `nl` `repoConfig` `nl` + directory: "directory" ":" `path` + bootProperties: "properties" ":" `path` + search: "search" ":" ("none" | "nearest" | "root-first" | "only" ) ("," `path`)* + logLevel: "level" ":" ("debug" | "info" | "warn" | "error") + promptCreate: "prompt-create" ":" `label` + promptFill: "prompt-fill" ":" `boolean` + quickOption: "quick-option" ":" `boolean` + version: "version" ":" `versionSpecification` + versionSpecification: `readProperty` | `fixedVersion` + readProperty: "read" "(" `propertyName` ")" "[" `default` "]" + fixedVersion: text + classifiers: "classifiers" ":" text ("," text)* + homeDirectory: "ivy-home" ":" `path` + checksums: "checksums" ":" `checksum` ("," `checksum`)* + overrideRepos: "override-build-repos" ":" `boolean` + repoConfig: "repository-config" ":" `path` + org: "org" ":" text + name: "name" ":" text + class: "class" ":" text + components: "components" ":" `component` ("," `component`)* + crossVersioned: "cross-versioned" ":" ("true" | "false" | "none" | "binary" | "full") + resources: "resources" ":" `path` ("," `path`)* + repository: ( `predefinedRepository` | `customRepository` ) `nl` + predefinedRepository: "local" | "maven-local" | "maven-central" + customRepository: `label` ":" `url` [ ["," `ivyPattern`] ["," `artifactPattern`] [", 
mavenCompatible"] [", bootOnly"]] + property: `label` ":" `propertyDefinition` ("," `propertyDefinition`)* + propertyDefinition: `mode` "=" (`set` | `prompt`) + mode: "quick" | "new" | "fill" + set: "set" "(" value ")" + prompt: "prompt" "(" `label` ")" ("[" `default` "]")? + boolean: "true" | "false" + nl: "\r\n" | "\n" | "\r" + path: text + propertyName: text + label: text + default: text + checksum: text + ivyPattern: text + artifactPattern: text + url: text + component: text diff --git a/src/sphinx/Launcher/GettingStarted.rst b/src/sphinx/Launcher/GettingStarted.rst new file mode 100644 index 000000000..66b8f6494 --- /dev/null +++ b/src/sphinx/Launcher/GettingStarted.rst @@ -0,0 +1,232 @@ +===================================== +Getting Started with the Sbt Launcher +===================================== + +The sbt launcher component is a self-contained jar that boots a Scala +application or server without Scala or the application already existing +on the system. The only prerequisites are the launcher jar itself, an +optional configuration file, and a java runtime version 1.6 or greater. + +Overview +======== + +A user downloads the launcher jar and creates a script to run it. In +this documentation, the script will be assumed to be called `launch`. +For unix, the script would look like: +`java -jar sbt-launcher.jar "$@"` + +The user can now launch servers and applications which provide sbt +launcher configuration. + +Applications +------------ + +To launch an application, the user then downloads the configuration +file for the application (call it `my.app.configuration`) and creates +a script to launch it (call it `myapp`): `launch @my.app.configuration "$@"` + +The user can then launch the application using `myapp arg1 arg2 ...` + +More on launcher configuration can be found at :doc:`Launcher Configuration ` + + +Servers +------- + +The sbt launcher can be used to launch and discover running servers +on the system. 
The launcher can be used to launch servers similarly to +applications. However, if desired, the launcher can also be used to +ensure that only one instance of a server is running at time. This is done +by having clients always use the launcher as a *service locator*. + +To discover where a server is running (or launch it if it is not running), +the user downloads the configuration file for the server +(call it `my.server.configuration`) and creates a script to discover +the server (call it `find-myserver`): `launch --locate @my.server.properties`. + +This command will print out one string, the URI at which to reach the server, +e.g. `sbt://127.0.0.1:65501`. Clients should use the IP/port to connect to +to the server and initiate their connection. + +When using the `locate` feature, the sbt launcher makes these following +restrictions to servers: + +- The Server must have a starting class that extends + the `xsbti.ServerMain` class +- The Server must have an entry point (URI) that clients + can use to detect the server +- The server must have defined a lock file which the launcher can + use to ensure that only one instance is running at a time +- The filesystem on which the lock file resides must support + locking. +- The server must allow the launcher to open a socket against the port + without sending any data. This is used to check if a previous + server is still alive. + + +Resolving Applications/Servers +------------------------------ + +Like the launcher used to distribute `sbt`, the downloaded launcher +jar will retrieve Scala and the application according to the provided +configuration file. The versions may be fixed or read from a different +configuration file (the location of which is also configurable). The +location to which the Scala and application jars are downloaded is +configurable as well. The repositories searched are configurable. +Optional initialization of a properties file on launch is configurable. 
+ +Once the launcher has downloaded the necessary jars, it loads the +application/server and calls its entry point. The application is passed +information about how it was called: command line arguments, current +working directory, Scala version, and application ID (organization, +name, version). In addition, the application can ask the launcher to +perform operations such as obtaining the Scala jars and a +`ClassLoader` for any version of Scala retrievable from the +repositories specified in the configuration file. It can request that +other applications be downloaded and run. When the application +completes, it can tell the launcher to exit with a specific exit code or +to reload the application with a different version of Scala, a different +version of the application, or different arguments. + +There are some other options for setup, such as putting the +configuration file inside the launcher jar and distributing that as a +single download. The rest of this documentation describes the details of +configuring, writing, distributing, and running the application. + + +Creating a Launched Application +------------------------------- + +This section shows how to make an application that is launched by this +launcher. First, declare a dependency on the launcher-interface. Do not +declare a dependency on the launcher itself. The launcher interface +consists strictly of Java interfaces in order to avoid binary +incompatibility between the version of Scala used to compile the +launcher and the version used to compile your application. The launcher +interface class will be provided by the launcher, so it is only a +compile-time dependency. If you are building with sbt, your dependency +definition would be: + +.. parsed-literal:: + + libraryDependencies += "org.scala-sbt" % "launcher-interface" % "|release|" % "provided" + + resolvers += sbtResolver.value + +Make the entry point to your class implement 'xsbti.AppMain'. An example +that uses some of the information: + +.. 
code-block:: scala + + package xsbt.test + class Main extends xsbti.AppMain + { + def run(configuration: xsbti.AppConfiguration) = + { + // get the version of Scala used to launch the application + val scalaVersion = configuration.provider.scalaProvider.version + + // Print a message and the arguments to the application + println("Hello world! Running Scala " + scalaVersion) + configuration.arguments.foreach(println) + + // demonstrate the ability to reboot the application into different versions of Scala + // and how to return the code to exit with + scalaVersion match + { + case "2.9.3" => + new xsbti.Reboot { + def arguments = configuration.arguments + def baseDirectory = configuration.baseDirectory + def scalaVersion = "2.10.2" + def app = configuration.provider.id + } + case "2.10.2" => new Exit(1) + case _ => new Exit(0) + } + } + class Exit(val code: Int) extends xsbti.Exit + } + +Next, define a configuration file for the launcher. For the above class, +it might look like: + +.. parsed-literal:: + + [scala] + version: |scalaRelease| + [app] + org: org.scala-sbt + name: xsbt-test + version: |release| + class: xsbt.test.Main + cross-versioned: binary + [repositories] + local + maven-central + [boot] + directory: ${user.home}/.myapp/boot + +Then, `publishLocal` or `+publishLocal` the application to make it +available. For more information, please see :doc:`Launcher Configuration ` + +Running an Application +---------------------- + +As mentioned above, there are a few options to actually run the +application. The first involves providing a modified jar for download. +The second two require providing a configuration file for download. + +- Replace the /sbt/sbt.boot.properties file in the launcher jar and + distribute the modified jar. The user would need a script to run + `java -jar your-launcher.jar arg1 arg2 ...`. +- The user downloads the launcher jar and you provide the configuration + file. 
+ + - The user needs to run `java -Dsbt.boot.properties=your.boot.properties -jar launcher.jar`. + - The user already has a script to run the launcher (call it + 'launch'). The user needs to run `launch @your.boot.properties your-arg-1 your-arg-2` + + +Execution +--------- + +Let's review what's happening when the launcher starts your application. + +On startup, the launcher searches for its configuration and then +parses it. Once the final configuration is resolved, the launcher +proceeds to obtain the necessary jars to launch the application. The +`boot.directory` property is used as a base directory to retrieve jars +to. Locking is done on the directory, so it can be shared system-wide. +The launcher retrieves the requested version of Scala to + +.. code-block:: console + + ${boot.directory}/${scala.version}/lib/ + +If this directory already exists, the launcher takes a shortcut for +startup performance and assumes that the jars have already been +downloaded. If the directory does not exist, the launcher uses Apache +Ivy to resolve and retrieve the jars. A similar process occurs for the +application itself. It and its dependencies are retrieved to + +.. code-block:: console + + ${boot.directory}/${scala.version}/${app.org}/${app.name}/. + +Once all required code is downloaded, the class loaders are set up. The +launcher creates a class loader for the requested version of Scala. It +then creates a child class loader containing the jars for the requested +'app.components' and with the paths specified in `app.resources`. An +application that does not use components will have all of its jars in +this class loader. + +The main class for the application is then instantiated. It must be a +public class with a public no-argument constructor and must conform to +xsbti.AppMain. The `run` method is invoked and execution passes to the +application. 
The argument to the 'run' method provides configuration +information and a callback to obtain a class loader for any version of +Scala that can be obtained from a repository in [repositories]. The +return value of the run method determines what is done after the +application executes. It can specify that the launcher should restart +the application or that it should exit with the provided exit code. diff --git a/src/sphinx/Launcher/classloaders.png b/src/sphinx/Launcher/classloaders.png new file mode 100644 index 000000000..6f0c1b003 Binary files /dev/null and b/src/sphinx/Launcher/classloaders.png differ diff --git a/src/sphinx/Launcher/index.rst b/src/sphinx/Launcher/index.rst new file mode 100644 index 000000000..fcc5802e9 --- /dev/null +++ b/src/sphinx/Launcher/index.rst @@ -0,0 +1,14 @@ +============== + Sbt Launcher +============== + +The sbt launcher provides a generic container that can load and run programs +resolved using the Ivy dependency manager. Sbt uses this as its own deployment +mechanism. + +.. toctree:: + :maxdepth: 2 + + GettingStarted + Configuration + Architecture \ No newline at end of file diff --git a/src/sphinx/faq.rst b/src/sphinx/faq.rst index 29738dab8..4ce8645f8 100644 --- a/src/sphinx/faq.rst +++ b/src/sphinx/faq.rst @@ -444,24 +444,28 @@ before it is initialized with an empty sequence. settings = Seq( libraryDependencies += "commons-io" % "commons-io" % "1.4" % "test" ) - ) + ).disablePlugins(plugins.IvyModule) } -To correct this, include the default settings, which includes -`libraryDependencies := Seq()`. +To correct this, include the IvyModule plugin settings, which includes +`libraryDependencies := Seq()`. So, we just drop the explicit disabling. 
:: - settings = Defaults.defaultSettings ++ Seq( - libraryDependencies += "commons-io" % "commons-io" % "1.4" % "test" - ) + object MyBuild extends Build { + val root = Project(id = "root", base = file("."), + settings = Seq( + libraryDependencies += "commons-io" % "commons-io" % "1.4" % "test" + ) + ) + } A more subtle variation of this error occurs when using :doc:`scoped settings `. :: // error: Reference to uninitialized setting - settings = Defaults.defaultSettings ++ Seq( + settings = Seq( libraryDependencies += "commons-io" % "commons-io" % "1.2" % "test", fullClasspath := fullClasspath.value.filterNot(_.data.name.contains("commons-io")) ) diff --git a/src/sphinx/index.rst b/src/sphinx/index.rst index 3169538b7..8a07fbe72 100644 --- a/src/sphinx/index.rst +++ b/src/sphinx/index.rst @@ -22,7 +22,6 @@ the :doc:`index of names and types `. Examples/index Name-Index - .. The following includes documents that are not important enough to be in a visible toctree They are linked from other documents, which is enough. 
diff --git a/testing/agent/src/main/java/sbt/ForkMain.java b/testing/agent/src/main/java/sbt/ForkMain.java index a56783fcd..32cbb62ef 100755 --- a/testing/agent/src/main/java/sbt/ForkMain.java +++ b/testing/agent/src/main/java/sbt/ForkMain.java @@ -115,7 +115,6 @@ public class ForkMain { final ObjectOutputStream os = new ObjectOutputStream(socket.getOutputStream()); // Must flush the header that the constructor writes, otherwise the ObjectInputStream on the other end may block indefinitely os.flush(); - try { new Run().run(is, os); } finally { diff --git a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala index 381674e47..fe1baa696 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala @@ -32,12 +32,16 @@ object ContextUtil { def unexpectedTree[C <: Context](tree: C#Tree): Nothing = sys.error("Unexpected macro application tree (" + tree.getClass + "): " + tree) } +// TODO 2.11 Remove this after dropping 2.10.x support. +private object HasCompat { val compat = ??? }; import HasCompat._ + /** Utility methods for macros. Several methods assume that the context's universe is a full compiler (`scala.tools.nsc.Global`). * This is not thread safe due to the underlying Context and related data structures not being thread safe. * Use `ContextUtil[c.type](c)` to construct. 
*/ final class ContextUtil[C <: Context](val ctx: C) { import ctx.universe.{Apply=>ApplyTree,_} + import compat._ val powerContext = ctx.asInstanceOf[reflect.macros.runtime.Context] val global: powerContext.universe.type = powerContext.universe @@ -222,17 +226,20 @@ final class ContextUtil[C <: Context](val ctx: C) object appTransformer extends Transformer { override def transform(tree: Tree): Tree = - tree match - { - case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => subWrapper(nme.decoded, targ.tpe, qual, tree) match { - case Converted.Success(t, finalTx) => finalTx(t) - case Converted.Failure(p,m) => ctx.abort(p, m) - case _: Converted.NotApplicable[_] => super.transform(tree) - } + tree match { + case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => + subWrapper(nme.decoded, targ.tpe, qual, tree) match { + case Converted.Success(t, finalTx) => + changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150 + finalTx(t) + case Converted.Failure(p,m) => ctx.abort(p, m) + case _: Converted.NotApplicable[_] => super.transform(tree) + } case _ => super.transform(tree) } } - - appTransformer.transform(t) + appTransformer.atOwner(initialOwner) { + appTransformer.transform(t) + } } -} \ No newline at end of file +} diff --git a/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala b/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala index 0de166b67..043ad8731 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala @@ -167,7 +167,7 @@ object Instance def addType(tpe: Type, qual: Tree, selection: Tree): Tree = { qual.foreach(checkQual) - val vd = util.freshValDef(tpe, qual.symbol.pos, functionSym) + val vd = util.freshValDef(tpe, qual.pos, functionSym) inputs ::= new Input(tpe, qual, vd) util.refVal(selection, vd) } diff --git a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala 
b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala index e9fb207d8..d9dbebe42 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala @@ -9,11 +9,15 @@ package appmacro /** A `TupleBuilder` that uses a KList as the tuple representation.*/ object KListBuilder extends TupleBuilder { + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { val ctx: c.type = c val util = ContextUtil[c.type](c) import c.universe.{Apply=>ApplyTree,_} + import compat._ import util._ val knilType = c.typeOf[KNil] @@ -24,7 +28,7 @@ object KListBuilder extends TupleBuilder val kconsTC: Type = kconsTpe.typeConstructor /** This is the L in the type function [L[x]] ... */ - val tcVariable: TypeSymbol = newTCVariable(NoSymbol) + val tcVariable: TypeSymbol = newTCVariable(util.initialOwner) /** Instantiates KCons[h, t <: KList[L], L], where L is the type constructor variable */ def kconsType(h: Type, t: Type): Type = @@ -65,4 +69,4 @@ object KListBuilder extends TupleBuilder val alistInstance: ctx.universe.Tree = TypeApply(select(Ident(alist), "klist"), TypeTree(representationC) :: Nil) def extract(param: ValDef) = bindKList(param, Nil, inputs.map(_.local)) } -} \ No newline at end of file +} diff --git a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala index 89fe31792..28fa581a4 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala @@ -14,10 +14,14 @@ object TupleNBuilder extends TupleBuilder final val MaxInputs = 11 final val TupleMethodName = "tuple" + // TODO 2.11 Remove this after dropping 2.10.x support. 
+ private object HasCompat { val compat = ??? }; import HasCompat._ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { val util = ContextUtil[c.type](c) import c.universe.{Apply=>ApplyTree,_} + import compat._ import util._ val global: Global = c.universe.asInstanceOf[Global] @@ -25,7 +29,7 @@ object TupleNBuilder extends TupleBuilder val ctx: c.type = c val representationC: PolyType = { - val tcVariable: Symbol = newTCVariable(NoSymbol) + val tcVariable: Symbol = newTCVariable(util.initialOwner) val tupleTypeArgs = inputs.map(in => typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]) val tuple = global.definitions.tupleType(tupleTypeArgs) PolyType(tcVariable :: Nil, tuple.asInstanceOf[Type] ) diff --git a/util/classpath/src/main/scala/sbt/ModuleUtilities.scala b/util/classpath/src/main/scala/sbt/ModuleUtilities.scala index d939c040b..69dfa31dc 100644 --- a/util/classpath/src/main/scala/sbt/ModuleUtilities.scala +++ b/util/classpath/src/main/scala/sbt/ModuleUtilities.scala @@ -6,7 +6,7 @@ package sbt object ModuleUtilities { /** Reflectively loads and returns the companion object for top-level class `className` from `loader`. - * The class name should not include the `$` that scalac appends to the underlying jvm class for + * The class name should not include the `$` that scalac appends to the underlying jvm class for * a companion object. 
*/ def getObject(className: String, loader: ClassLoader): AnyRef = { @@ -14,4 +14,10 @@ object ModuleUtilities val singletonField = obj.getField("MODULE$") singletonField.get(null) } + + def getCheckedObject[T](className: String, loader: ClassLoader)(implicit mf: reflect.ClassManifest[T]): T = + mf.erasure.cast(getObject(className, loader)).asInstanceOf[T] + + def getCheckedObjects[T](classNames: Seq[String], loader: ClassLoader)(implicit mf: reflect.ClassManifest[T]): Seq[(String,T)] = + classNames.map(name => (name, getCheckedObject(name, loader))) } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/Dag.scala b/util/collection/src/main/scala/sbt/Dag.scala index 4250b0f10..f0594ed50 100644 --- a/util/collection/src/main/scala/sbt/Dag.scala +++ b/util/collection/src/main/scala/sbt/Dag.scala @@ -15,7 +15,7 @@ object Dag import JavaConverters.asScalaSetConverter def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] = topologicalSort(root :: Nil)(dependencies) - + def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = { val discovered = new mutable.HashSet[T] @@ -24,7 +24,7 @@ object Dag def visitAll(nodes: Iterable[T]) = nodes foreach visit def visit(node : T){ if (!discovered(node)) { - discovered(node) = true; + discovered(node) = true; try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c } finished += node; } @@ -33,11 +33,13 @@ object Dag } visitAll(nodes); - + finished.toList; } // doesn't check for cycles - def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = + def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = topologicalSortUnchecked(node :: Nil)(dependencies) + + def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = { val discovered = new mutable.HashSet[T] var finished: List[T] = Nil @@ -45,23 +47,23 @@ object Dag def visitAll(nodes: Iterable[T]) = 
nodes foreach visit def visit(node : T){ if (!discovered(node)) { - discovered(node) = true; + discovered(node) = true; visitAll(dependencies(node)) finished ::= node; } } - visit(node); + visitAll(nodes); finished; } final class Cyclic(val value: Any, val all: List[Any], val complete: Boolean) extends Exception( "Cyclic reference involving " + - (if(complete) all.mkString("\n ", "\n ", "") else value) + (if(complete) all.mkString("\n ", "\n ", "") else value) ) { def this(value: Any) = this(value, value :: Nil, false) override def toString = getMessage - def ::(a: Any): Cyclic = + def ::(a: Any): Cyclic = if(complete) this else if(a == value) @@ -69,5 +71,62 @@ object Dag else new Cyclic(value, a :: all, false) } -} + /** A directed graph with edges labeled positive or negative. */ + private[sbt] trait DirectedSignedGraph[Node] + { + /** Directed edge type that tracks the sign and target (head) vertex. + * The sign can be obtained via [[isNegative]] and the target vertex via [[head]]. */ + type Arrow + /** List of initial nodes. */ + def nodes: List[Arrow] + /** Outgoing edges for `n`. */ + def dependencies(n: Node): List[Arrow] + /** `true` if the edge `a` is "negative", false if it is "positive". */ + def isNegative(a: Arrow): Boolean + /** The target of the directed edge `a`. */ + def head(a: Arrow): Node + } + + /** Traverses a directed graph defined by `graph` looking for a cycle that includes a "negative" edge. + * The directed edges are weighted by the caller as "positive" or "negative". + * If a cycle containing a "negative" edge is detected, its member edges are returned in order. + * Otherwise, the empty list is returned. 
*/ + private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] = + { + import scala.annotation.tailrec + import graph._ + val finished = new mutable.HashSet[Node] + val visited = new mutable.HashSet[Node] + + def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match { + case Nil => Nil + case edge :: tail => + val node = head(edge) + if(!visited(node)) + { + visited += node + visit(dependencies(node), edge :: stack) match { + case Nil => + finished += node + visit(tail, stack) + case cycle => cycle + } + } + else if(!finished(node)) + { + // cycle. If a negative edge is involved, it is an error. + val between = edge :: stack.takeWhile(f => head(f) != node) + if(between exists isNegative) + between + else + visit(tail, stack) + } + else + visit(tail, stack) + } + + visit(graph.nodes, Nil) + } + +} diff --git a/util/logic/src/main/scala/sbt/logic/Logic.scala b/util/logic/src/main/scala/sbt/logic/Logic.scala new file mode 100644 index 000000000..4eb8e64b1 --- /dev/null +++ b/util/logic/src/main/scala/sbt/logic/Logic.scala @@ -0,0 +1,325 @@ +package sbt +package logic + + import scala.annotation.tailrec + import Formula.{And, True} + +/* +Defines a propositional logic with negation as failure and only allows stratified rule sets (negation must be acyclic) in order to have a unique minimal model. + +For example, this is not allowed: + + p :- not q + + q :- not p +but this is: + + p :- q + + q :- p +as is this: + + p :- q + + q := not r + + + Some useful links: + + https://en.wikipedia.org/wiki/Nonmonotonic_logic + + https://en.wikipedia.org/wiki/Negation_as_failure + + https://en.wikipedia.org/wiki/Propositional_logic + + https://en.wikipedia.org/wiki/Stable_model_semantics + + http://www.w3.org/2005/rules/wg/wiki/negation +*/ + + +/** Disjunction (or) of the list of clauses. 
*/ +final case class Clauses(clauses: List[Clause]) { + assert(clauses.nonEmpty, "At least one clause is required.") +} + +/** When the `body` Formula succeeds, atoms in `head` are true. */ +final case class Clause(body: Formula, head: Set[Atom]) + +/** A literal is an [[Atom]] or its [[negation|Negated]]. */ +sealed abstract class Literal extends Formula { + /** The underlying (positive) atom. */ + def atom: Atom + /** Negates this literal.*/ + def unary_! : Literal +} +/** A variable with name `label`. */ +final case class Atom(label: String) extends Literal { + def atom = this + def unary_! : Negated = Negated(this) +} +/** A negated atom, in the sense of negation as failure, not logical negation. +* That is, it is true if `atom` is not known/defined. */ +final case class Negated(atom: Atom) extends Literal { + def unary_! : Atom = atom +} + +/** A formula consists of variables, negation, and conjunction (and). +* (Disjunction is not currently included- it is modeled at the level of a sequence of clauses. +* This is less convenient when defining clauses, but is not less powerful.) */ +sealed abstract class Formula { + /** Constructs a clause that proves `atoms` when this formula is true. */ + def proves(atom: Atom, atoms: Atom*): Clause = Clause(this, (atom +: atoms).toSet) + + /** Constructs a formula that is true iff this formula and `f` are both true.*/ + def && (f: Formula): Formula = (this, f) match { + case (True, x) => x + case (x, True) => x + case (And(as), And(bs)) => And(as ++ bs) + case (And(as), b: Literal) => And(as + b) + case (a: Literal, And(bs)) => And(bs + a) + case (a: Literal, b: Literal) => And( Set(a,b) ) + } +} + + +object Formula { + /** A conjunction of literals. 
*/ + final case class And(literals: Set[Literal]) extends Formula { + assert(literals.nonEmpty, "'And' requires at least one literal.") + } + final case object True extends Formula +} + +object Logic +{ + def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Either[LogicException, Matched] = + reduce(Clauses(clauses), initialFacts) + + /** Computes the variables in the unique stable model for the program represented by `clauses` and `initialFacts`. + * `clause` may not have any negative feedback (that is, negation is acyclic) + * and `initialFacts` cannot be in the head of any clauses in `clause`. + * These restrictions ensure that the logic program has a unique minimal model. */ + def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] = + { + val (posSeq, negSeq) = separate(initialFacts.toSeq) + val (pos, neg) = (posSeq.toSet, negSeq.toSet) + + val problem = + checkContradictions(pos, neg) orElse + checkOverlap(clauses, pos) orElse + checkAcyclic(clauses) + + problem.toLeft( + reduce0(clauses, initialFacts, Matched.empty) + ) + } + + + /** Verifies `initialFacts` are not in the head of any `clauses`. + * This avoids the situation where an atom is proved but no clauses prove it. + * This isn't necessarily a problem, but the main sbt use cases expects + * a proven atom to have at least one clause satisfied. 
*/ + private[this] def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]): Option[InitialOverlap] = { + val as = atoms(clauses) + val initialOverlap = initialFacts.filter(as.inHead) + if(initialOverlap.nonEmpty) Some(new InitialOverlap(initialOverlap)) else None + } + + private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]): Option[InitialContradictions] = { + val contradictions = pos intersect neg + if(contradictions.nonEmpty) Some(new InitialContradictions(contradictions)) else None + } + + private[this] def checkAcyclic(clauses: Clauses): Option[CyclicNegation] = { + val deps = dependencyMap(clauses) + val cycle = Dag.findNegativeCycle(graph(deps)) + if(cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None + } + private[this] def graph(deps: Map[Atom, Set[Literal]]) = new Dag.DirectedSignedGraph[Atom] { + type Arrow = Literal + def nodes = deps.keys.toList + def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList + def isNegative(b: Literal) = b match { + case Negated(_) => true + case Atom(_) => false + } + def head(b: Literal) = b.atom + } + + private[this] def dependencyMap(clauses: Clauses): Map[Atom, Set[Literal]] = + (Map.empty[Atom, Set[Literal]] /: clauses.clauses) { + case (m, Clause(formula, heads)) => + val deps = literals(formula) + (m /: heads) { (n, head) => n.updated(head, n.getOrElse(head, Set.empty) ++ deps) } + } + + sealed abstract class LogicException(override val toString: String) + final class InitialContradictions(val literals: Set[Atom]) extends LogicException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) + final class InitialOverlap(val literals: Set[Atom]) extends LogicException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) + final class CyclicNegation(val cycle: List[Literal]) extends LogicException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) + + /** Tracks proven atoms in the reverse order they 
were proved. */ + final class Matched private(val provenSet: Set[Atom], reverseOrdered: List[Atom]) { + def add(atoms: Set[Atom]): Matched = add(atoms.toList) + def add(atoms: List[Atom]): Matched = { + val newOnly = atoms.filterNot(provenSet) + new Matched(provenSet ++ newOnly, newOnly ::: reverseOrdered) + } + def ordered: List[Atom] = reverseOrdered.reverse + override def toString = ordered.map(_.label).mkString("Matched(", ",", ")") + } + object Matched { + val empty = new Matched(Set.empty, Nil) + } + + /** Separates a sequence of literals into `(pos, neg)` atom sequences. */ + private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) = Util.separate(lits) { + case a: Atom => Left(a) + case Negated(n) => Right(n) + } + + /** Finds clauses that have no body and thus prove their head. + * Returns `(, )`. */ + private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) = + { + val (proven, unproven) = c.clauses.partition(_.body == True) + (proven.flatMap(_.head).toSet, unproven) + } + private[this] def keepPositive(lits: Set[Literal]): Set[Atom] = + lits.collect{ case a: Atom => a}.toSet + + // precondition: factsToProcess contains no contradictions + @tailrec + private[this] def reduce0(clauses: Clauses, factsToProcess: Set[Literal], state: Matched): Matched = + applyAll(clauses, factsToProcess) match { + case None => // all of the remaining clauses failed on the new facts + state + case Some(applied) => + val (proven, unprovenClauses) = findProven(applied) + val processedFacts = state add keepPositive(factsToProcess) + val newlyProven = proven -- processedFacts.provenSet + val newState = processedFacts add newlyProven + if(unprovenClauses.isEmpty) + newState // no remaining clauses, done. 
+ else { + val unproven = Clauses(unprovenClauses) + val nextFacts: Set[Literal] = if(newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven) + reduce0(unproven, nextFacts, newState) + } + } + + /** Finds negated atoms under the negation as failure rule and returns them. + * This should be called only after there are no more known atoms to be substituted. */ + private[this] def inferFailure(clauses: Clauses): Set[Literal] = + { + /* At this point, there is at least one clause and one of the following is the case as the result of the acyclic negation rule: + i. there is at least one variable that occurs in a clause body but not in the head of a clause + ii. there is at least one variable that occurs in the head of a clause and does not transitively depend on a negated variable + In either case, each such variable x cannot be proven true and therefore proves 'not x' (negation as failure, !x in the code). + */ + val allAtoms = atoms(clauses) + val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse) + if(newFacts.nonEmpty) + newFacts + else { + val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty) + val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue) + if(newlyFalse.nonEmpty) + newlyFalse + else // should never happen due to the acyclic negation rule + error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue") + } + } + + private[this] def negated(atoms: Set[Atom]): Set[Literal] = atoms.map(a => Negated(a)) + + /** Computes the set of atoms in `clauses` that directly or transitively take a negated atom as input. 
+ * For example, for the following clauses, this method would return `List(a, d)` : + * a :- b, not c + * d :- a + */ + @tailrec + def hasNegatedDependency(clauses: Seq[Clause], posDeps: Relation[Atom, Atom], negDeps: Relation[Atom, Atom]): List[Atom] = + clauses match { + case Seq() => + // because cycles between positive literals are allowed, this isn't strictly a topological sort + Dag.topologicalSortUnchecked(negDeps._1s)(posDeps.reverse) + case Clause(formula, head) +: tail => + // collect direct positive and negative literals and track them in separate graphs + val (pos, neg) = directDeps(formula) + val (newPos, newNeg) = ( (posDeps, negDeps) /: head) { case ( (pdeps, ndeps), d) => + (pdeps + (d, pos), ndeps + (d, neg) ) + } + hasNegatedDependency(tail, newPos, newNeg) + } + + /** Computes the `(positive, negative)` literals in `formula`. */ + private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) = + Util.separate(literals(formula).toSeq) { + case Negated(a) => Right(a) + case a: Atom => Left(a) + } + private[this] def literals(formula: Formula): Set[Literal] = formula match { + case And(lits) => lits + case l: Literal => Set(l) + case True => Set.empty + } + + /** Computes the atoms in the heads and bodies of the clauses in `clause`. */ + def atoms(cs: Clauses): Atoms = cs.clauses.map(c => Atoms(c.head, atoms(c.body))).reduce(_ ++ _) + + /** Computes the set of all atoms in `formula`. */ + def atoms(formula: Formula): Set[Atom] = formula match { + case And(lits) => lits.map(_.atom) + case Negated(lit) => Set(lit) + case a: Atom => Set(a) + case True => Set() + } + + /** Represents the set of atoms in the heads of clauses and in the bodies (formulas) of clauses. */ + final case class Atoms(val inHead: Set[Atom], val inFormula: Set[Atom]) { + /** Concatenates this with `as`. */ + def ++ (as: Atoms): Atoms = Atoms(inHead ++ as.inHead, inFormula ++ as.inFormula) + /** Atoms that cannot be true because they do not occur in a head. 
*/ + def triviallyFalse: Set[Atom] = inFormula -- inHead + } + + /** Applies known facts to `clause`s, deriving a new, possibly empty list of clauses. + * 1. If a fact is in the body of a clause, the derived clause has that fact removed from the body. + * 2. If the negation of a fact is in a body of a clause, that clause fails and is removed. + * 3. If a fact or its negation is in the head of a clause, the derived clause has that fact (or its negation) removed from the head. + * 4. If a head is empty, the clause proves nothing and is removed. + * + * NOTE: empty bodies do not cause a clause to succeed yet. + * All known facts must be applied before this can be done in order to avoid inconsistencies. + * Precondition: no contradictions in `facts` + * Postcondition: no atom in `facts` is present in the result + * Postcondition: No clauses have an empty head + * */ + def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] = + { + val newClauses = + if(facts.isEmpty) + cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head + else + cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList) + if(newClauses.isEmpty) None else Some(Clauses(newClauses)) + } + + def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] = + { + val atoms = facts.map(_.atom) + val newHead = c.head -- atoms // 3. + if(newHead.isEmpty) // 4. empty head + None + else + substitute(c.body, facts).map( f => Clause(f, newHead) ) // 1, 2 + } + + /** Derives the formula that results from substituting `facts` into `formula`. */ + @tailrec + def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match { + case And(lits) => + def negated(lits: Set[Literal]): Set[Literal] = lits.map(a => !a) + if( lits.exists( negated(facts) ) ) // 2. + None + else { + val newLits = lits -- facts + val newF = if(newLits.isEmpty) True else And(newLits) + Some(newF) // 1. 
+ } + case True => Some(True) + case lit: Literal => // define in terms of And + substitute(And(Set(lit)), facts) + } +} diff --git a/util/logic/src/test/scala/sbt/logic/Test.scala b/util/logic/src/test/scala/sbt/logic/Test.scala new file mode 100644 index 000000000..cf50ef9fd --- /dev/null +++ b/util/logic/src/test/scala/sbt/logic/Test.scala @@ -0,0 +1,117 @@ +package sbt +package logic + + import org.scalacheck._ + import Prop.secure + import Logic.{LogicException, Matched} + +object LogicTest extends Properties("Logic") +{ + import TestClauses._ + + property("Handles trivial resolution.") = secure( expect(trivial, Set(A) ) ) + property("Handles less trivial resolution.") = secure( expect(lessTrivial, Set(B,A,D)) ) + property("Handles cycles without negation") = secure( expect(cycles, Set(F,A,B)) ) + property("Handles basic exclusion.") = secure( expect(excludedPos, Set()) ) + property("Handles exclusion of head proved by negation.") = secure( expect(excludedNeg, Set()) ) + // TODO: actually check ordering, probably as part of a check that dependencies are satisifed + property("Properly orders results.") = secure( expect(ordering, Set(B,A,C,E,F))) + property("Detects cyclic negation") = secure( + Logic.reduceAll(badClauses, Set()) match { + case Right(res) => false + case Left(err: Logic.CyclicNegation) => true + case Left(err) => error(s"Expected cyclic error, got: $err") + } + ) + + def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match { + case Left(err) => false + case Right(res) => + val actual = res.provenSet + (actual == expected) || error(s"Expected to prove $expected, but actually proved $actual") + } +} + +object TestClauses +{ + + val A = Atom("A") + val B = Atom("B") + val C = Atom("C") + val D = Atom("D") + val E = Atom("E") + val F = Atom("F") + val G = Atom("G") + + val clauses = + A.proves(B) :: + A.proves(F) :: + B.proves(F) :: + F.proves(A) :: + (!C).proves(F) :: + D.proves(C) :: + C.proves(D) :: + Nil + + val 
cycles = Logic.reduceAll(clauses, Set()) + + val badClauses = + A.proves(D) :: + clauses + + val excludedNeg = { + val cs = + (!A).proves(B) :: + Nil + val init = + (!A) :: + (!B) :: + Nil + Logic.reduceAll(cs, init.toSet) + } + + val excludedPos = { + val cs = + A.proves(B) :: + Nil + val init = + A :: + (!B) :: + Nil + Logic.reduceAll(cs, init.toSet) + } + + val trivial = { + val cs = + Formula.True.proves(A) :: + Nil + Logic.reduceAll(cs, Set.empty) + } + + val lessTrivial = { + val cs = + Formula.True.proves(A) :: + Formula.True.proves(B) :: + (A && B && (!C)).proves(D) :: + Nil + Logic.reduceAll(cs, Set()) + } + + val ordering = { + val cs = + E.proves(F) :: + (C && !D).proves(E) :: + (A && B).proves(C) :: + Nil + Logic.reduceAll(cs, Set(A,B)) + } + + def all { + println(s"Cycles: $cycles") + println(s"xNeg: $excludedNeg") + println(s"xPos: $excludedPos") + println(s"trivial: $trivial") + println(s"lessTrivial: $lessTrivial") + println(s"ordering: $ordering") + } +} diff --git a/util/relation/src/main/scala/sbt/Relation.scala b/util/relation/src/main/scala/sbt/Relation.scala index 725512d0b..77c0b70c2 100644 --- a/util/relation/src/main/scala/sbt/Relation.scala +++ b/util/relation/src/main/scala/sbt/Relation.scala @@ -40,7 +40,7 @@ object Relation private[sbt] def get[X,Y](map: M[X,Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y]) - private[sbt] type M[X,Y] = Map[X, Set[Y]] + private[sbt] type M[X,Y] = Map[X, Set[Y]] } /** Binary relation between A and B. It is a set of pairs (_1, _2) for _1 in A, _2 in B. 
*/ @@ -111,7 +111,7 @@ private final class MRelation[A,B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) ext { def forwardMap = fwd def reverseMap = rev - + def forward(t: A) = get(fwd, t) def reverse(t: B) = get(rev, t) @@ -119,12 +119,12 @@ private final class MRelation[A,B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) ext def _2s = rev.keySet def size = (fwd.valuesIterator map { _.size }).foldLeft(0)(_ + _) - + def all: Traversable[(A,B)] = fwd.iterator.flatMap { case (a, bs) => bs.iterator.map( b => (a,b) ) }.toTraversable def +(pair: (A,B)) = this + (pair._1, Set(pair._2)) def +(from: A, to: B) = this + (from, to :: Nil) - def +(from: A, to: Traversable[B]) = + def +(from: A, to: Traversable[B]) = if(to.isEmpty) this else new MRelation( add(fwd, from, to), (rev /: to) { (map, t) => add(map, t, from :: Nil) }) def ++(rs: Traversable[(A,B)]) = ((this: Relation[A,B]) /: rs) { _ + _ }