diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f9c54e533..c9fa68c64 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -84,6 +84,22 @@ Whether implementing a new feature, fixing a bug, or modifying documentation, pl
 Binary compatible changes will be backported to a previous series (currently, 0.12.x) at the time of the next stable release. See below for instructions on building sbt from source.
+All pull requests are required to include a "Notes" file which documents the change. This file should reside in the
+directory:
+
+    notes/
+      <version>/
+        <name>.md
+
+Notes files should have the following contents:
+
+* A bullet-item description under one of the following sections:
+    - `### Bug fixes`
+    - `### Improvements`
+    - `### Fixes with compatibility implications`
+* A complete section describing new features.
+
 Documentation
 -------------
diff --git a/compile/inc/src/main/scala/sbt/inc/Relations.scala b/compile/inc/src/main/scala/sbt/inc/Relations.scala
index f244c431f..6a60ca418 100644
--- a/compile/inc/src/main/scala/sbt/inc/Relations.scala
+++ b/compile/inc/src/main/scala/sbt/inc/Relations.scala
@@ -175,9 +175,67 @@ trait Relations {
    * Relation between source files and _unqualified_ term and type names used in given source file.
    */
   private[inc] def names: Relation[File, String]
+
+  /**
+   * List of all the pairs (header, relation) that sbt knows of.
+   * Used by TextAnalysisFormat to persist relations.
+   * This cannot be stored as a Map because the order is important.
+   */
+  private[inc] def allRelations: List[(String, Relation[File, _])]
 }
 
 object Relations {
+
+  /**
+   * Represents all the relations that sbt knows of, along with a way to recreate each
+   * of their elements from its string representation.
+   */
+  private[inc] val existingRelations = {
+    val string2File: String => File = new File(_)
+    List(
+      ("products", string2File),
+      ("binary dependencies", string2File),
+      ("direct source dependencies", string2File),
+      ("direct external dependencies", identity[String] _),
+      ("public inherited source dependencies", string2File),
+      ("public inherited external dependencies", identity[String] _),
+      ("member reference internal dependencies", string2File),
+      ("member reference external dependencies", identity[String] _),
+      ("inheritance internal dependencies", string2File),
+      ("inheritance external dependencies", identity[String] _),
+      ("class names", identity[String] _),
+      ("used names", identity[String] _))
+  }
+  /**
+   * Reconstructs a Relations instance from a list of Relation values.
+   * The order in which the relations are read matters and is defined by `existingRelations`.
+ */ + def construct(nameHashing: Boolean, relations: List[Relation[_, _]]) = + relations match { + case p :: bin :: di :: de :: pii :: pie :: mri :: mre :: ii :: ie :: cn :: un :: Nil => + val srcProd = p.asInstanceOf[Relation[File, File]] + val binaryDep = bin.asInstanceOf[Relation[File, File]] + val directSrcDeps = makeSource(di.asInstanceOf[Relation[File, File]], de.asInstanceOf[Relation[File, String]]) + val publicInheritedSrcDeps = makeSource(pii.asInstanceOf[Relation[File, File]], pie.asInstanceOf[Relation[File, String]]) + val memberRefSrcDeps = makeSourceDependencies(mri.asInstanceOf[Relation[File, File]], mre.asInstanceOf[Relation[File, String]]) + val inheritanceSrcDeps = makeSourceDependencies(ii.asInstanceOf[Relation[File, File]], ie.asInstanceOf[Relation[File, String]]) + val classes = cn.asInstanceOf[Relation[File, String]] + val names = un.asInstanceOf[Relation[File, String]] + + // we don't check for emptiness of publicInherited/inheritance relations because + // we assume that invariant that says they are subsets of direct/memberRef holds + assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), "When name hashing is disabled the `memberRef` relation should be empty.") + assert(!nameHashing || (directSrcDeps == emptySource), "When name hashing is enabled the `direct` relation should be empty.") + + if (nameHashing) + Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) + else { + assert(names.all.isEmpty, s"When `nameHashing` is disabled `names` relation should be empty: $names") + Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) + } + case _ => throw new java.io.IOException(s"Expected to read ${existingRelations.length} relations but read ${relations.length}.") + } + /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) { def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external) @@ -403,6 +461,23 @@ private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Re case _ => false } + def allRelations = { + val rels = List( + srcProd, + binaryDep, + direct.internal, + direct.external, + publicInherited.internal, + publicInherited.external, + Relations.emptySourceDependencies.internal, // Default implementation doesn't provide memberRef source deps + Relations.emptySourceDependencies.external, // Default implementation doesn't provide memberRef source deps + Relations.emptySourceDependencies.internal, // Default implementation doesn't provide inheritance source deps + Relations.emptySourceDependencies.external, // Default implementation doesn't provide inheritance source deps + classes, + Relation.empty[File, String]) // Default implementation doesn't provide used names relation + Relations.existingRelations map (_._1) zip rels + } + override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode override def toString = ( @@ -490,6 +565,23 @@ private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Re case _ => false } + def allRelations = { + val rels = List( + srcProd, + binaryDep, + Relations.emptySource.internal, // NameHashing doesn't provide direct dependencies + Relations.emptySource.external, // NameHashing doesn't provide direct dependencies + Relations.emptySource.internal, // 
NameHashing doesn't provide public inherited dependencies + Relations.emptySource.external, // NameHashing doesn't provide public inherited dependencies + memberRef.internal, + memberRef.external, + inheritance.internal, + inheritance.external, + classes, + names) + Relations.existingRelations map (_._1) zip rels + } + override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode override def toString = ( diff --git a/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala b/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala index 13a2b1cc6..437176881 100644 --- a/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala +++ b/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala @@ -65,7 +65,9 @@ object AnalysisTest extends Properties("Analysis") { // Merge and split large, generated examples. // Mustn't shrink, as the default Shrink[Int] doesn't respect the lower bound of choose(), which will cause // a divide-by-zero error masking the original error. - property("Complex Merge and Split") = forAllNoShrink(genAnalysis, choose(1, 10)) { (analysis: Analysis, numSplits: Int) => + // Note that the generated Analyses have nameHashing = false (Grouping of Analyses with name hashing enabled + // is not supported right now) + property("Complex Merge and Split") = forAllNoShrink(genAnalysis(nameHashing = false), choose(1, 10)) { (analysis: Analysis, numSplits: Int) => val grouped: Map[Int, Analysis] = analysis.groupBy({ f: File => abs(f.hashCode()) % numSplits }) def getGroup(i: Int): Analysis = grouped.getOrElse(i, Analysis.empty(false)) val splits = (Range(0, numSplits) map getGroup).toList diff --git a/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala b/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala index f260a0fbf..2c8d7a505 100644 --- a/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala +++ b/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala @@ -25,9 +25,18 @@ object TestCaseGenerators { // Ensure that we generate unique class names and file paths every time. // Using repeated strings may lead to all sorts of undesirable interactions. - val used = scala.collection.mutable.Set.empty[String] + val used1 = scala.collection.mutable.Set.empty[String] + val used2 = scala.collection.mutable.Set.empty[String] - def unique[T](g: Gen[T]) = g retryUntil { o: T => used.add(o.toString) } + // When using `retryUntil`, the condition is actually tested twice (see implementation in ScalaCheck), + // which is why we need to insert twice the element. + // If the element is present in both sets, then it has already been used. 
+ def unique[T](g: Gen[T]) = g retryUntil { o: T => + if (used1.add(o.toString)) + true + else + used2.add(o.toString) + } def identifier: Gen[String] = sized { size => resize(Math.max(size, 3), Gen.identifier) @@ -134,6 +143,18 @@ object TestCaseGenerators { external <- someOf(src.external.all.toList) } yield Relations.makeSource(Relation.empty ++ internal, Relation.empty ++ external) + def genRSourceDependencies(srcs: List[File]): Gen[Relations.SourceDependencies] = for { + internal <- listOfN(srcs.length, someOf(srcs)) + external <- genStringRelation(srcs) + } yield Relations.makeSourceDependencies( + Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap), + external) + + def genSubRSourceDependencies(src: Relations.SourceDependencies): Gen[Relations.SourceDependencies] = for { + internal <- someOf(src.internal.all.toList) + external <- someOf(src.external.all.toList) + } yield Relations.makeSourceDependencies(Relation.empty ++ internal, Relation.empty ++ external) + def genRelations: Gen[Relations] = for { numSrcs <- choose(0, maxSources) srcs <- listOfN(numSrcs, genFile) @@ -145,8 +166,19 @@ object TestCaseGenerators { } yield Relations.make(srcProd, binaryDep, direct, publicInherited, classes) - def genAnalysis: Gen[Analysis] = for { - rels <- genRelations + def genRelationsNameHashing: Gen[Relations] = for { + numSrcs <- choose(0, maxSources) + srcs <- listOfN(numSrcs, genFile) + srcProd <- genFileRelation(srcs) + binaryDep <- genFileRelation(srcs) + memberRef <- genRSourceDependencies(srcs) + inheritance <- genSubRSourceDependencies(memberRef) + classes <- genStringRelation(srcs) + names <- genStringRelation(srcs) + } yield Relations.make(srcProd, binaryDep, memberRef, inheritance, classes, names) + + def genAnalysis(nameHashing: Boolean): Gen[Analysis] = for { + rels <- if (nameHashing) genRelationsNameHashing else genRelations stamps <- genStamps(rels) apis <- genAPIs(rels) } yield new MAnalysis(stamps, apis, rels, SourceInfos.empty, Compilations.empty) diff --git a/compile/interface/src/main/scala/xsbt/Dependency.scala b/compile/interface/src/main/scala/xsbt/Dependency.scala index b2b4e012d..5fb688c73 100644 --- a/compile/interface/src/main/scala/xsbt/Dependency.scala +++ b/compile/interface/src/main/scala/xsbt/Dependency.scala @@ -102,6 +102,16 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile { } private class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser { + + /* + * Some macros appear to contain themselves as original tree. + * We must check that we don't inspect the same tree over and over. 
+ * See https://issues.scala-lang.org/browse/SI-8486 + * https://github.com/sbt/sbt/issues/1237 + * https://github.com/sbt/sbt/issues/1544 + */ + private val inspectedOriginalTrees = collection.mutable.Set.empty[Tree] + override def traverse(tree: Tree): Unit = { tree match { case Import(expr, selectors) => @@ -118,13 +128,13 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile { case select: Select => addDependency(select.symbol) /* - * Idents are used in number of situations: - * - to refer to local variable - * - to refer to a top-level package (other packages are nested selections) - * - to refer to a term defined in the same package as an enclosing class; - * this looks fishy, see this thread: - * https://groups.google.com/d/topic/scala-internals/Ms9WUAtokLo/discussion - */ + * Idents are used in number of situations: + * - to refer to local variable + * - to refer to a top-level package (other packages are nested selections) + * - to refer to a term defined in the same package as an enclosing class; + * this looks fishy, see this thread: + * https://groups.google.com/d/topic/scala-internals/Ms9WUAtokLo/discussion + */ case ident: Ident => addDependency(ident.symbol) case typeTree: TypeTree => @@ -136,13 +146,7 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile { deps.foreach(addDependency) case Template(parents, self, body) => traverseTrees(body) - /* - * Some macros appear to contain themselves as original tree - * In this case, we don't need to inspect the original tree because - * we already inspected its expansion, which is equal. - * See https://issues.scala-lang.org/browse/SI-8486 - */ - case MacroExpansionOf(original) if original != tree => + case MacroExpansionOf(original) if inspectedOriginalTrees.add(original) => this.traverse(original) case other => () } @@ -191,4 +195,4 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile { // for Scala 2.8 and 2.9 this method is provided through SymbolCompat sym.enclosingTopLevelClass -} +} \ No newline at end of file diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index 7f3fd6f3b..3749298ea 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -122,7 +122,13 @@ object TextAnalysisFormat { } def write(out: Writer, relations: Relations) { - def writeRelation[T](header: String, rel: Relation[File, T])(implicit ord: Ordering[T]) { + // This ordering is used to persist all values in order. Since all values will be + // persisted using their string representation, it makes sense to sort them using + // their string representation. + val toStringOrd = new Ordering[Any] { + def compare(a: Any, b: Any) = a.toString compare b.toString + } + def writeRelation[T](header: String, rel: Relation[File, T]) { writeHeader(out, header) writeSize(out, rel.size) // We sort for ease of debugging and for more efficient reconstruction when reading. 
@@ -131,38 +137,15 @@ object TextAnalysisFormat { rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => val kStr = k.toString - vs.toSeq.sorted foreach { v => + vs.toSeq.sorted(toStringOrd) foreach { v => out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n") } } } - val nameHashing = relations.nameHashing - writeRelation(Headers.srcProd, relations.srcProd) - writeRelation(Headers.binaryDep, relations.binaryDep) - - val direct = if (nameHashing) Relations.emptySource else relations.direct - val publicInherited = if (nameHashing) - Relations.emptySource else relations.publicInherited - - val memberRef = if (nameHashing) - relations.memberRef else Relations.emptySourceDependencies - val inheritance = if (nameHashing) - relations.inheritance else Relations.emptySourceDependencies - val names = if (nameHashing) relations.names else Relation.empty[File, String] - - writeRelation(Headers.directSrcDep, direct.internal) - writeRelation(Headers.directExternalDep, direct.external) - writeRelation(Headers.internalSrcDepPI, publicInherited.internal) - writeRelation(Headers.externalDepPI, publicInherited.external) - - writeRelation(Headers.memberRefInternalDep, memberRef.internal) - writeRelation(Headers.memberRefExternalDep, memberRef.external) - writeRelation(Headers.inheritanceInternalDep, inheritance.internal) - writeRelation(Headers.inheritanceExternalDep, inheritance.external) - - writeRelation(Headers.classes, relations.classes) - writeRelation(Headers.usedNames, names) + relations.allRelations.foreach { + case (header, rel) => writeRelation(header, rel) + } } def read(in: BufferedReader, nameHashing: Boolean): Relations = { @@ -186,56 +169,9 @@ object TextAnalysisFormat { Relation.reconstruct(forward.toMap) } - def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) }) - def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String]) + val relations = Relations.existingRelations map { case (header, fun) => readRelation(header, fun) } - val srcProd = readFileRelation(Headers.srcProd) - val binaryDep = readFileRelation(Headers.binaryDep) - - import sbt.inc.Relations.{ - Source, - SourceDependencies, - makeSourceDependencies, - emptySource, - makeSource, - emptySourceDependencies - } - val directSrcDeps: Source = { - val internalSrcDep = readFileRelation(Headers.directSrcDep) - val externalDep = readStringRelation(Headers.directExternalDep) - makeSource(internalSrcDep, externalDep) - } - val publicInheritedSrcDeps: Source = { - val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI) - val externalDepPI = readStringRelation(Headers.externalDepPI) - makeSource(internalSrcDepPI, externalDepPI) - } - val memberRefSrcDeps: SourceDependencies = { - val internalMemberRefDep = readFileRelation(Headers.memberRefInternalDep) - val externalMemberRefDep = readStringRelation(Headers.memberRefExternalDep) - makeSourceDependencies(internalMemberRefDep, externalMemberRefDep) - } - val inheritanceSrcDeps: SourceDependencies = { - val internalInheritanceDep = readFileRelation(Headers.inheritanceInternalDep) - val externalInheritanceDep = readStringRelation(Headers.inheritanceExternalDep) - makeSourceDependencies(internalInheritanceDep, externalInheritanceDep) - } - // we don't check for emptiness of publicInherited/inheritance relations because - // we assume that invariant that says they are subsets of direct/memberRef holds - assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), - "When name 
hashing is disabled the `memberRef` relation should be empty.") - assert(!nameHashing || (directSrcDeps == emptySource), - "When name hashing is enabled the `direct` relation should be empty.") - val classes = readStringRelation(Headers.classes) - val names = readStringRelation(Headers.usedNames) - - if (nameHashing) - Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) - else { - assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " + - s"should be empty: $names") - Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) - } + Relations.construct(nameHashing, relations) } } diff --git a/compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala b/compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala new file mode 100644 index 000000000..1b8c4cb15 --- /dev/null +++ b/compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala @@ -0,0 +1,112 @@ +package sbt +package inc + +import java.io.{ BufferedReader, File, StringReader, StringWriter } +import scala.math.abs +import org.scalacheck._ +import Gen._ +import Prop._ + +object TextAnalysisFormatTest extends Properties("TextAnalysisFormat") { + + val nameHashing = true + val dummyOutput = new xsbti.compile.SingleOutput { def outputDirectory: java.io.File = new java.io.File("dummy") } + val commonSetup = new CompileSetup(dummyOutput, new CompileOptions(Nil, Nil), "2.10.4", xsbti.compile.CompileOrder.Mixed, nameHashing) + val commonHeader = """format version: 5 + |output mode: + |1 items + |0 -> single + |output directories: + |1 items + |output dir -> dummy + |compile options: + |0 items + |javac options: + |0 items + |compiler version: + |1 items + |0 -> 2.10.4 + |compile order: + |1 items + |0 -> Mixed + |name hashing: + |1 items + |0 -> true""".stripMargin + + property("Write and read empty Analysis") = { + + val writer = new StringWriter + val analysis = Analysis.empty(nameHashing) + TextAnalysisFormat.write(writer, analysis, commonSetup) + + val result = writer.toString + + result.startsWith(commonHeader) + val reader = new BufferedReader(new StringReader(result)) + + val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader) + + analysis == readAnalysis + + } + + property("Write and read simple Analysis") = { + + import TestCaseGenerators._ + + def f(s: String) = new File(s) + val aScala = f("A.scala") + val bScala = f("B.scala") + val aSource = genSource("A" :: "A$" :: Nil).sample.get + val bSource = genSource("B" :: "B$" :: Nil).sample.get + val cSource = genSource("C" :: Nil).sample.get + val exists = new Exists(true) + val sourceInfos = SourceInfos.makeInfo(Nil, Nil) + + var analysis = Analysis.empty(nameHashing) + analysis = analysis.addProduct(aScala, f("A.class"), exists, "A") + analysis = analysis.addProduct(aScala, f("A$.class"), exists, "A$") + analysis = analysis.addSource(aScala, aSource, exists, Nil, Nil, sourceInfos) + analysis = analysis.addBinaryDep(aScala, f("x.jar"), "x", exists) + analysis = analysis.addExternalDep(aScala, "C", cSource, inherited = false) + + val writer = new StringWriter + + TextAnalysisFormat.write(writer, analysis, commonSetup) + + val result = writer.toString + + result.startsWith(commonHeader) + val reader = new BufferedReader(new StringReader(result)) + + val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader) + + compare(analysis, readAnalysis) + + } + + property("Write and read complex Analysis") = 
forAllNoShrink(TestCaseGenerators.genAnalysis(nameHashing)) { analysis: Analysis =>
+    val writer = new StringWriter
+
+    TextAnalysisFormat.write(writer, analysis, commonSetup)
+
+    val result = writer.toString
+
+    result.startsWith(commonHeader)
+    val reader = new BufferedReader(new StringReader(result))
+
+    val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader)
+
+    compare(analysis, readAnalysis)
+  }
+
+  // Compare two analyses with useful labelling when they aren't equal.
+  private[this] def compare(left: Analysis, right: Analysis): Prop =
+    s" LEFT: $left" |:
+      s"RIGHT: $right" |:
+      s"STAMPS EQUAL: ${left.stamps == right.stamps}" |:
+      s"APIS EQUAL: ${left.apis == right.apis}" |:
+      s"RELATIONS EQUAL: ${left.relations == right.relations}" |:
+      "UNEQUAL" |:
+      (left == right)
+}
\ No newline at end of file
diff --git a/ivy/src/main/scala/org/apache/ivy/plugins/parser/m2/ReplaceMavenConfigurationMappings.scala b/ivy/src/main/scala/org/apache/ivy/plugins/parser/m2/ReplaceMavenConfigurationMappings.scala
new file mode 100644
index 000000000..68a9dad40
--- /dev/null
+++ b/ivy/src/main/scala/org/apache/ivy/plugins/parser/m2/ReplaceMavenConfigurationMappings.scala
@@ -0,0 +1,113 @@
+package org.apache.ivy.plugins.parser.m2
+
+import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor;
+
+/**
+ * It turns out there was a very subtle, and evil, issue sitting in the Ivy/maven configuration, and it
+ * related to dependency mapping. A mapping of `foo->bar(*)` means that the local configuration
+ * `foo` depends on the remote configuration `bar`, if it exists, or *ALL CONFIGURATIONS* if `bar`
+ * does not exist. Since the default Ivy configuration mapping was using the random `master`
+ * configuration, which AFAICT is NEVER specified, just an assumed default, this would cause leaks
+ * between maven + ivy projects.
+ *
+ * i.e. if a maven POM depends on a module denoted by an ivy.xml file, then you'd wind up accidentally
+ * bleeding ALL the ivy module's configurations into the maven module's configurations.
+ *
+ * This fix works around the issue by assuming that if there is no `master` configuration, then the
+ * maven default of `compile` is intended. As sbt forces generated `ivy.xml` files to abide by
+ * maven conventions, this works in all of our test cases. The only scenario where it wouldn't work
+ * is for those who have custom ivy.xml files *and* have pom.xml files which rely on those custom ivy.xml files,
+ * a very unlikely situation where the workaround is: "define a master configuration".
+ *
+ * Also see: http://ant.apache.org/ivy/history/2.3.0/ivyfile/dependency.html
+ * and: http://svn.apache.org/repos/asf/ant/ivy/core/tags/2.3.0/src/java/org/apache/ivy/plugins/parser/m2/PomModuleDescriptorBuilder.java
+ *
+ *
+ */
+object ReplaceMavenConfigurationMappings {
+
+  val REPLACEMENT_MAVEN_MAPPINGS = {
+    // Here we copy paste from Ivy
+    val REPLACEMENT_MAPPINGS = new java.util.HashMap[String, PomModuleDescriptorBuilder.ConfMapper]
+
+    // NOTE - This code is copied from org.apache.ivy.plugins.parser.m2.PomModuleDescriptorBuilder
+    // except with altered default configurations...
+    REPLACEMENT_MAPPINGS.put("compile", new PomModuleDescriptorBuilder.ConfMapper {
+      def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean) {
+        if (isOptional) {
+          dd.addDependencyConfiguration("optional", "compile(*)")
+          // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+          dd.addDependencyConfiguration("optional", "master(compile)")
+        } else {
+          dd.addDependencyConfiguration("compile", "compile(*)")
+          // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+          dd.addDependencyConfiguration("compile", "master(compile)")
+          dd.addDependencyConfiguration("runtime", "runtime(*)")
+        }
+      }
+    })
+    REPLACEMENT_MAPPINGS.put("provided", new PomModuleDescriptorBuilder.ConfMapper {
+      def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean) {
+        if (isOptional) {
+          dd.addDependencyConfiguration("optional", "compile(*)")
+          dd.addDependencyConfiguration("optional", "provided(*)")
+          dd.addDependencyConfiguration("optional", "runtime(*)")
+          // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+          dd.addDependencyConfiguration("optional", "master(compile)")
+        } else {
+          dd.addDependencyConfiguration("provided", "compile(*)")
+          dd.addDependencyConfiguration("provided", "provided(*)")
+          dd.addDependencyConfiguration("provided", "runtime(*)")
+          // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+          dd.addDependencyConfiguration("provided", "master(compile)")
+        }
+      }
+    })
+
+    REPLACEMENT_MAPPINGS.put("runtime", new PomModuleDescriptorBuilder.ConfMapper {
+      def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean) {
+        if (isOptional) {
+          dd.addDependencyConfiguration("optional", "compile(*)")
+          dd.addDependencyConfiguration("optional", "provided(*)")
+          // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+          dd.addDependencyConfiguration("optional", "master(compile)")
+        } else {
+          dd.addDependencyConfiguration("runtime", "compile(*)")
+          dd.addDependencyConfiguration("runtime", "runtime(*)")
+          // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+          dd.addDependencyConfiguration("runtime", "master(compile)")
+        }
+      }
+    })
+
+    REPLACEMENT_MAPPINGS.put("test", new PomModuleDescriptorBuilder.ConfMapper {
+      def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean) {
+        dd.addDependencyConfiguration("test", "runtime(*)")
+        // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+        dd.addDependencyConfiguration("test", "master(compile)")
+      }
+    })
+
+    REPLACEMENT_MAPPINGS.put("system", new PomModuleDescriptorBuilder.ConfMapper {
+      def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean) {
+        // FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
+        dd.addDependencyConfiguration("system", "master(compile)")
+      }
+    })
+
+    REPLACEMENT_MAPPINGS
+  }
+
+  def init(): Unit = {
+    // Here we mutate a static final field, because we have to AND because it's evil.
+    try {
+      val map = PomModuleDescriptorBuilder.MAVEN2_CONF_MAPPING.asInstanceOf[java.util.Map[String, PomModuleDescriptorBuilder.ConfMapper]]
+      map.clear()
+      map.putAll(REPLACEMENT_MAVEN_MAPPINGS)
+    } catch {
+      case e: Exception =>
+        // TODO - Log that Ivy may not be configured correctly and you could have maven/ivy issues.
+        throw new RuntimeException("FAILURE to install Ivy maven hooks. 
Your ivy-maven interaction may suffer resolution errors", e) + } + } +} \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/CustomPomParser.scala b/ivy/src/main/scala/sbt/CustomPomParser.scala index bcb8a2476..7bc878b2b 100644 --- a/ivy/src/main/scala/sbt/CustomPomParser.scala +++ b/ivy/src/main/scala/sbt/CustomPomParser.scala @@ -4,7 +4,7 @@ import org.apache.ivy.core.module.id.ModuleRevisionId import org.apache.ivy.core.module.descriptor.{ DefaultArtifact, DefaultExtendsDescriptor, DefaultModuleDescriptor, ModuleDescriptor } import org.apache.ivy.core.module.descriptor.{ DefaultDependencyDescriptor, DependencyDescriptor } import org.apache.ivy.plugins.parser.{ ModuleDescriptorParser, ModuleDescriptorParserRegistry, ParserSettings } -import org.apache.ivy.plugins.parser.m2.{ PomModuleDescriptorBuilder, PomModuleDescriptorParser } +import org.apache.ivy.plugins.parser.m2.{ ReplaceMavenConfigurationMappings, PomModuleDescriptorBuilder, PomModuleDescriptorParser } import org.apache.ivy.plugins.repository.Resource import org.apache.ivy.plugins.namespace.NamespaceTransformer import org.apache.ivy.util.extendable.ExtendableItem @@ -27,6 +27,10 @@ final class CustomPomParser(delegate: ModuleDescriptorParser, transform: (Module override def getMetadataArtifact(mrid: ModuleRevisionId, res: Resource) = delegate.getMetadataArtifact(mrid, res) } object CustomPomParser { + + // Evil hackery to override the default maven pom mappings. + ReplaceMavenConfigurationMappings.init() + /** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution.*/ val InfoKeyPrefix = "info." val ApiURLKey = "info.apiURL" diff --git a/main/src/main/scala/sbt/SettingCompletions.scala b/main/src/main/scala/sbt/SettingCompletions.scala index f3668de79..6b94c4c6b 100644 --- a/main/src/main/scala/sbt/SettingCompletions.scala +++ b/main/src/main/scala/sbt/SettingCompletions.scala @@ -27,7 +27,7 @@ private[sbt] object SettingCompletions { { import extracted._ val r = relation(extracted.structure, true) - val allDefs = r._1s.toSeq + val allDefs = Def.flattenLocals(Def.compiled(extracted.structure.settings, true)(structure.delegates, structure.scopeLocal, implicitly[Show[ScopedKey[_]]])).map(_._1) val projectScope = Load.projectScope(currentRef) def resolve(s: Setting[_]): Seq[Setting[_]] = Load.transformSettings(projectScope, currentRef.build, rootProject, s :: Nil) def rescope[T](setting: Setting[T]): Seq[Setting[_]] = @@ -353,4 +353,4 @@ private[sbt] object SettingCompletions { classOf[Long], classOf[String] ) -} \ No newline at end of file +} diff --git a/notes/0.13.6.md b/notes/0.13.6.markdown similarity index 99% rename from notes/0.13.6.md rename to notes/0.13.6.markdown index fa4923d1c..998fe9f76 100644 --- a/notes/0.13.6.md +++ b/notes/0.13.6.markdown @@ -74,10 +74,6 @@ [@puffnfresh]: https://github.com/puffnfresh [@rtyley]: https://github.com/rtyley -### Changes since 0.13.6-M1 - -- Fixes `NullPointerException` during `update`. [#1484][1484] by [@eed3si9n][@eed3si9n] - ### Fixes with compatibility implications - Maven Central Repository, Java.net Maven 2 Repository, Typesafe Repository, and sbt Plugin repository now defaults to HTTPS. 
(See below) diff --git a/notes/0.13.7/cross-ivy-maven.md b/notes/0.13.7/cross-ivy-maven.md new file mode 100644 index 000000000..1dc45d06d --- /dev/null +++ b/notes/0.13.7/cross-ivy-maven.md @@ -0,0 +1,8 @@ + [1586]: https://github.com/sbt/sbt/pull/1586 + [@jsuereth]: https://github.com/jsuereth + + +### Fixes with compatibility implications + +* Maven artifact dependencies now limit their transitive dependencies to "compile" rather than "every configuration" + if no `master` configuration is found. [#1586][1586] by [@jsuereth][@jsuereth] diff --git a/notes/about.md b/notes/about.markdown similarity index 100% rename from notes/about.md rename to notes/about.markdown diff --git a/project/Sbt.scala b/project/Sbt.scala index 5b2453dfb..bfe0a386f 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -17,7 +17,7 @@ object Sbt extends Build { s"all control/$task collections/$task io/$task completion/$task" def buildSettings = Seq( organization := "org.scala-sbt", - version := "0.13.6-SNAPSHOT", + version := "0.13.7-SNAPSHOT", publishArtifact in packageDoc := false, scalaVersion := "2.10.4", publishMavenStyle := false, @@ -65,6 +65,7 @@ object Sbt extends Build { commands += Command.command("release-sbt") { state => // TODO - Any sort of validation "checkCredentials" :: + "conscript-configs" :: "publishSigned" :: "publishLauncher" :: "release-libs-211" :: @@ -153,7 +154,7 @@ object Sbt extends Build { // Defines the data structures for representing file fingerprints and relationships and the overall source analysis lazy val compileIncrementalSub = testedBaseProject(compilePath / "inc", "Incremental Compiler") dependsOn (apiSub, ioSub, logSub, classpathSub, relationSub) // Persists the incremental data structures using SBinary - lazy val compilePersistSub = baseProject(compilePath / "persist", "Persist") dependsOn (compileIncrementalSub, apiSub) settings (sbinary) + lazy val compilePersistSub = testedBaseProject(compilePath / "persist", "Persist") dependsOn (compileIncrementalSub, apiSub, compileIncrementalSub % "test->test") settings (sbinary) // sbt-side interface to compiler. 
Calls compiler-side interface reflectively
 lazy val compilerSub = testedBaseProject(compilePath, "Compile") dependsOn (launchInterfaceSub, interfaceSub % "compile;test->test", logSub, ioSub, classpathSub, logSub % "test->test", launchSub % "test->test", apiSub % "test") settings (compilerSettings: _*)
@@ -315,7 +316,7 @@ object Sbt extends Build {
     autoScalaLibrary := false,
     description := "sbt application launcher",
     publishLauncher <<= Release.deployLauncher,
-    packageBin in Compile <<= (proguard in Proguard, Transform.conscriptConfigs).map((x, y) => x)
+    packageBin in Compile <<= proguard in Proguard
   )
 
 def interfaceSettings = javaOnly ++ Seq(
diff --git a/sbt/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt
new file mode 100644
index 000000000..60a2d2c01
--- /dev/null
+++ b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt
@@ -0,0 +1,15 @@
+
+val repoFile = file("mvn-repo")
+
+resolvers += "bad-mvn-repo" at repoFile.toURI.toURL.toString
+
+libraryDependencies += "bad" % "mvn" % "1.0"
+
+TaskKey[Unit]("check") := {
+  val cp = (fullClasspath in Compile).value
+  def isTestJar(n: String): Boolean =
+    (n contains "scalacheck") ||
+      (n contains "specs2")
+  val testLibs = cp map (_.data.getName) filter isTestJar
+  assert(testLibs.isEmpty, s"Compile Classpath has test libs:\n * ${testLibs.mkString("\n * ")}")
+}
\ No newline at end of file
diff --git a/sbt/src/sbt-test/dependency-management/cross-ivy-maven/mvn-repo/bad/mvn/1.0/mvn-1.0.jar b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/mvn-repo/bad/mvn/1.0/mvn-1.0.jar
new file mode 100644
index 000000000..e69de29bb
diff --git a/sbt/src/sbt-test/dependency-management/cross-ivy-maven/mvn-repo/bad/mvn/1.0/mvn-1.0.pom b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/mvn-repo/bad/mvn/1.0/mvn-1.0.pom
new file mode 100644
index 000000000..2821446dd
--- /dev/null
+++ b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/mvn-repo/bad/mvn/1.0/mvn-1.0.pom
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>bad</groupId>
+  <artifactId>mvn</artifactId>
+  <packaging>jar</packaging>
+  <version>1.0</version>
+  <dependencies>
+    <dependency>
+      <groupId>org.scala-sbt</groupId>
+      <artifactId>completion</artifactId>
+      <version>0.13.5</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/sbt/src/sbt-test/dependency-management/cross-ivy-maven/test b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/test
new file mode 100644
index 000000000..a5912a391
--- /dev/null
+++ b/sbt/src/sbt-test/dependency-management/cross-ivy-maven/test
@@ -0,0 +1 @@
+> check
\ No newline at end of file
diff --git a/sbt/src/sbt-test/dependency-management/exclude-transitive/project/TestProject.scala b/sbt/src/sbt-test/dependency-management/exclude-transitive/project/TestProject.scala
index c5f3887ff..7e2796e5b 100644
--- a/sbt/src/sbt-test/dependency-management/exclude-transitive/project/TestProject.scala
+++ b/sbt/src/sbt-test/dependency-management/exclude-transitive/project/TestProject.scala
@@ -16,9 +16,14 @@ object TestProject extends Build
 	private def check(transitive: Boolean) = (dependencyClasspath in Compile) map { downloaded =>
 		val jars = downloaded.size
-		if(transitive)
-			if(jars <= 2) error("Transitive dependencies not downloaded") else ()
-		else
-			if(jars > 2) error("Transitive dependencies downloaded (" + downloaded.files.mkString(", ") + ")") else ()
+		if(transitive) {
+			if (jars <= 2)
+				sys.error(s"Transitive dependencies not downloaded, found:\n * ${downloaded.mkString("\n * ")}")
+			else ()
+		} else {
+			if (jars > 2)
+				sys.error(s"Transitive dependencies downloaded, found:\n * ${downloaded.mkString("\n * ")}")
+			else ()
+		}
 	}
 }
diff --git 
a/sbt/src/sbt-test/dependency-management/exclude-transitive/test b/sbt/src/sbt-test/dependency-management/exclude-transitive/test index b7200b24b..511952ae3 100644 --- a/sbt/src/sbt-test/dependency-management/exclude-transitive/test +++ b/sbt/src/sbt-test/dependency-management/exclude-transitive/test @@ -1,3 +1,4 @@ +> debug # load the project definition with transitive dependencies enabled # and check that they are not downloaded #$ pause diff --git a/sbt/src/sbt-test/source-dependencies/macro/pending b/sbt/src/sbt-test/source-dependencies/macro/test similarity index 100% rename from sbt/src/sbt-test/source-dependencies/macro/pending rename to sbt/src/sbt-test/source-dependencies/macro/test diff --git a/sbt/src/sbt-test/tests/set-every/build.sbt b/sbt/src/sbt-test/tests/set-every/build.sbt new file mode 100644 index 000000000..bd8e9b438 --- /dev/null +++ b/sbt/src/sbt-test/tests/set-every/build.sbt @@ -0,0 +1,16 @@ +val a = project.settings(version := "2.8.1") + +val trySetEvery = taskKey[Unit]("Tests \"set every\"") + +trySetEvery := { + val s = state.value + val extracted = Project.extract(s) + import extracted._ + val allProjs = structure.allProjectRefs + val Some(aProj) = allProjs.find(_.project == "a") + val aVer = (version in aProj get structure.data).get + if (aVer != "1.0") { + println("Version of project a: " + aVer + ", expected: 1.0") + error("\"set every\" did not change the version of all projects.") + } +} diff --git a/sbt/src/sbt-test/tests/set-every/test b/sbt/src/sbt-test/tests/set-every/test new file mode 100644 index 000000000..8fa56c46e --- /dev/null +++ b/sbt/src/sbt-test/tests/set-every/test @@ -0,0 +1,3 @@ +> set every version := '"1.0"' +> trySetEvery +
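
The patch replaces the hand-written per-relation persistence in TextAnalysisFormat with a single loop over `Relations.allRelations` when writing and over `Relations.existingRelations` plus `Relations.construct` when reading. The sketch below is a simplified, hypothetical illustration of that round-trip idea only — `Rel`, `write`, and `read` are names invented for this note, not sbt API — showing how each relation can be persisted as sorted `key -> value` lines under a header (values ordered by their string form, as the new `Ordering[Any]` in the patch does) and rebuilt with a per-relation `String => T` function applied in a fixed order.

```scala
import java.io.File

object RelationRoundTripSketch {
  // Hypothetical stand-in for Relation[File, T]: a forward multi-map.
  type Rel[T] = Map[String, Set[T]]

  // Write one relation as a header line followed by sorted "key -> value" lines.
  def write[T](header: String, rel: Rel[T]): String = {
    val lines = for {
      (k, vs) <- rel.toSeq.sortBy(_._1)
      v <- vs.map(_.toString).toSeq.sorted // values compared by toString, like the patch's Ordering[Any]
    } yield s"$k -> $v"
    (s"$header:" +: lines).mkString("\n")
  }

  // Read it back, turning each value string into a T with the supplied function.
  def read[T](persisted: String, fromString: String => T): Rel[T] =
    persisted.linesIterator
      .drop(1) // skip the header line
      .map { line => val Array(k, v) = line.split(" -> ", 2); (k, fromString(v)) }
      .toSeq
      .groupBy(_._1)
      .map { case (k, kvs) => k -> kvs.map(_._2).toSet }

  def main(args: Array[String]): Unit = {
    val products: Rel[File] = Map("A.scala" -> Set(new File("A.class"), new File("A$.class")))
    val text = write("products", products)
    println(text)
    assert(read[File](text, new File(_)) == products) // round-trips losslessly
  }
}
```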
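On the Dependency traverser change: the guard moved from `original != tree` to a mutable set of inspected original trees because a macro expansion can contain itself (directly or indirectly) as its own original tree (SI-8486, sbt #1237, #1544). The following self-contained sketch uses a toy `Tree` class invented for illustration, not the compiler's, to show why a `Set.add`-based guard both deduplicates work and guarantees termination on such self-referential structures.

```scala
object MacroOriginalGuardSketch {
  // Toy tree: a node that may point at an "original" tree, possibly itself.
  final class Tree(val name: String) { var original: Option[Tree] = None }

  def collectNames(root: Tree): List[String] = {
    val inspectedOriginals = scala.collection.mutable.Set.empty[Tree]
    def traverse(t: Tree): List[String] =
      t.name :: (t.original match {
        case Some(orig) if inspectedOriginals.add(orig) => traverse(orig) // first encounter only
        case _                                          => Nil            // already inspected: stop
      })
    traverse(root)
  }

  def main(args: Array[String]): Unit = {
    val expansion = new Tree("expansion")
    expansion.original = Some(expansion)  // the expansion lists itself as its original
    println(collectNames(expansion))      // List(expansion, expansion), then terminates
  }
}
```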
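The two-set trick in `TestCaseGenerators.unique` exists because ScalaCheck's `retryUntil` can evaluate its predicate twice for the same candidate, so a single "seen" set would reject every fresh value on its second evaluation. Below is a standalone sketch of that behaviour with no ScalaCheck dependency; `firstUse` is a hypothetical helper written for this note, standing in for the predicate passed to `retryUntil`.

```scala
object UniqueRetrySketch {
  private val used1 = scala.collection.mutable.Set.empty[String]
  private val used2 = scala.collection.mutable.Set.empty[String]

  // True if `o` has not been handed out before; tolerates being called twice per candidate.
  // Only when the element is present in both sets has it really been used already.
  def firstUse(o: Any): Boolean =
    if (used1.add(o.toString)) true   // genuinely fresh value
    else used2.add(o.toString)        // absorbs the predicate's second evaluation

  def main(args: Array[String]): Unit = {
    assert(firstUse("x"))   // fresh value accepted
    assert(firstUse("x"))   // same candidate re-checked by retryUntil: still accepted
    assert(!firstUse("x"))  // a later duplicate generation of "x" is rejected
    println("two-set uniqueness check behaves as described")
  }
}
```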