From c55ae4fd1823d09fa5b75d5d6bd3c3a4625087c9 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 3 Sep 2014 23:10:22 +0200 Subject: [PATCH 1/3] Fix random Analysis generator for ScalaCheck Unit tests in incremental-compiler subproject use a generator to create random Analysis objects. This generator was unfortunately not working properly and generated only empty Analyses (it failed to generate any non-empty Analysis because of a bug in the `unique` generator). --- .../src/test/scala/sbt/inc/AnalysisTest.scala | 4 +- .../scala/sbt/inc/TestCaseGenerators.scala | 40 +++++++++++++++++-- 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala b/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala index 13a2b1cc6..437176881 100644 --- a/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala +++ b/compile/inc/src/test/scala/sbt/inc/AnalysisTest.scala @@ -65,7 +65,9 @@ object AnalysisTest extends Properties("Analysis") { // Merge and split large, generated examples. // Mustn't shrink, as the default Shrink[Int] doesn't respect the lower bound of choose(), which will cause // a divide-by-zero error masking the original error. 
- property("Complex Merge and Split") = forAllNoShrink(genAnalysis, choose(1, 10)) { (analysis: Analysis, numSplits: Int) => + // Note that the generated Analyses have nameHashing = false (Grouping of Analyses with name hashing enabled + // is not supported right now) + property("Complex Merge and Split") = forAllNoShrink(genAnalysis(nameHashing = false), choose(1, 10)) { (analysis: Analysis, numSplits: Int) => val grouped: Map[Int, Analysis] = analysis.groupBy({ f: File => abs(f.hashCode()) % numSplits }) def getGroup(i: Int): Analysis = grouped.getOrElse(i, Analysis.empty(false)) val splits = (Range(0, numSplits) map getGroup).toList diff --git a/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala b/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala index f260a0fbf..2c8d7a505 100644 --- a/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala +++ b/compile/inc/src/test/scala/sbt/inc/TestCaseGenerators.scala @@ -25,9 +25,18 @@ object TestCaseGenerators { // Ensure that we generate unique class names and file paths every time. // Using repeated strings may lead to all sorts of undesirable interactions. - val used = scala.collection.mutable.Set.empty[String] + val used1 = scala.collection.mutable.Set.empty[String] + val used2 = scala.collection.mutable.Set.empty[String] - def unique[T](g: Gen[T]) = g retryUntil { o: T => used.add(o.toString) } + // When using `retryUntil`, the condition is actually tested twice (see implementation in ScalaCheck), + // which is why we need to insert twice the element. + // If the element is present in both sets, then it has already been used. 
+ def unique[T](g: Gen[T]) = g retryUntil { o: T => + if (used1.add(o.toString)) + true + else + used2.add(o.toString) + } def identifier: Gen[String] = sized { size => resize(Math.max(size, 3), Gen.identifier) @@ -134,6 +143,18 @@ object TestCaseGenerators { external <- someOf(src.external.all.toList) } yield Relations.makeSource(Relation.empty ++ internal, Relation.empty ++ external) + def genRSourceDependencies(srcs: List[File]): Gen[Relations.SourceDependencies] = for { + internal <- listOfN(srcs.length, someOf(srcs)) + external <- genStringRelation(srcs) + } yield Relations.makeSourceDependencies( + Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap), + external) + + def genSubRSourceDependencies(src: Relations.SourceDependencies): Gen[Relations.SourceDependencies] = for { + internal <- someOf(src.internal.all.toList) + external <- someOf(src.external.all.toList) + } yield Relations.makeSourceDependencies(Relation.empty ++ internal, Relation.empty ++ external) + def genRelations: Gen[Relations] = for { numSrcs <- choose(0, maxSources) srcs <- listOfN(numSrcs, genFile) @@ -145,8 +166,19 @@ object TestCaseGenerators { } yield Relations.make(srcProd, binaryDep, direct, publicInherited, classes) - def genAnalysis: Gen[Analysis] = for { - rels <- genRelations + def genRelationsNameHashing: Gen[Relations] = for { + numSrcs <- choose(0, maxSources) + srcs <- listOfN(numSrcs, genFile) + srcProd <- genFileRelation(srcs) + binaryDep <- genFileRelation(srcs) + memberRef <- genRSourceDependencies(srcs) + inheritance <- genSubRSourceDependencies(memberRef) + classes <- genStringRelation(srcs) + names <- genStringRelation(srcs) + } yield Relations.make(srcProd, binaryDep, memberRef, inheritance, classes, names) + + def genAnalysis(nameHashing: Boolean): Gen[Analysis] = for { + rels <- if (nameHashing) genRelationsNameHashing else genRelations stamps <- genStamps(rels) apis <- genAPIs(rels) } yield new MAnalysis(stamps, apis, 
rels, SourceInfos.empty, Compilations.empty) From efa71548d153ed4ab2d8450462b8ad5f936c9dc9 Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 3 Sep 2014 23:24:08 +0200 Subject: [PATCH 2/3] Add unit tests for TextAnalysisFormat Those tests use the random Analysis generator that is used in the unit tests for the subproject `incremental-compiler`. Random Analyses are serialized and then constructed back from this representation. --- .../inc/TextAnalysisFormatSpecification.scala | 112 ++++++++++++++++++ project/Sbt.scala | 2 +- 2 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala diff --git a/compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala b/compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala new file mode 100644 index 000000000..1b8c4cb15 --- /dev/null +++ b/compile/persist/src/test/scala/sbt/inc/TextAnalysisFormatSpecification.scala @@ -0,0 +1,112 @@ +package sbt +package inc + +import java.io.{ BufferedReader, File, StringReader, StringWriter } +import scala.math.abs +import org.scalacheck._ +import Gen._ +import Prop._ + +object TextAnalysisFormatTest extends Properties("TextAnalysisFormat") { + + val nameHashing = true + val dummyOutput = new xsbti.compile.SingleOutput { def outputDirectory: java.io.File = new java.io.File("dummy") } + val commonSetup = new CompileSetup(dummyOutput, new CompileOptions(Nil, Nil), "2.10.4", xsbti.compile.CompileOrder.Mixed, nameHashing) + val commonHeader = """format version: 5 + |output mode: + |1 items + |0 -> single + |output directories: + |1 items + |output dir -> dummy + |compile options: + |0 items + |javac options: + |0 items + |compiler version: + |1 items + |0 -> 2.10.4 + |compile order: + |1 items + |0 -> Mixed + |name hashing: + |1 items + |0 -> true""".stripMargin + + property("Write and read empty Analysis") = { + + val writer = new StringWriter + val analysis = 
Analysis.empty(nameHashing) + TextAnalysisFormat.write(writer, analysis, commonSetup) + + val result = writer.toString + + result.startsWith(commonHeader) + val reader = new BufferedReader(new StringReader(result)) + + val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader) + + analysis == readAnalysis + + } + + property("Write and read simple Analysis") = { + + import TestCaseGenerators._ + + def f(s: String) = new File(s) + val aScala = f("A.scala") + val bScala = f("B.scala") + val aSource = genSource("A" :: "A$" :: Nil).sample.get + val bSource = genSource("B" :: "B$" :: Nil).sample.get + val cSource = genSource("C" :: Nil).sample.get + val exists = new Exists(true) + val sourceInfos = SourceInfos.makeInfo(Nil, Nil) + + var analysis = Analysis.empty(nameHashing) + analysis = analysis.addProduct(aScala, f("A.class"), exists, "A") + analysis = analysis.addProduct(aScala, f("A$.class"), exists, "A$") + analysis = analysis.addSource(aScala, aSource, exists, Nil, Nil, sourceInfos) + analysis = analysis.addBinaryDep(aScala, f("x.jar"), "x", exists) + analysis = analysis.addExternalDep(aScala, "C", cSource, inherited = false) + + val writer = new StringWriter + + TextAnalysisFormat.write(writer, analysis, commonSetup) + + val result = writer.toString + + result.startsWith(commonHeader) + val reader = new BufferedReader(new StringReader(result)) + + val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader) + + compare(analysis, readAnalysis) + + } + + property("Write and read complex Analysis") = forAllNoShrink(TestCaseGenerators.genAnalysis(nameHashing)) { analysis: Analysis => + val writer = new StringWriter + + TextAnalysisFormat.write(writer, analysis, commonSetup) + + val result = writer.toString + + result.startsWith(commonHeader) + val reader = new BufferedReader(new StringReader(result)) + + val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader) + + compare(analysis, readAnalysis) + } + + // Compare two analyses with useful labelling when 
they aren't equal. + private[this] def compare(left: Analysis, right: Analysis): Prop = + s" LEFT: $left" |: + s"RIGHT: $right" |: + s"STAMPS EQUAL: ${left.stamps == right.stamps}" |: + s"APIS EQUAL: ${left.apis == right.apis}" |: + s"RELATIONS EQUAL: ${left.relations == right.relations}" |: + "UNEQUAL" |: + (left == right) +} \ No newline at end of file diff --git a/project/Sbt.scala b/project/Sbt.scala index 5b2453dfb..2ece99dcf 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -153,7 +153,7 @@ object Sbt extends Build { // Defines the data structures for representing file fingerprints and relationships and the overall source analysis lazy val compileIncrementalSub = testedBaseProject(compilePath / "inc", "Incremental Compiler") dependsOn (apiSub, ioSub, logSub, classpathSub, relationSub) // Persists the incremental data structures using SBinary - lazy val compilePersistSub = baseProject(compilePath / "persist", "Persist") dependsOn (compileIncrementalSub, apiSub) settings (sbinary) + lazy val compilePersistSub = testedBaseProject(compilePath / "persist", "Persist") dependsOn (compileIncrementalSub, apiSub, compileIncrementalSub % "test->test") settings (sbinary) // sbt-side interface to compiler. Calls compiler-side interface reflectively lazy val compilerSub = testedBaseProject(compilePath, "Compile") dependsOn (launchInterfaceSub, interfaceSub % "compile;test->test", logSub, ioSub, classpathSub, logSub % "test->test", launchSub % "test->test", apiSub % "test") settings (compilerSettings: _*) From 82152b89058b2509f7d42ccb29d6d86d0c931f9a Mon Sep 17 00:00:00 2001 From: Martin Duhem Date: Wed, 3 Sep 2014 23:35:34 +0200 Subject: [PATCH 3/3] Don't hardcode existing relations in TextAnalysisFormat The previous implementation of TextAnalysisFormat contained the list of all the existing relations that sbt knew of, and used this information to write to and read from the disk the persisted analyses. 
In this new implementation, TextAnalysisFormat gets from the Relations object what are the existing relations, and then persists them to disk. The previous situation was not optimal since it meant that, in order to add a new dependency kind, one had to modify both the Relations and TextAnalysisFormat. Using this new implementation, no change to TextAnalysisFormat is required whenever a new dependency kind is added. --- .../src/main/scala/sbt/inc/Relations.scala | 92 +++++++++++++++++++ .../scala/sbt/inc/TextAnalysisFormat.scala | 90 +++--------------- 2 files changed, 105 insertions(+), 77 deletions(-) diff --git a/compile/inc/src/main/scala/sbt/inc/Relations.scala b/compile/inc/src/main/scala/sbt/inc/Relations.scala index f244c431f..6a60ca418 100644 --- a/compile/inc/src/main/scala/sbt/inc/Relations.scala +++ b/compile/inc/src/main/scala/sbt/inc/Relations.scala @@ -175,9 +175,67 @@ trait Relations { * Relation between source files and _unqualified_ term and type names used in given source file. */ private[inc] def names: Relation[File, String] + + /** + * Lists of all the pairs (header, relation) that sbt knows of. + * Used by TextAnalysisFormat to persist relations. + * This cannot be stored as a Map because the order is important. + */ + private[inc] def allRelations: List[(String, Relation[File, _])] } object Relations { + + /** + * Represents all the relations that sbt knows of along with a way to recreate each + * of their elements from their string representation. 
+ */ + private[inc] val existingRelations = { + val string2File: String => File = new File(_) + List( + ("products", string2File), + ("binary dependencies", string2File), + ("direct source dependencies", string2File), + ("direct external dependencies", identity[String] _), + ("public inherited source dependencies", string2File), + ("public inherited external dependencies", identity[String] _), + ("member reference internal dependencies", string2File), + ("member reference external dependencies", identity[String] _), + ("inheritance internal dependencies", string2File), + ("inheritance external dependencies", identity[String] _), + ("class names", identity[String] _), + ("used names", identity[String] _)) + } + /** + * Reconstructs a Relations from a list of Relation + * The order in which the relations are read matters and is defined by `existingRelations`. + */ + def construct(nameHashing: Boolean, relations: List[Relation[_, _]]) = + relations match { + case p :: bin :: di :: de :: pii :: pie :: mri :: mre :: ii :: ie :: cn :: un :: Nil => + val srcProd = p.asInstanceOf[Relation[File, File]] + val binaryDep = bin.asInstanceOf[Relation[File, File]] + val directSrcDeps = makeSource(di.asInstanceOf[Relation[File, File]], de.asInstanceOf[Relation[File, String]]) + val publicInheritedSrcDeps = makeSource(pii.asInstanceOf[Relation[File, File]], pie.asInstanceOf[Relation[File, String]]) + val memberRefSrcDeps = makeSourceDependencies(mri.asInstanceOf[Relation[File, File]], mre.asInstanceOf[Relation[File, String]]) + val inheritanceSrcDeps = makeSourceDependencies(ii.asInstanceOf[Relation[File, File]], ie.asInstanceOf[Relation[File, String]]) + val classes = cn.asInstanceOf[Relation[File, String]] + val names = un.asInstanceOf[Relation[File, String]] + + // we don't check for emptiness of publicInherited/inheritance relations because + // we assume that invariant that says they are subsets of direct/memberRef holds + assert(nameHashing || (memberRefSrcDeps == 
emptySourceDependencies), "When name hashing is disabled the `memberRef` relation should be empty.") + assert(!nameHashing || (directSrcDeps == emptySource), "When name hashing is enabled the `direct` relation should be empty.") + + if (nameHashing) + Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) + else { + assert(names.all.isEmpty, s"When `nameHashing` is disabled `names` relation should be empty: $names") + Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) + } + case _ => throw new java.io.IOException(s"Expected to read ${existingRelations.length} relations but read ${relations.length}.") + } + /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) { def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external) @@ -403,6 +461,23 @@ private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Re case _ => false } + def allRelations = { + val rels = List( + srcProd, + binaryDep, + direct.internal, + direct.external, + publicInherited.internal, + publicInherited.external, + Relations.emptySourceDependencies.internal, // Default implementation doesn't provide memberRef source deps + Relations.emptySourceDependencies.external, // Default implementation doesn't provide memberRef source deps + Relations.emptySourceDependencies.internal, // Default implementation doesn't provide inheritance source deps + Relations.emptySourceDependencies.external, // Default implementation doesn't provide inheritance source deps + classes, + Relation.empty[File, String]) // Default implementation doesn't provide used names relation + Relations.existingRelations map (_._1) zip rels + } + override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: 
Nil).hashCode override def toString = ( @@ -490,6 +565,23 @@ private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Re case _ => false } + def allRelations = { + val rels = List( + srcProd, + binaryDep, + Relations.emptySource.internal, // NameHashing doesn't provide direct dependencies + Relations.emptySource.external, // NameHashing doesn't provide direct dependencies + Relations.emptySource.internal, // NameHashing doesn't provide public inherited dependencies + Relations.emptySource.external, // NameHashing doesn't provide public inherited dependencies + memberRef.internal, + memberRef.external, + inheritance.internal, + inheritance.external, + classes, + names) + Relations.existingRelations map (_._1) zip rels + } + override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode override def toString = ( diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index 7f3fd6f3b..3749298ea 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -122,7 +122,13 @@ object TextAnalysisFormat { } def write(out: Writer, relations: Relations) { - def writeRelation[T](header: String, rel: Relation[File, T])(implicit ord: Ordering[T]) { + // This ordering is used to persist all values in order. Since all values will be + // persisted using their string representation, it makes sense to sort them using + // their string representation. + val toStringOrd = new Ordering[Any] { + def compare(a: Any, b: Any) = a.toString compare b.toString + } + def writeRelation[T](header: String, rel: Relation[File, T]) { writeHeader(out, header) writeSize(out, rel.size) // We sort for ease of debugging and for more efficient reconstruction when reading. 
@@ -131,38 +137,15 @@ object TextAnalysisFormat { rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => val kStr = k.toString - vs.toSeq.sorted foreach { v => + vs.toSeq.sorted(toStringOrd) foreach { v => out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n") } } } - val nameHashing = relations.nameHashing - writeRelation(Headers.srcProd, relations.srcProd) - writeRelation(Headers.binaryDep, relations.binaryDep) - - val direct = if (nameHashing) Relations.emptySource else relations.direct - val publicInherited = if (nameHashing) - Relations.emptySource else relations.publicInherited - - val memberRef = if (nameHashing) - relations.memberRef else Relations.emptySourceDependencies - val inheritance = if (nameHashing) - relations.inheritance else Relations.emptySourceDependencies - val names = if (nameHashing) relations.names else Relation.empty[File, String] - - writeRelation(Headers.directSrcDep, direct.internal) - writeRelation(Headers.directExternalDep, direct.external) - writeRelation(Headers.internalSrcDepPI, publicInherited.internal) - writeRelation(Headers.externalDepPI, publicInherited.external) - - writeRelation(Headers.memberRefInternalDep, memberRef.internal) - writeRelation(Headers.memberRefExternalDep, memberRef.external) - writeRelation(Headers.inheritanceInternalDep, inheritance.internal) - writeRelation(Headers.inheritanceExternalDep, inheritance.external) - - writeRelation(Headers.classes, relations.classes) - writeRelation(Headers.usedNames, names) + relations.allRelations.foreach { + case (header, rel) => writeRelation(header, rel) + } } def read(in: BufferedReader, nameHashing: Boolean): Relations = { @@ -186,56 +169,9 @@ object TextAnalysisFormat { Relation.reconstruct(forward.toMap) } - def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) }) - def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String]) + val relations = Relations.existingRelations 
map { case (header, fun) => readRelation(header, fun) } - val srcProd = readFileRelation(Headers.srcProd) - val binaryDep = readFileRelation(Headers.binaryDep) - - import sbt.inc.Relations.{ - Source, - SourceDependencies, - makeSourceDependencies, - emptySource, - makeSource, - emptySourceDependencies - } - val directSrcDeps: Source = { - val internalSrcDep = readFileRelation(Headers.directSrcDep) - val externalDep = readStringRelation(Headers.directExternalDep) - makeSource(internalSrcDep, externalDep) - } - val publicInheritedSrcDeps: Source = { - val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI) - val externalDepPI = readStringRelation(Headers.externalDepPI) - makeSource(internalSrcDepPI, externalDepPI) - } - val memberRefSrcDeps: SourceDependencies = { - val internalMemberRefDep = readFileRelation(Headers.memberRefInternalDep) - val externalMemberRefDep = readStringRelation(Headers.memberRefExternalDep) - makeSourceDependencies(internalMemberRefDep, externalMemberRefDep) - } - val inheritanceSrcDeps: SourceDependencies = { - val internalInheritanceDep = readFileRelation(Headers.inheritanceInternalDep) - val externalInheritanceDep = readStringRelation(Headers.inheritanceExternalDep) - makeSourceDependencies(internalInheritanceDep, externalInheritanceDep) - } - // we don't check for emptiness of publicInherited/inheritance relations because - // we assume that invariant that says they are subsets of direct/memberRef holds - assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), - "When name hashing is disabled the `memberRef` relation should be empty.") - assert(!nameHashing || (directSrcDeps == emptySource), - "When name hashing is enabled the `direct` relation should be empty.") - val classes = readStringRelation(Headers.classes) - val names = readStringRelation(Headers.usedNames) - - if (nameHashing) - Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) - else { - assert(names.all.isEmpty, 
"When `nameHashing` is disabled `names` relation " + - s"should be empty: $names") - Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) - } + Relations.construct(nameHashing, relations) } }