mirror of https://github.com/sbt/sbt.git

Merge pull request #1316 from sbt/wip/fix-broken-build

This doesn't quite fix the build, but gets closer

commit d198ea4099

.travis.yml (29 changed lines)
@@ -1,17 +1,24 @@
language: scala
script:
- - sbt "scripted $SCRIPTED_TEST"
+ - sbt "$SCRIPTED_TEST"
env:
- - SCRIPTED_TEST=actions/*
- - SCRIPTED_TEST=api/*
- - SCRIPTED_TEST=compiler-project/*
- - SCRIPTED_TEST=dependency-management/*
- - SCRIPTED_TEST=java/*
- - SCRIPTED_TEST=package/*
- - SCRIPTED_TEST=reporter/*
- - SCRIPTED_TEST=run/*
- - SCRIPTED_TEST=source-dependencies/*
- - SCRIPTED_TEST=tests/*
+ - SCRIPTED_TEST="scripted actions/*"
+ - SCRIPTED_TEST="scripted api/*"
+ - SCRIPTED_TEST="scripted compiler-project/*""
+ - SCRIPTED_TEST="scripted dependency-management/*1of2"
+ - SCRIPTED_TEST="scripted dependency-management/*2of2"
+ - SCRIPTED_TEST="scripted java/*"
+ - SCRIPTED_TEST="scripted package/*"
+ - SCRIPTED_TEST="scripted project/*"
+ - SCRIPTED_TEST="scripted reporter/*"
+ - SCRIPTED_TEST="scripted run/*"
+ - SCRIPTED_TEST="scripted source-dependencies/*1of3"
+ - SCRIPTED_TEST="scripted source-dependencies/*2of3"
+ - SCRIPTED_TEST="scripted source-dependencies/*3of3"
+ - SCRIPTED_TEST="scripted tests/*"
+ - SCRIPTED_TEST="all launcher/test main-settings/test main/test ivy/test logic/test completion/test"
+ - SCRIPTED_TEST="all actions/test classpath/test collections/test incremental-compiler/test logging/test run/test task-system/test"
# TODO - we'd like to actually test everything, but the process library has a deadlock right now
jdk:
- openjdk6
+ notifications:
@@ -3,30 +3,29 @@ package sbt
import java.io.File
import Types.:+:

- object CacheTest// extends Properties("Cache test")
+ object CacheTest // extends Properties("Cache test")
{
  val lengthCache = new File("/tmp/length-cache")
  val cCache = new File("/tmp/c-cache")

  import Cache._
  import FileInfo.hash._
  import Ordering._
  import sbinary.DefaultProtocol.FileFormat
-  def test
-  {
+  def test {
    lazy val create = new File("test")

    val length = cached(lengthCache) {
      (f: File) => { println("File length: " + f.length); f.length }
    }

    lazy val fileLength = length(create)

    val c = cached(cCache) { (in: (File :+: Long :+: HNil)) =>
      val file :+: len :+: HNil = in
      println("File: " + file + " (" + file.exists + "), length: " + len)
-      (len+1) :+: file :+: HNil
+      (len + 1) :+: file :+: HNil
    }
    c(create :+: fileLength :+: HNil)
  }
}
@@ -8,206 +8,206 @@ import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class NameHashingSpecification extends Specification {

  /**
   * Very basic test which checks whether a name hash is insensitive to
   * definition order (across the whole compilation unit).
   */
  "new member" in {
    val nameHashing = new NameHashing
    val def1 = new Def(Array.empty, strTpe, Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
    val def2 = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val classBar1 = simpleClass("Bar", def1)
    val classBar2 = simpleClass("Bar", def1, def2)
    val api1 = new SourceAPI(Array.empty, Array(classBar1))
    val api2 = new SourceAPI(Array.empty, Array(classBar2))
    val nameHashes1 = nameHashing.nameHashes(api1)
    val nameHashes2 = nameHashing.nameHashes(api2)
    assertNameHashEqualForRegularName("Bar", nameHashes1, nameHashes2)
    assertNameHashEqualForRegularName("foo", nameHashes1, nameHashes2)
-    nameHashes1.regularMembers.map(_.name).toSeq must not contain("bar")
+    nameHashes1.regularMembers.map(_.name).toSeq must not contain ("bar")
    nameHashes2.regularMembers.map(_.name).toSeq must contain("bar")
  }

  /**
   * Very basic test which checks whether a name hash is insensitive to
   * definition order (across the whole compilation unit).
   */
  "definition order" in {
    val nameHashing = new NameHashing
    val def1 = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val def2 = new Def(Array.empty, strTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val nestedBar1 = simpleClass("Bar1", def1)
    val nestedBar2 = simpleClass("Bar2", def2)
    val classA = simpleClass("Foo", nestedBar1, nestedBar2)
    val classB = simpleClass("Foo", nestedBar2, nestedBar1)
    val api1 = new SourceAPI(Array.empty, Array(classA))
    val api2 = new SourceAPI(Array.empty, Array(classB))
    val nameHashes1 = nameHashing.nameHashes(api1)
    val nameHashes2 = nameHashing.nameHashes(api2)
    val def1Hash = HashAPI(def1)
    val def2Hash = HashAPI(def2)
    def1Hash !=== def2Hash
    nameHashes1 === nameHashes2
  }

  /**
   * Very basic test which asserts that a name hash is sensitive to definition location.
   *
   * For example, if we have:
   * // Foo1.scala
   * class Foo { def xyz: Int = ... }
   * object Foo
   *
   * and:
   * // Foo2.scala
   * class Foo
   * object Foo { def xyz: Int = ... }
   *
   * then hash for `xyz` name should differ in those two cases
   * because method `xyz` was moved from class to an object.
   */
  "definition location" in {
    val nameHashing = new NameHashing
    val deff = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val classA = {
      val nestedBar1 = simpleClass("Bar1", deff)
      val nestedBar2 = simpleClass("Bar2")
      simpleClass("Foo", nestedBar1, nestedBar2)
    }
    val classB = {
      val nestedBar1 = simpleClass("Bar1")
      val nestedBar2 = simpleClass("Bar2", deff)
      simpleClass("Foo", nestedBar1, nestedBar2)
    }
    val api1 = new SourceAPI(Array.empty, Array(classA))
    val api2 = new SourceAPI(Array.empty, Array(classB))
    val nameHashes1 = nameHashing.nameHashes(api1)
    val nameHashes2 = nameHashing.nameHashes(api2)
    nameHashes1 !=== nameHashes2
  }

  /**
   * Test if members introduced in parent class affect hash of a name
   * of a child class.
   *
   * For example, if we have:
   * // Test1.scala
   * class Parent
   * class Child extends Parent
   *
   * and:
   * // Test2.scala
   * class Parent { def bar: Int = ... }
   * class Child extends Parent
   *
   * then hash for `Child` name should be the same in both
   * cases.
   */
  "definition in parent class" in {
    val parentA = simpleClass("Parent")
    val barMethod = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val parentB = simpleClass("Parent", barMethod)
    val childA = {
      val structure = new Structure(lzy(Array[Type](parentA.structure)), lzy(Array.empty[Definition]), lzy(Array.empty[Definition]))
      simpleClass("Child", structure)
    }
    val childB = {
      val structure = new Structure(lzy(Array[Type](parentB.structure)), lzy(Array.empty[Definition]), lzy(Array[Definition](barMethod)))
      simpleClass("Child", structure)
    }
    val parentANameHashes = nameHashesForClass(parentA)
    val parentBNameHashes = nameHashesForClass(parentB)
    Seq("Parent") === parentANameHashes.regularMembers.map(_.name).toSeq
    Seq("Parent", "bar") === parentBNameHashes.regularMembers.map(_.name).toSeq
    parentANameHashes !=== parentBNameHashes
    val childANameHashes = nameHashesForClass(childA)
    val childBNameHashes = nameHashesForClass(childB)
    assertNameHashEqualForRegularName("Child", childANameHashes, childBNameHashes)
  }

  /**
   * Checks if changes to structural types that appear in method signature
   * affect name hash of the method. For example, if we have:
   *
   * // Test1.scala
   * class A {
   *   def foo: { bar: Int }
   * }
   *
   * // Test2.scala
   * class A {
   *   def foo: { bar: String }
   * }
   *
   * then name hash for "foo" should be different in those two cases.
   */
  "structural type in definition" in {
    /** def foo: { bar: Int } */
    val fooMethod1 = {
      val barMethod1 = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
      new Def(Array.empty, simpleStructure(barMethod1), Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
    }
    /** def foo: { bar: String } */
    val fooMethod2 = {
      val barMethod2 = new Def(Array.empty, strTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
      new Def(Array.empty, simpleStructure(barMethod2), Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
    }
    val aClass1 = simpleClass("A", fooMethod1)
    val aClass2 = simpleClass("A", fooMethod2)
    val nameHashes1 = nameHashesForClass(aClass1)
    val nameHashes2 = nameHashesForClass(aClass2)
    // note that `bar` does appear here
    Seq("A", "foo", "bar") === nameHashes1.regularMembers.map(_.name).toSeq
    Seq("A", "foo", "bar") === nameHashes2.regularMembers.map(_.name).toSeq
    assertNameHashEqualForRegularName("A", nameHashes1, nameHashes2)
    assertNameHashNotEqualForRegularName("foo", nameHashes1, nameHashes2)
    assertNameHashNotEqualForRegularName("bar", nameHashes1, nameHashes2)
  }

  private def assertNameHashEqualForRegularName(name: String, nameHashes1: _internalOnly_NameHashes,
    nameHashes2: _internalOnly_NameHashes): Unit = {
    val nameHash1 = nameHashForRegularName(nameHashes1, name)
    val nameHash2 = nameHashForRegularName(nameHashes1, name)
    nameHash1 === nameHash2
  }

  private def assertNameHashNotEqualForRegularName(name: String, nameHashes1: _internalOnly_NameHashes,
    nameHashes2: _internalOnly_NameHashes): Unit = {
    val nameHash1 = nameHashForRegularName(nameHashes1, name)
    val nameHash2 = nameHashForRegularName(nameHashes2, name)
    nameHash1 !=== nameHash2
  }

  private def nameHashForRegularName(nameHashes: _internalOnly_NameHashes, name: String): _internalOnly_NameHash =
    try {
      nameHashes.regularMembers.find(_.name == name).get
    } catch {
      case e: NoSuchElementException => throw new RuntimeException(s"Couldn't find $name in $nameHashes", e)
    }

  private def nameHashesForClass(cl: ClassLike): _internalOnly_NameHashes = {
    val sourceAPI = new SourceAPI(Array.empty, Array(cl))
    val nameHashing = new NameHashing
    nameHashing.nameHashes(sourceAPI)
  }

  private def lzy[T](x: T): Lazy[T] = new Lazy[T] { def get: T = x }

  private def simpleStructure(defs: Definition*) = new Structure(lzy(Array.empty[Type]), lzy(defs.toArray), lzy(Array.empty[Definition]))

  private def simpleClass(name: String, defs: Definition*): ClassLike = {
    val structure = simpleStructure(defs: _*)
    simpleClass(name, structure)
  }

  private def simpleClass(name: String, structure: Structure): ClassLike = {
    new ClassLike(DefinitionType.ClassDef, lzy(emptyType), lzy(structure), Array.empty, Array.empty, name, publicAccess, defaultModifiers, Array.empty)
  }

  private val emptyType = new EmptyType
  private val intTpe = new Projection(emptyType, "Int")
  private val strTpe = new Projection(emptyType, "String")
  private val publicAccess = new Public
  private val defaultModifiers = new Modifiers(false, false, false, false, false, false, false)

}
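The scaladoc for "definition location" above describes its two compilation units only in prose. Spelled out as a minimal sketch (the `= 0` bodies are my placeholders for the elided `...`):

// Foo1.scala: xyz is defined inside the class
class Foo { def xyz: Int = 0 }
object Foo

// Foo2.scala: xyz has moved to the companion object
class Foo
object Foo { def xyz: Int = 0 }

// Name hashing should assign different hashes to the name `xyz` in these two
// units, whereas a hash that ignored definition location would not change.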
@@ -8,80 +8,79 @@ import org.scalacheck._
import Gen._
import Prop._

object AnalysisTest extends Properties("Analysis") {
  // Merge and split a hard-coded trivial example.
  property("Simple Merge and Split") = {
    def f(s: String) = new File(s)
    val aScala = f("A.scala")
    val bScala = f("B.scala")
    val aSource = genSource("A" :: "A$" :: Nil).sample.get
    val bSource = genSource("B" :: "B$" :: Nil).sample.get
    val cSource = genSource("C" :: Nil).sample.get
    val exists = new Exists(true)
    val sourceInfos = SourceInfos.makeInfo(Nil, Nil)

    // a
    var a = Analysis.Empty
    a = a.addProduct(aScala, f("A.class"), exists, "A")
    a = a.addProduct(aScala, f("A$.class"), exists, "A$")
    a = a.addSource(aScala, aSource, exists, Nil, Nil, sourceInfos)
    a = a.addBinaryDep(aScala, f("x.jar"), "x", exists)
-    a = a.addExternalDep(aScala, "C", cSource, inherited=false)
+    a = a.addExternalDep(aScala, "C", cSource, inherited = false)

    // b
    var b = Analysis.Empty
    b = b.addProduct(bScala, f("B.class"), exists, "B")
    b = b.addProduct(bScala, f("B$.class"), exists, "B$")
    b = b.addSource(bScala, bSource, exists, Nil, Nil, sourceInfos)
    b = b.addBinaryDep(bScala, f("x.jar"), "x", exists)
    b = b.addBinaryDep(bScala, f("y.jar"), "y", exists)
-    b = b.addExternalDep(bScala, "A", aSource, inherited=true)
+    b = b.addExternalDep(bScala, "A", aSource, inherited = true)

    // ab
    var ab = Analysis.Empty
    ab = ab.addProduct(aScala, f("A.class"), exists, "A")
    ab = ab.addProduct(aScala, f("A$.class"), exists, "A$")
    ab = ab.addProduct(bScala, f("B.class"), exists, "B")
    ab = ab.addProduct(bScala, f("B$.class"), exists, "B$")
    ab = ab.addSource(aScala, aSource, exists, Nil, Nil, sourceInfos)
    ab = ab.addSource(bScala, bSource, exists, aScala :: Nil, aScala :: Nil, sourceInfos)
    ab = ab.addBinaryDep(aScala, f("x.jar"), "x", exists)
    ab = ab.addBinaryDep(bScala, f("x.jar"), "x", exists)
    ab = ab.addBinaryDep(bScala, f("y.jar"), "y", exists)
-    ab = ab.addExternalDep(aScala, "C", cSource, inherited=false)
+    ab = ab.addExternalDep(aScala, "C", cSource, inherited = false)

    val split: Map[String, Analysis] = ab.groupBy({ f: File => f.getName.substring(0, 1) })

    val aSplit = split.getOrElse("A", Analysis.Empty)
    val bSplit = split.getOrElse("B", Analysis.Empty)

    val merged = Analysis.merge(a :: b :: Nil)

    ("split(AB)(A) == A" |: compare(a, aSplit)) &&
      ("split(AB)(B) == B" |: compare(b, bSplit)) &&
      ("merge(A, B) == AB" |: compare(merged, ab))
  }

  // Merge and split large, generated examples.
  // Mustn't shrink, as the default Shrink[Int] doesn't respect the lower bound of choose(), which will cause
  // a divide-by-zero error masking the original error.
  property("Complex Merge and Split") = forAllNoShrink(genAnalysis, choose(1, 10)) { (analysis: Analysis, numSplits: Int) =>
-    val grouped: Map[Int, Analysis] = analysis.groupBy({ f: File => abs(f.hashCode()) % numSplits})
+    val grouped: Map[Int, Analysis] = analysis.groupBy({ f: File => abs(f.hashCode()) % numSplits })
    def getGroup(i: Int): Analysis = grouped.getOrElse(i, Analysis.Empty)
    val splits = (Range(0, numSplits) map getGroup).toList

    val merged: Analysis = Analysis.merge(splits)
    "Merge all" |: compare(analysis, merged)
  }

  // Compare two analyses with useful labelling when they aren't equal.
  private[this] def compare(left: Analysis, right: Analysis): Prop =
    s" LEFT: $left" |:
      s"RIGHT: $right" |:
      s"STAMPS EQUAL: ${left.stamps == right.stamps}" |:
      s"APIS EQUAL: ${left.apis == right.apis}" |:
      s"RELATIONS EQUAL: ${left.relations == right.relations}" |:
      "UNEQUAL" |:
      (left == right)
}
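The comment about Shrink[Int] is easy to miss. A self-contained sketch of the same split-and-merge shape (the property and all names here are illustrative, chosen by me, not taken from the commit) shows why forAllNoShrink is used:

import org.scalacheck.{ Gen, Properties }
import org.scalacheck.Prop.forAllNoShrink

object SplitRoundTrip extends Properties("SplitRoundTrip") {
  // forAllNoShrink is needed because the default Shrink[Int] ignores the
  // choose(1, 10) lower bound: shrinking numSplits down to 0 would turn a
  // genuine failure into a misleading division-by-zero.
  property("regrouping preserves all elements") =
    forAllNoShrink(Gen.listOf(Gen.alphaStr), Gen.choose(1, 10)) { (xs: List[String], numSplits: Int) =>
      // Split into numSplits buckets, then recombine and compare.
      val grouped = xs.groupBy(s => math.abs(s.hashCode) % numSplits)
      grouped.values.flatten.toList.sorted == xs.sorted
    }
}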
@@ -11,144 +11,143 @@ import sbt.Relation
import xsbti.api._
import xsbti.SafeLazy

/**
 * Scalacheck generators for Analysis objects and their substructures.
 * Fairly complex, as Analysis has interconnected state that can't be
 * independently generated.
 */
object TestCaseGenerators {
  // We restrict sizes, otherwise the generated Analysis objects get huge and the tests take a long time.
  val maxSources = 10 // Max number of source files.
  val maxRelatives = 10 // Max number of things that a source x can relate to in a single Relation.
  val maxPathSegmentLen = 10 // Max number of characters in a path segment.
  val maxPathLen = 6 // Max number of path segments in a path.

  // Ensure that we generate unique class names and file paths every time.
  // Using repeated strings may lead to all sorts of undesirable interactions.
  val used = scala.collection.mutable.Set.empty[String]

  def unique[T](g: Gen[T]) = g retryUntil { o: T => used.add(o.toString) }

  def identifier: Gen[String] = sized { size =>
    resize(Math.max(size, 3), Gen.identifier)
  }

  def genFilePathSegment: Gen[String] = for {
    n <- choose(3, maxPathSegmentLen) // Segments have at least 3 characters.
    c <- alphaChar
    cs <- listOfN(n - 1, alphaNumChar)
-  } yield (c::cs).mkString
+  } yield (c :: cs).mkString

  def genFile: Gen[File] = for {
    n <- choose(2, maxPathLen) // Paths have at least 2 segments.
    path <- listOfN(n, genFilePathSegment)
  } yield new File(path.mkString("/"))

  def genStamp: Gen[Stamp] = for {
    b <- oneOf(true, false)
  } yield new Exists(b)

  def zipMap[A, B](a: Seq[A], b: Seq[B]): Map[A, B] = (a zip b).toMap

  def genStamps(rel: Relations): Gen[Stamps] = {
    val prod = rel.allProducts.toList
    val src = rel.allSources.toList
    val bin = rel.allBinaryDeps.toList
    for {
      prodStamps <- listOfN(prod.length, genStamp)
      srcStamps <- listOfN(src.length, genStamp)
      binStamps <- listOfN(bin.length, genStamp)
      binClassNames <- listOfN(bin.length, unique(identifier))
    } yield Stamps(zipMap(prod, prodStamps), zipMap(src, srcStamps), zipMap(bin, binStamps), zipMap(bin, binClassNames))
  }

  // We need "proper" definitions with specific class names, as groupBy use these to pick a representative top-level class when splitting.
  private[this] def makeDefinition(name: String): Definition =
    new ClassLike(DefinitionType.ClassDef, lzy(new EmptyType()),
      lzy(new Structure(lzy(Array()), lzy(Array()), lzy(Array()))), Array(), Array(),
      name, new Public(), new Modifiers(false, false, false, false, false, false, false), Array())

-  private [this] def lzy[T <: AnyRef](x: T) = SafeLazy.strict(x)
+  private[this] def lzy[T <: AnyRef](x: T) = SafeLazy.strict(x)

  def genNameHash(defn: String): Gen[xsbti.api._internalOnly_NameHash] =
    value(new xsbti.api._internalOnly_NameHash(defn, defn.hashCode()))

  def genNameHashes(defns: Seq[String]): Gen[xsbti.api._internalOnly_NameHashes] = {
    def partitionAccordingToMask[T](mask: List[Boolean], xs: List[T]): (List[T], List[T]) = {
      val (p1, p2) = (mask zip xs).partition(_._1)
      (p1.map(_._2), p2.map(_._2))
    }
    val pairsOfGenerators = for (defn <- defns) yield {
      for {
        isRegularMember <- arbitrary[Boolean]
        nameHash <- genNameHash(defn)
      } yield (isRegularMember, nameHash)
    }
    val genNameHashesList = Gen.sequence[List, xsbti.api._internalOnly_NameHash](defns.map(genNameHash))
    val genTwoListOfNameHashes = for {
      nameHashesList <- genNameHashesList
      isRegularMemberList <- listOfN(nameHashesList.length, arbitrary[Boolean])
    } yield partitionAccordingToMask(isRegularMemberList, nameHashesList)
    for {
      (regularMemberNameHashes, implicitMemberNameHashes) <- genTwoListOfNameHashes
    } yield new xsbti.api._internalOnly_NameHashes(regularMemberNameHashes.toArray, implicitMemberNameHashes.toArray)
  }

  def genSource(defns: Seq[String]): Gen[Source] = for {
    startTime <- arbitrary[Long]
    hashLen <- choose(10, 20) // Requred by SameAPI to be > 0.
-    hash <- Gen.containerOfN[Array,Byte](hashLen, arbitrary[Byte])
+    hash <- Gen.containerOfN[Array, Byte](hashLen, arbitrary[Byte])
    apiHash <- arbitrary[Int]
    hasMacro <- arbitrary[Boolean]
    nameHashes <- genNameHashes(defns)
-  } yield new Source(new Compilation(startTime, Array()), hash, new SourceAPI(Array(), Array(defns map makeDefinition:_*)), apiHash, nameHashes, hasMacro)
+  } yield new Source(new Compilation(startTime, Array()), hash, new SourceAPI(Array(), Array(defns map makeDefinition: _*)), apiHash, nameHashes, hasMacro)

  def genSources(all_defns: Seq[Seq[String]]): Gen[Seq[Source]] = Gen.sequence[List, Source](all_defns.map(genSource))

  def genAPIs(rel: Relations): Gen[APIs] = {
    val internal = rel.allInternalSrcDeps.toList.sorted
    val external = rel.allExternalDeps.toList.sorted
    for {
      internalSources <- genSources(internal map { f: File => rel.classNames(f).toList.sorted })
      externalSources <- genSources(external map { s: String => s :: Nil })
    } yield APIs(zipMap(internal, internalSources), zipMap(external, externalSources))
  }

  def genRelation[T](g: Gen[T])(srcs: List[File]): Gen[Relation[File, T]] = for {
    n <- choose(1, maxRelatives)
    entries <- listOfN(srcs.length, containerOfN[Set, T](n, g))
  } yield Relation.reconstruct(zipMap(srcs, entries))

  val genFileRelation = genRelation[File](unique(genFile)) _
  val genStringRelation = genRelation[String](unique(identifier)) _

  def genRSource(srcs: List[File]): Gen[Relations.Source] = for {
    internal <- listOfN(srcs.length, someOf(srcs)) // Internal dep targets must come from list of sources.
    external <- genStringRelation(srcs)
  } yield Relations.makeSource( // Ensure that we don't generate a dep of some file on itself.
-    Relation.reconstruct((srcs zip (internal map { _.toSet } ) map {case (a, b) => (a, b - a) }).toMap),
+    Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap),
    external)

  def genSubRSource(src: Relations.Source): Gen[Relations.Source] = for {
    internal <- someOf(src.internal.all.toList)
    external <- someOf(src.external.all.toList)
  } yield Relations.makeSource(Relation.empty ++ internal, Relation.empty ++ external)

  def genRelations: Gen[Relations] = for {
    numSrcs <- choose(0, maxSources)
    srcs <- listOfN(numSrcs, genFile)
    srcProd <- genFileRelation(srcs)
    binaryDep <- genFileRelation(srcs)
    direct <- genRSource(srcs)
    publicInherited <- genSubRSource(direct)
    classes <- genStringRelation(srcs)

-  } yield Relations.make(srcProd, binaryDep, direct, publicInherited , classes)
+  } yield Relations.make(srcProd, binaryDep, direct, publicInherited, classes)

  def genAnalysis: Gen[Analysis] = for {
    rels <- genRelations
    stamps <- genStamps(rels)
    apis <- genAPIs(rels)
  } yield new MAnalysis(stamps, apis, rels, SourceInfos.empty, Compilations.empty)
}
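The retryUntil-based `unique` combinator is the one subtle piece of this object. A small self-contained sketch of how it behaves when sampled (the object and variable names here are illustrative, not part of the commit):

import org.scalacheck.Gen

object UniqueGenSketch {
  // Mirrors the `unique` helper above: retry a generator until its value has
  // not been seen before; the mutable set makes repeated samples distinct.
  private val seen = scala.collection.mutable.Set.empty[String]
  def unique[T](g: Gen[T]): Gen[T] = g retryUntil { o => seen.add(o.toString) }

  def main(args: Array[String]): Unit = {
    val ids = Gen.listOfN(5, unique(Gen.identifier)).sample
    println(ids) // e.g. Some(List(...)) with five identifiers, all distinct
  }
}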
@@ -12,120 +12,120 @@ import ScalaCompilerForUnitTesting.ExtractedSourceDependencies
@RunWith(classOf[JUnitRunner])
class DependencySpecification extends Specification {

  "Extracted source dependencies from public members" in {
    val sourceDependencies = extractSourceDependenciesPublic
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance
    memberRef('A) === Set.empty
    inheritance('A) === Set.empty
    memberRef('B) === Set('A, 'D)
    inheritance('B) === Set('D)
    memberRef('C) === Set('A)
    inheritance('C) === Set.empty
    memberRef('D) === Set.empty
    inheritance('D) === Set.empty
    memberRef('E) === Set.empty
    inheritance('E) === Set.empty
    memberRef('F) === Set('A, 'B, 'C, 'D, 'E)
    inheritance('F) === Set('A, 'E)
    memberRef('H) === Set('B, 'E, 'G)
    // aliases and applied type constructors are expanded so we have inheritance dependency on B
    inheritance('H) === Set('B, 'E)
  }

  "Extracted source dependencies from private members" in {
    val sourceDependencies = extractSourceDependenciesPrivate
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance
    memberRef('A) === Set.empty
    inheritance('A) === Set.empty
    memberRef('B) === Set.empty
    inheritance('B) === Set.empty
    memberRef('C) === Set('A)
    inheritance('C) === Set('A)
    memberRef('D) === Set('B)
    inheritance('D) === Set('B)
  }

  "Extracted source dependencies with trait as first parent" in {
    val sourceDependencies = extractSourceDependenciesTraitAsFirstPatent
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance
    memberRef('A) === Set.empty
    inheritance('A) === Set.empty
    memberRef('B) === Set('A)
    inheritance('B) === Set('A)
    // verify that memberRef captures the oddity described in documentation of `Relations.inheritance`
    // we are mainly interested whether dependency on A is captured in `memberRef` relation so
    // the invariant that says that memberRef is superset of inheritance relation is preserved
    memberRef('C) === Set('A, 'B)
    inheritance('C) === Set('A, 'B)
    // same as above but indirect (C -> B -> A), note that only A is visible here
    memberRef('D) === Set('A, 'C)
    inheritance('D) === Set('A, 'C)
  }

  "Extracted source dependencies from macro arguments" in {
    val sourceDependencies = extractSourceDependenciesFromMacroArgument
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance

    memberRef('A) === Set('B, 'C)
    inheritance('A) === Set.empty
    memberRef('B) === Set.empty
    inheritance('B) === Set.empty
    memberRef('C) === Set.empty
    inheritance('C) === Set.empty
  }

  private def extractSourceDependenciesPublic: ExtractedSourceDependencies = {
    val srcA = "class A"
    val srcB = "class B extends D[A]"
    val srcC = """|class C {
      | def a: A = null
      |}""".stripMargin
    val srcD = "class D[T]"
    val srcE = "trait E[T]"
    val srcF = "trait F extends A with E[D[B]] { self: C => }"
    val srcG = "object G { type T[x] = B }"
    // T is a type constructor [x]B
    // B extends D
    // E verifies the core type gets pulled out
    val srcH = "trait H extends G.T[Int] with (E[Int] @unchecked)"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies = compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC,
      'D -> srcD, 'E -> srcE, 'F -> srcF, 'G -> srcG, 'H -> srcH)
    sourceDependencies
  }

  private def extractSourceDependenciesPrivate: ExtractedSourceDependencies = {
    val srcA = "class A"
    val srcB = "class B"
    val srcC = "class C { private class Inner1 extends A }"
    val srcD = "class D { def foo: Unit = { class Inner2 extends B } }"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies =
      compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD)
    sourceDependencies
  }

  private def extractSourceDependenciesTraitAsFirstPatent: ExtractedSourceDependencies = {
    val srcA = "class A"
    val srcB = "trait B extends A"
    val srcC = "trait C extends B"
    val srcD = "class D extends C"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies =
      compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD)
    sourceDependencies
  }

  private def extractSourceDependenciesFromMacroArgument: ExtractedSourceDependencies = {
    val srcA = "class A { println(B.printTree(C.foo)) }"
    val srcB = """
      |import scala.language.experimental.macros
      |import scala.reflect.macros._
      |object B {

@@ -136,11 +136,11 @@ class DependencySpecification extends Specification {
      | c.Expr[String](literalStr)
      | }
      |}""".stripMargin
    val srcC = "object C { val foo = 1 }"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies =
      compilerForTesting.extractDependenciesFromSrcs(List(Map('B -> srcB, 'C -> srcC), Map('A -> srcA)))
    sourceDependencies
  }
}
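To make the expectations above easier to read, here is a minimal sketch of the distinction being asserted, using only the sources already defined in extractSourceDependenciesPublic (the annotations in the comments are mine):

class A                       // srcA
class D[T]                    // srcD
class B extends D[A]          // srcB: memberRef(B) = {A, D}, inheritance(B) = {D}, since only D is extended
class C { def a: A = null }   // srcC: memberRef(C) = {A},    inheritance(C) = {},  a plain member reference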
@@ -10,33 +10,33 @@ import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ExtractAPISpecification extends Specification {

  "Existential types in method signatures" should {
    "have stable names" in { stableExistentialNames }
  }

  def stableExistentialNames: Boolean = {
    def compileAndGetFooMethodApi(src: String): Def = {
      val compilerForTesting = new ScalaCompilerForUnitTesting
      val sourceApi = compilerForTesting.extractApiFromSrc(src)
      val FooApi = sourceApi.definitions().find(_.name() == "Foo").get.asInstanceOf[ClassLike]
      val fooMethodApi = FooApi.structure().declared().find(_.name == "foo").get
      fooMethodApi.asInstanceOf[Def]
    }
    val src1 = """
      |class Box[T]
      |class Foo {
      | def foo: Box[_] = null
      |
      }""".stripMargin
    val fooMethodApi1 = compileAndGetFooMethodApi(src1)
    val src2 = """
      |class Box[T]
      |class Foo {
      | def bar: Box[_] = null
      | def foo: Box[_] = null
      |
      }""".stripMargin
    val fooMethodApi2 = compileAndGetFooMethodApi(src2)
    SameAPI.apply(fooMethodApi1, fooMethodApi2)
  }
}
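A compact sketch of what "stable names" means here; Foo1 and Foo2 are renamed copies of the Foo in src1 and src2 above so that both fit in one snippet, and the expectation wording is mine:

class Box[T]
class Foo1 { def foo: Box[_] = null }                          // src1: only foo
class Foo2 { def bar: Box[_] = null; def foo: Box[_] = null }  // src2: bar added before foo

// Box[_] desugars to an existential whose type variable gets a compiler-synthesised name.
// The test asserts that SameAPI still reports foo's extracted API as identical in both
// versions, i.e. adding bar must not shift the synthesised existential names.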
@ -12,31 +12,31 @@ import org.specs2.mutable.Specification
|
|||
@RunWith(classOf[JUnit4])
|
||||
class ExtractUsedNamesSpecification extends Specification {
|
||||
|
||||
/**
|
||||
* Standard names that appear in every compilation unit that has any class
|
||||
* definition.
|
||||
*/
|
||||
private val standardNames = Set(
|
||||
// AnyRef is added as default parent of a class
|
||||
"scala", "AnyRef",
|
||||
// class receives a default constructor which is internally called "<init>"
|
||||
"<init>")
|
||||
/**
|
||||
* Standard names that appear in every compilation unit that has any class
|
||||
* definition.
|
||||
*/
|
||||
private val standardNames = Set(
|
||||
// AnyRef is added as default parent of a class
|
||||
"scala", "AnyRef",
|
||||
// class receives a default constructor which is internally called "<init>"
|
||||
"<init>")
|
||||
|
||||
"imported name" in {
|
||||
val src = """
|
||||
"imported name" in {
|
||||
val src = """
|
||||
|package a { class A }
|
||||
|package b {
|
||||
| import a.{A => A2}
|
||||
|}""".stripMargin
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
|
||||
val expectedNames = standardNames ++ Set("a", "A", "A2", "b")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
|
||||
val expectedNames = standardNames ++ Set("a", "A", "A2", "b")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
|
||||
// test covers https://github.com/gkossakowski/sbt/issues/6
|
||||
"names in type tree" in {
|
||||
val srcA = """|
|
||||
// test covers https://github.com/gkossakowski/sbt/issues/6
|
||||
"names in type tree" in {
|
||||
val srcA = """|
|
||||
|package a {
|
||||
| class A {
|
||||
| class C { class D }
|
||||
|
|
@@ -44,65 +44,65 @@ class ExtractUsedNamesSpecification extends Specification {
|
|||
| class B[T]
|
||||
| class BB
|
||||
|}""".stripMargin
|
||||
val srcB = """|
|
||||
val srcB = """|
|
||||
|package b {
|
||||
| abstract class X {
|
||||
| def foo: a.A#C#D
|
||||
| def bar: a.B[a.BB]
|
||||
| }
|
||||
|}""".stripMargin
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
|
||||
val expectedNames = standardNames ++ Set("a", "A", "B", "C", "D", "b", "X", "BB")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
|
||||
val expectedNames = standardNames ++ Set("a", "A", "B", "C", "D", "b", "X", "BB")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
|
||||
// test for https://github.com/gkossakowski/sbt/issues/5
|
||||
"symbolic names" in {
|
||||
val srcA = """|
|
||||
// test for https://github.com/gkossakowski/sbt/issues/5
|
||||
"symbolic names" in {
|
||||
val srcA = """|
|
||||
|class A {
|
||||
| def `=`: Int = 3
|
||||
|}""".stripMargin
|
||||
val srcB = """|
|
||||
val srcB = """|
|
||||
|class B {
|
||||
| def foo(a: A) = a.`=`
|
||||
|}""".stripMargin
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
|
||||
val expectedNames = standardNames ++ Set("A", "a", "B", "=")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
|
||||
val expectedNames = standardNames ++ Set("A", "a", "B", "=")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
|
||||
// test for https://github.com/gkossakowski/sbt/issues/3
|
||||
"used names from the same compilation unit" in {
|
||||
val src = "class A { def foo: Int = 0; def bar: Int = foo }"
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
|
||||
val expectedNames = standardNames ++ Set("A", "foo", "Int")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
// test for https://github.com/gkossakowski/sbt/issues/3
|
||||
"used names from the same compilation unit" in {
|
||||
val src = "class A { def foo: Int = 0; def bar: Int = foo }"
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
|
||||
val expectedNames = standardNames ++ Set("A", "foo", "Int")
|
||||
usedNames === expectedNames
|
||||
}
|
||||
|
||||
// pending test for https://issues.scala-lang.org/browse/SI-7173
|
||||
"names of constants" in {
|
||||
val src = "class A { final val foo = 12; def bar: Int = foo }"
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
|
||||
val expectedNames = standardNames ++ Set("A", "foo", "Int")
|
||||
usedNames === expectedNames
|
||||
}.pendingUntilFixed("Scala's type checker inlines constants so we can't see the original name.")
|
||||
// pending test for https://issues.scala-lang.org/browse/SI-7173
|
||||
"names of constants" in {
|
||||
val src = "class A { final val foo = 12; def bar: Int = foo }"
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
|
||||
val expectedNames = standardNames ++ Set("A", "foo", "Int")
|
||||
usedNames === expectedNames
|
||||
}.pendingUntilFixed("Scala's type checker inlines constants so we can't see the original name.")
|
||||
|
||||
// pending test for https://github.com/gkossakowski/sbt/issues/4
|
||||
// TODO: we should fix it by having special treatment of `selectDynamic` and `applyDynamic` calls
|
||||
"names from method calls on Dynamic" in {
|
||||
val srcA = """|import scala.language.dynamics
|
||||
// pending test for https://github.com/gkossakowski/sbt/issues/4
|
||||
// TODO: we should fix it by having special treatment of `selectDynamic` and `applyDynamic` calls
|
||||
"names from method calls on Dynamic" in {
|
||||
val srcA = """|import scala.language.dynamics
|
||||
|class A extends Dynamic {
|
||||
| def selectDynamic(name: String): Int = name.length
|
||||
|}""".stripMargin
|
||||
val srcB = "class B { def foo(a: A): Int = a.bla }"
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
|
||||
val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla")
|
||||
usedNames === expectedNames
|
||||
}.pendingUntilFixed("Call to Dynamic is desugared in type checker so Select nodes is turned into string literal.")
|
||||
val srcB = "class B { def foo(a: A): Int = a.bla }"
|
||||
val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
|
||||
val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
|
||||
val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla")
|
||||
usedNames === expectedNames
|
||||
}.pendingUntilFixed("Call to Dynamic is desugared in type checker so Select nodes is turned into string literal.")
|
||||
|
||||
}
|
||||
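A further case in the same style could probe names introduced by a type ascription; this is an illustrative sketch only (the source string and the expected subset are assumptions, not part of the change):

    // hypothetical extra example, relying on extractUsedNamesFromSrc and standardNames above
    "names in a type ascription" in {
      val src = "class A { val x: java.lang.String = null }"
      val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
      val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
      // the ascribed type's segments should at least be reported alongside standardNames
      Set("java", "lang", "String").subsetOf(usedNames) must beTrue
    }
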
|
|
|
|||
|
|
@@ -21,158 +21,160 @@ import ScalaCompilerForUnitTesting.ExtractedSourceDependencies
|
|||
*/
|
||||
class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) {
|
||||
|
||||
/**
|
||||
* Compiles given source code using Scala compiler and returns API representation
|
||||
* extracted by ExtractAPI class.
|
||||
*/
|
||||
def extractApiFromSrc(src: String): SourceAPI = {
|
||||
val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
|
||||
analysisCallback.apis(tempSrcFile)
|
||||
}
|
||||
/**
|
||||
* Compiles given source code using Scala compiler and returns API representation
|
||||
* extracted by ExtractAPI class.
|
||||
*/
|
||||
def extractApiFromSrc(src: String): SourceAPI = {
|
||||
val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
|
||||
analysisCallback.apis(tempSrcFile)
|
||||
}
|
||||
|
||||
def extractUsedNamesFromSrc(src: String): Set[String] = {
|
||||
val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
|
||||
analysisCallback.usedNames(tempSrcFile).toSet
|
||||
}
|
||||
def extractUsedNamesFromSrc(src: String): Set[String] = {
|
||||
val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
|
||||
analysisCallback.usedNames(tempSrcFile).toSet
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract used names from src provided as the second argument.
|
||||
*
|
||||
* The purpose of the first argument is to define names that the second
|
||||
* source is going to refer to. Both files are compiled in the same compiler
|
||||
* Run but only names used in the second src file are returned.
|
||||
*/
|
||||
def extractUsedNamesFromSrc(definitionSrc: String, actualSrc: String): Set[String] = {
|
||||
// we drop temp src file corresponding to the definition src file
|
||||
val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc)
|
||||
analysisCallback.usedNames(tempSrcFile).toSet
|
||||
}
|
||||
/**
|
||||
* Extract used names from src provided as the second argument.
|
||||
*
|
||||
* The purpose of the first argument is to define names that the second
|
||||
* source is going to refer to. Both files are compiled in the same compiler
|
||||
* Run but only names used in the second src file are returned.
|
||||
*/
|
||||
def extractUsedNamesFromSrc(definitionSrc: String, actualSrc: String): Set[String] = {
|
||||
// we drop temp src file corresponding to the definition src file
|
||||
val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc)
|
||||
analysisCallback.usedNames(tempSrcFile).toSet
|
||||
}
|
||||
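A sketch of how the two-argument variant above is meant to be called; the two source strings are illustrative and assume the scope of this class:

    // only names used by the second source are returned
    val definitionSrc = "class Provider { def answer: Int = 42 }"
    val actualSrc = "class User { def use(p: Provider): Int = p.answer }"
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val namesInUser = compilerForTesting.extractUsedNamesFromSrc(definitionSrc, actualSrc)
    // "Provider" and "answer" are expected to be among the results, while names
    // used solely inside definitionSrc are not
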
|
||||
/**
|
||||
* Compiles given source code snippets (passed as Strings) using Scala compiler and returns extracted
|
||||
* dependencies between snippets. Source code snippets are identified by symbols. Each symbol should
|
||||
* be associated with one snippet only.
|
||||
*
|
||||
* Snippets can be grouped to be compiled together in the same compiler run. This is
|
||||
* useful to compile macros, which cannot be used in the same compilation run that
|
||||
* defines them.
|
||||
*
|
||||
* Symbols are used to express extracted dependencies between source code snippets. This way we have
|
||||
* file system-independent way of testing dependencies between source code "files".
|
||||
*/
|
||||
def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = {
|
||||
val rawGroupedSrcs = srcs.map(_.values.toList).toList
|
||||
val symbols = srcs.map(_.keys).flatten
|
||||
val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs)
|
||||
val fileToSymbol = (tempSrcFiles zip symbols).toMap
|
||||
/**
|
||||
* Compiles given source code snippets (passed as Strings) using Scala compiler and returns extracted
|
||||
* dependencies between snippets. Source code snippets are identified by symbols. Each symbol should
|
||||
* be associated with one snippet only.
|
||||
*
|
||||
* Snippets can be grouped to be compiled together in the same compiler run. This is
|
||||
* useful to compile macros, which cannot be used in the same compilation run that
|
||||
* defines them.
|
||||
*
|
||||
* Symbols are used to express extracted dependencies between source code snippets. This way we have
|
||||
* file system-independent way of testing dependencies between source code "files".
|
||||
*/
|
||||
def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = {
|
||||
val rawGroupedSrcs = srcs.map(_.values.toList).toList
|
||||
val symbols = srcs.map(_.keys).flatten
|
||||
val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs)
|
||||
val fileToSymbol = (tempSrcFiles zip symbols).toMap
|
||||
|
||||
val memberRefFileDeps = testCallback.sourceDependencies collect {
|
||||
// false indicates that those dependencies are not introduced by inheritance
|
||||
case (target, src, false) => (src, target)
|
||||
}
|
||||
val inheritanceFileDeps = testCallback.sourceDependencies collect {
|
||||
// true indicates that those dependencies are introduced by inheritance
|
||||
case (target, src, true) => (src, target)
|
||||
}
|
||||
def toSymbols(src: File, target: File): (Symbol, Symbol) = (fileToSymbol(src), fileToSymbol(target))
|
||||
val memberRefDeps = memberRefFileDeps map { case (src, target) => toSymbols(src, target) }
|
||||
val inheritanceDeps = inheritanceFileDeps map { case (src, target) => toSymbols(src, target) }
|
||||
def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = {
|
||||
import scala.collection.mutable.{HashMap, MultiMap}
|
||||
val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B]
|
||||
val multiMap = pairs.foldLeft(emptyMultiMap) { case (acc, (key, value)) =>
|
||||
acc.addBinding(key, value)
|
||||
}
|
||||
// convert all collections to immutable variants
|
||||
multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty)
|
||||
}
|
||||
val memberRefFileDeps = testCallback.sourceDependencies collect {
|
||||
// false indicates that those dependencies are not introduced by inheritance
|
||||
case (target, src, false) => (src, target)
|
||||
}
|
||||
val inheritanceFileDeps = testCallback.sourceDependencies collect {
|
||||
// true indicates that those dependencies are introduced by inheritance
|
||||
case (target, src, true) => (src, target)
|
||||
}
|
||||
def toSymbols(src: File, target: File): (Symbol, Symbol) = (fileToSymbol(src), fileToSymbol(target))
|
||||
val memberRefDeps = memberRefFileDeps map { case (src, target) => toSymbols(src, target) }
|
||||
val inheritanceDeps = inheritanceFileDeps map { case (src, target) => toSymbols(src, target) }
|
||||
def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = {
|
||||
import scala.collection.mutable.{ HashMap, MultiMap }
|
||||
val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B]
|
||||
val multiMap = pairs.foldLeft(emptyMultiMap) {
|
||||
case (acc, (key, value)) =>
|
||||
acc.addBinding(key, value)
|
||||
}
|
||||
// convert all collections to immutable variants
|
||||
multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty)
|
||||
}
|
||||
|
||||
ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps))
|
||||
}
|
||||
ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps))
|
||||
}
|
||||
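The pairsToMultiMap helper above goes through a mutable MultiMap before converting back to immutable collections; an equivalent purely immutable formulation (a sketch, not what the diff contains) keeps the same Map[A, Set[B]] result:

    // groupBy collects the pairs per key, mapValues keeps only the second components
    def pairsToMultiMapViaGroupBy[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] =
      pairs.groupBy(_._1).mapValues(_.map(_._2).toSet).withDefaultValue(Set.empty)
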
|
||||
def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = {
|
||||
val symbols = srcs.map(_._1)
|
||||
assert(symbols.distinct.size == symbols.size,
|
||||
s"Duplicate symbols for srcs detected: $symbols")
|
||||
extractDependenciesFromSrcs(List(srcs.toMap))
|
||||
}
|
||||
def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = {
|
||||
val symbols = srcs.map(_._1)
|
||||
assert(symbols.distinct.size == symbols.size,
|
||||
s"Duplicate symbols for srcs detected: $symbols")
|
||||
extractDependenciesFromSrcs(List(srcs.toMap))
|
||||
}
|
||||
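Using the vararg overload above amounts to pairing each symbol with its snippet; an illustrative call, with class bodies and expectations that are assumptions rather than part of the change:

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val deps = compilerForTesting.extractDependenciesFromSrcs(
      'A -> "class A",
      'B -> "class B extends A",
      'C -> "class C { def a: A = null }")
    // 'B depends on 'A through inheritance, 'C only through a member reference
    deps.inheritance('B) // expected to contain 'A
    deps.memberRef('C)   // expected to contain 'A
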
|
||||
/**
|
||||
* Compiles given source code snippets written to temporary files. Each snippet is
|
||||
* written to a separate temporary file.
|
||||
*
|
||||
* Snippets can be grouped to be compiled together in the same compiler run. This is
|
||||
* useful to compile macros, which cannot be used in the same compilation run that
|
||||
* defines them.
|
||||
*
|
||||
* The sequence of temporary files corresponding to passed snippets and analysis
|
||||
* callback is returned as a result.
|
||||
*/
|
||||
private def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = {
|
||||
withTemporaryDirectory { temp =>
|
||||
val analysisCallback = new TestCallback(nameHashing)
|
||||
val classesDir = new File(temp, "classes")
|
||||
classesDir.mkdir()
|
||||
/**
|
||||
* Compiles given source code snippets written to temporary files. Each snippet is
|
||||
* written to a separate temporary file.
|
||||
*
|
||||
* Snippets can be grouped to be compiled together in the same compiler run. This is
|
||||
* useful to compile macros, which cannot be used in the same compilation run that
|
||||
* defines them.
|
||||
*
|
||||
* The sequence of temporary files corresponding to passed snippets and analysis
|
||||
* callback is returned as a result.
|
||||
*/
|
||||
private def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = {
|
||||
withTemporaryDirectory { temp =>
|
||||
val analysisCallback = new TestCallback(nameHashing)
|
||||
val classesDir = new File(temp, "classes")
|
||||
classesDir.mkdir()
|
||||
|
||||
val compiler = prepareCompiler(classesDir, analysisCallback, classesDir.toString)
|
||||
val compiler = prepareCompiler(classesDir, analysisCallback, classesDir.toString)
|
||||
|
||||
val files = for((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield {
|
||||
val run = new compiler.Run
|
||||
val srcFiles = compilationUnit.toSeq.zipWithIndex map { case (src, i) =>
|
||||
val fileName = s"Test-$unitId-$i.scala"
|
||||
prepareSrcFile(temp, fileName, src)
|
||||
}
|
||||
val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList
|
||||
val files = for ((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield {
|
||||
val run = new compiler.Run
|
||||
val srcFiles = compilationUnit.toSeq.zipWithIndex map {
|
||||
case (src, i) =>
|
||||
val fileName = s"Test-$unitId-$i.scala"
|
||||
prepareSrcFile(temp, fileName, src)
|
||||
}
|
||||
val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList
|
||||
|
||||
run.compile(srcFilePaths)
|
||||
run.compile(srcFilePaths)
|
||||
|
||||
srcFilePaths.foreach(f => new File(f).delete)
|
||||
srcFiles
|
||||
}
|
||||
(files.flatten.toSeq, analysisCallback)
|
||||
}
|
||||
}
|
||||
srcFilePaths.foreach(f => new File(f).delete)
|
||||
srcFiles
|
||||
}
|
||||
(files.flatten.toSeq, analysisCallback)
|
||||
}
|
||||
}
|
||||
|
||||
private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = {
|
||||
compileSrcs(List(srcs.toList))
|
||||
}
|
||||
private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = {
|
||||
compileSrcs(List(srcs.toList))
|
||||
}
|
||||
|
||||
private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = {
|
||||
val srcFile = new File(baseDir, fileName)
|
||||
sbt.IO.write(srcFile, src)
|
||||
srcFile
|
||||
}
|
||||
private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = {
|
||||
val srcFile = new File(baseDir, fileName)
|
||||
sbt.IO.write(srcFile, src)
|
||||
srcFile
|
||||
}
|
||||
|
||||
private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = "."): CachedCompiler0#Compiler = {
|
||||
val args = Array.empty[String]
|
||||
object output extends SingleOutput {
|
||||
def outputDirectory: File = outputDir
|
||||
}
|
||||
val weakLog = new WeakLog(ConsoleLogger(), ConsoleReporter)
|
||||
val cachedCompiler = new CachedCompiler0(args, output, weakLog, false)
|
||||
val settings = cachedCompiler.settings
|
||||
settings.classpath.value = classpath
|
||||
settings.usejavacp.value = true
|
||||
val scalaReporter = new ConsoleReporter(settings)
|
||||
val delegatingReporter = DelegatingReporter(settings, ConsoleReporter)
|
||||
val compiler = cachedCompiler.compiler
|
||||
compiler.set(analysisCallback, delegatingReporter)
|
||||
compiler
|
||||
}
|
||||
private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = "."): CachedCompiler0#Compiler = {
|
||||
val args = Array.empty[String]
|
||||
object output extends SingleOutput {
|
||||
def outputDirectory: File = outputDir
|
||||
}
|
||||
val weakLog = new WeakLog(ConsoleLogger(), ConsoleReporter)
|
||||
val cachedCompiler = new CachedCompiler0(args, output, weakLog, false)
|
||||
val settings = cachedCompiler.settings
|
||||
settings.classpath.value = classpath
|
||||
settings.usejavacp.value = true
|
||||
val scalaReporter = new ConsoleReporter(settings)
|
||||
val delegatingReporter = DelegatingReporter(settings, ConsoleReporter)
|
||||
val compiler = cachedCompiler.compiler
|
||||
compiler.set(analysisCallback, delegatingReporter)
|
||||
compiler
|
||||
}
|
||||
|
||||
private object ConsoleReporter extends Reporter {
|
||||
def reset(): Unit = ()
|
||||
def hasErrors: Boolean = false
|
||||
def hasWarnings: Boolean = false
|
||||
def printWarnings(): Unit = ()
|
||||
def problems: Array[Problem] = Array.empty
|
||||
def log(pos: Position, msg: String, sev: Severity): Unit = println(msg)
|
||||
def comment(pos: Position, msg: String): Unit = ()
|
||||
def printSummary(): Unit = ()
|
||||
}
|
||||
private object ConsoleReporter extends Reporter {
|
||||
def reset(): Unit = ()
|
||||
def hasErrors: Boolean = false
|
||||
def hasWarnings: Boolean = false
|
||||
def printWarnings(): Unit = ()
|
||||
def problems: Array[Problem] = Array.empty
|
||||
def log(pos: Position, msg: String, sev: Severity): Unit = println(msg)
|
||||
def comment(pos: Position, msg: String): Unit = ()
|
||||
def printSummary(): Unit = ()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
object ScalaCompilerForUnitTesting {
|
||||
case class ExtractedSourceDependencies(memberRef: Map[Symbol, Set[Symbol]], inheritance: Map[Symbol, Set[Symbol]])
|
||||
case class ExtractedSourceDependencies(memberRef: Map[Symbol, Set[Symbol]], inheritance: Map[Symbol, Set[Symbol]])
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -3,90 +3,87 @@ package sbt
|
|||
import java.io.File
|
||||
import org.specs2._
|
||||
import mutable.Specification
|
||||
import IO.{createDirectory, delete, touch, withTemporaryDirectory}
|
||||
import IO.{ createDirectory, delete, touch, withTemporaryDirectory }
|
||||
import org.apache.ivy.util.ChecksumHelper
|
||||
import IfMissing.Fail
|
||||
|
||||
object ComponentManagerTest extends Specification
|
||||
{
|
||||
val TestID = "manager-test"
|
||||
"Component manager" should {
|
||||
"throw an exception if 'file' is called for a non-existing component" in {
|
||||
withManager { _.file(TestID)(Fail) must throwA[InvalidComponent] }
|
||||
}
|
||||
"throw an exception if 'file' is called for an empty component" in {
|
||||
withManager { manager =>
|
||||
manager.define(TestID, Nil)
|
||||
( manager.file(TestID)(Fail) ) must throwA[InvalidComponent]
|
||||
}
|
||||
}
|
||||
"return the file for a single-file component" in {
|
||||
withManager { manager =>
|
||||
val hash = defineFile(manager, TestID, "a")
|
||||
checksum(manager.file(TestID)(Fail)) must beEqualTo(hash)
|
||||
}
|
||||
}
|
||||
object ComponentManagerTest extends Specification {
|
||||
val TestID = "manager-test"
|
||||
"Component manager" should {
|
||||
"throw an exception if 'file' is called for a non-existing component" in {
|
||||
withManager { _.file(TestID)(Fail) must throwA[InvalidComponent] }
|
||||
}
|
||||
"throw an exception if 'file' is called for an empty component" in {
|
||||
withManager { manager =>
|
||||
manager.define(TestID, Nil)
|
||||
(manager.file(TestID)(Fail)) must throwA[InvalidComponent]
|
||||
}
|
||||
}
|
||||
"return the file for a single-file component" in {
|
||||
withManager { manager =>
|
||||
val hash = defineFile(manager, TestID, "a")
|
||||
checksum(manager.file(TestID)(Fail)) must beEqualTo(hash)
|
||||
}
|
||||
}
|
||||
|
||||
"throw an exception if 'file' is called for multi-file component" in {
|
||||
withManager { manager =>
|
||||
defineFiles(manager, TestID, "a", "b")
|
||||
( manager.file(TestID)(Fail) ) must throwA[InvalidComponent]
|
||||
}
|
||||
}
|
||||
"return the files for a multi-file component" in {
|
||||
withManager { manager =>
|
||||
val hashes = defineFiles(manager, TestID, "a", "b")
|
||||
checksum(manager.files(TestID)(Fail)) must haveTheSameElementsAs(hashes)
|
||||
}
|
||||
}
|
||||
"return the files for a single-file component" in {
|
||||
withManager { manager =>
|
||||
val hashes = defineFiles(manager, TestID, "a")
|
||||
checksum(manager.files(TestID)(Fail)) must haveTheSameElementsAs(hashes)
|
||||
}
|
||||
}
|
||||
"throw an exception if 'files' is called for a non-existing component" in {
|
||||
withManager { _.files(TestID)(Fail) must throwA[InvalidComponent] }
|
||||
}
|
||||
"throw an exception if 'file' is called for multi-file component" in {
|
||||
withManager { manager =>
|
||||
defineFiles(manager, TestID, "a", "b")
|
||||
(manager.file(TestID)(Fail)) must throwA[InvalidComponent]
|
||||
}
|
||||
}
|
||||
"return the files for a multi-file component" in {
|
||||
withManager { manager =>
|
||||
val hashes = defineFiles(manager, TestID, "a", "b")
|
||||
checksum(manager.files(TestID)(Fail)) must haveTheSameElementsAs(hashes)
|
||||
}
|
||||
}
|
||||
"return the files for a single-file component" in {
|
||||
withManager { manager =>
|
||||
val hashes = defineFiles(manager, TestID, "a")
|
||||
checksum(manager.files(TestID)(Fail)) must haveTheSameElementsAs(hashes)
|
||||
}
|
||||
}
|
||||
"throw an exception if 'files' is called for a non-existing component" in {
|
||||
withManager { _.files(TestID)(Fail) must throwA[InvalidComponent] }
|
||||
}
|
||||
|
||||
"properly cache a file and then retrieve it to an unresolved component" in {
|
||||
withTemporaryDirectory { ivyHome =>
|
||||
withManagerHome(ivyHome) { definingManager =>
|
||||
val hash = defineFile(definingManager, TestID, "a")
|
||||
try
|
||||
{
|
||||
definingManager.cache(TestID)
|
||||
withManagerHome(ivyHome) { usingManager =>
|
||||
checksum(usingManager.file(TestID)(Fail)) must beEqualTo(hash)
|
||||
}
|
||||
}
|
||||
finally { definingManager.clearCache(TestID) }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
private def checksum(files: Iterable[File]): Seq[String] = files.map(checksum).toSeq
|
||||
private def checksum(file: File): String = if(file.exists) ChecksumHelper.computeAsString(file, "sha1") else ""
|
||||
private def defineFile(manager: ComponentManager, id: String, name: String): String = createFile(manager, id, name)(checksum)
|
||||
private def defineFiles(manager: ComponentManager, id: String, names: String*): Seq[String] = createFiles(manager, id, names : _*)(checksum)
|
||||
private def createFile[T](manager: ComponentManager, id: String, name: String)(f: File => T): T = createFiles(manager, id, name)(files => f(files.toList.head))
|
||||
private def createFiles[T](manager: ComponentManager, id: String, names: String*)(f: Seq[File] => T): T =
|
||||
withTemporaryDirectory { dir =>
|
||||
val files = names.map(name => new File(dir, name) )
|
||||
files.foreach(writeRandomContent)
|
||||
manager.define(id, files)
|
||||
f(files)
|
||||
}
|
||||
private def writeRandomContent(file: File) = IO.write(file, randomString)
|
||||
private def randomString = "asdf"
|
||||
private def withManager[T](f: ComponentManager => T): T =
|
||||
withTemporaryDirectory { ivyHome => withManagerHome(ivyHome)(f) }
|
||||
"properly cache a file and then retrieve it to an unresolved component" in {
|
||||
withTemporaryDirectory { ivyHome =>
|
||||
withManagerHome(ivyHome) { definingManager =>
|
||||
val hash = defineFile(definingManager, TestID, "a")
|
||||
try {
|
||||
definingManager.cache(TestID)
|
||||
withManagerHome(ivyHome) { usingManager =>
|
||||
checksum(usingManager.file(TestID)(Fail)) must beEqualTo(hash)
|
||||
}
|
||||
} finally { definingManager.clearCache(TestID) }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
private def checksum(files: Iterable[File]): Seq[String] = files.map(checksum).toSeq
|
||||
private def checksum(file: File): String = if (file.exists) ChecksumHelper.computeAsString(file, "sha1") else ""
|
||||
private def defineFile(manager: ComponentManager, id: String, name: String): String = createFile(manager, id, name)(checksum)
|
||||
private def defineFiles(manager: ComponentManager, id: String, names: String*): Seq[String] = createFiles(manager, id, names: _*)(checksum)
|
||||
private def createFile[T](manager: ComponentManager, id: String, name: String)(f: File => T): T = createFiles(manager, id, name)(files => f(files.toList.head))
|
||||
private def createFiles[T](manager: ComponentManager, id: String, names: String*)(f: Seq[File] => T): T =
|
||||
withTemporaryDirectory { dir =>
|
||||
val files = names.map(name => new File(dir, name))
|
||||
files.foreach(writeRandomContent)
|
||||
manager.define(id, files)
|
||||
f(files)
|
||||
}
|
||||
private def writeRandomContent(file: File) = IO.write(file, randomString)
|
||||
private def randomString = "asdf"
|
||||
private def withManager[T](f: ComponentManager => T): T =
|
||||
withTemporaryDirectory { ivyHome => withManagerHome(ivyHome)(f) }
|
||||
|
||||
private def withManagerHome[T](ivyHome: File)(f: ComponentManager => T): T =
|
||||
TestLogger { logger =>
|
||||
withTemporaryDirectory { temp =>
|
||||
val mgr = new ComponentManager(xsbt.boot.Locks, new xsbt.boot.ComponentProvider(temp, true), Some(ivyHome), logger)
|
||||
f(mgr)
|
||||
}
|
||||
}
|
||||
private def withManagerHome[T](ivyHome: File)(f: ComponentManager => T): T =
|
||||
TestLogger { logger =>
|
||||
withTemporaryDirectory { temp =>
|
||||
val mgr = new ComponentManager(xsbt.boot.Locks, new xsbt.boot.ComponentProvider(temp, true), Some(ivyHome), logger)
|
||||
f(mgr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -4,120 +4,119 @@ import java.io.File
|
|||
import org.specs2._
|
||||
import mutable.Specification
|
||||
|
||||
object CrossVersionTest extends Specification
|
||||
{
|
||||
"Cross version" should {
|
||||
"return sbt API for xyz as None" in {
|
||||
CrossVersion.sbtApiVersion("xyz") must_== None
|
||||
}
|
||||
"return sbt API for 0.12 as None" in {
|
||||
CrossVersion.sbtApiVersion("0.12") must_== None
|
||||
}
|
||||
"return sbt API for 0.12.0-SNAPSHOT as None" in {
|
||||
CrossVersion.sbtApiVersion("0.12.0-SNAPSHOT") must_== None
|
||||
}
|
||||
"return sbt API for 0.12.0-RC1 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.0-RC1") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.0 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.0") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.1-SNAPSHOT as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.1-SNAPSHOT") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.1-RC1 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.1-RC1") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.1 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.1") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API compatibility for 0.12.0-M1 as false" in {
|
||||
CrossVersion.isSbtApiCompatible("0.12.0-M1") must_== false
|
||||
}
|
||||
"return sbt API compatibility for 0.12.0-RC1 as true" in {
|
||||
CrossVersion.isSbtApiCompatible("0.12.0-RC1") must_== true
|
||||
}
|
||||
"return sbt API compatibility for 0.12.1-RC1 as true" in {
|
||||
CrossVersion.isSbtApiCompatible("0.12.1-RC1") must_== true
|
||||
}
|
||||
"return binary sbt version for 0.11.3 as 0.11.3" in {
|
||||
CrossVersion.binarySbtVersion("0.11.3") must_== "0.11.3"
|
||||
}
|
||||
"return binary sbt version for 0.12.0-M1 as 0.12.0-M1" in {
|
||||
CrossVersion.binarySbtVersion("0.12.0-M1") must_== "0.12.0-M1"
|
||||
}
|
||||
"return binary sbt version for 0.12.0-RC1 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.0-RC1") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.0 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.0") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.1-SNAPSHOT as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.1-SNAPSHOT") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.1-RC1 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.1-RC1") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.1 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.1") must_== "0.12"
|
||||
}
|
||||
object CrossVersionTest extends Specification {
|
||||
"Cross version" should {
|
||||
"return sbt API for xyz as None" in {
|
||||
CrossVersion.sbtApiVersion("xyz") must_== None
|
||||
}
|
||||
"return sbt API for 0.12 as None" in {
|
||||
CrossVersion.sbtApiVersion("0.12") must_== None
|
||||
}
|
||||
"return sbt API for 0.12.0-SNAPSHOT as None" in {
|
||||
CrossVersion.sbtApiVersion("0.12.0-SNAPSHOT") must_== None
|
||||
}
|
||||
"return sbt API for 0.12.0-RC1 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.0-RC1") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.0 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.0") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.1-SNAPSHOT as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.1-SNAPSHOT") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.1-RC1 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.1-RC1") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API for 0.12.1 as Some((0, 12))" in {
|
||||
CrossVersion.sbtApiVersion("0.12.1") must_== Some((0, 12))
|
||||
}
|
||||
"return sbt API compatibility for 0.12.0-M1 as false" in {
|
||||
CrossVersion.isSbtApiCompatible("0.12.0-M1") must_== false
|
||||
}
|
||||
"return sbt API compatibility for 0.12.0-RC1 as true" in {
|
||||
CrossVersion.isSbtApiCompatible("0.12.0-RC1") must_== true
|
||||
}
|
||||
"return sbt API compatibility for 0.12.1-RC1 as true" in {
|
||||
CrossVersion.isSbtApiCompatible("0.12.1-RC1") must_== true
|
||||
}
|
||||
"return binary sbt version for 0.11.3 as 0.11.3" in {
|
||||
CrossVersion.binarySbtVersion("0.11.3") must_== "0.11.3"
|
||||
}
|
||||
"return binary sbt version for 0.12.0-M1 as 0.12.0-M1" in {
|
||||
CrossVersion.binarySbtVersion("0.12.0-M1") must_== "0.12.0-M1"
|
||||
}
|
||||
"return binary sbt version for 0.12.0-RC1 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.0-RC1") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.0 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.0") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.1-SNAPSHOT as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.1-SNAPSHOT") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.1-RC1 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.1-RC1") must_== "0.12"
|
||||
}
|
||||
"return binary sbt version for 0.12.1 as 0.12" in {
|
||||
CrossVersion.binarySbtVersion("0.12.1") must_== "0.12"
|
||||
}
|
||||
|
||||
"return Scala API for xyz as None" in {
|
||||
CrossVersion.scalaApiVersion("xyz") must_== None
|
||||
}
|
||||
"return Scala API for 2.10 as None" in {
|
||||
CrossVersion.scalaApiVersion("2.10") must_== None
|
||||
}
|
||||
"return Scala API for 2.10.0-SNAPSHOT as None" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0-SNAPSHOT") must_== None
|
||||
}
|
||||
"return Scala API for 2.10.0-RC1 as None" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0-RC1") must_== None
|
||||
}
|
||||
"return Scala API for 2.10.0 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.0-1 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0-1") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.1-SNAPSHOT as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.1-SNAPSHOT") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.1-RC1 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.1-RC1") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.1 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.1") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API compatibility for 2.10.0-M1 as false" in {
|
||||
CrossVersion.isScalaApiCompatible("2.10.0-M1") must_== false
|
||||
}
|
||||
"return Scala API compatibility for 2.10.0-RC1 as false" in {
|
||||
CrossVersion.isScalaApiCompatible("2.10.0-RC1") must_== false
|
||||
}
|
||||
"return Scala API compatibility for 2.10.1-RC1 as false" in {
|
||||
CrossVersion.isScalaApiCompatible("2.10.1-RC1") must_== true
|
||||
}
|
||||
"return binary Scala version for 2.9.2 as 2.9.2" in {
|
||||
CrossVersion.binaryScalaVersion("2.9.2") must_== "2.9.2"
|
||||
}
|
||||
"return binary Scala version for 2.10.0-M1 as 2.10.0-M1" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.0-M1") must_== "2.10.0-M1"
|
||||
}
|
||||
"return binary Scala version for 2.10.0-RC1 as 2.10.0-RC1" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.0-RC1") must_== "2.10.0-RC1"
|
||||
}
|
||||
"return binary Scala version for 2.10.0 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.0") must_== "2.10"
|
||||
}
|
||||
"return binary Scala version for 2.10.1-M1 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.1-M1") must_== "2.10"
|
||||
}
|
||||
"return binary Scala version for 2.10.1-RC1 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.1-RC1") must_== "2.10"
|
||||
}
|
||||
"return binary Scala version for 2.10.1 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.1") must_== "2.10"
|
||||
}
|
||||
}
|
||||
"return Scala API for xyz as None" in {
|
||||
CrossVersion.scalaApiVersion("xyz") must_== None
|
||||
}
|
||||
"return Scala API for 2.10 as None" in {
|
||||
CrossVersion.scalaApiVersion("2.10") must_== None
|
||||
}
|
||||
"return Scala API for 2.10.0-SNAPSHOT as None" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0-SNAPSHOT") must_== None
|
||||
}
|
||||
"return Scala API for 2.10.0-RC1 as None" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0-RC1") must_== None
|
||||
}
|
||||
"return Scala API for 2.10.0 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.0-1 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.0-1") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.1-SNAPSHOT as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.1-SNAPSHOT") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.1-RC1 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.1-RC1") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API for 2.10.1 as Some((2, 10))" in {
|
||||
CrossVersion.scalaApiVersion("2.10.1") must_== Some((2, 10))
|
||||
}
|
||||
"return Scala API compatibility for 2.10.0-M1 as false" in {
|
||||
CrossVersion.isScalaApiCompatible("2.10.0-M1") must_== false
|
||||
}
|
||||
"return Scala API compatibility for 2.10.0-RC1 as false" in {
|
||||
CrossVersion.isScalaApiCompatible("2.10.0-RC1") must_== false
|
||||
}
|
||||
"return Scala API compatibility for 2.10.1-RC1 as false" in {
|
||||
CrossVersion.isScalaApiCompatible("2.10.1-RC1") must_== true
|
||||
}
|
||||
"return binary Scala version for 2.9.2 as 2.9.2" in {
|
||||
CrossVersion.binaryScalaVersion("2.9.2") must_== "2.9.2"
|
||||
}
|
||||
"return binary Scala version for 2.10.0-M1 as 2.10.0-M1" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.0-M1") must_== "2.10.0-M1"
|
||||
}
|
||||
"return binary Scala version for 2.10.0-RC1 as 2.10.0-RC1" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.0-RC1") must_== "2.10.0-RC1"
|
||||
}
|
||||
"return binary Scala version for 2.10.0 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.0") must_== "2.10"
|
||||
}
|
||||
"return binary Scala version for 2.10.1-M1 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.1-M1") must_== "2.10"
|
||||
}
|
||||
"return binary Scala version for 2.10.1-RC1 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.1-RC1") must_== "2.10"
|
||||
}
|
||||
"return binary Scala version for 2.10.1 as 2.10" in {
|
||||
CrossVersion.binaryScalaVersion("2.10.1") must_== "2.10"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -4,26 +4,25 @@ import java.io.File
import org.specs2._
import mutable.Specification

object MakePomTest extends Specification {
  val mp = new MakePom(ConsoleLogger())
  import mp.{ makeDependencyVersion => v }
  "MakePom makeDependencyVersion" should {
    "Handle .+ in versions" in {
      v("1.+") must_== "[1,2)"
      v("1.2.3.4.+") must_== "[1.2.3.4,1.2.3.5)"
      v("12.31.42.+") must_== "[12.31.42,12.31.43)"
    }
    /* TODO - do we care about this case?
     * 1+ --> [1,2),[10,20),[100,200),[1000,2000),[10000,20000),[100000,200000)
     */
    "Handle ]* bracket in version ranges" in {
      v("]1,3]") must_== "(1,3]"
      v("]1.1,1.3]") must_== "(1.1,1.3]"
    }
    "Handle *[ bracket in version ranges" in {
      v("[1,3[") must_== "[1,3)"
      v("[1.1,1.3[") must_== "[1.1,1.3)"
    }
  }
}

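The .+ cases above follow a last-segment-plus-one rule when building the closed-open Maven range; a hypothetical extra example in the same style (not part of the change) would be:

    "Handle .+ on a two segment version" in {
      v("2.10.+") must_== "[2.10,2.11)"
    }
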
@@ -3,13 +3,12 @@ package xsbt.boot
import org.scalacheck._
import Prop._

object CacheTest extends Properties("Cache") {
  implicit val functions: Arbitrary[Int => Int] = Arbitrary { Gen.oneOf(Seq(identity[Int], i => -i, i => i / 2, i => i + 1)) }

  property("Cache") = Prop.forAll { (key: Int, keys: List[Int], map: Int => Int) =>
    val cache = new Cache((i: Int, _: Unit) => map(i))
    def toProperty(key: Int) = ("Key " + key) |: ("Value: " + map(key)) |: (cache.apply(key, ()) == map(key))
    Prop.all(keys.map(toProperty): _*)
  }
}

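The property relies on Cache memoizing the supplied function by key; a small illustrative sketch of that contract, with names that are assumptions rather than part of the change:

    var evaluations = 0
    val square = new Cache((i: Int, _: Unit) => { evaluations += 1; i * i })
    square.apply(4, ()) // 16, computed on first use
    square.apply(4, ()) // 16 again; the second call is expected to hit the cache
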
@@ -1,92 +1,91 @@
|
|||
package xsbt.boot
|
||||
|
||||
import java.io.{File,InputStream}
|
||||
import java.io.{ File, InputStream }
|
||||
import java.net.URL
|
||||
import java.util.Properties
|
||||
import xsbti._
|
||||
import org.specs2._
|
||||
import mutable.Specification
|
||||
import sbt.IO.{createDirectory, touch,withTemporaryDirectory}
|
||||
import sbt.IO.{ createDirectory, touch, withTemporaryDirectory }
|
||||
|
||||
object ConfigurationParserTest extends Specification
|
||||
{
|
||||
"Configuration Parser" should {
|
||||
"Correctly parse bootOnly" in {
|
||||
object ConfigurationParserTest extends Specification {
|
||||
"Configuration Parser" should {
|
||||
"Correctly parse bootOnly" in {
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| local: bootOnly""".stripMargin,
|
||||
Repository.Predefined("local", true))
|
||||
Repository.Predefined("local", true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| local""".stripMargin,
|
||||
Repository.Predefined("local", false))
|
||||
Repository.Predefined("local", false))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org""".stripMargin,
|
||||
Repository.Maven("id", new URL("http://repo1.maven.org"), false))
|
||||
Repository.Maven("id", new URL("http://repo1.maven.org"), false))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, bootOnly""".stripMargin,
|
||||
Repository.Maven("id", new URL("http://repo1.maven.org"), true))
|
||||
Repository.Maven("id", new URL("http://repo1.maven.org"), true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath]""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", false, false))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", false, false))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], mavenCompatible""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", true, false))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", true, false))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], mavenCompatible, bootOnly""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", true, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], bootOnly, mavenCompatible""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", true, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], bootOnly""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", false, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[orgPath]", false, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath]""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], descriptorOptional""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false, true, false))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false, true, false))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], descriptorOptional, skipConsistencyCheck""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false, true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false, true, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], skipConsistencyCheck, descriptorOptional""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false, true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, false, true, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], skipConsistencyCheck, descriptorOptional, mavenCompatible, bootOnly""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", true, true, true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", true, true, true, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], bootOnly""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", false, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], bootOnly, mavenCompatible""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", true, true))
|
||||
|
||||
repoFileContains("""|[repositories]
|
||||
repoFileContains("""|[repositories]
|
||||
| id: http://repo1.maven.org, [orgPath], [artPath], mavenCompatible, bootOnly""".stripMargin,
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", true, true))
|
||||
Repository.Ivy("id", new URL("http://repo1.maven.org"), "[orgPath]", "[artPath]", true, true))
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def repoFileContains(file: String, repo: Repository.Repository) =
|
||||
loadRepoFile(file) must contain(repo)
|
||||
|
||||
def loadRepoFile(file: String) =
|
||||
(new ConfigurationParser) readRepositoriesConfig file
|
||||
def loadRepoFile(file: String) =
|
||||
(new ConfigurationParser) readRepositoriesConfig file
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,55 +1,52 @@
|
|||
package xsbt.boot
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop.{Exception => _,_}
|
||||
import Prop.{ Exception => _, _ }
|
||||
|
||||
object EnumerationTest extends Properties("Enumeration")
|
||||
{
|
||||
property("MultiEnum.toValue") = checkToValue(MultiEnum, multiElements : _*)
|
||||
property("MultiEnum.elements") = checkElements(MultiEnum, multiElements : _*)
|
||||
property("EmptyEnum.toValue") = checkToValue(EmptyEnum)
|
||||
property("EmptyEnum.elements") = EmptyEnum.elements.isEmpty
|
||||
property("SingleEnum.toValue") = checkToValue( SingleEnum, singleElements )
|
||||
property("SingleEnum.elements") = checkElements( SingleEnum,singleElements )
|
||||
object EnumerationTest extends Properties("Enumeration") {
|
||||
property("MultiEnum.toValue") = checkToValue(MultiEnum, multiElements: _*)
|
||||
property("MultiEnum.elements") = checkElements(MultiEnum, multiElements: _*)
|
||||
property("EmptyEnum.toValue") = checkToValue(EmptyEnum)
|
||||
property("EmptyEnum.elements") = EmptyEnum.elements.isEmpty
|
||||
property("SingleEnum.toValue") = checkToValue(SingleEnum, singleElements)
|
||||
property("SingleEnum.elements") = checkElements(SingleEnum, singleElements)
|
||||
|
||||
def singleElements = ("A", SingleEnum.a)
|
||||
def multiElements =
|
||||
{
|
||||
import MultiEnum.{a,b,c}
|
||||
List(("A" -> a), ("B" -> b), ("C" -> c))
|
||||
}
|
||||
def singleElements = ("A", SingleEnum.a)
|
||||
def multiElements =
|
||||
{
|
||||
import MultiEnum.{ a, b, c }
|
||||
List(("A" -> a), ("B" -> b), ("C" -> c))
|
||||
}
|
||||
|
||||
def checkElements(enum: Enumeration, mapped: (String, Enumeration#Value)*) =
|
||||
{
|
||||
val elements = enum.elements
|
||||
("elements: " + elements) |:
|
||||
( mapped.forall{ case (s,v) => elements.contains(v) } && (elements.length == mapped.length) )
|
||||
}
|
||||
def checkToValue(enum: Enumeration, mapped: (String, Enumeration#Value)*) =
|
||||
{
|
||||
def invalid(s: String) =
|
||||
("valueOf(" + s + ")") |:
|
||||
Prop.throws(classOf[Exception])(enum.toValue(s))
|
||||
def valid(s: String, expected: Enumeration#Value) =
|
||||
("valueOf(" + s + ")") |:
|
||||
("Expected " + expected) |:
|
||||
( enum.toValue(s) == expected )
|
||||
val map = Map( mapped : _*)
|
||||
Prop.forAll( (s: String) =>
|
||||
map.get(s) match {
|
||||
case Some(v) => valid(s, v)
|
||||
case None => invalid(s)
|
||||
} )
|
||||
}
|
||||
object MultiEnum extends Enumeration
|
||||
{
|
||||
val a = value("A")
|
||||
val b = value("B")
|
||||
val c = value("C")
|
||||
}
|
||||
object SingleEnum extends Enumeration
|
||||
{
|
||||
val a = value("A")
|
||||
}
|
||||
object EmptyEnum extends Enumeration
|
||||
def checkElements(enum: Enumeration, mapped: (String, Enumeration#Value)*) =
|
||||
{
|
||||
val elements = enum.elements
|
||||
("elements: " + elements) |:
|
||||
(mapped.forall { case (s, v) => elements.contains(v) } && (elements.length == mapped.length))
|
||||
}
|
||||
def checkToValue(enum: Enumeration, mapped: (String, Enumeration#Value)*) =
|
||||
{
|
||||
def invalid(s: String) =
|
||||
("valueOf(" + s + ")") |:
|
||||
Prop.throws(classOf[Exception])(enum.toValue(s))
|
||||
def valid(s: String, expected: Enumeration#Value) =
|
||||
("valueOf(" + s + ")") |:
|
||||
("Expected " + expected) |:
|
||||
(enum.toValue(s) == expected)
|
||||
val map = Map(mapped: _*)
|
||||
Prop.forAll((s: String) =>
|
||||
map.get(s) match {
|
||||
case Some(v) => valid(s, v)
|
||||
case None => invalid(s)
|
||||
})
|
||||
}
|
||||
object MultiEnum extends Enumeration {
|
||||
val a = value("A")
|
||||
val b = value("B")
|
||||
val c = value("C")
|
||||
}
|
||||
object SingleEnum extends Enumeration {
|
||||
val a = value("A")
|
||||
}
|
||||
object EmptyEnum extends Enumeration
|
||||
}
|
||||
|
|
@@ -2,38 +2,36 @@ package xsbt.boot

import org.scalacheck._

object ListMapProperties extends Properties("ListMap") {
  implicit val genListMap = Arbitrary(for (list <- Arbitrary.arbitrary[List[(Int, Int)]]) yield ListMap(list: _*))

  property("ListMap from List contains all members of that List") = Prop.forAll { (list: List[(Int, Int)]) =>
    val map = ListMap(list: _*)
    list forall { entry => map contains entry._1 }
  }
  property("contains added entry") = Prop.forAll { (map: ListMap[Int, Int], key: Int, value: Int) =>
    { (map + (key, value)) contains (key) } &&
      { (map + (key, value))(key) == value } &&
      { (map + (key, value)).get(key) == Some(value) }
  }
  property("remove") = Prop.forAll { (map: ListMap[Int, Int], key: Int) =>
    { Prop.throws(classOf[Exception])((map - key)(key)) } &&
      { !(map - key).contains(key) } &&
      { (map - key).get(key).isEmpty }
  }
  property("empty") = Prop.forAll { (key: Int) =>
    { Prop.throws(classOf[Exception])(ListMap.empty(key)) }
    { !ListMap.empty.contains(key) } &&
      { ListMap.empty.get(key).isEmpty }
  }
}

object ListMapEmpty extends Properties("ListMap.empty") {
  import ListMap.empty
  property("isEmpty") = empty.isEmpty
  property("toList.isEmpty") = empty.toList.isEmpty
  property("toSeq.isEmpty") = empty.toSeq.isEmpty
  property("toStream.isEmpty") = empty.toStream.isEmpty
  property("keys.isEmpty") = empty.keys.isEmpty
  property("iterator.isEmpty") = empty.iterator.isEmpty
}

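The properties above encode the basic ListMap contract; an illustrative walk-through with concrete values, mirroring those checks rather than adding new behaviour:

    val m = ListMap((1, 10), (2, 20))
    m.contains(1)      // true
    (m + (3, 30))(3)   // 30
    (m - 1).get(1)     // None
    // ListMap.empty(key) and (m - key)(key) throw, which is what Prop.throws checks
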
@ -5,53 +5,54 @@ import Prop._
import java.io.File
import sbt.IO.withTemporaryDirectory

/**
 * These mainly test that things work in the uncontested case and that no OverlappingFileLockExceptions occur.
 * There is no real locking testing, just the coordination of locking.
 */
object LocksTest extends Properties("Locks") {
  property("Lock in nonexisting directory") = spec {
    withTemporaryDirectory { dir =>
      val lockFile = new File(dir, "doesntexist/lock")
      Locks(lockFile, callTrue)
    }
  }

  property("Uncontested re-entrant lock") = spec {
    withTemporaryDirectory { dir =>
      val lockFile = new File(dir, "lock")
      Locks(lockFile, callLocked(lockFile)) &&
        Locks(lockFile, callLocked(lockFile))
    }
  }

  property("Uncontested double lock") = spec {
    withTemporaryDirectory { dir =>
      val lockFileA = new File(dir, "lockA")
      val lockFileB = new File(dir, "lockB")
      Locks(lockFileA, callLocked(lockFileB)) &&
        Locks(lockFileB, callLocked(lockFileA))
    }
  }

  property("Contested single lock") = spec {
    withTemporaryDirectory { dir =>
      val lockFile = new File(dir, "lock")
      forkFold(2000) { i => Locks(lockFile, callTrue) }
    }
  }

  private def spec(f: => Boolean): Prop = Prop { _ => Result(if (f) True else False) }

  private def call[T](impl: => T) = new java.util.concurrent.Callable[T] { def call = impl }
  private def callLocked(lockFile: File) = call { Locks(lockFile, callTrue) }
  private lazy val callTrue = call { true }

  private def forkFold(n: Int)(impl: Int => Boolean): Boolean =
    (true /: forkWait(n)(impl))(_ && _)
  private def forkWait(n: Int)(impl: Int => Boolean): Iterable[Boolean] =
    {
      import scala.concurrent.ops.future
      val futures = (0 until n).map { i => future { impl(i) } }
      futures.toList.map(_())
    }
}
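For context on the doc comment above: the failure mode these properties guard against can be reproduced with plain NIO file locks. The following standalone sketch is not part of the sbt sources (names are illustrative); it only shows that acquiring the same lock twice from one JVM, without the re-entrant coordination that Locks provides, throws OverlappingFileLockException.

import java.io.{ File, RandomAccessFile }
import java.nio.channels.OverlappingFileLockException

object OverlapSketch {
  def main(args: Array[String]): Unit = {
    val file = File.createTempFile("overlap", ".lock")
    val channel = new RandomAccessFile(file, "rw").getChannel
    val first = channel.lock() // first acquisition succeeds
    try channel.lock() // second acquisition in the same JVM fails
    catch { case _: OverlappingFileLockException => println("second lock rejected, as expected") }
    finally { first.release(); channel.close() }
  }
}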
@ -1,32 +1,31 @@
package xsbt.boot

import java.io.File
import java.util.Arrays.{ equals => arrEquals }
import org.scalacheck._

object PreTest extends Properties("Pre") {
  import Pre._
  property("isEmpty") = Prop.forAll((s: String) => (s.isEmpty == isEmpty(s)))
  property("isNonEmpty") = Prop.forAll((s: String) => (isEmpty(s) != isNonEmpty(s)))
  property("assert true") = { assert(true); true }
  property("assert false") = Prop.throws(classOf[AssertionError])(assert(false))
  property("assert true with message") = Prop.forAll { (s: String) => assert(true, s); true }
  property("assert false with message") = Prop.forAll((s: String) => Prop.throws(classOf[AssertionError])(assert(false, s)))
  property("require false") = Prop.forAll((s: String) => Prop.throws(classOf[IllegalArgumentException])(require(false, s)))
  property("require true") = Prop.forAll { (s: String) => require(true, s); true }
  property("error") = Prop.forAll((s: String) => Prop.throws(classOf[BootException])(error(s)))
  property("toBoolean") = Prop.forAll((s: String) => trap(toBoolean(s)) == trap(java.lang.Boolean.parseBoolean(s)))
  property("toArray") = Prop.forAll((list: List[Int]) => arrEquals(list.toArray, toArray(list)))
  property("toArray") = Prop.forAll((list: List[String]) => objArrEquals(list.toArray, toArray(list)))
  property("concat") = Prop.forAll(genFiles, genFiles) { (a: Array[File], b: Array[File]) => (a ++ b) sameElements concat(a, b) }
  property("array") = Prop.forAll(genFiles) { (a: Array[File]) => array(a.toList: _*) sameElements Array(a: _*) }

  implicit lazy val arbFile: Arbitrary[File] = Arbitrary { for (i <- Arbitrary.arbitrary[Int]) yield new File(i.toString) }
  implicit lazy val genFiles: Gen[Array[File]] = Arbitrary.arbitrary[Array[File]]

  def trap[T](t: => T): Option[T] = try { Some(t) } catch { case e: Exception => None }

  private[this] def objArrEquals[T <: AnyRef](a: Array[T], b: Array[T]): Boolean =
    arrEquals(a.asInstanceOf[Array[AnyRef]], b.asInstanceOf[Array[AnyRef]])
}
@ -1,98 +1,98 @@
package xsbt.boot

import java.io.{ File, InputStream }
import java.net.URL
import java.util.Properties
import xsbti._
import org.specs2._
import mutable.Specification
import LaunchTest._
import sbt.IO.{ createDirectory, touch, withTemporaryDirectory }

object ScalaProviderTest extends Specification {
  "Launch" should {
    //"provide ClassLoader for Scala 2.8.0" in { checkScalaLoader("2.8.0") }
    "provide ClassLoader for Scala 2.8.2" in { checkScalaLoader("2.8.2") }
    "provide ClassLoader for Scala 2.9.0" in { checkScalaLoader("2.9.0") }
    "provide ClassLoader for Scala 2.9.2" in { checkScalaLoader("2.9.2") }
    "provide ClassLoader for Scala 2.10.4" in { checkScalaLoader("2.10.4") }
    "provide ClassLoader for Scala 2.11.0" in { checkScalaLoader("2.11.0") }
  }

  "Launch" should {
    "Successfully load an application from the local repository and run it with correct arguments" in {
      checkLoad(List("test"), "xsbt.boot.test.ArgumentTest").asInstanceOf[Exit].code must equalTo(0)
      checkLoad(List(), "xsbt.boot.test.ArgumentTest") must throwA[RuntimeException]
    }
    "Successfully load a plain application from the local repository and run it with correct arguments" in {
      checkLoad(List("test"), "xsbt.boot.test.PlainArgumentTest").asInstanceOf[Exit].code must equalTo(0)
      checkLoad(List(), "xsbt.boot.test.PlainArgumentTest") must throwA[RuntimeException]
    }
    "Successfully load a plain application with an int return from the local repository and run it with correct arguments" in {
      checkLoad(List("test"), "xsbt.boot.test.PlainArgumentTestWithReturn").asInstanceOf[Exit].code must equalTo(0)
      checkLoad(List(), "xsbt.boot.test.PlainArgumentTestWithReturn").asInstanceOf[Exit].code must equalTo(1)
    }
    "Successfully load an application from the local repository and run it with the correct sbt version" in {
      checkLoad(List(AppVersion), "xsbt.boot.test.AppVersionTest").asInstanceOf[Exit].code must equalTo(0)
    }
    "Add extra resources to the classpath" in {
      checkLoad(testResources, "xsbt.boot.test.ExtraTest", createExtra).asInstanceOf[Exit].code must equalTo(0)
    }
  }

  def checkLoad(arguments: List[String], mainClassName: String): MainResult =
    checkLoad(arguments, mainClassName, _ => Array[File]())
  def checkLoad(arguments: List[String], mainClassName: String, extra: File => Array[File]): MainResult =
    withTemporaryDirectory { currentDirectory =>
      withLauncher { launcher =>
        Launch.run(launcher)(
          new RunConfiguration(Some(unmapScalaVersion(LaunchTest.getScalaVersion)), LaunchTest.testApp(mainClassName, extra(currentDirectory)).toID, currentDirectory, arguments)
        )
      }
    }
  private def testResources = List("test-resourceA", "a/b/test-resourceB", "sub/test-resource")
  private def createExtra(currentDirectory: File) =
    {
      val resourceDirectory = new File(currentDirectory, "resources")
      createDirectory(resourceDirectory)
      testResources.foreach(resource => touch(new File(resourceDirectory, resource.replace('/', File.separatorChar))))
      Array(resourceDirectory)
    }
  private def checkScalaLoader(version: String): Unit = withLauncher(checkLauncher(version, mapScalaVersion(version)))
  private def checkLauncher(version: String, versionValue: String)(launcher: Launcher): Unit =
    {
      val provider = launcher.getScala(version)
      val loader = provider.loader
      // ensure that this loader can load Scala classes by trying scala.Product
      tryScala(loader)
      getScalaVersion(loader) must beEqualTo(versionValue)
    }
  private def tryScala(loader: ClassLoader): Unit = Class.forName("scala.Product", false, loader).getClassLoader must be(loader)
}
object LaunchTest {
  def testApp(main: String): Application = testApp(main, Array[File]())
  def testApp(main: String, extra: Array[File]): Application = Application("org.scala-sbt", "launch-test", new Explicit(AppVersion), main, Nil, CrossValue.Disabled, extra)
  import Predefined._
  def testRepositories = List(Local, ScalaToolsReleases, ScalaToolsSnapshots).map(Repository.Predefined(_))
  def withLauncher[T](f: xsbti.Launcher => T): T =
    withTemporaryDirectory { bootDirectory =>
      f(Launcher(bootDirectory, testRepositories))
    }

  val finalStyle = Set("2.9.1", "2.9.0-1", "2.9.0", "2.8.2", "2.8.1", "2.8.0")
  def unmapScalaVersion(versionNumber: String) = versionNumber.stripSuffix(".final")
  def mapScalaVersion(versionNumber: String) = if (finalStyle(versionNumber)) versionNumber + ".final" else versionNumber

  def getScalaVersion: String = getScalaVersion(getClass.getClassLoader)
  def getScalaVersion(loader: ClassLoader): String = getProperty(loader, "library.properties", "version.number")
  lazy val AppVersion = getProperty(getClass.getClassLoader, "xsbt.version.properties", "version")

  private[this] def getProperty(loader: ClassLoader, res: String, prop: String) = loadProperties(loader.getResourceAsStream(res)).getProperty(prop)
  private[this] def loadProperties(propertiesStream: InputStream): Properties =
    {
      val properties = new Properties
      try { properties.load(propertiesStream) } finally { propertiesStream.close() }
      properties
    }
}
@ -1,17 +1,16 @@
package xsbt.boot

import java.io.{ File, InputStream }
import java.net.URL
import java.util.Properties
import xsbti._
import org.specs2._
import mutable.Specification
import LaunchTest._
import sbt.IO.{ createDirectory, touch, withTemporaryDirectory }
import java.net.URI

object ServerLocatorTest extends Specification {
  "ServerLocator" should {
    // TODO - Maybe use scalacheck to randomly generate URIs
    "read and write server URI properties" in {
@ -46,7 +45,7 @@ object ServerLocatorTest extends Specification
        |Some more output.""".stripMargin
      val inputStream = new java.io.BufferedReader(new java.io.StringReader(input))
      val result = try ServerLauncher.readUntilSynch(inputStream)
      finally inputStream.close()
      result must equalTo(Some(expected))
    }
  }
@ -6,63 +6,62 @@ import Configuration._
import java.io.File
import java.net.URI

object URITests extends Properties("URI Tests") {
  // Need a platform-specific root, otherwise the URI will not be absolute (e.g. if we use a "/a/b/c" path on Windows)
  // Note:
  // If I use "C:" instead of "/C:", then isAbsolute == true for the resulting URI, but resolve is broken:
  // e.g. scala> new URI("file", "c:/a/b'/has spaces", null).resolve("a")    broken
  //      res0: java.net.URI = a
  //      scala> new URI("file", "/c:/a/b'/has spaces", null).resolve("a")   working
  //      res1: java.net.URI = file:/c:/a/b'/a
  val Root = if (xsbt.boot.Pre.isWindows) "/C:/" else "/"

  val FileProtocol = "file"
  property("directoryURI adds trailing slash") = secure {
    val dirURI = directoryURI(new File(Root + "a/b/c"))
    val directURI = filePathURI(Root + "a/b/c/")
    dirURI == directURI
  }
  property("directoryURI preserves trailing slash") = secure {
    directoryURI(new File(Root + "a/b/c/")) == filePathURI(Root + "a/b/c/")
  }

  property("filePathURI encodes spaces") = secure {
    val decoded = "has spaces"
    val encoded = "has%20spaces"
    val fpURI = filePathURI(decoded)
    val directURI = new URI(encoded)
    s"filePathURI: $fpURI" |:
      s"direct URI: $directURI" |:
      s"getPath: ${fpURI.getPath}" |:
      s"getRawPath: ${fpURI.getRawPath}" |:
      (fpURI == directURI) &&
      (fpURI.getPath == decoded) &&
      (fpURI.getRawPath == encoded)
  }

  property("filePathURI and File.toURI agree for absolute file") = secure {
    val s = Root + "a/b'/has spaces"
    val viaPath = filePathURI(s)
    val viaFile = new File(s).toURI
    s"via path: $viaPath" |:
      s"via file: $viaFile" |:
      (viaPath == viaFile)
  }

  property("filePathURI supports URIs") = secure {
    val s = s"file://${Root}is/a/uri/with%20spaces"
    val decoded = Root + "is/a/uri/with spaces"
    val encoded = Root + "is/a/uri/with%20spaces"
    val fpURI = filePathURI(s)
    val directURI = new URI(s)
    s"filePathURI: $fpURI" |:
      s"direct URI: $directURI" |:
      s"getPath: ${fpURI.getPath}" |:
      s"getRawPath: ${fpURI.getRawPath}" |:
      (fpURI == directURI) &&
      (fpURI.getPath == decoded) &&
      (fpURI.getRawPath == encoded)
  }

}
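The Windows-root note in the comment above can be replayed outside the test suite. The sketch below is illustrative only and is not part of these sources; the expected outputs are the ones quoted in that comment.

import java.net.URI

object WindowsRootUriSketch {
  def main(args: Array[String]): Unit = {
    // without the leading slash, resolution collapses to just the relative part
    println(new URI("file", "c:/a/b'/has spaces", null).resolve("a"))  // prints: a
    // with the leading slash, the URI stays hierarchical and resolves as expected
    println(new URI("file", "/c:/a/b'/has spaces", null).resolve("a")) // prints: file:/c:/a/b'/a
  }
}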
@ -1,69 +1,68 @@
package xsbt.boot

import org.scalacheck._
import Prop._

object VersionParts extends Properties("VersionParts") {
  property("Valid version, no qualifier") = Prop.forAll { (x0: Int, y0: Int, z0: Int) =>
    val (x, y, z) = (norm(x0), norm(y0), norm(z0))
    val str = s"$x.$y.$z"
    val expected =
      s"$x.$y.$z" ::
        s"$x.$y" ::
        "" ::
        Nil
    check(str, expected)
  }

  property("Valid version with qualifier") = Prop.forAll { (x0: Int, y0: Int, z0: Int, q0: String) =>
    val (x, y, z, q) = (norm(x0), norm(y0), norm(z0), normS(q0))
    val str = s"$x.$y.$z-$q"
    val expected =
      s"$x.$y.$z-$q" ::
        s"$x.$y.$z" ::
        s"$x.$y" ::
        "" ::
        Nil
    check(str, expected)
  }

  property("Invalid version") = Prop.forAll { (x0: Int, y0: Int, z0: Int, q0: String) =>
    val (x, y, z, q) = (norm(x0), norm(y0), norm(z0), normS(q0))
    val strings =
      x.toString ::
        s"$x.$y" ::
        s"$x.$y-$q" ::
        s"$x.$y.$z.$q" ::
        Nil
    all(strings.map(str => check(str, Configuration.noMatchParts)): _*)
  }

  private[this] def check(versionString: String, expectedParts: List[String]) =
    {
      def printParts(s: List[String]): String = s.map("'" + _ + "'").mkString("(", ", ", ")")
      val actual = Configuration.versionParts(versionString)
      s"Version string '$versionString'" |:
        s"Expected '${printParts(expectedParts)}'" |:
        s"Actual '${printParts(actual)}'" |:
        (actual == expectedParts)
    }

  // Make `i` non-negative
  private[this] def norm(i: Int): Int =
    if (i == Int.MinValue) Int.MaxValue else math.abs(i)

  // Make `s` non-empty and suitable for java.util.regex input
  private[this] def normS(s: String): String =
    {
      val filtered = s filter validChar
      if (filtered.isEmpty) "q" else filtered
    }

  // strip whitespace and characters not supported by Pattern
  private[this] def validChar(c: Char) =
    !java.lang.Character.isWhitespace(c) &&
      !java.lang.Character.isISOControl(c) &&
      !Character.isHighSurrogate(c) &&
      !Character.isLowSurrogate(c)
}
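To make the expected shape concrete, here is a worked instance of the lists the properties above describe (illustrative values only, not taken from a test run):

// a valid version with a qualifier yields progressively shorter prefixes, ending with ""
// Configuration.versionParts("0.13.5-M2") == List("0.13.5-M2", "0.13.5", "0.13", "")
// an input without a third numeric component, e.g. "0.13", falls into the "Invalid version"
// case and yields Configuration.noMatchParts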
@ -1,51 +1,50 @@
package sbt
package compiler

import scala.language.reflectiveCalls
import org.scalacheck._
import Prop._
import scala.tools.nsc.reporters.StoreReporter

object EvalTest extends Properties("eval") {
  private[this] val reporter = new StoreReporter
  import reporter.{ ERROR, Info, Severity }
  private[this] val eval = new Eval(_ => reporter, None)

  property("inferred integer") = forAll { (i: Int) =>
    val result = eval.eval(i.toString)
    (label("Value", value(result)) |: (value(result) == i)) &&
      (label("Type", value(result)) |: (result.tpe == IntType)) &&
      (label("Files", result.generated) |: (result.generated.isEmpty))
  }

  property("explicit integer") = forAll { (i: Int) =>
    val result = eval.eval(i.toString, tpeName = Some(IntType))
    (label("Value", value(result)) |: (value(result) == i)) &&
      (label("Type", result.tpe) |: (result.tpe == IntType)) &&
      (label("Files", result.generated) |: (result.generated.isEmpty))
  }

  property("type mismatch") = forAll { (i: Int, l: Int) =>
    val line = math.abs(l)
    val src = "mismatch"
    throws(classOf[RuntimeException])(eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) &&
      hasErrors(line + 1, src)
  }

  property("backed local class") = forAll { (i: Int) =>
    IO.withTemporaryDirectory { dir =>
      val eval = new Eval(_ => reporter, backing = Some(dir))
      val result = eval.eval(local(i))
      val v = value(result).asInstanceOf[{ def i: Int }].i
      (label("Value", v) |: (v == i)) &&
        (label("Type", result.tpe) |: (result.tpe == LocalType)) &&
        (label("Files", result.generated) |: (!result.generated.isEmpty))
    }
  }

  val ValTestNames = Set("x", "a")
  val ValTestContent = """
val x: Int = {
  val y: Int = 4
  y
@ -59,40 +58,39 @@ val p = {
}
"""

  property("val test") = secure {
    val defs = (ValTestContent, 1 to 7) :: Nil
    val res = eval.evalDefinitions(defs, new EvalImports(Nil, ""), "<defs>", "scala.Int" :: Nil)
    label("Val names", res.valNames) |: (res.valNames.toSet == ValTestNames)
  }

  property("explicit import") = forAll(testImport("import math.abs" :: Nil))
  property("wildcard import") = forAll(testImport("import math._" :: Nil))
  property("comma-separated imports") = forAll(testImport("import util._, math._, xml._" :: Nil))
  property("multiple imports") = forAll(testImport("import util._" :: "import math._" :: "import xml._" :: Nil))

  private[this] def testImport(imports: Seq[String]): Int => Prop = i =>
    value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs(i)

  private[this] def local(i: Int) = "{ class ETest(val i: Int); new ETest(" + i + ") }"
  val LocalType = "AnyRef{val i: Int}"

  private[this] def value(r: EvalResult) = r.getValue(getClass.getClassLoader)
  private[this] def hasErrors(line: Int, src: String) =
    {
      val is = reporter.infos
      ("Has errors" |: (!is.isEmpty)) &&
        all(is.toSeq.map(validPosition(line, src)): _*)
    }
  private[this] def validPosition(line: Int, src: String)(i: Info) =
    {
      val nme = i.pos.source.file.name
      (label("Severity", i.severity) |: (i.severity == ERROR)) &&
        (label("Line", i.pos.line) |: (i.pos.line == line)) &&
        (label("Name", nme) |: (nme == src))
    }
  val IntType = "Int"
  val BooleanType = "Boolean"

  def label(s: String, value: Any) = s + " (" + value + ")"
}
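For readers unfamiliar with this API, a minimal usage sketch assembled only from the calls exercised by the properties above (the constructor and method shapes are assumptions taken from this test, not verified independently):

import sbt.compiler.Eval
import scala.tools.nsc.reporters.StoreReporter

object EvalSketch {
  def main(args: Array[String]): Unit = {
    val reporter = new StoreReporter
    val eval = new Eval(_ => reporter, None)
    // evaluating a literal expression should infer Int and need no backing class files
    val result = eval.eval("41 + 1")
    println(result.tpe)                                // expected: Int
    println(result.getValue(getClass.getClassLoader)) // expected: 42
  }
}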
@ -1,8 +1,7 @@
package sbt
package std

import complete.{ DefaultParsers, Parsers }

/*object UseTask
{
@ -18,30 +17,29 @@ package std
    if(y.value) z else x
  }
}*/
object Assign {
  import java.io.File
  import Def.{ inputKey, settingKey, taskKey }
  import Def.{ Initialize, macroValueT, parserToInput }
  // import UseTask.{x,y,z,a,set,plain}

  val ak = taskKey[Int]("a")
  val bk = taskKey[Seq[Int]]("b")
  val ck = settingKey[File]("c")
  val sk = taskKey[Set[_]]("s")

  val ik = inputKey[Int]("i")
  val isk = inputKey[String]("is")
  val mk = settingKey[Int]("m")
  val tk = taskKey[Int]("t")
  val name = settingKey[String]("name")
  val dummyt = taskKey[complete.Parser[String]]("dummyt")
  val dummys = settingKey[complete.Parser[String]]("dummys")
  val dummy3 = settingKey[complete.Parser[(String, Int)]]("dummy3")
  val tsk: complete.Parser[Task[String]] = ???
  val itsk: Initialize[InputTask[Int]] = ???

  /* def azy = sk.value

  def azy2 = appmacro.Debug.checkWild(Def.task{ sk.value.size })
@ -52,54 +50,54 @@ object Assign
    bk ++= Seq(z.value)
  )*/

  val zz = Def.task { mk.value + tk.value + mk.value + tk.value + mk.value + tk.value + mk.value + tk.value + mk.value + tk.value + mk.value + tk.value }

  import DefaultParsers._
  val p = Def.setting { name.value ~> Space ~> ID }
  val is = Seq(
    mk := 3,
    name := "asdf",
    tk := (math.random * 1000).toInt,
    isk := dummys.value.parsed // should not compile: cannot use a task to define the parser
  // ik := { if( tsk.parsed.value == "blue") tk.value else mk.value }
  )

  val it1 = Def.inputTask {
    tsk.parsed //"as" //dummy.value.parsed
  }
  val it2 = Def.inputTask {
    "lit"
  }

  val it3: Initialize[InputTask[String]] = Def.inputTask[String] {
    tsk.parsed.value + itsk.parsed.value.toString + isk.value
  }
  // should not compile: cannot use a task to define the parser
  /* val it4 = Def.inputTask {
    dummyt.value.parsed
  }*/
  // should compile: can use a setting to define the parser
  val it5 = Def.inputTask {
    dummys.parsed
  }
  val it6 = Def.inputTaskDyn {
    val d3 = dummy3.parsed
    val x = d3._1
    val i = d3._2
    Def.task { tk.value + i }
  }

  val it7 = Def.inputTask {
    it5.parsed
  }

  def bool: Initialize[Boolean] = Def.setting { true }
  def enabledOnly[T](key: Initialize[T]): Initialize[Seq[T]] = Def.setting {
    val keys: Seq[T] = forallIn(key).value
    val enabled: Seq[Boolean] = forallIn(bool).value
    (keys zip enabled) collect { case (a, true) => a }
  }
  def forallIn[T](key: Initialize[T]): Initialize[Seq[T]] = Def.setting {
    key.value :: Nil
  }
}
@ -1,75 +1,73 @@
package sbt

import Project._
import Types.{ idFun, some }
import TestBuild._

import java.io.File
import java.net.URI
import org.scalacheck._
import Prop._
import Gen._

object Delegates extends Properties("delegates") {
  property("generate non-empty configs") = forAll { (c: Seq[Config]) => !c.isEmpty }
  property("generate non-empty tasks") = forAll { (t: Seq[Taskk]) => !t.isEmpty }

  property("no duplicate scopes") = forAll { (keys: Keys) =>
    allDelegates(keys) { (_, ds) => ds.distinct.size == ds.size }
  }
  property("delegates non-empty") = forAll { (keys: Keys) =>
    allDelegates(keys) { (_, ds) => !ds.isEmpty }
  }

  property("An initially Global axis is Global in all delegates") = allAxes(alwaysGlobal)

  property("Projects precede builds precede Global") = forAll { (keys: Keys) =>
    allDelegates(keys) { (scope, ds) =>
      val projectAxes = ds.map(_.project)
      val nonProject = projectAxes.dropWhile { case Select(_: ProjectRef) => true; case _ => false }
      val global = nonProject.dropWhile { case Select(_: BuildRef) => true; case _ => false }
      global forall { _ == Global }
    }
  }
  property("Initial scope present with all combinations of Global axes") = allAxes(globalCombinations)

  property("initial scope first") = forAll { (keys: Keys) =>
    allDelegates(keys) { (scope, ds) => ds.head == scope }
  }
  property("global scope last") = forAll { (keys: Keys) =>
    allDelegates(keys) { (_, ds) => ds.last == Scope.GlobalScope }
  }

  def allAxes(f: (Scope, Seq[Scope], Scope => ScopeAxis[_]) => Prop): Prop = forAll { (keys: Keys) =>
    allDelegates(keys) { (s, ds) =>
      all(f(s, ds, _.project), f(s, ds, _.config), f(s, ds, _.task), f(s, ds, _.extra))
    }
  }
  def allDelegates(keys: Keys)(f: (Scope, Seq[Scope]) => Prop): Prop = all(keys.scopes map { scope =>
    val delegates = keys.env.delegates(scope)
    ("Scope: " + Scope.display(scope, "_")) |:
      ("Delegates:\n\t" + delegates.map(scope => Scope.display(scope, "_")).mkString("\n\t")) |:
      f(scope, delegates)
  }: _*)
  def alwaysGlobal(s: Scope, ds: Seq[Scope], axis: Scope => ScopeAxis[_]): Prop =
    (axis(s) != Global) ||
      all(ds map { d => (axis(d) == Global): Prop }: _*)
  def globalCombinations(s: Scope, ds: Seq[Scope], axis: Scope => ScopeAxis[_]): Prop =
    {
      val value = axis(s)
      val mods = List[Scope => Scope](_.copy(project = Global), _.copy(config = Global), _.copy(task = Global), _.copy(extra = Global))
      val modAndIdent = mods.map(_ :: idFun[Scope] :: Nil)

      def loop(cur: Scope, acc: List[Scope], rem: List[Seq[Scope => Scope]]): Seq[Scope] =
        rem match {
          case Nil => acc
          case x :: xs => x flatMap { mod =>
            val s = mod(cur)
            loop(s, s :: acc, xs)
          }
        }
      all(loop(s, Nil, modAndIdent).map(ds contains _: Prop): _*)
    }
}
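Read together, the ordering properties above pin down the shape of a delegate chain. A purely illustrative sketch of that shape, with hypothetical refs (this is not output produced by the test):

// for a key initially scoped to project p in build b, the chain is expected to run:
//   <the initial scope itself, project axis Select(p)>   -- "initial scope first"
//   ... further project-scoped entries ...
//   <entries with project axis Select(BuildRef(b))>      -- builds come after projects
//   ... entries whose project axis is Global ...
//   Scope.GlobalScope                                     -- "global scope last"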
@ -1,129 +1,128 @@
|
|||
package sbt
|
||||
|
||||
import Def.{displayFull,displayMasked,ScopedKey}
|
||||
import java.net.URI
|
||||
import TestBuild._
|
||||
import complete._
|
||||
import Def.{ displayFull, displayMasked, ScopedKey }
|
||||
import java.net.URI
|
||||
import TestBuild._
|
||||
import complete._
|
||||
|
||||
import org.scalacheck._
|
||||
import Gen._
|
||||
import Prop._
|
||||
import Arbitrary.arbBool
|
||||
import org.scalacheck._
|
||||
import Gen._
|
||||
import Prop._
|
||||
import Arbitrary.arbBool
|
||||
|
||||
/** Tests that the scoped key parser in Act can correctly parse a ScopedKey converted by Def.show*Key.
|
||||
* This includes properly resolving omitted components.*/
|
||||
object ParseKey extends Properties("Key parser test")
|
||||
{
|
||||
final val MaxKeys = 5
|
||||
final val MaxScopedKeys = 100
|
||||
/**
|
||||
* Tests that the scoped key parser in Act can correctly parse a ScopedKey converted by Def.show*Key.
|
||||
* This includes properly resolving omitted components.
|
||||
*/
|
||||
object ParseKey extends Properties("Key parser test") {
|
||||
final val MaxKeys = 5
|
||||
final val MaxScopedKeys = 100
|
||||
|
||||
implicit val gstructure = genStructure
|
||||
implicit val gstructure = genStructure
|
||||
|
||||
property("An explicitly specified axis is always parsed to that explicit value") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{structure, key, mask}
|
||||
|
||||
val expected = resolve(structure, key, mask)
|
||||
val string = displayMasked(key, mask)
|
||||
property("An explicitly specified axis is always parsed to that explicit value") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{ structure, key, mask }
|
||||
|
||||
("Key: " + displayFull(key)) |:
|
||||
parseExpected(structure, string, expected, mask)
|
||||
}
|
||||
val expected = resolve(structure, key, mask)
|
||||
val string = displayMasked(key, mask)
|
||||
|
||||
property("An unspecified project axis resolves to the current project") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{structure, key}
|
||||
("Key: " + displayFull(key)) |:
|
||||
parseExpected(structure, string, expected, mask)
|
||||
}
|
||||
|
||||
val mask = skm.mask.copy(project = false)
|
||||
val string = displayMasked(key, mask)
|
||||
property("An unspecified project axis resolves to the current project") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{ structure, key }
|
||||
|
||||
("Key: " + displayFull(key)) |:
|
||||
("Mask: " + mask) |:
|
||||
("Current: " + structure.current) |:
|
||||
parse(structure, string) {
|
||||
case Left(err) => false
|
||||
case Right(sk) => sk.scope.project == Select(structure.current)
|
||||
}
|
||||
}
|
||||
val mask = skm.mask.copy(project = false)
|
||||
val string = displayMasked(key, mask)
|
||||
|
||||
property("An unspecified task axis resolves to Global") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{structure, key}
|
||||
val mask = skm.mask.copy(task = false)
|
||||
val string = displayMasked(key, mask)
|
||||
("Key: " + displayFull(key)) |:
|
||||
("Mask: " + mask) |:
|
||||
("Current: " + structure.current) |:
|
||||
parse(structure, string) {
|
||||
case Left(err) => false
|
||||
case Right(sk) => sk.scope.project == Select(structure.current)
|
||||
}
|
||||
}
|
||||
|
||||
("Key: " + displayFull(key)) |:
|
||||
("Mask: " + mask) |:
|
||||
parse(structure, string) {
|
||||
case Left(err) => false
|
||||
case Right(sk) => sk.scope.task == Global
|
||||
}
|
||||
}
|
||||
property("An unspecified task axis resolves to Global") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{ structure, key }
|
||||
val mask = skm.mask.copy(task = false)
|
||||
val string = displayMasked(key, mask)
|
||||
|
||||
property("An unspecified configuration axis resolves to the first configuration directly defining the key or else Global") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{structure, key}
|
||||
val mask = ScopeMask(config = false)
|
||||
val string = displayMasked(key, mask)
|
||||
val resolvedConfig = Resolve.resolveConfig(structure.extra, key.key, mask)(key.scope).config
|
||||
("Key: " + displayFull(key)) |:
|
||||
("Mask: " + mask) |:
|
||||
parse(structure, string) {
|
||||
case Left(err) => false
|
||||
case Right(sk) => sk.scope.task == Global
|
||||
}
|
||||
}
|
||||
|
||||
("Key: " + displayFull(key)) |:
|
||||
("Mask: " + mask) |:
|
||||
("Expected configuration: " + resolvedConfig.map(_.name)) |:
|
||||
parse(structure, string) {
|
||||
case Right(sk) => sk.scope.config == resolvedConfig
|
||||
case Left(err) => false
|
||||
}
|
||||
}
|
||||
property("An unspecified configuration axis resolves to the first configuration directly defining the key or else Global") =
|
||||
forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) =>
|
||||
import skm.{ structure, key }
|
||||
val mask = ScopeMask(config = false)
|
||||
val string = displayMasked(key, mask)
|
||||
val resolvedConfig = Resolve.resolveConfig(structure.extra, key.key, mask)(key.scope).config
|
||||
|
||||
lazy val structureDefinedKey: Gen[StructureKeyMask] = structureKeyMask { s =>
|
||||
for( scope <- TestBuild.scope(s.env); key <- oneOf(s.allAttributeKeys.toSeq)) yield ScopedKey(scope, key)
|
||||
}
|
||||
def structureKeyMask(genKey: Structure => Gen[ScopedKey[_]])(implicit maskGen: Gen[ScopeMask], structureGen: Gen[Structure]): Gen[StructureKeyMask] =
|
||||
for(mask <- maskGen; structure <- structureGen; key <- genKey(structure)) yield
|
||||
new StructureKeyMask(structure, key, mask)
|
||||
final class StructureKeyMask(val structure: Structure, val key: ScopedKey[_], val mask: ScopeMask)
|
||||
|
||||
def resolve(structure: Structure, key: ScopedKey[_], mask: ScopeMask): ScopedKey[_] =
|
||||
ScopedKey(Resolve(structure.extra, Select(structure.current), key.key, mask)(key.scope), key.key)
|
||||
("Key: " + displayFull(key)) |:
|
||||
("Mask: " + mask) |:
|
||||
("Expected configuration: " + resolvedConfig.map(_.name)) |:
|
||||
parse(structure, string) {
|
||||
case Right(sk) => sk.scope.config == resolvedConfig
|
||||
case Left(err) => false
|
||||
}
|
||||
}
|
||||
|
||||
def parseExpected(structure: Structure, s: String, expected: ScopedKey[_], mask: ScopeMask): Prop =
|
||||
("Expected: " + displayFull(expected)) |:
|
||||
("Mask: " + mask) |:
|
||||
parse(structure, s) {
|
||||
case Left(err) => false
|
||||
case Right(sk) => Project.equal(sk, expected, mask)
|
||||
}
|
||||
lazy val structureDefinedKey: Gen[StructureKeyMask] = structureKeyMask { s =>
|
||||
for (scope <- TestBuild.scope(s.env); key <- oneOf(s.allAttributeKeys.toSeq)) yield ScopedKey(scope, key)
|
||||
}
|
||||
def structureKeyMask(genKey: Structure => Gen[ScopedKey[_]])(implicit maskGen: Gen[ScopeMask], structureGen: Gen[Structure]): Gen[StructureKeyMask] =
|
||||
for (mask <- maskGen; structure <- structureGen; key <- genKey(structure)) yield new StructureKeyMask(structure, key, mask)
|
||||
final class StructureKeyMask(val structure: Structure, val key: ScopedKey[_], val mask: ScopeMask)
|
||||
|
||||
def parse(structure: Structure, s: String)(f: Either[String,ScopedKey[_]] => Prop): Prop =
|
||||
{
|
||||
val parser = makeParser(structure)
|
||||
val parsed = DefaultParsers.result(parser, s).left.map(_().toString)
|
||||
val showParsed = parsed.right.map(displayFull)
|
||||
("Key string: '" + s + "'") |:
|
||||
("Parsed: " + showParsed) |:
|
||||
("Structure: " + structure) |:
|
||||
f(parsed)
|
||||
}
|
||||
def resolve(structure: Structure, key: ScopedKey[_], mask: ScopeMask): ScopedKey[_] =
|
||||
ScopedKey(Resolve(structure.extra, Select(structure.current), key.key, mask)(key.scope), key.key)
|
||||
|
||||
def genStructure(implicit genEnv: Gen[Env]): Gen[Structure] =
|
||||
structureGenF { (scopes: Seq[Scope], env: Env, current: ProjectRef) =>
|
||||
val settings = for(scope <- scopes; t <- env.tasks) yield Def.setting(ScopedKey(scope, t.key), Def.value(""))
|
||||
TestBuild.structure(env, settings, current)
|
||||
}
|
||||
def parseExpected(structure: Structure, s: String, expected: ScopedKey[_], mask: ScopeMask): Prop =
|
||||
("Expected: " + displayFull(expected)) |:
|
||||
("Mask: " + mask) |:
|
||||
parse(structure, s) {
|
||||
case Left(err) => false
|
||||
case Right(sk) => Project.equal(sk, expected, mask)
|
||||
}
|
||||
|
||||
def structureGenF(f: (Seq[Scope], Env, ProjectRef) => Structure)(implicit genEnv: Gen[Env]): Gen[Structure] =
|
||||
structureGen( (s,e,p) => Gen.value(f(s,e,p)))
|
||||
def structureGen(f: (Seq[Scope], Env, ProjectRef) => Gen[Structure])(implicit genEnv: Gen[Env]): Gen[Structure] =
|
||||
for {
|
||||
env <- genEnv
|
||||
loadFactor <- choose(0.0, 1.0)
|
||||
scopes <- pickN(loadFactor, env.allFullScopes)
|
||||
current <- oneOf(env.allProjects.unzip._1)
|
||||
structure <- f(scopes, env, current)
|
||||
} yield
|
||||
structure
|
||||
def parse(structure: Structure, s: String)(f: Either[String, ScopedKey[_]] => Prop): Prop =
|
||||
{
|
||||
val parser = makeParser(structure)
|
||||
val parsed = DefaultParsers.result(parser, s).left.map(_().toString)
|
||||
val showParsed = parsed.right.map(displayFull)
|
||||
("Key string: '" + s + "'") |:
|
||||
("Parsed: " + showParsed) |:
|
||||
("Structure: " + structure) |:
|
||||
f(parsed)
|
||||
}
|
||||
|
||||
def pickN[T](load: Double, from: Seq[T]): Gen[Seq[T]] =
|
||||
pick( (load*from.size).toInt, from )
|
||||
def genStructure(implicit genEnv: Gen[Env]): Gen[Structure] =
|
||||
structureGenF { (scopes: Seq[Scope], env: Env, current: ProjectRef) =>
|
||||
val settings = for (scope <- scopes; t <- env.tasks) yield Def.setting(ScopedKey(scope, t.key), Def.value(""))
|
||||
TestBuild.structure(env, settings, current)
|
||||
}
|
||||
|
||||
def structureGenF(f: (Seq[Scope], Env, ProjectRef) => Structure)(implicit genEnv: Gen[Env]): Gen[Structure] =
|
||||
structureGen((s, e, p) => Gen.value(f(s, e, p)))
|
||||
def structureGen(f: (Seq[Scope], Env, ProjectRef) => Gen[Structure])(implicit genEnv: Gen[Env]): Gen[Structure] =
|
||||
for {
|
||||
env <- genEnv
|
||||
loadFactor <- choose(0.0, 1.0)
|
||||
scopes <- pickN(loadFactor, env.allFullScopes)
|
||||
current <- oneOf(env.allProjects.unzip._1)
|
||||
structure <- f(scopes, env, current)
|
||||
} yield structure
|
||||
|
||||
def pickN[T](load: Double, from: Seq[T]): Gen[Seq[T]] =
|
||||
pick((load * from.size).toInt, from)
|
||||
}
|
||||
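The property above renders a key with its configuration axis masked out (ScopeMask(config = false), displayMasked) and then checks that reparsing recovers the configuration that Resolve.resolveConfig predicts. A standalone sketch of the masking idea, using hypothetical case classes rather than sbt's Scope, ScopeMask, or display functions:

object MaskSketch extends App {
  // Illustrative stand-ins for a scoped key and a display mask; not sbt's types.
  final case class MiniScope(project: Option[String], config: Option[String], task: Option[String])
  final case class MiniMask(project: Boolean, config: Boolean, task: Boolean)

  // Render only the axes selected by the mask, in project/config:task:: order.
  def display(scope: MiniScope, key: String, mask: MiniMask): String = {
    val p = if (mask.project) scope.project.map(_ + "/").getOrElse("") else ""
    val c = if (mask.config) scope.config.map(_ + ":").getOrElse("") else ""
    val t = if (mask.task) scope.task.map(_ + "::").getOrElse("") else ""
    p + c + t + key
  }

  // With config masked out the string carries no configuration, so a parser
  // has to resolve it from the build structure, which is what the property checks.
  println(display(MiniScope(Some("core"), Some("compile"), None), "sources",
    MiniMask(project = true, config = false, task = true))) // core/sources
}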
|
|
|
|||
|
|
@ -1,67 +1,64 @@
|
|||
package sbt
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Project.project
|
||||
import java.io.File
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Project.project
|
||||
import java.io.File
|
||||
|
||||
class ProjectDefs
|
||||
{
|
||||
lazy val p = project
|
||||
class ProjectDefs {
|
||||
lazy val p = project
|
||||
|
||||
val x = project
|
||||
val x = project
|
||||
|
||||
// should not compile
|
||||
// def y = project
|
||||
// should not compile
|
||||
// def y = project
|
||||
|
||||
val z = project in new File("dir")
|
||||
val z = project in new File("dir")
|
||||
|
||||
val a: Project = project
|
||||
val a: Project = project
|
||||
|
||||
lazy val aa: Project = project
|
||||
lazy val aa: Project = project
|
||||
}
|
||||
|
||||
object ProjectMacro extends Properties("ProjectMacro") {
|
||||
lazy val pd = new ProjectDefs
|
||||
import pd._
|
||||
|
||||
object ProjectMacro extends Properties("ProjectMacro")
|
||||
{
|
||||
lazy val pd = new ProjectDefs
|
||||
import pd._
|
||||
|
||||
def secure(f: => Prop): Prop = try {
|
||||
Prop.secure(f)
|
||||
} catch { case e: Throwable =>
|
||||
e.printStackTrace
|
||||
throw e
|
||||
}
|
||||
def secure(f: => Prop): Prop = try {
|
||||
Prop.secure(f)
|
||||
} catch {
|
||||
case e: Throwable =>
|
||||
e.printStackTrace
|
||||
throw e
|
||||
}
|
||||
|
||||
property("Explicit type on lazy val supported") = secure {
|
||||
check(aa, "aa", "aa")
|
||||
}
|
||||
property("Explicit type on lazy val supported") = secure {
|
||||
check(aa, "aa", "aa")
|
||||
}
|
||||
|
||||
property("Explicit type on val supported") = secure {
|
||||
check(a, "a", "a")
|
||||
}
|
||||
property("Explicit type on val supported") = secure {
|
||||
check(a, "a", "a")
|
||||
}
|
||||
|
||||
property("lazy vals supported") = secure {
|
||||
check(p, "p", "p")
|
||||
}
|
||||
|
||||
property("plain vals supported") = secure {
|
||||
check(x, "x", "x")
|
||||
}
|
||||
|
||||
property("Directory overridable") = secure {
|
||||
check(z, "z", "dir")
|
||||
}
|
||||
property("lazy vals supported") = secure {
|
||||
check(p, "p", "p")
|
||||
}
|
||||
|
||||
def check(p: Project, id: String, dir: String): Prop =
|
||||
{
|
||||
s"Expected id: $id" |:
|
||||
s"Expected dir: $dir" |:
|
||||
s"Actual id: ${p.id}" |:
|
||||
s"Actual dir: ${p.base}" |:
|
||||
(p.id == id) &&
|
||||
(p.base.getName == dir)
|
||||
}
|
||||
property("plain vals supported") = secure {
|
||||
check(x, "x", "x")
|
||||
}
|
||||
|
||||
property("Directory overridable") = secure {
|
||||
check(z, "z", "dir")
|
||||
}
|
||||
|
||||
def check(p: Project, id: String, dir: String): Prop =
|
||||
{
|
||||
s"Expected id: $id" |:
|
||||
s"Expected dir: $dir" |:
|
||||
s"Actual id: ${p.id}" |:
|
||||
s"Actual dir: ${p.base}" |:
|
||||
(p.id == id) &&
|
||||
(p.base.getName == dir)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1,40 +1,39 @@
|
|||
package sbt
|
||||
|
||||
import org.scalacheck._
|
||||
import Gen.{listOf}
|
||||
import Gen.{ listOf }
|
||||
import Prop._
|
||||
import Tags._
|
||||
|
||||
object TagsTest extends Properties("Tags")
|
||||
{
|
||||
final case class Size(value: Int)
|
||||
object TagsTest extends Properties("Tags") {
|
||||
final case class Size(value: Int)
|
||||
|
||||
def tagMap: Gen[TagMap] = for(ts <- listOf(tagAndFrequency)) yield ts.toMap
|
||||
def tagAndFrequency: Gen[(Tag, Int)] = for(t <- tag; count <- Arbitrary.arbitrary[Int]) yield (t, count)
|
||||
def tag: Gen[Tag] = for(s <- Gen.alphaStr if !s.isEmpty) yield Tag(s)
|
||||
def size: Gen[Size] = for(i <- Arbitrary.arbitrary[Int] if i != Int.MinValue) yield Size(math.abs(i))
|
||||
def tagMap: Gen[TagMap] = for (ts <- listOf(tagAndFrequency)) yield ts.toMap
|
||||
def tagAndFrequency: Gen[(Tag, Int)] = for (t <- tag; count <- Arbitrary.arbitrary[Int]) yield (t, count)
|
||||
def tag: Gen[Tag] = for (s <- Gen.alphaStr if !s.isEmpty) yield Tag(s)
|
||||
def size: Gen[Size] = for (i <- Arbitrary.arbitrary[Int] if i != Int.MinValue) yield Size(math.abs(i))
|
||||
|
||||
implicit def aTagMap = Arbitrary(tagMap)
|
||||
implicit def aTagAndFrequency = Arbitrary(tagAndFrequency)
|
||||
implicit def aTag = Arbitrary(tag)
|
||||
implicit def aSize = Arbitrary(size)
|
||||
implicit def aTagMap = Arbitrary(tagMap)
|
||||
implicit def aTagAndFrequency = Arbitrary(tagAndFrequency)
|
||||
implicit def aTag = Arbitrary(tag)
|
||||
implicit def aSize = Arbitrary(size)
|
||||
|
||||
property("exclusive allows all groups without the exclusive tag") = forAll { (tm: TagMap, tag: Tag) =>
|
||||
excl(tag)(tm - tag)
|
||||
}
|
||||
property("exclusive allows all groups without the exclusive tag") = forAll { (tm: TagMap, tag: Tag) =>
|
||||
excl(tag)(tm - tag)
|
||||
}
|
||||
|
||||
property("exclusive only allows a group with an excusive tag when the size is one") = forAll { (tm: TagMap, size: Size, etag: Tag) =>
|
||||
val absSize = size.value
|
||||
val tm2: TagMap = tm.updated(etag, absSize).updated(Tags.All, tm.getOrElse(Tags.All, 0) + absSize)
|
||||
(s"TagMap: $tm2") |:
|
||||
( excl(etag)(tm2) == (absSize <= 1) )
|
||||
}
|
||||
property("exclusive only allows a group with an excusive tag when the size is one") = forAll { (tm: TagMap, size: Size, etag: Tag) =>
|
||||
val absSize = size.value
|
||||
val tm2: TagMap = tm.updated(etag, absSize).updated(Tags.All, tm.getOrElse(Tags.All, 0) + absSize)
|
||||
(s"TagMap: $tm2") |:
|
||||
(excl(etag)(tm2) == (absSize <= 1))
|
||||
}
|
||||
|
||||
property("exclusive always allows a group of size one") = forAll { (etag: Tag, mapTag: Tag) =>
|
||||
val tm: TagMap = Map(mapTag -> 1, Tags.All -> 1)
|
||||
excl(etag)(tm)
|
||||
}
|
||||
property("exclusive always allows a group of size one") = forAll { (etag: Tag, mapTag: Tag) =>
|
||||
val tm: TagMap = Map(mapTag -> 1, Tags.All -> 1)
|
||||
excl(etag)(tm)
|
||||
}
|
||||
|
||||
private[this] def excl(tag: Tag): TagMap => Boolean = predicate(exclusive(tag) :: Nil)
|
||||
private[this] def excl(tag: Tag): TagMap => Boolean = predicate(exclusive(tag) :: Nil)
|
||||
|
||||
}
|
||||
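A plain-Scala sketch of the rule these properties exercise, with a Map standing in for sbt's TagMap and a hand-written predicate standing in for Tags.exclusive plus Tags.predicate (illustrative only): a group that carries the exclusive tag is admitted only when it consists of a single task.

object ExclusiveSketch extends App {
  type TagMap = Map[String, Int] // tag name -> number of tasks in the group carrying it
  val All = "all"                // stands in for Tags.All, the total size of the group

  // Admit the group unless it mixes the exclusive tag with other work.
  def exclusive(tag: String)(group: TagMap): Boolean =
    group.getOrElse(tag, 0) == 0 || group.getOrElse(All, 0) <= 1

  println(exclusive("update")(Map("compile" -> 3, All -> 3))) // true: tag absent
  println(exclusive("update")(Map("update" -> 1, All -> 1)))  // true: group of size one
  println(exclusive("update")(Map("update" -> 1, All -> 4)))  // false: exclusive work mixed with other tasks
}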
|
|
|
|||
|
|
@ -1,269 +1,252 @@
|
|||
package sbt
|
||||
|
||||
import Def.{ScopedKey, Setting}
|
||||
import Types.{const,idFun,some}
|
||||
import complete.Parser
|
||||
import Def.{ ScopedKey, Setting }
|
||||
import Types.{ const, idFun, some }
|
||||
import complete.Parser
|
||||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Gen._
|
||||
import Arbitrary.arbBool
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Gen._
|
||||
import Arbitrary.arbBool
|
||||
|
||||
// Notes:
|
||||
// Generator doesn't produce cross-build project dependencies or do anything with the 'extra' axis
|
||||
object TestBuild
|
||||
{
|
||||
val MaxTasks = 6
|
||||
val MaxProjects = 7
|
||||
val MaxConfigs = 5
|
||||
val MaxBuilds = 4
|
||||
val MaxIDSize = 8
|
||||
val MaxDeps = 8
|
||||
val KeysPerEnv = 10
|
||||
object TestBuild {
|
||||
val MaxTasks = 6
|
||||
val MaxProjects = 7
|
||||
val MaxConfigs = 5
|
||||
val MaxBuilds = 4
|
||||
val MaxIDSize = 8
|
||||
val MaxDeps = 8
|
||||
val KeysPerEnv = 10
|
||||
|
||||
val MaxTasksGen = chooseShrinkable(1, MaxTasks)
|
||||
val MaxProjectsGen = chooseShrinkable(1, MaxProjects)
|
||||
val MaxConfigsGen = chooseShrinkable(1, MaxConfigs)
|
||||
val MaxBuildsGen = chooseShrinkable(1, MaxBuilds)
|
||||
val MaxDepsGen = chooseShrinkable(0, MaxDeps)
|
||||
val MaxTasksGen = chooseShrinkable(1, MaxTasks)
|
||||
val MaxProjectsGen = chooseShrinkable(1, MaxProjects)
|
||||
val MaxConfigsGen = chooseShrinkable(1, MaxConfigs)
|
||||
val MaxBuildsGen = chooseShrinkable(1, MaxBuilds)
|
||||
val MaxDepsGen = chooseShrinkable(0, MaxDeps)
|
||||
|
||||
def chooseShrinkable(min: Int, max: Int): Gen[Int] =
|
||||
sized( sz => choose(min, (max min sz) max 1) )
|
||||
def chooseShrinkable(min: Int, max: Int): Gen[Int] =
|
||||
sized(sz => choose(min, (max min sz) max 1))
|
||||
|
||||
implicit val cGen = Arbitrary { genConfigs(idGen, MaxDepsGen, MaxConfigsGen) }
|
||||
implicit val tGen = Arbitrary { genTasks(idGen, MaxDepsGen, MaxTasksGen) }
|
||||
implicit val cGen = Arbitrary { genConfigs(idGen, MaxDepsGen, MaxConfigsGen) }
|
||||
implicit val tGen = Arbitrary { genTasks(idGen, MaxDepsGen, MaxTasksGen) }
|
||||
|
||||
final class Keys(val env: Env, val scopes: Seq[Scope]) {
|
||||
override def toString = env + "\n" + scopes.mkString("Scopes:\n\t", "\n\t", "")
|
||||
lazy val delegated = scopes map env.delegates
|
||||
}
|
||||
|
||||
final class Keys(val env: Env, val scopes: Seq[Scope])
|
||||
{
|
||||
override def toString = env + "\n" + scopes.mkString("Scopes:\n\t", "\n\t", "")
|
||||
lazy val delegated = scopes map env.delegates
|
||||
}
|
||||
final case class Structure(env: Env, current: ProjectRef, data: Settings[Scope], keyIndex: KeyIndex, keyMap: Map[String, AttributeKey[_]]) {
|
||||
override def toString = env.toString + "\n" + "current: " + current + "\nSettings:\n\t" + showData + keyMap.keys.mkString("All keys:\n\t", ", ", "")
|
||||
def showKeys(map: AttributeMap): String = map.keys.mkString("\n\t ", ",", "\n")
|
||||
def showData: String =
|
||||
{
|
||||
val scopeStrings =
|
||||
for ((scope, map) <- data.data) yield (Scope.display(scope, "<key>"), showKeys(map))
|
||||
scopeStrings.toSeq.sorted.map(t => t._1 + t._2).mkString("\n\t")
|
||||
}
|
||||
val extra: BuildUtil[Proj] =
|
||||
{
|
||||
val getp = (build: URI, project: String) => env.buildMap(build).projectMap(project)
|
||||
new BuildUtil(keyIndex, data, env.root.uri, env.rootProject, getp, _.configurations.map(c => ConfigKey(c.name)), Relation.empty)
|
||||
}
|
||||
|
||||
final case class Structure(env: Env, current: ProjectRef, data: Settings[Scope], keyIndex: KeyIndex, keyMap: Map[String, AttributeKey[_]])
|
||||
{
|
||||
override def toString = env.toString + "\n" + "current: " + current + "\nSettings:\n\t" + showData + keyMap.keys.mkString("All keys:\n\t", ", ", "")
|
||||
def showKeys(map: AttributeMap): String = map.keys.mkString("\n\t ",",", "\n")
|
||||
def showData: String =
|
||||
{
|
||||
val scopeStrings =
|
||||
for( (scope, map) <- data.data ) yield
|
||||
(Scope.display(scope, "<key>"), showKeys(map))
|
||||
scopeStrings.toSeq.sorted.map(t => t._1 + t._2).mkString("\n\t")
|
||||
}
|
||||
val extra: BuildUtil[Proj] =
|
||||
{
|
||||
val getp = (build: URI, project: String) => env.buildMap(build).projectMap(project)
|
||||
new BuildUtil(keyIndex, data, env.root.uri, env.rootProject, getp, _.configurations.map(c => ConfigKey(c.name)), Relation.empty)
|
||||
}
|
||||
lazy val allAttributeKeys: Set[AttributeKey[_]] = data.data.values.flatMap(_.keys).toSet
|
||||
lazy val (taskAxes, globalTaskAxis, onlyTaskAxis, multiTaskAxis) =
|
||||
{
|
||||
import collection.{ breakOut, mutable }
|
||||
import mutable.HashSet
|
||||
|
||||
lazy val allAttributeKeys: Set[AttributeKey[_]] = data.data.values.flatMap(_.keys).toSet
|
||||
lazy val (taskAxes, globalTaskAxis, onlyTaskAxis, multiTaskAxis) =
|
||||
{
|
||||
import collection.{breakOut, mutable}
|
||||
import mutable.HashSet
|
||||
// task axis of Scope is set to Global and the value of the second map is the original task axis
|
||||
val taskAxesMappings =
|
||||
for ((scope, keys) <- data.data.toIterable; key <- keys.keys) yield (ScopedKey(scope.copy(task = Global), key), scope.task): (ScopedKey[_], ScopeAxis[AttributeKey[_]])
|
||||
|
||||
// task axis of Scope is set to Global and the value of the second map is the original task axis
|
||||
val taskAxesMappings =
|
||||
for( (scope, keys) <- data.data.toIterable; key <- keys.keys ) yield
|
||||
(ScopedKey(scope.copy(task = Global), key), scope.task) : (ScopedKey[_], ScopeAxis[AttributeKey[_]])
|
||||
val taskAxes = Relation.empty ++ taskAxesMappings
|
||||
val global = new HashSet[ScopedKey[_]]
|
||||
val single = new HashSet[ScopedKey[_]]
|
||||
val multi = new HashSet[ScopedKey[_]]
|
||||
for ((skey, tasks) <- taskAxes.forwardMap) {
|
||||
def makeKey(task: ScopeAxis[AttributeKey[_]]) = ScopedKey(skey.scope.copy(task = task), skey.key)
|
||||
val hasGlobal = tasks(Global)
|
||||
if (hasGlobal)
|
||||
global += skey
|
||||
else {
|
||||
val keys = tasks map makeKey
|
||||
if (keys.size == 1)
|
||||
single ++= keys
|
||||
else if (keys.size > 1)
|
||||
multi ++= keys
|
||||
}
|
||||
}
|
||||
(taskAxes, global.toSet, single.toSet, multi.toSet)
|
||||
}
|
||||
}
|
||||
final class Env(val builds: Seq[Build], val tasks: Seq[Taskk]) {
|
||||
override def toString = "Env:\n " + " Tasks:\n " + tasks.mkString("\n ") + "\n" + builds.mkString("\n ")
|
||||
val root = builds.head
|
||||
val buildMap = mapBy(builds)(_.uri)
|
||||
val taskMap = mapBy(tasks)(getKey)
|
||||
def project(ref: ProjectRef) = buildMap(ref.build).projectMap(ref.project)
|
||||
def projectFor(ref: ResolvedReference) = ref match { case pr: ProjectRef => project(pr); case BuildRef(uri) => buildMap(uri).root }
|
||||
|
||||
val taskAxes = Relation.empty ++ taskAxesMappings
|
||||
val global = new HashSet[ScopedKey[_]]
|
||||
val single = new HashSet[ScopedKey[_]]
|
||||
val multi = new HashSet[ScopedKey[_]]
|
||||
for( (skey, tasks) <- taskAxes.forwardMap)
|
||||
{
|
||||
def makeKey(task: ScopeAxis[AttributeKey[_]]) = ScopedKey(skey.scope.copy(task = task), skey.key)
|
||||
val hasGlobal = tasks(Global)
|
||||
if(hasGlobal)
|
||||
global += skey
|
||||
else
|
||||
{
|
||||
val keys = tasks map makeKey
|
||||
if( keys.size == 1)
|
||||
single ++= keys
|
||||
else if(keys.size > 1)
|
||||
multi ++= keys
|
||||
}
|
||||
}
|
||||
(taskAxes, global.toSet, single.toSet, multi.toSet)
|
||||
}
|
||||
}
|
||||
final class Env(val builds: Seq[Build], val tasks: Seq[Taskk])
|
||||
{
|
||||
override def toString = "Env:\n "+ " Tasks:\n " + tasks.mkString("\n ") +"\n" + builds.mkString("\n ")
|
||||
val root = builds.head
|
||||
val buildMap = mapBy(builds)(_.uri)
|
||||
val taskMap = mapBy(tasks)(getKey)
|
||||
def project(ref: ProjectRef) = buildMap(ref.build).projectMap(ref.project)
|
||||
def projectFor(ref: ResolvedReference) = ref match { case pr: ProjectRef => project(pr); case BuildRef(uri) => buildMap(uri).root }
|
||||
lazy val allProjects = builds.flatMap(_.allProjects)
|
||||
def rootProject(uri: URI): String = buildMap(uri).root.id
|
||||
def inheritConfig(ref: ResolvedReference, config: ConfigKey) = projectFor(ref).confMap(config.name).extended map toConfigKey
|
||||
def inheritTask(task: AttributeKey[_]) = taskMap.get(task) match { case None => Nil; case Some(t) => t.delegates map getKey }
|
||||
def inheritProject(ref: ProjectRef) = project(ref).delegates
|
||||
def resolve(ref: Reference) = Scope.resolveReference(builds.head.uri, rootProject, ref)
|
||||
lazy val delegates: Scope => Seq[Scope] =
|
||||
Scope.delegates(
|
||||
allProjects,
|
||||
(_: Proj).configurations.map(toConfigKey),
|
||||
resolve,
|
||||
uri => buildMap(uri).root.id,
|
||||
inheritProject,
|
||||
inheritConfig,
|
||||
inheritTask,
|
||||
(ref, mp) => Nil
|
||||
)
|
||||
lazy val allFullScopes: Seq[Scope] =
|
||||
for {
|
||||
(ref, p) <- (Global, root.root) +: allProjects.map { case (ref, p) => (Select(ref), p) }
|
||||
t <- Global +: tasks.map(t => Select(t.key))
|
||||
c <- Global +: p.configurations.map(c => Select(ConfigKey(c.name)))
|
||||
} yield Scope(project = ref, config = c, task = t, extra = Global)
|
||||
}
|
||||
def getKey: Taskk => AttributeKey[_] = _.key
|
||||
def toConfigKey: Config => ConfigKey = c => ConfigKey(c.name)
|
||||
final class Build(val uri: URI, val projects: Seq[Proj]) {
|
||||
override def toString = "Build " + uri.toString + " :\n " + projects.mkString("\n ")
|
||||
val allProjects = projects map { p => (ProjectRef(uri, p.id), p) }
|
||||
val root = projects.head
|
||||
val projectMap = mapBy(projects)(_.id)
|
||||
}
|
||||
final class Proj(val id: String, val delegates: Seq[ProjectRef], val configurations: Seq[Config]) {
|
||||
override def toString = "Project " + id + "\n Delegates:\n " + delegates.mkString("\n ") +
|
||||
"\n Configurations:\n " + configurations.mkString("\n ")
|
||||
val confMap = mapBy(configurations)(_.name)
|
||||
}
|
||||
|
||||
lazy val allProjects = builds.flatMap(_.allProjects)
|
||||
def rootProject(uri: URI): String = buildMap(uri).root.id
|
||||
def inheritConfig(ref: ResolvedReference, config: ConfigKey) = projectFor(ref).confMap(config.name).extended map toConfigKey
|
||||
def inheritTask(task: AttributeKey[_]) = taskMap.get(task) match { case None => Nil; case Some(t) => t.delegates map getKey }
|
||||
def inheritProject(ref: ProjectRef) = project(ref).delegates
|
||||
def resolve(ref: Reference) = Scope.resolveReference(builds.head.uri, rootProject, ref)
|
||||
lazy val delegates: Scope => Seq[Scope] =
|
||||
Scope.delegates(
|
||||
allProjects,
|
||||
(_: Proj).configurations.map(toConfigKey),
|
||||
resolve,
|
||||
uri => buildMap(uri).root.id,
|
||||
inheritProject,
|
||||
inheritConfig,
|
||||
inheritTask,
|
||||
(ref, mp) => Nil
|
||||
)
|
||||
lazy val allFullScopes: Seq[Scope] =
|
||||
for {
|
||||
(ref, p) <- (Global, root.root) +: allProjects.map { case (ref, p) => (Select(ref), p) }
|
||||
t <- Global +: tasks.map(t => Select(t.key))
|
||||
c <- Global +: p.configurations.map(c => Select(ConfigKey(c.name)))
|
||||
} yield
|
||||
Scope(project = ref, config = c, task = t, extra = Global)
|
||||
}
|
||||
def getKey: Taskk => AttributeKey[_] = _.key
|
||||
def toConfigKey: Config => ConfigKey = c => ConfigKey(c.name)
|
||||
final class Build(val uri: URI, val projects: Seq[Proj])
|
||||
{
|
||||
override def toString = "Build " + uri.toString + " :\n " + projects.mkString("\n ")
|
||||
val allProjects = projects map { p => (ProjectRef(uri, p.id), p) }
|
||||
val root = projects.head
|
||||
val projectMap = mapBy(projects)(_.id)
|
||||
}
|
||||
final class Proj(val id: String, val delegates: Seq[ProjectRef], val configurations: Seq[Config])
|
||||
{
|
||||
override def toString = "Project " + id + "\n Delegates:\n " + delegates.mkString("\n ") +
|
||||
"\n Configurations:\n " + configurations.mkString("\n ")
|
||||
val confMap = mapBy(configurations)(_.name)
|
||||
}
|
||||
final class Config(val name: String, val extended: Seq[Config]) {
|
||||
override def toString = name + " (extends: " + extended.map(_.name).mkString(", ") + ")"
|
||||
}
|
||||
final class Taskk(val key: AttributeKey[String], val delegates: Seq[Taskk]) {
|
||||
override def toString = key.label + " (delegates: " + delegates.map(_.key.label).mkString(", ") + ")"
|
||||
}
|
||||
|
||||
final class Config(val name: String, val extended: Seq[Config])
|
||||
{
|
||||
override def toString = name + " (extends: " + extended.map(_.name).mkString(", ") + ")"
|
||||
}
|
||||
final class Taskk(val key: AttributeKey[String], val delegates: Seq[Taskk])
|
||||
{
|
||||
override def toString = key.label + " (delegates: " + delegates.map(_.key.label).mkString(", ") + ")"
|
||||
}
|
||||
def mapBy[K, T](s: Seq[T])(f: T => K): Map[K, T] = s map { t => (f(t), t) } toMap;
|
||||
|
||||
def mapBy[K, T](s: Seq[T])(f: T => K): Map[K, T] = s map { t => (f(t), t) } toMap;
|
||||
implicit lazy val arbKeys: Arbitrary[Keys] = Arbitrary(keysGen)
|
||||
lazy val keysGen: Gen[Keys] = for (env <- mkEnv; keyCount <- chooseShrinkable(1, KeysPerEnv); keys <- listOfN(keyCount, scope(env))) yield new Keys(env, keys)
|
||||
|
||||
implicit lazy val arbKeys: Arbitrary[Keys] = Arbitrary(keysGen)
|
||||
lazy val keysGen: Gen[Keys] = for(env <- mkEnv; keyCount <- chooseShrinkable(1, KeysPerEnv); keys <- listOfN(keyCount, scope(env)) ) yield new Keys(env, keys)
|
||||
def scope(env: Env): Gen[Scope] =
|
||||
for {
|
||||
build <- oneOf(env.builds)
|
||||
project <- oneOf(build.projects)
|
||||
cAxis <- oneOrGlobal(project.configurations map toConfigKey)
|
||||
tAxis <- oneOrGlobal(env.tasks map getKey)
|
||||
pAxis <- orGlobal(frequency((1, BuildRef(build.uri)), (3, ProjectRef(build.uri, project.id))))
|
||||
} yield Scope(pAxis, cAxis, tAxis, Global)
|
||||
|
||||
def scope(env: Env): Gen[Scope] =
|
||||
for {
|
||||
build <- oneOf(env.builds)
|
||||
project <- oneOf(build.projects)
|
||||
cAxis <- oneOrGlobal(project.configurations map toConfigKey)
|
||||
tAxis <- oneOrGlobal( env.tasks map getKey )
|
||||
pAxis <- orGlobal( frequency( (1, BuildRef(build.uri)), (3, ProjectRef(build.uri, project.id) ) ) )
|
||||
} yield
|
||||
Scope( pAxis, cAxis, tAxis, Global)
|
||||
def orGlobal[T](gen: Gen[T]): Gen[ScopeAxis[T]] =
|
||||
frequency((1, gen map Select.apply), (1, Global))
|
||||
def oneOrGlobal[T](gen: Seq[T]): Gen[ScopeAxis[T]] = orGlobal(oneOf(gen))
|
||||
|
||||
def orGlobal[T](gen: Gen[T]): Gen[ScopeAxis[T]] =
|
||||
frequency( (1, gen map Select.apply), (1, Global) )
|
||||
def oneOrGlobal[T](gen: Seq[T]): Gen[ScopeAxis[T]] = orGlobal(oneOf(gen))
|
||||
def makeParser(structure: Structure): Parser[ScopedKey[_]] =
|
||||
{
|
||||
import structure._
|
||||
def confs(uri: URI) = env.buildMap.get(uri).toList.flatMap { _.root.configurations.map(_.name) }
|
||||
val defaultConfs: Option[ResolvedReference] => Seq[String] = {
|
||||
case None => confs(env.root.uri)
|
||||
case Some(BuildRef(uri)) => confs(uri)
|
||||
case Some(ref: ProjectRef) => env.project(ref).configurations.map(_.name)
|
||||
}
|
||||
Act.scopedKey(keyIndex, current, defaultConfs, keyMap, data)
|
||||
}
|
||||
|
||||
def makeParser(structure: Structure): Parser[ScopedKey[_]] =
|
||||
{
|
||||
import structure._
|
||||
def confs(uri: URI) = env.buildMap.get(uri).toList.flatMap { _.root.configurations.map(_.name) }
|
||||
val defaultConfs: Option[ResolvedReference] => Seq[String] = {
|
||||
case None => confs(env.root.uri)
|
||||
case Some(BuildRef(uri)) => confs(uri)
|
||||
case Some(ref: ProjectRef) => env.project(ref).configurations.map(_.name)
|
||||
}
|
||||
Act.scopedKey(keyIndex, current, defaultConfs, keyMap, data)
|
||||
}
|
||||
def structure(env: Env, settings: Seq[Setting[_]], current: ProjectRef): Structure =
|
||||
{
|
||||
implicit val display = Def.showRelativeKey(current, env.allProjects.size > 1)
|
||||
val data = Def.make(settings)(env.delegates, const(Nil), display)
|
||||
val keys = data.allKeys((s, key) => ScopedKey(s, key))
|
||||
val keyMap = keys.map(k => (k.key.label, k.key)).toMap[String, AttributeKey[_]]
|
||||
val projectsMap = env.builds.map(b => (b.uri, b.projects.map(_.id).toSet)).toMap
|
||||
new Structure(env, current, data, KeyIndex(keys, projectsMap), keyMap)
|
||||
}
|
||||
|
||||
def structure(env: Env, settings: Seq[Setting[_]], current: ProjectRef): Structure =
|
||||
{
|
||||
implicit val display = Def.showRelativeKey(current, env.allProjects.size > 1)
|
||||
val data = Def.make(settings)(env.delegates, const(Nil), display)
|
||||
val keys = data.allKeys( (s, key) => ScopedKey(s, key))
|
||||
val keyMap = keys.map(k => (k.key.label, k.key)).toMap[String, AttributeKey[_]]
|
||||
val projectsMap = env.builds.map(b => (b.uri, b.projects.map(_.id).toSet)).toMap
|
||||
new Structure(env, current, data, KeyIndex(keys, projectsMap), keyMap)
|
||||
}
|
||||
implicit lazy val mkEnv: Gen[Env] =
|
||||
{
|
||||
implicit val cGen = genConfigs(idGen, MaxDepsGen, MaxConfigsGen)
|
||||
implicit val tGen = genTasks(idGen, MaxDepsGen, MaxTasksGen)
|
||||
implicit val pGen = (uri: URI) => genProjects(uri)(idGen, MaxDepsGen, MaxProjectsGen, cGen)
|
||||
envGen(buildGen(uriGen, pGen), tGen)
|
||||
}
|
||||
|
||||
implicit lazy val mkEnv: Gen[Env] =
|
||||
{
|
||||
implicit val cGen = genConfigs(idGen, MaxDepsGen, MaxConfigsGen)
|
||||
implicit val tGen = genTasks(idGen, MaxDepsGen, MaxTasksGen)
|
||||
implicit val pGen = (uri: URI) => genProjects(uri)(idGen, MaxDepsGen, MaxProjectsGen, cGen)
|
||||
envGen(buildGen(uriGen, pGen), tGen)
|
||||
}
|
||||
implicit def maskGen(implicit arbBoolean: Arbitrary[Boolean]): Gen[ScopeMask] =
|
||||
{
|
||||
val b = arbBoolean.arbitrary
|
||||
for (p <- b; c <- b; t <- b; x <- b) yield ScopeMask(project = p, config = c, task = t, extra = x)
|
||||
}
|
||||
|
||||
implicit def maskGen(implicit arbBoolean: Arbitrary[Boolean]): Gen[ScopeMask] =
|
||||
{
|
||||
val b = arbBoolean.arbitrary
|
||||
for(p <- b; c <- b; t <- b; x <- b) yield
|
||||
ScopeMask(project = p, config = c, task = t, extra = x)
|
||||
}
|
||||
implicit lazy val idGen: Gen[String] = for (size <- chooseShrinkable(1, MaxIDSize); cs <- listOfN(size, alphaChar)) yield cs.mkString
|
||||
implicit lazy val optIDGen: Gen[Option[String]] = frequency((1, idGen map some.fn), (1, None))
|
||||
implicit lazy val uriGen: Gen[URI] = for (sch <- idGen; ssp <- idGen; frag <- optIDGen) yield new URI(sch, ssp, frag.orNull)
|
||||
|
||||
implicit lazy val idGen: Gen[String] = for(size <- chooseShrinkable(1, MaxIDSize); cs <- listOfN(size, alphaChar)) yield cs.mkString
|
||||
implicit lazy val optIDGen: Gen[Option[String]] = frequency( (1, idGen map some.fn), (1, None) )
|
||||
implicit lazy val uriGen: Gen[URI] = for(sch <- idGen; ssp <- idGen; frag <- optIDGen) yield new URI(sch, ssp, frag.orNull)
|
||||
implicit def envGen(implicit bGen: Gen[Build], tasks: Gen[Seq[Taskk]]): Gen[Env] =
|
||||
for (i <- MaxBuildsGen; bs <- listOfN(i, bGen); ts <- tasks) yield new Env(bs, ts)
|
||||
implicit def buildGen(implicit uGen: Gen[URI], pGen: URI => Gen[Seq[Proj]]): Gen[Build] = for (u <- uGen; ps <- pGen(u)) yield new Build(u, ps)
|
||||
|
||||
implicit def envGen(implicit bGen: Gen[Build], tasks: Gen[Seq[Taskk]]): Gen[Env] =
|
||||
for(i <- MaxBuildsGen; bs <- listOfN(i, bGen); ts <- tasks) yield new Env(bs, ts)
|
||||
implicit def buildGen(implicit uGen: Gen[URI], pGen: URI => Gen[Seq[Proj]]): Gen[Build] = for(u <- uGen; ps <- pGen(u)) yield new Build(u, ps)
|
||||
def nGen[T](igen: Gen[Int])(implicit g: Gen[T]): Gen[List[T]] = igen flatMap { ig => listOfN(ig, g) }
|
||||
|
||||
def nGen[T](igen: Gen[Int])(implicit g: Gen[T]): Gen[List[T]] = igen flatMap { ig => listOfN(ig, g) }
|
||||
implicit def genProjects(build: URI)(implicit genID: Gen[String], maxDeps: Gen[Int], count: Gen[Int], confs: Gen[Seq[Config]]): Gen[Seq[Proj]] =
|
||||
genAcyclic(maxDeps, genID, count) { (id: String) =>
|
||||
for (cs <- confs) yield { (deps: Seq[Proj]) =>
|
||||
new Proj(id, deps.map { dep => ProjectRef(build, dep.id) }, cs)
|
||||
}
|
||||
}
|
||||
def genConfigs(implicit genName: Gen[String], maxDeps: Gen[Int], count: Gen[Int]): Gen[Seq[Config]] =
|
||||
genAcyclicDirect[Config, String](maxDeps, genName, count)((key, deps) => new Config(key, deps))
|
||||
def genTasks(implicit genName: Gen[String], maxDeps: Gen[Int], count: Gen[Int]): Gen[Seq[Taskk]] =
|
||||
genAcyclicDirect[Taskk, String](maxDeps, genName, count)((key, deps) => new Taskk(AttributeKey[String](key), deps))
|
||||
|
||||
implicit def genProjects(build: URI)(implicit genID: Gen[String], maxDeps: Gen[Int], count: Gen[Int], confs: Gen[Seq[Config]]): Gen[Seq[Proj]] =
|
||||
genAcyclic(maxDeps, genID, count) { (id: String) =>
|
||||
for(cs <- confs) yield { (deps: Seq[Proj]) =>
|
||||
new Proj(id, deps.map{dep => ProjectRef(build, dep.id) }, cs)
|
||||
}
|
||||
}
|
||||
def genConfigs(implicit genName: Gen[String], maxDeps: Gen[Int], count: Gen[Int]): Gen[Seq[Config]] =
|
||||
genAcyclicDirect[Config,String](maxDeps, genName, count)( (key, deps) => new Config(key, deps) )
|
||||
def genTasks(implicit genName: Gen[String], maxDeps: Gen[Int], count: Gen[Int]): Gen[Seq[Taskk]] =
|
||||
genAcyclicDirect[Taskk,String](maxDeps, genName, count)( (key, deps) => new Taskk(AttributeKey[String](key), deps) )
|
||||
def genAcyclicDirect[A, T](maxDeps: Gen[Int], keyGen: Gen[T], max: Gen[Int])(make: (T, Seq[A]) => A): Gen[Seq[A]] =
|
||||
genAcyclic[A, T](maxDeps, keyGen, max) { t =>
|
||||
Gen.value { deps =>
|
||||
make(t, deps)
|
||||
}
|
||||
}
|
||||
|
||||
def genAcyclicDirect[A,T](maxDeps: Gen[Int], keyGen: Gen[T], max: Gen[Int])(make: (T, Seq[A]) => A): Gen[Seq[ A ]] =
|
||||
genAcyclic[A,T](maxDeps, keyGen, max) { t =>
|
||||
Gen.value { deps =>
|
||||
make(t, deps)
|
||||
}
|
||||
}
|
||||
def genAcyclic[A, T](maxDeps: Gen[Int], keyGen: Gen[T], max: Gen[Int])(make: T => Gen[Seq[A] => A]): Gen[Seq[A]] =
|
||||
max flatMap { count =>
|
||||
listOfN(count, keyGen) flatMap { keys =>
|
||||
genAcyclic(maxDeps, keys.distinct)(make)
|
||||
}
|
||||
}
|
||||
def genAcyclic[A, T](maxDeps: Gen[Int], keys: List[T])(make: T => Gen[Seq[A] => A]): Gen[Seq[A]] =
|
||||
genAcyclic(maxDeps, keys, Nil) flatMap { pairs =>
|
||||
sequence(pairs.map { case (key, deps) => mapMake(key, deps, make) }) flatMap { inputs =>
|
||||
val made = new collection.mutable.HashMap[T, A]
|
||||
for ((key, deps, mk) <- inputs)
|
||||
made(key) = mk(deps map made)
|
||||
keys map made
|
||||
}
|
||||
}
|
||||
|
||||
def genAcyclic[A,T](maxDeps: Gen[Int], keyGen: Gen[T], max: Gen[Int])(make: T => Gen[Seq[A] => A]): Gen[Seq[ A ]] =
|
||||
max flatMap { count =>
|
||||
listOfN(count, keyGen) flatMap { keys =>
|
||||
genAcyclic(maxDeps, keys.distinct)(make)
|
||||
}
|
||||
}
|
||||
def genAcyclic[A,T](maxDeps: Gen[Int], keys: List[T])(make: T => Gen[Seq[A] => A]): Gen[Seq[ A ]] =
|
||||
genAcyclic(maxDeps, keys, Nil) flatMap { pairs =>
|
||||
sequence( pairs.map { case (key, deps) => mapMake(key, deps, make) } ) flatMap { inputs =>
|
||||
val made = new collection.mutable.HashMap[T, A]
|
||||
for( (key, deps, mk) <- inputs)
|
||||
made(key) = mk(deps map made)
|
||||
keys map made
|
||||
}
|
||||
}
|
||||
def mapMake[A, T](key: T, deps: Seq[T], make: T => Gen[Seq[A] => A]): Gen[Inputs[A, T]] =
|
||||
make(key) map { (mk: Seq[A] => A) => (key, deps, mk) }
|
||||
|
||||
def mapMake[A,T](key: T, deps: Seq[T], make: T => Gen[Seq[A] => A]): Gen[Inputs[A,T]] =
|
||||
make(key) map { (mk: Seq[A] => A) => (key, deps, mk) }
|
||||
|
||||
def genAcyclic[T](maxDeps: Gen[Int], names: List[T], acc: List[Gen[ (T,Seq[T]) ]]): Gen[Seq[ (T,Seq[T]) ]] =
|
||||
names match
|
||||
{
|
||||
case Nil => sequence(acc)
|
||||
case x :: xs =>
|
||||
val next = for(depCount <- maxDeps; d <- pick(depCount min xs.size, xs) ) yield (x, d.toList)
|
||||
genAcyclic(maxDeps, xs, next :: acc)
|
||||
}
|
||||
def sequence[T](gs: Seq[Gen[T]]): Gen[Seq[T]] = Gen.parameterized { prms =>
|
||||
wrap( gs map { g => g(prms) getOrElse error("failed generator") } )
|
||||
}
|
||||
type Inputs[A,T] = (T, Seq[T], Seq[A] => A)
|
||||
def genAcyclic[T](maxDeps: Gen[Int], names: List[T], acc: List[Gen[(T, Seq[T])]]): Gen[Seq[(T, Seq[T])]] =
|
||||
names match {
|
||||
case Nil => sequence(acc)
|
||||
case x :: xs =>
|
||||
val next = for (depCount <- maxDeps; d <- pick(depCount min xs.size, xs)) yield (x, d.toList)
|
||||
genAcyclic(maxDeps, xs, next :: acc)
|
||||
}
|
||||
def sequence[T](gs: Seq[Gen[T]]): Gen[Seq[T]] = Gen.parameterized { prms =>
|
||||
wrap(gs map { g => g(prms) getOrElse error("failed generator") })
|
||||
}
|
||||
type Inputs[A, T] = (T, Seq[T], Seq[A] => A)
|
||||
}
|
||||
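genAcyclic keeps the generated configuration, task, and project graphs free of cycles by letting each key depend only on keys that come later in the already de-duplicated key list. A standalone sketch of that construction with scala.util.Random in place of ScalaCheck generators (Node and the method below are illustrative names, not the code above):

object AcyclicSketch extends App {
  import scala.util.Random

  final case class Node(id: String, deps: Seq[Node])

  // Keys are processed right to left, so every candidate dependency is already
  // built and sits strictly later in the list: the graph is acyclic by construction.
  def genAcyclic(keys: List[String], maxDeps: Int, rnd: Random): Seq[Node] = {
    val made = keys.foldRight(Map.empty[String, Node]) { (key, built) =>
      val deps = rnd.shuffle(built.values.toSeq).take(rnd.nextInt(maxDeps + 1))
      built.updated(key, Node(key, deps))
    }
    keys map made
  }

  genAcyclic(List("a", "b", "c", "d"), maxDeps = 2, new Random(0)) foreach println
}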
|
|
|
|||
|
|
@ -152,6 +152,7 @@ object Sbt extends Build {
|
|||
)
|
||||
|
||||
private def doScripted(launcher: File, scriptedSbtClasspath: Seq[Attributed[File]], scriptedSbtInstance: ScalaInstance, sourcePath: File, args: Seq[String]) {
|
||||
System.err.println(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}")
|
||||
val noJLine = new classpath.FilteredLoader(scriptedSbtInstance.loader, "jline." :: Nil)
|
||||
val loader = classpath.ClasspathUtilities.toLoader(scriptedSbtClasspath.files, noJLine)
|
||||
val m = ModuleUtilities.getObject("sbt.test.ScriptedTests", loader)
|
||||
|
|
@ -174,6 +175,8 @@ object Sbt extends Build {
|
|||
|
||||
import sbt.complete._
|
||||
import DefaultParsers._
|
||||
// Paging, 1-index based.
|
||||
case class ScriptedTestPage(page: Int, total: Int)
|
||||
def scriptedParser(scriptedBase: File): Parser[Seq[String]] =
|
||||
{
|
||||
val pairs = (scriptedBase * AllPassFilter * AllPassFilter * "test").get map { (f: File) =>
|
||||
|
|
@ -184,9 +187,36 @@ object Sbt extends Build {
|
|||
|
||||
val id = charClass(c => !c.isWhitespace && c != '/').+.string
|
||||
val groupP = token(id.examples(pairMap.keySet.toSet)) <~ token('/')
|
||||
def nameP(group: String) = token("*".id | id.examples(pairMap(group)))
|
||||
val testID = for (group <- groupP; name <- nameP(group)) yield (group, name)
|
||||
(token(Space) ~> matched(testID)).*
|
||||
|
||||
// A parser for page definitions
|
||||
val pageP: Parser[ScriptedTestPage] = ("*" ~ NatBasic ~ "of" ~ NatBasic) map {
|
||||
case _ ~ page ~ _ ~ total => ScriptedTestPage(page, total)
|
||||
}
|
||||
// Grabs the filenames from a given test group in the current page definition.
|
||||
def pagedFilenames(group: String, page: ScriptedTestPage): Seq[String] = {
|
||||
val files = pairMap(group).toSeq.sortBy(_.toLowerCase)
|
||||
val pageSize = files.size / page.total
|
||||
// The last page may lose some values, so we explicitly keep them
|
||||
val dropped = files.drop(pageSize * (page.page - 1))
|
||||
if (page.page == page.total) dropped
|
||||
else dropped.take(pageSize)
|
||||
}
|
||||
def nameP(group: String) = {
|
||||
token("*".id | id.examples(pairMap(group)))
|
||||
}
|
||||
val PagedIds: Parser[Seq[String]] =
|
||||
for {
|
||||
group <- groupP
|
||||
page <- pageP
|
||||
files = pagedFilenames(group, page)
|
||||
// TODO - Fail the parser if we don't have enough files for the given page size
|
||||
//if !files.isEmpty
|
||||
} yield files map (f => group + '/' + f)
|
||||
|
||||
val testID = (for (group <- groupP; name <- nameP(group)) yield (group, name))
|
||||
val testIdAsGroup = matched(testID) map (test => Seq(test))
|
||||
//(token(Space) ~> matched(testID)).*
|
||||
(token(Space) ~> (PagedIds | testIdAsGroup)).* map (_.flatten)
|
||||
}
|
||||
|
||||
lazy val scripted = InputKey[Unit]("scripted")
|
||||
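The parser additions above let a test run name a slice of a group: pageP accepts an id ending in something like *2of3, and pagedFilenames turns that into the second of three roughly equal slices, with the remainder kept on the last page. The same arithmetic without sbt's parser combinators:

object PagingSketch extends App {
  // Split files into `total` pages; the last page keeps the remainder so no test is dropped.
  def page(files: Seq[String], pageNo: Int, total: Int): Seq[String] = {
    val sorted = files.sortBy(_.toLowerCase)
    val pageSize = sorted.size / total
    val dropped = sorted.drop(pageSize * (pageNo - 1))
    if (pageNo == total) dropped else dropped.take(pageSize)
  }

  val files = Seq("a", "b", "c", "d", "e", "f", "g")
  println(page(files, 1, 3)) // List(a, b)
  println(page(files, 2, 3)) // List(c, d)
  println(page(files, 3, 3)) // List(e, f, g): the remainder lands on the last page
}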
|
|
|
|||
|
|
@ -1,55 +1,58 @@
|
|||
package sbt
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop.{Exception => _, _}
|
||||
import Gen.{alphaNumChar,frequency,listOf1,oneOf}
|
||||
import Prop.{ Exception => _, _ }
|
||||
import Gen.{ alphaNumChar, frequency, listOf1, oneOf }
|
||||
import java.io.File
|
||||
|
||||
object ForkTest extends Properties("Fork")
|
||||
{
|
||||
/** Heuristic for limiting the length of the classpath string.
|
||||
* Longer than this will hit hard limits in the total space
|
||||
* allowed for process initialization, which includes environment variables, at least on linux. */
|
||||
final val MaximumClasspathLength = 100000
|
||||
object ForkTest extends Properties("Fork") {
|
||||
/**
|
||||
* Heuristic for limiting the length of the classpath string.
|
||||
* Longer than this will hit hard limits in the total space
|
||||
* allowed for process initialization, which includes environment variables, at least on linux.
|
||||
*/
|
||||
final val MaximumClasspathLength = 100000
|
||||
|
||||
lazy val genOptionName = frequency( ( 9, Some("-cp")), (9, Some("-classpath")), (1, None))
|
||||
lazy val pathElement = listOf1(alphaNumChar).map(_.mkString)
|
||||
lazy val path = listOf1(pathElement).map(_.mkString(File.separator))
|
||||
lazy val genRelClasspath = listOf1(path)
|
||||
lazy val genOptionName = frequency((9, Some("-cp")), (9, Some("-classpath")), (1, None))
|
||||
lazy val pathElement = listOf1(alphaNumChar).map(_.mkString)
|
||||
lazy val path = listOf1(pathElement).map(_.mkString(File.separator))
|
||||
lazy val genRelClasspath = listOf1(path)
|
||||
|
||||
lazy val requiredEntries =
|
||||
IO.classLocationFile[scala.Option[_]] ::
|
||||
IO.classLocationFile[sbt.exit.type] ::
|
||||
Nil
|
||||
lazy val mainAndArgs =
|
||||
"sbt.exit" ::
|
||||
"0" ::
|
||||
Nil
|
||||
lazy val requiredEntries =
|
||||
IO.classLocationFile[scala.Option[_]] ::
|
||||
IO.classLocationFile[sbt.exit.type] ::
|
||||
Nil
|
||||
lazy val mainAndArgs =
|
||||
"sbt.exit" ::
|
||||
"0" ::
|
||||
Nil
|
||||
|
||||
property("Arbitrary length classpath successfully passed.") = forAllNoShrink(genOptionName, genRelClasspath) { (optionName: Option[String], relCP: List[String]) =>
|
||||
IO.withTemporaryDirectory { dir => TestLogger { log =>
|
||||
val withScala = requiredEntries ::: relCP.map(rel => new File(dir, rel))
|
||||
val absClasspath = trimClasspath(Path.makeString(withScala))
|
||||
val args = optionName.map(_ :: absClasspath :: Nil).toList.flatten ++ mainAndArgs
|
||||
val config = ForkOptions(outputStrategy = Some(LoggedOutput(log)))
|
||||
val exitCode = try Fork.java(config, args) catch { case e: Exception => e.printStackTrace; 1 }
|
||||
val expectedCode = if(optionName.isEmpty) 1 else 0
|
||||
s"temporary directory: ${dir.getAbsolutePath}" |:
|
||||
s"required classpath: ${requiredEntries.mkString("\n\t", "\n\t", "")}" |:
|
||||
s"main and args: ${mainAndArgs.mkString(" ")}" |:
|
||||
s"args length: ${args.mkString(" ").length}" |:
|
||||
s"exitCode: $exitCode, expected: $expectedCode" |:
|
||||
(exitCode == expectedCode)
|
||||
}}
|
||||
}
|
||||
property("Arbitrary length classpath successfully passed.") = forAllNoShrink(genOptionName, genRelClasspath) { (optionName: Option[String], relCP: List[String]) =>
|
||||
IO.withTemporaryDirectory { dir =>
|
||||
TestLogger { log =>
|
||||
val withScala = requiredEntries ::: relCP.map(rel => new File(dir, rel))
|
||||
val absClasspath = trimClasspath(Path.makeString(withScala))
|
||||
val args = optionName.map(_ :: absClasspath :: Nil).toList.flatten ++ mainAndArgs
|
||||
val config = ForkOptions(outputStrategy = Some(LoggedOutput(log)))
|
||||
val exitCode = try Fork.java(config, args) catch { case e: Exception => e.printStackTrace; 1 }
|
||||
val expectedCode = if (optionName.isEmpty) 1 else 0
|
||||
s"temporary directory: ${dir.getAbsolutePath}" |:
|
||||
s"required classpath: ${requiredEntries.mkString("\n\t", "\n\t", "")}" |:
|
||||
s"main and args: ${mainAndArgs.mkString(" ")}" |:
|
||||
s"args length: ${args.mkString(" ").length}" |:
|
||||
s"exitCode: $exitCode, expected: $expectedCode" |:
|
||||
(exitCode == expectedCode)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def trimClasspath(cp: String): String =
|
||||
if(cp.length > MaximumClasspathLength) {
|
||||
val lastEntryI = cp.lastIndexOf(File.pathSeparatorChar, MaximumClasspathLength)
|
||||
if(lastEntryI > 0)
|
||||
cp.substring(0, lastEntryI)
|
||||
else
|
||||
cp
|
||||
} else
|
||||
cp
|
||||
private[this] def trimClasspath(cp: String): String =
|
||||
if (cp.length > MaximumClasspathLength) {
|
||||
val lastEntryI = cp.lastIndexOf(File.pathSeparatorChar, MaximumClasspathLength)
|
||||
if (lastEntryI > 0)
|
||||
cp.substring(0, lastEntryI)
|
||||
else
|
||||
cp
|
||||
} else
|
||||
cp
|
||||
}
|
||||
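trimClasspath keeps whole classpath entries by cutting at the last path separator before MaximumClasspathLength rather than at the limit itself. A small demonstration of the same cut with an artificially low limit (the value 20 is only for illustration):

object TrimSketch extends App {
  import java.io.File

  def trim(cp: String, max: Int): String =
    if (cp.length > max) {
      val lastEntry = cp.lastIndexOf(File.pathSeparatorChar, max)
      if (lastEntry > 0) cp.substring(0, lastEntry) else cp
    } else cp

  val cp = Seq("lib/a.jar", "lib/b.jar", "lib/c.jar").mkString(File.pathSeparator)
  println(trim(cp, 20)) // drops lib/c.jar entirely instead of cutting it in half
}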
|
|
@ -8,43 +8,42 @@ import Prop._
|
|||
import TaskGen._
|
||||
import Task._
|
||||
|
||||
object ExecuteSpec extends Properties("Execute")
|
||||
{
|
||||
val iGen = Arbitrary.arbInt.arbitrary
|
||||
property("evaluates simple task") = forAll(iGen, MaxWorkersGen) { (i: Int, workers: Int) =>
|
||||
("Workers: " + workers) |:
|
||||
checkResult(tryRun(task(i), false, workers), i)
|
||||
}
|
||||
// no direct dependencies currently
|
||||
/*property("evaluates simple static graph") = forAll(iGen, MaxWorkersGen) { (i: Int, workers: Int) =>
|
||||
object ExecuteSpec extends Properties("Execute") {
|
||||
val iGen = Arbitrary.arbInt.arbitrary
|
||||
property("evaluates simple task") = forAll(iGen, MaxWorkersGen) { (i: Int, workers: Int) =>
|
||||
("Workers: " + workers) |:
|
||||
checkResult(tryRun(task(i), false, workers), i)
|
||||
}
|
||||
// no direct dependencies currently
|
||||
/*property("evaluates simple static graph") = forAll(iGen, MaxWorkersGen) { (i: Int, workers: Int) =>
|
||||
("Workers: " + workers) |:
|
||||
{
|
||||
def result = tryRun(Task(i) dependsOn(task(false),task("a")), false, workers)
|
||||
checkResult(result, i)
|
||||
}
|
||||
}*/
|
||||
|
||||
property("evaluates simple mapped task") = forAll(iGen, MaxTasksGen, MaxWorkersGen) { (i: Int, times: Int, workers: Int) =>
|
||||
("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
|
||||
{
|
||||
def result = tryRun(task(i).map(_*times), false, workers)
|
||||
checkResult(result, i*times)
|
||||
}
|
||||
}
|
||||
property("evaluates chained mapped task") = forAllNoShrink(iGen, MaxTasksGen, MaxWorkersGen) { (i: Int, times: Int, workers: Int) =>
|
||||
("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
|
||||
{
|
||||
val initial = task(0) map(identity[Int])
|
||||
def t = ( initial /: (0 until times) )( (t,ignore) => t.map(_ + i))
|
||||
checkResult(tryRun(t, false, workers), i*times)
|
||||
}
|
||||
}
|
||||
|
||||
property("evaluates simple bind") = forAll(iGen, MaxTasksGen, MaxWorkersGen) { (i: Int, times: Int, workers: Int) =>
|
||||
("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
|
||||
{
|
||||
def result = tryRun(task(i).flatMap(x => task(x*times)), false, workers)
|
||||
checkResult(result, i*times)
|
||||
}
|
||||
}
|
||||
property("evaluates simple mapped task") = forAll(iGen, MaxTasksGen, MaxWorkersGen) { (i: Int, times: Int, workers: Int) =>
|
||||
("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
|
||||
{
|
||||
def result = tryRun(task(i).map(_ * times), false, workers)
|
||||
checkResult(result, i * times)
|
||||
}
|
||||
}
|
||||
property("evaluates chained mapped task") = forAllNoShrink(iGen, MaxTasksGen, MaxWorkersGen) { (i: Int, times: Int, workers: Int) =>
|
||||
("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
|
||||
{
|
||||
val initial = task(0) map (identity[Int])
|
||||
def t = (initial /: (0 until times))((t, ignore) => t.map(_ + i))
|
||||
checkResult(tryRun(t, false, workers), i * times)
|
||||
}
|
||||
}
|
||||
|
||||
property("evaluates simple bind") = forAll(iGen, MaxTasksGen, MaxWorkersGen) { (i: Int, times: Int, workers: Int) =>
|
||||
("Workers: " + workers) |: ("Value: " + i) |: ("Times: " + times) |:
|
||||
{
|
||||
def result = tryRun(task(i).flatMap(x => task(x * times)), false, workers)
|
||||
checkResult(result, i * times)
|
||||
}
|
||||
}
|
||||
}
|
||||
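In the chained-map property, the expected value follows from the fold: starting from a task of 0 and applying `times` map steps that each add i must yield i * times. The same arithmetic without the task machinery:

object ChainSketch extends App {
  val (i, times) = (7, 5)
  // folding `times` increments of i onto 0 is just multiplication
  val chained = (0 until times).foldLeft(0)((acc, _) => acc + i)
  assert(chained == i * times)
  println(chained) // 35
}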
|
|
@ -6,28 +6,27 @@ package sbt
|
|||
import org.scalacheck._
|
||||
import Gen.choose
|
||||
|
||||
object TaskGen extends std.TaskExtra
|
||||
{
|
||||
// upper bounds to make the tests finish in reasonable time
|
||||
val MaxTasks = 100
|
||||
val MaxWorkers = 29
|
||||
val MaxJoin = 20
|
||||
|
||||
val MaxTasksGen = choose(0, MaxTasks)
|
||||
val MaxWorkersGen = choose(1, MaxWorkers)
|
||||
val MaxJoinGen = choose(0, MaxJoin)
|
||||
val TaskListGen = MaxTasksGen.flatMap(size => Gen.listOfN(size, Arbitrary.arbInt.arbitrary))
|
||||
object TaskGen extends std.TaskExtra {
|
||||
// upper bounds to make the tests finish in reasonable time
|
||||
val MaxTasks = 100
|
||||
val MaxWorkers = 29
|
||||
val MaxJoin = 20
|
||||
|
||||
def run[T](root: Task[T], checkCycles: Boolean, maxWorkers: Int): Result[T] =
|
||||
{
|
||||
val (service, shutdown) = CompletionService[Task[_], Completed](maxWorkers)
|
||||
val dummies = std.Transform.DummyTaskMap(Nil)
|
||||
val x = new Execute[Task](Execute.config(checkCycles), Execute.noTriggers, ExecuteProgress.empty[Task])(std.Transform(dummies))
|
||||
try { x.run(root)(service) } finally { shutdown() }
|
||||
}
|
||||
def tryRun[T](root: Task[T], checkCycles: Boolean, maxWorkers: Int): T =
|
||||
run(root, checkCycles, maxWorkers) match {
|
||||
case Value(v) => v
|
||||
case Inc(i) => throw i
|
||||
}
|
||||
val MaxTasksGen = choose(0, MaxTasks)
|
||||
val MaxWorkersGen = choose(1, MaxWorkers)
|
||||
val MaxJoinGen = choose(0, MaxJoin)
|
||||
val TaskListGen = MaxTasksGen.flatMap(size => Gen.listOfN(size, Arbitrary.arbInt.arbitrary))
|
||||
|
||||
def run[T](root: Task[T], checkCycles: Boolean, maxWorkers: Int): Result[T] =
|
||||
{
|
||||
val (service, shutdown) = CompletionService[Task[_], Completed](maxWorkers)
|
||||
val dummies = std.Transform.DummyTaskMap(Nil)
|
||||
val x = new Execute[Task](Execute.config(checkCycles), Execute.noTriggers, ExecuteProgress.empty[Task])(std.Transform(dummies))
|
||||
try { x.run(root)(service) } finally { shutdown() }
|
||||
}
|
||||
def tryRun[T](root: Task[T], checkCycles: Boolean, maxWorkers: Int): T =
|
||||
run(root, checkCycles, maxWorkers) match {
|
||||
case Value(v) => v
|
||||
case Inc(i) => throw i
|
||||
}
|
||||
}
|
||||
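run above wires the task graph to a CompletionService bounded by maxWorkers. As an analogy only, not sbt's implementation, the same submit-and-drain pattern with the standard library's ExecutorCompletionService shows what that bound controls:

object PoolSketch extends App {
  import java.util.concurrent.{ Callable, ExecutorCompletionService, Executors }

  val maxWorkers = 4
  val pool = Executors.newFixedThreadPool(maxWorkers)
  val cs = new ExecutorCompletionService[Int](pool)

  // submit ten units of work; at most maxWorkers run at any one time
  (1 to 10).foreach(i => cs.submit(new Callable[Int] { def call() = i * i }))
  val results = (1 to 10).map(_ => cs.take().get())
  pool.shutdown()
  println(results.sum) // 385
}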
|
|
@ -5,35 +5,34 @@ import Prop._
|
|||
import TaskGen._
|
||||
import Task._
|
||||
|
||||
object TaskRunnerCircularTest extends Properties("TaskRunner Circular")
|
||||
{
|
||||
property("Catches circular references") = forAll(MaxTasksGen, MaxWorkersGen) { checkCircularReferences _ }
|
||||
property("Allows references to completed tasks") = forAllNoShrink(MaxTasksGen, MaxWorkersGen) { allowedReference _ }
|
||||
final def allowedReference(intermediate: Int, workers: Int) =
|
||||
{
|
||||
val top = task(intermediate).named("top")
|
||||
def iterate(tk: Task[Int]): Task[Int] =
|
||||
tk flatMap { t =>
|
||||
if(t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(task(t-1).named((t-1).toString) )
|
||||
}
|
||||
try { checkResult(tryRun(iterate(top), true, workers), intermediate) }
|
||||
catch { case i: Incomplete if cyclic(i) => ("Unexpected cyclic exception: " + i) |: false }
|
||||
}
|
||||
final def checkCircularReferences(intermediate: Int, workers: Int) =
|
||||
{
|
||||
lazy val top = iterate(task(intermediate).named("bottom"), intermediate)
|
||||
def iterate(tk: Task[Int], i: Int): Task[Int] =
|
||||
tk flatMap { t =>
|
||||
if(t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(task(t-1).named((t-1).toString), i-1)
|
||||
}
|
||||
try { tryRun(top, true, workers); false }
|
||||
catch { case i: Incomplete => cyclic(i) }
|
||||
}
|
||||
def cyclic(i: Incomplete) = Incomplete.allExceptions(i).exists(_.isInstanceOf[Execute[Task]#CyclicException[_]])
|
||||
object TaskRunnerCircularTest extends Properties("TaskRunner Circular") {
|
||||
property("Catches circular references") = forAll(MaxTasksGen, MaxWorkersGen) { checkCircularReferences _ }
|
||||
property("Allows references to completed tasks") = forAllNoShrink(MaxTasksGen, MaxWorkersGen) { allowedReference _ }
|
||||
final def allowedReference(intermediate: Int, workers: Int) =
|
||||
{
|
||||
val top = task(intermediate).named("top")
|
||||
def iterate(tk: Task[Int]): Task[Int] =
|
||||
tk flatMap { t =>
|
||||
if (t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(task(t - 1).named((t - 1).toString))
|
||||
}
|
||||
try { checkResult(tryRun(iterate(top), true, workers), intermediate) }
|
||||
catch { case i: Incomplete if cyclic(i) => ("Unexpected cyclic exception: " + i) |: false }
|
||||
}
|
||||
final def checkCircularReferences(intermediate: Int, workers: Int) =
|
||||
{
|
||||
lazy val top = iterate(task(intermediate).named("bottom"), intermediate)
|
||||
def iterate(tk: Task[Int], i: Int): Task[Int] =
|
||||
tk flatMap { t =>
|
||||
if (t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(task(t - 1).named((t - 1).toString), i - 1)
|
||||
}
|
||||
try { tryRun(top, true, workers); false }
|
||||
catch { case i: Incomplete => cyclic(i) }
|
||||
}
|
||||
def cyclic(i: Incomplete) = Incomplete.allExceptions(i).exists(_.isInstanceOf[Execute[Task]#CyclicException[_]])
|
||||
}
|
||||
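checkCircularReferences builds a chain of tasks whose last link flatMaps back to the top and expects the engine to surface a CyclicException. The structural condition being asserted, a dependency path that returns to a node already on the current path, can be sketched over a plain dependency map (illustrative only, not how Execute detects cycles):

object CycleSketch extends App {
  // A dependency chain that reaches a node already on the current path is a cycle.
  def hasCycle(deps: Map[String, Seq[String]]): Boolean = {
    def visit(n: String, path: Set[String]): Boolean =
      path(n) || deps.getOrElse(n, Nil).exists(visit(_, path + n))
    deps.keys.exists(visit(_, Set.empty))
  }

  println(hasCycle(Map("top" -> Seq("a"), "a" -> Seq("b"), "b" -> Seq("top")))) // true
  println(hasCycle(Map("top" -> Seq("a"), "a" -> Seq("b"), "b" -> Nil)))        // false
}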
|
|
@ -6,32 +6,30 @@ import Task._
|
|||
import TaskGen._
|
||||
import math.abs
|
||||
|
||||
object TaskRunnerForkTest extends Properties("TaskRunner Fork")
|
||||
{
|
||||
property("fork m tasks and wait for all to complete") = forAll(MaxTasksGen, MaxWorkersGen) { (m: Int, workers: Int) =>
|
||||
val values = (0 until m).toList
|
||||
checkResult(tryRun(values.fork(f => () ).join.map(_.toList),false, workers), values)
|
||||
true
|
||||
}
|
||||
property("Fork and reduce 2") = forAll(MaxTasksGen, MaxWorkersGen) { (m: Int, workers: Int) =>
|
||||
(m > 1) ==> {
|
||||
val task = (0 to m) fork {_ * 10} reduced{_ + _}
|
||||
checkResult(tryRun(task, false, workers), 5*(m+1)*m)
|
||||
}
|
||||
}
|
||||
property("Double join") = forAll(MaxJoinGen, MaxJoinGen, MaxWorkersGen) { (a: Int, b: Int, workers: Int) =>
|
||||
runDoubleJoin(abs(a),abs(b),workers)
|
||||
true
|
||||
}
|
||||
def runDoubleJoin(a: Int, b: Int, workers: Int)
|
||||
{
|
||||
def inner(i: Int) = List.range(0, b).map(j => task(j).named(j.toString)).join
|
||||
tryRun( List.range(0,a).map(inner).join, false, workers)
|
||||
}
|
||||
property("fork and reduce") = forAll(TaskListGen, MaxWorkersGen) { (m: List[Int], workers: Int) =>
|
||||
(!m.isEmpty) ==> {
|
||||
val expected = m.reduceLeft(_+_)
|
||||
checkResult(tryRun( m.tasks.reduced(_ + _), false, workers), expected)
|
||||
}
|
||||
}
|
||||
object TaskRunnerForkTest extends Properties("TaskRunner Fork") {
|
||||
property("fork m tasks and wait for all to complete") = forAll(MaxTasksGen, MaxWorkersGen) { (m: Int, workers: Int) =>
|
||||
val values = (0 until m).toList
|
||||
checkResult(tryRun(values.fork(f => ()).join.map(_.toList), false, workers), values)
|
||||
true
|
||||
}
|
||||
property("Fork and reduce 2") = forAll(MaxTasksGen, MaxWorkersGen) { (m: Int, workers: Int) =>
|
||||
(m > 1) ==> {
|
||||
val task = (0 to m) fork { _ * 10 } reduced { _ + _ }
|
||||
checkResult(tryRun(task, false, workers), 5 * (m + 1) * m)
|
||||
}
|
||||
}
|
||||
property("Double join") = forAll(MaxJoinGen, MaxJoinGen, MaxWorkersGen) { (a: Int, b: Int, workers: Int) =>
|
||||
runDoubleJoin(abs(a), abs(b), workers)
|
||||
true
|
||||
}
|
||||
def runDoubleJoin(a: Int, b: Int, workers: Int) {
|
||||
def inner(i: Int) = List.range(0, b).map(j => task(j).named(j.toString)).join
|
||||
tryRun(List.range(0, a).map(inner).join, false, workers)
|
||||
}
|
||||
property("fork and reduce") = forAll(TaskListGen, MaxWorkersGen) { (m: List[Int], workers: Int) =>
|
||||
(!m.isEmpty) ==> {
|
||||
val expected = m.reduceLeft(_ + _)
|
||||
checkResult(tryRun(m.tasks.reduced(_ + _), false, workers), expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
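The expected value 5 * (m + 1) * m in "Fork and reduce 2" is the closed form of summing _ * 10 over 0 to m, that is 10 * m * (m + 1) / 2. A one-line check of the arithmetic:

object ForkReduceSketch extends App {
  val m = 6
  val summed = (0 to m).map(_ * 10).sum
  assert(summed == 5 * (m + 1) * m) // 10 * m * (m + 1) / 2 == 5 * m * (m + 1)
  println(summed) // 210
}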
|
|
|
|||
|
|
@ -1,78 +1,76 @@
|
|||
package sbt
|
||||
package std
|
||||
|
||||
import Types._
|
||||
import TaskExtra._
|
||||
import TaskTest.tryRun
|
||||
import TaskGen.{MaxWorkers,MaxWorkersGen}
|
||||
import Types._
|
||||
import TaskExtra._
|
||||
import TaskTest.tryRun
|
||||
import TaskGen.{ MaxWorkers, MaxWorkersGen }
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop.forAll
|
||||
import Transform.taskToNode
|
||||
import ConcurrentRestrictions.{All, completionService, limitTotal, tagged => tagged0, TagMap, unrestricted}
|
||||
import org.scalacheck._
|
||||
import Prop.forAll
|
||||
import Transform.taskToNode
|
||||
import ConcurrentRestrictions.{ All, completionService, limitTotal, tagged => tagged0, TagMap, unrestricted }
|
||||
|
||||
import java.util.concurrent.{CountDownLatch, TimeUnit}
|
||||
import java.util.concurrent.{ CountDownLatch, TimeUnit }
|
||||
|
||||
object TaskSerial extends Properties("task serial")
|
||||
{
|
||||
val checkCycles = true
|
||||
val Timeout = 100 // in milliseconds
|
||||
object TaskSerial extends Properties("task serial") {
|
||||
val checkCycles = true
|
||||
val Timeout = 100 // in milliseconds
|
||||
|
||||
def eval[T](t: Task[T]): T = tryRun(t, checkCycles, limitTotal(MaxWorkers))
|
||||
def eval[T](t: Task[T]): T = tryRun(t, checkCycles, limitTotal(MaxWorkers))
|
||||
|
||||
property("Evaluates basic") = forAll { (i: Int) =>
|
||||
checkResult( eval( task(i) ), i )
|
||||
}
|
||||
property("Evaluates basic") = forAll { (i: Int) =>
|
||||
checkResult(eval(task(i)), i)
|
||||
}
|
||||
|
||||
property("Evaluates Function0") = forAll { (i: Int) =>
|
||||
checkResult( eval( () => i ), i )
|
||||
}
|
||||
property("Evaluates Function0") = forAll { (i: Int) =>
|
||||
checkResult(eval(() => i), i)
|
||||
}
|
||||
|
||||
// verifies that all tasks get scheduled simultaneously (1-3) or do not (4)
|
||||
property("Allows arbitrary task limit") = forAll(MaxWorkersGen) { (sze: Int) =>
|
||||
val size = math.max(1, sze)
|
||||
val halfSize = size / 2 + 1
|
||||
val all =
|
||||
checkArbitrary(size, tagged(_ => true), true ) &&
|
||||
checkArbitrary(size, unrestricted[Task[_]], true ) &&
|
||||
checkArbitrary(size, limitTotal[Task[_]](size), true ) &&
|
||||
checkArbitrary(size, limitTotal[Task[_]](halfSize), size <= halfSize )
|
||||
all :| ("Size: " + size) :| ("Half size: " + halfSize)
|
||||
}
|
||||
// verifies that all tasks get scheduled simultaneously (1-3) or do not (4)
|
||||
property("Allows arbitrary task limit") = forAll(MaxWorkersGen) { (sze: Int) =>
|
||||
val size = math.max(1, sze)
|
||||
val halfSize = size / 2 + 1
|
||||
val all =
|
||||
checkArbitrary(size, tagged(_ => true), true) &&
|
||||
checkArbitrary(size, unrestricted[Task[_]], true) &&
|
||||
checkArbitrary(size, limitTotal[Task[_]](size), true) &&
|
||||
checkArbitrary(size, limitTotal[Task[_]](halfSize), size <= halfSize)
|
||||
all :| ("Size: " + size) :| ("Half size: " + halfSize)
|
||||
}

  def checkArbitrary(size: Int, restrictions: ConcurrentRestrictions[Task[_]], shouldSucceed: Boolean) =
    {
      val latch = task { new CountDownLatch(size) }
      def mktask = latch map { l =>
        l.countDown()
        l.await(Timeout, TimeUnit.MILLISECONDS)
      }
      val tasks = (0 until size).map(_ => mktask).toList.join.map { results =>
        val success = results.forall(idFun[Boolean])
        assert(success == shouldSucceed, if (shouldSucceed) unschedulableMsg else scheduledMsg)
      }
      checkResult(evalRestricted(tasks)(restrictions), ())
    }
  def unschedulableMsg = "Some tasks were unschedulable: verify this is an actual failure by extending the timeout to several seconds."
  def scheduledMsg = "All tasks were unexpectedly scheduled."

  def tagged(f: TagMap => Boolean) = tagged0[Task[_]](_.tags, f)
  def evalRestricted[T](t: Task[T])(restrictions: ConcurrentRestrictions[Task[_]]): T =
    tryRun[T](t, checkCycles, restrictions)
}

object TaskTest {
  def run[T](root: Task[T], checkCycles: Boolean, restrictions: ConcurrentRestrictions[Task[_]]): Result[T] =
    {
      val (service, shutdown) = completionService[Task[_], Completed](restrictions, (x: String) => System.err.println(x))

      val x = new Execute[Task](Execute.config(checkCycles), Execute.noTriggers, ExecuteProgress.empty[Task])(taskToNode(idK[Task]))
      try { x.run(root)(service) } finally { shutdown() }
    }
  def tryRun[T](root: Task[T], checkCycles: Boolean, restrictions: ConcurrentRestrictions[Task[_]]): T =
    run(root, checkCycles, restrictions) match {
      case Value(v) => v
      case Inc(i) => throw i
    }
}
@@ -7,54 +7,52 @@ import Types._
import Task._
import Execute._

object Test extends std.TaskExtra {
  def t2[A, B](a: Task[A], b: Task[B]) = multInputTask[({ type l[L[x]] = (L[A], L[B]) })#l]((a, b))(AList.tuple2)
  def t3[A, B, C](a: Task[A], b: Task[B], c: Task[C]) = multInputTask[({ type l[L[x]] = (L[A], L[B], L[C]) })#l]((a, b, c))(AList.tuple3)

  val a = task(3)
  val b = task[Boolean](error("test"))
  val b2 = task(true)
  val c = task("asdf")

  val h1 = t3(a, b, c).map { case (aa, bb, cc) => aa + " " + bb + " " + cc }
  val h2 = t3(a, b2, c).map { case (aa, bb, cc) => aa + " " + bb + " " + cc }

  type Values = (Result[Int], Result[Boolean], Result[String])

  val f: Values => Any = {
    case (Value(aa), Value(bb), Value(cc)) => aa + " " + bb + " " + cc
    case x =>
      val cs = x.productIterator.toList.collect { case Inc(x) => x } // workaround for double definition bug
      throw Incomplete(None, causes = cs)
  }
  val d2 = t3(a, b2, c) mapR f
  val f2: Values => Task[Any] = {
    case (Value(aa), Value(bb), Value(cc)) => task(aa + " " + bb + " " + cc)
    case x => d3
  }
  lazy val d = t3(a, b, c) flatMapR f2
  val f3: Values => Task[Any] = {
    case (Value(aa), Value(bb), Value(cc)) => task(aa + " " + bb + " " + cc)
    case x => d2
  }
  lazy val d3 = t3(a, b, c) flatMapR f3

  def d4(i: Int): Task[Int] = nop flatMap { _ => val x = math.random; if (x < 0.01) task(i); else d4(i + 1) }

  def go() {
    def run[T](root: Task[T]) =
      println("Result : " + TaskGen.run(root, true, 2))

    run(a)
    run(b)
    run(b2)
    run(c)
    run(d)
    run(d2)
    run(d4(0))
    run(h1)
    run(h2)
  }
}
@@ -5,40 +5,39 @@ import Prop._
import TaskGen._
import Task._

object TaskRunnerCallTest extends Properties("TaskRunner Call") {
  property("calculates fibonacci") = forAll(MaxTasksGen, MaxWorkersGen) { (i: Int, workers: Int) =>
    (i > 0) ==> {
      val f = fibDirect(i)
      ("Workers: " + workers) |: ("i: " + i) |: ("fib(i): " + f) |:
        {
          def result = tryRun(fibTask(i), false, workers)
          checkResult(result, f)
        }
    }
  }
  final def fibTask(i: Int) =
    {
      require(i > 0)
      lazy val next: (Int, Int, Int) => Task[Int] =
        (index, x1, x2) =>
          {
            if (index == i)
              task(x2)
            else
              iterate((index + 1, x2, x1 + x2))
          }
      def iterate(iteration: (Int, Int, Int)) = task(iteration) flatMap next.tupled
      iterate((1, 0, 1))
    }
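  // Added note (not in the original source): for illustration, fibTask(4) evaluates as
  //   iterate((1, 0, 1)) -> next(1, 0, 1) -> iterate((2, 1, 1)) -> iterate((3, 1, 2)) -> iterate((4, 2, 3)) -> task(3)
  // i.e. each flatMap step shifts the (x1, x2) pair forward until index == i, matching fibDirect(4) == 3.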
  final def fibDirect(i: Int): Int =
    {
      require(i > 0)
      def build(index: Int, x1: Int, x2: Int): Int =
        if (index == i)
          x2
        else
          build(index + 1, x2, x1 + x2)
      build(1, 0, 1)
    }
}
@@ -9,42 +9,39 @@ import TaskGen._
import Task._
import Types._

object TaskRunnerSortTest extends Properties("TaskRunnerSort") {
  property("sort") = forAll(TaskListGen, MaxWorkersGen) { (list: List[Int], workers: Int) =>
    val a = list.toArray
    val sorted = a.toArray
    java.util.Arrays.sort(sorted)
    ("Workers: " + workers) |: ("Array: " + a.toList) |:
      {
        def result = tryRun(sort(a.toSeq), false, if (workers > 0) workers else 1)
        checkResult(result.toList, sorted.toList)
      }
  }
  final def sortDirect(a: Seq[Int]): Seq[Int] =
    {
      if (a.length < 2)
        a
      else {
        val pivot = a(0)
        val (lt, gte) = a.view.drop(1).partition(_ < pivot)
        sortDirect(lt) ++ List(pivot) ++ sortDirect(gte)
      }
    }
  final def sort(a: Seq[Int]): Task[Seq[Int]] =
    {
      if (a.length < 200)
        task(sortDirect(a))
      else {
        task(a) flatMap { a =>
          val pivot = a(0)
          val (lt, gte) = a.view.drop(1).partition(_ < pivot)
          Test.t2(sort(lt), sort(gte)) map {
            case (l, g) => l ++ List(pivot) ++ g
          }
        }
      }
    }
}
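// Added note (not in the original source): `sort` above is a task-based quicksort that only
// forks parallel sub-tasks for slices of 200 or more elements; smaller slices fall back to the
// sequential sortDirect. A minimal usage sketch, assuming the tryRun(task, checkCycles, workers)
// helper imported from TaskGen as in the property above:
//   tryRun(sort(Seq(3, 1, 2)), false, 4)   // == Seq(1, 2, 3)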
@@ -5,24 +5,20 @@ package sbt

import org.scalacheck.Prop._

object checkResult {
  def apply[T](run: => T, expected: T) =
    {
      ("Expected: " + expected) |:
        (try {
          val actual = run
          ("Actual: " + actual) |: (actual == expected)
        } catch {
          case i: Incomplete =>
            println(i)
            "One or more tasks failed" |: false
          case e =>
            e.printStackTrace
            "Error in framework" |: false
        })
    }
}
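// Added note (not in the original source): checkResult builds a labeled ScalaCheck Prop, e.g.
// checkResult(1 + 1, 2) passes with the labels "Expected: 2" and "Actual: 2", while a thrown
// Incomplete is caught and reported as "One or more tasks failed".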
@@ -5,27 +5,26 @@ import org.scalacheck._
|
|||
import Prop._
|
||||
import java.io.File
|
||||
|
||||
object ConcurrentCache extends Properties("ClassLoaderCache concurrent access")
|
||||
{
|
||||
implicit lazy val concurrentArb: Arbitrary[Int] = Arbitrary( Gen.choose(1, 1000) )
|
||||
implicit lazy val filenameArb: Arbitrary[String] = Arbitrary( Gen.alphaStr )
|
||||
object ConcurrentCache extends Properties("ClassLoaderCache concurrent access") {
|
||||
implicit lazy val concurrentArb: Arbitrary[Int] = Arbitrary(Gen.choose(1, 1000))
|
||||
implicit lazy val filenameArb: Arbitrary[String] = Arbitrary(Gen.alphaStr)
|
||||
|
||||
property("Same class loader for same classpaths concurrently processed") = forAll { (names: List[String], concurrent: Int) =>
|
||||
withcp(names.distinct) { files =>
|
||||
val cache = new ClassLoaderCache(null)
|
||||
val loaders = (1 to concurrent).par.map(_ => cache(files)).toList
|
||||
sameClassLoader(loaders)
|
||||
}
|
||||
}
|
||||
property("Same class loader for same classpaths concurrently processed") = forAll { (names: List[String], concurrent: Int) =>
|
||||
withcp(names.distinct) { files =>
|
||||
val cache = new ClassLoaderCache(null)
|
||||
val loaders = (1 to concurrent).par.map(_ => cache(files)).toList
|
||||
sameClassLoader(loaders)
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def withcp[T](names: List[String])(f: List[File] => T): T = IO.withTemporaryDirectory { tmp =>
|
||||
val files = names.map{ name =>
|
||||
val file = new File(tmp, name)
|
||||
IO.touch(file)
|
||||
file
|
||||
}
|
||||
f(files)
|
||||
}
|
||||
private[this] def sameClassLoader(loaders: Seq[ClassLoader]): Boolean = loaders.size < 2 ||
|
||||
loaders.sliding(2).forall { case Seq(x,y) => x == y }
|
||||
private[this] def withcp[T](names: List[String])(f: List[File] => T): T = IO.withTemporaryDirectory { tmp =>
|
||||
val files = names.map { name =>
|
||||
val file = new File(tmp, name)
|
||||
IO.touch(file)
|
||||
file
|
||||
}
|
||||
f(files)
|
||||
}
|
||||
private[this] def sameClassLoader(loaders: Seq[ClassLoader]): Boolean = loaders.size < 2 ||
|
||||
loaders.sliding(2).forall { case Seq(x, y) => x == y }
|
||||
}
@@ -8,49 +8,43 @@ import Prop._
|
|||
|
||||
import scala.collection.mutable.HashSet
|
||||
|
||||
object DagSpecification extends Properties("Dag")
|
||||
{
|
||||
property("No repeated nodes") = forAll{ (dag: TestDag) => isSet(dag.topologicalSort) }
|
||||
property("Sort contains node") = forAll{ (dag: TestDag) => dag.topologicalSort.contains(dag) }
|
||||
property("Dependencies precede node") = forAll{ (dag: TestDag) => dependenciesPrecedeNodes(dag.topologicalSort) }
|
||||
object DagSpecification extends Properties("Dag") {
|
||||
property("No repeated nodes") = forAll { (dag: TestDag) => isSet(dag.topologicalSort) }
|
||||
property("Sort contains node") = forAll { (dag: TestDag) => dag.topologicalSort.contains(dag) }
|
||||
property("Dependencies precede node") = forAll { (dag: TestDag) => dependenciesPrecedeNodes(dag.topologicalSort) }
|
||||
|
||||
implicit lazy val arbTestDag: Arbitrary[TestDag] = Arbitrary(Gen.sized(dagGen))
|
||||
private def dagGen(nodeCount: Int): Gen[TestDag] =
|
||||
{
|
||||
val nodes = new HashSet[TestDag]
|
||||
def nonterminalGen(p: Gen.Parameters): Gen[TestDag] =
|
||||
{
|
||||
for(i <- 0 until nodeCount; nextDeps <- Gen.someOf(nodes).apply(p))
|
||||
nodes += new TestDag(i, nextDeps)
|
||||
for(nextDeps <- Gen.someOf(nodes)) yield
|
||||
new TestDag(nodeCount, nextDeps)
|
||||
}
|
||||
Gen.parameterized(nonterminalGen)
|
||||
}
|
||||
implicit lazy val arbTestDag: Arbitrary[TestDag] = Arbitrary(Gen.sized(dagGen))
|
||||
private def dagGen(nodeCount: Int): Gen[TestDag] =
|
||||
{
|
||||
val nodes = new HashSet[TestDag]
|
||||
def nonterminalGen(p: Gen.Parameters): Gen[TestDag] =
|
||||
{
|
||||
for (i <- 0 until nodeCount; nextDeps <- Gen.someOf(nodes).apply(p))
|
||||
nodes += new TestDag(i, nextDeps)
|
||||
for (nextDeps <- Gen.someOf(nodes)) yield new TestDag(nodeCount, nextDeps)
|
||||
}
|
||||
Gen.parameterized(nonterminalGen)
|
||||
}
|
||||
|
||||
private def isSet[T](c: Seq[T]) = Set(c: _*).size == c.size
|
||||
private def dependenciesPrecedeNodes(sort: List[TestDag]) =
|
||||
{
|
||||
val seen = new HashSet[TestDag]
|
||||
def iterate(remaining: List[TestDag]): Boolean =
|
||||
{
|
||||
remaining match
|
||||
{
|
||||
case Nil => true
|
||||
case node :: tail =>
|
||||
if(node.dependencies.forall(seen.contains) && !seen.contains(node))
|
||||
{
|
||||
seen += node
|
||||
iterate(tail)
|
||||
}
|
||||
else
|
||||
false
|
||||
}
|
||||
}
|
||||
iterate(sort)
|
||||
}
|
||||
private def isSet[T](c: Seq[T]) = Set(c: _*).size == c.size
|
||||
private def dependenciesPrecedeNodes(sort: List[TestDag]) =
|
||||
{
|
||||
val seen = new HashSet[TestDag]
|
||||
def iterate(remaining: List[TestDag]): Boolean =
|
||||
{
|
||||
remaining match {
|
||||
case Nil => true
|
||||
case node :: tail =>
|
||||
if (node.dependencies.forall(seen.contains) && !seen.contains(node)) {
|
||||
seen += node
|
||||
iterate(tail)
|
||||
} else
|
||||
false
|
||||
}
|
||||
}
|
||||
iterate(sort)
|
||||
}
|
||||
}
|
||||
class TestDag(id: Int, val dependencies: Iterable[TestDag]) extends Dag[TestDag]
|
||||
{
|
||||
override def toString = id + "->" + dependencies.mkString("[", ",", "]")
|
||||
class TestDag(id: Int, val dependencies: Iterable[TestDag]) extends Dag[TestDag] {
|
||||
override def toString = id + "->" + dependencies.mkString("[", ",", "]")
|
||||
}
@@ -1,35 +1,32 @@
|
|||
package sbt
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
|
||||
object KeyTest extends Properties("AttributeKey")
|
||||
{
|
||||
property("equality") = {
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test", "description"), true) &&
|
||||
compare(AttributeKey[Int]("test", "a"), AttributeKey[Int]("test", "b"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("tests"), false) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Double]("test"), false) &&
|
||||
compare(AttributeKey[java.lang.Integer]("test"), AttributeKey[Int]("test"), false) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, String]]("test"), true) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, _]]("test"), false)
|
||||
}
|
||||
object KeyTest extends Properties("AttributeKey") {
|
||||
property("equality") = {
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("test", "description"), true) &&
|
||||
compare(AttributeKey[Int]("test", "a"), AttributeKey[Int]("test", "b"), true) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Int]("tests"), false) &&
|
||||
compare(AttributeKey[Int]("test"), AttributeKey[Double]("test"), false) &&
|
||||
compare(AttributeKey[java.lang.Integer]("test"), AttributeKey[Int]("test"), false) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, String]]("test"), true) &&
|
||||
compare(AttributeKey[Map[Int, String]]("test"), AttributeKey[Map[Int, _]]("test"), false)
|
||||
}
|
||||
|
||||
def compare(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
|
||||
("a.label: " + a.label) |:
|
||||
("a.manifest: " + a.manifest) |:
|
||||
("b.label: " + b.label) |:
|
||||
("b.manifest: " + b.manifest) |:
|
||||
("expected equal? " + same) |:
|
||||
compare0(a, b, same)
|
||||
def compare(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
|
||||
("a.label: " + a.label) |:
|
||||
("a.manifest: " + a.manifest) |:
|
||||
("b.label: " + b.label) |:
|
||||
("b.manifest: " + b.manifest) |:
|
||||
("expected equal? " + same) |:
|
||||
compare0(a, b, same)
|
||||
|
||||
def compare0(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
|
||||
if(same)
|
||||
{
|
||||
("equality" |: (a == b)) &&
|
||||
("hash" |: (a.hashCode == b.hashCode))
|
||||
}
|
||||
else
|
||||
("equality" |: (a != b))
|
||||
def compare0(a: AttributeKey[_], b: AttributeKey[_], same: Boolean) =
|
||||
if (same) {
|
||||
("equality" |: (a == b)) &&
|
||||
("hash" |: (a.hashCode == b.hashCode))
|
||||
} else
|
||||
("equality" |: (a != b))
|
||||
}
@@ -7,11 +7,11 @@ import Types._
|
|||
|
||||
// compilation test
|
||||
object LiteralTest {
|
||||
def x[A[_],B[_]](f: A ~> B) = f
|
||||
def x[A[_], B[_]](f: A ~> B) = f
|
||||
|
||||
import Param._
|
||||
val f = x { (p: Param[Option,List]) => p.ret( p.in.toList ) }
|
||||
val f = x { (p: Param[Option, List]) => p.ret(p.in.toList) }
|
||||
|
||||
val a: List[Int] = f( Some(3) )
|
||||
val b: List[String] = f( Some("aa") )
|
||||
val a: List[Int] = f(Some(3))
|
||||
val b: List[String] = f(Some("aa"))
|
||||
}
@@ -6,14 +6,13 @@ package sbt
|
|||
import Types._
|
||||
|
||||
// compilation test
|
||||
object PMapTest
|
||||
{
|
||||
val mp = new DelegatingPMap[Some, Id](new collection.mutable.HashMap)
|
||||
mp(Some("asdf")) = "a"
|
||||
mp(Some(3)) = 9
|
||||
val x = Some(3) :^: Some("asdf") :^: KNil
|
||||
val y = x.transform[Id](mp)
|
||||
assert(y.head == 9)
|
||||
assert(y.tail.head == "a")
|
||||
assert(y.tail.tail == KNil)
|
||||
object PMapTest {
|
||||
val mp = new DelegatingPMap[Some, Id](new collection.mutable.HashMap)
|
||||
mp(Some("asdf")) = "a"
|
||||
mp(Some(3)) = 9
|
||||
val x = Some(3) :^: Some("asdf") :^: KNil
|
||||
val y = x.transform[Id](mp)
|
||||
assert(y.head == 9)
|
||||
assert(y.tail.head == "a")
|
||||
assert(y.tail.tail == KNil)
|
||||
}
@@ -10,78 +10,78 @@ final case class Scope(nestIndex: Int, idAtIndex: Int = 0)
|
|||
// Lots of type constructors would become binary, which as you may know requires lots of type lambdas
|
||||
// when you want a type function with only one parameter.
|
||||
// That would be a general pain.)
|
||||
object SettingsExample extends Init[Scope]
|
||||
{
|
||||
// Provides a way of showing a Scope+AttributeKey[_]
|
||||
val showFullKey: Show[ScopedKey[_]] = new Show[ScopedKey[_]] {
|
||||
def apply(key: ScopedKey[_]) = s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"
|
||||
}
|
||||
object SettingsExample extends Init[Scope] {
|
||||
// Provides a way of showing a Scope+AttributeKey[_]
|
||||
val showFullKey: Show[ScopedKey[_]] = new Show[ScopedKey[_]] {
|
||||
def apply(key: ScopedKey[_]) = s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"
|
||||
}
|
||||
|
||||
  // A sample delegation function that delegates to a Scope with a lower index.
  val delegates: Scope => Seq[Scope] = {
    case s @ Scope(index, proj) =>
      s +: (if (index <= 0) Nil else { (if (proj > 0) List(Scope(index)) else Nil) ++: delegates(Scope(index - 1)) })
  }
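  // Added note (not in the original source), worked from the definition above:
  //   delegates(Scope(2, 1)) == Seq(Scope(2, 1), Scope(2), Scope(1), Scope(0))
  // i.e. a scoped key first looks in its own scope, then in the project-less scope at the
  // same nest index (when idAtIndex > 0), and then walks down the nest indices to Scope(0).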
|
||||
|
||||
// Not using this feature in this example.
|
||||
val scopeLocal: ScopeLocal = _ => Nil
|
||||
// Not using this feature in this example.
|
||||
val scopeLocal: ScopeLocal = _ => Nil
|
||||
|
||||
// These three functions + a scope (here, Scope) are sufficient for defining our settings system.
|
||||
// These three functions + a scope (here, Scope) are sufficient for defining our settings system.
|
||||
}
|
||||
|
||||
/** Usage Example **/
|
||||
|
||||
object SettingsUsage
|
||||
{
|
||||
import SettingsExample._
|
||||
import Types._
|
||||
object SettingsUsage {
|
||||
import SettingsExample._
|
||||
import Types._
|
||||
|
||||
// Define some keys
|
||||
val a = AttributeKey[Int]("a")
|
||||
val b = AttributeKey[Int]("b")
|
||||
// Define some keys
|
||||
val a = AttributeKey[Int]("a")
|
||||
val b = AttributeKey[Int]("b")
|
||||
|
||||
// Scope these keys
|
||||
val a3 = ScopedKey(Scope(3), a)
|
||||
val a4 = ScopedKey(Scope(4), a)
|
||||
val a5 = ScopedKey(Scope(5), a)
|
||||
// Scope these keys
|
||||
val a3 = ScopedKey(Scope(3), a)
|
||||
val a4 = ScopedKey(Scope(4), a)
|
||||
val a5 = ScopedKey(Scope(5), a)
|
||||
|
||||
val b4 = ScopedKey(Scope(4), b)
|
||||
val b4 = ScopedKey(Scope(4), b)
|
||||
|
||||
// Define some settings
|
||||
val mySettings: Seq[Setting[_]] = Seq(
|
||||
setting( a3, value( 3 ) ),
|
||||
setting( b4, map(a4)(_ * 3)),
|
||||
update(a5)(_ + 1)
|
||||
)
|
||||
// Define some settings
|
||||
val mySettings: Seq[Setting[_]] = Seq(
|
||||
setting(a3, value(3)),
|
||||
setting(b4, map(a4)(_ * 3)),
|
||||
update(a5)(_ + 1)
|
||||
)
|
||||
|
||||
// "compiles" and applies the settings.
|
||||
// This can be split into multiple steps to access intermediate results if desired.
|
||||
// The 'inspect' command operates on the output of 'compile', for example.
|
||||
val applied: Settings[Scope] = make(mySettings)(delegates, scopeLocal, showFullKey)
|
||||
// "compiles" and applies the settings.
|
||||
// This can be split into multiple steps to access intermediate results if desired.
|
||||
// The 'inspect' command operates on the output of 'compile', for example.
|
||||
val applied: Settings[Scope] = make(mySettings)(delegates, scopeLocal, showFullKey)
|
||||
|
||||
// Show results.
|
||||
/* for(i <- 0 to 5; k <- Seq(a, b)) {
|
||||
// Show results.
|
||||
/* for(i <- 0 to 5; k <- Seq(a, b)) {
|
||||
println( k.label + i + " = " + applied.get( Scope(i), k) )
|
||||
}*/
|
||||
|
||||
/**
|
||||
* Output:
|
||||
* For the None results, we never defined the value and there was no value to delegate to.
|
||||
* For a3, we explicitly defined it to be 3.
|
||||
* a4 wasn't defined, so it delegates to a3 according to our delegates function.
|
||||
* b4 gets the value for a4 (which delegates to a3, so it is 3) and multiplies by 3
|
||||
* a5 is defined as the previous value of a5 + 1 and
|
||||
* since no previous value of a5 was defined, it delegates to a4, resulting in 3+1=4.
|
||||
* b5 isn't defined explicitly, so it delegates to b4 and is therefore equal to 9 as well
|
||||
* a0 = None
|
||||
* b0 = None
|
||||
* a1 = None
|
||||
* b1 = None
|
||||
* a2 = None
|
||||
* b2 = None
|
||||
* a3 = Some(3)
|
||||
* b3 = None
|
||||
* a4 = Some(3)
|
||||
* b4 = Some(9)
|
||||
* a5 = Some(4)
|
||||
* b5 = Some(9)
|
||||
*/
|
||||
}
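// Added note (not in the original example): individual results can be queried from `applied`
// directly, e.g. (per the expected output above) applied.get(Scope(4), b) == Some(9) and
// applied.get(Scope(2), a) == None, assuming the definitions in SettingsUsage.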
@@ -5,175 +5,174 @@ import Prop._
|
|||
import SettingsUsage._
|
||||
import SettingsExample._
|
||||
|
||||
object SettingsTest extends Properties("settings")
|
||||
{
|
||||
object SettingsTest extends Properties("settings") {
|
||||
|
||||
import scala.reflect.Manifest
|
||||
import scala.reflect.Manifest
|
||||
|
||||
final val ChainMax = 5000
|
||||
lazy val chainLengthGen = Gen.choose(1, ChainMax)
|
||||
final val ChainMax = 5000
|
||||
lazy val chainLengthGen = Gen.choose(1, ChainMax)
|
||||
|
||||
property("Basic settings test") = secure( all( tests: _*) )
|
||||
property("Basic settings test") = secure(all(tests: _*))
|
||||
|
||||
property("Basic chain") = forAll(chainLengthGen) { (i: Int) =>
|
||||
val abs = math.abs(i)
|
||||
singleIntTest( chain( abs, value(0)), abs )
|
||||
}
|
||||
property("Basic bind chain") = forAll(chainLengthGen) { (i: Int) =>
|
||||
val abs = math.abs(i)
|
||||
singleIntTest( chainBind(value(abs)), 0 )
|
||||
}
|
||||
property("Basic chain") = forAll(chainLengthGen) { (i: Int) =>
|
||||
val abs = math.abs(i)
|
||||
singleIntTest(chain(abs, value(0)), abs)
|
||||
}
|
||||
property("Basic bind chain") = forAll(chainLengthGen) { (i: Int) =>
|
||||
val abs = math.abs(i)
|
||||
singleIntTest(chainBind(value(abs)), 0)
|
||||
}
|
||||
|
||||
property("Allows references to completed settings") = forAllNoShrink(30) { allowedReference }
|
||||
final def allowedReference(intermediate: Int): Prop =
|
||||
{
|
||||
val top = value(intermediate)
|
||||
def iterate(init: Initialize[Int]): Initialize[Int] =
|
||||
bind(init) { t =>
|
||||
if(t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(value(t-1) )
|
||||
}
|
||||
evaluate( setting(chk, iterate(top)) :: Nil); true
|
||||
}
|
||||
property("Allows references to completed settings") = forAllNoShrink(30) { allowedReference }
|
||||
final def allowedReference(intermediate: Int): Prop =
|
||||
{
|
||||
val top = value(intermediate)
|
||||
def iterate(init: Initialize[Int]): Initialize[Int] =
|
||||
bind(init) { t =>
|
||||
if (t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(value(t - 1))
|
||||
}
|
||||
evaluate(setting(chk, iterate(top)) :: Nil); true
|
||||
}
|
||||
|
||||
property("Derived setting chain depending on (prev derived, normal setting)") = forAllNoShrink(Gen.choose(1, 100)) { derivedSettings }
|
||||
final def derivedSettings(nr: Int): Prop =
|
||||
{
|
||||
val genScopedKeys = {
|
||||
val attrKeys = mkAttrKeys[Int](nr)
|
||||
attrKeys map (_ map (ak => ScopedKey(Scope(0), ak)))
|
||||
}
|
||||
forAll(genScopedKeys) { scopedKeys =>
|
||||
val last = scopedKeys.last
|
||||
val derivedSettings: Seq[Setting[Int]] = (
|
||||
for {
|
||||
List(scoped0, scoped1) <- chk :: scopedKeys sliding 2
|
||||
nextInit = if (scoped0 == chk) chk
|
||||
else (scoped0 zipWith chk) { (p, _) => p + 1 }
|
||||
} yield derive(setting(scoped1, nextInit))
|
||||
).toSeq
|
||||
property("Derived setting chain depending on (prev derived, normal setting)") = forAllNoShrink(Gen.choose(1, 100)) { derivedSettings }
|
||||
final def derivedSettings(nr: Int): Prop =
|
||||
{
|
||||
val genScopedKeys = {
|
||||
val attrKeys = mkAttrKeys[Int](nr)
|
||||
attrKeys map (_ map (ak => ScopedKey(Scope(0), ak)))
|
||||
}
|
||||
forAll(genScopedKeys) { scopedKeys =>
|
||||
val last = scopedKeys.last
|
||||
val derivedSettings: Seq[Setting[Int]] = (
|
||||
for {
|
||||
List(scoped0, scoped1) <- chk :: scopedKeys sliding 2
|
||||
nextInit = if (scoped0 == chk) chk
|
||||
else (scoped0 zipWith chk) { (p, _) => p + 1 }
|
||||
} yield derive(setting(scoped1, nextInit))
|
||||
).toSeq
|
||||
|
||||
{ checkKey(last, Some(nr-1), evaluate(setting(chk, value(0)) +: derivedSettings)) :| "Not derived?" } &&
|
||||
{ checkKey( last, None, evaluate(derivedSettings)) :| "Should not be derived" }
|
||||
}
|
||||
}
|
||||
{ checkKey(last, Some(nr - 1), evaluate(setting(chk, value(0)) +: derivedSettings)) :| "Not derived?" } &&
|
||||
{ checkKey(last, None, evaluate(derivedSettings)) :| "Should not be derived" }
|
||||
}
|
||||
}
|
||||
|
||||
private def mkAttrKeys[T](nr: Int)(implicit mf: Manifest[T]): Gen[List[AttributeKey[T]]] =
|
||||
{
|
||||
val alphaStr = Gen.alphaStr
|
||||
for {
|
||||
list <- Gen.listOfN(nr, alphaStr) suchThat (l => l.size == l.distinct.size)
|
||||
item <- list
|
||||
} yield AttributeKey[T](item)
|
||||
}
|
||||
private def mkAttrKeys[T](nr: Int)(implicit mf: Manifest[T]): Gen[List[AttributeKey[T]]] =
|
||||
{
|
||||
val alphaStr = Gen.alphaStr
|
||||
for {
|
||||
list <- Gen.listOfN(nr, alphaStr) suchThat (l => l.size == l.distinct.size)
|
||||
item <- list
|
||||
} yield AttributeKey[T](item)
|
||||
}
|
||||
|
||||
property("Derived setting(s) replace DerivedSetting in the Seq[Setting[_]]") = derivedKeepsPosition
|
||||
final def derivedKeepsPosition: Prop =
|
||||
{
|
||||
val a: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("a"))
|
||||
val b: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("b"))
|
||||
val prop1 = {
|
||||
val settings: Seq[Setting[_]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5)),
|
||||
setting(b, value(8))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(8), ev)
|
||||
}
|
||||
val prop2 = {
|
||||
val settings: Seq[Setting[Int]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(5), ev)
|
||||
}
|
||||
prop1 && prop2
|
||||
}
|
||||
property("Derived setting(s) replace DerivedSetting in the Seq[Setting[_]]") = derivedKeepsPosition
|
||||
final def derivedKeepsPosition: Prop =
|
||||
{
|
||||
val a: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("a"))
|
||||
val b: ScopedKey[Int] = ScopedKey(Scope(0), AttributeKey[Int]("b"))
|
||||
val prop1 = {
|
||||
val settings: Seq[Setting[_]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5)),
|
||||
setting(b, value(8))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(8), ev)
|
||||
}
|
||||
val prop2 = {
|
||||
val settings: Seq[Setting[Int]] = Seq(
|
||||
setting(a, value(3)),
|
||||
setting(b, value(6)),
|
||||
derive(setting(b, a)),
|
||||
setting(a, value(5))
|
||||
)
|
||||
val ev = evaluate(settings)
|
||||
checkKey(a, Some(5), ev) && checkKey(b, Some(5), ev)
|
||||
}
|
||||
prop1 && prop2
|
||||
}
|
||||
|
||||
property("DerivedSetting in ThisBuild scopes derived settings under projects thus allowing safe +=") = forAllNoShrink(Gen.choose(1, 100)) { derivedSettingsScope }
|
||||
final def derivedSettingsScope(nrProjects: Int): Prop =
|
||||
{
|
||||
forAll(mkAttrKeys[Int](2)) { case List(key, derivedKey) =>
|
||||
val projectKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), key)
|
||||
val projectDerivedKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), derivedKey)
|
||||
val globalKey = ScopedKey(Scope(0), key)
|
||||
val globalDerivedKey = ScopedKey(Scope(0), derivedKey)
|
||||
// Each project defines an initial value, but the update is defined in globalKey.
|
||||
// However, the derived Settings that come from this should be scoped in each project.
|
||||
val settings: Seq[Setting[_]] =
|
||||
derive(setting(globalDerivedKey, SettingsExample.map(globalKey)(_ + 1))) +: projectKeys.map(pk => setting(pk, value(0)))
|
||||
val ev = evaluate(settings)
|
||||
// Also check that the key has no value at the "global" scope
|
||||
val props = for { pk <- projectDerivedKeys } yield checkKey(pk, Some(1), ev)
|
||||
checkKey(globalDerivedKey, None, ev) && Prop.all(props: _*)
|
||||
}
|
||||
}
|
||||
property("DerivedSetting in ThisBuild scopes derived settings under projects thus allowing safe +=") = forAllNoShrink(Gen.choose(1, 100)) { derivedSettingsScope }
|
||||
final def derivedSettingsScope(nrProjects: Int): Prop =
|
||||
{
|
||||
forAll(mkAttrKeys[Int](2)) {
|
||||
case List(key, derivedKey) =>
|
||||
val projectKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), key)
|
||||
val projectDerivedKeys = for { proj <- 1 to nrProjects } yield ScopedKey(Scope(1, proj), derivedKey)
|
||||
val globalKey = ScopedKey(Scope(0), key)
|
||||
val globalDerivedKey = ScopedKey(Scope(0), derivedKey)
|
||||
// Each project defines an initial value, but the update is defined in globalKey.
|
||||
// However, the derived Settings that come from this should be scoped in each project.
|
||||
val settings: Seq[Setting[_]] =
|
||||
derive(setting(globalDerivedKey, SettingsExample.map(globalKey)(_ + 1))) +: projectKeys.map(pk => setting(pk, value(0)))
|
||||
val ev = evaluate(settings)
|
||||
// Also check that the key has no value at the "global" scope
|
||||
val props = for { pk <- projectDerivedKeys } yield checkKey(pk, Some(1), ev)
|
||||
checkKey(globalDerivedKey, None, ev) && Prop.all(props: _*)
|
||||
}
|
||||
}
|
||||
|
||||
// Circular (dynamic) references currently loop infinitely.
|
||||
// This is the expected behavior (detecting dynamic cycles is expensive),
|
||||
// but it may be necessary to provide an option to detect them (with a performance hit)
|
||||
// This would test that cycle detection.
|
||||
// property("Catches circular references") = forAll(chainLengthGen) { checkCircularReferences _ }
|
||||
final def checkCircularReferences(intermediate: Int): Prop =
|
||||
{
|
||||
val ccr = new CCR(intermediate)
|
||||
try { evaluate( setting(chk, ccr.top) :: Nil); false }
|
||||
catch { case e: java.lang.Exception => true }
|
||||
}
|
||||
// Circular (dynamic) references currently loop infinitely.
|
||||
// This is the expected behavior (detecting dynamic cycles is expensive),
|
||||
// but it may be necessary to provide an option to detect them (with a performance hit)
|
||||
// This would test that cycle detection.
|
||||
// property("Catches circular references") = forAll(chainLengthGen) { checkCircularReferences _ }
|
||||
final def checkCircularReferences(intermediate: Int): Prop =
|
||||
{
|
||||
val ccr = new CCR(intermediate)
|
||||
try { evaluate(setting(chk, ccr.top) :: Nil); false }
|
||||
catch { case e: java.lang.Exception => true }
|
||||
}
|
||||
|
||||
def tests =
|
||||
for(i <- 0 to 5; k <- Seq(a, b)) yield {
|
||||
val expected = expectedValues(2*i + (if(k == a) 0 else 1))
|
||||
checkKey[Int]( ScopedKey( Scope(i), k ), expected, applied)
|
||||
}
|
||||
def tests =
|
||||
for (i <- 0 to 5; k <- Seq(a, b)) yield {
|
||||
val expected = expectedValues(2 * i + (if (k == a) 0 else 1))
|
||||
checkKey[Int](ScopedKey(Scope(i), k), expected, applied)
|
||||
}
|
||||
|
||||
lazy val expectedValues = None :: None :: None :: None :: None :: None :: Some(3) :: None :: Some(3) :: Some(9) :: Some(4) :: Some(9) :: Nil
|
||||
lazy val expectedValues = None :: None :: None :: None :: None :: None :: Some(3) :: None :: Some(3) :: Some(9) :: Some(4) :: Some(9) :: Nil
|
||||
|
||||
lazy val ch = AttributeKey[Int]("ch")
|
||||
lazy val chk = ScopedKey( Scope(0), ch)
|
||||
def chain(i: Int, prev: Initialize[Int]): Initialize[Int] =
|
||||
if(i <= 0) prev else chain(i - 1, prev(_ + 1))
|
||||
lazy val ch = AttributeKey[Int]("ch")
|
||||
lazy val chk = ScopedKey(Scope(0), ch)
|
||||
def chain(i: Int, prev: Initialize[Int]): Initialize[Int] =
|
||||
if (i <= 0) prev else chain(i - 1, prev(_ + 1))
|
||||
|
||||
def chainBind(prev: Initialize[Int]): Initialize[Int] =
|
||||
bind(prev) { v =>
|
||||
if(v <= 0) prev else chainBind(value(v - 1) )
|
||||
}
|
||||
def singleIntTest(i: Initialize[Int], expected: Int) =
|
||||
{
|
||||
val eval = evaluate( setting( chk, i ) :: Nil )
|
||||
checkKey( chk, Some(expected), eval )
|
||||
}
|
||||
def chainBind(prev: Initialize[Int]): Initialize[Int] =
|
||||
bind(prev) { v =>
|
||||
if (v <= 0) prev else chainBind(value(v - 1))
|
||||
}
|
||||
def singleIntTest(i: Initialize[Int], expected: Int) =
|
||||
{
|
||||
val eval = evaluate(setting(chk, i) :: Nil)
|
||||
checkKey(chk, Some(expected), eval)
|
||||
}
|
||||
|
||||
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) =
|
||||
{
|
||||
val value = settings.get( key.scope, key.key)
|
||||
("Key: " + key) |:
|
||||
("Value: " + value) |:
|
||||
("Expected: " + expected) |:
|
||||
(value == expected)
|
||||
}
|
||||
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) =
|
||||
{
|
||||
val value = settings.get(key.scope, key.key)
|
||||
("Key: " + key) |:
|
||||
("Value: " + value) |:
|
||||
("Expected: " + expected) |:
|
||||
(value == expected)
|
||||
}
|
||||
|
||||
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
|
||||
try { make(settings)(delegates, scopeLocal, showFullKey) }
|
||||
catch { case e: Throwable => e.printStackTrace; throw e }
|
||||
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
|
||||
try { make(settings)(delegates, scopeLocal, showFullKey) }
|
||||
catch { case e: Throwable => e.printStackTrace; throw e }
|
||||
}
|
||||
// This setup is a workaround for module synchronization issues
|
||||
final class CCR(intermediate: Int)
|
||||
{
|
||||
lazy val top = iterate(value(intermediate), intermediate)
|
||||
def iterate(init: Initialize[Int], i: Int): Initialize[Int] =
|
||||
bind(init) { t =>
|
||||
if(t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(value(t - 1), t-1)
|
||||
}
|
||||
final class CCR(intermediate: Int) {
|
||||
lazy val top = iterate(value(intermediate), intermediate)
|
||||
def iterate(init: Initialize[Int], i: Int): Initialize[Int] =
|
||||
bind(init) { t =>
|
||||
if (t <= 0)
|
||||
top
|
||||
else
|
||||
iterate(value(t - 1), t - 1)
|
||||
}
|
||||
}
@@ -1,154 +1,148 @@
|
|||
package sbt.complete
|
||||
|
||||
object JLineTest
|
||||
{
|
||||
import DefaultParsers._
|
||||
object JLineTest {
|
||||
import DefaultParsers._
|
||||
|
||||
val one = "blue" | "green" | "black"
|
||||
val two = token("color" ~> Space) ~> token(one)
|
||||
val three = token("color" ~> Space) ~> token(ID.examples("blue", "green", "black"))
|
||||
val four = token("color" ~> Space) ~> token(ID, "<color name>")
|
||||
val one = "blue" | "green" | "black"
|
||||
val two = token("color" ~> Space) ~> token(one)
|
||||
val three = token("color" ~> Space) ~> token(ID.examples("blue", "green", "black"))
|
||||
val four = token("color" ~> Space) ~> token(ID, "<color name>")
|
||||
|
||||
val num = token(NatBasic)
|
||||
val five = (num ~ token("+" | "-") ~ num) <~ token('=') flatMap {
|
||||
case a ~ "+" ~ b => token((a+b).toString)
|
||||
case a ~ "-" ~ b => token((a-b).toString)
|
||||
}
|
||||
val num = token(NatBasic)
|
||||
val five = (num ~ token("+" | "-") ~ num) <~ token('=') flatMap {
|
||||
case a ~ "+" ~ b => token((a + b).toString)
|
||||
case a ~ "-" ~ b => token((a - b).toString)
|
||||
}
|
||||
|
||||
val parsers = Map("1" -> one, "2" -> two, "3" -> three, "4" -> four, "5" -> five)
|
||||
def main(args: Array[String])
|
||||
{
|
||||
import jline.TerminalFactory
|
||||
import jline.console.ConsoleReader
|
||||
val reader = new ConsoleReader()
|
||||
TerminalFactory.get.init
|
||||
val parsers = Map("1" -> one, "2" -> two, "3" -> three, "4" -> four, "5" -> five)
|
||||
def main(args: Array[String]) {
|
||||
import jline.TerminalFactory
|
||||
import jline.console.ConsoleReader
|
||||
val reader = new ConsoleReader()
|
||||
TerminalFactory.get.init
|
||||
|
||||
val parser = parsers(args(0))
|
||||
JLineCompletion.installCustomCompletor(reader, parser)
|
||||
def loop() {
|
||||
val line = reader.readLine("> ")
|
||||
if(line ne null) {
|
||||
println("Result: " + apply(parser)(line).resultEmpty)
|
||||
loop()
|
||||
}
|
||||
}
|
||||
loop()
|
||||
}
|
||||
val parser = parsers(args(0))
|
||||
JLineCompletion.installCustomCompletor(reader, parser)
|
||||
def loop() {
|
||||
val line = reader.readLine("> ")
|
||||
if (line ne null) {
|
||||
println("Result: " + apply(parser)(line).resultEmpty)
|
||||
loop()
|
||||
}
|
||||
}
|
||||
loop()
|
||||
}
|
||||
}
|
||||
|
||||
import Parser._
|
||||
import org.scalacheck._
|
||||
import Parser._
|
||||
import org.scalacheck._
|
||||
|
||||
object ParserTest extends Properties("Completing Parser")
|
||||
{
|
||||
import Parsers._
|
||||
import DefaultParsers.matches
|
||||
object ParserTest extends Properties("Completing Parser") {
|
||||
import Parsers._
|
||||
import DefaultParsers.matches
|
||||
|
||||
val nested = (token("a1") ~ token("b2")) ~ "c3"
|
||||
val nestedDisplay = (token("a1", "<a1>") ~ token("b2", "<b2>")) ~ "c3"
|
||||
val nested = (token("a1") ~ token("b2")) ~ "c3"
|
||||
val nestedDisplay = (token("a1", "<a1>") ~ token("b2", "<b2>")) ~ "c3"
|
||||
|
||||
val spacePort = (token(Space) ~> Port)
|
||||
val spacePort = (token(Space) ~> Port)
|
||||
|
||||
def p[T](f: T): T = { println(f); f }
|
||||
def p[T](f: T): T = { println(f); f }
|
||||
|
||||
def checkSingle(in: String, expect: Completion)(expectDisplay: Completion = expect) =
|
||||
( ("token '" + in + "'") |: checkOne(in, nested, expect)) &&
|
||||
( ("display '" + in + "'") |: checkOne(in, nestedDisplay, expectDisplay) )
|
||||
|
||||
def checkOne(in: String, parser: Parser[_], expect: Completion): Prop =
|
||||
completions(parser, in, 1) == Completions.single(expect)
|
||||
def checkSingle(in: String, expect: Completion)(expectDisplay: Completion = expect) =
|
||||
(("token '" + in + "'") |: checkOne(in, nested, expect)) &&
|
||||
(("display '" + in + "'") |: checkOne(in, nestedDisplay, expectDisplay))
|
||||
|
||||
def checkAll(in: String, parser: Parser[_], expect: Completions): Prop =
|
||||
{
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: ("Expected: " + expect) |: ( (cs == expect): Prop)
|
||||
}
|
||||
|
||||
def checkInvalid(in: String) =
|
||||
( ("token '" + in + "'") |: checkInv(in, nested) ) &&
|
||||
( ("display '" + in + "'") |: checkInv(in, nestedDisplay) )
|
||||
def checkOne(in: String, parser: Parser[_], expect: Completion): Prop =
|
||||
completions(parser, in, 1) == Completions.single(expect)
|
||||
|
||||
def checkInv(in: String, parser: Parser[_]): Prop =
|
||||
{
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: (( cs == Completions.nil): Prop)
|
||||
}
|
||||
|
||||
property("nested tokens a") = checkSingle("", Completion.tokenStrict("","a1") )( Completion.displayStrict("<a1>"))
|
||||
property("nested tokens a1") = checkSingle("a", Completion.tokenStrict("a","1") )( Completion.displayStrict("<a1>"))
|
||||
property("nested tokens a inv") = checkInvalid("b")
|
||||
property("nested tokens b") = checkSingle("a1", Completion.tokenStrict("","b2") )( Completion.displayStrict("<b2>"))
|
||||
property("nested tokens b2") = checkSingle("a1b", Completion.tokenStrict("b","2") )( Completion.displayStrict("<b2>"))
|
||||
property("nested tokens b inv") = checkInvalid("a1a")
|
||||
property("nested tokens c") = checkSingle("a1b2", Completion.suggestStrict("c3") )()
|
||||
property("nested tokens c3") = checkSingle("a1b2c", Completion.suggestStrict("3"))()
|
||||
property("nested tokens c inv") = checkInvalid("a1b2a")
|
||||
def checkAll(in: String, parser: Parser[_], expect: Completions): Prop =
|
||||
{
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: ("Expected: " + expect) |: ((cs == expect): Prop)
|
||||
}
|
||||
|
||||
property("suggest space") = checkOne("", spacePort, Completion.tokenStrict("", " "))
|
||||
property("suggest port") = checkOne(" ", spacePort, Completion.displayStrict("<port>") )
|
||||
property("no suggest at end") = checkOne("asdf", "asdf", Completion.suggestStrict(""))
|
||||
property("no suggest at token end") = checkOne("asdf", token("asdf"), Completion.suggestStrict(""))
|
||||
property("empty suggest for examples") = checkOne("asdf", any.+.examples("asdf", "qwer"), Completion.suggestStrict(""))
|
||||
property("empty suggest for examples token") = checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestStrict(""))
|
||||
def checkInvalid(in: String) =
|
||||
(("token '" + in + "'") |: checkInv(in, nested)) &&
|
||||
(("display '" + in + "'") |: checkInv(in, nestedDisplay))
|
||||
|
||||
val colors = Set("blue", "green", "red")
|
||||
val base = (seen: Seq[String]) => token( ID examples (colors -- seen) )
|
||||
val sep = token( Space )
|
||||
val repeat = repeatDep( base, sep)
|
||||
def completionStrings(ss: Set[String]): Completions = Completions(ss.map { s => Completion.tokenStrict("", s) })
|
||||
def checkInv(in: String, parser: Parser[_]): Prop =
|
||||
{
|
||||
val cs = completions(parser, in, 1)
|
||||
("completions: " + cs) |: ((cs == Completions.nil): Prop)
|
||||
}
|
||||
|
||||
property("repeatDep no suggestions for bad input") = checkInv(".", repeat)
|
||||
property("repeatDep suggest all") = checkAll("", repeat, completionStrings(colors))
|
||||
property("repeatDep suggest remaining two") = {
|
||||
val first = colors.toSeq.head
|
||||
checkAll(first + " ", repeat, completionStrings(colors - first))
|
||||
}
|
||||
property("repeatDep suggest remaining one") = {
|
||||
val take = colors.toSeq.take(2)
|
||||
checkAll(take.mkString("", " ", " "), repeat, completionStrings(colors -- take))
|
||||
}
|
||||
property("repeatDep requires at least one token") = !matches(repeat, "")
|
||||
property("repeatDep accepts one token") = matches(repeat, colors.toSeq.head)
|
||||
property("repeatDep accepts two tokens") = matches(repeat, colors.toSeq.take(2).mkString(" "))
|
||||
property("nested tokens a") = checkSingle("", Completion.tokenStrict("", "a1"))(Completion.displayStrict("<a1>"))
|
||||
property("nested tokens a1") = checkSingle("a", Completion.tokenStrict("a", "1"))(Completion.displayStrict("<a1>"))
|
||||
property("nested tokens a inv") = checkInvalid("b")
|
||||
property("nested tokens b") = checkSingle("a1", Completion.tokenStrict("", "b2"))(Completion.displayStrict("<b2>"))
|
||||
property("nested tokens b2") = checkSingle("a1b", Completion.tokenStrict("b", "2"))(Completion.displayStrict("<b2>"))
|
||||
property("nested tokens b inv") = checkInvalid("a1a")
|
||||
property("nested tokens c") = checkSingle("a1b2", Completion.suggestStrict("c3"))()
|
||||
property("nested tokens c3") = checkSingle("a1b2c", Completion.suggestStrict("3"))()
|
||||
property("nested tokens c inv") = checkInvalid("a1b2a")
|
||||
|
||||
property("suggest space") = checkOne("", spacePort, Completion.tokenStrict("", " "))
|
||||
property("suggest port") = checkOne(" ", spacePort, Completion.displayStrict("<port>"))
|
||||
property("no suggest at end") = checkOne("asdf", "asdf", Completion.suggestStrict(""))
|
||||
property("no suggest at token end") = checkOne("asdf", token("asdf"), Completion.suggestStrict(""))
|
||||
property("empty suggest for examples") = checkOne("asdf", any.+.examples("asdf", "qwer"), Completion.suggestStrict(""))
|
||||
property("empty suggest for examples token") = checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestStrict(""))
|
||||
|
||||
val colors = Set("blue", "green", "red")
|
||||
val base = (seen: Seq[String]) => token(ID examples (colors -- seen))
|
||||
val sep = token(Space)
|
||||
val repeat = repeatDep(base, sep)
|
||||
def completionStrings(ss: Set[String]): Completions = Completions(ss.map { s => Completion.tokenStrict("", s) })
|
||||
|
||||
property("repeatDep no suggestions for bad input") = checkInv(".", repeat)
|
||||
property("repeatDep suggest all") = checkAll("", repeat, completionStrings(colors))
|
||||
property("repeatDep suggest remaining two") = {
|
||||
val first = colors.toSeq.head
|
||||
checkAll(first + " ", repeat, completionStrings(colors - first))
|
||||
}
|
||||
property("repeatDep suggest remaining one") = {
|
||||
val take = colors.toSeq.take(2)
|
||||
checkAll(take.mkString("", " ", " "), repeat, completionStrings(colors -- take))
|
||||
}
|
||||
property("repeatDep requires at least one token") = !matches(repeat, "")
|
||||
property("repeatDep accepts one token") = matches(repeat, colors.toSeq.head)
|
||||
property("repeatDep accepts two tokens") = matches(repeat, colors.toSeq.take(2).mkString(" "))
|
||||
}
|
||||
object ParserExample
|
||||
{
|
||||
val ws = charClass(_.isWhitespace)+
|
||||
val notws = charClass(!_.isWhitespace)+
|
||||
object ParserExample {
|
||||
val ws = charClass(_.isWhitespace)+
|
||||
val notws = charClass(!_.isWhitespace)+
|
||||
|
||||
val name = token("test")
|
||||
val options = (ws ~> token("quick" | "failed" | "new") )*
|
||||
val exampleSet = Set("am", "is", "are", "was", "were")
|
||||
val include = (ws ~> token(examples(notws.string, new FixedSetExamples(exampleSet), exampleSet.size, false )) )*
|
||||
val name = token("test")
|
||||
val options = (ws ~> token("quick" | "failed" | "new"))*
|
||||
val exampleSet = Set("am", "is", "are", "was", "were")
|
||||
val include = (ws ~> token(examples(notws.string, new FixedSetExamples(exampleSet), exampleSet.size, false)))*
|
||||
|
||||
val t = name ~ options ~ include
|
||||
val t = name ~ options ~ include
|
||||
|
||||
// Get completions for some different inputs
|
||||
println(completions(t, "te", 1))
|
||||
println(completions(t, "test ",1))
|
||||
println(completions(t, "test w", 1))
|
||||
// Get completions for some different inputs
|
||||
println(completions(t, "te", 1))
|
||||
println(completions(t, "test ", 1))
|
||||
println(completions(t, "test w", 1))
|
||||
|
||||
// Get the parsed result for different inputs
|
||||
println(apply(t)("te").resultEmpty)
|
||||
println(apply(t)("test").resultEmpty)
|
||||
println(apply(t)("test w").resultEmpty)
|
||||
println(apply(t)("test was were").resultEmpty)
|
||||
// Get the parsed result for different inputs
|
||||
println(apply(t)("te").resultEmpty)
|
||||
println(apply(t)("test").resultEmpty)
|
||||
println(apply(t)("test w").resultEmpty)
|
||||
println(apply(t)("test was were").resultEmpty)
|
||||
|
||||
def run(n: Int)
|
||||
{
|
||||
val a = 'a'.id
|
||||
val aq = a.?
|
||||
val aqn = repeat(aq, min = n, max = n)
|
||||
val an = repeat(a, min = n, max = n)
|
||||
val ann = aqn ~ an
|
||||
def run(n: Int) {
|
||||
val a = 'a'.id
|
||||
val aq = a.?
|
||||
val aqn = repeat(aq, min = n, max = n)
|
||||
val an = repeat(a, min = n, max = n)
|
||||
val ann = aqn ~ an
|
||||
|
||||
def r = apply(ann)("a"*(n*2)).resultEmpty
|
||||
println(r.isValid)
|
||||
}
|
||||
def run2(n: Int)
|
||||
{
|
||||
val ab = "ab".?.*
|
||||
val r = apply(ab)("a"*n).resultEmpty
|
||||
println(r)
|
||||
}
|
||||
def r = apply(ann)("a" * (n * 2)).resultEmpty
|
||||
println(r.isValid)
|
||||
}
|
||||
def run2(n: Int) {
|
||||
val ab = "ab".?.*
|
||||
val r = apply(ab)("a" * n).resultEmpty
|
||||
println(r)
|
||||
}
|
||||
}
@@ -6,87 +6,85 @@ import sbt.IO.withTemporaryDirectory
|
|||
import java.io.File
|
||||
import sbt.IO._
|
||||
|
||||
class FileExamplesTest extends Specification
|
||||
{
|
||||
class FileExamplesTest extends Specification {
|
||||
|
||||
"listing all files in an absolute base directory" should {
|
||||
"produce the entire base directory's contents" in new directoryStructure {
|
||||
fileExamples().toList should containTheSameElementsAs(allRelativizedPaths)
|
||||
}
|
||||
}
|
||||
"listing all files in an absolute base directory" should {
|
||||
"produce the entire base directory's contents" in new directoryStructure {
|
||||
fileExamples().toList should containTheSameElementsAs(allRelativizedPaths)
|
||||
}
|
||||
}
|
||||
|
||||
"listing files with a prefix that matches none" should {
|
||||
"produce an empty list" in new directoryStructure(withCompletionPrefix = "z") {
|
||||
fileExamples().toList should beEmpty
|
||||
}
|
||||
}
|
||||
"listing files with a prefix that matches none" should {
|
||||
"produce an empty list" in new directoryStructure(withCompletionPrefix = "z") {
|
||||
fileExamples().toList should beEmpty
|
||||
}
|
||||
}
|
||||
|
||||
"listing single-character prefixed files" should {
|
||||
"produce matching paths only" in new directoryStructure(withCompletionPrefix = "f") {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
"listing single-character prefixed files" should {
|
||||
"produce matching paths only" in new directoryStructure(withCompletionPrefix = "f") {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
|
||||
"listing directory-prefixed files" should {
|
||||
"produce matching paths only" in new directoryStructure(withCompletionPrefix = "far") {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
"listing directory-prefixed files" should {
|
||||
"produce matching paths only" in new directoryStructure(withCompletionPrefix = "far") {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
|
||||
"produce sub-dir contents only when appending a file separator to the directory" in new directoryStructure(withCompletionPrefix = "far" + File.separator) {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
"produce sub-dir contents only when appending a file separator to the directory" in new directoryStructure(withCompletionPrefix = "far" + File.separator) {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
|
||||
"listing files with a sub-path prefix" should {
|
||||
"produce matching paths only" in new directoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
"listing files with a sub-path prefix" should {
|
||||
"produce matching paths only" in new directoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
|
||||
fileExamples().toList should containTheSameElementsAs(prefixedPathsOnly)
|
||||
}
|
||||
}
|
||||
|
||||
"completing a full path" should {
|
||||
"produce a list with an empty string" in new directoryStructure(withCompletionPrefix = "bazaar") {
|
||||
fileExamples().toList shouldEqual List("")
|
||||
}
|
||||
}
|
||||
"completing a full path" should {
|
||||
"produce a list with an empty string" in new directoryStructure(withCompletionPrefix = "bazaar") {
|
||||
fileExamples().toList shouldEqual List("")
|
||||
}
|
||||
}
|
||||
|
||||
class directoryStructure(withCompletionPrefix: String = "") extends Scope with DelayedInit
|
||||
{
|
||||
var fileExamples: FileExamples = _
|
||||
var baseDir: File = _
|
||||
var childFiles: List[File] = _
|
||||
var childDirectories: List[File] = _
|
||||
var nestedFiles: List[File] = _
|
||||
var nestedDirectories: List[File] = _
|
||||
class directoryStructure(withCompletionPrefix: String = "") extends Scope with DelayedInit {
|
||||
var fileExamples: FileExamples = _
|
||||
var baseDir: File = _
|
||||
var childFiles: List[File] = _
|
||||
var childDirectories: List[File] = _
|
||||
var nestedFiles: List[File] = _
|
||||
var nestedDirectories: List[File] = _
|
||||
|
||||
def allRelativizedPaths: List[String] =
|
||||
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories).map(relativize(baseDir, _).get)
|
||||
def allRelativizedPaths: List[String] =
|
||||
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories).map(relativize(baseDir, _).get)
|
||||
|
||||
def prefixedPathsOnly: List[String] =
|
||||
allRelativizedPaths.filter(_ startsWith withCompletionPrefix).map(_ substring withCompletionPrefix.length)
|
||||
def prefixedPathsOnly: List[String] =
|
||||
allRelativizedPaths.filter(_ startsWith withCompletionPrefix).map(_ substring withCompletionPrefix.length)
|
||||
|
||||
override def delayedInit(testBody: => Unit): Unit = {
|
||||
withTemporaryDirectory {
|
||||
tempDir =>
|
||||
createSampleDirStructure(tempDir)
|
||||
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
|
||||
testBody
|
||||
}
|
||||
}
|
||||
override def delayedInit(testBody: => Unit): Unit = {
|
||||
withTemporaryDirectory {
|
||||
tempDir =>
|
||||
createSampleDirStructure(tempDir)
|
||||
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
|
||||
testBody
|
||||
}
|
||||
}
|
||||
|
||||
private def createSampleDirStructure(tempDir: File): Unit = {
|
||||
childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar"))
|
||||
childDirectories = toChildFiles(tempDir, List("moo", "far"))
|
||||
nestedFiles = toChildFiles(childDirectories(1), List("farfile1", "barfile2"))
|
||||
nestedDirectories = toChildFiles(childDirectories(1), List("fardir1", "bardir2"))
|
||||
private def createSampleDirStructure(tempDir: File): Unit = {
|
||||
childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar"))
|
||||
childDirectories = toChildFiles(tempDir, List("moo", "far"))
|
||||
nestedFiles = toChildFiles(childDirectories(1), List("farfile1", "barfile2"))
|
||||
nestedDirectories = toChildFiles(childDirectories(1), List("fardir1", "bardir2"))
|
||||
|
||||
(childDirectories ++ nestedDirectories).map(_.mkdirs())
|
||||
(childFiles ++ nestedFiles).map(_.createNewFile())
|
||||
(childDirectories ++ nestedDirectories).map(_.mkdirs())
|
||||
(childFiles ++ nestedFiles).map(_.createNewFile())
|
||||
|
||||
// NOTE: Creating a new file here because `tempDir.listFiles()` returned an empty list.
|
||||
baseDir = new File(tempDir.getCanonicalPath)
|
||||
}
|
||||
// NOTE: Creating a new file here because `tempDir.listFiles()` returned an empty list.
|
||||
baseDir = new File(tempDir.getCanonicalPath)
|
||||
}
|
||||
|
||||
private def toChildFiles(baseDir: File, files: List[String]): List[File] = files.map(new File(baseDir, _))
|
||||
}
|
||||
private def toChildFiles(baseDir: File, files: List[String]): List[File] = files.map(new File(baseDir, _))
|
||||
}
|
||||
|
||||
}
|
||||
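The directoryStructure fixture above pins the intended behaviour down: completions are the paths under the base directory relativized against it, filtered by the typed prefix, with that prefix stripped from each match. A minimal standalone sketch of that contract, using only the standard library (the object and method names are illustrative, not part of FileExamples):

import java.io.File

object PrefixCompletionSketch {
  // Recursively list everything under baseDir, relativize by stripping the base path,
  // keep the paths that start with the typed prefix, and return the remaining suffixes.
  def completions(baseDir: File, prefix: String): List[String] = {
    def walk(f: File): List[File] =
      Option(f.listFiles).map(_.toList).getOrElse(Nil).flatMap(c => c :: walk(c))
    val basePath = baseDir.getCanonicalPath + File.separator
    walk(baseDir)
      .map(_.getCanonicalPath.stripPrefix(basePath))
      .filter(_.startsWith(prefix))
      .map(_.substring(prefix.length))
  }
}

Against the sample structure created above, completions(baseDir, "far" + File.separator) would keep only the entries under far/, mirroring the prefixedPathsOnly helper.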
|
|
|
|||
|
|
@@ -5,22 +5,22 @@ import org.specs2.specification.Scope
|
|||
|
||||
class FixedSetExamplesTest extends Specification {
|
||||
|
||||
"adding a prefix" should {
|
||||
"produce a smaller set of examples with the prefix removed" in new examples {
|
||||
fixedSetExamples.withAddedPrefix("f")() must containTheSameElementsAs(List("oo", "ool", "u"))
|
||||
fixedSetExamples.withAddedPrefix("fo")() must containTheSameElementsAs(List("o", "ol"))
|
||||
fixedSetExamples.withAddedPrefix("b")() must containTheSameElementsAs(List("ar"))
|
||||
}
|
||||
}
|
||||
"adding a prefix" should {
|
||||
"produce a smaller set of examples with the prefix removed" in new examples {
|
||||
fixedSetExamples.withAddedPrefix("f")() must containTheSameElementsAs(List("oo", "ool", "u"))
|
||||
fixedSetExamples.withAddedPrefix("fo")() must containTheSameElementsAs(List("o", "ol"))
|
||||
fixedSetExamples.withAddedPrefix("b")() must containTheSameElementsAs(List("ar"))
|
||||
}
|
||||
}
|
||||
|
||||
"without a prefix" should {
|
||||
"produce the original set" in new examples {
|
||||
fixedSetExamples() mustEqual exampleSet
|
||||
}
|
||||
}
|
||||
"without a prefix" should {
|
||||
"produce the original set" in new examples {
|
||||
fixedSetExamples() mustEqual exampleSet
|
||||
}
|
||||
}
|
||||
|
||||
trait examples extends Scope {
|
||||
val exampleSet = List("foo", "bar", "fool", "fu")
|
||||
val fixedSetExamples = FixedSetExamples(exampleSet)
|
||||
}
|
||||
trait examples extends Scope {
|
||||
val exampleSet = List("foo", "bar", "fool", "fu")
|
||||
val fixedSetExamples = FixedSetExamples(exampleSet)
|
||||
}
|
||||
}
|
||||
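The two properties above describe FixedSetExamples.withAddedPrefix as filtering the fixed example set by a prefix and stripping that prefix from the survivors. A one-function sketch of that contract (standalone and assumed, not the FixedSetExamples implementation itself):

object WithAddedPrefixSketch {
  // e.g. Seq("foo", "bar", "fool", "fu") with prefix "f" becomes Seq("oo", "ool", "u")
  def withAddedPrefix(examples: Seq[String], prefix: String): Seq[String] =
    examples.filter(_.startsWith(prefix)).map(_.drop(prefix.length))
}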
|
|
|
|||
|
|
@@ -6,88 +6,88 @@ import Completion._
|
|||
|
||||
class ParserWithExamplesTest extends Specification {
|
||||
|
||||
"listing a limited number of completions" should {
|
||||
"grab only the needed number of elements from the iterable source of examples" in new parserWithLazyExamples {
|
||||
parserWithExamples.completions(0)
|
||||
examples.size shouldEqual maxNumberOfExamples
|
||||
}
|
||||
}
|
||||
"listing a limited number of completions" should {
|
||||
"grab only the needed number of elements from the iterable source of examples" in new parserWithLazyExamples {
|
||||
parserWithExamples.completions(0)
|
||||
examples.size shouldEqual maxNumberOfExamples
|
||||
}
|
||||
}
|
||||
|
||||
"listing only valid completions" should {
|
||||
"use the delegate parser to remove invalid examples" in new parserWithValidExamples {
|
||||
val validCompletions = Completions(Set(
|
||||
suggestion("blue"),
|
||||
suggestion("red")
|
||||
))
|
||||
parserWithExamples.completions(0) shouldEqual validCompletions
|
||||
}
|
||||
}
|
||||
"listing only valid completions" should {
|
||||
"use the delegate parser to remove invalid examples" in new parserWithValidExamples {
|
||||
val validCompletions = Completions(Set(
|
||||
suggestion("blue"),
|
||||
suggestion("red")
|
||||
))
|
||||
parserWithExamples.completions(0) shouldEqual validCompletions
|
||||
}
|
||||
}
|
||||
|
||||
"listing valid completions in a derived parser" should {
|
||||
"produce only valid examples that start with the character of the derivation" in new parserWithValidExamples {
|
||||
val derivedCompletions = Completions(Set(
|
||||
suggestion("lue")
|
||||
))
|
||||
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
|
||||
}
|
||||
}
|
||||
"listing valid completions in a derived parser" should {
|
||||
"produce only valid examples that start with the character of the derivation" in new parserWithValidExamples {
|
||||
val derivedCompletions = Completions(Set(
|
||||
suggestion("lue")
|
||||
))
|
||||
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
|
||||
}
|
||||
}
|
||||
|
||||
"listing valid and invalid completions" should {
|
||||
"produce the entire source of examples" in new parserWithAllExamples {
|
||||
val completions = Completions(examples.map(suggestion(_)).toSet)
|
||||
parserWithExamples.completions(0) shouldEqual completions
|
||||
}
|
||||
}
|
||||
"listing valid and invalid completions" should {
|
||||
"produce the entire source of examples" in new parserWithAllExamples {
|
||||
val completions = Completions(examples.map(suggestion(_)).toSet)
|
||||
parserWithExamples.completions(0) shouldEqual completions
|
||||
}
|
||||
}
|
||||
|
||||
"listing valid and invalid completions in a derived parser" should {
|
||||
"produce only examples that start with the character of the derivation" in new parserWithAllExamples {
|
||||
val derivedCompletions = Completions(Set(
|
||||
suggestion("lue"),
|
||||
suggestion("lock")
|
||||
))
|
||||
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
|
||||
}
|
||||
}
|
||||
"listing valid and invalid completions in a derived parser" should {
|
||||
"produce only examples that start with the character of the derivation" in new parserWithAllExamples {
|
||||
val derivedCompletions = Completions(Set(
|
||||
suggestion("lue"),
|
||||
suggestion("lock")
|
||||
))
|
||||
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
|
||||
}
|
||||
}
|
||||
|
||||
class parserWithLazyExamples extends parser(GrowableSourceOfExamples(), maxNumberOfExamples = 5, removeInvalidExamples = false)
|
||||
class parserWithLazyExamples extends parser(GrowableSourceOfExamples(), maxNumberOfExamples = 5, removeInvalidExamples = false)
|
||||
|
||||
class parserWithValidExamples extends parser(removeInvalidExamples = true)
|
||||
class parserWithValidExamples extends parser(removeInvalidExamples = true)
|
||||
|
||||
class parserWithAllExamples extends parser(removeInvalidExamples = false)
|
||||
class parserWithAllExamples extends parser(removeInvalidExamples = false)
|
||||
|
||||
case class parser(examples: Iterable[String] = Set("blue", "yellow", "greeen", "block", "red"),
|
||||
maxNumberOfExamples: Int = 25,
|
||||
removeInvalidExamples: Boolean) extends Scope {
|
||||
case class parser(examples: Iterable[String] = Set("blue", "yellow", "greeen", "block", "red"),
|
||||
maxNumberOfExamples: Int = 25,
|
||||
removeInvalidExamples: Boolean) extends Scope {
|
||||
|
||||
import DefaultParsers._
|
||||
import DefaultParsers._
|
||||
|
||||
val colorParser = "blue" | "green" | "black" | "red"
|
||||
val parserWithExamples: Parser[String] = new ParserWithExamples[String](
|
||||
colorParser,
|
||||
FixedSetExamples(examples),
|
||||
maxNumberOfExamples,
|
||||
removeInvalidExamples
|
||||
)
|
||||
}
|
||||
val colorParser = "blue" | "green" | "black" | "red"
|
||||
val parserWithExamples: Parser[String] = new ParserWithExamples[String](
|
||||
colorParser,
|
||||
FixedSetExamples(examples),
|
||||
maxNumberOfExamples,
|
||||
removeInvalidExamples
|
||||
)
|
||||
}
|
||||
|
||||
case class GrowableSourceOfExamples() extends Iterable[String] {
|
||||
private var numberOfIteratedElements: Int = 0
|
||||
case class GrowableSourceOfExamples() extends Iterable[String] {
|
||||
private var numberOfIteratedElements: Int = 0
|
||||
|
||||
override def iterator: Iterator[String] = {
|
||||
new Iterator[String] {
|
||||
var currentElement = 0
|
||||
override def iterator: Iterator[String] = {
|
||||
new Iterator[String] {
|
||||
var currentElement = 0
|
||||
|
||||
override def next(): String = {
|
||||
currentElement += 1
|
||||
numberOfIteratedElements = Math.max(currentElement, numberOfIteratedElements)
|
||||
numberOfIteratedElements.toString
|
||||
}
|
||||
override def next(): String = {
|
||||
currentElement += 1
|
||||
numberOfIteratedElements = Math.max(currentElement, numberOfIteratedElements)
|
||||
numberOfIteratedElements.toString
|
||||
}
|
||||
|
||||
override def hasNext: Boolean = true
|
||||
}
|
||||
}
|
||||
override def hasNext: Boolean = true
|
||||
}
|
||||
}
|
||||
|
||||
override def size: Int = numberOfIteratedElements
|
||||
}
|
||||
override def size: Int = numberOfIteratedElements
|
||||
}
|
||||
|
||||
}
|
||||
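The parserWithLazyExamples case above asserts that asking for completions forces only maxNumberOfExamples elements of the example source. A self-contained sketch of the same laziness property, using only the standard library (names and counts are illustrative):

object LazyTakeSketch extends App {
  var produced = 0
  // An unbounded source that records how many elements were actually generated.
  val source = Iterator.continually { produced += 1; produced.toString }
  val shown = source.take(5).toList
  println(shown)    // List(1, 2, 3, 4, 5)
  println(produced) // 5: only the taken elements were ever produced
}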
|
|
|
|||
|
|
@@ -6,65 +6,64 @@ import org.scalacheck._
|
|||
import Prop._
|
||||
import Arbitrary.arbLong
|
||||
|
||||
object CopySpec extends Properties("Copy")
|
||||
{
|
||||
// set to 0.25 GB by default for success on most systems without running out of space.
|
||||
// when modifying IO.copyFile, verify against 1 GB or higher, preferably > 4 GB
|
||||
final val MaxFileSizeBits = 28
|
||||
final val BufferSize = 1*1024*1024
|
||||
object CopySpec extends Properties("Copy") {
|
||||
// set to 0.25 GB by default for success on most systems without running out of space.
|
||||
// when modifying IO.copyFile, verify against 1 GB or higher, preferably > 4 GB
|
||||
final val MaxFileSizeBits = 28
|
||||
final val BufferSize = 1 * 1024 * 1024
|
||||
|
||||
val randomSize = Gen.choose(0, MaxFileSizeBits).map( 1L << _ )
|
||||
val pow2Size = (0 to (MaxFileSizeBits - 1)).toList.map( 1L << _ )
|
||||
val derivedSize = pow2Size.map(_ - 1) ::: pow2Size.map(_ + 1) ::: pow2Size
|
||||
val randomSize = Gen.choose(0, MaxFileSizeBits).map(1L << _)
|
||||
val pow2Size = (0 to (MaxFileSizeBits - 1)).toList.map(1L << _)
|
||||
val derivedSize = pow2Size.map(_ - 1) ::: pow2Size.map(_ + 1) ::: pow2Size
|
||||
|
||||
val fileSizeGen: Gen[Long] =
|
||||
Gen.frequency(
|
||||
80 -> Gen.oneOf(derivedSize),
|
||||
8 -> randomSize,
|
||||
1 -> Gen.value(0)
|
||||
)
|
||||
val fileSizeGen: Gen[Long] =
|
||||
Gen.frequency(
|
||||
80 -> Gen.oneOf(derivedSize),
|
||||
8 -> randomSize,
|
||||
1 -> Gen.value(0)
|
||||
)
|
||||
|
||||
property("same contents") = forAll(fileSizeGen, arbLong.arbitrary) { (size: Long, seed: Long) =>
|
||||
IO.withTemporaryDirectory { dir =>
|
||||
val f1 = new File(dir, "source")
|
||||
val f2 = new File(dir, "dest")
|
||||
generate(seed = seed, size = size, file = f1)
|
||||
IO.copyFile(f1, f2)
|
||||
checkContentsSame(f1, f2)
|
||||
true
|
||||
}
|
||||
}
|
||||
property("same contents") = forAll(fileSizeGen, arbLong.arbitrary) { (size: Long, seed: Long) =>
|
||||
IO.withTemporaryDirectory { dir =>
|
||||
val f1 = new File(dir, "source")
|
||||
val f2 = new File(dir, "dest")
|
||||
generate(seed = seed, size = size, file = f1)
|
||||
IO.copyFile(f1, f2)
|
||||
checkContentsSame(f1, f2)
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
def generate(seed: Long, size: Long, file: File) {
|
||||
val rnd = new java.util.Random(seed)
|
||||
|
||||
val buffer = new Array[Byte](BufferSize)
|
||||
def loop(offset: Long) {
|
||||
val len = math.min(size - offset, BufferSize)
|
||||
if(len > 0) {
|
||||
rnd.nextBytes(buffer)
|
||||
IO.append(file, buffer)
|
||||
loop(offset + len)
|
||||
}
|
||||
}
|
||||
if(size == 0L) IO.touch(file) else loop(0)
|
||||
}
|
||||
def checkContentsSame(f1: File, f2: File) {
|
||||
val len = f1.length
|
||||
assert(len == f2.length, "File lengths differ: " + (len, f2.length).toString + " for " + (f1, f2).toString)
|
||||
Using.fileInputStream(f1) { in1 =>
|
||||
Using.fileInputStream(f2) { in2 =>
|
||||
val buffer1 = new Array[Byte](BufferSize)
|
||||
val buffer2 = new Array[Byte](BufferSize)
|
||||
def loop(offset: Long): Unit = if(offset < len) {
|
||||
val read1 = in1.read(buffer1)
|
||||
val read2 = in2.read(buffer2)
|
||||
assert(read1 == read2, "Read " + (read1, read2).toString + " bytes from " + (f1, f2).toString)
|
||||
assert(Arrays.equals(buffer1, buffer2), "Contents differed.")
|
||||
loop(offset + read1)
|
||||
}
|
||||
loop(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
def generate(seed: Long, size: Long, file: File) {
|
||||
val rnd = new java.util.Random(seed)
|
||||
|
||||
val buffer = new Array[Byte](BufferSize)
|
||||
def loop(offset: Long) {
|
||||
val len = math.min(size - offset, BufferSize)
|
||||
if (len > 0) {
|
||||
rnd.nextBytes(buffer)
|
||||
IO.append(file, buffer)
|
||||
loop(offset + len)
|
||||
}
|
||||
}
|
||||
if (size == 0L) IO.touch(file) else loop(0)
|
||||
}
|
||||
def checkContentsSame(f1: File, f2: File) {
|
||||
val len = f1.length
|
||||
assert(len == f2.length, "File lengths differ: " + (len, f2.length).toString + " for " + (f1, f2).toString)
|
||||
Using.fileInputStream(f1) { in1 =>
|
||||
Using.fileInputStream(f2) { in2 =>
|
||||
val buffer1 = new Array[Byte](BufferSize)
|
||||
val buffer2 = new Array[Byte](BufferSize)
|
||||
def loop(offset: Long): Unit = if (offset < len) {
|
||||
val read1 = in1.read(buffer1)
|
||||
val read2 = in2.read(buffer2)
|
||||
assert(read1 == read2, "Read " + (read1, read2).toString + " bytes from " + (f1, f2).toString)
|
||||
assert(Arrays.equals(buffer1, buffer2), "Contents differed.")
|
||||
loop(offset + read1)
|
||||
}
|
||||
loop(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
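For reference, the arithmetic behind the constants above (a reading aid, not part of the commit): the largest generated file is 1L << MaxFileSizeBits bytes and the copy buffer is 1 MiB, so derivedSize's pow2 ± 1 values land exactly on, just before, and just after a buffer boundary.

object CopySpecSizes extends App {
  val maxBytes = 1L << 28                    // 268435456 bytes
  println(maxBytes / (1024.0 * 1024 * 1024)) // 0.25, i.e. the 0.25 GB quoted in the comment
  println((1 * 1024 * 1024) == (1 << 20))    // true: BufferSize is exactly 1 MiB
}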
|
|
@@ -5,74 +5,73 @@ package sbt
|
|||
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Arbitrary.{arbString => _, arbChar => _, _}
|
||||
import java.io.{File, IOException}
|
||||
import Arbitrary.{ arbString => _, arbChar => _, _ }
|
||||
import java.io.{ File, IOException }
|
||||
|
||||
object WriteContentSpecification extends Properties("Write content")
|
||||
{
|
||||
property("Roundtrip string") = forAll( writeAndCheckString _)
|
||||
property("Roundtrip bytes") = forAll(writeAndCheckBytes _)
|
||||
property("Write string overwrites") = forAll(overwriteAndCheckStrings _)
|
||||
property("Write bytes overwrites") = forAll( overwriteAndCheckBytes _)
|
||||
property("Append string appends") = forAll( appendAndCheckStrings _)
|
||||
property("Append bytes appends") = forAll( appendAndCheckBytes _)
|
||||
property("Unzip doesn't stack overflow") = largeUnzip()
|
||||
object WriteContentSpecification extends Properties("Write content") {
|
||||
property("Roundtrip string") = forAll(writeAndCheckString _)
|
||||
property("Roundtrip bytes") = forAll(writeAndCheckBytes _)
|
||||
property("Write string overwrites") = forAll(overwriteAndCheckStrings _)
|
||||
property("Write bytes overwrites") = forAll(overwriteAndCheckBytes _)
|
||||
property("Append string appends") = forAll(appendAndCheckStrings _)
|
||||
property("Append bytes appends") = forAll(appendAndCheckBytes _)
|
||||
property("Unzip doesn't stack overflow") = largeUnzip()
|
||||
|
||||
implicit lazy val validChar: Arbitrary[Char] = Arbitrary( for(i <- Gen.choose(0, 0xd7ff)) yield i.toChar )
|
||||
implicit lazy val validString: Arbitrary[String] = Arbitrary(arbitrary[List[Char]] map (_.mkString))
|
||||
implicit lazy val validChar: Arbitrary[Char] = Arbitrary(for (i <- Gen.choose(0, 0xd7ff)) yield i.toChar)
|
||||
implicit lazy val validString: Arbitrary[String] = Arbitrary(arbitrary[List[Char]] map (_.mkString))
|
||||
|
||||
private def largeUnzip() =
|
||||
{
|
||||
testUnzip[Product]
|
||||
testUnzip[scala.tools.nsc.Global]
|
||||
true
|
||||
}
|
||||
private def testUnzip[T](implicit mf: scala.reflect.Manifest[T]) =
|
||||
unzipFile(IO.classLocationFile(mf.runtimeClass))
|
||||
private def unzipFile(jar: File) =
|
||||
IO.withTemporaryDirectory { tmp =>
|
||||
IO.unzip(jar, tmp)
|
||||
}
|
||||
private def largeUnzip() =
|
||||
{
|
||||
testUnzip[Product]
|
||||
testUnzip[scala.tools.nsc.Global]
|
||||
true
|
||||
}
|
||||
private def testUnzip[T](implicit mf: scala.reflect.Manifest[T]) =
|
||||
unzipFile(IO.classLocationFile(mf.runtimeClass))
|
||||
private def unzipFile(jar: File) =
|
||||
IO.withTemporaryDirectory { tmp =>
|
||||
IO.unzip(jar, tmp)
|
||||
}
|
||||
|
||||
// make the test independent of underlying platform and allow any unicode character in Strings to be encoded
|
||||
val charset = IO.utf8
|
||||
|
||||
import IO._
|
||||
private def writeAndCheckString(s: String) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, s, charset)
|
||||
read(file, charset) == s
|
||||
}
|
||||
private def writeAndCheckBytes(b: Array[Byte]) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, b)
|
||||
readBytes(file) sameElements b
|
||||
}
|
||||
private def overwriteAndCheckStrings(a: String, b: String) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, a, charset)
|
||||
write(file, b, charset)
|
||||
read(file, charset) == b
|
||||
}
|
||||
private def overwriteAndCheckBytes(a: Array[Byte], b: Array[Byte]) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, a)
|
||||
write(file, b)
|
||||
readBytes(file) sameElements b
|
||||
}
|
||||
private def appendAndCheckStrings(a: String, b: String) =
|
||||
withTemporaryFile { file =>
|
||||
append(file, a, charset)
|
||||
append(file, b, charset)
|
||||
read(file, charset) == (a+b)
|
||||
}
|
||||
private def appendAndCheckBytes(a: Array[Byte], b: Array[Byte]) =
|
||||
withTemporaryFile { file =>
|
||||
append(file, a)
|
||||
append(file, b)
|
||||
readBytes(file) sameElements (a++b)
|
||||
}
|
||||
|
||||
private def withTemporaryFile[T](f: File => T): T =
|
||||
withTemporaryDirectory { dir => f(new java.io.File(dir, "out")) }
|
||||
// make the test independent of underlying platform and allow any unicode character in Strings to be encoded
|
||||
val charset = IO.utf8
|
||||
|
||||
import IO._
|
||||
private def writeAndCheckString(s: String) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, s, charset)
|
||||
read(file, charset) == s
|
||||
}
|
||||
private def writeAndCheckBytes(b: Array[Byte]) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, b)
|
||||
readBytes(file) sameElements b
|
||||
}
|
||||
private def overwriteAndCheckStrings(a: String, b: String) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, a, charset)
|
||||
write(file, b, charset)
|
||||
read(file, charset) == b
|
||||
}
|
||||
private def overwriteAndCheckBytes(a: Array[Byte], b: Array[Byte]) =
|
||||
withTemporaryFile { file =>
|
||||
write(file, a)
|
||||
write(file, b)
|
||||
readBytes(file) sameElements b
|
||||
}
|
||||
private def appendAndCheckStrings(a: String, b: String) =
|
||||
withTemporaryFile { file =>
|
||||
append(file, a, charset)
|
||||
append(file, b, charset)
|
||||
read(file, charset) == (a + b)
|
||||
}
|
||||
private def appendAndCheckBytes(a: Array[Byte], b: Array[Byte]) =
|
||||
withTemporaryFile { file =>
|
||||
append(file, a)
|
||||
append(file, b)
|
||||
readBytes(file) sameElements (a ++ b)
|
||||
}
|
||||
|
||||
private def withTemporaryFile[T](f: File => T): T =
|
||||
withTemporaryDirectory { dir => f(new java.io.File(dir, "out")) }
|
||||
}
|
||||
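A hedged round-trip sketch using the same IO calls the properties above exercise (write, append and read with an explicit charset); only the object name and strings are invented:

import java.io.File
import sbt.IO

object RoundTripSketch extends App {
  IO.withTemporaryDirectory { dir =>
    val f = new File(dir, "out")
    IO.write(f, "héllo", IO.utf8)
    IO.append(f, " wörld", IO.utf8)
    println(IO.read(f, IO.utf8)) // héllo wörld
  }
}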
|
|
|
|||
|
|
@@ -6,39 +6,44 @@ package sbt
|
|||
import org.scalacheck._
|
||||
import Prop._
|
||||
|
||||
object NameFilterSpecification extends Properties("NameFilter")
|
||||
{
|
||||
property("All pass accepts everything") = forAll{ (s: String) => AllPassFilter.accept(s) }
|
||||
property("Exact filter matches provided string") = forAll {
|
||||
(s1: String, s2: String) => (new ExactFilter(s1)).accept(s2) == (s1 == s2) }
|
||||
property("Exact filter matches valid string") = forAll{ (s: String) => (new ExactFilter(s)).accept(s) }
|
||||
|
||||
property("Glob filter matches provided string if no *s") = forAll {
|
||||
(s1: String, s2: String) =>
|
||||
{
|
||||
val stripped = stripAsterisksAndControl(s1)
|
||||
(GlobFilter(stripped).accept(s2) == (stripped == s2))
|
||||
} }
|
||||
property("Glob filter matches valid string if no *s") = forAll {
|
||||
(s: String) =>
|
||||
{
|
||||
val stripped = stripAsterisksAndControl(s)
|
||||
GlobFilter(stripped).accept(stripped)
|
||||
}}
|
||||
|
||||
property("Glob filter matches valid") = forAll {
|
||||
(list: List[String]) =>
|
||||
{
|
||||
val stripped = list.map(stripAsterisksAndControl)
|
||||
GlobFilter(stripped.mkString("*")).accept(stripped.mkString)
|
||||
}}
|
||||
|
||||
/** Raw control characters are stripped because they are not allowed in expressions.
|
||||
* Asterisks are stripped because they are added under the control of the tests.*/
|
||||
private def stripAsterisksAndControl(s: String) = (s filter validChar).toString
|
||||
private[this] def validChar(c: Char) =
|
||||
!java.lang.Character.isISOControl(c) &&
|
||||
c != '*' &&
|
||||
!Character.isHighSurrogate(c) &&
|
||||
!Character.isLowSurrogate(c)
|
||||
object NameFilterSpecification extends Properties("NameFilter") {
|
||||
property("All pass accepts everything") = forAll { (s: String) => AllPassFilter.accept(s) }
|
||||
property("Exact filter matches provided string") = forAll {
|
||||
(s1: String, s2: String) => (new ExactFilter(s1)).accept(s2) == (s1 == s2)
|
||||
}
|
||||
property("Exact filter matches valid string") = forAll { (s: String) => (new ExactFilter(s)).accept(s) }
|
||||
|
||||
property("Glob filter matches provided string if no *s") = forAll {
|
||||
(s1: String, s2: String) =>
|
||||
{
|
||||
val stripped = stripAsterisksAndControl(s1)
|
||||
(GlobFilter(stripped).accept(s2) == (stripped == s2))
|
||||
}
|
||||
}
|
||||
property("Glob filter matches valid string if no *s") = forAll {
|
||||
(s: String) =>
|
||||
{
|
||||
val stripped = stripAsterisksAndControl(s)
|
||||
GlobFilter(stripped).accept(stripped)
|
||||
}
|
||||
}
|
||||
|
||||
property("Glob filter matches valid") = forAll {
|
||||
(list: List[String]) =>
|
||||
{
|
||||
val stripped = list.map(stripAsterisksAndControl)
|
||||
GlobFilter(stripped.mkString("*")).accept(stripped.mkString)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Raw control characters are stripped because they are not allowed in expressions.
|
||||
* Asterisks are stripped because they are added under the control of the tests.
|
||||
*/
|
||||
private def stripAsterisksAndControl(s: String) = (s filter validChar).toString
|
||||
private[this] def validChar(c: Char) =
|
||||
!java.lang.Character.isISOControl(c) &&
|
||||
c != '*' &&
|
||||
!Character.isHighSurrogate(c) &&
|
||||
!Character.isLowSurrogate(c)
|
||||
}
|
||||
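A small usage sketch of the ExactFilter and GlobFilter calls the properties above rely on; the example strings are arbitrary:

import sbt.{ ExactFilter, GlobFilter }

object NameFilterSketch extends App {
  println(new ExactFilter("build.sbt").accept("build.sbt"))           // true
  println(GlobFilter("*.scala").accept("NameFilter.scala"))           // true
  println(GlobFilter("Name*Spec*").accept("NameFilterSpecification")) // true
  println(GlobFilter("exact").accept("other"))                        // false: no *, so exact match only
}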
|
|
@@ -10,46 +10,47 @@ import java.net.URI
|
|||
import RichURI._
|
||||
|
||||
object RichURISpecification extends Properties("Rich URI") {
|
||||
val strGen = {
|
||||
val charGen = frequency((1, value(' ')), (9, alphaChar))
|
||||
val withEmptyGen = for(cs <- listOf(charGen)) yield cs.mkString
|
||||
withEmptyGen map (_.trim.replace(" ", "%20")) filter (!_.isEmpty)
|
||||
}
|
||||
val strGen = {
|
||||
val charGen = frequency((1, value(' ')), (9, alphaChar))
|
||||
val withEmptyGen = for (cs <- listOf(charGen)) yield cs.mkString
|
||||
withEmptyGen map (_.trim.replace(" ", "%20")) filter (!_.isEmpty)
|
||||
}
|
||||
|
||||
val pathGen =
|
||||
for(s <- listOf1(strGen)) yield s.mkString("/", "/", "")
|
||||
val pathGen =
|
||||
for (s <- listOf1(strGen)) yield s.mkString("/", "/", "")
|
||||
|
||||
def nullable[T >: Null](g: Gen[T]): Gen[T] = frequency((1, value(null)), (25, g))
|
||||
def nullable[T >: Null](g: Gen[T]): Gen[T] = frequency((1, value(null)), (25, g))
|
||||
|
||||
implicit val arbitraryURI: Arbitrary[URI] =
|
||||
Arbitrary(
|
||||
for (scheme <- identifier;
|
||||
path <- pathGen;
|
||||
fragment <- nullable(strGen))
|
||||
yield new URI(scheme, "file:" + path, fragment)
|
||||
)
|
||||
implicit val arbitraryURI: Arbitrary[URI] =
|
||||
Arbitrary(
|
||||
for (
|
||||
scheme <- identifier;
|
||||
path <- pathGen;
|
||||
fragment <- nullable(strGen)
|
||||
) yield new URI(scheme, "file:" + path, fragment)
|
||||
)
|
||||
|
||||
property("withoutFragment should drop fragment") = forAll { (uri: URI) =>
|
||||
uri.withoutFragment.getFragment eq null
|
||||
}
|
||||
property("withoutFragment should drop fragment") = forAll { (uri: URI) =>
|
||||
uri.withoutFragment.getFragment eq null
|
||||
}
|
||||
|
||||
property("withoutFragment should keep scheme") = forAll { (uri: URI) =>
|
||||
uri.withoutFragment.getScheme == uri.getScheme
|
||||
}
|
||||
property("withoutFragment should keep scheme") = forAll { (uri: URI) =>
|
||||
uri.withoutFragment.getScheme == uri.getScheme
|
||||
}
|
||||
|
||||
property("withoutFragment should keep scheme specific part") = forAll { (uri: URI) =>
|
||||
uri.withoutFragment.getSchemeSpecificPart == uri.getSchemeSpecificPart
|
||||
}
|
||||
property("withoutFragment should keep scheme specific part") = forAll { (uri: URI) =>
|
||||
uri.withoutFragment.getSchemeSpecificPart == uri.getSchemeSpecificPart
|
||||
}
|
||||
|
||||
property("withoutMarkerScheme should drop marker scheme") = forAll { (uri: URI) =>
|
||||
uri.withoutMarkerScheme.getScheme == "file"
|
||||
}
|
||||
property("withoutMarkerScheme should drop marker scheme") = forAll { (uri: URI) =>
|
||||
uri.withoutMarkerScheme.getScheme == "file"
|
||||
}
|
||||
|
||||
property("withoutMarkerScheme should keep path") = forAll { (uri: URI) =>
|
||||
uri.withoutMarkerScheme.getPath == uri.getSchemeSpecificPart.stripPrefix("file:")
|
||||
}
|
||||
property("withoutMarkerScheme should keep path") = forAll { (uri: URI) =>
|
||||
uri.withoutMarkerScheme.getPath == uri.getSchemeSpecificPart.stripPrefix("file:")
|
||||
}
|
||||
|
||||
property("withoutMarkerScheme should keep fragment") = forAll { (uri: URI) =>
|
||||
uri.withoutMarkerScheme.getFragment == uri.getFragment
|
||||
}
|
||||
property("withoutMarkerScheme should keep fragment") = forAll { (uri: URI) =>
|
||||
uri.withoutMarkerScheme.getFragment == uri.getFragment
|
||||
}
|
||||
}
|
||||
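A concrete instance of the properties above, built from the same marker-scheme shape arbitraryURI generates (scheme, "file:" + path, fragment); the values and the sbt.RichURI import path are assumptions:

import java.net.URI
import sbt.RichURI._

object RichURISketch extends App {
  val uri = new URI("proj", "file:/some/path", "frag")
  println(uri.withoutFragment)     // proj:file:/some/path  (fragment dropped, scheme kept)
  println(uri.withoutMarkerScheme) // file:/some/path#frag  (marker scheme dropped, fragment kept)
}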
|
|
|
|||
|
|
@@ -10,73 +10,68 @@ import IO._
|
|||
import java.io.File
|
||||
import Function.tupled
|
||||
|
||||
object CheckStash extends Specification
|
||||
{
|
||||
"stash" should {
|
||||
"handle empty files" in {
|
||||
stash(Set()) { }
|
||||
true must beTrue
|
||||
}
|
||||
|
||||
"move files during execution" in {
|
||||
WithFiles(TestFiles : _*) ( checkMove )
|
||||
}
|
||||
|
||||
"restore files on exceptions but not errors" in {
|
||||
WithFiles(TestFiles : _*) ( checkRestore )
|
||||
}
|
||||
}
|
||||
|
||||
def checkRestore(seq: Seq[File])
|
||||
{
|
||||
allCorrect(seq)
|
||||
|
||||
stash0(seq, throw new TestRuntimeException) must beFalse
|
||||
allCorrect(seq)
|
||||
|
||||
stash0(seq, throw new TestException) must beFalse
|
||||
allCorrect(seq)
|
||||
|
||||
stash0(seq, throw new TestError) must beFalse
|
||||
noneExist(seq)
|
||||
}
|
||||
def checkMove(seq: Seq[File])
|
||||
{
|
||||
allCorrect(seq)
|
||||
stash0(seq, ()) must beTrue
|
||||
noneExist(seq)
|
||||
}
|
||||
def stash0(seq: Seq[File], post: => Unit): Boolean =
|
||||
try
|
||||
{
|
||||
stash(Set() ++ seq) {
|
||||
noneExist(seq)
|
||||
post
|
||||
}
|
||||
true
|
||||
}
|
||||
catch {
|
||||
case _: TestError | _: TestException | _: TestRuntimeException => false
|
||||
}
|
||||
|
||||
def allCorrect(s: Seq[File]) = (s.toList zip TestFiles.toList).foreach((correct _).tupled)
|
||||
def correct(check: File, ref: (File, String)) =
|
||||
{
|
||||
check.exists must beTrue
|
||||
read(check) must equalTo(ref._2)
|
||||
}
|
||||
def noneExist(s: Seq[File]) = s.forall(!_.exists) must beTrue
|
||||
|
||||
lazy val TestFiles =
|
||||
Seq(
|
||||
"a/b/c" -> "content1",
|
||||
"a/b/e" -> "content1",
|
||||
"c" -> "",
|
||||
"e/g" -> "asdf",
|
||||
"a/g/c" -> "other"
|
||||
) map {
|
||||
case (f, c) => (new File(f), c)
|
||||
}
|
||||
object CheckStash extends Specification {
|
||||
"stash" should {
|
||||
"handle empty files" in {
|
||||
stash(Set()) {}
|
||||
true must beTrue
|
||||
}
|
||||
|
||||
"move files during execution" in {
|
||||
WithFiles(TestFiles: _*)(checkMove)
|
||||
}
|
||||
|
||||
"restore files on exceptions but not errors" in {
|
||||
WithFiles(TestFiles: _*)(checkRestore)
|
||||
}
|
||||
}
|
||||
|
||||
def checkRestore(seq: Seq[File]) {
|
||||
allCorrect(seq)
|
||||
|
||||
stash0(seq, throw new TestRuntimeException) must beFalse
|
||||
allCorrect(seq)
|
||||
|
||||
stash0(seq, throw new TestException) must beFalse
|
||||
allCorrect(seq)
|
||||
|
||||
stash0(seq, throw new TestError) must beFalse
|
||||
noneExist(seq)
|
||||
}
|
||||
def checkMove(seq: Seq[File]) {
|
||||
allCorrect(seq)
|
||||
stash0(seq, ()) must beTrue
|
||||
noneExist(seq)
|
||||
}
|
||||
def stash0(seq: Seq[File], post: => Unit): Boolean =
|
||||
try {
|
||||
stash(Set() ++ seq) {
|
||||
noneExist(seq)
|
||||
post
|
||||
}
|
||||
true
|
||||
} catch {
|
||||
case _: TestError | _: TestException | _: TestRuntimeException => false
|
||||
}
|
||||
|
||||
def allCorrect(s: Seq[File]) = (s.toList zip TestFiles.toList).foreach((correct _).tupled)
|
||||
def correct(check: File, ref: (File, String)) =
|
||||
{
|
||||
check.exists must beTrue
|
||||
read(check) must equalTo(ref._2)
|
||||
}
|
||||
def noneExist(s: Seq[File]) = s.forall(!_.exists) must beTrue
|
||||
|
||||
lazy val TestFiles =
|
||||
Seq(
|
||||
"a/b/c" -> "content1",
|
||||
"a/b/e" -> "content1",
|
||||
"c" -> "",
|
||||
"e/g" -> "asdf",
|
||||
"a/g/c" -> "other"
|
||||
) map {
|
||||
case (f, c) => (new File(f), c)
|
||||
}
|
||||
}
|
||||
class TestError extends Error
|
||||
class TestRuntimeException extends RuntimeException
|
||||
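Taken together, the expectations above fix IO.stash's observable behaviour: stashed files are gone while the block runs, stay removed after normal completion, are restored when the block throws an Exception, and are not restored on an Error. A hedged sketch of the normal-completion path (object name and contents are invented):

import java.io.File
import sbt.IO

object StashSketch extends App {
  IO.withTemporaryDirectory { dir =>
    val f = new File(dir, "data.txt")
    IO.write(f, "keep me")
    IO.stash(Set(f)) {
      println(f.exists) // false: moved aside while the block runs
    }
    println(f.exists)   // still false: the block completed normally, so nothing is restored
  }
}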
|
|
|
|||
|
|
@@ -1,25 +1,25 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import IO.{withTemporaryDirectory, write}
|
||||
import IO.{ withTemporaryDirectory, write }
|
||||
|
||||
object WithFiles
|
||||
{
|
||||
/** Takes the relative path -> content pairs and writes the content to a file in a temporary directory. The written file
|
||||
* path is the relative path resolved against the temporary directory path. The provided function is called with the resolved file paths
|
||||
* in the same order as the inputs. */
|
||||
def apply[T](sources: (File, String)*)(f: Seq[File] => T): T =
|
||||
{
|
||||
withTemporaryDirectory { dir =>
|
||||
val sourceFiles =
|
||||
for((file, content) <- sources) yield
|
||||
{
|
||||
assert(!file.isAbsolute)
|
||||
val to = new File(dir, file.getPath)
|
||||
write(to, content)
|
||||
to
|
||||
}
|
||||
f(sourceFiles)
|
||||
}
|
||||
}
|
||||
object WithFiles {
|
||||
/**
|
||||
* Takes the relative path -> content pairs and writes the content to a file in a temporary directory. The written file
|
||||
* path is the relative path resolved against the temporary directory path. The provided function is called with the resolved file paths
|
||||
* in the same order as the inputs.
|
||||
*/
|
||||
def apply[T](sources: (File, String)*)(f: Seq[File] => T): T =
|
||||
{
|
||||
withTemporaryDirectory { dir =>
|
||||
val sourceFiles =
|
||||
for ((file, content) <- sources) yield {
|
||||
assert(!file.isAbsolute)
|
||||
val to = new File(dir, file.getPath)
|
||||
write(to, content)
|
||||
to
|
||||
}
|
||||
f(sourceFiles)
|
||||
}
|
||||
}
|
||||
}
|
||||
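A usage sketch of WithFiles.apply as documented above, assumed to sit alongside the helper in the same test sources; the file names and contents are invented:

package sbt

import java.io.File

object WithFilesSketch extends App {
  WithFiles(new File("dir/a.txt") -> "hello", new File("b.txt") -> "world") { files =>
    // files holds the two paths resolved against a fresh temporary directory,
    // in the same order as the inputs, with the contents already written.
    files foreach { f => println(f.getName + ": " + IO.read(f)) }
  }
}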
|
|
@@ -2,90 +2,85 @@ package sbt
|
|||
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Gen.{listOf, oneOf}
|
||||
import Gen.{ listOf, oneOf }
|
||||
|
||||
import ConsoleLogger.{ESC, hasEscapeSequence, isEscapeTerminator, removeEscapeSequences}
|
||||
import ConsoleLogger.{ ESC, hasEscapeSequence, isEscapeTerminator, removeEscapeSequences }
|
||||
|
||||
object Escapes extends Properties("Escapes")
|
||||
{
|
||||
property("genTerminator only generates terminators") =
|
||||
forAllNoShrink(genTerminator) { (c: Char) => isEscapeTerminator(c) }
|
||||
object Escapes extends Properties("Escapes") {
|
||||
property("genTerminator only generates terminators") =
|
||||
forAllNoShrink(genTerminator) { (c: Char) => isEscapeTerminator(c) }
|
||||
|
||||
property("genWithoutTerminator only generates terminators") =
|
||||
forAllNoShrink(genWithoutTerminator) { (s: String) =>
|
||||
s.forall { c => !isEscapeTerminator(c) }
|
||||
}
|
||||
property("genWithoutTerminator only generates terminators") =
|
||||
forAllNoShrink(genWithoutTerminator) { (s: String) =>
|
||||
s.forall { c => !isEscapeTerminator(c) }
|
||||
}
|
||||
|
||||
property("hasEscapeSequence is false when no escape character is present") = forAllNoShrink(genWithoutEscape) { (s: String) =>
|
||||
!hasEscapeSequence(s)
|
||||
}
|
||||
property("hasEscapeSequence is false when no escape character is present") = forAllNoShrink(genWithoutEscape) { (s: String) =>
|
||||
!hasEscapeSequence(s)
|
||||
}
|
||||
|
||||
property("hasEscapeSequence is true when escape character is present") = forAllNoShrink(genWithRandomEscapes) { (s: String) =>
|
||||
hasEscapeSequence(s)
|
||||
}
|
||||
property("hasEscapeSequence is true when escape character is present") = forAllNoShrink(genWithRandomEscapes) { (s: String) =>
|
||||
hasEscapeSequence(s)
|
||||
}
|
||||
|
||||
property("removeEscapeSequences is the identity when no escape character is present") = forAllNoShrink(genWithoutEscape) { (s: String) =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
("Escape sequence removed: '" + removed + "'") |:
|
||||
(removed == s)
|
||||
}
|
||||
property("removeEscapeSequences is the identity when no escape character is present") = forAllNoShrink(genWithoutEscape) { (s: String) =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
("Escape sequence removed: '" + removed + "'") |:
|
||||
(removed == s)
|
||||
}
|
||||
|
||||
property("No escape characters remain after removeEscapeSequences") = forAll { (s: String) =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
("Escape sequence removed: '" + removed + "'") |:
|
||||
!hasEscapeSequence(removed)
|
||||
}
|
||||
property("No escape characters remain after removeEscapeSequences") = forAll { (s: String) =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
("Escape sequence removed: '" + removed + "'") |:
|
||||
!hasEscapeSequence(removed)
|
||||
}
|
||||
|
||||
property("removeEscapeSequences returns string without escape sequences") =
|
||||
forAllNoShrink( genWithoutEscape, genEscapePairs ) { (start: String, escapes: List[EscapeAndNot]) =>
|
||||
val withEscapes: String = start + escapes.map { ean => ean.escape.makeString + ean.notEscape }
|
||||
val removed: String = removeEscapeSequences(withEscapes)
|
||||
val original = start + escapes.map(_.notEscape)
|
||||
("Input string with escapes: '" + withEscapes + "'") |:
|
||||
("Escapes removed '" + removed + "'") |:
|
||||
(original == removed)
|
||||
}
|
||||
property("removeEscapeSequences returns string without escape sequences") =
|
||||
forAllNoShrink(genWithoutEscape, genEscapePairs) { (start: String, escapes: List[EscapeAndNot]) =>
|
||||
val withEscapes: String = start + escapes.map { ean => ean.escape.makeString + ean.notEscape }
|
||||
val removed: String = removeEscapeSequences(withEscapes)
|
||||
val original = start + escapes.map(_.notEscape)
|
||||
("Input string with escapes: '" + withEscapes + "'") |:
|
||||
("Escapes removed '" + removed + "'") |:
|
||||
(original == removed)
|
||||
}
|
||||
|
||||
final case class EscapeAndNot(escape: EscapeSequence, notEscape: String)
|
||||
final case class EscapeSequence(content: String, terminator: Char)
|
||||
{
|
||||
assert( content.forall(c => !isEscapeTerminator(c) ), "Escape sequence content contains an escape terminator: '" + content + "'" )
|
||||
assert( isEscapeTerminator(terminator) )
|
||||
def makeString: String = ESC + content + terminator
|
||||
}
|
||||
private[this] def noEscape(s: String): String = s.replace(ESC, ' ')
|
||||
final case class EscapeAndNot(escape: EscapeSequence, notEscape: String)
|
||||
final case class EscapeSequence(content: String, terminator: Char) {
|
||||
assert(content.forall(c => !isEscapeTerminator(c)), "Escape sequence content contains an escape terminator: '" + content + "'")
|
||||
assert(isEscapeTerminator(terminator))
|
||||
def makeString: String = ESC + content + terminator
|
||||
}
|
||||
private[this] def noEscape(s: String): String = s.replace(ESC, ' ')
|
||||
|
||||
lazy val genEscapeSequence: Gen[EscapeSequence] = oneOf(genKnownSequence, genArbitraryEscapeSequence)
|
||||
lazy val genEscapePair: Gen[EscapeAndNot] = for(esc <- genEscapeSequence; not <- genWithoutEscape) yield EscapeAndNot(esc, not)
|
||||
lazy val genEscapePairs: Gen[List[EscapeAndNot]] = listOf(genEscapePair)
|
||||
lazy val genEscapeSequence: Gen[EscapeSequence] = oneOf(genKnownSequence, genArbitraryEscapeSequence)
|
||||
lazy val genEscapePair: Gen[EscapeAndNot] = for (esc <- genEscapeSequence; not <- genWithoutEscape) yield EscapeAndNot(esc, not)
|
||||
lazy val genEscapePairs: Gen[List[EscapeAndNot]] = listOf(genEscapePair)
|
||||
|
||||
lazy val genArbitraryEscapeSequence: Gen[EscapeSequence] =
|
||||
for(content <- genWithoutTerminator; term <- genTerminator) yield
|
||||
new EscapeSequence(content, term)
|
||||
|
||||
lazy val genKnownSequence: Gen[EscapeSequence] =
|
||||
oneOf((misc ++ setGraphicsMode ++ setMode ++ resetMode).map(toEscapeSequence))
|
||||
|
||||
def toEscapeSequence(s: String): EscapeSequence = EscapeSequence(s.init, s.last)
|
||||
lazy val genArbitraryEscapeSequence: Gen[EscapeSequence] =
|
||||
for (content <- genWithoutTerminator; term <- genTerminator) yield new EscapeSequence(content, term)
|
||||
|
||||
lazy val misc = Seq("14;23H", "5;3f", "2A", "94B", "19C", "85D", "s", "u", "2J", "K")
|
||||
lazy val genKnownSequence: Gen[EscapeSequence] =
|
||||
oneOf((misc ++ setGraphicsMode ++ setMode ++ resetMode).map(toEscapeSequence))
|
||||
|
||||
lazy val setGraphicsMode: Seq[String] =
|
||||
for(txt <- 0 to 8; fg <- 30 to 37; bg <- 40 to 47) yield
|
||||
txt.toString + ";" + fg.toString + ";" + bg.toString + "m"
|
||||
def toEscapeSequence(s: String): EscapeSequence = EscapeSequence(s.init, s.last)
|
||||
|
||||
lazy val resetMode = setModeLike('I')
|
||||
lazy val setMode = setModeLike('h')
|
||||
def setModeLike(term: Char): Seq[String] = (0 to 19).map(i => "=" + i.toString + term)
|
||||
|
||||
lazy val genWithoutTerminator = genRawString.map( _.filter { c => !isEscapeTerminator(c) } )
|
||||
lazy val misc = Seq("14;23H", "5;3f", "2A", "94B", "19C", "85D", "s", "u", "2J", "K")
|
||||
|
||||
lazy val genTerminator: Gen[Char] = Gen.choose('@', '~')
|
||||
lazy val genWithoutEscape: Gen[String] = genRawString.map(noEscape)
|
||||
lazy val setGraphicsMode: Seq[String] =
|
||||
for (txt <- 0 to 8; fg <- 30 to 37; bg <- 40 to 47) yield txt.toString + ";" + fg.toString + ";" + bg.toString + "m"
|
||||
|
||||
def genWithRandomEscapes: Gen[String] =
|
||||
for(ls <- listOf(genRawString); end <- genRawString) yield
|
||||
ls.mkString("", ESC.toString, ESC.toString + end)
|
||||
lazy val resetMode = setModeLike('I')
|
||||
lazy val setMode = setModeLike('h')
|
||||
def setModeLike(term: Char): Seq[String] = (0 to 19).map(i => "=" + i.toString + term)
|
||||
|
||||
private def genRawString = Arbitrary.arbString.arbitrary
|
||||
lazy val genWithoutTerminator = genRawString.map(_.filter { c => !isEscapeTerminator(c) })
|
||||
|
||||
lazy val genTerminator: Gen[Char] = Gen.choose('@', '~')
|
||||
lazy val genWithoutEscape: Gen[String] = genRawString.map(noEscape)
|
||||
|
||||
def genWithRandomEscapes: Gen[String] =
|
||||
for (ls <- listOf(genRawString); end <- genRawString) yield ls.mkString("", ESC.toString, ESC.toString + end)
|
||||
|
||||
private def genRawString = Arbitrary.arbString.arbitrary
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -4,157 +4,147 @@
|
|||
package sbt
|
||||
|
||||
import org.scalacheck._
|
||||
import Arbitrary.{arbitrary => arb, _}
|
||||
import Gen.{listOfN, oneOf}
|
||||
import Arbitrary.{ arbitrary => arb, _ }
|
||||
import Gen.{ listOfN, oneOf }
|
||||
import Prop._
|
||||
|
||||
import java.io.Writer
|
||||
|
||||
object LogWriterTest extends Properties("Log Writer")
|
||||
{
|
||||
final val MaxLines = 100
|
||||
final val MaxSegments = 10
|
||||
object LogWriterTest extends Properties("Log Writer") {
|
||||
final val MaxLines = 100
|
||||
final val MaxSegments = 10
|
||||
|
||||
/* Tests that content written through a LoggerWriter is properly passed to the underlying Logger.
|
||||
/* Tests that content written through a LoggerWriter is properly passed to the underlying Logger.
|
||||
* Each line, determined by the specified newline separator, must be logged at the correct logging level. */
|
||||
property("properly logged") = forAll { (output: Output, newLine: NewLine) =>
|
||||
import output.{lines, level}
|
||||
val log = new RecordingLogger
|
||||
val writer = new LoggerWriter(log, Some(level), newLine.str)
|
||||
logLines(writer, lines, newLine.str)
|
||||
val events = log.getEvents
|
||||
("Recorded:\n" + events.map(show).mkString("\n")) |:
|
||||
check( toLines(lines), events, level)
|
||||
}
|
||||
|
||||
/** Displays a LogEvent in a useful format for debugging. In particular, we are only interested in `Log` types
|
||||
* and non-printable characters should be escaped*/
|
||||
def show(event: LogEvent): String =
|
||||
event match
|
||||
{
|
||||
case l: Log => "Log('" + Escape(l.msg) + "', " + l.level + ")"
|
||||
case _ => "Not Log"
|
||||
}
|
||||
/** Writes the given lines to the Writer. `lines` is taken to be a list of lines, which are
|
||||
* represented as separately written segments (ToLog instances). ToLog.`byCharacter`
|
||||
* indicates whether to write the segment by character (true) or all at once (false)*/
|
||||
def logLines(writer: Writer, lines: List[List[ToLog]], newLine: String)
|
||||
{
|
||||
for(line <- lines; section <- line)
|
||||
{
|
||||
val content = section.content
|
||||
val normalized = Escape.newline(content, newLine)
|
||||
if(section.byCharacter)
|
||||
normalized.foreach { c => writer.write(c.toInt) }
|
||||
else
|
||||
writer.write(normalized)
|
||||
}
|
||||
writer.flush()
|
||||
}
|
||||
|
||||
/** Converts the given lines in segments to lines as Strings for checking the results of the test.*/
|
||||
def toLines(lines: List[List[ToLog]]): List[String] =
|
||||
lines.map(_.map(_.contentOnly).mkString)
|
||||
/** Checks that the expected `lines` were recorded as `events` at level `Lvl`.*/
|
||||
def check(lines: List[String], events: List[LogEvent], Lvl: Level.Value): Boolean =
|
||||
(lines zip events) forall {
|
||||
case (line, log : Log) => log.level == Lvl && line == log.msg
|
||||
case _ => false
|
||||
}
|
||||
|
||||
/* The following are implicit generators to build up a write sequence.
|
||||
property("properly logged") = forAll { (output: Output, newLine: NewLine) =>
|
||||
import output.{ lines, level }
|
||||
val log = new RecordingLogger
|
||||
val writer = new LoggerWriter(log, Some(level), newLine.str)
|
||||
logLines(writer, lines, newLine.str)
|
||||
val events = log.getEvents
|
||||
("Recorded:\n" + events.map(show).mkString("\n")) |:
|
||||
check(toLines(lines), events, level)
|
||||
}
|
||||
|
||||
/**
|
||||
* Displays a LogEvent in a useful format for debugging. In particular, we are only interested in `Log` types
|
||||
* and non-printable characters should be escaped
|
||||
*/
|
||||
def show(event: LogEvent): String =
|
||||
event match {
|
||||
case l: Log => "Log('" + Escape(l.msg) + "', " + l.level + ")"
|
||||
case _ => "Not Log"
|
||||
}
|
||||
/**
|
||||
* Writes the given lines to the Writer. `lines` is taken to be a list of lines, which are
|
||||
* represented as separately written segments (ToLog instances). ToLog.`byCharacter`
|
||||
* indicates whether to write the segment by character (true) or all at once (false)
|
||||
*/
|
||||
def logLines(writer: Writer, lines: List[List[ToLog]], newLine: String) {
|
||||
for (line <- lines; section <- line) {
|
||||
val content = section.content
|
||||
val normalized = Escape.newline(content, newLine)
|
||||
if (section.byCharacter)
|
||||
normalized.foreach { c => writer.write(c.toInt) }
|
||||
else
|
||||
writer.write(normalized)
|
||||
}
|
||||
writer.flush()
|
||||
}
|
||||
|
||||
/** Converts the given lines in segments to lines as Strings for checking the results of the test.*/
|
||||
def toLines(lines: List[List[ToLog]]): List[String] =
|
||||
lines.map(_.map(_.contentOnly).mkString)
|
||||
/** Checks that the expected `lines` were recorded as `events` at level `Lvl`.*/
|
||||
def check(lines: List[String], events: List[LogEvent], Lvl: Level.Value): Boolean =
|
||||
(lines zip events) forall {
|
||||
case (line, log: Log) => log.level == Lvl && line == log.msg
|
||||
case _ => false
|
||||
}
|
||||
|
||||
/* The following are implicit generators to build up a write sequence.
|
||||
* ToLog represents a written segment. NewLine represents one of the possible
|
||||
* newline separators. A List[ToLog] represents a full line and always includes a
|
||||
* final ToLog with a trailing '\n'. Newline characters are otherwise not present in
|
||||
* the `content` of a ToLog instance.*/
|
||||
|
||||
implicit lazy val arbOut: Arbitrary[Output] = Arbitrary(genOutput)
|
||||
implicit lazy val arbLog: Arbitrary[ToLog] = Arbitrary(genLog)
|
||||
implicit lazy val arbLine: Arbitrary[List[ToLog]] = Arbitrary(genLine)
|
||||
implicit lazy val arbNewLine: Arbitrary[NewLine] = Arbitrary(genNewLine)
|
||||
implicit lazy val arbLevel : Arbitrary[Level.Value] = Arbitrary(genLevel)
|
||||
|
||||
implicit def genLine(implicit logG: Gen[ToLog]): Gen[List[ToLog]] =
|
||||
for(l <- listOf[ToLog](MaxSegments); last <- logG) yield
|
||||
(addNewline(last) :: l.filter(!_.content.isEmpty)).reverse
|
||||
|
||||
implicit def genLog(implicit content: Arbitrary[String], byChar: Arbitrary[Boolean]): Gen[ToLog] =
|
||||
for(c <- content.arbitrary; by <- byChar.arbitrary) yield
|
||||
{
|
||||
assert(c != null)
|
||||
new ToLog(removeNewlines(c), by)
|
||||
}
|
||||
|
||||
implicit lazy val genNewLine: Gen[NewLine] =
|
||||
for(str <- oneOf("\n", "\r", "\r\n")) yield
|
||||
new NewLine(str)
|
||||
|
||||
implicit lazy val genLevel: Gen[Level.Value] =
|
||||
oneOf(Level.values.toSeq)
|
||||
|
||||
implicit lazy val genOutput: Gen[Output] =
|
||||
for(ls <- listOf[List[ToLog]](MaxLines); lv <- genLevel) yield
|
||||
new Output(ls, lv)
|
||||
|
||||
def removeNewlines(s: String) = s.replaceAll("""[\n\r]+""", "")
|
||||
def addNewline(l: ToLog): ToLog =
|
||||
new ToLog(l.content + "\n", l.byCharacter) // \n will be replaced by a random line terminator for all lines
|
||||
implicit lazy val arbOut: Arbitrary[Output] = Arbitrary(genOutput)
|
||||
implicit lazy val arbLog: Arbitrary[ToLog] = Arbitrary(genLog)
|
||||
implicit lazy val arbLine: Arbitrary[List[ToLog]] = Arbitrary(genLine)
|
||||
implicit lazy val arbNewLine: Arbitrary[NewLine] = Arbitrary(genNewLine)
|
||||
implicit lazy val arbLevel: Arbitrary[Level.Value] = Arbitrary(genLevel)
|
||||
|
||||
def listOf[T](max: Int)(implicit content: Arbitrary[T]): Gen[List[T]] =
|
||||
Gen.choose(0, max) flatMap { sz => listOfN(sz, content.arbitrary) }
|
||||
implicit def genLine(implicit logG: Gen[ToLog]): Gen[List[ToLog]] =
|
||||
for (l <- listOf[ToLog](MaxSegments); last <- logG) yield (addNewline(last) :: l.filter(!_.content.isEmpty)).reverse
|
||||
|
||||
implicit def genLog(implicit content: Arbitrary[String], byChar: Arbitrary[Boolean]): Gen[ToLog] =
|
||||
for (c <- content.arbitrary; by <- byChar.arbitrary) yield {
|
||||
assert(c != null)
|
||||
new ToLog(removeNewlines(c), by)
|
||||
}
|
||||
|
||||
implicit lazy val genNewLine: Gen[NewLine] =
|
||||
for (str <- oneOf("\n", "\r", "\r\n")) yield new NewLine(str)
|
||||
|
||||
implicit lazy val genLevel: Gen[Level.Value] =
|
||||
oneOf(Level.values.toSeq)
|
||||
|
||||
implicit lazy val genOutput: Gen[Output] =
|
||||
for (ls <- listOf[List[ToLog]](MaxLines); lv <- genLevel) yield new Output(ls, lv)
|
||||
|
||||
def removeNewlines(s: String) = s.replaceAll("""[\n\r]+""", "")
|
||||
def addNewline(l: ToLog): ToLog =
|
||||
new ToLog(l.content + "\n", l.byCharacter) // \n will be replaced by a random line terminator for all lines
|
||||
|
||||
def listOf[T](max: Int)(implicit content: Arbitrary[T]): Gen[List[T]] =
|
||||
Gen.choose(0, max) flatMap { sz => listOfN(sz, content.arbitrary) }
|
||||
}
|
||||
|
||||
/* Helper classes*/
|
||||
|
||||
final class Output(val lines: List[List[ToLog]], val level: Level.Value) extends NotNull
|
||||
{
|
||||
override def toString =
|
||||
"Level: " + level + "\n" + lines.map(_.mkString).mkString("\n")
|
||||
final class Output(val lines: List[List[ToLog]], val level: Level.Value) extends NotNull {
|
||||
override def toString =
|
||||
"Level: " + level + "\n" + lines.map(_.mkString).mkString("\n")
|
||||
}
|
||||
final class NewLine(val str: String) extends NotNull
|
||||
{
|
||||
override def toString = Escape(str)
|
||||
final class NewLine(val str: String) extends NotNull {
|
||||
override def toString = Escape(str)
|
||||
}
|
||||
final class ToLog(val content: String, val byCharacter: Boolean) extends NotNull
|
||||
{
|
||||
def contentOnly = Escape.newline(content, "")
|
||||
override def toString = if(content.isEmpty) "" else "ToLog('" + Escape(contentOnly) + "', " + byCharacter + ")"
|
||||
final class ToLog(val content: String, val byCharacter: Boolean) extends NotNull {
|
||||
def contentOnly = Escape.newline(content, "")
|
||||
override def toString = if (content.isEmpty) "" else "ToLog('" + Escape(contentOnly) + "', " + byCharacter + ")"
|
||||
}
|
||||
/** Defines some utility methods for escaping unprintable characters.*/
|
||||
object Escape
|
||||
{
|
||||
/** Escapes characters with code less than 20 by printing them as unicode escapes.*/
|
||||
def apply(s: String): String =
|
||||
{
|
||||
val builder = new StringBuilder(s.length)
|
||||
for(c <- s)
|
||||
{
|
||||
def escaped = pad(c.toInt.toHexString.toUpperCase, 4, '0')
|
||||
if(c < 20) builder.append("\\u").append(escaped) else builder.append(c)
|
||||
}
|
||||
builder.toString
|
||||
}
|
||||
def pad(s: String, minLength: Int, extra: Char) =
|
||||
{
|
||||
val diff = minLength - s.length
|
||||
if(diff <= 0) s else List.make(diff, extra).mkString("", "", s)
|
||||
}
|
||||
/** Replaces a \n character at the end of a string `s` with `nl`.*/
|
||||
def newline(s: String, nl: String): String =
|
||||
if(s.endsWith("\n")) s.substring(0, s.length - 1) + nl else s
|
||||
object Escape {
|
||||
/** Escapes characters with code less than 20 by printing them as unicode escapes.*/
|
||||
def apply(s: String): String =
|
||||
{
|
||||
val builder = new StringBuilder(s.length)
|
||||
for (c <- s) {
|
||||
def escaped = pad(c.toInt.toHexString.toUpperCase, 4, '0')
|
||||
if (c < 20) builder.append("\\u").append(escaped) else builder.append(c)
|
||||
}
|
||||
builder.toString
|
||||
}
|
||||
def pad(s: String, minLength: Int, extra: Char) =
|
||||
{
|
||||
val diff = minLength - s.length
|
||||
if (diff <= 0) s else List.make(diff, extra).mkString("", "", s)
|
||||
}
|
||||
/** Replaces a \n character at the end of a string `s` with `nl`.*/
|
||||
def newline(s: String, nl: String): String =
|
||||
if (s.endsWith("\n")) s.substring(0, s.length - 1) + nl else s
|
||||
}
|
||||
/** Records logging events for later retrieval.*/
|
||||
final class RecordingLogger extends BasicLogger
|
||||
{
|
||||
private var events: List[LogEvent] = Nil
|
||||
|
||||
def getEvents = events.reverse
|
||||
|
||||
override def ansiCodesSupported = true
|
||||
def trace(t: => Throwable) { events ::= new Trace(t) }
|
||||
def log(level: Level.Value, message: => String) { events ::= new Log(level, message) }
|
||||
def success(message: => String) { events ::= new Success(message) }
|
||||
def logAll(es: Seq[LogEvent]) { events :::= es.toList }
|
||||
def control(event: ControlEvent.Value, message: => String) { events ::= new ControlEvent(event, message) }
|
||||
|
||||
final class RecordingLogger extends BasicLogger {
|
||||
private var events: List[LogEvent] = Nil
|
||||
|
||||
def getEvents = events.reverse
|
||||
|
||||
override def ansiCodesSupported = true
|
||||
def trace(t: => Throwable) { events ::= new Trace(t) }
|
||||
def log(level: Level.Value, message: => String) { events ::= new Log(level, message) }
|
||||
def success(message: => String) { events ::= new Success(message) }
|
||||
def logAll(es: Seq[LogEvent]) { events :::= es.toList }
|
||||
def control(event: ControlEvent.Value, message: => String) { events ::= new ControlEvent(event, message) }
|
||||
|
||||
}
|
||||
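A hedged sketch of the LoggerWriter wiring that the "properly logged" property drives; the constructor arguments mirror the test above, everything else is invented:

import sbt.{ ConsoleLogger, Level, LoggerWriter }

object LoggerWriterSketch extends App {
  val log = ConsoleLogger()
  val writer = new LoggerWriter(log, Some(Level.Info), "\n")
  writer.write("first line\nsecond line\n")
  writer.flush() // each completed line becomes one Log event at Level.Info
}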
|
|
@@ -1,11 +1,10 @@
|
|||
package sbt
|
||||
|
||||
object TestLogger
|
||||
{
|
||||
def apply[T](f: Logger => T): T =
|
||||
{
|
||||
val log = new BufferedLogger(ConsoleLogger())
|
||||
log.setLevel(Level.Debug)
|
||||
log.bufferQuietly(f(log))
|
||||
}
|
||||
object TestLogger {
|
||||
def apply[T](f: Logger => T): T =
|
||||
{
|
||||
val log = new BufferedLogger(ConsoleLogger())
|
||||
log.setLevel(Level.Debug)
|
||||
log.bufferQuietly(f(log))
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,117 +1,115 @@
|
|||
package sbt
|
||||
package logic
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop.secure
|
||||
import Logic.{LogicException, Matched}
|
||||
import org.scalacheck._
|
||||
import Prop.secure
|
||||
import Logic.{ LogicException, Matched }
|
||||
|
||||
object LogicTest extends Properties("Logic")
|
||||
{
|
||||
import TestClauses._
|
||||
object LogicTest extends Properties("Logic") {
|
||||
import TestClauses._
|
||||
|
||||
property("Handles trivial resolution.") = secure( expect(trivial, Set(A) ) )
|
||||
property("Handles less trivial resolution.") = secure( expect(lessTrivial, Set(B,A,D)) )
|
||||
property("Handles cycles without negation") = secure( expect(cycles, Set(F,A,B)) )
|
||||
property("Handles basic exclusion.") = secure( expect(excludedPos, Set()) )
|
||||
property("Handles exclusion of head proved by negation.") = secure( expect(excludedNeg, Set()) )
|
||||
// TODO: actually check ordering, probably as part of a check that dependencies are satisfied
|
||||
property("Properly orders results.") = secure( expect(ordering, Set(B,A,C,E,F)))
|
||||
property("Detects cyclic negation") = secure(
|
||||
Logic.reduceAll(badClauses, Set()) match {
|
||||
case Right(res) => false
|
||||
case Left(err: Logic.CyclicNegation) => true
|
||||
case Left(err) => error(s"Expected cyclic error, got: $err")
|
||||
}
|
||||
)
|
||||
property("Handles trivial resolution.") = secure(expect(trivial, Set(A)))
|
||||
property("Handles less trivial resolution.") = secure(expect(lessTrivial, Set(B, A, D)))
|
||||
property("Handles cycles without negation") = secure(expect(cycles, Set(F, A, B)))
|
||||
property("Handles basic exclusion.") = secure(expect(excludedPos, Set()))
|
||||
property("Handles exclusion of head proved by negation.") = secure(expect(excludedNeg, Set()))
|
||||
// TODO: actually check ordering, probably as part of a check that dependencies are satisifed
|
||||
property("Properly orders results.") = secure(expect(ordering, Set(B, A, C, E, F)))
|
||||
property("Detects cyclic negation") = secure(
|
||||
Logic.reduceAll(badClauses, Set()) match {
|
||||
case Right(res) => false
|
||||
case Left(err: Logic.CyclicNegation) => true
|
||||
case Left(err) => error(s"Expected cyclic error, got: $err")
|
||||
}
|
||||
)
|
||||
|
||||
def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match {
|
||||
case Left(err) => false
|
||||
case Right(res) =>
|
||||
val actual = res.provenSet
|
||||
(actual == expected) || error(s"Expected to prove $expected, but actually proved $actual")
|
||||
}
|
||||
def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match {
|
||||
case Left(err) => false
|
||||
case Right(res) =>
|
||||
val actual = res.provenSet
|
||||
(actual == expected) || error(s"Expected to prove $expected, but actually proved $actual")
|
||||
}
|
||||
}
|
||||
|
||||
object TestClauses
|
||||
{
|
||||
object TestClauses {
|
||||
|
||||
val A = Atom("A")
|
||||
val B = Atom("B")
|
||||
val C = Atom("C")
|
||||
val D = Atom("D")
|
||||
val E = Atom("E")
|
||||
val F = Atom("F")
|
||||
val G = Atom("G")
|
||||
val A = Atom("A")
|
||||
val B = Atom("B")
|
||||
val C = Atom("C")
|
||||
val D = Atom("D")
|
||||
val E = Atom("E")
|
||||
val F = Atom("F")
|
||||
val G = Atom("G")
|
||||
|
||||
val clauses =
|
||||
A.proves(B) ::
|
||||
A.proves(F) ::
|
||||
B.proves(F) ::
|
||||
F.proves(A) ::
|
||||
(!C).proves(F) ::
|
||||
D.proves(C) ::
|
||||
C.proves(D) ::
|
||||
Nil
|
||||
val clauses =
|
||||
A.proves(B) ::
|
||||
A.proves(F) ::
|
||||
B.proves(F) ::
|
||||
F.proves(A) ::
|
||||
(!C).proves(F) ::
|
||||
D.proves(C) ::
|
||||
C.proves(D) ::
|
||||
Nil
|
||||
|
||||
val cycles = Logic.reduceAll(clauses, Set())
|
||||
val cycles = Logic.reduceAll(clauses, Set())
|
||||
|
||||
val badClauses =
|
||||
A.proves(D) ::
|
||||
clauses
|
||||
val badClauses =
|
||||
A.proves(D) ::
|
||||
clauses
|
||||
|
||||
val excludedNeg = {
|
||||
val cs =
|
||||
(!A).proves(B) ::
|
||||
Nil
|
||||
val init =
|
||||
(!A) ::
|
||||
(!B) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, init.toSet)
|
||||
}
|
||||
val excludedNeg = {
|
||||
val cs =
|
||||
(!A).proves(B) ::
|
||||
Nil
|
||||
val init =
|
||||
(!A) ::
|
||||
(!B) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, init.toSet)
|
||||
}
|
||||
|
||||
val excludedPos = {
|
||||
val cs =
|
||||
A.proves(B) ::
|
||||
Nil
|
||||
val init =
|
||||
A ::
|
||||
(!B) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, init.toSet)
|
||||
}
|
||||
val excludedPos = {
|
||||
val cs =
|
||||
A.proves(B) ::
|
||||
Nil
|
||||
val init =
|
||||
A ::
|
||||
(!B) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, init.toSet)
|
||||
}
|
||||
|
||||
val trivial = {
|
||||
val cs =
|
||||
Formula.True.proves(A) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, Set.empty)
|
||||
}
|
||||
val trivial = {
|
||||
val cs =
|
||||
Formula.True.proves(A) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, Set.empty)
|
||||
}
|
||||
|
||||
val lessTrivial = {
|
||||
val cs =
|
||||
Formula.True.proves(A) ::
|
||||
Formula.True.proves(B) ::
|
||||
(A && B && (!C)).proves(D) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, Set())
|
||||
}
|
||||
val lessTrivial = {
|
||||
val cs =
|
||||
Formula.True.proves(A) ::
|
||||
Formula.True.proves(B) ::
|
||||
(A && B && (!C)).proves(D) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, Set())
|
||||
}
|
||||
|
||||
val ordering = {
|
||||
val cs =
|
||||
E.proves(F) ::
|
||||
(C && !D).proves(E) ::
|
||||
(A && B).proves(C) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, Set(A,B))
|
||||
}
|
||||
val ordering = {
|
||||
val cs =
|
||||
E.proves(F) ::
|
||||
(C && !D).proves(E) ::
|
||||
(A && B).proves(C) ::
|
||||
Nil
|
||||
Logic.reduceAll(cs, Set(A, B))
|
||||
}
|
||||
|
||||
def all {
|
||||
println(s"Cycles: $cycles")
|
||||
println(s"xNeg: $excludedNeg")
|
||||
println(s"xPos: $excludedPos")
|
||||
println(s"trivial: $trivial")
|
||||
println(s"lessTrivial: $lessTrivial")
|
||||
println(s"ordering: $ordering")
|
||||
}
|
||||
def all {
|
||||
println(s"Cycles: $cycles")
|
||||
println(s"xNeg: $excludedNeg")
|
||||
println(s"xPos: $excludedPos")
|
||||
println(s"trivial: $trivial")
|
||||
println(s"lessTrivial: $lessTrivial")
|
||||
println(s"ordering: $ordering")
|
||||
}
|
||||
}
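
For orientation, a minimal sketch (not part of the commit) of the logic API the properties above exercise; the atoms and clause are made up, but only calls visible in the test are used:

object LogicUsageSketch {
  import sbt.logic._

  def demo(): Unit = {
    val Rain = Atom("Rain")
    val Wet = Atom("Wet")
    // A single clause: Rain proves Wet.
    val clauses = Rain.proves(Wet) :: Nil
    // Reduce with Rain as an initial fact and inspect what was proven.
    Logic.reduceAll(clauses, Set(Rain)) match {
      case Right(matched) => println(s"proven: ${matched.provenSet}") // expected to include Wet
      case Left(err) => println(s"reduction failed: $err")
    }
  }
}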
@@ -1,133 +1,131 @@
package sbt

import java.io.File
import org.scalacheck.{ Arbitrary, Gen, Prop, Properties }
import Prop._

import Process._

object ProcessSpecification extends Properties("Process I/O") {
  implicit val exitCodeArb: Arbitrary[Array[Byte]] = Arbitrary(
    for (
      size <- Gen.choose(0, 10);
      l <- Gen.listOfN[Byte](size, Arbitrary.arbByte.arbitrary)
    ) yield l.toArray
  )

  /*property("Correct exit code") = forAll( (exitCode: Byte) => checkExit(exitCode))
  property("#&& correct") = forAll( (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ #&& _)(_ && _))
  property("#|| correct") = forAll( (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ #|| _)(_ || _))
  property("### correct") = forAll( (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ ### _)( (x,latest) => latest))*/
  property("Pipe to output file") = forAll((data: Array[Byte]) => checkFileOut(data))
  property("Pipe from input file") = forAll((data: Array[Byte]) => checkFileIn(data))
  property("Pipe to process") = forAll((data: Array[Byte]) => checkPipe(data))
  property("Pipe to process ignores input exit code") = forAll((data: Array[Byte], code: Byte) => checkPipeExit(data, code))
  property("Pipe from input file to bad process preserves correct exit code.") = forAll((data: Array[Byte], code: Byte) => checkFileInExit(data, code))
  property("Pipe to output file from bad process preserves correct exit code.") = forAll((data: Array[Byte], code: Byte) => checkFileOutExit(data, code))

  private def checkBinary(codes: Array[Byte])(reduceProcesses: (ProcessBuilder, ProcessBuilder) => ProcessBuilder)(reduceExit: (Boolean, Boolean) => Boolean) =
    {
      (codes.length > 1) ==>
        {
          val unsignedCodes = codes.map(unsigned)
          val exitCode = unsignedCodes.map(code => Process(process("sbt.exit " + code))).reduceLeft(reduceProcesses) !
          val expectedExitCode = unsignedCodes.map(toBoolean).reduceLeft(reduceExit)
          toBoolean(exitCode) == expectedExitCode
        }
    }
  private def toBoolean(exitCode: Int) = exitCode == 0
  private def checkExit(code: Byte) =
    {
      val exitCode = unsigned(code)
      (process("sbt.exit " + exitCode) !) == exitCode
    }
  private def checkFileOut(data: Array[Byte]) =
    {
      withData(data) { (temporaryFile, temporaryFile2) =>
        val catCommand = process("sbt.cat " + temporaryFile.getAbsolutePath)
        catCommand #> temporaryFile2
      }
    }
  private def checkFileIn(data: Array[Byte]) =
    {
      withData(data) { (temporaryFile, temporaryFile2) =>
        val catCommand = process("sbt.cat")
        temporaryFile #> catCommand #> temporaryFile2
      }
    }
  private def checkPipe(data: Array[Byte]) =
    {
      withData(data) { (temporaryFile, temporaryFile2) =>
        val catCommand = process("sbt.cat")
        temporaryFile #> catCommand #| catCommand #> temporaryFile2
      }
    }
  private def checkPipeExit(data: Array[Byte], code: Byte) =
    withTempFiles { (a, b) =>
      IO.write(a, data)
      val catCommand = process("sbt.cat")
      val exitCommand = process(s"sbt.exit $code")
      val exit = (a #> exitCommand #| catCommand #> b).!
      (s"Exit code: $exit") |:
        (s"Output file length: ${b.length}") |:
        (exit == 0) &&
        (b.length == 0)
    }

  private def checkFileOutExit(data: Array[Byte], exitCode: Byte) =
    withTempFiles { (a, b) =>
      IO.write(a, data)
      val code = unsigned(exitCode)
      val command = process(s"sbt.exit $code")
      val exit = (a #> command #> b).!
      (s"Exit code: $exit, expected: $code") |:
        (s"Output file length: ${b.length}") |:
        (exit == code) &&
        (b.length == 0)
    }

  private def checkFileInExit(data: Array[Byte], exitCode: Byte) =
    withTempFiles { (a, b) =>
      IO.write(a, data)
      val code = unsigned(exitCode)
      val command = process(s"sbt.exit $code")
      val exit = (a #> command).!
      (s"Exit code: $exit, expected: $code") |:
        (exit == code)
    }

  private def temp() = File.createTempFile("sbt", "")
  private def withData(data: Array[Byte])(f: (File, File) => ProcessBuilder) =
    withTempFiles { (a, b) =>
      IO.write(a, data)
      val process = f(a, b)
      (process !) == 0 && sameFiles(a, b)
    }
  private def sameFiles(a: File, b: File) =
    IO.readBytes(a) sameElements IO.readBytes(b)

  private def withTempFiles[T](f: (File, File) => T): T =
    {
      val temporaryFile1 = temp()
      val temporaryFile2 = temp()
      try f(temporaryFile1, temporaryFile2)
      finally {
        temporaryFile1.delete()
        temporaryFile2.delete()
      }
    }
  private def unsigned(b: Int): Int = ((b: Int) + 256) % 256
  private def unsigned(b: Byte): Int = unsigned(b: Int)
  private def process(command: String) =
    {
      val ignore = echo // just for the compile dependency so that this test is rerun when TestedProcess.scala changes, not used otherwise

      val thisClasspath = List(getSource[Product], getSource[IO.type], getSource[SourceTag]).mkString(File.pathSeparator)
      "java -cp " + thisClasspath + " " + command
    }
  private def getSource[T: Manifest]: String =
    IO.classLocationFile[T].getAbsolutePath
}
private trait SourceTag
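
For orientation, a small sketch (not part of the commit) of the piping operators the specification exercises; the file names and the external "cat" command are placeholders and assume a Unix-like environment:

object ProcessPipeSketch {
  import java.io.File
  import sbt.Process._

  def demo(): Unit = {
    val in = new File("in.txt") // hypothetical input file
    val out = new File("out.txt") // hypothetical output file
    val cat = Process("cat") // external command, standing in for the spec's process(...) helper
    // Feed the input file through cat twice and into the output file, then run the pipeline.
    val exit = (in #> cat #| cat #> out).!
    println(s"pipeline exit code: $exit")
  }
}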
@@ -1,56 +1,47 @@
package sbt

import java.io.{ File, FileNotFoundException, IOException }

object exit {
  def main(args: Array[String]) {
    System.exit(java.lang.Integer.parseInt(args(0)))
  }
}
object cat {
  def main(args: Array[String]) {
    try {
      if (args.length == 0)
        IO.transfer(System.in, System.out)
      else
        catFiles(args.toList)
      System.exit(0)
    } catch {
      case e =>
        e.printStackTrace()
        System.err.println("Error: " + e.toString)
        System.exit(1)
    }
  }
  private def catFiles(filenames: List[String]): Option[String] =
    {
      filenames match {
        case head :: tail =>
          val file = new File(head)
          if (file.isDirectory)
            throw new IOException("Is directory: " + file)
          else if (file.exists) {
            Using.fileInputStream(file) { stream =>
              IO.transfer(stream, System.out)
            }
            catFiles(tail)
          } else
            throw new FileNotFoundException("No such file or directory: " + file)
        case Nil => None
      }
    }
}
object echo {
  def main(args: Array[String]) {
    System.out.println(args.mkString(" "))
  }
}
@@ -6,79 +6,79 @@ package sbt
import org.scalacheck._
import Prop._

object RelationTest extends Properties("Relation") {
  property("Added entry check") = forAll { (pairs: List[(Int, Double)]) =>
    val r = Relation.empty[Int, Double] ++ pairs
    check(r, pairs)
  }
  def check(r: Relation[Int, Double], pairs: Seq[(Int, Double)]) =
    {
      val _1s = pairs.map(_._1).toSet
      val _2s = pairs.map(_._2).toSet

      r._1s == _1s && r.forwardMap.keySet == _1s &&
        r._2s == _2s && r.reverseMap.keySet == _2s &&
        pairs.forall {
          case (a, b) =>
            (r.forward(a) contains b) &&
              (r.reverse(b) contains a) &&
              (r.forwardMap(a) contains b) &&
              (r.reverseMap(b) contains a)
        }
    }

  property("Does not contain removed entries") = forAll { (pairs: List[(Int, Double, Boolean)]) =>
    val add = pairs.map { case (a, b, c) => (a, b) }
    val added = Relation.empty[Int, Double] ++ add

    val removeFine = pairs.collect { case (a, b, true) => (a, b) }
    val removeCoarse = removeFine.map(_._1)
    val r = added -- removeCoarse

    def notIn[X, Y](map: Map[X, Set[Y]], a: X, b: Y) = map.get(a).forall(set => !(set contains b))

    all(removeCoarse) { rem =>
      ("_1s does not contain removed" |: (!r._1s.contains(rem))) &&
        ("Forward does not contain removed" |: r.forward(rem).isEmpty) &&
        ("Forward map does not contain removed" |: !r.forwardMap.contains(rem)) &&
        ("Removed is not a value in reverse map" |: !r.reverseMap.values.toSet.contains(rem))
    } &&
      all(removeFine) {
        case (a, b) =>
          ("Forward does not contain removed" |: (!r.forward(a).contains(b))) &&
            ("Reverse does not contain removed" |: (!r.reverse(b).contains(a))) &&
            ("Forward map does not contain removed" |: (notIn(r.forwardMap, a, b))) &&
            ("Reverse map does not contain removed" |: (notIn(r.reverseMap, b, a)))
      }
  }

  property("Groups correctly") = forAll { (entries: List[(Int, Double)], randomInt: Int) =>
    val splitInto = math.abs(randomInt) % 10 + 1 // Split into 1-10 groups.
    val rel = Relation.empty[Int, Double] ++ entries
    val grouped = rel groupBy (_._1 % splitInto)
    all(grouped.toSeq) {
      case (k, rel_k) => rel_k._1s forall { _ % splitInto == k }
    }
  }

  property("Computes size correctly") = forAll { (entries: List[(Int, Double)]) =>
    val rel = Relation.empty[Int, Double] ++ entries
    val expected = rel.all.size // Note: not entries.length, as entries may have duplicates.
    val computed = rel.size
    "Expected size: %d. Computed size: %d.".format(expected, computed) |: expected == computed
  }

  def all[T](s: Seq[T])(p: T => Prop): Prop =
    if (s.isEmpty) true else s.map(p).reduceLeft(_ && _)
}

object EmptyRelationTest extends Properties("Empty relation") {
  lazy val e = Relation.empty[Int, Double]

  property("Forward empty") = forAll { (i: Int) => e.forward(i).isEmpty }
  property("Reverse empty") = forAll { (i: Double) => e.reverse(i).isEmpty }
  property("Forward map empty") = e.forwardMap.isEmpty
  property("Reverse map empty") = e.reverseMap.isEmpty
  property("_1 empty") = e._1s.isEmpty
  property("_2 empty") = e._2s.isEmpty
}
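
A brief sketch (not part of the commit) of the Relation operations the properties above check; the Int/String pairs are arbitrary examples:

object RelationUsageSketch {
  import sbt.Relation

  def demo(): Unit = {
    val r = Relation.empty[Int, String] ++ Seq(1 -> "a", 1 -> "b", 2 -> "a")
    println(r.forward(1)) // the _2s related to 1: Set(a, b)
    println(r.reverse("a")) // the _1s related to "a": Set(1, 2)
    println(r.size) // 3 distinct pairs
    println(r -- Seq(1)) // removing a _1 drops every pair it participates in
  }
}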