author    Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>    2010-10-28 12:10:00 +0000
committer Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>    2010-10-28 12:10:00 +0000
commit    8d311558f3774cd628a53fc675da93b550d06090 (patch)
tree      6811285bc37af76d356980adaa579fa1d181faaf /test/files/scalacheck
parent    962a348ab26f189a19dd74aeb3bbc8fd5d63061a (diff)
Debugging parallel hash tables.
No review.
Diffstat (limited to 'test/files/scalacheck')
-rw-r--r--  test/files/scalacheck/parallel-collections/IntOperators.scala          | 105
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala  |  24
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala | 529
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala     |  14
-rw-r--r--  test/files/scalacheck/parallel-collections/pc.scala                    |  12
5 files changed, 361 insertions(+), 323 deletions(-)
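
For orientation before the per-file diffs: the patch makes broken parallel hash tables visible from the scalacheck suite. ParallelIterableCheck gains a checkDataStructureInvariants hook (defaulting to true) that the re-enabled mapping and filtering properties call on the parallel result, ParallelHashMapCheck overrides it to validate the resulting ParHashMap, and pc.scala switches the previously disabled checks back on with larger test sizes. A minimal sketch of the hook pattern, reusing the method names from the diff (the trait names and simplified bodies here are illustrative stand-ins, not the patched code):

  // Sketch of the validity hook threaded through the checks below.
  // Method names mirror the diff; bodies are simplified stand-ins.
  trait IterableCheckSketch[T] {
    // Default: accept any data structure; concrete checks override this.
    def checkDataStructureInvariants(orig: Traversable[T], ds: AnyRef): Boolean = true

    def printDataStructureDebugInfo(ds: AnyRef) {
      // can be overridden in subclasses
    }
  }

  trait MapCheckSketch extends IterableCheckSketch[(Int, Int)] {
    override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
      case m: scala.collection.Map[_, _] =>
        val mm = m.asInstanceOf[scala.collection.Map[Int, Int]]
        // every original key/value pair must still be retrievable
        orig.forall { case (k, v) => mm.get(k) == Some(v) }
      case _ => true
    }
  }
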
diff --git a/test/files/scalacheck/parallel-collections/IntOperators.scala b/test/files/scalacheck/parallel-collections/IntOperators.scala
index 2b7e0191a2..8d214b614f 100644
--- a/test/files/scalacheck/parallel-collections/IntOperators.scala
+++ b/test/files/scalacheck/parallel-collections/IntOperators.scala
@@ -10,82 +10,85 @@ trait IntOperators extends Operators[Int] {
def forallPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ != 55, _ != 505, _ != 5005)
def existsPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ == 55, _ == 505, _ == 5005)
def findPredicates = List(_ >= 0, _ % 2 == 0, _ < 0, _ == 50, _ == 500, _ == 5000)
- def mapFunctions = List(-_)
+ def mapFunctions = List(-_, math.abs(_), _ % 2, _ % 3, _ % 4, _ % 150, _ % 500)
def partialMapFunctions = List({case x => -x}, { case 0 => -1; case x if x > 0 => x + 1}, {case x if x % 3 == 0 => x / 3})
def flatMapFunctions = List(
- (n: Int) => if (n < 0) List() else if (n % 2 == 0) List(1, 2, 3) else List(4, 5, 6),
- (n: Int) => List[Int](),
- (n: Int) => if (n == 0) List(1, 2, 3, 4, 5) else if (n < 0) List(1, 2, 3) else List()
- )
+ (n: Int) => if (n < 0) List() else if (n % 2 == 0) List(1, 2, 3) else List(4, 5, 6),
+ (n: Int) => List[Int](),
+ (n: Int) => if (n == 0) List(1, 2, 3, 4, 5) else if (n < 0) List(1, 2, 3) else List()
+ )
def filterPredicates = List(
- _ % 2 == 0, _ % 3 == 0, n => n > 50 && n < 100, _ >= 0, _ < 0, _ == 99,
- _ > 500, _ > 5000, _ > 50000)
+ _ % 2 == 0, _ % 3 == 0, _ % 4 != 0, _ % 17 != 0, n => n > 50 && n < 100, _ >= 0, _ < 0, _ == 99,
+ _ > 500, _ > 5000, _ > 50000, _ < 500, _ < 50, _ < -50, _ < -5e5,
+ x => true, x => false, x => x % 53 == 0 && x % 17 == 0
+ )
def filterNotPredicates = filterPredicates
def partitionPredicates = filterPredicates
def takeWhilePredicates = List(
- _ != 50, _ != 500, _ != 5000, _ != 50000, _ % 2 == 0, _ % 3 == 1, _ % 47 != 0,
- _ < 100, _ < 1000, _ < 10000, _ < 0,
- _ < -100, _ < -1000, _ > -200, _ > -50,
- n => -90 < n && n < -10,
- n => 50 < n && n < 550,
- n => 5000 < n && n < 7500)
+ _ != 50, _ != 500, _ != 5000, _ != 50000, _ % 2 == 0, _ % 3 == 1, _ % 47 != 0,
+ _ < 100, _ < 1000, _ < 10000, _ < 0,
+ _ < -100, _ < -1000, _ > -200, _ > -50,
+ n => -90 < n && n < -10,
+ n => 50 < n && n < 550,
+ n => 5000 < n && n < 7500
+ )
def dropWhilePredicates = takeWhilePredicates
def spanPredicates = takeWhilePredicates
def foldArguments = List(
- (0, _ + _),
- (1, _ * _),
- (Int.MinValue, math.max(_, _))
- )
+ (0, _ + _),
+ (1, _ * _),
+ (Int.MinValue, math.max(_, _))
+ )
def addAllTraversables = List(
- List[Int](),
- List(1),
- List(1, 2),
- List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
- Array.fill(1000)(1).toSeq
- )
+ List[Int](),
+ List(1),
+ List(1, 2),
+ List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
+ Array.fill(1000)(1).toSeq
+ )
def newArray(sz: Int) = new Array[Int](sz)
}
trait IntSeqOperators extends IntOperators with SeqOperators[Int] {
def segmentLengthPredicates = List(
- _ % 2 == 0, _ > 0, _ >= 0, _ < 0, _ <= 0, _ > -5000, _ > 5000, _ % 541 != 0, _ < -50, _ > 500,
- n => -90 < n && n < -10, n => 500 < n && n < 1500
+ _ % 2 == 0, _ > 0, _ >= 0, _ < 0, _ <= 0, _ > -5000, _ > 5000, _ % 541 != 0, _ < -50, _ > 500,
+ n => -90 < n && n < -10, n => 500 < n && n < 1500
)
def indexWherePredicates = List(
- _ % 2 == 0, _ % 11 == 0, _ % 123 == 0, _ % 901 == 0,
- _ > 0, _ >= 0, _ < 0, _ <= 0,
- _ > 50, _ > 500, _ > 5000,
- _ < -10, _ < -100, _ < -1000,
- n => n > 50 && n < 100,
- n => n * n > 1000000 && n % 111 == 0
+ _ % 2 == 0, _ % 11 == 0, _ % 123 == 0, _ % 901 == 0,
+ _ > 0, _ >= 0, _ < 0, _ <= 0,
+ _ > 50, _ > 500, _ > 5000,
+ _ < -10, _ < -100, _ < -1000,
+ n => n > 50 && n < 100,
+ n => n * n > 1000000 && n % 111 == 0
)
def lastIndexWherePredicates = List(
- _ % 2 == 0, _ % 17 == 0, _ % 314 == 0, _ % 1017 == 0,
- _ > 0, _ >= 0, _ < 0, _ <= 0,
- _ > 50, _ > 500, _ > 5000,
- _ < -20, _ < -200, _ < -2000,
- _ == 0,
- n => n > -40 && n < 40,
- n => n > -80 && n < -10,
- n => n > 110 && n < 150
+ _ % 2 == 0, _ % 17 == 0, _ % 314 == 0, _ % 1017 == 0,
+ _ > 0, _ >= 0, _ < 0, _ <= 0,
+ _ > 50, _ > 500, _ > 5000,
+ _ < -20, _ < -200, _ < -2000,
+ _ == 0,
+ n => n > -40 && n < 40,
+ n => n > -80 && n < -10,
+ n => n > 110 && n < 150
)
def reverseMapFunctions = List(-_, n => n * n, _ + 1)
def sameElementsSeqs = List(
- List[Int](),
- List(1),
- List(1, 2, 3, 4, 5, 6, 7, 8, 9),
- Array.fill(150)(1).toSeq,
- Array.fill(1000)(1).toSeq
+ List[Int](),
+ List(1),
+ List(1, 2, 3, 4, 5, 6, 7, 8, 9),
+ Array.fill(150)(1).toSeq,
+ Array.fill(1000)(1).toSeq
)
def startEndSeqs = List(
- Nil,
- List(1),
- List(1, 2, 3, 4, 5),
- List(0, 1, 2, 3, 4, 5),
- List(4, 5, 6, 7, 8, 9, 10),
- List(4, 5, 6, 7, 8, 9, 0),
- List(-4, -3, -2, -1)
+ Nil,
+ List(1),
+ List(1, 2, 3, 4, 5),
+ List(0, 1, 2, 3, 4, 5),
+ List(4, 5, 6, 7, 8, 9, 10),
+ List(4, 5, 6, 7, 8, 9, 0),
+ List(-4, -3, -2, -1)
)
}
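
The operator lists above are plain data; they only take effect through the generic properties in ParallelIterableCheck further down, where each list is zipped with its indices and folded into one labelled ScalaCheck property. Roughly, for a predicate list such as filterPredicates (a sketch with a hypothetical helper; t is the sequential collection, coll its parallel counterpart, both supporting filter):

  // Sketch: how a list such as filterPredicates is consumed by a property.
  import org.scalacheck.Prop._

  def filtersAgree(t: Traversable[Int],
                   coll: Traversable[Int],
                   filterPredicates: List[Int => Boolean]) =
    (for ((p, ind) <- filterPredicates.zipWithIndex) yield {
      // label each sub-property with the operator's index for diagnostics
      ("op index: " + ind) |: t.filter(p) == coll.filter(p)
    }).reduceLeft(_ && _)
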
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
index 5278d28f0f..d53c0ba9d6 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
@@ -58,10 +58,32 @@ with PairValues[Int, Int]
override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
case pm: ParHashMap[k, v] =>
- println("Mutable parallel hash map")
+ println("Mutable parallel hash map\n" + pm.hashTableContents.debugInformation)
case _ =>
println("could not match data structure type: " + ds.getClass)
}
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ val invs = pm.brokenInvariants
+
+ val containsall = (for ((k, v) <- orig) yield {
+ if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ else {
+ println("Does not contain original element: " + (k, v))
+ false
+ }
+ }).foldLeft(true)(_ && _)
+
+
+ if (invs.isEmpty) containsall
+ else {
+ println("Invariants broken:\n" + invs.mkString("\n"))
+ false
+ }
+ case _ => true
+ }
+
}
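
The override above relies on two helpers from the parallel hash table implementation: pm.brokenInvariants, which reports violated internal invariants, and pm.hashTableContents.debugInformation, which dumps the table for the debug printout. Purely as an illustration of the kind of check such a method performs (this is not the library code), an invariant check over a hypothetical bucket-array table could look like:

  // Illustrative only: invariants one might check on a hypothetical
  // bucket-array hash table; not the actual ParHashMap implementation.
  def brokenInvariantsOf(buckets: Array[List[(Int, Int)]], recordedSize: Int): List[String] = {
    val errors = new scala.collection.mutable.ListBuffer[String]
    val actualSize = buckets.map(_.length).sum
    if (actualSize != recordedSize)
      errors += ("recorded size " + recordedSize + " does not match " + actualSize + " stored entries")
    for ((bucket, i) <- buckets.zipWithIndex; (k, _) <- bucket)
      if (math.abs(k.## % buckets.length) != i)  // simplistic bucket index function
        errors += ("key " + k + " found in the wrong bucket: " + i)
    errors.toList
  }
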
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 52edf9e641..0acdb2b0a7 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -29,9 +29,17 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
sz =>
ofSize(vals, sz)
),
- for (sz <- choose(1000, 2000)) yield ofSize(vals, sz)
+ for (sz <- choose(1000, 2000)) yield ofSize(vals, sz),
+ for (sz <- choose(4000, 4001)) yield ofSize(vals, sz),
+ for (sz <- choose(10000, 10001)) yield ofSize(vals, sz)
)
+ // used to check if the constructed collection is valid
+ def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = {
+ // can be overridden in subclasses
+ true
+ }
+
def printDataStructureDebugInfo(cf: AnyRef) {
// can be overridden in subclasses
}
@@ -63,6 +71,17 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
(inst, fromTraversable(inst), modif)
}
+ def areEqual(t1: Traversable[T], t2: Traversable[T]) = if (hasStrictOrder) {
+ t1 == t2 && t2 == t1
+ } else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
+ case (m1: Map[_, _], m2: Map[_, _]) => m1 == m2 && m2 == m1
+ case (i1: Iterable[_], i2: Iterable[_]) =>
+ val i1s = i1.toSet
+ val i2s = i2.toSet
+ i1s == i2s && i2s == i1s
+ case _ => t1 == t2 && t2 == t1
+ }
+
property("reductions must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
if (t.size != 0) {
val results = for ((op, ind) <- reduceOperators.zipWithIndex) yield {
@@ -81,263 +100,257 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
} else "has size 0" |: true
}
- // property("counts must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield {
- // val tc = t.count(pred)
- // val cc = coll.count(pred)
- // if (tc != cc) {
- // println("from: " + t)
- // println("and: " + coll.toList)
- // println(tc)
- // println(cc)
- // }
- // ("op index: " + ind) |: tc == cc
- // }
- // results.reduceLeft(_ && _)
- // }
-
- // property("forall must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // val results = for ((pred, ind) <- forallPredicates.zipWithIndex)
- // yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred)
- // results.reduceLeft(_ && _)
- // }
-
- // property("exists must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // val results = for ((pred, ind) <- existsPredicates.zipWithIndex)
- // yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred)
- // results.reduceLeft(_ && _)
- // }
-
- // property("both must find or not find an element") = forAll(collectionPairs) { case (t, coll) =>
- // val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield {
- // val ft = t.find(pred)
- // val fcoll = coll.find(pred)
- // ("op index: " + ind) |: ((ft == None && fcoll == None) || (ft != None && fcoll != None))
- // }
- // results.reduceLeft(_ && _)
- // }
-
- // def areEqual(t1: Traversable[T], t2: Traversable[T]) = if (hasStrictOrder) {
- // t1 == t2
- // } else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
- // case (m1: Map[_, _], m2: Map[_, _]) => m1 == m2
- // case (i1: Iterable[_], i2: Iterable[_]) => i1.toSet == i2.toSet
- // case _ => t1 == t2
- // }
-
- // property("mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
- // val ms = t.map(f)
- // val mp = coll.map(f)
- // if (!areEqual(ms, mp)) {
- // println(t)
- // println(coll)
- // println("mapped to: ")
- // println(ms)
- // println(mp)
- // }
- // ("op index: " + ind) |: areEqual(ms, mp)
- // }
- // results.reduceLeft(_ && _)
- // }
-
- // property("collects must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
- // val ps = t.collect(f)
- // val pp = coll.collect(f)
- // if (!areEqual(ps, pp)) {
- // println(t)
- // println(coll)
- // println("collected to: ")
- // println(ps)
- // println(pp)
- // }
- // ("op index: " + ind) |: areEqual(ps, pp)
- // }
- // results.reduceLeft(_ && _)
- // }
-
- // property("flatMaps must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((f, ind) <- flatMapFunctions.zipWithIndex)
- // yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _)
- // }
-
- // property("filters must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((p, ind) <- filterPredicates.zipWithIndex) yield {
- // val tf = t.filter(p)
- // val cf = coll.filter(p)
- // if (tf != cf || cf != tf) {
- // println(t)
- // println(coll)
- // println("filtered to:")
- // println(cf)
- // println(tf)
- // println("tf == cf - " + (tf == cf))
- // println("cf == tf - " + (cf == tf))
- // printDataStructureDebugInfo(cf)
- // }
- // ("op index: " + ind) |: tf == cf && cf == tf
- // }).reduceLeft(_ && _)
- // }
-
- // property("filterNots must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((p, ind) <- filterNotPredicates.zipWithIndex)
- // yield ("op index: " + ind) |: t.filterNot(p) == coll.filterNot(p)).reduceLeft(_ && _)
- // }
-
- // if (!isCheckingViews) property("partitions must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
- // val tpart = t.partition(p)
- // val cpart = coll.partition(p)
- // if (tpart != cpart) {
- // println("from: " + t)
- // println("and: " + coll)
- // println(cpart)
- // println(tpart)
- // }
- // ("op index: " + ind) |: tpart == cpart
- // }).reduceLeft(_ && _)
- // }
-
- // if (hasStrictOrder) property("takes must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
- // ("take " + n + " elements") |: t.take(n) == coll.take(n)
- // }
-
- // if (hasStrictOrder) property("drops must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
- // ("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
- // }
-
- // if (hasStrictOrder) property("slices must be equal") = forAll(collectionPairsWith2Indices)
- // { case (t, coll, fr, slicelength) =>
- // val from = if (fr < 0) 0 else fr
- // val until = if (from + slicelength > t.size) t.size else from + slicelength
- // val tsl = t.slice(from, until)
- // val collsl = coll.slice(from, until)
- // if (tsl != collsl) {
- // println("---------------------- " + from + ", " + until)
- // println("from: " + t)
- // println("and: " + coll)
- // println(tsl)
- // println(collsl)
- // println("as list: " + collsl.toList)
- // println(collsl.iterator.hasNext)
- // println(collsl.iterator.next)
- // println(collsl.iterator.hasNext)
- // println(collsl.iterator.next)
- // println(collsl.iterator.hasNext)
- // println(collsl.iterator.next)
- // println(collsl.iterator.hasNext)
- // }
- // ("slice from " + from + " until " + until) |: tsl == collsl
- // }
-
- // if (hasStrictOrder) property("splits must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
- // val tspl = t.splitAt(n)
- // val cspl = coll.splitAt(n)
- // if (tspl != cspl) {
- // println("at: " + n)
- // println("from: " + t)
- // println("and: " + coll)
- // println(tspl)
- // println(cspl)
- // }
- // ("splitAt " + n) |: tspl == cspl
- // }
-
- // if (hasStrictOrder) property("takeWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
- // val tt = t.takeWhile(pred)
- // val ct = coll.takeWhile(pred)
- // if (tt != ct) {
- // println("from: " + t)
- // println("and: " + coll)
- // println("taking while...")
- // println(tt)
- // println(ct)
- // }
- // ("operator " + ind) |: tt == ct
- // }).reduceLeft(_ && _)
- // }
-
- // if (hasStrictOrder) property("spans must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
- // val tsp = t.span(pred)
- // val csp = coll.span(pred)
- // if (tsp != csp) {
- // println("from: " + t)
- // println("and: " + coll)
- // println("span with predicate " + ind)
- // println(tsp)
- // println(csp)
- // println("---------------------------------")
- // println(coll.span(pred))
- // println("---------------------------------")
- // }
- // ("operator " + ind) |: tsp == csp
- // }).reduceLeft(_ && _)
- // }
-
- // if (hasStrictOrder) property("dropWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
- // ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
- // }).reduceLeft(_ && _)
- // }
-
- // property("folds must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
- // (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
- // val tres = t.foldLeft(first)(op)
- // val cres = coll.fold(first)(op)
- // if (cres != tres) {
- // println("from: " + t)
- // println("and: " + coll)
- // println("folds are: ")
- // println(tres)
- // println(cres)
- // }
- // ("operator " + ind) |: tres == cres
- // }).reduceLeft(_ && _)
- // }
-
- // property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
- // val toadd = colltoadd
- // val tr = t ++ toadd.iterator
- // val cr = coll ++ toadd.iterator
- // if (!areEqual(tr, cr)) {
- // println("from: " + t)
- // println("and: " + coll.iterator.toList)
- // println("adding: " + toadd)
- // println(tr.toList)
- // println(cr.iterator.toList)
- // }
- // ("adding " |: areEqual(tr, cr)) &&
- // (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
- // val tadded = t ++ trav
- // val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
- // if (!areEqual(tadded, cadded)) {
- // println("----------------------")
- // println("from: " + t)
- // println("and: " + coll)
- // println("adding: " + trav)
- // println(tadded)
- // println(cadded)
- // }
- // ("traversable " + ind) |: areEqual(tadded, cadded)
- // }).reduceLeft(_ && _)
- // }
-
- // if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) =>
- // val tarr = newArray(t.size)
- // val collarr = newArray(coll.size)
- // t.copyToArray(tarr, 0, t.size)
- // coll.copyToArray(collarr, 0, coll.size)
- // if (tarr.toSeq != collarr.toSeq) {
- // println("from: " + t)
- // println("and: " + coll)
- // println(tarr.toSeq)
- // println(collarr.toSeq)
- // }
- // tarr.toSeq == collarr.toSeq
- // }
+ property("counts must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield {
+ val tc = t.count(pred)
+ val cc = coll.count(pred)
+ if (tc != cc) {
+ println("from: " + t)
+ println("and: " + coll.toList)
+ println(tc)
+ println(cc)
+ }
+ ("op index: " + ind) |: tc == cc
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("forall must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- forallPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred)
+ results.reduceLeft(_ && _)
+ }
+
+ property("exists must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- existsPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred)
+ results.reduceLeft(_ && _)
+ }
+
+ property("both must find or not find an element") = forAll(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield {
+ val ft = t.find(pred)
+ val fcoll = coll.find(pred)
+ ("op index: " + ind) |: ((ft == None && fcoll == None) || (ft != None && fcoll != None))
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
+ val ms = t.map(f)
+ val mp = coll.map(f)
+ if (!areEqual(ms, mp) || !checkDataStructureInvariants(ms, mp)) {
+ println(t)
+ println(coll)
+ println("mapped to: ")
+ println(ms)
+ println(mp)
+ println("valid: " + !checkDataStructureInvariants(ms, mp))
+ }
+ ("op index: " + ind) |: (areEqual(ms, mp) && checkDataStructureInvariants(ms, mp))
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("collects must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
+ val ps = t.collect(f)
+ val pp = coll.collect(f)
+ if (!areEqual(ps, pp)) {
+ println(t)
+ println(coll)
+ println("collected to: ")
+ println(ps)
+ println(pp)
+ }
+ ("op index: " + ind) |: areEqual(ps, pp)
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("flatMaps must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((f, ind) <- flatMapFunctions.zipWithIndex)
+ yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _)
+ }
+
+ property("filters must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- filterPredicates.zipWithIndex) yield {
+ val tf = t.filter(p)
+ val cf = coll.filter(p)
+ if (tf != cf || cf != tf || !checkDataStructureInvariants(tf, cf)) {
+ println(t)
+ println(coll)
+ println("filtered to:")
+ println(cf)
+ println(tf)
+ println("tf == cf - " + (tf == cf))
+ println("cf == tf - " + (cf == tf))
+ printDataStructureDebugInfo(cf)
+ println("valid: " + checkDataStructureInvariants(tf, cf))
+ }
+ ("op index: " + ind) |: tf == cf && cf == tf && checkDataStructureInvariants(tf, cf)
+ }).reduceLeft(_ && _)
+ }
+
+ property("filterNots must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- filterNotPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.filterNot(p) == coll.filterNot(p)).reduceLeft(_ && _)
+ }
+
+ if (!isCheckingViews) property("partitions must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
+ val tpart = t.partition(p)
+ val cpart = coll.partition(p)
+ if (tpart != cpart) {
+ println("from: " + t)
+ println("and: " + coll)
+ println(cpart)
+ println(tpart)
+ }
+ ("op index: " + ind) |: tpart == cpart
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("takes must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
+ ("take " + n + " elements") |: t.take(n) == coll.take(n)
+ }
+
+ if (hasStrictOrder) property("drops must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
+ ("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
+ }
+
+ if (hasStrictOrder) property("slices must be equal") = forAll(collectionPairsWith2Indices)
+ { case (t, coll, fr, slicelength) =>
+ val from = if (fr < 0) 0 else fr
+ val until = if (from + slicelength > t.size) t.size else from + slicelength
+ val tsl = t.slice(from, until)
+ val collsl = coll.slice(from, until)
+ if (tsl != collsl) {
+ println("---------------------- " + from + ", " + until)
+ println("from: " + t)
+ println("and: " + coll)
+ println(tsl)
+ println(collsl)
+ println("as list: " + collsl.toList)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ }
+ ("slice from " + from + " until " + until) |: tsl == collsl
+ }
+
+ if (hasStrictOrder) property("splits must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
+ val tspl = t.splitAt(n)
+ val cspl = coll.splitAt(n)
+ if (tspl != cspl) {
+ println("at: " + n)
+ println("from: " + t)
+ println("and: " + coll)
+ println(tspl)
+ println(cspl)
+ }
+ ("splitAt " + n) |: tspl == cspl
+ }
+
+ if (hasStrictOrder) property("takeWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
+ val tt = t.takeWhile(pred)
+ val ct = coll.takeWhile(pred)
+ if (tt != ct) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("taking while...")
+ println(tt)
+ println(ct)
+ }
+ ("operator " + ind) |: tt == ct
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("spans must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
+ val tsp = t.span(pred)
+ val csp = coll.span(pred)
+ if (tsp != csp) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("span with predicate " + ind)
+ println(tsp)
+ println(csp)
+ println("---------------------------------")
+ println(coll.span(pred))
+ println("---------------------------------")
+ }
+ ("operator " + ind) |: tsp == csp
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("dropWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
+ ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
+ }).reduceLeft(_ && _)
+ }
+
+ property("folds must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
+ (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
+ val tres = t.foldLeft(first)(op)
+ val cres = coll.fold(first)(op)
+ if (cres != tres) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("folds are: ")
+ println(tres)
+ println(cres)
+ }
+ ("operator " + ind) |: tres == cres
+ }).reduceLeft(_ && _)
+ }
+
+ property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
+ val toadd = colltoadd
+ val tr = t ++ toadd.iterator
+ val cr = coll ++ toadd.iterator
+ if (!areEqual(tr, cr)) {
+ println("from: " + t)
+ println("and: " + coll.iterator.toList)
+ println("adding: " + toadd)
+ println(tr.toList)
+ println(cr.iterator.toList)
+ }
+ ("adding " |: areEqual(tr, cr)) &&
+ (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+ val tadded = t ++ trav
+ val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+ if (!areEqual(tadded, cadded)) {
+ println("----------------------")
+ println("from: " + t)
+ println("and: " + coll)
+ println("adding: " + trav)
+ println(tadded)
+ println(cadded)
+ }
+ ("traversable " + ind) |: areEqual(tadded, cadded)
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ val tarr = newArray(t.size)
+ val collarr = newArray(coll.size)
+ t.copyToArray(tarr, 0, t.size)
+ coll.copyToArray(collarr, 0, coll.size)
+ if (tarr.toSeq != collarr.toSeq) {
+ println("from: " + t)
+ println("and: " + coll)
+ println(tarr.toSeq)
+ println(collarr.toSeq)
+ }
+ tarr.toSeq == collarr.toSeq
+ }
}
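
A note on the areEqual helper re-enabled in ParallelIterableCheck above: it compares in both directions (t1 == t2 && t2 == t1) to catch an asymmetric equals between a parallel collection and its sequential counterpart, and for collections without a strict order it falls back to Map or Set equality, because two collections with the same elements may enumerate them in different orders. A tiny example of that order problem (illustrative values only):

  // Same elements, different enumeration order: an order-sensitive
  // comparison fails spuriously, the set-based one does not.
  val sequentialSide = List(1, 2, 3, 4)
  val parallelOrder  = List(3, 1, 4, 2)               // order a parallel traversal might yield
  assert(sequentialSide != parallelOrder)             // sequence equality is order-sensitive
  assert(sequentialSide.toSet == parallelOrder.toSet) // the fallback comparison succeeds
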
diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
index c4f241c02e..6b30f61b57 100644
--- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
@@ -17,13 +17,13 @@ import scala.collection.parallel._
abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) {
type CollType <: ParMap[K, V] with Sequentializable[(K, V), Map[K, V]]
- // property("gets iterated keys") = forAll(collectionPairs) {
- // case (t, coll) =>
- // val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
- // val containsSelf = for ((k, v) <- coll) yield (coll.get(k) == Some(v))
- // ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
- // ("Par contains elements of itself" |: containsSelf.forall(_ == true))
- // }
+ property("gets iterated keys") = forAll(collectionPairs) {
+ case (t, coll) =>
+ val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
+ val containsSelf = for ((k, v) <- coll) yield (coll.get(k) == Some(v))
+ ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
+ ("Par contains elements of itself" |: containsSelf.forall(_ == true))
+ }
}
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index fe406a9408..aba8efef6a 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -11,16 +11,16 @@ class ParCollProperties extends Properties("Parallel collections") {
/* Collections */
// parallel arrays
- //include(mutable.IntParallelArrayCheck)
+ include(mutable.IntParallelArrayCheck)
// parallel ranges
- //include(immutable.ParallelRangeCheck)
+ include(immutable.ParallelRangeCheck)
// parallel immutable hash maps (tries)
- //include(immutable.IntIntParallelHashMapCheck)
+ include(immutable.IntIntParallelHashMapCheck)
// parallel immutable hash sets (tries)
- //include(immutable.IntParallelHashSetCheck)
+ include(immutable.IntParallelHashSetCheck)
// parallel mutable hash maps (tables)
include(mutable.IntIntParallelHashMapCheck)
@@ -44,8 +44,8 @@ object Test {
testCallback = new ConsoleReporter(0),
workers = 1,
minSize = 0,
- maxSize = 250,
- minSuccessfulTests = 250
+ maxSize = 4000,
+ minSuccessfulTests = 100
),
pc
)
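
For context on the new numbers: maxSize is the upper bound of the size parameter that ScalaCheck hands to Gen.sized generators (such as the instance generators in ParallelIterableCheck above), so raising it from 250 to 4000 lets the suite build collections with thousands of elements, while lowering minSuccessfulTests from 250 to 100 keeps the total running time bounded. A small illustrative size-driven generator, not taken from the suite:

  import org.scalacheck.Gen

  // ScalaCheck picks a size between minSize and maxSize for each run and
  // passes it to Gen.sized, so maxSize = 4000 permits lists of up to
  // 4000 elements here.
  val sizedInts: Gen[List[Int]] =
    Gen.sized(sz => Gen.listOfN(sz, Gen.choose(-100, 100)))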