Diffstat (limited to 'test/files/scalacheck/parallel-collections')
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala    | 98
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala | 30
-rw-r--r--  test/files/scalacheck/parallel-collections/pc.scala                    |  3
3 files changed, 116 insertions, 15 deletions
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
new file mode 100644
index 0000000000..d1924f0ada
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -0,0 +1,98 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+abstract class ParallelCtrieCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParCtrie[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParCtrie[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val ct = new mutable.Ctrie[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) ct += sample(gen)
+ ct
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val pct = new ParCtrie[K, V]
+ var i = 0
+ for (kv <- t.toList) {
+ pct += kv
+ i += 1
+ }
+ pct
+ }
+
+}
+
+
+object IntIntParallelCtrieCheck extends ParallelCtrieCheck[Int, Int]("Int, Int")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParCtrie[k, v] =>
+ println("Mutable parallel ctrie")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
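For context, the new ParallelCtrieCheck builds the test collections by inserting generated pairs into a mutable.Ctrie and a ParCtrie with +=. A minimal stand-alone property in the same spirit is sketched below: every pair inserted into a Ctrie can be looked up afterwards. This is an illustrative sketch, not part of the commit; it assumes the 2.10-era scala.collection.mutable.Ctrie API used in this diff, and the object name CtrieContainsAllCheck is hypothetical.

// Illustrative sketch (not part of this commit): a stand-alone property in the
// spirit of ParallelCtrieCheck, assuming the 2.10-era mutable.Ctrie API.
import org.scalacheck._
import org.scalacheck.Prop._

object CtrieContainsAllCheck extends Properties("mutable.Ctrie") {
  property("contains every inserted pair") = forAll { (kvs: List[(Int, Int)]) =>
    val ct = new scala.collection.mutable.Ctrie[Int, Int]
    for (kv <- kvs) ct += kv                           // insert each key/value pair
    kvs.forall { case (k, _) => ct.get(k).isDefined }  // every inserted key must be retrievable
  }
}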
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index fbacb9f45c..8273e302a2 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -414,21 +414,21 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- // property("groupBy must be equal") = forAll(collectionPairs) {
- // case (t, coll) =>
- // (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
- // val tgroup = t.groupBy(f)
- // val cgroup = coll.groupBy(f)
- // if (tgroup != cgroup || cgroup != tgroup) {
- // println("from: " + t)
- // println("and: " + coll)
- // println("groups are: ")
- // println(tgroup)
- // println(cgroup)
- // }
- // ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
- // }).reduceLeft(_ && _)
- // }
+ property("groupBy must be equal") = forAll(collectionPairs) {
+ case (t, coll) =>
+ (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
+ val tgroup = t.groupBy(f)
+ val cgroup = coll.groupBy(f)
+ if (tgroup != cgroup || cgroup != tgroup) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("groups are: ")
+ println(tgroup)
+ println(cgroup)
+ }
+ ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
+ }).reduceLeft(_ && _)
+ }
}
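The re-enabled property relies on cross-equality between sequential and parallel collections (tgroup == cgroup checked in both directions). Below is a minimal stand-alone sketch of the same idea for a Vector and its .par view; it is illustrative only, the grouping function and object name are made up, and it assumes the 2.10-era parallel-collection equality contract that the property above exercises.

// Illustrative sketch (not part of this commit): the groupBy-equivalence idea
// from the re-enabled property, checked for a Vector against its .par view.
import org.scalacheck._
import org.scalacheck.Prop._

object GroupByEquivalenceSketch extends Properties("groupBy equivalence") {
  property("sequential and parallel groupBy agree") = forAll { (xs: Vector[Int]) =>
    val seqGroups = xs.groupBy(_ % 3)      // Map[Int, Vector[Int]]
    val parGroups = xs.par.groupBy(_ % 3)  // parallel map of parallel sequences
    // Parallel collections are designed to compare equal to their sequential
    // counterparts element-wise, which is what the property above depends on.
    seqGroups == parGroups && parGroups == seqGroups
  }
}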
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index cc0382303a..8a0dba3c25 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -25,6 +25,9 @@ class ParCollProperties extends Properties("Parallel collections") {
// parallel mutable hash maps (tables)
include(mutable.IntIntParallelHashMapCheck)
+ // parallel ctrie
+ include(mutable.IntIntParallelCtrieCheck)
+
// parallel mutable hash sets (tables)
include(mutable.IntParallelHashSetCheck)
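Checks like IntIntParallelCtrieCheck are aggregated into the suite via ScalaCheck's Properties.include, as pc.scala does above. A minimal illustrative sketch of that composition follows; the suite name and wrapper class are hypothetical and not part of the commit.

// Illustrative sketch (not part of this commit): composing the new check into
// a suite the same way pc.scala does, via Properties.include.
import org.scalacheck.Properties
import scala.collection.parallel.mutable

class CtrieOnlySuite extends Properties("Parallel Ctrie only") {
  // Pull in the newly added parallel Ctrie check.
  include(mutable.IntIntParallelCtrieCheck)
}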