summaryrefslogtreecommitdiff
path: root/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
diff options
context:
space:
mode:
authorAleksandar Pokopec <aleksandar.prokopec@epfl.ch>2010-10-28 12:09:49 +0000
committerAleksandar Pokopec <aleksandar.prokopec@epfl.ch>2010-10-28 12:09:49 +0000
commitf388aaaf52dab4ceaf8e5f26c72eb4a0d1d3b3e7 (patch)
tree2a5770c3ca5f08410dd197035e3bae7a0b52ba23 /test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
parent4ddb4ce1e28afe15f9335a4bd51e07f68161b27c (diff)
downloadscala-f388aaaf52dab4ceaf8e5f26c72eb4a0d1d3b3e7.tar.gz
scala-f388aaaf52dab4ceaf8e5f26c72eb4a0d1d3b3e7.tar.bz2
scala-f388aaaf52dab4ceaf8e5f26c72eb4a0d1d3b3e7.zip
Mostly refactored existing test functionality f...
Mostly refactored existing test functionality for parallel collections. Added immutable hash set tests. No review.
Diffstat (limited to 'test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala')
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala138
1 files changed, 138 insertions, 0 deletions
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
new file mode 100644
index 0000000000..10329c19f2
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
@@ -0,0 +1,138 @@
+package scala.collection.parallel
+package immutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+/** ScalaCheck suite for the immutable parallel hash map (`ParHashMap`).
+ *
+ *  Configures the default fork/join pool to twice the number of available
+ *  processors, and supplies the generators and conversions required by the
+ *  generic `ParallelMapCheck` machinery.
+ *
+ *  @tparam K key type under test
+ *  @tparam V value type under test
+ *  @param tp human-readable description of the key/value types, used in the
+ *            property suite's name
+ */
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("immutable.ParHashMap[" + tp + "]") {
+  ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+  ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+  type CollType = ParHashMap[K, V]
+
+  // Views are not exercised for parallel hash tries.
+  def isCheckingViews = false
+
+  // Hash tries do not guarantee a deterministic traversal order.
+  def hasStrictOrder = false
+
+  /** Builds a sequential hash map of `sz` random entries, all drawn from one
+   *  randomly chosen generator out of `vals`.
+   */
+  def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+    var hm = new immutable.HashMap[K, V]
+    val gen = vals(rnd.nextInt(vals.size))
+    for (i <- 0 until sz) hm += sample(gen)
+    hm
+  }
+
+  /** Converts a sequential collection of pairs into the parallel hash map
+   *  under test by adding its elements one at a time.
+   */
+  def fromTraversable(t: Traversable[(K, V)]) = {
+    var phm = new ParHashMap[K, V]
+    // The original kept a `var i` element counter here that was never read;
+    // it has been removed as dead code.
+    for (kv <- t.toList) phm += kv
+    phm
+  }
+
+}
+
+
+/** `Int`-keyed, `Int`-valued instantiation of the parallel hash map suite. */
+object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+  // Keys and values are both generated from the integer value pool.
+  def intvalues = new IntValues {}
+  def kvalues = intvalues.values
+  def vvalues = intvalues.values
+
+  // A single operator set serves for keys and values alike.
+  val intoperators = new IntOperators {}
+  def koperators = intoperators
+  def voperators = intoperators
+
+  override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+    case trie: ParHashMap[_, _] => trie.printDebugInfo
+    case other                  => println("could not match data structure type: " + other.getClass)
+  }
+}
+
+
+
+/** ScalaCheck suite for the immutable parallel hash set (`ParHashSet`).
+ *
+ *  Configures the default fork/join pool to twice the number of available
+ *  processors, and supplies the generators and conversions required by the
+ *  generic `ParallelSetCheck` machinery.
+ *
+ *  @tparam T element type under test
+ *  @param tp human-readable description of the element type, used in the
+ *            property suite's name
+ */
+abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("immutable.ParHashSet[" + tp + "]") {
+  ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+  ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+  type CollType = ParHashSet[T]
+
+  // Views are not exercised for parallel hash tries.
+  def isCheckingViews = false
+
+  // Hash tries do not guarantee a deterministic traversal order.
+  def hasStrictOrder = false
+
+  /** Builds a sequential hash set of `sz` random elements, all drawn from one
+   *  randomly chosen generator out of `vals`.
+   *  (Local renamed from the misleading `hm` — this is a set, not a map.)
+   */
+  def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+    var hs = new immutable.HashSet[T]
+    val gen = vals(rnd.nextInt(vals.size))
+    for (i <- 0 until sz) hs += sample(gen)
+    hs
+  }
+
+  /** Converts a sequential collection into the parallel hash set under test
+   *  by adding its elements one at a time.
+   */
+  def fromTraversable(t: Traversable[T]) = {
+    var phs = new ParHashSet[T]
+    // The original kept a `var i` element counter here that was never read;
+    // it has been removed as dead code.
+    for (elem <- t.toList) phs += elem
+    phs
+  }
+
+  override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+    case _: ParHashSet[_] =>
+      println("Parallel hash set")
+    case _ =>
+      println("could not match data structure type: " + ds.getClass)
+  }
+
+}
+
+
+/** `Int`-element instantiation of the parallel hash set suite. */
+object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+with IntOperators
+with IntValues
+{
+  // NOTE(review): `kvalues`/`vvalues` look like leftovers copy-pasted from the
+  // map check (this suite mixes in IntValues, not PairValues) — kept so the
+  // object's public interface is unchanged; confirm whether they can go.
+  def intvalues = new IntValues {}
+  def kvalues = intvalues.values
+  def vvalues = intvalues.values
+
+  // BUG FIX: this override previously matched `ParHashMap[k, v]` (copy-pasted
+  // from IntIntParallelHashMapCheck), so an actual ParHashSet always fell
+  // through to the "could not match" branch. Match ParHashSet instead,
+  // mirroring the superclass implementation.
+  override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+    case _: ParHashSet[_] =>
+      println("Parallel hash set")
+    case _ =>
+      println("could not match data structure type: " + ds.getClass)
+  }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+