Diffstat (limited to 'test/scalacheck/scala/collection/parallel/mutable')
 -rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala     |  62
 -rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala      | 112
 -rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala | 122
 -rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala     | 101
 -rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala   | 100
 -rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala   |  97
 6 files changed, 594 insertions(+), 0 deletions(-)
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala
new file mode 100644
index 0000000000..39370f8c38
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala
@@ -0,0 +1,62 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParArray[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParArray[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = true
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val a = new mutable.ArrayBuffer[T](sz)
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) a += sample(gen)
+ a
+ }
+
+ def fromSeq(a: Seq[T]) = {
+ val pa = new ParArray[T](a.size)
+ pa.tasksupport = tasksupport
+ var i = 0
+ for (elem <- a.toList) {
+ pa(i) = elem
+ i += 1
+ }
+ pa
+ }
+
+ property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- mapFunctions.zipWithIndex)
+ yield ("op index: " + ind) |: t.map(f) == coll.map(f)
+ results.reduceLeft(_ && _)
+ }
+
+}
+
+
+abstract class IntParallelArrayCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelArrayCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
+ override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+ (0 until sz).toArray.toSeq
+ }, sized { sz =>
+ (-sz until 0).toArray.toSeq
+ })
+}
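Note on the fromSeq helper above: it copies the generated Seq into a ParArray and attaches the TaskSupport under test before any property runs. A minimal sketch of the same wiring in user code, assuming Scala 2.12 where ForkJoinTaskSupport wraps java.util.concurrent.ForkJoinPool (on 2.11 the pool lives under scala.concurrent.forkjoin); the demo object and pool size are illustrative, not part of this patch:

import java.util.concurrent.ForkJoinPool
import scala.collection.parallel.ForkJoinTaskSupport
import scala.collection.parallel.mutable.ParArray

object ParArrayTaskSupportDemo {
  def main(args: Array[String]): Unit = {
    // Build a ParArray and attach an explicit task support, as fromSeq does above.
    val pa = ParArray(1, 2, 3, 4, 5)
    pa.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(2))
    // The "array mappings" property compares the parallel result with the sequential one.
    assert(pa.map(_ * 2).seq.sameElements(Seq(1, 2, 3, 4, 5).map(_ * 2)))
  }
}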
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala
new file mode 100644
index 0000000000..db2b1ea01e
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala
@@ -0,0 +1,112 @@
+// package test.scala.collection.parallel.mutable
+
+// import org.scalatest.FunSuite
+// import collection.parallel.mutable.ParallelArray
+
+// /**
+// * Notes:
+// */
+// class ParallelArrayTest extends FunSuite {
+
+// test("create new parallel array with a bad initial capacity"){
+// intercept[IllegalArgumentException]{
+// new ParallelArray(-5)
+// }
+
+// /**
+// * this currently passes, but do we want it to?
+// * does it have meaning to have an empty parallel array?
+// */
+// new ParallelArray(0)
+// ()
+// }
+
+// test("compare identical ParallelArrays"){
+// assert(new ParallelArray(5) === new ParallelArray(5))
+// assert(ParallelArray(1,2,3,4,5) === ParallelArray(1,2,3,4,5))
+// }
+
+// /**
+// * this test needs attention. how is equality defined on ParallelArrays?
+// * Well, the same way it is for normal collections, I guess. For normal arrays it's reference equality.
+// * I do not think it should be that way in the case of ParallelArray-s. I'll check this with Martin.
+// */
+// test("compare non-identical ParallelArrays"){
+// assert(ParallelArray(1,2,3,4,5) != ParallelArray(1,2,3,4),
+// "compared PA's that I expect to not be identical, but they were!")
+// }
+
+// test("creation via PA object [String]"){
+// val paFromApply: ParallelArray[String] = ParallelArray("x", "1", "true", "etrijwejiorwer")
+// val paFromHandoff: ParallelArray[String] = ParallelArray.handoff(Array("x", "1", "true", "etrijwejiorwer"))
+// val paFromCopy: ParallelArray[String] = ParallelArray.createFromCopy(Array("x", "1", "true", "etrijwejiorwer"))
+// assert( paFromApply === paFromCopy )
+// assert( paFromApply === paFromCopy )
+// }
+
+// // // handoffs don't work for primitive types...
+// // test("creation via PA object [Boolean]"){
+// // val paFromApply: ParallelArray[Boolean] = ParallelArray(true, false, true, false)
+// // val paFromCopy: ParallelArray[Boolean] = ParallelArray.createFromCopy(Array(true, false, true, false))
+// // assert( paFromApply === paFromCopy )
+// // }
+// //
+// // // handoffs don't work for primitive types...
+// // test("creation via PA object [Int]"){
+// // val paFromApply: ParallelArray[Int] = ParallelArray(1, 2, 4, 3)
+// // val paFromCopy: ParallelArray[Int] = ParallelArray.createFromCopy(Array(1, 2, 4, 3))
+// // assert( paFromApply === paFromCopy )
+// // }
+
+// /**
+// * This fails because handoff is really doing a copy.
+// * TODO: look at handoff
+// */
+// test("Handoff Is Really A Handoff"){
+// val arrayToHandOff = Array("a", "x", "y", "z")
+// val paFromHandoff: ParallelArray[String] = ParallelArray.handoff(arrayToHandOff)
+// arrayToHandOff(0) = "w"
+// assert(paFromHandoff(0) === "w")
+// }
+
+// test("simple reduce"){
+// assert( ParallelArray(1,2,3,4,5).reduce(_+_) === 15 )
+// }
+
+// test("simple count"){
+// assert( ParallelArray[Int]().count(_ > 7) === 0 )
+// assert( ParallelArray(1,2,3).count(_ > 7) === 0 )
+// assert( ParallelArray(1,2,3).count(_ <= 3) === 3 )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).count(_ > 7 ) === 3 )
+// }
+
+// test("simple forall"){
+// assert( ParallelArray[Int]().forall(_ > 7) === true )
+// assert( ParallelArray(1,2,3).forall(_ > 3) === false )
+// assert( ParallelArray(1,2,3).forall(_ <= 3) === true )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).forall(_ > 0) === true )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).forall(_ < 5) === false )
+// }
+
+// /**
+// */
+// test("simple foreach"){
+// val buf = new java.util.concurrent.ArrayBlockingQueue[Int](10000)
+// ParallelArray((1 to 10000):_*).foreach(buf add _)
+// (1 to 10000).foreach(i => assert( buf contains i, "buf should have contained:" + i ))
+// }
+
+// test("simple exists"){
+// assert( ParallelArray[Int]().exists(_ => true) === false )
+// assert( ParallelArray(1,2,3).forall(_ > 3) === false )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).exists(_ > 7) === true )
+// }
+
+// test("simple filter"){
+// assert(ParallelArray(1,2,3,4,5).filter( _ < 4 ) === ParallelArray(1,2,3))
+// }
+
+// test("simple map test"){
+// assert(ParallelArray(1,2,3,4,5).map( (_:Int) * 10 ) === ParallelArray(10,20,30,40,50))
+// }
+// }
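ParallelArrayTest.scala above is checked in fully commented out and still targets the pre-2.9 ParallelArray API (handoff, createFromCopy), which no longer exists. For orientation only, a few of its assertions translated to the current ParArray; an illustrative sketch, not part of the patch:

import scala.collection.parallel.mutable.ParArray

object ParArraySmokeChecks {
  def main(args: Array[String]): Unit = {
    assert(ParArray(1, 2, 3, 4, 5).reduce(_ + _) == 15)                // simple reduce
    assert(ParArray(1, 2, 3).count(_ > 7) == 0)                        // simple count
    assert(ParArray(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).exists(_ > 7))     // simple exists
    assert(ParArray(1, 2, 3, 4, 5).filter(_ < 4).seq == Seq(1, 2, 3)) // simple filter
  }
}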
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala
new file mode 100644
index 0000000000..fb09a5bbb7
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala
@@ -0,0 +1,122 @@
+// package scala.collection.parallel
+// package mutable
+
+
+
+
+
+
+// import org.scalacheck._
+// import org.scalacheck.Gen
+// import org.scalacheck.Gen._
+// import org.scalacheck.Prop._
+// import org.scalacheck.Properties
+// import org.scalacheck.Arbitrary._
+
+// import scala.collection.TraversableView
+// import scala.collection.mutable.ArrayBuffer
+// import scala.collection.parallel.ops._
+// import scala.collection.mutable.ArraySeq
+
+
+
+// abstract class ParallelArrayViewCheck[T](tp: String)
+// extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + ", ParallelArray[" + tp + "]]") {
+// // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+// // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+// type CollType = ParallelSeqView[T, ParallelArray[T], ArraySeq[T]]
+
+// def isCheckingViews = true
+
+// def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
+// val a = new ArrayBuffer[T](sz)
+// val gen = vals(rnd.nextInt(vals.size))
+// for (i <- 0 until sz) a += sample(gen)
+// a
+// }
+
+// def fromSeq(a: Seq[T]) = {
+// val pa = new ParallelArray[T](a.size)
+// var i = 0
+// for (elem <- a) {
+// pa(i) = elem
+// i += 1
+// }
+// pa.view
+// }
+
+// property("forces must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+// val smodif = (s ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2)
+// val cmodif = (coll ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2).force
+// smodif == cmodif
+// }
+
+// }
+
+
+// object IntParallelArrayViewCheck extends ParallelArrayViewCheck[Int]("Int") with IntSeqOperators with IntValues {
+// override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+// (0 until sz).toArray.toSeq
+// }, sized { sz =>
+// (-sz until 0).toArray.toSeq
+// })
+// }
+
+
+// abstract class ParallelArrayViewComposedCheck[T](tp: String)
+// extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + "], ParallelArray[" + tp + "].++.patch.reverse.take.reverse") {
+// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+// type CollType = collection.parallel.ParallelSeq[T]
+
+// def isCheckingViews = true
+
+// def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
+// val a = new ArrayBuffer[T](sz)
+// val gen = vals(rnd.nextInt(vals.size))
+// for (i <- 0 until sz) a += sample(gen)
+// a
+// }
+
+// def fromSeq(a: Seq[T]) = {
+// val pa = new ParallelArray[T](a.size)
+// var i = 0
+// for (elem <- a) {
+// pa(i) = elem
+// i += 1
+// }
+// val modified = (pa.view ++ a).patch(0, a, a.length).reverse
+// val original = modified.take(modified.length / 2).reverse
+// original
+// }
+
+// }
+
+
+// object IntParallelArrayViewComposedCheck extends ParallelArrayViewComposedCheck[Int]("Int") with IntSeqOperators with IntValues {
+// override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+// (0 until sz).toArray.toSeq
+// }, sized { sz =>
+// (-sz until 0).toArray.toSeq
+// })
+// }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala
new file mode 100644
index 0000000000..ebdcf78bea
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala
@@ -0,0 +1,101 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParTrieMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val ct = new concurrent.TrieMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) ct += sample(gen)
+ ct
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val pct = new ParTrieMap[K, V]
+ pct.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ pct += kv
+ i += 1
+ }
+ pct
+ }
+
+}
+
+
+abstract class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelConcurrentTrieMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParTrieMap[k, v] =>
+ println("Mutable parallel ctrie")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
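The fromTraversable helper above fills a ParTrieMap one pair at a time and assigns the TaskSupport under test. The same collection can be exercised directly; a small illustrative sketch (object name and sizes are arbitrary):

import scala.collection.parallel.mutable.ParTrieMap

object ParTrieMapDemo {
  def main(args: Array[String]): Unit = {
    val pct = new ParTrieMap[Int, Int]
    for (i <- 0 until 100) pct += (i -> (i * i))   // populate pair by pair, like fromTraversable
    assert(pct.get(7) == Some(49))                 // lookups go through the concurrent trie
    assert(pct.map(_._2).sum == (0 until 100).map(i => i * i).sum)
  }
}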
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala
new file mode 100644
index 0000000000..06fdb66080
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala
@@ -0,0 +1,100 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParHashMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val hm = new mutable.HashMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phm += kv
+ i += 1
+ }
+ phm
+ }
+
+}
+
+
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ println("Mutable parallel hash map\n" + pm.hashTableContents.debugInformation)
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
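Like the other *Check classes in this patch, IntIntParallelHashMapCheck stays abstract over which TaskSupport drives it; the concrete suites that instantiate it are not part of this diff. A hedged sketch of such wiring, assuming the mixed-in traits leave no further abstract members and using ScalaCheck's Properties.include with an anonymous subclass; the suite name and pool size are illustrative:

import java.util.concurrent.ForkJoinPool
import org.scalacheck.Properties
import scala.collection.parallel.ForkJoinTaskSupport
import scala.collection.parallel.mutable.IntIntParallelHashMapCheck

// Hypothetical aggregating suite; the real one lives outside this diff.
class ForkJoinParHashMapProps extends Properties("mutable.ParHashMap[Int, Int]") {
  include(new IntIntParallelHashMapCheck(
    new ForkJoinTaskSupport(new ForkJoinPool(2)), "ForkJoinTaskSupport(2)") { })
}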
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala
new file mode 100644
index 0000000000..a968ed053f
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala
@@ -0,0 +1,97 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("mutable.ParHashSet[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashSet[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val hm = new mutable.HashSet[T]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[T]) = {
+ val phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phs += kv
+ i += 1
+ }
+ phs
+ }
+
+}
+
+
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
+with IntOperators
+with IntValues
+{
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashSet[t] =>
+ println("Mutable parallel hash set")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[Int], ds: AnyRef) = ds match {
+ // case pm: ParHashSet[t] if 1 == 0 =>
+ // // for an example of how not to write code proceed below
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for (elem <- orig) yield {
+ // if (pm.asInstanceOf[ParHashSet[Int]](elem) == true) true
+ // else {
+ // println("Does not contain original element: " + elem)
+ // println(pm.hashTableContents.table.find(_ == elem))
+ // println(pm.hashTableContents.table.indexOf(elem))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) {
+ // if (!containsall) println(pm.debugInformation)
+ // containsall
+ // } else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
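hasStrictOrder is false for ParHashSet because traversal order is unspecified, so the inherited properties compare contents rather than sequences. A short illustrative sketch of that distinction (object name and sizes are arbitrary):

import scala.collection.parallel.mutable.ParHashSet

object ParHashSetContentsDemo {
  def main(args: Array[String]): Unit = {
    val phs = new ParHashSet[Int]
    for (i <- 1 to 1000) phs += i
    // Traversal order is unspecified, so compare as sets rather than sequences.
    assert(phs.seq == (1 to 1000).toSet)
    assert(phs.filter(_ % 2 == 0).size == 500)
  }
}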