path: root/test/scalacheck/parallel-collections
author    Seth Tisue <seth@tisue.net>  2017-01-23 14:35:47 -0800
committer Seth Tisue <seth@tisue.net>  2017-01-27 09:29:53 -0800
commit    4386b948a0b597cc78e4f3b22b51e0588a5b6d60 (patch)
tree      eca8cea41d110d8b14d27a83ae06a90e966f9621 /test/scalacheck/parallel-collections
parent    27c10db549e6f43571663d0162b58fc04fbb34bf (diff)
run ScalaCheck tests directly, not through partest
ScalaCheck ever being under partest in the first place is ancient history, from back in the Ant build days (shudder). ScalaCheck support was removed in partest 1.1.0, which we already upgraded to in a recent commit. This change also upgrades ScalaCheck from 1.11.6 to 1.13.4, since we might as well; no source changes were necessary.
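
For illustration, a minimal sketch of what "running directly" looks like (the object and property names here are hypothetical): a Properties object registers labeled properties and can be run on its own by ScalaCheck's runner, with no partest layer in between.

    import org.scalacheck.Prop.forAll
    import org.scalacheck.Properties

    // Hypothetical standalone suite: each registered property is checked
    // when the object is run by ScalaCheck's test runner.
    object ReverseSpec extends Properties("ReverseSpec") {
      property("reverse.reverse == identity") = forAll { (xs: List[Int]) =>
        xs.reverse.reverse == xs
      }
    }
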
Diffstat (limited to 'test/scalacheck/parallel-collections')
-rw-r--r-- test/scalacheck/parallel-collections/IntOperators.scala | 110
-rw-r--r-- test/scalacheck/parallel-collections/IntValues.scala | 29
-rw-r--r-- test/scalacheck/parallel-collections/Operators.scala | 36
-rw-r--r-- test/scalacheck/parallel-collections/PairOperators.scala | 101
-rw-r--r-- test/scalacheck/parallel-collections/PairValues.scala | 28
-rw-r--r-- test/scalacheck/parallel-collections/ParallelArrayCheck.scala | 62
-rw-r--r-- test/scalacheck/parallel-collections/ParallelArrayTest.scala | 112
-rw-r--r-- test/scalacheck/parallel-collections/ParallelArrayViewCheck.scala | 122
-rw-r--r-- test/scalacheck/parallel-collections/ParallelCtrieCheck.scala | 101
-rw-r--r-- test/scalacheck/parallel-collections/ParallelHashMapCheck.scala | 100
-rw-r--r-- test/scalacheck/parallel-collections/ParallelHashSetCheck.scala | 97
-rw-r--r-- test/scalacheck/parallel-collections/ParallelHashTrieCheck.scala | 144
-rw-r--r-- test/scalacheck/parallel-collections/ParallelIterableCheck.scala | 477
-rw-r--r-- test/scalacheck/parallel-collections/ParallelMapCheck1.scala | 67
-rw-r--r-- test/scalacheck/parallel-collections/ParallelRangeCheck.scala | 75
-rw-r--r-- test/scalacheck/parallel-collections/ParallelSeqCheck.scala | 300
-rw-r--r-- test/scalacheck/parallel-collections/ParallelSetCheck.scala | 62
-rw-r--r-- test/scalacheck/parallel-collections/ParallelVectorCheck.scala | 67
-rw-r--r-- test/scalacheck/parallel-collections/pc.scala | 61
19 files changed, 2151 insertions, 0 deletions
diff --git a/test/scalacheck/parallel-collections/IntOperators.scala b/test/scalacheck/parallel-collections/IntOperators.scala
new file mode 100644
index 0000000000..c7f43b6526
--- /dev/null
+++ b/test/scalacheck/parallel-collections/IntOperators.scala
@@ -0,0 +1,110 @@
+package scala.collection.parallel.ops
+
+
+import scala.collection.parallel._
+
+
+trait IntOperators extends Operators[Int] {
+ def reduceOperators = List(_ + _, _ * _, math.min(_, _), math.max(_, _), _ ^ _)
+ def countPredicates = List(
+ x => true,
+ _ >= 0, _ < 0, _ < 50, _ < 500, _ < 5000, _ < 50000, _ % 2 == 0, _ == 99,
+ x => x > 50 && x < 150,
+ x => x > 350 && x < 550,
+ x => (x > 1000 && x < 1500) || (x > 400 && x < 500)
+ )
+ def forallPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ != 55, _ != 505, _ != 5005)
+ def existsPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ == 55, _ == 505, _ == 5005)
+ def findPredicates = List(_ >= 0, _ % 2 == 0, _ < 0, _ == 50, _ == 500, _ == 5000)
+ def mapFunctions = List(-_, math.abs(_), _ % 2, _ % 3, _ % 4, _ % 150, _ % 500)
+ def partialMapFunctions = List({case x => -x}, { case 0 => -1; case x if x > 0 => x + 1}, {case x if x % 3 == 0 => x / 3})
+ def flatMapFunctions = List(
+ (n: Int) => if (n < 0) List() else if (n % 2 == 0) List(1, 2, 3) else List(4, 5, 6),
+ (n: Int) => List[Int](),
+ (n: Int) => if (n == 0) List(1, 2, 3, 4, 5) else if (n < 0) List(1, 2, 3) else List()
+ )
+ def filterPredicates = List(
+ _ % 2 == 0, _ % 3 == 0,
+ _ % 4 != 0, _ % 17 != 0,
+ n => n > 50 && n < 100,
+ _ >= 0, _ < 0, _ == 99,
+ _ > 500, _ > 5000, _ > 50000,
+ _ < 500, _ < 50, _ < -50, _ < -5e5,
+ x => true, x => false,
+ x => x % 53 == 0 && x % 17 == 0
+ )
+ def filterNotPredicates = filterPredicates
+ def partitionPredicates = filterPredicates
+ def takeWhilePredicates = List(
+ _ != 50, _ != 500, _ != 5000, _ != 50000, _ % 2 == 0, _ % 3 == 1, _ % 47 != 0,
+ _ < 100, _ < 1000, _ < 10000, _ < 0,
+ _ < -100, _ < -1000, _ > -200, _ > -50,
+ n => -90 < n && n < -10,
+ n => 50 < n && n < 550,
+ n => 5000 < n && n < 7500,
+ n => -50 < n && n < 450
+ )
+ def dropWhilePredicates = takeWhilePredicates
+ def spanPredicates = takeWhilePredicates
+ def foldArguments = List(
+ (0, _ + _),
+ (1, _ * _),
+ (Int.MinValue, math.max(_, _)),
+ (Int.MaxValue, math.min(_, _))
+ )
+ def addAllTraversables = List(
+ List[Int](),
+ List(1),
+ List(1, 2),
+ List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
+ Array.fill(1000)(1).toSeq
+ )
+ def newArray(sz: Int) = new Array[Int](sz)
+ def groupByFunctions = List(
+ _ % 2, _ % 3, _ % 5, _ % 10, _ % 154, _ % 3217,
+ _ * 2, _ + 1
+ )
+}
+
+
+trait IntSeqOperators extends IntOperators with SeqOperators[Int] {
+ def segmentLengthPredicates = List(
+ _ % 2 == 0, _ > 0, _ >= 0, _ < 0, _ <= 0, _ > -5000, _ > 5000, _ % 541 != 0, _ < -50, _ > 500,
+ n => -90 < n && n < -10, n => 500 < n && n < 1500
+ )
+ def indexWherePredicates = List(
+ _ % 2 == 0, _ % 11 == 0, _ % 123 == 0, _ % 901 == 0,
+ _ > 0, _ >= 0, _ < 0, _ <= 0,
+ _ > 50, _ > 500, _ > 5000,
+ _ < -10, _ < -100, _ < -1000,
+ n => n > 50 && n < 100,
+ n => n * n > 1000000 && n % 111 == 0
+ )
+ def lastIndexWherePredicates = List(
+ _ % 2 == 0, _ % 17 == 0, _ % 314 == 0, _ % 1017 == 0,
+ _ > 0, _ >= 0, _ < 0, _ <= 0,
+ _ > 50, _ > 500, _ > 5000,
+ _ < -20, _ < -200, _ < -2000,
+ _ == 0,
+ n => n > -40 && n < 40,
+ n => n > -80 && n < -10,
+ n => n > 110 && n < 150
+ )
+ def reverseMapFunctions = List(-_, n => n * n, _ + 1)
+ def sameElementsSeqs = List(
+ List[Int](),
+ List(1),
+ List(1, 2, 3, 4, 5, 6, 7, 8, 9),
+ Array.fill(150)(1).toSeq,
+ Array.fill(1000)(1).toSeq
+ )
+ def startEndSeqs = List(
+ Nil,
+ List(1),
+ List(1, 2, 3, 4, 5),
+ List(0, 1, 2, 3, 4, 5),
+ List(4, 5, 6, 7, 8, 9, 10),
+ List(4, 5, 6, 7, 8, 9, 0),
+ List(-4, -3, -2, -1)
+ )
+}
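
The operator lists above are plain data; ParallelIterableCheck (later in this diff) applies each entry to a sequential and a parallel collection and compares the results. A reduced sketch of that consumption pattern, with hypothetical local values:

    val xs = (1 to 100).toList
    val ops: List[(Int, Int) => Int] = List(_ + _, math.min(_, _), math.max(_, _))
    for ((op, i) <- ops.zipWithIndex)
      // parallel reduce assumes an associative operator; all three qualify
      assert(xs.reduceLeft(op) == xs.par.reduce(op), "op index: " + i)
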
diff --git a/test/scalacheck/parallel-collections/IntValues.scala b/test/scalacheck/parallel-collections/IntValues.scala
new file mode 100644
index 0000000000..cab60ead76
--- /dev/null
+++ b/test/scalacheck/parallel-collections/IntValues.scala
@@ -0,0 +1,29 @@
+package scala.collection.parallel.ops
+
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+
+
+
+trait IntValues {
+ def values = Seq(
+ arbitrary[Int],
+ arbitrary[Int] suchThat (_ >= 0),
+ arbitrary[Int] suchThat (_ < 0),
+ choose(0, 0),
+ choose(0, 10),
+ choose(0, 100),
+ choose(0, 1000) suchThat (_ % 2 == 0),
+ choose(0, 1000) suchThat (_ % 2 != 0),
+ choose(0, 1000) suchThat (n => (n % 2 == 0) || (n % 3 == 0))
+ )
+}
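
Note that suchThat yields a filtered generator, so its sample may come back None; this is why the harness's sample helper in ParallelIterableCheck below retries in a loop. A sketch of the same retry, with a hypothetical helper name:

    import org.scalacheck.Gen

    val evens: Gen[Int] = Gen.choose(0, 1000) suchThat (_ % 2 == 0)

    // keep sampling until the filtered generator produces a value
    def sampleRetry[T](gen: Gen[T]): T =
      Iterator.continually(gen.sample).collectFirst { case Some(t) => t }.get
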
diff --git a/test/scalacheck/parallel-collections/Operators.scala b/test/scalacheck/parallel-collections/Operators.scala
new file mode 100644
index 0000000000..72133a5009
--- /dev/null
+++ b/test/scalacheck/parallel-collections/Operators.scala
@@ -0,0 +1,36 @@
+package scala.collection.parallel
+
+
+
+
+trait Operators[T] {
+ def reduceOperators: List[(T, T) => T]
+ def countPredicates: List[T => Boolean]
+ def forallPredicates: List[T => Boolean]
+ def existsPredicates: List[T => Boolean]
+ def findPredicates: List[T => Boolean]
+ def mapFunctions: List[T => T]
+ def partialMapFunctions: List[PartialFunction[T, T]]
+ def flatMapFunctions: List[T => Traversable[T]]
+ def filterPredicates: List[T => Boolean]
+ def filterNotPredicates: List[T => Boolean]
+ def partitionPredicates: List[T => Boolean]
+ def takeWhilePredicates: List[T => Boolean]
+ def dropWhilePredicates: List[T => Boolean]
+ def spanPredicates: List[T => Boolean]
+ def foldArguments: List[(T, (T, T) => T)]
+ def addAllTraversables: List[Traversable[T]]
+ def newArray(sz: Int): Array[T]
+ def groupByFunctions: List[T => T]
+}
+
+
+
+trait SeqOperators[T] extends Operators[T] {
+ def segmentLengthPredicates: List[T => Boolean]
+ def indexWherePredicates: List[T => Boolean]
+ def lastIndexWherePredicates: List[T => Boolean]
+ def reverseMapFunctions: List[T => T]
+ def sameElementsSeqs: List[Seq[T]]
+ def startEndSeqs: List[Seq[T]]
+}
diff --git a/test/scalacheck/parallel-collections/PairOperators.scala b/test/scalacheck/parallel-collections/PairOperators.scala
new file mode 100644
index 0000000000..fe851114be
--- /dev/null
+++ b/test/scalacheck/parallel-collections/PairOperators.scala
@@ -0,0 +1,101 @@
+package scala.collection.parallel.ops
+
+
+import scala.collection.parallel._
+
+
+trait PairOperators[K, V] extends Operators[(K, V)] {
+ def koperators: Operators[K]
+ def voperators: Operators[V]
+
+ private def zipPredicates(kps: List[K => Boolean], vps: List[V => Boolean]): List[((K, V)) => Boolean] = for {
+ (kp, vp) <- kps zip vps
+ } yield new Function1[(K, V), Boolean] {
+ def apply(kv: (K, V)) = kp(kv._1) && vp(kv._2)
+ }
+
+ /* operators */
+
+ def reduceOperators = for {
+ (kop, vop) <- koperators.reduceOperators zip voperators.reduceOperators
+ } yield new Function2[(K, V), (K, V), (K, V)] {
+ def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
+ }
+
+ def countPredicates = zipPredicates(koperators.countPredicates, voperators.countPredicates)
+
+ def forallPredicates = zipPredicates(koperators.forallPredicates, voperators.forallPredicates)
+
+ def existsPredicates = zipPredicates(koperators.existsPredicates, voperators.existsPredicates)
+
+ def findPredicates = zipPredicates(koperators.findPredicates, voperators.findPredicates)
+
+ def mapFunctions = for {
+ (km, vm) <- koperators.mapFunctions zip voperators.mapFunctions
+ } yield new Function1[(K, V), (K, V)] {
+ def apply(kv: (K, V)) = (km(kv._1), vm(kv._2))
+ }
+
+ def partialMapFunctions = for {
+ (kpm, vpm) <- koperators.partialMapFunctions zip voperators.partialMapFunctions
+ } yield new PartialFunction[(K, V), (K, V)] {
+ def isDefinedAt(kv: (K, V)) = kpm.isDefinedAt(kv._1) && vpm.isDefinedAt(kv._2)
+ def apply(kv: (K, V)) = (kpm(kv._1), vpm(kv._2))
+ }
+
+ def flatMapFunctions = for {
+ (kfm, vfm) <- koperators.flatMapFunctions zip voperators.flatMapFunctions
+ } yield new Function1[(K, V), Traversable[(K, V)]] {
+ def apply(kv: (K, V)) = kfm(kv._1).toIterable zip vfm(kv._2).toIterable
+ }
+
+ def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.filterPredicates)
+
+ def filterNotPredicates = filterPredicates
+
+ def partitionPredicates = filterPredicates
+
+ def takeWhilePredicates = zipPredicates(koperators.takeWhilePredicates, voperators.takeWhilePredicates)
+
+ def dropWhilePredicates = takeWhilePredicates
+
+ def spanPredicates = takeWhilePredicates
+
+ def foldArguments = for {
+ ((kinit, kop), (vinit, vop)) <- koperators.foldArguments zip voperators.foldArguments
+ } yield ((kinit, vinit), new Function2[(K, V), (K, V), (K, V)] {
+ def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
+ })
+
+ def addAllTraversables = for {
+ (kt, vt) <- koperators.addAllTraversables zip voperators.addAllTraversables
+ } yield kt.toIterable zip vt.toIterable
+
+ def newArray(sz: Int) = new Array[(K, V)](sz)
+
+ def groupByFunctions = (koperators.groupByFunctions zip voperators.groupByFunctions) map {
+ opt => { (p: (K, V)) => (opt._1(p._1), opt._2(p._2)) }
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
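
The zipping trick used throughout PairOperators pairs the i-th key function with the i-th value function and applies them component-wise. A self-contained sketch with hypothetical predicates:

    val kps: List[Int => Boolean] = List(_ > 0, _ % 2 == 0)
    val vps: List[Int => Boolean] = List(_ < 10, _ != 0)
    val pairPreds: List[((Int, Int)) => Boolean] =
      (kps zip vps) map { case (kp, vp) =>
        (kv: (Int, Int)) => kp(kv._1) && vp(kv._2) // both components must pass
      }
    assert(pairPreds.head((1, 5)))
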
diff --git a/test/scalacheck/parallel-collections/PairValues.scala b/test/scalacheck/parallel-collections/PairValues.scala
new file mode 100644
index 0000000000..864dad2425
--- /dev/null
+++ b/test/scalacheck/parallel-collections/PairValues.scala
@@ -0,0 +1,28 @@
+package scala.collection.parallel.ops
+
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+
+
+
+trait PairValues[K, V] {
+ def kvalues: Seq[Gen[K]]
+ def vvalues: Seq[Gen[V]]
+
+ def values = for {
+ kg <- kvalues
+ vg <- vvalues
+ } yield for {
+ k <- kg
+ v <- vg
+ } yield (k, v)
+}
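
The nested for-comprehension above composes two generators into a generator of pairs; the same composition can be written with Gen.zip. A sketch:

    import org.scalacheck.Gen

    val kg: Gen[Int] = Gen.choose(0, 10)
    val vg: Gen[String] = Gen.oneOf("a", "b")

    // equivalent formulations of a pair generator
    val viaFor: Gen[(Int, String)] = for { k <- kg; v <- vg } yield (k, v)
    val viaZip: Gen[(Int, String)] = Gen.zip(kg, vg)
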
diff --git a/test/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/scalacheck/parallel-collections/ParallelArrayCheck.scala
new file mode 100644
index 0000000000..39370f8c38
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelArrayCheck.scala
@@ -0,0 +1,62 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParArray[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParArray[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = true
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val a = new mutable.ArrayBuffer[T](sz)
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) a += sample(gen)
+ a
+ }
+
+ def fromSeq(a: Seq[T]) = {
+ val pa = new ParArray[T](a.size)
+ pa.tasksupport = tasksupport
+ var i = 0
+ for (elem <- a.toList) {
+ pa(i) = elem
+ i += 1
+ }
+ pa
+ }
+
+ property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- mapFunctions.zipWithIndex)
+ yield ("op index: " + ind) |: t.map(f) == coll.map(f)
+ results.reduceLeft(_ && _)
+ }
+
+}
+
+
+abstract class IntParallelArrayCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelArrayCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
+ override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+ (0 until sz).toArray.toSeq
+ }, sized { sz =>
+ (-sz until 0).toArray.toSeq
+ })
+}
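
The concrete subclasses inject a TaskSupport, and fromSeq assigns it to every ParArray under test. A sketch of the same wiring done by hand, assuming Scala 2.12's ForkJoinTaskSupport (the pool size is arbitrary):

    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.collection.parallel.mutable.ParArray

    val pa = ParArray(1, 2, 3, 4, 5)
    // route this collection's operations through a dedicated 2-thread pool
    pa.tasksupport = new ForkJoinTaskSupport(new java.util.concurrent.ForkJoinPool(2))
    assert(pa.map(_ * 10).sum == 150)
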
diff --git a/test/scalacheck/parallel-collections/ParallelArrayTest.scala b/test/scalacheck/parallel-collections/ParallelArrayTest.scala
new file mode 100644
index 0000000000..db2b1ea01e
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelArrayTest.scala
@@ -0,0 +1,112 @@
+// package test.scala.collection.parallel.mutable
+
+// import org.scalatest.FunSuite
+// import collection.parallel.mutable.ParallelArray
+
+// /**
+// * Notes:
+// */
+// class ParallelArrayTest extends FunSuite {
+
+// test("create new parallel array with a bad initial capacity"){
+// intercept[IllegalArgumentException]{
+// new ParallelArray(-5)
+// }
+
+// /**
+// * this currently passes, but do we want it to?
+// * does it have meaning to have an empty parallel array?
+// */
+// new ParallelArray(0)
+// ()
+// }
+
+// test("compare identical ParallelArrays"){
+// assert(new ParallelArray(5) === new ParallelArray(5))
+// assert(ParallelArray(1,2,3,4,5) === ParallelArray(1,2,3,4,5))
+// }
+
+// /**
+// * this test needs attention. how is equality defined on ParallelArrays?
+// * Well, the same way it is for normal collections, I guess. For normal arrays it's reference equality.
+// * I do not think it should be that way in the case of ParallelArrays. I'll check this with Martin.
+// */
+// test("compare non-identical ParallelArrays"){
+// assert(ParallelArray(1,2,3,4,5) != ParallelArray(1,2,3,4),
+// "compared PA's that I expect to not be identical, but they were!")
+// }
+
+// test("creation via PA object [String]"){
+// val paFromApply: ParallelArray[String] = ParallelArray("x", "1", "true", "etrijwejiorwer")
+// val paFromHandoff: ParallelArray[String] = ParallelArray.handoff(Array("x", "1", "true", "etrijwejiorwer"))
+// val paFromCopy: ParallelArray[String] = ParallelArray.createFromCopy(Array("x", "1", "true", "etrijwejiorwer"))
+// assert( paFromApply === paFromCopy )
+// assert( paFromApply === paFromHandoff )
+// }
+
+// // handoffs don't work for primitive types...
+// // test("creation via PA object [Boolean]"){
+// // val paFromApply: ParallelArray[Boolean] = ParallelArray(true, false, true, false)
+// // val paFromCopy: ParallelArray[Boolean] = ParallelArray.createFromCopy(Array(true, false, true, false))
+// // assert( paFromApply === paFromCopy )
+// // }
+// //
+// // handoffs don't work for primitive types...
+// // test("creation via PA object [Int]"){
+// // val paFromApply: ParallelArray[Int] = ParallelArray(1, 2, 4, 3)
+// // val paFromCopy: ParallelArray[Int] = ParallelArray.createFromCopy(Array(1, 2, 4, 3))
+// // assert( paFromApply === paFromCopy )
+// // }
+
+// /**
+// * This fails because handoff is really doing a copy.
+// * TODO: look at handoff
+// */
+// test("Handoff Is Really A Handoff"){
+// val arrayToHandOff = Array("a", "x", "y", "z")
+// val paFromHandoff: ParallelArray[String] = ParallelArray.handoff(arrayToHandOff)
+// arrayToHandOff(0) = "w"
+// assert(paFromHandoff(0) === "w")
+// }
+
+// test("simple reduce"){
+// assert( ParallelArray(1,2,3,4,5).reduce(_+_) === 15 )
+// }
+
+// test("simple count"){
+// assert( ParallelArray[Int]().count(_ > 7) === 0 )
+// assert( ParallelArray(1,2,3).count(_ > 7) === 0 )
+// assert( ParallelArray(1,2,3).count(_ <= 3) === 3 )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).count(_ > 7 ) === 3 )
+// }
+
+// test("simple forall"){
+// assert( ParallelArray[Int]().forall(_ > 7) === true )
+// assert( ParallelArray(1,2,3).forall(_ > 3) === false )
+// assert( ParallelArray(1,2,3).forall(_ <= 3) === true )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).forall(_ > 0) === true )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).forall(_ < 5) === false )
+// }
+
+// /**
+// */
+// test("simple foreach"){
+// val buf = new java.util.concurrent.ArrayBlockingQueue[Int](10000)
+// ParallelArray((1 to 10000):_*).foreach(buf add _)
+// (1 to 10000).foreach(i => assert( buf contains i, "buf should have contained:" + i ))
+// }
+
+// test("simple exists"){
+// assert( ParallelArray[Int]().exists(_ => true) === false )
+// assert( ParallelArray(1,2,3).exists(_ > 7) === false )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).exists(_ > 7) === true )
+// }
+
+// test("simple filter"){
+// assert(ParallelArray(1,2,3,4,5).filter( _ < 4 ) === ParallelArray(1,2,3))
+// }
+
+// test("simple map test"){
+// assert(ParallelArray(1,2,3,4,5).map( (_:Int) * 10 ) === ParallelArray(10,20,30,40,50))
+// }
+// }
diff --git a/test/scalacheck/parallel-collections/ParallelArrayViewCheck.scala b/test/scalacheck/parallel-collections/ParallelArrayViewCheck.scala
new file mode 100644
index 0000000000..fb09a5bbb7
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelArrayViewCheck.scala
@@ -0,0 +1,122 @@
+// package scala.collection.parallel
+// package mutable
+
+
+
+
+
+
+// import org.scalacheck._
+// import org.scalacheck.Gen
+// import org.scalacheck.Gen._
+// import org.scalacheck.Prop._
+// import org.scalacheck.Properties
+// import org.scalacheck.Arbitrary._
+
+// import scala.collection.TraversableView
+// import scala.collection.mutable.ArrayBuffer
+// import scala.collection.parallel.ops._
+// import scala.collection.mutable.ArraySeq
+
+
+
+// abstract class ParallelArrayViewCheck[T](tp: String)
+// extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + ", ParallelArray[" + tp + "]]") {
+// // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+// // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+// type CollType = ParallelSeqView[T, ParallelArray[T], ArraySeq[T]]
+
+// def isCheckingViews = true
+
+// def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
+// val a = new ArrayBuffer[T](sz)
+// val gen = vals(rnd.nextInt(vals.size))
+// for (i <- 0 until sz) a += sample(gen)
+// a
+// }
+
+// def fromSeq(a: Seq[T]) = {
+// val pa = new ParallelArray[T](a.size)
+// var i = 0
+// for (elem <- a) {
+// pa(i) = elem
+// i += 1
+// }
+// pa.view
+// }
+
+// property("forces must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+// val smodif = (s ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2)
+// val cmodif = (coll ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2).force
+// smodif == cmodif
+// }
+
+// }
+
+
+// object IntParallelArrayViewCheck extends ParallelArrayViewCheck[Int]("Int") with IntSeqOperators with IntValues {
+// override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+// (0 until sz).toArray.toSeq
+// }, sized { sz =>
+// (-sz until 0).toArray.toSeq
+// })
+// }
+
+
+// abstract class ParallelArrayViewComposedCheck[T](tp: String)
+// extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + "], ParallelArray[" + tp + "].++.patch.reverse.take.reverse") {
+// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+// type CollType = collection.parallel.ParallelSeq[T]
+
+// def isCheckingViews = true
+
+// def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
+// val a = new ArrayBuffer[T](sz)
+// val gen = vals(rnd.nextInt(vals.size))
+// for (i <- 0 until sz) a += sample(gen)
+// a
+// }
+
+// def fromSeq(a: Seq[T]) = {
+// val pa = new ParallelArray[T](a.size)
+// var i = 0
+// for (elem <- a) {
+// pa(i) = elem
+// i += 1
+// }
+// val modified = (pa.view ++ a).patch(0, a, a.length).reverse
+// val original = modified.take(modified.length / 2).reverse
+// original
+// }
+
+// }
+
+
+// object IntParallelArrayViewComposedCheck extends ParallelArrayViewComposedCheck[Int]("Int") with IntSeqOperators with IntValues {
+// override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+// (0 until sz).toArray.toSeq
+// }, sized { sz =>
+// (-sz until 0).toArray.toSeq
+// })
+// }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/scalacheck/parallel-collections/ParallelCtrieCheck.scala
new file mode 100644
index 0000000000..ebdcf78bea
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -0,0 +1,101 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParTrieMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val ct = new concurrent.TrieMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) ct += sample(gen)
+ ct
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val pct = new ParTrieMap[K, V]
+ pct.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ pct += kv
+ i += 1
+ }
+ pct
+ }
+
+}
+
+
+abstract class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelConcurrentTrieMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParTrieMap[k, v] =>
+ println("Mutable parallel ctrie")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
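
For context, ofSize above builds the sequential counterpart on scala.collection.concurrent.TrieMap, the lock-free map that ParTrieMap parallelizes. A small sketch of that sequential type:

    import scala.collection.concurrent.TrieMap

    val ct = new TrieMap[Int, String]
    ct += (1 -> "one")
    ct += (2 -> "two")
    assert(ct.get(1) == Some("one"))
    // snapshot() returns an iteration-stable copy in constant time
    val snap = ct.snapshot()
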
diff --git a/test/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/scalacheck/parallel-collections/ParallelHashMapCheck.scala
new file mode 100644
index 0000000000..06fdb66080
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelHashMapCheck.scala
@@ -0,0 +1,100 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParHashMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val hm = new mutable.HashMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phm += kv
+ i += 1
+ }
+ phm
+ }
+
+}
+
+
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ println("Mutable parallel hash map\n" + pm.hashTableContents.debugInformation)
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/parallel-collections/ParallelHashSetCheck.scala b/test/scalacheck/parallel-collections/ParallelHashSetCheck.scala
new file mode 100644
index 0000000000..a968ed053f
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelHashSetCheck.scala
@@ -0,0 +1,97 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("mutable.ParHashSet[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashSet[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val hm = new mutable.HashSet[T]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[T]) = {
+ val phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phs += kv
+ i += 1
+ }
+ phs
+ }
+
+}
+
+
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
+with IntOperators
+with IntValues
+{
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashSet[t] =>
+ println("Mutable parallel hash set")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[Int], ds: AnyRef) = ds match {
+ // case pm: ParHashSet[t] if 1 == 0 =>
+ // // for an example of how not to write code proceed below
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for (elem <- orig) yield {
+ // if (pm.asInstanceOf[ParHashSet[Int]](elem) == true) true
+ // else {
+ // println("Does not contain original element: " + elem)
+ // println(pm.hashTableContents.table.find(_ == elem))
+ // println(pm.hashTableContents.table.indexOf(elem))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) {
+ // if (!containsall) println(pm.debugInformation)
+ // containsall
+ // } else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/parallel-collections/ParallelHashTrieCheck.scala b/test/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
new file mode 100644
index 0000000000..e1df95e051
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
@@ -0,0 +1,144 @@
+package scala.collection.parallel
+package immutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("immutable.ParHashMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ var hm = new immutable.HashMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ var phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phm += kv
+ i += 1
+ }
+ phm
+ }
+
+}
+
+
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ pm.printDebugInfo
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+}
+
+
+
+abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("immutable.ParHashSet[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashSet[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ var hm = new immutable.HashSet[T]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[T]) = {
+ var phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phs += kv
+ i += 1
+ }
+ phs
+ }
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashSet[t] =>
+ println("Parallel hash set")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+}
+
+
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
+with IntOperators
+with IntValues
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ pm.printDebugInfo
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
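
Unlike the mutable variants earlier in this diff, the immutable builders above rebind a var, because += on an immutable collection produces a new value instead of mutating in place. A sketch of the pattern:

    // hm += kv on a var desugars to hm = hm + kv
    var hm = scala.collection.immutable.HashMap.empty[Int, Int]
    for (i <- 0 until 3) hm += (i -> (i * i))
    assert(hm == Map(0 -> 0, 1 -> 1, 2 -> 4))
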
diff --git a/test/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/scalacheck/parallel-collections/ParallelIterableCheck.scala
new file mode 100644
index 0000000000..7e7ef2ce1b
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -0,0 +1,477 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+abstract class ParallelIterableCheck[T](collName: String) extends Properties(collName) with Operators[T] {
+ type CollType <: ParIterable[T]
+
+ def values: Seq[Gen[T]]
+ def ofSize(vals: Seq[Gen[T]], sz: Int): Iterable[T]
+ def fromTraversable(t: Traversable[T]): CollType
+ def isCheckingViews: Boolean
+ def hasStrictOrder: Boolean
+
+
+ def instances(vals: Seq[Gen[T]]): Gen[Iterable[T]] = oneOf(
+ sized(
+ sz =>
+ ofSize(vals, sz)
+ ),
+ for (sz <- choose(1000, 2000)) yield ofSize(vals, sz),
+ for (sz <- choose(4000, 4001)) yield ofSize(vals, sz),
+ for (sz <- choose(10000, 10001)) yield ofSize(vals, sz)
+ )
+
+ // used to check if constructed collection is valid
+ def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = {
+ // can be overridden in subclasses
+ true
+ }
+
+ def printDataStructureDebugInfo(cf: AnyRef) {
+ // can be overridden in subclasses
+ }
+
+ val rnd = new scala.util.Random
+
+ def sample(gen: Gen[T]): T = {
+ var s = gen.sample
+ while (s == None) s = gen.sample
+ s.get
+ }
+
+ def sampleValue: T = sample(values(rnd.nextInt(values.length)))
+
+ def collectionPairs = for (inst <- instances(values)) yield (inst, fromTraversable(inst))
+
+ def collectionPairsWithLengths = for (inst <- instances(values); s <- choose(0, inst.size))
+ yield (inst, fromTraversable(inst), s)
+
+ def collectionPairsWith2Indices = for (
+ inst <- instances(values);
+ f <- choose(0, inst.size);
+ s <- choose(0, inst.size))
+ yield (inst, fromTraversable(inst), f, s)
+
+ def collectionTriplets = for (inst <- instances(values);
+ updStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val modif = inst.toSeq.patch(updStart, inst.toSeq, howMany)
+ (inst, fromTraversable(inst), modif)
+ }
+
+ def areEqual(t1: GenTraversable[T], t2: GenTraversable[T]) = if (hasStrictOrder) {
+ t1 == t2 && t2 == t1
+ } else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
+ case (m1: GenMap[_, _], m2: GenMap[_, _]) => m1 == m2 && m2 == m1
+ case (i1: GenIterable[_], i2: GenIterable[_]) =>
+ val i1s = i1.toSet
+ val i2s = i2.toSet
+ i1s == i2s && i2s == i1s
+ case _ => t1 == t2 && t2 == t1
+ }
+
+ def printDebugInfo(coll: ParIterableLike[_, _, _]) {
+ println("Collection debug info: ")
+ coll.printDebugBuffer
+ println("Task debug info: ")
+ println(coll.tasksupport.debugMessages.mkString("\n"))
+ }
+
+ def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int) {
+ printDebugInfo(coll)
+ println("Operator: " + ind)
+ println("sz: " + t.size)
+ println(t)
+ println
+ println("sz: " + coll.size)
+ println(coll)
+ println("transformed to:")
+ println
+ println("size: " + tf.size)
+ println(tf)
+ println
+ println("size: " + cf.size)
+ println(cf)
+ println
+ println("tf == cf - " + (tf == cf))
+ println("cf == tf - " + (cf == tf))
+ }
+
+ property("reductions must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ if (t.size != 0) {
+ val results = for ((op, ind) <- reduceOperators.zipWithIndex) yield {
+ val tr = t.reduceLeft(op)
+ val cr = coll.reduce(op)
+ if (tr != cr) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("reducing with " + ind)
+ println(tr)
+ println(cr)
+ }
+ ("op index: " + ind) |: tr == cr
+ }
+ results.reduceLeft(_ && _)
+ } else "has size 0" |: true
+ }
+
+ property("counts must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield {
+ val tc = t.count(pred)
+ val cc = coll.count(pred)
+ if (tc != cc) {
+ println("from: " + t + " - size: " + t.size)
+ println("and: " + coll + " - size: " + coll.toList.size)
+ println(tc)
+ println(cc)
+ printDebugInfo(coll)
+ }
+ ("op index: " + ind) |: tc == cc
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("forall must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- forallPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred)
+ results.reduceLeft(_ && _)
+ }
+
+ property("exists must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- existsPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred)
+ results.reduceLeft(_ && _)
+ }
+
+ property("both must find or not find an element") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield {
+ val ft = t.find(pred)
+ val fcoll = coll.find(pred)
+ ("op index: " + ind) |: ((ft == None && fcoll == None) || (ft != None && fcoll != None))
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
+ val ms = t.map(f)
+ val mp = coll.map(f)
+ val invs = checkDataStructureInvariants(ms, mp)
+ if (!areEqual(ms, mp) || !invs) {
+ println(t)
+ println(coll)
+ println("mapped to: ")
+ println(ms)
+ println(mp)
+ println("sizes: ")
+ println(ms.size)
+ println(mp.size)
+ println("valid: " + invs)
+ }
+ ("op index: " + ind) |: (areEqual(ms, mp) && invs)
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("collects must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
+ val ps = t.collect(f)
+ val pp = coll.collect(f)
+ if (!areEqual(ps, pp)) {
+ println(t)
+ println(coll)
+ println("collected to: ")
+ println(ps)
+ println(pp)
+ }
+ ("op index: " + ind) |: areEqual(ps, pp)
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("flatMaps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((f, ind) <- flatMapFunctions.zipWithIndex)
+ yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _)
+ }
+
+ property("filters must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- filterPredicates.zipWithIndex) yield {
+ val tf = t.filter(p)
+ val cf = coll.filter(p)
+ val invs = checkDataStructureInvariants(tf, cf)
+ if (tf != cf || cf != tf || !invs) {
+ printDebugInfo(coll)
+ println("Operator: " + ind)
+ println("sz: " + t.size)
+ println(t)
+ println
+ println("sz: " + coll.size)
+ println(coll)
+ println
+ println("filtered to:")
+ println
+ println(cf)
+ println
+ println(tf)
+ println
+ println("tf == cf - " + (tf == cf))
+ println("cf == tf - " + (cf == tf))
+ printDataStructureDebugInfo(cf)
+ println("valid: " + invs)
+ }
+ ("op index: " + ind) |: tf == cf && cf == tf && invs
+ }).reduceLeft(_ && _)
+ }
+
+ property("filterNots must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- filterNotPredicates.zipWithIndex) yield {
+ val tf = t.filterNot(p)
+ val cf = coll.filterNot(p)
+ if (tf != cf || cf != tf) printComparison(t, coll, tf, cf, ind)
+ ("op index: " + ind) |: tf == cf && cf == tf
+ }).reduceLeft(_ && _)
+ }
+
+ if (!isCheckingViews) property("partitions must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
+ val tpart = t.partition(p)
+ val cpart = coll.partition(p)
+ if (tpart != cpart) {
+ println("from: " + t)
+ println("and: " + coll)
+ println(cpart)
+ println(tpart)
+ }
+ ("op index: " + ind) |: tpart == cpart
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("takes must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
+ ("take " + n + " elements") |: t.take(n) == coll.take(n)
+ }
+
+ if (hasStrictOrder) property("drops must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
+ ("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
+ }
+
+ if (hasStrictOrder) property("slices must be equal") = forAllNoShrink(collectionPairsWith2Indices)
+ { case (t, coll, fr, slicelength) =>
+ val from = if (fr < 0) 0 else fr
+ val until = if (from + slicelength > t.size) t.size else from + slicelength
+ val tsl = t.slice(from, until)
+ val collsl = coll.slice(from, until)
+ if (tsl != collsl) {
+ println("---------------------- " + from + ", " + until)
+ println("from: " + t)
+ println("and: " + coll)
+ println(tsl)
+ println(collsl)
+ println("as list: " + collsl.toList)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ }
+ ("slice from " + from + " until " + until) |: tsl == collsl
+ }
+
+ if (hasStrictOrder) property("splits must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
+ val tspl = t.splitAt(n)
+ val cspl = coll.splitAt(n)
+ if (tspl != cspl) {
+ println("at: " + n)
+ println("from: " + t)
+ println("and: " + coll)
+ println(tspl)
+ println(cspl)
+ }
+ ("splitAt " + n) |: tspl == cspl
+ }
+
+ if (hasStrictOrder) property("takeWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
+ val tt = t.takeWhile(pred)
+ val ct = coll.takeWhile(pred)
+ if (tt != ct) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("taking while...")
+ println(tt)
+ println(ct)
+ }
+ ("operator " + ind) |: tt == ct
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("spans must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
+ val tsp = t.span(pred)
+ val csp = coll.span(pred)
+ if (tsp != csp) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("span with predicate " + ind)
+ println(tsp)
+ println(csp)
+ println("---------------------------------")
+ println(coll.span(pred))
+ println("---------------------------------")
+ }
+ ("operator " + ind) |: tsp == csp
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("dropWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
+ ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
+ }).reduceLeft(_ && _)
+ }
+
+ property("folds must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
+ val tres = t.foldLeft(first)(op)
+ val cres = coll.fold(first)(op)
+ if (cres != tres) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("folds are: ")
+ println(tres)
+ println(cres)
+ }
+ ("operator " + ind) |: tres == cres
+ }).reduceLeft(_ && _)
+ }
+
+ property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
+ try {
+ val toadd = colltoadd
+ val tr = t ++ toadd.iterator
+ val cr = coll ++ toadd.iterator
+ if (!areEqual(tr, cr)) {
+ println("from: " + t)
+ println("and: " + coll.iterator.toList)
+ println("adding: " + toadd)
+ println(tr.toList)
+ println(cr.iterator.toList)
+ }
+ ("adding " |: areEqual(tr, cr)) &&
+ (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+ val tadded = t ++ trav
+ val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+ if (!areEqual(tadded, cadded)) {
+ println("----------------------")
+ println("from: " + t)
+ println("and: " + coll)
+ println("adding: " + trav)
+ println(tadded)
+ println(cadded)
+ }
+ ("traversable " + ind) |: areEqual(tadded, cadded)
+ }).reduceLeft(_ && _)
+ } catch {
+ case e: java.lang.Exception =>
+ throw e
+ }
+ }
+
+ if (hasStrictOrder) property("copies to array must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val tarr = newArray(t.size)
+ val collarr = newArray(coll.size)
+ t.copyToArray(tarr, 0, t.size)
+ coll.copyToArray(collarr, 0, coll.size)
+ if (tarr.toSeq != collarr.toSeq) {
+ println("from: " + t)
+ println("and: " + coll)
+ println(tarr.toSeq)
+ println(collarr.toSeq)
+ }
+ tarr.toSeq == collarr.toSeq
+ }
+
+ if (hasStrictOrder) property("scans must be equal") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
+ val tscan = t.scanLeft(first)(op)
+ val cscan = coll.scan(first)(op)
+ if (tscan != cscan || cscan != tscan) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("scans are: ")
+ println(tscan)
+ println(cscan)
+ }
+ ("operator " + ind) |: tscan == cscan && cscan == tscan
+ }).reduceLeft(_ && _)
+ }
+
+ property("groupBy must be equal") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
+ val tgroup = t.groupBy(f)
+ val cgroup = coll.groupBy(f)
+ if (tgroup != cgroup || cgroup != tgroup) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("groups are: ")
+ println(tgroup)
+ println(cgroup)
+ }
+ ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
+ }).reduceLeft(_ && _)
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
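
A note on the recurring shape of these properties: each check builds one labeled Prop per operator and folds the list with &&, so a failure report names the exact operator index. A reduced sketch:

    import org.scalacheck.Prop
    import org.scalacheck.Prop._

    val checks: List[Prop] =
      for ((n, ind) <- List(1, 2, 3).zipWithIndex)
        yield ("op index: " + ind) |: (n > 0)

    // if any sub-property fails, the report carries its label
    val combined: Prop = checks.reduceLeft(_ && _)
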
diff --git a/test/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/scalacheck/parallel-collections/ParallelMapCheck1.scala
new file mode 100644
index 0000000000..50aa4ad0c7
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelMapCheck1.scala
@@ -0,0 +1,67 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) {
+ type CollType <: ParMap[K, V]
+
+ property("gets iterated keys") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
+ val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
+ ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
+ ("Par contains elements of itself" |: containsSelf.forall(_ == true))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/parallel-collections/ParallelRangeCheck.scala b/test/scalacheck/parallel-collections/ParallelRangeCheck.scala
new file mode 100644
index 0000000000..5b783fadf2
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelRangeCheck.scala
@@ -0,0 +1,75 @@
+package scala.collection.parallel
+package immutable
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+
+abstract class ParallelRangeCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelSeqCheck[Int](s"ParallelRange[Int] ($descriptor)") with ops.IntSeqOperators {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = collection.parallel.ParSeq[Int]
+
+ def hasStrictOrder = true
+
+ def isCheckingViews = false
+
+ def ofSize(vals: Seq[Gen[Int]], sz: Int) = throw new UnsupportedOperationException
+
+ override def instances(vals: Seq[Gen[Int]]): Gen[Seq[Int]] = sized { start =>
+ sized { end =>
+ sized { step =>
+ new Range(start, end, if (step != 0) step else 1)
+ }
+ }
+ }
+
+ def fromSeq(a: Seq[Int]) = a match {
+ case r: Range =>
+ val pr = ParRange(r.start, r.end, r.step, false)
+ pr.tasksupport = tasksupport
+ pr
+ case _ =>
+ val pa = new parallel.mutable.ParArray[Int](a.length)
+ pa.tasksupport = tasksupport
+ for (i <- 0 until a.length) pa(i) = a(i)
+ pa
+ }
+
+ override def traversable2Seq(t: Traversable[Int]): Seq[Int] = t match {
+ case r: Range => r
+ case _ => t.toSeq
+ }
+
+ def values = Seq(choose(-100, 100))
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
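
fromSeq above keeps genuine ranges as a ParRange and only falls back to copying into a ParArray for other seqs; the ParRange factory mirrors Range, as this sketch shows:

    import scala.collection.parallel.immutable.ParRange

    val pr = ParRange(0, 10, 2, false) // start, end, step, inclusive
    assert(pr.toList == List(0, 2, 4, 6, 8))
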
diff --git a/test/scalacheck/parallel-collections/ParallelSeqCheck.scala b/test/scalacheck/parallel-collections/ParallelSeqCheck.scala
new file mode 100644
index 0000000000..48c3d3f745
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelSeqCheck.scala
@@ -0,0 +1,300 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+
+abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableCheck[T](collName) with SeqOperators[T] {
+
+ type CollType <: collection.parallel.ParSeq[T]
+
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int): Seq[T]
+ def fromSeq(s: Seq[T]): CollType
+
+ override def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = oneOf(
+ Gen.const(ofSize(vals, 1)),
+ sized(
+ sz =>
+ ofSize(vals, sz)
+ ),
+ for (sz <- choose(1000, 2000)) yield ofSize(vals, sz)
+ )
+
+
+ def fromTraversable(t: Traversable[T]) = fromSeq(traversable2Seq(t))
+ def traversable2Seq(t: Traversable[T]): Seq[T] = {
+ if (t.isInstanceOf[Iterable[_]]) t.asInstanceOf[Iterable[T]].iterator.toList else t.toList
+ }
+
+ override def collectionPairs: Gen[(Seq[T], CollType)] = for (inst <- instances(values)) yield (inst, fromSeq(inst))
+
+ override def collectionPairsWithLengths: Gen[(Seq[T], CollType, Int)] =
+ for (inst <- instances(values); s <- choose(0, inst.size)) yield (inst, fromSeq(inst), s)
+
+ def collectionPairsWithModifiedWithLengths: Gen[(Seq[T], CollType, ParSeq[T], Int)] =
+ for (inst <- instances(values); s <- choose(0, inst.size);
+ updateStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val parcoll = fromSeq(inst)
+ val parcollmodif = fromSeq(modifySlightly(inst, updateStart, howMany))
+ (inst, parcoll, parcollmodif, s)
+ }
+
+ def collectionPairsWithModified: Gen[(Seq[T], CollType, ParSeq[T])] =
+ for (inst <- instances(values); updateStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val parcoll = fromSeq(inst)
+ val parcollmodif = fromSeq(modifySlightly(inst, updateStart, howMany))
+ (inst, parcoll, parcollmodif)
+ }
+
+ def collectionPairsWithSliced: Gen[(Seq[T], CollType, ParSeq[T])] =
+ for (inst <- instances(values); sliceStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val parcoll = fromSeq(inst)
+ val parcollsliced = fromSeq(inst.slice(sliceStart, sliceStart + howMany))
+ (inst, parcoll, parcollsliced)
+ }
+
+ def collectionTripletsWith2Indices: Gen[(Seq[T], CollType, Seq[T], Int, Int)] =
+ for (inst <- instances(values); f <- choose(0, inst.size); s <- choose(0, inst.size - f);
+ third <- instances(values); sliceStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ (inst, fromSeq(inst), inst.slice(sliceStart, sliceStart + howMany), f, s)
+ }
+
+ private def modifySlightly(coll: Seq[T], updateStart: Int, howMany: Int) = {
+ coll.patch(updateStart, coll, howMany)
+ }
+
+ property("segmentLengths must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
+ val slen = s.segmentLength(pred, if (len < 0) 0 else len)
+ val clen = coll.segmentLength(pred, len)
+ if (slen != clen) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(slen)
+ println(clen)
+ }
+ ("operator " + ind) |: slen == clen
+ }).reduceLeft(_ && _)
+ }
+
+ property("prefixLengths must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
+ ("operator " + ind) |: s.prefixLength(pred) == coll.prefixLength(pred)
+ }).reduceLeft(_ && _)
+ }
+
+ property("indexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ (for ((pred, ind) <- indexWherePredicates.zipWithIndex) yield {
+ val sind = s.indexWhere(pred, len)
+ val cind = coll.indexWhere(pred, len)
+ if (sind != cind) {
+ println("from: " + s)
+ println("and: " + coll)
+ println("at: " + len)
+ println(sind)
+ println(cind)
+ }
+ ("operator " + ind) |: sind == cind
+ }).reduceLeft(_ && _)
+ }
+
+ property("lastIndexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ (for ((pred, ind) <- lastIndexWherePredicates.zipWithIndex) yield {
+ val end = if (len >= s.size) s.size - 1 else len
+ val sind = s.lastIndexWhere(pred, end)
+ val cind = coll.lastIndexWhere(pred, end)
+ ("operator " + ind) |: sind == cind
+ }).reduceLeft(_ && _)
+ }
+
+ property("reverses must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ (s.length == 0 && s.getClass == classOf[collection.immutable.Range]) ||
+ {
+ val sr = s.reverse
+ val cr = coll.reverse
+ if (sr != cr) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(sr)
+ println(cr)
+ }
+ sr == cr
+ }
+ }
+
+ property("reverseMaps must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ (for ((f, ind) <- reverseMapFunctions.zipWithIndex) yield {
+ ("operator " + ind) |: s.reverseMap(f) == coll.reverseMap(f)
+ }).reduceLeft(_ && _)
+ }
+
+ property("sameElements must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) {
+ case (s, coll, collmodif, len) =>
+ val pos = if (len < 0) 0 else len
+ val scm = s.sameElements(collmodif)
+ val ccm = coll.sameElements(collmodif)
+ if (scm != ccm) {
+ println("Comparing: " + s)
+ println("and: " + coll)
+ println("with: " + collmodif)
+ println(scm)
+ println(ccm)
+ }
+ ("Nil" |: s.sameElements(Nil) == coll.sameElements(Nil)) &&
+ ("toList" |: s.sameElements(s.toList) == coll.sameElements(coll.toList)) &&
+ ("identity" |: s.sameElements(s.map(e => e)) == coll.sameElements(coll.map(e => e))) &&
+ ("vice-versa" |: s.sameElements(coll) == coll.sameElements(s)) &&
+ ("equal" |: s.sameElements(coll)) &&
+ ("modified" |: scm == ccm) &&
+ (for ((it, ind) <- sameElementsSeqs.zipWithIndex) yield {
+ val sres = s.sameElements(it)
+ val pres = coll.sameElements(it)
+ if (sres != pres) {
+ println("Comparing: " + s)
+ println("and: " + coll)
+ println("with: " + it)
+ println(sres)
+ println(pres)
+ }
+ ("collection " + ind) |: sres == pres
+ }).reduceLeft(_ && _)
+ }
+
+ property("startsWiths must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) {
+ case (s, coll, collmodif, len) =>
+ val pos = if (len < 0) 0 else len
+ ("start with self" |: s.startsWith(s) == coll.startsWith(coll)) &&
+ ("tails correspond" |: (s.length == 0 || s.startsWith(s.tail, 1) == coll.startsWith(coll.tail, 1))) &&
+ ("with each other" |: coll.startsWith(s)) &&
+ ("modified" |: s.startsWith(collmodif) == coll.startsWith(collmodif)) &&
+ ("modified2" |: s.startsWith(collmodif, pos) == coll.startsWith(collmodif, pos)) &&
+ (for (sq <- startEndSeqs) yield {
+ val ss = s.startsWith(sq, pos)
+ val cs = coll.startsWith(fromSeq(sq), pos)
+ if (ss != cs) {
+ println("from: " + s)
+ println("and: " + coll)
+ println("test seq: " + sq)
+ println("from pos: " + pos)
+ println(ss)
+ println(cs)
+ println(coll.iterator.psplit(pos, coll.length - pos)(1).toList)
+ }
+ ("seq " + sq) |: ss == cs
+ }).reduceLeft(_ && _)
+ }
+
+ property("endsWiths must be equal") = forAllNoShrink(collectionPairsWithModified) {
+ case (s, coll, collmodif) =>
+    ("ends with self" |: s.endsWith(s) == coll.endsWith(coll)) &&
+ ("ends with tail" |: (s.length == 0 || s.endsWith(s.tail) == coll.endsWith(coll.tail))) &&
+ ("with each other" |: coll.endsWith(s)) &&
+    ("modified" |: s.endsWith(collmodif) == coll.endsWith(collmodif)) &&
+ (for (sq <- startEndSeqs) yield {
+ val sew = s.endsWith(sq)
+ val cew = coll.endsWith(fromSeq(sq))
+ if (sew != cew) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(sew)
+ println(cew)
+ }
+ ("seq " + sq) |: sew == cew
+ }).reduceLeft(_ && _)
+ }
+
+ property("unions must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, collmodif) =>
+ ("modified" |: s.union(collmodif.seq) == coll.union(collmodif)) &&
+ ("empty" |: s.union(Nil) == coll.union(fromSeq(Nil)))
+ }
+
+  // This was failing with the views patch: array index out of bounds in the array iterator.
+  // It was unclear why this test, and only this one, was affected.
+  //
+  // Resolution: some corner cases were missing from the patch method in ParSeqLike,
+  // which, curiously, had gone undetected before.
+ //
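+  // For reference, a worked example of the sequential patch semantics this
+  // property checks against (illustrative values, not part of the test data):
+  //
+  //   Seq(1, 2, 3, 4).patch(1, Seq(9), 2) == Seq(1, 9, 4)
+  //
+  // i.e. drop two elements starting at index 1 and splice in Seq(9).
+  //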
+ if (!isCheckingViews) property("patches must be equal") = forAll(collectionTripletsWith2Indices) {
+ case (s, coll, pat, from, repl) =>
+ ("with seq" |: s.patch(from, pat, repl) == coll.patch(from, pat, repl)) &&
+ ("with par" |: s.patch(from, pat, repl) == coll.patch(from, fromSeq(pat), repl)) &&
+ ("with empty" |: s.patch(from, Nil, repl) == coll.patch(from, fromSeq(Nil), repl)) &&
+ ("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(from, fromSeq(List(coll(0))), 1)))
+ }
+
+ if (!isCheckingViews) property("updates must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ val pos = if (len >= s.length) s.length - 1 else len
+ if (s.length > 0) {
+ val supd = s.updated(pos, s(0))
+ val cupd = coll.updated(pos, coll(0))
+ if (supd != cupd) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(supd)
+ println(cupd)
+ }
+ "from first" |: (supd == cupd)
+ } else "trivially" |: true
+ }
+
+ property("prepends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ s.length == 0 || s(0) +: s == coll(0) +: coll
+ }
+
+ property("appends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ s.length == 0 || s :+ s(0) == coll :+ coll(0)
+ }
+
+ property("padTos must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ val someValue = sampleValue
+ val sdoub = s.padTo(len * 2, someValue)
+ val cdoub = coll.padTo(len * 2, someValue)
+ if (sdoub != cdoub) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(sdoub)
+ println(cdoub)
+ }
+ ("smaller" |: s.padTo(len / 2, someValue) == coll.padTo(len / 2, someValue)) &&
+ ("bigger" |: sdoub == cdoub)
+ }
+
+ property("corresponds must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, modified) =>
+ val modifcut = modified.toSeq.slice(0, modified.length)
+ ("self" |: s.corresponds(s)(_ == _) == coll.corresponds(coll)(_ == _)) &&
+ ("modified" |: s.corresponds(modified.seq)(_ == _) == coll.corresponds(modified)(_ == _)) &&
+ ("modified2" |: s.corresponds(modifcut)(_ == _) == coll.corresponds(modifcut)(_ == _))
+ }
+
+}
+
diff --git a/test/scalacheck/parallel-collections/ParallelSetCheck.scala b/test/scalacheck/parallel-collections/ParallelSetCheck.scala
new file mode 100644
index 0000000000..c22dddf96d
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelSetCheck.scala
@@ -0,0 +1,62 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) {
+ type CollType <: ParSet[T]
+
+  property("contains all iterated elements") = forAllNoShrink(collectionPairs) {
+    case (t, coll) =>
+    val containsT = for (elem <- t) yield (coll.contains(elem))
+    val containsSelf = for (elem <- coll) yield (coll.contains(elem))
+    ("Par contains elements of the sequential collection" |: containsT.forall(_ == true)) &&
+    ("Par contains elements of itself" |: containsSelf.forall(_ == true))
+ }
+
+}
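+
+// A minimal sketch (illustrative only, not part of this commit) of the shape a
+// concrete subclass takes, assuming the IntOperators/IntValues mixins used by
+// the other checks in this directory:
+//
+//   abstract class IntParallelSetCheck(val tasksupport: TaskSupport, descriptor: String)
+//     extends ParallelSetCheck[Int](s"ParHashSet[Int] ($descriptor)")
+//     with IntOperators with IntValues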
+
diff --git a/test/scalacheck/parallel-collections/ParallelVectorCheck.scala b/test/scalacheck/parallel-collections/ParallelVectorCheck.scala
new file mode 100644
index 0000000000..1afcf2ce4c
--- /dev/null
+++ b/test/scalacheck/parallel-collections/ParallelVectorCheck.scala
@@ -0,0 +1,67 @@
+package scala.collection
+package parallel.immutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+import immutable.Vector
+import immutable.VectorBuilder
+
+import scala.collection.parallel.TaskSupport
+
+
+
+
+abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.ParallelSeqCheck[T]("ParVector[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParVector[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = true
+
+ def tasksupport: TaskSupport
+
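+  // Picks one of the value generators at random and samples it sz times into a
+  // Vector.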
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val vb = new immutable.VectorBuilder[T]()
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) vb += sample(gen)
+ vb.result
+ }
+
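+  // Builds the parallel vector through its combiner, then attaches the task
+  // support under test before returning it.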
+ def fromSeq(a: Seq[T]) = {
+ val pc = ParVector.newCombiner[T]
+ for (elem <- a.toList) pc += elem
+ val pv = pc.result
+ pv.tasksupport = tasksupport
+ pv
+ }
+
+}
+
+
+
+abstract class IntParallelVectorCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelVectorCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
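+  // Besides random instances, also generate plain ascending runs of
+  // non-negative and of negative integers as corner cases.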
+ override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+ (0 until sz).toArray.toSeq
+ }, sized { sz =>
+ (-sz until 0).toArray.toSeq
+ })
+}
+
diff --git a/test/scalacheck/parallel-collections/pc.scala b/test/scalacheck/parallel-collections/pc.scala
new file mode 100644
index 0000000000..10d0643be8
--- /dev/null
+++ b/test/scalacheck/parallel-collections/pc.scala
@@ -0,0 +1,61 @@
+// package here to be able to access the package-private implementation and shut down the pool
+package scala
+
+import org.scalacheck._
+import scala.collection.parallel._
+
+class ParCollProperties extends Properties("Parallel collections") {
+
+ def includeAllTestsWith(support: TaskSupport, descriptor: String) {
+    // parallel arrays
+ include(new mutable.IntParallelArrayCheck(support, descriptor) { })
+
+ // parallel ranges
+ include(new immutable.ParallelRangeCheck(support, descriptor) { })
+
+ // parallel immutable hash maps (tries)
+ include(new immutable.IntIntParallelHashMapCheck(support, descriptor) { })
+
+ // parallel immutable hash sets (tries)
+ include(new immutable.IntParallelHashSetCheck(support, descriptor) { })
+
+ // parallel mutable hash maps (tables)
+ include(new mutable.IntIntParallelHashMapCheck(support, descriptor) { })
+
+ // parallel ctrie
+ include(new mutable.IntIntParallelConcurrentTrieMapCheck(support, descriptor) { })
+
+ // parallel mutable hash sets (tables)
+ include(new mutable.IntParallelHashSetCheck(support, descriptor) { })
+
+ // parallel vectors
+ include(new immutable.IntParallelVectorCheck(support, descriptor) { })
+ }
+
+ includeAllTestsWith(defaultTaskSupport, "defaultTaskSupport")
+
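+  // run the same suite again, this time on a fixed-size thread pool driven
+  // through an ExecutionContext-backed task support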
+ val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
+ val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
+ includeAllTestsWith(ectasks, "ectasks")
+
+  // ScalaCheck offers no post-test hooks, so we cannot shut the pool down after the run:
+ // ec.shutdown()
+
+}
+
+/*
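+NB: a manual-runner sketch; Test.Params below is the pre-1.12 ScalaCheck API,
+and newer versions use Test.Parameters instead.
+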
+def main(args: Array[String]) {
+ val pc = new ParCollProperties
+ org.scalacheck.Test.checkProperties(
+ org.scalacheck.Test.Params(
+ rng = new java.util.Random(5134L),
+ testCallback = new ConsoleReporter(0),
+ workers = 1,
+ minSize = 0,
+ maxSize = 4000,
+ minSuccessfulTests = 5
+ ),
+ pc
+ )
+}
+*/