author    Aleksandar Prokopec <axel22@gmail.com>    2013-10-28 11:20:23 +0100
committer Aleksandar Prokopec <axel22@gmail.com>    2013-10-29 12:15:10 +0100
commit    344ac60c3f34cc0a1c6e6aae1479878fe63476eb (patch)
tree      97e5de64b81b5cb76913e8773171a1ad93b43244 /test/files
parent    1819af77fd4ecc66c89a84ea321aa7d6f92285ec (diff)
SI-7938 - parallel collections should use default ExecutionContext
Parallel collections now use `scala.concurrent.ExecutionContext` by default. `ExecutionContextTaskSupport` is optimized to use the `ForkJoinPool` underlying the `ExecutionContext` when one is available. Otherwise, it falls back to a `TaskSupport` that builds a reduction tree and executes operations through `Future`s.
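A minimal sketch of what this means for client code, assuming the parallel collections are on the classpath; the fixed-size pool and the names `numbers`/`ec` are illustrative, mirroring the `ExecutionContextTaskSupport` wiring the updated `pc.scala` test uses:

```scala
import java.util.concurrent.Executors
import scala.collection.parallel.ExecutionContextTaskSupport
import scala.concurrent.ExecutionContext

object TaskSupportExample {
  def main(args: Array[String]): Unit = {
    // With this change, a parallel collection runs on the default
    // ExecutionContext-backed task support out of the box.
    val numbers = (1 to 1000).toVector.par
    println(numbers.map(_ * 2).sum)

    // To run on a specific ExecutionContext, wrap it in an
    // ExecutionContextTaskSupport and assign it to `tasksupport`.
    val ec = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4))
    numbers.tasksupport = new ExecutionContextTaskSupport(ec)
    println(numbers.count(_ % 3 == 0))

    ec.shutdown()
  }
}
```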
Diffstat (limited to 'test/files')
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala    |  5
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala    |  5
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala  |  5
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala  | 11
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala | 16
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala | 49
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala    |  8
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala   | 14
-rw-r--r--  test/files/scalacheck/parallel-collections/pc.scala                    | 65
9 files changed, 114 insertions, 64 deletions
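The hunks below all follow one pattern: each check `object` becomes a `class` parameterized by a `TaskSupport`, every collection the check constructs gets that support assigned before use, and `pc.scala` then includes each check twice, once with the default support and once with an `ExecutionContextTaskSupport`. A condensed, hypothetical sketch of that pattern (names such as `MyParArrayCheck` and `MySuite` are illustrative, not from the commit):

```scala
import org.scalacheck.{Gen, Prop, Properties}
import scala.collection.parallel.{ExecutionContextTaskSupport, TaskSupport, defaultTaskSupport}
import scala.collection.parallel.mutable.ParArray

// Hypothetical check: the TaskSupport under test is a constructor parameter.
class MyParArrayCheck(val tasksupport: TaskSupport) extends Properties("ParArray") {
  def fromSeq(a: Seq[Int]): ParArray[Int] = {
    val pa = new ParArray[Int](a.size)
    pa.tasksupport = tasksupport   // every instance under test uses the injected support
    for ((x, i) <- a.zipWithIndex) pa(i) = x
    pa
  }
  property("sum matches sequential") = Prop.forAll(Gen.listOf(Gen.choose(0, 100))) { xs =>
    fromSeq(xs).sum == xs.sum
  }
}

// The suite includes the same checks once per TaskSupport, as pc.scala does below.
object MySuite extends Properties("Parallel collections") {
  def includeAllTestsWith(support: TaskSupport): Unit = include(new MyParArrayCheck(support))

  includeAllTestsWith(defaultTaskSupport)
  includeAllTestsWith(new ExecutionContextTaskSupport(
    scala.concurrent.ExecutionContext.fromExecutorService(
      java.util.concurrent.Executors.newFixedThreadPool(5))))
}
```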
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
index 255c04498e..691a3e961e 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
def hasStrictOrder = true
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
val a = new mutable.ArrayBuffer[T](sz)
val gen = vals(rnd.nextInt(vals.size))
@@ -33,6 +35,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
def fromSeq(a: Seq[T]) = {
val pa = new ParArray[T](a.size)
+ pa.tasksupport = tasksupport
var i = 0
for (elem <- a.toList) {
pa(i) = elem
@@ -50,7 +53,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
}
-object IntParallelArrayCheck extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
+class IntParallelArrayCheck(val tasksupport: TaskSupport) extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
(0 until sz).toArray.toSeq
}, sized { sz =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
index b952704af2..cf15afb3b9 100644
--- a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -25,6 +25,8 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
val ct = new concurrent.TrieMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
@@ -34,6 +36,7 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
def fromTraversable(t: Traversable[(K, V)]) = {
val pct = new ParTrieMap[K, V]
+ pct.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
pct += kv
@@ -45,7 +48,7 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
}
-object IntIntParallelConcurrentTrieMapCheck extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
+class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport) extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
index 9299a201a1..34b3f33de2 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
val hm = new mutable.HashMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
@@ -33,6 +35,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def fromTraversable(t: Traversable[(K, V)]) = {
val phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
phm += kv
@@ -44,7 +47,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
}
-object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
index 8b41908a26..91de2472a7 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
val hm = new mutable.HashSet[T]
val gen = vals(rnd.nextInt(vals.size))
@@ -32,19 +34,20 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
def fromTraversable(t: Traversable[T]) = {
- val phm = new ParHashSet[T]
+ val phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
- phm += kv
+ phs += kv
i += 1
}
- phm
+ phs
}
}
-object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
with IntOperators
with IntValues
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
index bbec52dc92..9e29be5429 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
var hm = new immutable.HashMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
@@ -33,6 +35,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def fromTraversable(t: Traversable[(K, V)]) = {
var phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
phm += kv
@@ -44,7 +47,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
}
-object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
@@ -76,6 +79,8 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
var hm = new immutable.HashSet[T]
val gen = vals(rnd.nextInt(vals.size))
@@ -84,13 +89,14 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
def fromTraversable(t: Traversable[T]) = {
- var phm = new ParHashSet[T]
+ var phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
- phm += kv
+ phs += kv
i += 1
}
- phm
+ phs
}
override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
@@ -103,7 +109,7 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
-object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
with IntOperators
with IntValues
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 26fa71d72c..774d6f428b 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -358,30 +358,35 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}
property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
- val toadd = colltoadd
- val tr = t ++ toadd.iterator
- val cr = coll ++ toadd.iterator
- if (!areEqual(tr, cr)) {
- println("from: " + t)
- println("and: " + coll.iterator.toList)
- println("adding: " + toadd)
- println(tr.toList)
- println(cr.iterator.toList)
- }
- ("adding " |: areEqual(tr, cr)) &&
- (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
- val tadded = t ++ trav
- val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
- if (!areEqual(tadded, cadded)) {
- println("----------------------")
+ try {
+ val toadd = colltoadd
+ val tr = t ++ toadd.iterator
+ val cr = coll ++ toadd.iterator
+ if (!areEqual(tr, cr)) {
println("from: " + t)
- println("and: " + coll)
- println("adding: " + trav)
- println(tadded)
- println(cadded)
+ println("and: " + coll.iterator.toList)
+ println("adding: " + toadd)
+ println(tr.toList)
+ println(cr.iterator.toList)
}
- ("traversable " + ind) |: areEqual(tadded, cadded)
- }).reduceLeft(_ && _)
+ ("adding " |: areEqual(tr, cr)) &&
+ (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+ val tadded = t ++ trav
+ val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+ if (!areEqual(tadded, cadded)) {
+ println("----------------------")
+ println("from: " + t)
+ println("and: " + coll)
+ println("adding: " + trav)
+ println(tadded)
+ println(cadded)
+ }
+ ("traversable " + ind) |: areEqual(tadded, cadded)
+ }).reduceLeft(_ && _)
+ } catch {
+ case e: java.lang.Exception =>
+ throw e
+ }
}
if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala b/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
index 372d6b9fbd..f490d9490a 100644
--- a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
@@ -17,7 +17,7 @@ import scala.collection.parallel.ops._
-object ParallelRangeCheck extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
+class ParallelRangeCheck(val tasksupport: TaskSupport) extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
@@ -38,9 +38,13 @@ object ParallelRangeCheck extends ParallelSeqCheck[Int]("ParallelRange[Int]") wi
}
def fromSeq(a: Seq[Int]) = a match {
- case r: Range => ParRange(r.start, r.end, r.step, false)
+ case r: Range =>
+ val pr = ParRange(r.start, r.end, r.step, false)
+ pr.tasksupport = tasksupport
+ pr
case _ =>
val pa = new parallel.mutable.ParArray[Int](a.length)
+ pa.tasksupport = tasksupport
for (i <- 0 until a.length) pa(i) = a(i)
pa
}
diff --git a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala b/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
index a2b6cef96d..bbebd51919 100644
--- a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
@@ -17,6 +17,8 @@ import scala.collection.parallel.ops._
import immutable.Vector
import immutable.VectorBuilder
+import scala.collection.parallel.TaskSupport
+
@@ -30,6 +32,8 @@ abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.Pa
def hasStrictOrder = true
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
val vb = new immutable.VectorBuilder[T]()
val gen = vals(rnd.nextInt(vals.size))
@@ -38,16 +42,18 @@ abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.Pa
}
def fromSeq(a: Seq[T]) = {
- val pa = ParVector.newCombiner[T]
- for (elem <- a.toList) pa += elem
- pa.result
+ val pc = ParVector.newCombiner[T]
+ for (elem <- a.toList) pc += elem
+ val pv = pc.result
+ pv.tasksupport = tasksupport
+ pv
}
}
-object IntParallelVectorCheck extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
+class IntParallelVectorCheck(val tasksupport: TaskSupport) extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
(0 until sz).toArray.toSeq
}, sized { sz =>
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index c588692fd2..a3c1df4054 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -6,35 +6,52 @@
import org.scalacheck._
import scala.collection.parallel._
-class ParCollProperties extends Properties("Parallel collections") {
- /* Collections */
+// package here to be able access the package-private implementation and shutdown the pool
+package scala {
- // parallel arrays
- include(mutable.IntParallelArrayCheck)
+ class ParCollProperties extends Properties("Parallel collections") {
+
+ def includeAllTestsWith(support: TaskSupport) {
+ // parallel arrays with default task support
+ include(new mutable.IntParallelArrayCheck(support))
+
+ // parallel ranges
+ include(new immutable.ParallelRangeCheck(support))
+
+ // parallel immutable hash maps (tries)
+ include(new immutable.IntIntParallelHashMapCheck(support))
+
+ // parallel immutable hash sets (tries)
+ include(new immutable.IntParallelHashSetCheck(support))
+
+ // parallel mutable hash maps (tables)
+ include(new mutable.IntIntParallelHashMapCheck(support))
+
+ // parallel ctrie
+ include(new mutable.IntIntParallelConcurrentTrieMapCheck(support))
+
+ // parallel mutable hash sets (tables)
+ include(new mutable.IntParallelHashSetCheck(support))
+
+ // parallel vectors
+ include(new immutable.IntParallelVectorCheck(support))
+ }
+
+ includeAllTestsWith(defaultTaskSupport)
+
+ val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
+ val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
+ includeAllTestsWith(ectasks)
- // parallel ranges
- include(immutable.ParallelRangeCheck)
-
- // parallel immutable hash maps (tries)
- include(immutable.IntIntParallelHashMapCheck)
-
- // parallel immutable hash sets (tries)
- include(immutable.IntParallelHashSetCheck)
-
- // parallel mutable hash maps (tables)
- include(mutable.IntIntParallelHashMapCheck)
-
- // parallel ctrie
- include(mutable.IntIntParallelConcurrentTrieMapCheck)
-
- // parallel mutable hash sets (tables)
- include(mutable.IntParallelHashSetCheck)
+ // no post test hooks in scalacheck, so cannot do:
+ // ec.shutdown()
+
+ }
- // parallel vectors
- include(immutable.IntParallelVectorCheck)
}
-object Test extends ParCollProperties {
+
+object Test extends scala.ParCollProperties {
/*
def main(args: Array[String]) {
val pc = new ParCollProperties