summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAleksandar Prokopec <aleksandar.prokopec@epfl.ch>2010-12-09 10:08:24 +0000
committerAleksandar Prokopec <aleksandar.prokopec@epfl.ch>2010-12-09 10:08:24 +0000
commit11dfc5a64dd8bbcb7fca7d608a23b513316de6cc (patch)
treec951c2e3730ad2a88de39e9d5ca40303e22d2c91
parentf2ecbd04691b1914e2f77c60afc2b296aa6826ae (diff)
downloadscala-11dfc5a64dd8bbcb7fca7d608a23b513316de6cc.tar.gz
scala-11dfc5a64dd8bbcb7fca7d608a23b513316de6cc.tar.bz2
scala-11dfc5a64dd8bbcb7fca7d608a23b513316de6cc.zip
Made parallel collections serializable.
No review.
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala7
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala5
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala5
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala1
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala2
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala9
-rw-r--r--src/library/scala/collection/parallel/UnrolledBuffer.scala29
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala21
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala10
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala12
-rw-r--r--src/library/scala/collection/parallel/package.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala2
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala2
-rw-r--r--test/files/jvm/serialization.check35
-rw-r--r--test/files/jvm/serialization.scala57
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala2
-rw-r--r--test/files/scalacheck/parallel-collections/pc.scala2
25 files changed, 189 insertions, 34 deletions
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 1c3c43269d..0d87a2d548 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -26,15 +26,12 @@ trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]]
extends GenericTraversableTemplate[A, CC]
with HasNewCombiner[A, CC[A] @uncheckedVariance]
{
- private[collection] def tasksupport: TaskSupport
-
def companion: GenericCompanion[CC] with GenericParCompanion[CC]
protected[this] override def newBuilder: collection.mutable.Builder[A, CC[A]] = newCombiner
protected[this] override def newCombiner: Combiner[A, CC[A]] = {
val cb = companion.newCombiner[A]
- cb.tasksupport.environment = tasksupport.environment
cb
}
@@ -42,7 +39,6 @@ extends GenericTraversableTemplate[A, CC]
def genericCombiner[B]: Combiner[B, CC[B]] = {
val cb = companion.newCombiner[B]
- cb.tasksupport.environment = tasksupport.environment
cb
}
@@ -51,13 +47,10 @@ extends GenericTraversableTemplate[A, CC]
trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]]
{
- private[collection] def tasksupport: TaskSupport
-
def mapCompanion: GenericParMapCompanion[CC]
def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = {
val cb = mapCompanion.newCombiner[P, Q]
- cb.tasksupport.environment = tasksupport.environment
cb
}
}
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index 93522185fb..7b133cdbba 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -57,10 +57,7 @@ self: EnvironmentPassingCombiner[Elem, To] =>
trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
abstract override def result = {
val res = super.result
- res match {
- case pc: ParIterableLike[_, _, _] => pc.tasksupport.environment = tasksupport.environment
- case _ =>
- }
+ //
res
}
}
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index d3e6eb42d4..caa4af10c9 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -15,6 +15,8 @@ import scala.collection.generic._
import java.util.concurrent.atomic.AtomicBoolean
+import annotation.unchecked.uncheckedStable
+
// TODO update docs!!
/** A template trait for parallel collections of type `ParIterable[T]`.
@@ -126,7 +128,6 @@ extends IterableLike[T, Repr]
{
self =>
- private[collection] final val tasksupport: TaskSupport = getTaskSupport
import tasksupport._
/** Parallel iterators are split iterators that have additional accessor and
@@ -476,7 +477,7 @@ self =>
val copythis = new Copy(() => pbf(repr), parallelIterator)
val copythat = wrap {
val othtask = new other.Copy(() => pbf(self.repr), other.parallelIterator)
- other.tasksupport.executeAndWaitResult(othtask)
+ tasksupport.executeAndWaitResult(othtask)
}
val task = (copythis parallel copythat) { _ combine _ } mapResult {
_.result
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 6fb924e57e..6e3d2ff9fe 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -49,7 +49,6 @@ self =>
trait Transformed[+S] extends ParIterableView[S, Coll, CollSeq] with super.Transformed[S] {
override def parallelIterator: ParIterableIterator[S]
override def iterator = parallelIterator
- tasksupport.environment = self.tasksupport.environment
}
trait Sliced extends super.Sliced with Transformed[T] {
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 91e15fa946..58e8bcd031 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -244,7 +244,7 @@ self =>
val copystart = new Copy[U, That](() => pbf(repr), pits(0))
val copymiddle = wrap {
val tsk = new that.Copy[U, That](() => pbf(repr), that.parallelIterator)
- that.tasksupport.executeAndWaitResult(tsk)
+ tasksupport.executeAndWaitResult(tsk)
}
val copyend = new Copy[U, That](() => pbf(repr), pits(2))
executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index b111ecb87c..964e01e8d1 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -264,7 +264,7 @@ trait ThreadPoolTasks extends Tasks {
var environment: AnyRef = ThreadPoolTasks.defaultThreadPool
def executor = environment.asInstanceOf[ThreadPoolExecutor]
def queue = executor.getQueue.asInstanceOf[LinkedBlockingQueue[Runnable]]
- var totaltasks = 0
+ @volatile var totaltasks = 0
private def incrTasks = synchronized {
totaltasks += 1
@@ -312,6 +312,13 @@ object ThreadPoolTasks {
Int.MaxValue,
60L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue[Runnable],
+ new ThreadFactory {
+ def newThread(r: Runnable) = {
+ val t = new Thread(r)
+ t.setDaemon(true)
+ t
+ }
+ },
new ThreadPoolExecutor.CallerRunsPolicy
)
}
diff --git a/src/library/scala/collection/parallel/UnrolledBuffer.scala b/src/library/scala/collection/parallel/UnrolledBuffer.scala
index c7a8b388bd..7f81cf779d 100644
--- a/src/library/scala/collection/parallel/UnrolledBuffer.scala
+++ b/src/library/scala/collection/parallel/UnrolledBuffer.scala
@@ -38,17 +38,19 @@ import annotation.tailrec
* @coll unrolled buffer
* @Coll UnrolledBuffer
*/
+@SerialVersionUID(1L)
class UnrolledBuffer[T](implicit val manifest: ClassManifest[T])
extends collection.mutable.Buffer[T]
with collection.mutable.BufferLike[T, UnrolledBuffer[T]]
with GenericClassManifestTraversableTemplate[T, UnrolledBuffer]
with collection.mutable.Builder[T, UnrolledBuffer[T]]
+ with Serializable
{
import UnrolledBuffer.Unrolled
- private var headptr = newUnrolled
- private var lastptr = headptr
- private var sz = 0
+ @transient private var headptr = newUnrolled
+ @transient private var lastptr = headptr
+ @transient private var sz = 0
private[parallel] def headPtr = headptr
private[parallel] def headPtr_=(head: Unrolled[T]) = headptr = head
@@ -146,6 +148,27 @@ extends collection.mutable.Buffer[T]
sz += elems.size
} else outofbounds(idx)
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ out.writeInt(sz)
+ for (elem <- this) out.writeObject(elem)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ val num = in.readInt
+
+ headPtr = newUnrolled
+ lastPtr = headPtr
+ sz = 0
+ var i = 0
+ while (i < num) {
+ this += in.readObject.asInstanceOf[T]
+ i += 1
+ }
+ }
+
override def stringPrefix = "UnrolledBuffer"
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index a411a1cc44..4d9475038d 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -28,10 +28,12 @@ import annotation.unchecked.uncheckedVariance
*
* @author prokopec
*/
+@SerialVersionUID(1L)
class ParHashMap[K, +V] private[immutable] (private[this] val trie: HashMap[K, V])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
with ParMapLike[K, V, ParHashMap[K, V], HashMap[K, V]]
+ with Serializable
{
self =>
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 0b1f9c5b7e..d17b258be6 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -29,10 +29,12 @@ import scala.collection.immutable.HashSet
*
* @author prokopec
*/
+@SerialVersionUID(1L)
class ParHashSet[T] private[immutable] (private[this] val trie: HashSet[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
with ParSetLike[T, ParHashSet[T], HashSet[T]]
+ with Serializable
{
self =>
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index ab5e509515..ec5aeefc87 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -11,8 +11,10 @@ import scala.collection.parallel.ParIterableIterator
-class ParRange(range: Range)
-extends ParSeq[Int] {
+@SerialVersionUID(1L)
+class ParRange(val range: Range)
+extends ParSeq[Int]
+ with Serializable {
self =>
def seq = range
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 8f70547a03..536976f5e3 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -37,15 +37,17 @@ import scala.collection.Sequentializable
* @define Coll ParArray
* @define coll parallel array
*/
+@SerialVersionUID(1L)
class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T])
extends ParSeq[T]
with GenericParTemplate[T, ParArray]
with ParSeqLike[T, ParArray[T], ArraySeq[T]]
+ with Serializable
{
self =>
- import tasksupport._
+ import collection.parallel.tasksupport._
- private val array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]]
+ @transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]]
override def companion: GenericCompanion[ParArray] with GenericParCompanion[ParArray] = ParArray
@@ -582,7 +584,7 @@ self =>
} else super.map(f)(bf)
override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[ParArray[T], U, That]): That =
- if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) {
+ if (parallelismLevel > 1 && buildsArray(cbf(repr))) {
// reserve an array
val targarrseq = new ArraySeq[U](length + 1)
val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
@@ -655,6 +657,19 @@ self =>
def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, parallelismLevel)
}
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ // get raw array from arrayseq
+ array = arrayseq.array.asInstanceOf[Array[Any]]
+ }
+
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 537c442e23..d231068b6c 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -11,11 +11,13 @@ import collection.mutable.HashTable
+@SerialVersionUID(1L)
class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]]
with ParHashTable[K, DefaultEntry[K, V]]
+ with Serializable
{
self =>
initWithContents(contents)
@@ -77,6 +79,14 @@ self =>
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) with SCPI
}
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, _.value)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ init[V](in, new Entry(_, _))
+ }
+
private[parallel] override def brokenInvariants = {
// bucket by bucket, count elements
val buckets = for (i <- 0 until (table.length / sizeMapBucketSize)) yield checkBucket(i)
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 66303862d3..cff8eeb9c9 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -13,11 +13,13 @@ import collection.parallel.UnrolledBuffer
+@SerialVersionUID(1L)
class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]]
with ParFlatHashTable[T]
+ with Serializable
{
initWithContents(contents)
// println("----> new par hash set!")
@@ -46,6 +48,8 @@ extends ParSet[T]
this
}
+ override def stringPrefix = "ParHashSet"
+
def contains(elem: T) = containsEntry(elem)
def parallelIterator = new ParHashSetIterator(0, table.length, size) with SCPI
@@ -58,6 +62,14 @@ extends ParSet[T]
def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) with SCPI
}
+ private def writeObject(s: java.io.ObjectOutputStream) {
+ serializeTo(s)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ init(in, x => x)
+ }
+
import collection.DebugUtils._
override def debugInformation = buildString {
append =>
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 6b3f3bf448..5faf73c1db 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -40,6 +40,8 @@ package object parallel {
if (util.Properties.isJavaAtLeast("1.6")) new ForkJoinTaskSupport
else new ThreadPoolTaskSupport
+ private[parallel] val tasksupport = getTaskSupport
+
/* implicit conversions */
/** An implicit conversion providing arrays with a `par` method, which
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
index 6ac8e7a3ad..96d75f480f 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
@@ -137,7 +137,7 @@ object RefParHashTableSetBenches extends ParHashTableSetBenches[Dummy] {
val phm = new ParHashSet[Dummy]
for (i <- 0 until sz) phm += new Dummy(i)
forkJoinPool.setParallelism(p)
- phm.tasksupport.environment = forkJoinPool
+ collection.parallel.tasksupport.environment = forkJoinPool
phm
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
index 83e3177324..53e6defd91 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
@@ -206,7 +206,7 @@ object RefParHashTableBenches extends ParHashTableBenches[Dummy, Dummy] {
val phm = new ParHashMap[Dummy, Dummy]
for (i <- 0 until sz) phm += ((new Dummy(i), new Dummy(i)))
forkJoinPool.setParallelism(p)
- phm.tasksupport.environment = forkJoinPool
+ collection.parallel.tasksupport.environment = forkJoinPool
phm
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
index 87a34e1e0e..81d4f095da 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
@@ -164,7 +164,7 @@ object RefParHashTrieBenches extends ParHashTrieBenches[Dummy, Dummy] {
var pht = new ParHashMap[Dummy, Dummy]
for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
forkJoinPool.setParallelism(p)
- pht.tasksupport.environment = forkJoinPool
+ collection.parallel.tasksupport.environment = forkJoinPool
pht
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
index 3c1cc47088..29d6ff7580 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
@@ -15,6 +15,7 @@ object MatrixMultiplication extends Companion {
class MatrixMultiplication(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = MatrixMultiplication
+ collection.parallel.tasksupport.environment = forkjoinpool
val a = Matrix.unit[Int](sz)
val b = Matrix.unit[Int](sz)
@@ -39,7 +40,6 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def assignProduct(a: Matrix[T], b: Matrix[T]) = {
val range = ParRange(0, n * n, 1, false)
- range.tasksupport.environment = forkjoinpool
for (i <- range) this(i / n, i % n) = calcProduct(a, b, i / n, i % n);
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
index c75432360b..957e258ca8 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
@@ -77,7 +77,7 @@ extends Bench with SequentialOps[T] {
for (i <- 0 until size) arr(i) = elemcreator(i)
case "par" =>
pa = new ParArray[T](size)
- pa.tasksupport.environment = forkjoinpool
+ collection.parallel.tasksupport.environment = forkjoinpool
for (i <- 0 until size) pa(i) = elemcreator(i)
case "jsr" =>
jsrarr = JSR166Array.create(size, cls, papool)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
index 4f32d366a4..1bd35180c8 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
@@ -22,7 +22,7 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
def createParallel(sz: Int, p: Int) = {
val pr = collection.parallel.immutable.ParRange(0, sz, 1, false)
forkJoinPool.setParallelism(p)
- pr.tasksupport.environment = forkJoinPool
+ collection.parallel.tasksupport.environment = forkJoinPool
pr
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
index abd9b7838f..1c1cd52120 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
@@ -31,7 +31,7 @@ extends ParSeqViewBenches[Dummy, ParSeqView[Dummy, ParSeq[Dummy], Seq[Dummy]], S
forkJoinPool.setParallelism(p)
for (i <- 0 until sz) pa(i) = new Dummy(i)
val v = pa.view
- v.tasksupport.environment = forkJoinPool
+ collection.parallel.tasksupport.environment = forkJoinPool
v
}
def createSeqView(sz: Int, p: Int) = createSequential(sz, p).view
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 7b2b8cc18f..0717de2a8e 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -270,4 +270,37 @@ x equals y: true, y equals x: true
1
2
1
-2 \ No newline at end of file
+2
+
+x = UnrolledBuffer(one, two)
+y = UnrolledBuffer(one, two)
+x equals y: true, y equals x: true
+
+x = ParArray(abc, def, etc)
+y = ParArray(abc, def, etc)
+x equals y: true, y equals x: true
+
+x = ParHashMap(1 -> 2, 2 -> 4)
+y = ParHashMap(1 -> 2, 2 -> 4)
+x equals y: true, y equals x: true
+
+x = ParHashSet(2, 1, 3)
+y = ParHashSet(2, 1, 3)
+x equals y: true, y equals x: true
+
+x = ParRange(0, 1, 2, 3, 4)
+y = ParRange(0, 1, 2, 3, 4)
+x equals y: true, y equals x: true
+
+x = ParRange(0, 1, 2, 3)
+y = ParRange(0, 1, 2, 3)
+x equals y: true, y equals x: true
+
+x = ParMap(5 -> 1, 10 -> 2)
+y = ParMap(5 -> 1, 10 -> 2)
+x equals y: true, y equals x: true
+
+x = ParSet(two, one)
+y = ParSet(two, one)
+x equals y: true, y equals x: true
+
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 2e34079505..8c4df8fc37 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -397,6 +397,7 @@ object Test3_mutable {
}
}
+
//############################################################################
// Test classes in package "scala.xml"
@@ -587,8 +588,64 @@ object Test {
Test6
Test7
Test8
+ Test9_parallel
}
}
//############################################################################
+
+//############################################################################
+// Test classes in package "scala.collection.parallel" and subpackages
+object Test9_parallel {
+ import scala.collection.parallel._
+
+ try {
+ println()
+
+ // UnrolledBuffer
+ val ub = new UnrolledBuffer[String]
+ ub ++= List("one", "two")
+ val _ub: UnrolledBuffer[String] = read(write(ub))
+ check(ub, _ub)
+
+ // mutable.ParArray
+ val pa = mutable.ParArray("abc", "def", "etc")
+ val _pa: mutable.ParArray[String] = read(write(pa))
+ check(pa, _pa)
+
+ // mutable.ParHashMap
+ val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4)
+ val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm))
+ check(mpm, _mpm)
+
+ // mutable.ParHashSet
+ val mps = mutable.ParHashSet(1, 2, 3)
+ val _mps: mutable.ParHashSet[Int] = read(write(mps))
+ check(mps, _mps)
+
+ // immutable.ParRange
+ val pr1 = immutable.ParRange(0, 4, 1, true)
+ val _pr1: immutable.ParRange = read(write(pr1))
+ check(pr1, _pr1)
+
+ val pr2 = immutable.ParRange(0, 4, 1, false)
+ val _pr2: immutable.ParRange = read(write(pr2))
+ check(pr2, _pr2)
+
+ // immutable.ParHashMap
+ val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2)
+ val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm))
+ check(ipm, _ipm)
+
+ // immutable.ParHashSet
+ val ips = immutable.ParHashSet("one", "two")
+ val _ips: immutable.ParHashSet[String] = read(write(ips))
+ check(ips, _ips)
+
+ } catch {
+ case e: Exception =>
+      println("Error in Test9_parallel: " + e)
+ throw e
+ }
+}
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 0dcd877ecb..60e8c8b1f2 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -86,7 +86,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
println("Collection debug info: ")
coll.printDebugBuffer
println("Task debug info: ")
- println(coll.tasksupport.debugMessages.mkString("\n"))
+ println(tasksupport.debugMessages.mkString("\n"))
}
def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int) {
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 590da6dba4..8d88d36610 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -52,7 +52,7 @@ object Test {
workers = 1,
minSize = 0,
maxSize = 4000,
- minSuccessfulTests = 120
+ minSuccessfulTests = 20
),
pc
)