summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r-- src/library/scala/collection/parallel/ParIterableLike.scala          | 10
-rw-r--r-- src/library/scala/collection/parallel/immutable/ParHashMap.scala     | 14
-rw-r--r-- src/library/scala/collection/parallel/immutable/ParHashSet.scala     | 13
-rw-r--r-- src/library/scala/collection/parallel/immutable/ParRange.scala       | 13
-rw-r--r-- src/library/scala/collection/parallel/immutable/ParVector.scala      | 12
-rw-r--r-- src/library/scala/collection/parallel/mutable/ParArray.scala         |  4
-rw-r--r-- src/library/scala/collection/parallel/mutable/ParCtrie.scala         | 12
-rw-r--r-- src/library/scala/collection/parallel/mutable/ParHashMap.scala       |  2
-rw-r--r-- src/library/scala/collection/parallel/mutable/ParHashSet.scala       |  2
9 files changed, 78 insertions(+), 4 deletions(-)
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 5e6bf8c1a3..7e0fa366ab 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -58,7 +58,7 @@ import annotation.unchecked.uncheckedStable
* }}}
*
* which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`.
- * Parallel iterators have a method `remaining` to check the remaining number of elements,
+ * Splitters have a method `remaining` to check the remaining number of elements,
* and method `split` which is defined by splitters. Method `split` divides the splitters
* iterate over into disjunct subsets:
*
@@ -96,7 +96,7 @@ import annotation.unchecked.uncheckedStable
* The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible
* way to change between different collection types.
*
- * Since this trait extends the `Iterable` trait, methods like `size` must also
+ * Since this trait extends the `GenIterable` trait, methods like `size` must also
* be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
*
@@ -116,7 +116,7 @@ import annotation.unchecked.uncheckedStable
* which do not know the number of elements remaining. To do this, the new collection implementation must override
* `isStrictSplitterCollection` to `false`. This will make some operations unavailable.
*
- * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `parallelIterator`,
+ * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `splitter`,
* `newCombiner` and `seq`. Having an implicit combiner factory requires extending this trait in addition, as
* well as providing a companion object, as with regular collections.
*
@@ -159,6 +159,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
@volatile
private var _tasksupport = defaultTaskSupport
+ protected def initTaskSupport() {
+ _tasksupport = defaultTaskSupport
+ }
+
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 266b179401..52d6531f9e 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -116,7 +116,21 @@ self =>
def remaining = sz - i
override def toString = "HashTrieIterator(" + sz + ")"
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
+ /* debug */
+
private[parallel] def printDebugInfo() {
println("Parallel hash trie")
println("Top level inner trie type: " + trie.getClass)
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 0d7f04976e..0a19afc426 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -111,6 +111,19 @@ self =>
}
def remaining = sz - i
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
}
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 64e07ce4ff..364175fe41 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -105,6 +105,19 @@ self =>
cb
}
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
}
object ParRange {
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 5d9c431bc1..310b09a016 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -78,6 +78,18 @@ extends ParSeq[T]
splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
}
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
}
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 5c3da66be0..683b7eaa9a 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -672,9 +672,11 @@ self =>
private def readObject(in: java.io.ObjectInputStream) {
in.defaultReadObject
-
+
// get raw array from arrayseq
array = arrayseq.array.asInstanceOf[Array[Any]]
+
+ initTaskSupport()
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
index 470972adad..b4dc8beb2a 100644
--- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala
+++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
@@ -115,6 +115,18 @@ extends ParMap[K, V]
override def merge(that: Size) = result = result + that.result
}
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 6ce6c45460..72526aadb1 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -106,6 +106,8 @@ self =>
private def readObject(in: java.io.ObjectInputStream) {
init[V](in, new Entry(_, _))
+
+ initTaskSupport()
}
private[parallel] override def brokenInvariants = {
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index e0a2ab03df..84b7c4e42c 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -84,6 +84,8 @@ extends ParSet[T]
private def readObject(in: java.io.ObjectInputStream) {
init(in, x => x)
+
+ initTaskSupport()
}
import collection.DebugUtils._