author    Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>    2011-04-13 16:31:42 +0000
committer Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>    2011-04-13 16:31:42 +0000
commit    3de96153e5bfbde16dcc89bfbd71ff6e8cf1f6c6 (patch)
tree      2794a7bd176b315a9f4bdc3f5ef5553254b7dd47
parent    9b5cb18dbd2d3e87def5da47ae76adb2e776487e (diff)
Refactoring the collections api to support differentiation between referring
to a sequential collection and a parallel collection, and to support referring
to both types of collections.

New set of traits Gen* are now superclasses of both their * and Par*
subclasses. For example, GenIterable is a superclass of both Iterable and
ParIterable. Iterable and ParIterable are not in a subclassing relation.
The new class hierarchy is illustrated below (simplified, not all relations
and classes are shown):

TraversableOnce --> GenTraversableOnce
      ^                    ^
      |                    |
 Traversable    -->  GenTraversable
      ^                    ^
      |                    |
  Iterable      -->   GenIterable  <--  ParIterable
      ^                    ^                 ^
      |                    |                 |
     Seq        -->     GenSeq      <--    ParSeq

(the *Like, *View and *ViewLike traits have a similar hierarchy)

General views extract common view functionality from parallel and sequential
collections.

This design also allows for more flexible extensions to the collections
framework. It also allows slowly factoring out common functionality up into
Gen* traits.

From now on, it is possible to write this:

  import collection._

  val p = parallel.ParSeq(1, 2, 3)
  val g: GenSeq[Int] = p   // meaning a General Sequence
  val s = g.seq            // type of s is Seq[Int]

  for (elem <- g) {
    // do something without guarantees on sequentiality of foreach
    // this foreach may be executed in parallel
  }

  for (elem <- s) {
    // do something with a guarantee that foreach is executed in order, sequentially
  }

  for (elem <- p) {
    // do something concurrently, in parallel
  }

This also means that some signatures had to be changed. For example, method
`flatMap` now takes `A => GenTraversableOnce[B]`, and `zip` takes a
`GenIterable[B]`.

Also, there are mutable & immutable Gen* trait variants. They have generic
companion functionality.
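A minimal sketch (not part of the commit) of the signature changes mentioned in the last paragraphs; `firstMatching`, `sq` and `pq` are made-up names, while the GenSeq/GenIterable/GenTraversableOnce types and the new `zip`/`flatMap` parameter types are the ones this patch introduces:

  import scala.collection._

  // A helper written once against GenSeq accepts both a sequential Seq
  // and a parallel ParSeq.
  def firstMatching[A](xs: GenSeq[A])(p: A => Boolean): Option[A] = xs find p

  val sq = Seq(1, 2, 3)
  val pq = parallel.ParSeq(1, 2, 3)

  firstMatching(sq)(_ > 1)                   // searched sequentially
  firstMatching(pq)(_ > 1)                   // may be searched in parallel

  sq zip pq                                  // compiles: zip now accepts a GenIterable[B]
  sq flatMap (x => parallel.ParSeq(x, x))    // compiles: flatMap now takes A => GenTraversableOnce[B]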
-rw-r--r--src/library/scala/collection/CustomParallelizable.scala3
-rw-r--r--src/library/scala/collection/GenIterable.scala36
-rw-r--r--src/library/scala/collection/GenIterableLike.scala43
-rw-r--r--src/library/scala/collection/GenIterableView.scala18
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala83
-rw-r--r--src/library/scala/collection/GenMap.scala38
-rw-r--r--src/library/scala/collection/GenMapLike.scala61
-rw-r--r--src/library/scala/collection/GenSeq.scala36
-rw-r--r--src/library/scala/collection/GenSeqLike.scala162
-rw-r--r--src/library/scala/collection/GenSeqView.scala18
-rw-r--r--src/library/scala/collection/GenSeqViewLike.scala164
-rw-r--r--src/library/scala/collection/GenSet.scala37
-rw-r--r--src/library/scala/collection/GenSetLike.scala123
-rw-r--r--src/library/scala/collection/GenTraversable.scala39
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala100
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala23
-rw-r--r--src/library/scala/collection/GenTraversableOnceLike.scala173
-rw-r--r--src/library/scala/collection/GenTraversableView.scala18
-rw-r--r--src/library/scala/collection/GenTraversableViewLike.scala141
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala8
-rw-r--r--src/library/scala/collection/Iterable.scala7
-rw-r--r--src/library/scala/collection/IterableLike.scala8
-rw-r--r--src/library/scala/collection/IterableProxyLike.scala6
-rw-r--r--src/library/scala/collection/IterableView.scala2
-rw-r--r--src/library/scala/collection/IterableViewLike.scala92
-rw-r--r--src/library/scala/collection/Iterator.scala4
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala2
-rw-r--r--src/library/scala/collection/Map.scala4
-rw-r--r--src/library/scala/collection/MapLike.scala40
-rw-r--r--src/library/scala/collection/MapProxyLike.scala2
-rw-r--r--src/library/scala/collection/Parallel.scala4
-rw-r--r--src/library/scala/collection/Parallelizable.scala6
-rw-r--r--src/library/scala/collection/Seq.scala1
-rw-r--r--src/library/scala/collection/SeqLike.scala64
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala26
-rw-r--r--src/library/scala/collection/SeqView.scala2
-rw-r--r--src/library/scala/collection/SeqViewLike.scala160
-rw-r--r--src/library/scala/collection/Set.scala1
-rw-r--r--src/library/scala/collection/SetLike.scala92
-rw-r--r--src/library/scala/collection/SetProxyLike.scala14
-rw-r--r--src/library/scala/collection/SortedSetLike.scala2
-rw-r--r--src/library/scala/collection/Traversable.scala12
-rw-r--r--src/library/scala/collection/TraversableLike.scala27
-rw-r--r--src/library/scala/collection/TraversableOnce.scala579
-rw-r--r--src/library/scala/collection/TraversableOnceLike.scala627
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala4
-rw-r--r--src/library/scala/collection/TraversableView.scala2
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala139
-rw-r--r--src/library/scala/collection/generic/Addable.scala2
-rw-r--r--src/library/scala/collection/generic/CanCombineFrom.scala9
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala11
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala9
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala16
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/HasNewCombiner.scala9
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala6
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala11
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala9
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala9
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala18
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/Signalling.scala8
-rw-r--r--src/library/scala/collection/generic/Subtractable.scala2
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala2
-rw-r--r--src/library/scala/collection/immutable/GenIterable.scala37
-rw-r--r--src/library/scala/collection/immutable/GenMap.scala40
-rw-r--r--src/library/scala/collection/immutable/GenSeq.scala49
-rw-r--r--src/library/scala/collection/immutable/GenSet.scala43
-rw-r--r--src/library/scala/collection/immutable/GenTraversable.scala41
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala2
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala5
-rw-r--r--src/library/scala/collection/immutable/List.scala4
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala4
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala4
-rw-r--r--src/library/scala/collection/immutable/Map.scala1
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala4
-rw-r--r--src/library/scala/collection/immutable/MapProxy.scala2
-rw-r--r--src/library/scala/collection/immutable/Range.scala2
-rw-r--r--src/library/scala/collection/immutable/Seq.scala6
-rw-r--r--src/library/scala/collection/immutable/Set.scala6
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala4
-rw-r--r--src/library/scala/collection/immutable/Stream.scala8
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala12
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala2
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala4
-rw-r--r--src/library/scala/collection/immutable/Vector.scala4
-rw-r--r--src/library/scala/collection/interfaces/IterableMethods.scala6
-rw-r--r--src/library/scala/collection/interfaces/MapMethods.scala2
-rw-r--r--src/library/scala/collection/interfaces/SeqMethods.scala2
-rw-r--r--src/library/scala/collection/interfaces/TraversableMethods.scala4
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala3
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala4
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala1
-rw-r--r--src/library/scala/collection/mutable/GenIterable.scala37
-rw-r--r--src/library/scala/collection/mutable/GenMap.scala40
-rw-r--r--src/library/scala/collection/mutable/GenSeq.scala44
-rw-r--r--src/library/scala/collection/mutable/GenSet.scala46
-rw-r--r--src/library/scala/collection/mutable/GenTraversable.scala38
-rw-r--r--src/library/scala/collection/mutable/Iterable.scala5
-rw-r--r--src/library/scala/collection/mutable/Map.scala1
-rw-r--r--src/library/scala/collection/mutable/MapBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala6
-rw-r--r--src/library/scala/collection/mutable/MapProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala2
-rw-r--r--src/library/scala/collection/mutable/Seq.scala7
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala3
-rw-r--r--src/library/scala/collection/mutable/Set.scala1
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala6
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala4
-rw-r--r--src/library/scala/collection/mutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala5
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala12
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala385
-rw-r--r--src/library/scala/collection/parallel/ParIterableView.scala9
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala80
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala7
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala15
-rw-r--r--src/library/scala/collection/parallel/ParSeq.scala13
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala166
-rw-r--r--src/library/scala/collection/parallel/ParSeqView.scala6
-rw-r--r--src/library/scala/collection/parallel/ParSeqViewLike.scala79
-rw-r--r--src/library/scala/collection/parallel/ParSet.scala21
-rw-r--r--src/library/scala/collection/parallel/ParSetLike.scala46
-rw-r--r--src/library/scala/collection/parallel/PreciseSplitter.scala64
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala141
-rw-r--r--src/library/scala/collection/parallel/Splitter.scala51
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala11
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala26
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala5
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala10
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParVector.scala15
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala18
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala14
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala13
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala10
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala10
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala19
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala11
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala9
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala19
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizeableParArrayCombiner.scala111
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala106
-rw-r--r--src/library/scala/collection/parallel/package.scala16
-rw-r--r--src/library/scala/util/MurmurHash.scala4
-rw-r--r--test/disabled/scalacheck/HashTrieSplit.scala47
-rw-r--r--test/files/neg/t3774.check4
-rw-r--r--test/files/pos/spec-List.scala2
-rw-r--r--test/files/run/pc-conversions.scala8
-rw-r--r--test/files/scalacheck/HashTrieSplit.scala4
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala6
-rw-r--r--test/files/scalacheck/parallel-collections/pc.scala10
167 files changed, 3660 insertions, 1926 deletions
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index 3d0b8af2f2..dc634c67d3 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -11,8 +11,7 @@ package scala.collection
import parallel.Combiner
trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Parallelizable[A, ParRepr] {
- self: TraversableOnce[A] =>
-
override def par: ParRepr
override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("")
}
+
diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala
new file mode 100644
index 0000000000..8d735dd86d
--- /dev/null
+++ b/src/library/scala/collection/GenIterable.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for all iterable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenIterable[+A]
+extends GenIterableLike[A, GenIterable[A]]
+ with GenTraversable[A]
+ with GenericTraversableTemplate[A, GenIterable]
+{
+ def seq: Iterable[A]
+ override def companion: GenericCompanion[GenIterable] = GenIterable
+}
+
+
+object GenIterable extends TraversableFactory[GenIterable] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Iterable.newBuilder
+}
+
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
new file mode 100644
index 0000000000..d268d042a2
--- /dev/null
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -0,0 +1,43 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+import generic._
+
+
+
+/** A template trait for all iterable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * This trait contains abstract methods and methods that can be implemented
+ * directly in terms of other methods.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
+
+ def iterator: Iterator[A]
+
+ def sameElements[A1 >: A](that: GenIterable[A1]): Boolean
+
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That
+
+ def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That
+
+ def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That
+
+ def isEmpty = iterator.isEmpty
+
+ def head = iterator.next
+
+}
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
new file mode 100644
index 0000000000..2ae964bce3
--- /dev/null
+++ b/src/library/scala/collection/GenIterableView.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+
+trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { }
+
+
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
new file mode 100644
index 0000000000..9e3927eaf4
--- /dev/null
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -0,0 +1,83 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+import generic._
+import TraversableView.NoBuilder
+
+
+
+trait GenIterableViewLike[+A,
+ +Coll,
+ +This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]]
+extends GenIterable[A] with GenIterableLike[A, This] with GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This] {
+self =>
+
+ trait Transformed[+B] extends GenIterableView[B, Coll] with super.Transformed[B] {
+ def iterator: Iterator[B]
+ override def foreach[U](f: B => U): Unit = iterator foreach f
+ override def toString = viewToString
+ }
+
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView {
+ final def iterator: Iterator[Nothing] = Iterator.empty
+ }
+
+ trait Forced[B] extends super.Forced[B] with Transformed[B] {
+ def iterator = forced.iterator
+ }
+
+ trait Sliced extends super.Sliced with Transformed[A] {
+ def iterator: Iterator[A] = self.iterator.slice(from, until)
+ }
+
+ trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
+ def iterator = self.iterator map mapping
+ }
+
+ trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
+ def iterator: Iterator[B] = self.iterator flatMap mapping
+ }
+
+ trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
+ def iterator = self.iterator ++ rest
+ }
+
+ trait Filtered extends super.Filtered with Transformed[A] {
+ def iterator = self.iterator filter pred
+ }
+
+ trait TakenWhile extends super.TakenWhile with Transformed[A] {
+ def iterator = self.iterator takeWhile pred
+ }
+
+ trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
+ def iterator = self.iterator dropWhile pred
+ }
+
+ trait Zipped[B] extends Transformed[(A, B)] {
+ protected[this] val other: GenIterable[B]
+ def iterator: Iterator[(A, B)] = self.iterator zip other.iterator
+ final override protected[this] def viewIdentifier = "Z"
+ }
+
+ trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
+ protected[this] val other: GenIterable[B]
+ protected[this] val thisElem: A1
+ protected[this] val thatElem: B
+ final override protected[this] def viewIdentifier = "Z"
+ def iterator: Iterator[(A1, B)] =
+ self.iterator.zipAll(other.iterator, thisElem, thatElem)
+ }
+
+}
+
+
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
new file mode 100644
index 0000000000..06e4446d05
--- /dev/null
+++ b/src/library/scala/collection/GenMap.scala
@@ -0,0 +1,38 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+import mutable.Builder
+
+
+/** A trait for all traversable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenMap[A, +B]
+extends GenMapLike[A, B, GenMap[A, B]]
+ with GenIterable[(A, B)]
+{
+ def seq: Map[A, B]
+}
+
+
+object GenMap extends MapFactory[GenMap] {
+ def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
+
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+}
+
+
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
new file mode 100644
index 0000000000..213b1cf66b
--- /dev/null
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -0,0 +1,61 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+
+/** A trait for all traversable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals with Parallelizable[(A, B), parallel.ParMap[A, B]] {
+ def default(key: A): B
+ def get(key: A): Option[B]
+ def apply(key: A): B
+ def seq: Map[A, B]
+ def +[B1 >: B](kv: (A, B1)): GenMap[A, B1]
+ def - (key: A): Repr
+
+ // This hash code must be symmetric in the contents but ought not
+ // collide trivially.
+ override def hashCode() = util.MurmurHash.symmetricHash(seq, Map.hashSeed)
+
+ /** Compares two maps structurally; i.e. checks if all mappings
+ * contained in this map are also contained in the other map,
+ * and vice versa.
+ *
+ * @param that the other map
+ * @return `true` if both maps contain exactly the
+ * same mappings, `false` otherwise.
+ */
+ override def equals(that: Any): Boolean = that match {
+ case that: GenMap[b, _] =>
+ (this eq that) ||
+ (that canEqual this) &&
+ (this.size == that.size) && {
+ try {
+ this forall {
+ case (k, v) => that.get(k.asInstanceOf[b]) match {
+ case Some(`v`) =>
+ true
+ case _ => false
+ }
+ }
+ } catch {
+ case ex: ClassCastException =>
+ println("class cast "); false
+ }}
+ case _ =>
+ false
+ }
+}
diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala
new file mode 100644
index 0000000000..ac5f953e88
--- /dev/null
+++ b/src/library/scala/collection/GenSeq.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for all sequences which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenSeq[+A]
+extends GenSeqLike[A, GenSeq[A]]
+ with GenIterable[A]
+ with Equals
+ with GenericTraversableTemplate[A, GenSeq]
+{
+ def seq: Seq[A]
+ override def companion: GenericCompanion[GenSeq] = GenSeq
+}
+
+
+object GenSeq extends TraversableFactory[GenSeq] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Seq.newBuilder
+}
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
new file mode 100644
index 0000000000..74804faff5
--- /dev/null
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -0,0 +1,162 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+import generic._
+
+
+
+/** A template trait for all sequences which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenSeqLike[+T, +Repr] extends GenIterableLike[T, Repr] with Equals with Parallelizable[T, parallel.ParSeq[T]] {
+
+ def apply(idx: Int): T
+
+ def length: Int
+
+ def segmentLength(p: T => Boolean, from: Int): Int
+
+ def prefixLength(p: T => Boolean): Int
+
+ def indexWhere(p: T => Boolean, from: Int): Int
+
+ def indexWhere(p: T => Boolean): Int
+
+ def findIndexOf(p: T => Boolean): Int
+
+ def indexOf[U >: T](elem: U): Int
+
+ def indexOf[U >: T](elem: U, from: Int): Int
+
+ def lastIndexWhere(p: T => Boolean, end: Int): Int
+
+ def reverse: Repr
+
+ def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That
+
+ def startsWith[S](that: GenSeq[S]): Boolean
+
+ def startsWith[S](that: GenSeq[S], offset: Int): Boolean
+
+ def endsWith[S](that: GenSeq[S]): Boolean
+
+ def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That
+
+ def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That
+
+ def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That
+
+ def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That
+
+ def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That
+
+ def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean
+
+ def toSeq: GenSeq[T]
+
+
+ /** Produces a new sequence which contains all elements of this $coll and also all elements of
+ * a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
+ * $willNotTerminateInf
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-preserving multiset union of `xs` and `ys`.
+ * `union` is hence a counterpart of `diff` and `intersect`, which also work on multisets.
+ *
+ * $willNotTerminateInf
+ *
+ * @param that the sequence to add.
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * followed by all elements of `that`.
+ * @usecase def union(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
+ */
+ def union[U >: T, That](that: GenSeq[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = this ++ that
+
+ /** Computes the multiset difference between this $coll and another sequence.
+ * $willNotTerminateInf
+ *
+ * @param that the sequence of elements to remove
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * except some of the occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ * @usecase def diff(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * except some of the occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ */
+ def diff[U >: T](that: GenSeq[U]): Repr
+
+ /** Computes the multiset intersection between this $coll and another sequence.
+ * $mayNotTerminateInf
+ *
+ * @param that the sequence of elements to intersect with.
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ * @usecase def intersect(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ */
+ def intersect[U >: T](that: GenSeq[U]): Repr
+
+ /** Builds a new $coll from this $coll without any duplicate elements.
+ * $willNotTerminateInf
+ *
+ * @return A new $coll which contains the first occurrence of every element of this $coll.
+ */
+ def distinct: Repr
+
+ /** Hashcodes for $Coll produce a value from the hashcodes of all the
+ * elements of the $coll.
+ */
+ override def hashCode() = {
+ val h = new util.MurmurHash[T](Seq.hashSeed)
+ seq.foreach(h)
+ h.hash
+ }
+
+ /** The equals method for arbitrary sequences. Compares this sequence to
+ * some other object.
+ * @param that The object to compare the sequence to
+ * @return `true` if `that` is a sequence that has the same elements as
+ * this sequence in the same order, `false` otherwise
+ */
+ override def equals(that: Any): Boolean = that match {
+ case that: GenSeq[_] => (that canEqual this) && (this sameElements that)
+ case _ => false
+ }
+
+}
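A small worked example (not from the patch) of the multiset semantics documented above for `union`, `diff`, `intersect` and `distinct`, shown on plain sequential Seq values:

  import scala.collection._

  val xs = Seq(1, 1, 2, 3)
  val ys = Seq(1, 2, 2)

  xs union ys        // Seq(1, 1, 2, 3, 1, 2, 2) -- identical to xs ++ ys
  xs diff ys         // Seq(1, 3)  -- the first 1 and the single 2 are removed
  xs intersect ys    // Seq(1, 2)  -- first occurrences retained, order of xs kept
  xs.distinct        // Seq(1, 2, 3)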
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
new file mode 100644
index 0000000000..c18c656b55
--- /dev/null
+++ b/src/library/scala/collection/GenSeqView.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+
+trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { }
+
+
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
new file mode 100644
index 0000000000..2f06a52cd3
--- /dev/null
+++ b/src/library/scala/collection/GenSeqViewLike.scala
@@ -0,0 +1,164 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+
+trait GenSeqViewLike[+A,
+ +Coll,
+ +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
+extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
+self =>
+
+ trait Transformed[+B] extends GenSeqView[B, Coll] with super.Transformed[B] {
+ def length: Int
+ def apply(idx: Int): B
+ override def toString = viewToString
+ }
+
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView {
+ final override def length = 0
+ final override def apply(n: Int) = Nil(n)
+ }
+
+ trait Forced[B] extends super.Forced[B] with Transformed[B] {
+ def length = forced.length
+ def apply(idx: Int) = forced.apply(idx)
+ }
+
+ trait Sliced extends super.Sliced with Transformed[A] {
+ def length = iterator.size
+ def apply(idx: Int): A =
+ if (idx + from < until) self.apply(idx + from)
+ else throw new IndexOutOfBoundsException(idx.toString)
+
+ override def foreach[U](f: A => U) = iterator foreach f
+ override def iterator: Iterator[A] = self.iterator drop from take endpoints.width
+ }
+
+ trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
+ def length = self.length
+ def apply(idx: Int): B = mapping(self(idx))
+ }
+
+ trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
+ protected[this] lazy val index = {
+ val index = new Array[Int](self.length + 1)
+ index(0) = 0
+ for (i <- 0 until self.length) // note that if the mapping returns a list, performance is bad, bad
+ index(i + 1) = index(i) + mapping(self(i)).seq.size
+ index
+ }
+ protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
+ val mid = (lo + hi) / 2
+ if (idx < index(mid)) findRow(idx, lo, mid - 1)
+ else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
+ else mid
+ }
+ def length = index(self.length)
+ def apply(idx: Int) = {
+ val row = findRow(idx, 0, self.length - 1)
+ mapping(self(row)).seq.toSeq(idx - index(row))
+ }
+ }
+
+ trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
+ protected[this] lazy val restSeq = rest.toSeq
+ def length = self.length + restSeq.length
+ def apply(idx: Int) =
+ if (idx < self.length) self(idx) else restSeq(idx - self.length)
+ }
+
+ trait Filtered extends super.Filtered with Transformed[A] {
+ protected[this] lazy val index = {
+ var len = 0
+ val arr = new Array[Int](self.length)
+ for (i <- 0 until self.length)
+ if (pred(self(i))) {
+ arr(len) = i
+ len += 1
+ }
+ arr take len
+ }
+ def length = index.length
+ def apply(idx: Int) = self(index(idx))
+ }
+
+ trait TakenWhile extends super.TakenWhile with Transformed[A] {
+ protected[this] lazy val len = self prefixLength pred
+ def length = len
+ def apply(idx: Int) =
+ if (idx < len) self(idx)
+ else throw new IndexOutOfBoundsException(idx.toString)
+ }
+
+ trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
+ protected[this] lazy val start = self prefixLength pred
+ def length = self.length - start
+ def apply(idx: Int) =
+ if (idx >= 0) self(idx + start)
+ else throw new IndexOutOfBoundsException(idx.toString)
+ }
+
+ trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] {
+ protected[this] lazy val thatSeq = other.seq.toSeq
+ /* Have to be careful here - other may be an infinite sequence. */
+ def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
+ def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
+ }
+
+ trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] {
+ protected[this] lazy val thatSeq = other.seq.toSeq
+ def length: Int = self.length max thatSeq.length
+ def apply(idx: Int) =
+ (if (idx < self.length) self.apply(idx) else thisElem,
+ if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
+ }
+
+ trait Reversed extends Transformed[A] {
+ override def iterator: Iterator[A] = createReversedIterator
+ def length: Int = self.length
+ def apply(idx: Int): A = self.apply(length - 1 - idx)
+ final override protected[this] def viewIdentifier = "R"
+
+ private def createReversedIterator = {
+ var lst = List[A]()
+ for (elem <- self) lst ::= elem
+ lst.iterator
+ }
+ }
+
+ trait Patched[B >: A] extends Transformed[B] {
+ protected[this] val from: Int
+ protected[this] val patch: GenSeq[B]
+ protected[this] val replaced: Int
+ private lazy val plen = patch.length
+ override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
+ def length: Int = self.length + plen - replaced
+ def apply(idx: Int): B =
+ if (idx < from) self.apply(idx)
+ else if (idx < from + plen) patch.apply(idx - from)
+ else self.apply(idx - plen + replaced)
+ final override protected[this] def viewIdentifier = "P"
+ }
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: B
+ override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
+ def length: Int = 1 + self.length
+ def apply(idx: Int): B =
+ if (idx == 0) fst
+ else self.apply(idx - 1)
+ final override protected[this] def viewIdentifier = "A"
+ }
+
+}
+
+
diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala
new file mode 100644
index 0000000000..ec066844b4
--- /dev/null
+++ b/src/library/scala/collection/GenSet.scala
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for sets which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenSet[A]
+extends GenSetLike[A, GenSet[A]]
+ with GenIterable[A]
+ with GenericSetTemplate[A, GenSet]
+{
+ override def companion: GenericCompanion[GenSet] = GenSet
+ def seq: Set[A]
+}
+
+
+object GenSet extends TraversableFactory[GenSet] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Set.newBuilder
+}
+
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
new file mode 100644
index 0000000000..91e90c1499
--- /dev/null
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -0,0 +1,123 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.collection
+
+
+
+
+/** A template trait for sets which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenSetLike[A, +Repr] extends GenIterableLike[A, Repr] with (A => Boolean) with Equals with Parallelizable[A, parallel.ParSet[A]] {
+ def seq: Set[A]
+ def contains(elem: A): Boolean
+ def +(elem: A): Repr
+ def -(elem: A): Repr
+
+ /** Tests if some element is contained in this set.
+ *
+ * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+ * @param elem the element to test for membership.
+ * @return `true` if `elem` is contained in this set, `false` otherwise.
+ */
+ def apply(elem: A): Boolean = this contains elem
+
+ /** Computes the intersection between this set and another set.
+ *
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
+ */
+ def intersect(that: GenSet[A]): Repr = this filter that
+
+ /** Computes the intersection between this set and another set.
+ *
+ * '''Note:''' Same as `intersect`.
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
+ */
+ def &(that: GenSet[A]): Repr = this intersect that
+
+ /** Computes the union between this set and another set.
+ *
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
+ */
+ def union(that: GenSet[A]): Repr
+
+ /** Computes the union between this set and another set.
+ *
+ * '''Note:''' Same as `union`.
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
+ */
+ def | (that: GenSet[A]): Repr = this union that
+
+ /** Computes the difference of this set and another set.
+ *
+ * @param that the set of elements to exclude.
+ * @return a set containing those elements of this
+ * set that are not also contained in the given set `that`.
+ */
+ def diff(that: GenSet[A]): Repr
+
+ /** The difference of this set and another set.
+ *
+ * '''Note:''' Same as `diff`.
+ * @param that the set of elements to exclude.
+ * @return a set containing those elements of this
+ * set that are not also contained in the given set `that`.
+ */
+ def &~(that: GenSet[A]): Repr = this diff that
+
+ /** Tests whether this set is a subset of another set.
+ *
+ * @param that the set to test.
+ * @return `true` if this set is a subset of `that`, i.e. if
+ * every element of this set is also an element of `that`.
+ */
+ def subsetOf(that: GenSet[A]): Boolean = this forall that
+
+ /** Compares this set with another object for equality.
+ *
+ * '''Note:''' This operation contains an unchecked cast: if `that`
+ * is a set, it will assume with an unchecked cast
+ * that it has the same element type as this set.
+ * Any subsequent ClassCastException is treated as a `false` result.
+ * @param that the other object
+ * @return `true` if `that` is a set which contains the same elements
+ * as this set.
+ */
+ override def equals(that: Any): Boolean = that match {
+ case that: GenSet[_] =>
+ (this eq that) ||
+ (that canEqual this) &&
+ (this.size == that.size) &&
+ (try this subsetOf that.asInstanceOf[GenSet[A]]
+ catch { case ex: ClassCastException => false })
+ case _ =>
+ false
+ }
+
+ // Careful! Don't write a Set's hashCode like:
+ // override def hashCode() = this map (_.hashCode) sum
+ // Calling map on a set drops duplicates: any hashcode collisions would
+ // then be dropped before they can be added.
+ // Hash should be symmetric in set entries, but without trivial collisions.
+ override def hashCode() = util.MurmurHash.symmetricHash(seq, Set.hashSeed)
+
+}
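For reference, a short sketch (not part of the patch) of the set algebra and equality contract documented above; `a` and `b` are made-up values:

  import scala.collection._

  val a = Set(1, 2, 3)
  val b = Set(2, 3, 4)

  a & b                     // Set(2, 3)
  a | b                     // Set(1, 2, 3, 4)
  a &~ b                    // Set(1)
  Set(2, 3) subsetOf a      // true
  // equals and the symmetric hashCode above are defined at the Gen* level,
  // so a sequential set and a parallel set with the same elements compare equal:
  a == parallel.ParSet(1, 2, 3)   // true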
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
new file mode 100644
index 0000000000..fc1f84d2e5
--- /dev/null
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -0,0 +1,39 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+
+
+import generic._
+
+
+/** A trait for all traversable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenTraversable[+A]
+extends GenTraversableLike[A, GenTraversable[A]]
+ with GenTraversableOnce[A]
+ with GenericTraversableTemplate[A, GenTraversable]
+{
+ def seq: Traversable[A]
+ def companion: GenericCompanion[GenTraversable] = GenTraversable
+}
+
+
+object GenTraversable extends TraversableFactory[GenTraversable] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Traversable.newBuilder
+}
+
+
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
new file mode 100644
index 0000000000..5c49df2461
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -0,0 +1,100 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+import generic._
+
+
+
+/** A template trait for all traversable collections which may possibly
+ * have their operations implemented in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenTraversableLike[+T, +Repr] extends GenTraversableOnceLike[T] with Parallelizable[T, parallel.ParIterable[T]] {
+
+ def repr: Repr
+
+ def size: Int
+
+ def head: T
+
+ /** Selects all elements except the first.
+ * $orderDependent
+ * @return a $coll consisting of all elements of this $coll
+ * except the first one.
+ * @throws `UnsupportedOperationException` if the $coll is empty.
+ */
+ def tail: Repr = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.tail")
+ drop(1)
+ }
+
+ /** Computes a prefix scan of the elements of the collection.
+ *
+ * Note: The neutral element `z` may be applied more than once.
+ *
+ * @tparam U element type of the resulting collection
+ * @tparam That type of the resulting collection
+ * @param z neutral element for the operator `op`
+ * @param op the associative operator for the scan
+ * @param cbf combiner factory which provides a combiner
+ * @return a collection containing the prefix scan of the elements in the original collection
+ *
+ * @usecase def scan(z: T)(op: (T, T) => T): $Coll[T]
+ *
+ * @return a new $coll containing the prefix scan of the elements in this $coll
+ */
+ def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[Repr, U, That]): That
+
+ def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]): That
+
+ def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]): That
+
+ def foreach[U](f: T => U): Unit
+
+ def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That
+
+ def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That
+
+ def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That
+
+ def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That
+
+ def filter(pred: T => Boolean): Repr
+
+ def filterNot(pred: T => Boolean): Repr
+
+ def partition(pred: T => Boolean): (Repr, Repr)
+
+ def groupBy[K](f: T => K): GenMap[K, Repr]
+
+ def take(n: Int): Repr
+
+ def drop(n: Int): Repr
+
+ def slice(unc_from: Int, unc_until: Int): Repr
+
+ def splitAt(n: Int): (Repr, Repr)
+
+ def takeWhile(pred: T => Boolean): Repr
+
+ def span(pred: T => Boolean): (Repr, Repr)
+
+ def dropWhile(pred: T => Boolean): Repr
+
+ def copyToArray[U >: T](xs: Array[U], start: Int, len: Int)
+
+ def stringPrefix: String
+
+}
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
new file mode 100644
index 0000000000..04825148bd
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -0,0 +1,23 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+
+/** A trait for all objects traversable once or more which may possibly
+ * have their traversal occur in parallel.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ */
+trait GenTraversableOnce[+A] extends GenTraversableOnceLike[A] {
+ def seq: TraversableOnce[A]
+}
diff --git a/src/library/scala/collection/GenTraversableOnceLike.scala b/src/library/scala/collection/GenTraversableOnceLike.scala
new file mode 100644
index 0000000000..d81f26649e
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableOnceLike.scala
@@ -0,0 +1,173 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+
+
+/** A template trait for all objects traversable once or more which may possibly
+ * have their traversal occur in parallel.
+ *
+ * Methods in this trait are either abstract or can be implemented in terms
+ * of other methods.
+ *
+ * @author Martin Odersky
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ *
+ * @define possiblyparinfo
+ * This trait may possibly have operations implemented in parallel.
+ */
+trait GenTraversableOnceLike[+A] {
+
+ def foreach[U](f: A => U): Unit
+
+ def isEmpty: Boolean
+
+ def hasDefiniteSize: Boolean
+
+ def seq: TraversableOnce[A]
+
+ /** Reduces the elements of this sequence using the specified associative binary operator.
+ *
+ * $undefinedorder
+ *
+ * Note this method has a different signature than the `reduceLeft`
+ * and `reduceRight` methods of the trait `Traversable`.
+ * The result of reducing may only be a supertype of this parallel collection's
+ * type parameter `T`.
+ *
+ * @tparam U A type parameter for the binary operator, a supertype of `T`.
+ * @param op A binary operator that must be associative.
+ * @return The result of applying reduce operator `op` between all the elements if the collection is nonempty.
+ * @throws UnsupportedOperationException
+ * if this $coll is empty.
+ */
+ def reduce[A1 >: A](op: (A1, A1) => A1): A1
+
+ /** Optionally reduces the elements of this sequence using the specified associative binary operator.
+ *
+ * $undefinedorder
+ *
+ * Note this method has a different signature than the `reduceLeftOption`
+ * and `reduceRightOption` methods of the trait `Traversable`.
+ * The result of reducing may only be a supertype of this parallel collection's
+ * type parameter `T`.
+ *
+ * @tparam U A type parameter for the binary operator, a supertype of `T`.
+ * @param op A binary operator that must be associative.
+ * @return An option value containing result of applying reduce operator `op` between all
+ * the elements if the collection is nonempty, and `None` otherwise.
+ */
+ def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1]
+
+ /** Folds the elements of this sequence using the specified associative binary operator.
+ * The order in which the elements are reduced is unspecified and may be nondeterministic.
+ *
+ * Note this method has a different signature than the `foldLeft`
+ * and `foldRight` methods of the trait `Traversable`.
+ * The result of folding may only be a supertype of this parallel collection's
+ * type parameter `T`.
+ *
+ * @tparam U a type parameter for the binary operator, a supertype of `T`.
+ * @param z a neutral element for the fold operation, it may be added to the result
+ * an arbitrary number of times, not changing the result (e.g. `Nil` for list concatenation,
+ * 0 for addition, or 1 for multiplication)
+ * @param op a binary operator that must be associative
+ * @return the result of applying fold operator `op` between all the elements and `z`
+ */
+ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1
+
+ /** A syntactic sugar for out of order folding. See `fold`. */
+ def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
+
+ /** Aggregates the results of applying an operator to subsequent elements.
+ *
+ * This is a more general form of `fold` and `reduce`. It has similar semantics, but does
+ * not require the result to be a supertype of the element type. It traverses the elements in
+ * different partitions sequentially, using `seqop` to update the result, and then
+ * applies `combop` to results from different partitions. The implementation of this
+ * operation may operate on an arbitrary number of collection partitions, so `combop`
+ * may be invoked arbitrary number of times.
+ *
+ * For example, one might want to process some elements and then produce a `Set`. In this
+ * case, `seqop` would process an element and add it to the set, while `combop`
+ * would take the union of two sets built from different partitions. The initial value
+ * `z` would be an empty set.
+ *
+ * {{{
+ * pc.aggregate(Set[Int]())(_ + process(_), _ ++ _)
+ * }}}
+ *
+ * Another example is calculating geometric mean from a collection of doubles
+ * (one would typically require big doubles for this).
+ *
+ * @tparam S the type of accumulated results
+ * @param z the initial value for the accumulated result of the partition - this
+ * will typically be the neutral element for the `seqop` operator (e.g.
+ * `Nil` for list concatenation or `0` for summation)
+ * @param seqop an operator used to accumulate results within a partition
+ * @param combop an associative operator used to combine results from different partitions
+ */
+ def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B
+
+ def count(p: A => Boolean): Int
+
+ def sum[A1 >: A](implicit num: Numeric[A1]): A1
+
+ def product[A1 >: A](implicit num: Numeric[A1]): A1
+
+ def min[A1 >: A](implicit ord: Ordering[A1]): A
+
+ def max[A1 >: A](implicit ord: Ordering[A1]): A
+
+ def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A
+
+ def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A
+
+ def forall(pred: A => Boolean): Boolean
+
+ def exists(pred: A => Boolean): Boolean
+
+ def find(pred: A => Boolean): Option[A]
+
+ def copyToArray[B >: A](xs: Array[B]): Unit
+
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit
+
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+
+ def mkString(start: String, sep: String, end: String): String
+
+ def mkString(sep: String): String
+
+ def mkString: String
+
+ def toArray[A1 >: A: ClassManifest]: Array[A1]
+
+ def toList: List[A]
+
+ def toIndexedSeq[A1 >: A]: immutable.IndexedSeq[A1]
+
+ def toStream: Stream[A]
+
+ def toIterator: Iterator[A]
+
+ def toBuffer[A1 >: A]: collection.mutable.Buffer[A1]
+
+ def toTraversable: GenTraversable[A]
+
+ def toIterable: GenIterable[A]
+
+ def toSeq: GenSeq[A]
+
+ def toSet[A1 >: A]: GenSet[A1]
+
+ def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+}
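A brief sketch (not from the patch) contrasting the fold and aggregate contracts documented above; `words` is a made-up value:

  import scala.collection._

  val words: GenSeq[String] = parallel.ParSeq("lorem", "ipsum", "dolor")

  // fold: z must be neutral and op associative, since partitions may be
  // folded independently and their results combined afterwards.
  val joined = words.fold("")(_ + _)        // concatenation of all elements

  // aggregate: the result type (Int) may differ from the element type (String);
  // seqop accumulates inside a partition, combop merges partition results.
  val totalLength = words.aggregate(0)((acc, w) => acc + w.length, _ + _)   // 15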
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
new file mode 100644
index 0000000000..e29595527b
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableView.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+
+
+
+trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { }
+
+
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
new file mode 100644
index 0000000000..5332b06503
--- /dev/null
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -0,0 +1,141 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+
+import generic._
+import mutable.{ Builder, ArrayBuffer }
+import TraversableView.NoBuilder
+import annotation.migration
+
+
+
+trait GenTraversableViewLike[+A,
+ +Coll,
+ +This <: GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This]]
+extends GenTraversable[A] with GenTraversableLike[A, This] {
+self =>
+
+ def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]): That
+
+ protected def underlying: Coll
+ protected[this] def viewIdentifier: String
+ protected[this] def viewIdString: String
+ def viewToString = stringPrefix + viewIdString + "(...)"
+
+ /** The implementation base trait of this view.
+ * This trait and all its subtraits have to be re-implemented for each
+ * ViewLike class.
+ */
+ trait Transformed[+B] extends GenTraversableView[B, Coll] {
+ def foreach[U](f: B => U): Unit
+
+ lazy val underlying = self.underlying
+ final override protected[this] def viewIdString = self.viewIdString + viewIdentifier
+ override def stringPrefix = self.stringPrefix
+ override def toString = viewToString
+ }
+
+ trait EmptyView extends Transformed[Nothing] {
+ final override def isEmpty = true
+ final override def foreach[U](f: Nothing => U): Unit = ()
+ }
+
+ /** A fall back which forces everything into a vector and then applies an operation
+ * on it. Used for those operations which do not naturally lend themselves to a view
+ */
+ trait Forced[B] extends Transformed[B] {
+ protected[this] val forced: GenSeq[B]
+ def foreach[U](f: B => U) = forced foreach f
+ final override protected[this] def viewIdentifier = "C"
+ }
+
+ trait Sliced extends Transformed[A] {
+ protected[this] val endpoints: SliceInterval
+ protected[this] def from = endpoints.from
+ protected[this] def until = endpoints.until
+ // protected def newSliced(_endpoints: SliceInterval): Transformed[A] =
+ // self.newSliced(endpoints.recalculate(_endpoints))
+
+ def foreach[U](f: A => U) {
+ var index = 0
+ for (x <- self) {
+ if (from <= index) {
+ if (until <= index) return
+ f(x)
+ }
+ index += 1
+ }
+ }
+ final override protected[this] def viewIdentifier = "S"
+ }
+
+ trait Mapped[B] extends Transformed[B] {
+ protected[this] val mapping: A => B
+ def foreach[U](f: B => U) {
+ for (x <- self)
+ f(mapping(x))
+ }
+ final override protected[this] def viewIdentifier = "M"
+ }
+
+ trait FlatMapped[B] extends Transformed[B] {
+ protected[this] val mapping: A => GenTraversableOnce[B]
+ def foreach[U](f: B => U) {
+ for (x <- self)
+ for (y <- mapping(x).seq)
+ f(y)
+ }
+ final override protected[this] def viewIdentifier = "N"
+ }
+
+ trait Appended[B >: A] extends Transformed[B] {
+ protected[this] val rest: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ self foreach f
+ rest foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
+
+ trait Filtered extends Transformed[A] {
+ protected[this] val pred: A => Boolean
+ def foreach[U](f: A => U) {
+ for (x <- self)
+ if (pred(x)) f(x)
+ }
+ final override protected[this] def viewIdentifier = "F"
+ }
+
+ trait TakenWhile extends Transformed[A] {
+ protected[this] val pred: A => Boolean
+ def foreach[U](f: A => U) {
+ for (x <- self) {
+ if (!pred(x)) return
+ f(x)
+ }
+ }
+ final override protected[this] def viewIdentifier = "T"
+ }
+
+ trait DroppedWhile extends Transformed[A] {
+ protected[this] val pred: A => Boolean
+ def foreach[U](f: A => U) {
+ var go = false
+ for (x <- self) {
+ if (!go && !pred(x)) go = true
+ if (go) f(x)
+ }
+ }
+ final override protected[this] def viewIdentifier = "D"
+ }
+
+}
+
+
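
The single-letter viewIdentifier strings defined above ("C", "S", "M", "N", "A", "F", "T", "D") accumulate into the view's toString, so a stack of unforced transformations stays visible. A minimal sketch of that behaviour in ordinary user code (not part of this patch; the exact printed prefix depends on the collection):

import scala.collection._

val v = List(1, 2, 3, 4, 5, 6).view map (_ * 2) filter (_ % 3 == 0)
println(v)        // something like SeqViewMF(...): Mapped then Filtered, still unforced
println(v.force)  // evaluates the stacked transformers: List(6, 12)
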
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index ef86fad313..04d89299c8 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -73,7 +73,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
if (length > 0) foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op)
override /*IterableLike*/
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
case that: IndexedSeq[_] =>
val b = bf(repr)
var i = 0
@@ -154,7 +154,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p))
override /*IterableLike*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that: IndexedSeq[_] =>
val len = length
len == that.length && {
@@ -231,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
}
override /*SeqLike*/
- def startsWith[B](that: Seq[B], offset: Int): Boolean = that match {
+ def startsWith[B](that: GenSeq[B], offset: Int): Boolean = that match {
case that: IndexedSeq[_] =>
var i = offset
var j = 0
@@ -256,7 +256,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
}
override /*SeqLike*/
- def endsWith[B](that: Seq[B]): Boolean = that match {
+ def endsWith[B](that: GenSeq[B]): Boolean = that match {
case that: IndexedSeq[_] =>
var i = length - 1
var j = that.length - 1
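
With the four signatures above widened, the index-based fast paths still apply when the argument is an IndexedSeq, and any other GenIterable/GenSeq (including a parallel one) falls through to the generic implementation. A usage sketch, assuming the parallel.ParSeq factory:

import scala.collection._

val xs = Vector(1, 2, 3)
val zipped = xs zip parallel.ParSeq("a", "b", "c")    // argument is a GenIterable[String]
val same   = xs sameElements parallel.ParSeq(1, 2, 3) // true
val starts = xs.startsWith(parallel.ParSeq(1, 2), 0)  // true
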
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 0549ab3be8..64c3185c0b 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -18,16 +18,19 @@ import mutable.Builder
* $iterableInfo
*/
trait Iterable[+A] extends Traversable[A]
+ with GenIterable[A]
with GenericTraversableTemplate[A, Iterable]
with IterableLike[A, Iterable[A]] {
- override def companion: GenericCompanion[Iterable] = Iterable
+ override def companion: GenericCompanion[Iterable] = Iterable
+
+ override def seq = this
/* The following methods are inherited from trait IterableLike
*
override def iterator: Iterator[A]
override def takeRight(n: Int): Iterable[A]
override def dropRight(n: Int): Iterable[A]
- override def sameElements[B >: A](that: Iterable[B]): Boolean
+ override def sameElements[B >: A](that: GenIterable[B]): Boolean
override def view
override def view(from: Int, until: Int)
*/
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 062b3c207f..4d10f31920 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -55,7 +55,7 @@ import annotation.unchecked.uncheckedVariance
* result class `That` from the current representation type `Repr`
* and the new element type `(A1, B)`.
*/
-trait IterableLike[+A, +Repr] extends Equals with TraversableLike[A, Repr] {
+trait IterableLike[+A, +Repr] extends Equals with TraversableLike[A, Repr] with GenIterableLike[A, Repr] {
self =>
override protected[this] def thisCollection: Iterable[A] = this.asInstanceOf[Iterable[A]]
@@ -218,7 +218,7 @@ self =>
* corresponding elements of this $coll and `that`. The length
* of the returned collection is the minimum of the lengths of this $coll and `that`.
*/
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
val these = this.iterator
val those = that.iterator
@@ -255,7 +255,7 @@ self =>
* If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
* If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*/
- def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
+ def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
val these = this.iterator
val those = that.iterator
@@ -317,7 +317,7 @@ self =>
* @param that the collection to compare with.
* @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = {
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = {
val these = this.iterator
val those = that.iterator
while (these.hasNext && those.hasNext)
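
These widened methods only ever ask the argument for an iterator, so sequential and parallel collections can be mixed freely at the call site. A hedged sketch:

import scala.collection._

val left  = List(1, 2, 3)
val right = parallel.ParSeq(10, 20)
val pairs  = left zip right              // List((1,10), (2,20))
val padded = left.zipAll(right, 0, -1)   // List((1,10), (2,20), (3,-1))
val equal  = left sameElements parallel.ParSeq(1, 2, 3)  // true
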
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 6bc0f155a8..0fe501ac6b 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -31,10 +31,10 @@ trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
override def sliding[B >: A](size: Int, step: Int): Iterator[Repr] = self.sliding(size, step)
override def takeRight(n: Int): Repr = self.takeRight(n)
override def dropRight(n: Int): Repr = self.dropRight(n)
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
- override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
+ override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
+ override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = self.zipWithIndex(bf)
- override def sameElements[B >: A](that: Iterable[B]): Boolean = self.sameElements(that)
+ override def sameElements[B >: A](that: GenIterable[B]): Boolean = self.sameElements(that)
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
}
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 0cde62fb99..9ff6762ae8 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -16,7 +16,7 @@ import TraversableView.NoBuilder
/** A base trait for non-strict views of `Iterable`s.
* $iterableViewInfo
*/
-trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]]
+trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]] with GenIterableView[A, Coll]
/** An object containing the necessary implicit definitions to make
* `IterableView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 6acef1889f..e0e1329844 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -32,96 +32,68 @@ trait IterableViewLike[+A,
with IterableLike[A, This]
with TraversableView[A, Coll]
with TraversableViewLike[A, Coll, This]
+ with GenIterableViewLike[A, Coll, This]
{ self =>
- trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B] {
+ trait Transformed[+B] extends IterableView[B, Coll] with super[TraversableViewLike].Transformed[B] with super[GenIterableViewLike].Transformed[B] {
def iterator: Iterator[B]
override def foreach[U](f: B => U): Unit = iterator foreach f
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] with super.EmptyView {
- final def iterator: Iterator[Nothing] = Iterator.empty
- }
+ trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
- trait Forced[B] extends super.Forced[B] with Transformed[B] {
- def iterator = forced.iterator
- }
+ trait Forced[B] extends super[TraversableViewLike].Forced[B] with super[GenIterableViewLike].Forced[B] with Transformed[B]
- trait Sliced extends super.Sliced with Transformed[A] {
- def iterator: Iterator[A] = self.iterator.slice(from, until)
- }
+ trait Sliced extends super[TraversableViewLike].Sliced with super[GenIterableViewLike].Sliced with Transformed[A]
- trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
- def iterator = self.iterator map mapping
- }
+ trait Mapped[B] extends super[TraversableViewLike].Mapped[B] with super[GenIterableViewLike].Mapped[B] with Transformed[B]
- trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
- def iterator: Iterator[B] = self.iterator flatMap mapping
- }
+ trait FlatMapped[B] extends super[TraversableViewLike].FlatMapped[B] with super[GenIterableViewLike].FlatMapped[B] with Transformed[B]
- trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
- def iterator = self.iterator ++ rest
- }
+ trait Appended[B >: A] extends super[TraversableViewLike].Appended[B] with super[GenIterableViewLike].Appended[B] with Transformed[B]
- trait Filtered extends super.Filtered with Transformed[A] {
- def iterator = self.iterator filter pred
- }
+ trait Filtered extends super[TraversableViewLike].Filtered with super[GenIterableViewLike].Filtered with Transformed[A]
- trait TakenWhile extends super.TakenWhile with Transformed[A] {
- def iterator = self.iterator takeWhile pred
- }
+ trait TakenWhile extends super[TraversableViewLike].TakenWhile with super[GenIterableViewLike].TakenWhile with Transformed[A]
- trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
- def iterator = self.iterator dropWhile pred
- }
+ trait DroppedWhile extends super[TraversableViewLike].DroppedWhile with super[GenIterableViewLike].DroppedWhile with Transformed[A]
- trait Zipped[B] extends Transformed[(A, B)] {
- protected[this] val other: Iterable[B]
- def iterator: Iterator[(A, B)] = self.iterator zip other.iterator
- final override protected[this] def viewIdentifier = "Z"
- }
-
- trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
- protected[this] val other: Iterable[B]
- protected[this] val thisElem: A1
- protected[this] val thatElem: B
- final override protected[this] def viewIdentifier = "Z"
- def iterator: Iterator[(A1, B)] =
- self.iterator.zipAll(other.iterator, thisElem, thatElem)
- }
+ trait Zipped[B] extends Transformed[(A, B)] with super[GenIterableViewLike].Zipped[B]
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = {
- newZipped(that).asInstanceOf[That]
-// was: val b = bf(repr)
-// if (b.isInstanceOf[NoBuilder[_]]) newZipped(that).asInstanceOf[That]
-// else super.zip[A1, B, That](that)(bf)
- }
-
- override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That =
- zip[A1, Int, That](Stream from 0)(bf)
-
- override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That =
- newZippedAll(that, thisElem, thatElem).asInstanceOf[That]
+ trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] with super[GenIterableViewLike].ZippedAll[A1, B]
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
- val other: Iterable[B] = that
+ protected def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
+ val other: GenIterable[B] = that
val thisElem = _thisElem
val thatElem = _thatElem
} with ZippedAll[A1, B]
- protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
+ override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = {
+ newZipped(that).asInstanceOf[That]
+// was: val b = bf(repr)
+// if (b.isInstanceOf[NoBuilder[_]]) newZipped(that).asInstanceOf[That]
+// else super.zip[A1, B, That](that)(bf)
+ }
+
+ override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That =
+ zip[A1, Int, That](Stream from 0)(bf)
+
+ override def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That =
+ newZippedAll(that, thisElem, thatElem).asInstanceOf[That]
+
override def grouped(size: Int): Iterator[This] =
self.iterator grouped size map (x => newForced(x).asInstanceOf[This])
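
The zip overrides above keep views lazy: they wrap the receiver in a Zipped/ZippedAll transformer rather than building a strict result, and zipWithIndex reuses zip with an infinite Stream, which is only safe because nothing is forced eagerly. A small sketch:

import scala.collection._

val v  = List(1, 2, 3).view zip List("a", "b", "c")  // lazy Zipped view
val wi = List(10, 20, 30).view.zipWithIndex          // zip against Stream from 0
println(v.force)   // List((1,a), (2,b), (3,c))
println(wi.force)  // List((10,0), (20,1), (30,2))
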
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 4e349cb423..5054ff01fc 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -340,7 +340,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* iterator followed by the values produced by iterator `that`.
* @usecase def ++(that: => Iterator[A]): Iterator[A]
*/
- def ++[B >: A](that: => TraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator[B] {
// optimize a little bit to prevent n log n behavior.
private var cur : Iterator[B] = self
// since that is by-name, make sure it's only referenced once -
@@ -364,7 +364,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return the iterator resulting from applying the given iterator-valued function
* `f` to each value produced by this iterator and concatenating the results.
*/
- def flatMap[B](f: A => TraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new Iterator[B] {
private var cur: Iterator[B] = empty
def hasNext: Boolean =
cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext }
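
Iterator.++ and Iterator.flatMap now accept GenTraversableOnce, so a parallel collection can be concatenated or flat-mapped into an iterator; the argument is walked through its own iterator (toIterator). Sketch only:

import scala.collection._

val cat = Iterator(1, 2) ++ parallel.ParSeq(3, 4)
val fm  = Iterator(1, 2, 3) flatMap (i => parallel.ParSeq(i, i * 10))
println(cat.toList)  // List(1, 2, 3, 4)
println(fm.toList)   // List(1, 10, 2, 20, 3, 30)
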
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index a1becf8d68..7c5cba0665 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -232,7 +232,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
override /*IterableLike*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that1: LinearSeq[_] =>
var these = this
var those = that1
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 7dcaafe45d..71b2e7fb1e 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -24,9 +24,9 @@ import generic._
* @tparam A the type of the keys in this map.
* @tparam B the type of the values associated with keys.
*
- * @since 1
+ * @since 1.0
*/
-trait Map[A, +B] extends Iterable[(A, B)] with MapLike[A, B, Map[A, B]] {
+trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, Map[A, B]] {
def empty: Map[A, B] = Map.empty
override def seq: Map[A, B] = this
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index fd58540086..6f48f7e761 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -57,8 +57,10 @@ import parallel.ParMap
trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends PartialFunction[A, B]
with IterableLike[(A, B), This]
+ with GenMapLike[A, B, This]
with Subtractable[A, This]
- with Parallelizable[(A, B), ParMap[A, B]] {
+ with Parallelizable[(A, B), ParMap[A, B]]
+{
self =>
/** The empty map of the same type as this map
@@ -284,8 +286,8 @@ self =>
* @return a new map with the given bindings added to this map
* @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
*/
- def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: xs) (_ + _)
+ def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
+ ((repr: Map[A, B1]) /: xs.seq) (_ + _)
/** Returns a new map with all key/value pairs for which the predicate
* `p` returns `true`.
@@ -338,36 +340,4 @@ self =>
override /*PartialFunction*/
def toString = super[IterableLike].toString
- // This hash code must be symmetric in the contents but ought not
- // collide trivially.
- override def hashCode() = util.MurmurHash.symmetricHash(this,Map.hashSeed)
-
- /** Compares two maps structurally; i.e. checks if all mappings
- * contained in this map are also contained in the other map,
- * and vice versa.
- *
- * @param that the other map
- * @return `true` if both maps contain exactly the
- * same mappings, `false` otherwise.
- */
- override def equals(that: Any): Boolean = that match {
- case that: Map[b, _] =>
- (this eq that) ||
- (that canEqual this) &&
- (this.size == that.size) && {
- try {
- this forall {
- case (k, v) => that.get(k.asInstanceOf[b]) match {
- case Some(`v`) =>
- true
- case _ => false
- }
- }
- } catch {
- case ex: ClassCastException =>
- println("class cast "); false
- }}
- case _ =>
- false
- }
}
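
Map.++ above now folds over xs.seq, so a parallel collection of pairs is accepted while the resulting map is still built sequentially. For example (a sketch, assuming the parallel.ParSeq factory):

import scala.collection._

val m  = Map(1 -> "a")
val m2 = m ++ parallel.ParSeq(2 -> "b", 3 -> "c")  // Map(1 -> a, 2 -> b, 3 -> c)
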
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 7511897f36..7352c01ee1 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -42,7 +42,7 @@ trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
override def mapValues[C](f: B => C) = self.mapValues(f)
override def updated [B1 >: B](key: A, value: B1): Map[A, B1] = self.updated(key, value)
override def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] = self.+(kv1, kv2, kvs: _*)
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs)
override def filterNot(p: ((A, B)) => Boolean) = self filterNot p
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder =
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index 037abde2b6..930c3394dd 100644
--- a/src/library/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
@@ -8,9 +8,9 @@
package scala.collection
-/** A marker trait for objects with parallelised operations.
+/** A marker trait for collections which have their operations parallelised.
*
* @since 2.9
- * @author prokopec
+ * @author Aleksandar Prokopec
*/
trait Parallel
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index 10ab248626..59b37aff96 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -18,7 +18,8 @@ import parallel.Combiner
* @tparam ParRepr the actual type of the collection, which has to be parallel
*/
trait Parallelizable[+A, +ParRepr <: Parallel] {
- self: TraversableOnce[A] =>
+
+ def seq: TraversableOnce[A]
/** Returns a parallel implementation of this collection.
*
@@ -37,7 +38,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] {
*/
def par: ParRepr = {
val cb = parCombiner
- for (x <- this) cb += x
+ for (x <- seq) cb += x
cb.result
}
@@ -48,3 +49,4 @@ trait Parallelizable[+A, +ParRepr <: Parallel] {
*/
protected[this] def parCombiner: Combiner[A, ParRepr]
}
+
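
Replacing the TraversableOnce self-type with an abstract seq member means par copies the sequential view of the collection into a Combiner. A minimal sketch of the resulting round trip:

import scala.collection._

val xs   = List(1, 2, 3, 4)
val pxs  = xs.par   // elements of xs.seq are copied into a parallel Combiner
val back = pxs.seq  // back to a sequential collection
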
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index bbf392ab32..df92685134 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -18,6 +18,7 @@ import mutable.Builder
*/
trait Seq[+A] extends PartialFunction[Int, A]
with Iterable[A]
+ with GenSeq[A]
with GenericTraversableTemplate[A, Seq]
with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index fe42f25dea..5cfcd85b14 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -65,7 +65,7 @@ import parallel.ParSeq
*
* Note: will not terminate for infinite-sized collections.
*/
-trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self =>
+trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self =>
override protected[this] def thisCollection: Seq[A] = this.asInstanceOf[Seq[A]]
override protected[this] def toCollection(repr: Repr): Seq[A] = repr.asInstanceOf[Seq[A]]
@@ -467,7 +467,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return `true` if the sequence `that` is contained in this $coll at index `offset`,
* otherwise `false`.
*/
- def startsWith[B](that: Seq[B], offset: Int): Boolean = {
+ def startsWith[B](that: GenSeq[B], offset: Int): Boolean = {
val i = this.iterator drop offset
val j = that.iterator
while (j.hasNext && i.hasNext)
@@ -482,14 +482,14 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @param that the sequence to test
* @return `true` if this collection has `that` as a prefix, `false` otherwise.
*/
- def startsWith[B](that: Seq[B]): Boolean = startsWith(that, 0)
+ def startsWith[B](that: GenSeq[B]): Boolean = startsWith(that, 0)
/** Tests whether this $coll ends with the given sequence.
* $willNotTerminateInf
* @param that the sequence to test
* @return `true` if this $coll has `that` as a suffix, `false` otherwise.
*/
- def endsWith[B](that: Seq[B]): Boolean = {
+ def endsWith[B](that: GenSeq[B]): Boolean = {
val i = this.iterator.drop(length - that.length)
val j = that.iterator
while (i.hasNext && j.hasNext)
@@ -505,7 +505,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the first index such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that, 0)
+ def indexOfSlice[B >: A](that: GenSeq[B]): Int = indexOfSlice(that, 0)
/** Finds first index after or at a start index where this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
@@ -514,9 +514,9 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the first index `>= from` such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def indexOfSlice[B >: A](that: Seq[B], from: Int): Int =
+ def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int =
if (this.hasDefiniteSize && that.hasDefiniteSize)
- SeqLike.indexOf(thisCollection, 0, length, that, 0, that.length, from)
+ SeqLike.indexOf(thisCollection, 0, length, that.seq, 0, that.length, from)
else {
var i = from
var s: Seq[A] = thisCollection drop i
@@ -536,7 +536,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the last index such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that, length)
+ def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = lastIndexOfSlice(that, length)
/** Finds last index before or at a given end index where this $coll contains a given sequence as a slice.
* @param that the sequence to test
@@ -544,8 +544,8 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return the last index `<= end` such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int =
- SeqLike.lastIndexOf(thisCollection, 0, length, that, 0, that.length, end)
+ def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int =
+ SeqLike.lastIndexOf(thisCollection, 0, length, that.seq, 0, that.length, end)
/** Tests whether this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
@@ -553,7 +553,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return `true` if this $coll contains a slice with the same elements
* as `that`, otherwise `false`.
*/
- def containsSlice[B](that: Seq[B]): Boolean = indexOfSlice(that) != -1
+ def containsSlice[B](that: GenSeq[B]): Boolean = indexOfSlice(that) != -1
/** Tests whether this $coll contains a given value as an element.
* $mayNotTerminateInf
@@ -584,7 +584,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* @return a new $coll which contains all elements of this $coll
* followed by all elements of `that`.
*/
- def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
+ override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
this ++ that
/** Computes the multiset difference between this $coll and another sequence.
@@ -606,10 +606,10 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
*/
- def diff[B >: A](that: Seq[B]): Repr = {
- val occ = occCounts(that)
+ def diff[B >: A](that: GenSeq[B]): Repr = {
+ val occ = occCounts(that.seq)
val b = newBuilder
- for (x <- this.seq)
+ for (x <- this)
if (occ(x) == 0) b += x
else occ(x) -= 1
b.result
@@ -634,10 +634,10 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
*/
- def intersect[B >: A](that: Seq[B]): Repr = {
- val occ = occCounts(that)
+ def intersect[B >: A](that: GenSeq[B]): Repr = {
+ val occ = occCounts(that.seq)
val b = newBuilder
- for (x <- this.seq)
+ for (x <- this)
if (occ(x) > 0) {
b += x
occ(x) -= 1
@@ -659,7 +659,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
def distinct: Repr = {
val b = newBuilder
val seen = mutable.HashSet[A]()
- for (x <- this.seq) {
+ for (x <- this) {
if (!seen(x)) {
b += x
seen += x
@@ -684,11 +684,11 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* except that `replaced` elements starting from `from` are replaced
* by `patch`.
*/
- def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
val (prefix, rest) = this.splitAt(from)
b ++= toCollection(prefix)
- b ++= patch
+ b ++= patch.seq
b ++= toCollection(rest).view drop replaced
b.result
}
@@ -785,7 +785,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
* `p(x, y)` is `true` for all corresponding elements `x` of this $coll
* and `y` of `that`, otherwise `false`.
*/
- def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = {
+ def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
val i = this.iterator
val j = that.iterator
while (i.hasNext && j.hasNext)
@@ -883,26 +883,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with Parallelizable[A, Pa
override def view(from: Int, until: Int) = view.slice(from, until)
- /** Hashcodes for $Coll produce a value from the hashcodes of all the
- * elements of the $coll.
- */
- override def hashCode() = {
- val h = new util.MurmurHash[A](Seq.hashSeed)
- this.foreach(h)
- h.hash
- }
-
- /** The equals method for arbitrary sequences. Compares this sequence to
- * some other object.
- * @param that The object to compare the sequence to
- * @return `true` if `that` is a sequence that has the same elements as
- * this sequence in the same order, `false` otherwise
- */
- override def equals(that: Any): Boolean = that match {
- case that: Seq[_] => (that canEqual this) && (this sameElements that)
- case _ => false
- }
-
/* Need to override string, so that it's not the Function1's string that gets mixed in.
*/
override def toString = super[IterableLike].toString
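
The widened SeqLike operations accept a GenSeq argument and fall back to that.seq wherever a strict sequential walk is needed (occurrence counting, builder appends). A usage sketch under that assumption:

import scala.collection._

val xs = List(1, 2, 2, 3, 4)
val ps = parallel.ParSeq(2, 4)
val d  = xs diff ps                          // List(1, 2, 3)
val i  = xs intersect ps                     // List(2, 4)
val pt = xs.patch(1, parallel.ParSeq(9), 2)  // List(1, 9, 3, 4)
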
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index e3c6579cdd..565bd9ff5e 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -43,25 +43,25 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
override def reverse: Repr = self.reverse
override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.reverseMap(f)(bf)
override def reverseIterator: Iterator[A] = self.reverseIterator
- override def startsWith[B](that: Seq[B], offset: Int): Boolean = self.startsWith(that, offset)
- override def startsWith[B](that: Seq[B]): Boolean = self.startsWith(that)
- override def endsWith[B](that: Seq[B]): Boolean = self.endsWith(that)
- override def indexOfSlice[B >: A](that: Seq[B]): Int = self.indexOfSlice(that)
- override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = self.indexOfSlice(that)
- override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = self.lastIndexOfSlice(that)
- override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
- override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSlice(that) != -1
+ override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = self.startsWith(that, offset)
+ override def startsWith[B](that: GenSeq[B]): Boolean = self.startsWith(that)
+ override def endsWith[B](that: GenSeq[B]): Boolean = self.endsWith(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B]): Int = self.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = self.indexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1
override def contains(elem: Any): Boolean = self.contains(elem)
- override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
- override def diff[B >: A](that: Seq[B]): Repr = self.diff(that)
- override def intersect[B >: A](that: Seq[B]): Repr = self.intersect(that)
+ override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
+ override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that)
+ override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that)
override def distinct: Repr = self.distinct
- override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf)
+ override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf)
override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.updated(index, elem)(bf)
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.+:(elem)(bf)
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.:+(elem)(bf)
override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.padTo(len, elem)(bf)
- override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p)
+ override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p)
override def sortWith(lt: (A, A) => Boolean): Repr = self.sortWith(lt)
override def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = self.sortBy(f)(ord)
override def sorted[B >: A](implicit ord: Ordering[B]): Repr = self.sorted(ord)
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index d3a1cb040d..9f936e58aa 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -16,7 +16,7 @@ import TraversableView.NoBuilder
/** A base trait for non-strict views of sequences.
* $seqViewInfo
*/
-trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]]
+trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]] with GenSeqView[A, Coll]
/** An object containing the necessary implicit definitions to make
* `SeqView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 2bd8e29b65..f79baa7c58 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -31,170 +31,62 @@ import TraversableView.NoBuilder
trait SeqViewLike[+A,
+Coll,
+This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
- extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
+ extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This] with GenSeqViewLike[A, Coll, This]
{ self =>
- trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] {
+ trait Transformed[+B] extends SeqView[B, Coll] with super[IterableViewLike].Transformed[B] with super[GenSeqViewLike].Transformed[B] {
def length: Int
def apply(idx: Int): B
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] with super.EmptyView {
- final override def length = 0
- final override def apply(n: Int) = Nil(n)
- }
+ trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
- trait Forced[B] extends super.Forced[B] with Transformed[B] {
- def length = forced.length
- def apply(idx: Int) = forced.apply(idx)
- }
+ trait Forced[B] extends super[IterableViewLike].Forced[B] with super[GenSeqViewLike].Forced[B] with Transformed[B]
- trait Sliced extends super.Sliced with Transformed[A] {
- def length = iterator.size
- def apply(idx: Int): A =
- if (idx + from < until) self.apply(idx + from)
- else throw new IndexOutOfBoundsException(idx.toString)
+ trait Sliced extends super[IterableViewLike].Sliced with super[GenSeqViewLike].Sliced with Transformed[A]
- override def foreach[U](f: A => U) = iterator foreach f
- override def iterator: Iterator[A] = self.iterator drop from take endpoints.width
- }
+ trait Mapped[B] extends super[IterableViewLike].Mapped[B] with super[GenSeqViewLike].Mapped[B] with Transformed[B]
- trait Mapped[B] extends super.Mapped[B] with Transformed[B] {
- def length = self.length
- def apply(idx: Int): B = mapping(self(idx))
- }
+ trait FlatMapped[B] extends super[IterableViewLike].FlatMapped[B] with super[GenSeqViewLike].FlatMapped[B] with Transformed[B]
- trait FlatMapped[B] extends super.FlatMapped[B] with Transformed[B] {
- protected[this] lazy val index = {
- val index = new Array[Int](self.length + 1)
- index(0) = 0
- for (i <- 0 until self.length)
- index(i + 1) = index(i) + mapping(self(i)).size
- index
- }
- protected[this] def findRow(idx: Int, lo: Int, hi: Int): Int = {
- val mid = (lo + hi) / 2
- if (idx < index(mid)) findRow(idx, lo, mid - 1)
- else if (idx >= index(mid + 1)) findRow(idx, mid + 1, hi)
- else mid
- }
- def length = index(self.length)
- def apply(idx: Int) = {
- val row = findRow(idx, 0, self.length - 1)
- mapping(self(row)).toSeq(idx - index(row))
- }
- }
+ trait Appended[B >: A] extends super[IterableViewLike].Appended[B] with super[GenSeqViewLike].Appended[B] with Transformed[B]
- trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
- protected[this] lazy val restSeq = rest.toSeq
- def length = self.length + restSeq.length
- def apply(idx: Int) =
- if (idx < self.length) self(idx) else restSeq(idx - self.length)
- }
+ trait Filtered extends super[IterableViewLike].Filtered with super[GenSeqViewLike].Filtered with Transformed[A]
- trait Filtered extends super.Filtered with Transformed[A] {
- protected[this] lazy val index = {
- var len = 0
- val arr = new Array[Int](self.length)
- for (i <- 0 until self.length)
- if (pred(self(i))) {
- arr(len) = i
- len += 1
- }
- arr take len
- }
- def length = index.length
- def apply(idx: Int) = self(index(idx))
- }
+ trait TakenWhile extends super[IterableViewLike].TakenWhile with super[GenSeqViewLike].TakenWhile with Transformed[A]
- trait TakenWhile extends super.TakenWhile with Transformed[A] {
- protected[this] lazy val len = self prefixLength pred
- def length = len
- def apply(idx: Int) =
- if (idx < len) self(idx)
- else throw new IndexOutOfBoundsException(idx.toString)
- }
+ trait DroppedWhile extends super[IterableViewLike].DroppedWhile with super[GenSeqViewLike].DroppedWhile with Transformed[A]
- trait DroppedWhile extends super.DroppedWhile with Transformed[A] {
- protected[this] lazy val start = self prefixLength pred
- def length = self.length - start
- def apply(idx: Int) =
- if (idx >= 0) self(idx + start)
- else throw new IndexOutOfBoundsException(idx.toString)
- }
+ trait Zipped[B] extends super[IterableViewLike].Zipped[B] with super[GenSeqViewLike].Zipped[B] with Transformed[(A, B)]
- trait Zipped[B] extends super.Zipped[B] with Transformed[(A, B)] {
- protected[this] lazy val thatSeq = other.toSeq
- /* Have to be careful here - other may be an infinite sequence. */
- def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
- def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
- }
+ trait ZippedAll[A1 >: A, B] extends super[IterableViewLike].ZippedAll[A1, B] with super[GenSeqViewLike].ZippedAll[A1, B] with Transformed[(A1, B)]
- trait ZippedAll[A1 >: A, B] extends super.ZippedAll[A1, B] with Transformed[(A1, B)] {
- protected[this] lazy val thatSeq = other.toSeq
- def length: Int = self.length max thatSeq.length
- def apply(idx: Int) =
- (if (idx < self.length) self.apply(idx) else thisElem,
- if (idx < thatSeq.length) thatSeq.apply(idx) else thatElem)
- }
+ trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed
- trait Reversed extends Transformed[A] {
- override def iterator: Iterator[A] = createReversedIterator
- def length: Int = self.length
- def apply(idx: Int): A = self.apply(length - 1 - idx)
- final override protected[this] def viewIdentifier = "R"
-
- private def createReversedIterator = {
- var lst = List[A]()
- for (elem <- self) lst ::= elem
- lst.iterator
- }
- }
+ trait Patched[B >: A] extends Transformed[B] with super[GenSeqViewLike].Patched[B]
- trait Patched[B >: A] extends Transformed[B] {
- protected[this] val from: Int
- protected[this] val patch: Seq[B]
- protected[this] val replaced: Int
- private lazy val plen = patch.length
- override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
- def length: Int = self.length + plen - replaced
- def apply(idx: Int): B =
- if (idx < from) self.apply(idx)
- else if (idx < from + plen) patch.apply(idx - from)
- else self.apply(idx - plen + replaced)
- final override protected[this] def viewIdentifier = "P"
- }
-
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
+ trait Prepended[B >: A] extends Transformed[B] with super[GenSeqViewLike].Prepended[B]
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
+ protected override def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
} with ZippedAll[A1, B]
protected def newReversed: Transformed[A] = new Reversed { }
- protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new {
+ protected def newPatched[B >: A](_from: Int, _patch: GenSeq[B], _replaced: Int): Transformed[B] = new {
val from = _from
val patch = _patch
val replaced = _replaced
@@ -203,7 +95,7 @@ trait SeqViewLike[+A,
override def reverse: This = newReversed.asInstanceOf[This]
- override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
newPatched(from, patch, replaced).asInstanceOf[That]
// was: val b = bf(repr)
// if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That]
@@ -227,13 +119,13 @@ trait SeqViewLike[+A,
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
- override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newForced(thisSeq union that).asInstanceOf[That]
- override def diff[B >: A](that: Seq[B]): This =
+ override def diff[B >: A](that: GenSeq[B]): This =
newForced(thisSeq diff that).asInstanceOf[This]
- override def intersect[B >: A](that: Seq[B]): This =
+ override def intersect[B >: A](that: GenSeq[B]): This =
newForced(thisSeq intersect that).asInstanceOf[This]
override def sorted[B >: A](implicit ord: Ordering[B]): This =
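
After the rewrite, SeqView still keeps reverse, patch and the multiset operations lazy; they build Reversed, Patched or Forced transformers and only produce a strict sequence when forced. A small sketch:

import scala.collection._

val v = List(1, 2, 3, 4).view
val r = v.reverse                 // lazy Reversed view
val p = v.patch(1, Seq(9, 9), 2)  // lazy Patched view, patch is now a GenSeq
println(r.force)  // List(4, 3, 2, 1)
println(p.force)  // List(1, 9, 9, 4)
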
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index f89df50f26..c7d42a4632 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -24,6 +24,7 @@ import generic._
*/
trait Set[A] extends (A => Boolean)
with Iterable[A]
+ with GenSet[A]
with GenericSetTemplate[A, Set]
with SetLike[A, Set[A]] {
override def companion: GenericCompanion[Set] = Set
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 7cbb8ee2ef..0a8cc13f23 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -58,8 +58,10 @@ import parallel.ParSet
*/
trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
extends IterableLike[A, This]
+ with GenSetLike[A, This]
with Subtractable[A, This]
- with Parallelizable[A, ParSet[A]] {
+ with Parallelizable[A, ParSet[A]]
+{
self =>
/** The empty set of the same type as this set
@@ -123,7 +125,7 @@ self =>
* @param elems the collection containing the added elements.
* @return a new $coll with the given elements added.
*/
- def ++ (elems: TraversableOnce[A]): This = newBuilder ++= this ++= elems result
+ def ++ (elems: GenTraversableOnce[A]): This = newBuilder ++= seq ++= elems.seq result
/** Creates a new set with a given element removed from this set.
*
@@ -139,39 +141,14 @@ self =>
*/
override def isEmpty: Boolean = size == 0
- /** Tests if some element is contained in this set.
- *
- * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
- * @param elem the element to test for membership.
- * @return `true` if `elem` is contained in this set, `false` otherwise.
- */
- def apply(elem: A): Boolean = this contains elem
-
- /** Computes the intersection between this set and another set.
- *
- * @param that the set to intersect with.
- * @return a new set consisting of all elements that are both in this
- * set and in the given set `that`.
- */
- def intersect(that: Set[A]): This = this filter that
-
- /** Computes the intersection between this set and another set.
- *
- * '''Note:''' Same as `intersect`.
- * @param that the set to intersect with.
- * @return a new set consisting of all elements that are both in this
- * set and in the given set `that`.
- */
- def &(that: Set[A]): This = this intersect that
-
- /** This method is an alias for `intersect`.
+ /** This method is an alias for `intersect`.
* It computes an intersection with set `that`.
* It removes all the elements that are not present in `that`.
*
* @param that the set to intersect with
*/
@deprecated("use & instead")
- def ** (that: Set[A]): This = &(that)
+ def ** (that: GenSet[A]): This = &(that)
/** Computes the union between this set and another set.
*
@@ -179,16 +156,7 @@ self =>
* @return a new set consisting of all elements that are in this
* set or in the given set `that`.
*/
- def union(that: Set[A]): This = this ++ that
-
- /** Computes the union between this set and another set.
- *
- * '''Note:''' Same as `union`.
- * @param that the set to form the union with.
- * @return a new set consisting of all elements that are in this
- * set or in the given set `that`.
- */
- def | (that: Set[A]): This = this union that
+ def union(that: GenSet[A]): This = this ++ that
/** Computes the difference of this set and another set.
*
@@ -196,24 +164,7 @@ self =>
* @return a set containing those elements of this
* set that are not also contained in the given set `that`.
*/
- def diff(that: Set[A]): This = this -- that
-
- /** The difference of this set and another set.
- *
- * '''Note:''' Same as `diff`.
- * @param that the set of elements to exclude.
- * @return a set containing those elements of this
- * set that are not also contained in the given set `that`.
- */
- def &~(that: Set[A]): This = this diff that
-
- /** Tests whether this set is a subset of another set.
- *
- * @param that the set to test.
- * @return `true` if this set is a subset of `that`, i.e. if
- * every element of this set is also an element of `that`.
- */
- def subsetOf(that: Set[A]) = this forall that
+ def diff(that: GenSet[A]): This = this -- that
/** An iterator over all subsets of this set of the given size.
* If the requested size is impossible, an empty iterator is returned.
@@ -290,31 +241,4 @@ self =>
override def stringPrefix: String = "Set"
override def toString = super[IterableLike].toString
- // Careful! Don't write a Set's hashCode like:
- // override def hashCode() = this map (_.hashCode) sum
- // Calling map on a set drops duplicates: any hashcode collisions would
- // then be dropped before they can be added.
- // Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.MurmurHash.symmetricHash(this,Set.hashSeed)
-
- /** Compares this set with another object for equality.
- *
- * '''Note:''' This operation contains an unchecked cast: if `that`
- * is a set, it will assume with an unchecked cast
- * that it has the same element type as this set.
- * Any subsequent ClassCastException is treated as a `false` result.
- * @param that the other object
- * @return `true` if `that` is a set which contains the same elements
- * as this set.
- */
- override def equals(that: Any): Boolean = that match {
- case that: Set[_] =>
- (this eq that) ||
- (that canEqual this) &&
- (this.size == that.size) &&
- (try this subsetOf that.asInstanceOf[Set[A]]
- catch { case ex: ClassCastException => false })
- case _ =>
- false
- }
}
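
Set algebra on sequential sets now accepts GenSet arguments, so a parallel set can sit on the right-hand side directly. A hedged sketch, assuming the parallel.ParSet factory:

import scala.collection._

val s  = Set(1, 2, 3)
val ps = parallel.ParSet(2, 3, 4)
val u  = s union ps             // Set(1, 2, 3, 4)
val d  = s diff ps              // Set(1)
val in = Set(2, 3) subsetOf ps  // true
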
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 9308de45b0..ab31bf32b9 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -26,11 +26,11 @@ trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A,
override def - (elem: A) = self.-(elem)
override def isEmpty: Boolean = self.isEmpty
override def apply(elem: A): Boolean = self.apply(elem)
- override def intersect(that: Set[A]) = self.intersect(that)
- override def &(that: Set[A]): This = self.&(that)
- override def union(that: Set[A]): This = self.union(that)
- override def | (that: Set[A]): This = self.|(that)
- override def diff(that: Set[A]): This = self.diff(that)
- override def &~(that: Set[A]): This = self.&~(that)
- override def subsetOf(that: Set[A]): Boolean = self.subsetOf(that)
+ override def intersect(that: GenSet[A]) = self.intersect(that)
+ override def &(that: GenSet[A]): This = self.&(that)
+ override def union(that: GenSet[A]): This = self.union(that)
+ override def | (that: GenSet[A]): This = self.|(that)
+ override def diff(that: GenSet[A]): This = self.diff(that)
+ override def &~(that: GenSet[A]): This = self.&~(that)
+ override def subsetOf(that: GenSet[A]): Boolean = self.subsetOf(that)
}
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 04e2a87c26..75541407dd 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -33,7 +33,7 @@ self =>
override def until(until: A): This = rangeImpl(None, Some(until))
override def range(from: A, until: A): This = rangeImpl(Some(from), Some(until))
- override def subsetOf(that: Set[A]): Boolean = that match {
+ override def subsetOf(that: GenSet[A]): Boolean = that match {
// TODO: It may actually be pretty rare that the guard here ever
// passes. Is this really worth keeping? If it is, we should add
// more sensible implementations of == to Ordering.
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 25aba13b0f..500c340368 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -15,20 +15,26 @@ import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
import scala.util.control.Breaks
/** A trait for traversable collections.
+ * All operations are guaranteed to be performed in a single-threaded manner.
+ *
* $traversableInfo
*/
trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
+ with GenTraversable[A]
+ with TraversableOnce[A]
with GenericTraversableTemplate[A, Traversable] {
- def companion: GenericCompanion[Traversable] = Traversable
+ override def companion: GenericCompanion[Traversable] = Traversable
+
+ override def seq: Traversable[A] = this
/* The following methods are inherited from TraversableLike
*
override def isEmpty: Boolean
override def size: Int
override def hasDefiniteSize
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def filter(p: A => Boolean): Traversable[A]
override def remove(p: A => Boolean): Traversable[A]
override def partition(p: A => Boolean): (Traversable[A], Traversable[A])
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 950f9e65a7..3f67788b59 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -19,6 +19,7 @@ import parallel.ParIterable
/** A template trait for traversable collections of type `Traversable[A]`.
+ *
* $traversableInfo
* @define mutability
* @define traversableInfo
@@ -91,8 +92,10 @@ import parallel.ParIterable
*/
trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
with FilterMonadic[A, Repr]
- with TraversableOnce[A]
- with Parallelizable[A, ParIterable[A]] {
+ with TraversableOnceLike[A]
+ with GenTraversableLike[A, Repr]
+ with Parallelizable[A, ParIterable[A]]
+{
self =>
import Traversable.breaks._
@@ -185,11 +188,11 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* @return a new $coll which contains all elements of this $coll
* followed by all elements of `that`.
*/
- def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.size)
+ if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.seq.size)
b ++= thisCollection
- b ++= that
+ b ++= that.seq
b.result
}
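The widened `++` accepts any `GenTraversableOnce` and appends the argument through `that.seq`, so a sequential receiver can be combined with a parallel argument; a minimal sketch:

  import scala.collection._

  val xs = List(1, 2)
  val ps = parallel.ParSeq(3, 4)
  val ys = xs ++ ps                  // List(1, 2, 3, 4); ps is traversed via ps.seq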
@@ -223,7 +226,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* but Traversable and down can use the overload.
*/
def ++:[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
- (that ++ this)(breakOut)
+ (that ++ seq)(breakOut)
/** Builds a new collection by applying a function to all elements of this $coll.
*
@@ -261,9 +264,9 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* @return a new $coll resulting from applying the given collection-valued function
* `f` to each element of this $coll and concatenating the results.
*/
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- for (x <- this) b ++= f(x)
+ for (x <- this) b ++= f(x).seq
b.result
}
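Likewise, the collection-valued function given to `flatMap` may now return any `GenTraversableOnce`; each result is appended sequentially through `f(x).seq`. A minimal sketch:

  import scala.collection._

  val nested = List(1, 2, 3) flatMap (x => parallel.ParSeq(x, 10 * x))
  // nested == List(1, 10, 2, 20, 3, 30)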
@@ -435,6 +438,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
result
}
+ def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That = scanLeft(z)(op)
+
/** Produces a collection containing cumulative results of applying the
* operator going left to right.
*
@@ -518,7 +523,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* except the first one.
* @throws `UnsupportedOperationException` if the $coll is empty.
*/
- def tail: Repr = {
+ override def tail: Repr = {
if (isEmpty) throw new UnsupportedOperationException("empty.tail")
drop(1)
}
@@ -860,10 +865,10 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* @return a new $coll resulting from applying the given collection-valued function
* `f` to each element of the outer $coll that satisfies predicate `p` and concatenating the results.
*/
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- self)
- if (p(x)) b ++= f(x)
+ if (p(x)) b ++= f(x).seq
b.result
}
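Because the `WithFilter` variant of `flatMap` is widened in the same way, a for-comprehension can now combine a guard with a generator over a parallel collection; a minimal sketch:

  import scala.collection._

  val pairs = for {
    x <- List(1, 2, 3, 4)
    if x % 2 == 0                    // desugars to withFilter
    y <- parallel.ParSeq(x, -x)      // desugars to the flatMap above
  } yield y
  // pairs == List(2, -2, 4, -4)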
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index f660d31ab2..23e3f2a866 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -13,24 +13,14 @@ import annotation.unchecked.{ uncheckedVariance => uV }
/** A template trait for collections which can be traversed either once only
* or one or more times.
+ *
+ * All of the methods in this trait are guaranteed to be implemented
+ * in a single-threaded manner.
+ *
* $traversableonceinfo
*
* @tparam A the element type of the collection
*
- * @define traversableonceinfo
- * This trait exists primarily to eliminate code duplication between
- * `Iterator` and `Traversable`, and thus implements some of the common
- * methods that can be implemented solely in terms of foreach without
- * access to a `Builder`. It also includes a number of abstract methods
- * whose implementations are provided by `Iterator`, `Traversable`, etc.
- * It contains implementations common to `Iterators` and
- * `Traversables`, such as folds, conversions, and other operations which
- * traverse some or all of the elements and return a derived value.
- * Directly subclassing `TraversableOnce` is not recommended - instead,
- * consider declaring an `Iterator` with a `next` and `hasNext` method,
- * creating an `Iterator` with one of the methods on the `Iterator` object,
- * or declaring a subclass of `Traversable`.
- *
* @author Martin Odersky
* @author Paul Phillips
* @version 2.8
@@ -52,566 +42,14 @@ import annotation.unchecked.{ uncheckedVariance => uV }
*
* Note: will not terminate for infinite-sized collections.
*/
-trait TraversableOnce[+A] {
+trait TraversableOnce[+A] extends GenTraversableOnce[A] with TraversableOnceLike[A] {
self =>
- /** Self-documenting abstract methods. */
- def foreach[U](f: A => U): Unit
- def isEmpty: Boolean
- def hasDefiniteSize: Boolean
-
- /** Tests whether this $coll can be repeatedly traversed. Always
- * true for Traversables and false for Iterators unless overridden.
- *
- * @return `true` if it is repeatedly traversable, `false` otherwise.
- */
- def isTraversableAgain: Boolean
-
- /** Returns an Iterator over the elements in this $coll. Will return
- * the same Iterator if this instance is already an Iterator.
- * $willNotTerminateInf
- * @return an Iterator containing all elements of this $coll.
- */
- def toIterator: Iterator[A]
-
- /** Converts this $coll to an unspecified Traversable. Will return
- * the same collection if this instance is already Traversable.
- * $willNotTerminateInf
- * @return a Traversable containing all elements of this $coll.
- */
- def toTraversable: Traversable[A]
-
- /** Converts this $coll to a stream.
- * $willNotTerminateInf
- * @return a stream containing all elements of this $coll.
- */
- def toStream: Stream[A]
-
- // Note: We could redefine this in TraversableLike to always return `repr`
- // of type `Repr`, only if `Repr` had type bounds, which it doesn't, because
- // not all `Repr` are a subtype `TraversableOnce[A]`.
- // The alternative is redefining it for maps, sets and seqs. For concrete implementations
- // we don't have to do this anyway, since they are leaves in the inheritance hierarchy.
- /** A version of this collection with all
- * of the operations implemented sequentially (i.e. in a single-threaded manner).
- *
- * This method returns a reference to this collection. In parallel collections,
- * it is redefined to return a sequential implementation of this collection. In
- * both cases, it has O(1) complexity.
- *
- * @return a sequential view of the collection.
- */
- def seq: TraversableOnce[A] = this
-
- /** Presently these are abstract because the Traversable versions use
- * breakable/break, and I wasn't sure enough of how that's supposed to
- * function to consolidate them with the Iterator versions.
- */
- def forall(p: A => Boolean): Boolean
- def exists(p: A => Boolean): Boolean
- def find(p: A => Boolean): Option[A]
- def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
-
- // for internal use
- protected[this] def reversed = {
- var elems: List[A] = Nil
- self.seq foreach (elems ::= _)
- elems
- }
-
- /** The size of this $coll.
- *
- * $willNotTerminateInf
- *
- * @return the number of elements in this $coll.
- */
- def size: Int = {
- var result = 0
- for (x <- self) result += 1
- result
- }
-
- /** Tests whether the $coll is not empty.
- *
- * @return `true` if the $coll contains at least one element, `false` otherwise.
- */
- def nonEmpty: Boolean = !isEmpty
-
- /** Counts the number of elements in the $coll which satisfy a predicate.
- *
- * @param p the predicate used to test elements.
- * @return the number of elements satisfying the predicate `p`.
- */
- def count(p: A => Boolean): Int = {
- var cnt = 0
- for (x <- this)
- if (p(x)) cnt += 1
-
- cnt
- }
-
- /** Finds the first element of the $coll for which the given partial
- * function is defined, and applies the partial function to it.
- *
- * $mayNotTerminateInf
- * $orderDependent
- *
- * @param pf the partial function
- * @return an option value containing pf applied to the first
- * value for which it is defined, or `None` if none exists.
- * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
- */
- def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
- for (x <- self.toIterator) { // make sure to use an iterator or `seq`
- if (pf isDefinedAt x)
- return Some(pf(x))
- }
- None
- }
-
- /** Applies a binary operator to a start value and all elements of this $coll,
- * going left to right.
- *
- * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as
- * `xs foldLeft z`.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
-
- /** Applies a binary operator to all elements of this $coll and a start value,
- * going right to left.
- *
- * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as
- * `xs foldRight z`.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value
- * @param op the binary operator
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
-
- /** Applies a binary operator to a start value and all elements of this $coll,
- * going left to right.
- *
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def foldLeft[B](z: B)(op: (B, A) => B): B = {
- var result = z
- this.seq foreach (x => result = op(result, x))
- result
- }
-
- /** Applies a binary operator to all elements of this $coll and a start value,
- * going right to left.
- *
- * $willNotTerminateInf
- * $orderDependentFold
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def foldRight[B](z: B)(op: (A, B) => B): B =
- reversed.foldLeft(z)((x, y) => op(y, x))
-
- /** Applies a binary operator to all elements of this $coll, going left to right.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going left to right:
- * {{{
- * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
- */
- def reduceLeft[B >: A](op: (B, A) => B): B = {
- if (isEmpty)
- throw new UnsupportedOperationException("empty.reduceLeft")
-
- var first = true
- var acc: B = 0.asInstanceOf[B]
-
- for (x <- self) {
- if (first) {
- acc = x
- first = false
- }
- else acc = op(acc, x)
- }
- acc
- }
-
- /** Applies a binary operator to all elements of this $coll, going right to left.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll,
- * going right to left:
- * {{{
- * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
- */
- def reduceRight[B >: A](op: (A, B) => B): B = {
- if (isEmpty)
- throw new UnsupportedOperationException("empty.reduceRight")
-
- reversed.reduceLeft[B]((x, y) => op(y, x))
- }
-
- /** Optionally applies a binary operator to all elements of this $coll, going left to right.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceLeft(op)` is this $coll is nonempty,
- * `None` otherwise.
- */
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
- if (isEmpty) None else Some(reduceLeft(op))
-
- /** Optionally applies a binary operator to all elements of this $coll, going
- * right to left.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceRight(op)` is this $coll is nonempty,
- * `None` otherwise.
- */
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
- if (isEmpty) None else Some(reduceRight(op))
-
- /** Sums up the elements of this collection.
- *
- * @param num an implicit parameter defining a set of numeric operations
- * which includes the `+` operator to be used in forming the sum.
- * @tparam B the result type of the `+` operator.
- * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
- *
- * @usecase def sum: A
- *
- * @return the sum of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `sum`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
- *
- */
- def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
-
- /** Multiplies up the elements of this collection.
- *
- * @param num an implicit parameter defining a set of numeric operations
- * which includes the `*` operator to be used in forming the product.
- * @tparam B the result type of the `*` operator.
- * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
- *
- * @usecase def product: A
- *
- * @return the product of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `product`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
- */
- def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times)
+ override def seq: TraversableOnce[A] = this
- /** Finds the smallest element.
- *
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
- * @return the smallest element of this $coll with respect to the ordering `cmp`.
- *
- * @usecase def min: A
- * @return the smallest element of this $coll
- */
- def min[B >: A](implicit cmp: Ordering[B]): A = {
- if (isEmpty)
- throw new UnsupportedOperationException("empty.min")
-
- reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y)
- }
-
- /** Finds the largest element.
- *
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
- * @return the largest element of this $coll with respect to the ordering `cmp`.
- *
- * @usecase def max: A
- * @return the largest element of this $coll.
- */
- def max[B >: A](implicit cmp: Ordering[B]): A = {
- if (isEmpty)
- throw new UnsupportedOperationException("empty.max")
-
- reduceLeft((x, y) => if (cmp.gteq(x, y)) x else y)
- }
-
- def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = {
- if (isEmpty)
- throw new UnsupportedOperationException("empty.maxBy")
-
- reduceLeft((x, y) => if (cmp.gteq(f(x), f(y))) x else y)
- }
- def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = {
- if (isEmpty)
- throw new UnsupportedOperationException("empty.minBy")
-
- reduceLeft((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
- }
-
- /** Copies all elements of this $coll to a buffer.
- * $willNotTerminateInf
- * @param dest The buffer to which elements are copied.
- */
- def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= self
-
- /** Copies values of this $coll to an array.
- * Fills the given array `xs` with values of this $coll, after skipping `start` values.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @param start the starting index.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A], start: Int): Unit
- */
- def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
- copyToArray(xs, start, xs.length - start)
-
- /** Copies values of this $coll to an array.
- * Fills the given array `xs` with values of this $coll.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A]): Unit
- */
- def copyToArray[B >: A](xs: Array[B]): Unit =
- copyToArray(xs, 0, xs.length)
-
- /** Converts this $coll to an array.
- * $willNotTerminateInf
- *
- * @tparam B the type of the elements of the array. A `ClassManifest` for
- * this type must be available.
- * @return an array containing all elements of this $coll.
- *
- * @usecase def toArray: Array[A]
- * @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
- */
- def toArray[B >: A : ClassManifest]: Array[B] = {
- if (isTraversableAgain) {
- val result = new Array[B](size)
- copyToArray(result, 0)
- result
- }
- else toBuffer.toArray
- }
-
- /** Converts this $coll to a list.
- * $willNotTerminateInf
- * @return a list containing all elements of this $coll.
- */
- def toList: List[A] = new ListBuffer[A] ++= self toList
-
- /** Converts this $coll to an iterable collection. Note that
- * the choice of target `Iterable` is lazy in this default implementation
- * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may
- * be an iterator which is only traversable once).
- *
- * $willNotTerminateInf
- * @return an `Iterable` containing all elements of this $coll.
- */
- def toIterable: Iterable[A] = toStream
-
- /** Converts this $coll to a sequence. As with `toIterable`, it's lazy
- * in this default implementation, as this `TraversableOnce` may be
- * lazy and unevaluated.
- *
- * $willNotTerminateInf
- * @return a sequence containing all elements of this $coll.
- */
- def toSeq: Seq[A] = toStream
-
- /** Converts this $coll to an indexed sequence.
- * $willNotTerminateInf
- * @return an indexed sequence containing all elements of this $coll.
- */
- def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ self
-
- /** Converts this $coll to a mutable buffer.
- * $willNotTerminateInf
- * @return a buffer containing all elements of this $coll.
- */
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= self
-
- /** Converts this $coll to a set.
- * $willNotTerminateInf
- * @return a set containing all elements of this $coll.
- */
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ self
-
- /** Converts this $coll to a map. This method is unavailable unless
- * the elements are members of Tuple2, each ((T, U)) becoming a key-value
- * pair in the map. Duplicate keys will be overwritten by later keys:
- * if this is an unordered collection, which key is in the resulting map
- * is undefined.
- * $willNotTerminateInf
- * @return a map containing all elements of this $coll.
- * @usecase def toMap[T, U]: Map[T, U]
- * @return a map of type `immutable.Map[T, U]`
- * containing all key/value pairs of type `(T, U)` of this $coll.
- */
- def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
- val b = immutable.Map.newBuilder[T, U]
- for (x <- self)
- b += x
-
- b.result
- }
-
- /** Displays all elements of this $coll in a string using start, end, and
- * separator strings.
- *
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return a string representation of this $coll. The resulting string
- * begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method
- * `toString`) of all elements of this $coll are separated by
- * the string `sep`.
- *
- * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
- */
- def mkString(start: String, sep: String, end: String): String =
- addString(new StringBuilder(), start, sep, end).toString
-
- /** Displays all elements of this $coll in a string using a separator string.
- *
- * @param sep the separator string.
- * @return a string representation of this $coll. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll are separated by the string `sep`.
- *
- * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
- */
- def mkString(sep: String): String = mkString("", sep, "")
-
- /** Displays all elements of this $coll in a string.
- *
- * @return a string representation of this $coll. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll follow each other without any
- * separator string.
- */
- def mkString: String = mkString("")
-
- /** Appends all elements of this $coll to a string builder using start, end,
- * and separator strings.
- * The written text begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll are separated by the string `sep`.
- *
- * @param b the string builder to which elements are appended.
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
- var first = true
-
- b append start
- for (x <- self) {
- if (first) {
- b append x
- first = false
- }
- else {
- b append sep
- b append x
- }
- }
- b append end
-
- b
- }
-
- /** Appends all elements of this $coll to a string builder using a separator
- * string. The written text consists of the string representations (w.r.t.
- * the method `toString`) of all elements of this $coll, separated by the
- * string `sep`.
- *
- * @param b the string builder to which elements are appended.
- * @param sep the separator string.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "")
-
- /** Appends all elements of this $coll to a string builder.
- * The written text consists of the string representations (w.r.t. the method
- * `toString`) of all elements of this $coll without any separator string.
- *
- * @param b the string builder to which elements are appended.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(b: StringBuilder): StringBuilder = addString(b, "")
}
+
object TraversableOnce {
implicit def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
implicit def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
@@ -643,9 +81,8 @@ object TraversableOnce {
class MonadOps[+A](trav: TraversableOnce[A]) {
def map[B](f: A => B): TraversableOnce[B] = trav.toIterator map f
- def flatMap[B](f: A => TraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f
+ def flatMap[B](f: A => GenTraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f
def withFilter(p: A => Boolean) = trav.toIterator filter p
def filter(p: A => Boolean): TraversableOnce[A] = withFilter(p)
}
}
-
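`TraversableOnce` is now a thin trait mixing `GenTraversableOnce` with `TraversableOnceLike` and fixing `seq` to the identity, so existing single-pass code is unaffected; a minimal sketch:

  import scala.collection._

  val it: TraversableOnce[Int] = Iterator(1, 2, 3)
  assert(it.seq eq it)               // seq is still the identity here
  val total = it.foldLeft(0)(_ + _)  // 6, traversing the iterator once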
diff --git a/src/library/scala/collection/TraversableOnceLike.scala b/src/library/scala/collection/TraversableOnceLike.scala
new file mode 100644
index 0000000000..c72156554a
--- /dev/null
+++ b/src/library/scala/collection/TraversableOnceLike.scala
@@ -0,0 +1,627 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import mutable.{ Buffer, ListBuffer, ArrayBuffer }
+import annotation.unchecked.{ uncheckedVariance => uV }
+
+/** A template trait for collections which can be traversed either once only
+ * or one or more times.
+ * $traversableonceinfo
+ *
+ * @tparam A the element type of the collection
+ *
+ * @define traversableonceinfo
+ * This trait exists primarily to eliminate code duplication between
+ * `Iterator` and `Traversable`, and thus implements some of the common
+ * methods that can be implemented solely in terms of foreach without
+ * access to a `Builder`. It also includes a number of abstract methods
+ * whose implementations are provided by `Iterator`, `Traversable`, etc.
+ * It contains implementations common to `Iterators` and
+ * `Traversables`, such as folds, conversions, and other operations which
+ * traverse some or all of the elements and return a derived value.
+ * Directly subclassing `TraversableOnce` is not recommended - instead,
+ * consider declaring an `Iterator` with a `next` and `hasNext` method,
+ * creating an `Iterator` with one of the methods on the `Iterator` object,
+ * or declaring a subclass of `Traversable`.
+ *
+ * @author Martin Odersky
+ * @author Paul Phillips
+ * @version 2.9
+ * @since 2.9
+ *
+ * @define coll traversable or iterator
+ * @define orderDependent
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered.
+ * @define orderDependentFold
+ *
+ * Note: might return different results for different runs, unless the
+ * underlying collection type is ordered or the operator is associative
+ * and commutative.
+ * @define mayNotTerminateInf
+ *
+ * Note: may not terminate for infinite-sized collections.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ */
+trait TraversableOnceLike[+A] extends GenTraversableOnceLike[A] {
+ self =>
+
+ /** Self-documenting abstract methods. */
+ def foreach[U](f: A => U): Unit
+ def isEmpty: Boolean
+ def hasDefiniteSize: Boolean
+
+ /** Tests whether this $coll can be repeatedly traversed. Always
+ * true for Traversables and false for Iterators unless overridden.
+ *
+ * @return `true` if it is repeatedly traversable, `false` otherwise.
+ */
+ def isTraversableAgain: Boolean
+
+ /** Returns an Iterator over the elements in this $coll. Will return
+ * the same Iterator if this instance is already an Iterator.
+ * $willNotTerminateInf
+ * @return an Iterator containing all elements of this $coll.
+ */
+ def toIterator: Iterator[A]
+
+ /** Converts this $coll to an unspecified Traversable. Will return
+ * the same collection if this instance is already Traversable.
+ * $willNotTerminateInf
+ * @return a Traversable containing all elements of this $coll.
+ */
+ def toTraversable: Traversable[A]
+
+ /** Converts this $coll to a stream.
+ * $willNotTerminateInf
+ * @return a stream containing all elements of this $coll.
+ */
+ def toStream: Stream[A]
+
+ // Note: We could redefine this in TraversableLike to always return `repr`
+ // of type `Repr`, only if `Repr` had type bounds, which it doesn't, because
+ // not all `Repr` are a subtype `TraversableOnce[A]`.
+ // The alternative is redefining it for maps, sets and seqs. For concrete implementations
+ // we don't have to do this anyway, since they are leaves in the inheritance hierarchy.
+ // Note 2: This is implemented in all collections _not_ inheriting `Traversable[A]`
+ // at least indirectly. Currently, these are `ArrayOps` and `StringOps`.
+ // It is also implemented in `TraversableOnce[A]`.
+ /** A version of this collection with all
+ * of the operations implemented sequentially (i.e. in a single-threaded manner).
+ *
+ * This method returns a reference to this collection. In parallel collections,
+ * it is redefined to return a sequential implementation of this collection. In
+ * both cases, it has O(1) complexity.
+ *
+ * @return a sequential view of the collection.
+ */
+ def seq: TraversableOnce[A]
+
+ /** Presently these are abstract because the Traversable versions use
+ * breakable/break, and I wasn't sure enough of how that's supposed to
+ * function to consolidate them with the Iterator versions.
+ */
+ def forall(p: A => Boolean): Boolean
+ def exists(p: A => Boolean): Boolean
+ def find(p: A => Boolean): Option[A]
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+
+ // for internal use
+ protected[this] def reversed = {
+ var elems: List[A] = Nil
+ self.seq foreach (elems ::= _)
+ elems
+ }
+
+ /** The size of this $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * @return the number of elements in this $coll.
+ */
+ def size: Int = {
+ var result = 0
+ for (x <- self) result += 1
+ result
+ }
+
+ /** Tests whether the $coll is not empty.
+ *
+ * @return `true` if the $coll contains at least one element, `false` otherwise.
+ */
+ def nonEmpty: Boolean = !isEmpty
+
+ /** Counts the number of elements in the $coll which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return the number of elements satisfying the predicate `p`.
+ */
+ def count(p: A => Boolean): Int = {
+ var cnt = 0
+ for (x <- this)
+ if (p(x)) cnt += 1
+
+ cnt
+ }
+
+ /** Finds the first element of the $coll for which the given partial
+ * function is defined, and applies the partial function to it.
+ *
+ * $mayNotTerminateInf
+ * $orderDependent
+ *
+ * @param pf the partial function
+ * @return an option value containing pf applied to the first
+ * value for which it is defined, or `None` if none exists.
+ * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
+ */
+ def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
+ for (x <- self.toIterator) { // make sure to use an iterator or `seq`
+ if (pf isDefinedAt x)
+ return Some(pf(x))
+ }
+ None
+ }
+
+ /** Applies a binary operator to a start value and all elements of this $coll,
+ * going left to right.
+ *
+ * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as
+ * `xs foldLeft z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
+
+ /** Applies a binary operator to all elements of this $coll and a start value,
+ * going right to left.
+ *
+ * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as
+ * `xs foldRight z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value
+ * @param op the binary operator
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
+
+ /** Applies a binary operator to a start value and all elements of this $coll,
+ * going left to right.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldLeft[B](z: B)(op: (B, A) => B): B = {
+ var result = z
+ this.seq foreach (x => result = op(result, x))
+ result
+ }
+
+ /** Applies a binary operator to all elements of this $coll and a start value,
+ * going right to left.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldRight[B](z: B)(op: (A, B) => B): B =
+ reversed.foldLeft(z)((x, y) => op(y, x))
+
+ /** Applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right:
+ * {{{
+ * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceLeft[B >: A](op: (B, A) => B): B = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.reduceLeft")
+
+ var first = true
+ var acc: B = 0.asInstanceOf[B]
+
+ for (x <- self) {
+ if (first) {
+ acc = x
+ first = false
+ }
+ else acc = op(acc, x)
+ }
+ acc
+ }
+
+ /** Applies a binary operator to all elements of this $coll, going right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceRight[B >: A](op: (A, B) => B): B = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.reduceRight")
+
+ reversed.reduceLeft[B]((x, y) => op(y, x))
+ }
+
+ /** Optionally applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
+ if (isEmpty) None else Some(reduceLeft(op))
+
+ /** Optionally applies a binary operator to all elements of this $coll, going
+ * right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
+ if (isEmpty) None else Some(reduceRight(op))
+
+ def reduce[A1 >: A](op: (A1, A1) => A1): A1 = reduceLeft(op)
+
+ def reduceOption[A1 >: A](op: (A1, A1) => A1): Option[A1] = reduceLeftOption(op)
+
+ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
+
+ def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
+
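These members give sequential collections the same `reduce`/`fold`/`aggregate` vocabulary as the parallel ones; in this sequential implementation they all run left to right and `aggregate` never uses `combop`. A small illustration:

  val xs = List(1, 2, 3, 4)
  xs.reduce(_ + _)                   // 10, same as reduceLeft
  xs.fold(0)(_ + _)                  // 10
  xs.aggregate(0)(_ + _, _ + _)      // 10; combop only matters for parallel collections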
+ /** Sums up the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `+` operator to be used in forming the sum.
+ * @tparam B the result type of the `+` operator.
+ * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
+ *
+ * @usecase def sum: A
+ *
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ *
+ */
+ def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
+
+ /** Multiplies together the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `*` operator to be used in forming the product.
+ * @tparam B the result type of the `*` operator.
+ * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
+ *
+ * @usecase def product: A
+ *
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ */
+ def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times)
+
+ /** Finds the smallest element.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The type over which the ordering is defined.
+ * @return the smallest element of this $coll with respect to the ordering `cmp`.
+ *
+ * @usecase def min: A
+ * @return the smallest element of this $coll
+ */
+ def min[B >: A](implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.min")
+
+ reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y)
+ }
+
+ /** Finds the largest element.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The type over which the ordering is defined.
+ * @return the largest element of this $coll with respect to the ordering `cmp`.
+ *
+ * @usecase def max: A
+ * @return the largest element of this $coll.
+ */
+ def max[B >: A](implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.max")
+
+ reduceLeft((x, y) => if (cmp.gteq(x, y)) x else y)
+ }
+
+ def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.maxBy")
+
+ reduceLeft((x, y) => if (cmp.gteq(f(x), f(y))) x else y)
+ }
+ def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.minBy")
+
+ reduceLeft((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
+ }
+
+ /** Copies all elements of this $coll to a buffer.
+ * $willNotTerminateInf
+ * @param dest The buffer to which elements are copied.
+ */
+ def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= seq
+
+ /** Copies values of this $coll to an array.
+ * Fills the given array `xs` with values of this $coll, after skipping `start` values.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
+ copyToArray(xs, start, xs.length - start)
+
+ /** Copies values of this $coll to an array.
+ * Fills the given array `xs` with values of this $coll.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A]): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B]): Unit =
+ copyToArray(xs, 0, xs.length)
+
+ /** Converts this $coll to an array.
+ * $willNotTerminateInf
+ *
+ * @tparam B the type of the elements of the array. A `ClassManifest` for
+ * this type must be available.
+ * @return an array containing all elements of this $coll.
+ *
+ * @usecase def toArray: Array[A]
+ * @return an array containing all elements of this $coll.
+ * A `ClassManifest` must be available for the element type of this $coll.
+ */
+ def toArray[B >: A : ClassManifest]: Array[B] = {
+ if (isTraversableAgain) {
+ val result = new Array[B](size)
+ copyToArray(result, 0)
+ result
+ }
+ else toBuffer.toArray
+ }
+
+ /** Converts this $coll to a list.
+ * $willNotTerminateInf
+ * @return a list containing all elements of this $coll.
+ */
+ def toList: List[A] = new ListBuffer[A] ++= seq toList
+
+ /** Converts this $coll to an iterable collection. Note that
+ * the choice of target `Iterable` is lazy in this default implementation
+ * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may
+ * be an iterator which is only traversable once).
+ *
+ * $willNotTerminateInf
+ * @return an `Iterable` containing all elements of this $coll.
+ */
+ def toIterable: Iterable[A] = toStream
+
+ /** Converts this $coll to a sequence. As with `toIterable`, it's lazy
+ * in this default implementation, as this `TraversableOnce` may be
+ * lazy and unevaluated.
+ *
+ * $willNotTerminateInf
+ * @return a sequence containing all elements of this $coll.
+ */
+ def toSeq: Seq[A] = toStream
+
+ /** Converts this $coll to an indexed sequence.
+ * $willNotTerminateInf
+ * @return an indexed sequence containing all elements of this $coll.
+ */
+ def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ seq
+
+ /** Converts this $coll to a mutable buffer.
+ * $willNotTerminateInf
+ * @return a buffer containing all elements of this $coll.
+ */
+ def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
+
+ /** Converts this $coll to a set.
+ * $willNotTerminateInf
+ * @return a set containing all elements of this $coll.
+ */
+ def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
+
+ /** Converts this $coll to a map. This method is unavailable unless
+ * the elements are members of Tuple2, each ((T, U)) becoming a key-value
+ * pair in the map. Duplicate keys will be overwritten by later keys:
+ * if this is an unordered collection, which key is in the resulting map
+ * is undefined.
+ * $willNotTerminateInf
+ * @return a map containing all elements of this $coll.
+ * @usecase def toMap[T, U]: Map[T, U]
+ * @return a map of type `immutable.Map[T, U]`
+ * containing all key/value pairs of type `(T, U)` of this $coll.
+ */
+ def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
+ val b = immutable.Map.newBuilder[T, U]
+ for (x <- self)
+ b += x
+
+ b.result
+ }
+
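A short example of the conversion described above, showing that later duplicate keys win:

  val m = List("a" -> 1, "b" -> 2, "a" -> 3).toMap
  // m == Map("a" -> 3, "b" -> 2)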
+ /** Displays all elements of this $coll in a string using start, end, and
+ * separator strings.
+ *
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return a string representation of this $coll. The resulting string
+ * begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method
+ * `toString`) of all elements of this $coll are separated by
+ * the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
+ */
+ def mkString(start: String, sep: String, end: String): String =
+ addString(new StringBuilder(), start, sep, end).toString
+
+ /** Displays all elements of this $coll in a string using a separator string.
+ *
+ * @param sep the separator string.
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
+ */
+ def mkString(sep: String): String = mkString("", sep, "")
+
+ /** Displays all elements of this $coll in a string.
+ *
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll follow each other without any
+ * separator string.
+ */
+ def mkString: String = mkString("")
+
+ /** Appends all elements of this $coll to a string builder using start, end,
+ * and separator strings.
+ * The written text begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @param b the string builder to which elements are appended.
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
+ var first = true
+
+ b append start
+ for (x <- self) {
+ if (first) {
+ b append x
+ first = false
+ }
+ else {
+ b append sep
+ b append x
+ }
+ }
+ b append end
+
+ b
+ }
+
+ /** Appends all elements of this $coll to a string builder using a separator
+ * string. The written text consists of the string representations (w.r.t.
+ * the method `toString`) of all elements of this $coll, separated by the
+ * string `sep`.
+ *
+ * @param b the string builder to which elements are appended.
+ * @param sep the separator string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "")
+
+ /** Appends all elements of this $coll to a string builder.
+ * The written text consists of the string representations (w.r.t. the method
+ * `toString`) of all elements of this $coll without any separator string.
+ *
+ * @param b the string builder to which elements are appended.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder): StringBuilder = addString(b, "")
+}
+
+
+
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index caf7af0946..15565e57c6 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -30,9 +30,9 @@ trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversabl
override def nonEmpty: Boolean = self.nonEmpty
override def size: Int = self.size
override def hasDefiniteSize = self.hasDefiniteSize
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf)
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf)
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.map(f)(bf)
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf)
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf)
override def filter(p: A => Boolean): Repr = self.filter(p)
override def filterNot(p: A => Boolean): Repr = self.filterNot(p)
override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf)
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index 87690f9548..98f0037901 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -15,7 +15,7 @@ import TraversableView.NoBuilder
/** A base trait for non-strict views of traversable collections.
* $traversableViewInfo
*/
-trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] { }
+trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]] with GenTraversableView[A, Coll] { }
/** An object containing the necessary implicit definitions to make
* `TraversableView`s work. Its definitions are generally not accessed directly by clients.
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index b5b18b7a8a..6dc0936c61 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -59,7 +59,7 @@ trait ViewMkString[+A] {
trait TraversableViewLike[+A,
+Coll,
+This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]]
- extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A]
+ extends Traversable[A] with TraversableLike[A, This] with ViewMkString[A] with GenTraversableViewLike[A, Coll, This]
{
self =>
@@ -70,7 +70,6 @@ trait TraversableViewLike[+A,
protected[this] def viewIdentifier: String = ""
protected[this] def viewIdString: String = ""
override def stringPrefix = "TraversableView"
- def viewToString = stringPrefix + viewIdString + "(...)"
def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = {
val b = bf(underlying)
@@ -78,129 +77,36 @@ trait TraversableViewLike[+A,
b.result()
}
- /** The implementation base trait of this view.
- * This trait and all its subtraits has to be re-implemented for each
- * ViewLike class.
- */
- trait Transformed[+B] extends TraversableView[B, Coll] {
+ trait Transformed[+B] extends TraversableView[B, Coll] with super.Transformed[B] {
def foreach[U](f: B => U): Unit
- lazy val underlying = self.underlying
- final override protected[this] def viewIdString = self.viewIdString + viewIdentifier
override def stringPrefix = self.stringPrefix
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] {
- final override def isEmpty = true
- final override def foreach[U](f: Nothing => U): Unit = ()
- }
+
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView
/** A fall back which forces everything into a vector and then applies an operation
* on it. Used for those operations which do not naturally lend themselves to a view
*/
- trait Forced[B] extends Transformed[B] {
- protected[this] val forced: Seq[B]
- def foreach[U](f: B => U) = forced foreach f
- final override protected[this] def viewIdentifier = "C"
- }
+ trait Forced[B] extends Transformed[B] with super.Forced[B]
- trait Sliced extends Transformed[A] {
- protected[this] val endpoints: SliceInterval
- protected[this] def from = endpoints.from
- protected[this] def until = endpoints.until
- // protected def newSliced(_endpoints: SliceInterval): Transformed[A] =
- // self.newSliced(endpoints.recalculate(_endpoints))
-
- def foreach[U](f: A => U) {
- var index = 0
- for (x <- self) {
- if (from <= index) {
- if (until <= index) return
- f(x)
- }
- index += 1
- }
- }
- final override protected[this] def viewIdentifier = "S"
- }
+ trait Sliced extends Transformed[A] with super.Sliced
- trait Mapped[B] extends Transformed[B] {
- protected[this] val mapping: A => B
- def foreach[U](f: B => U) {
- for (x <- self)
- f(mapping(x))
- }
- final override protected[this] def viewIdentifier = "M"
- }
+ trait Mapped[B] extends Transformed[B] with super.Mapped[B]
- trait FlatMapped[B] extends Transformed[B] {
- protected[this] val mapping: A => TraversableOnce[B]
- def foreach[U](f: B => U) {
- for (x <- self)
- for (y <- mapping(x))
- f(y)
- }
- final override protected[this] def viewIdentifier = "N"
- }
+ trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B]
- trait Appended[B >: A] extends Transformed[B] {
- protected[this] val rest: Traversable[B]
- def foreach[U](f: B => U) {
- self foreach f
- rest foreach f
- }
- final override protected[this] def viewIdentifier = "A"
- }
+ trait Appended[B >: A] extends Transformed[B] with super.Appended[B]
- trait Filtered extends Transformed[A] {
- protected[this] val pred: A => Boolean
- def foreach[U](f: A => U) {
- for (x <- self)
- if (pred(x)) f(x)
- }
- final override protected[this] def viewIdentifier = "F"
- }
+ trait Filtered extends Transformed[A] with super.Filtered
- trait TakenWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- def foreach[U](f: A => U) {
- for (x <- self) {
- if (!pred(x)) return
- f(x)
- }
- }
- final override protected[this] def viewIdentifier = "T"
- }
+ trait TakenWhile extends Transformed[A] with super.TakenWhile
- trait DroppedWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- def foreach[U](f: A => U) {
- var go = false
- for (x <- self) {
- if (!go && !pred(x)) go = true
- if (go) f(x)
- }
- }
- final override protected[this] def viewIdentifier = "D"
- }
+ trait DroppedWhile extends Transformed[A] with super.DroppedWhile
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected def newForced[B](xs: => Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
- protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected def newFlatMapped[B](f: A => TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
- protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
-
- protected def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
- protected def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue))
-
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
- newAppended(xs.toTraversable).asInstanceOf[That]
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ newAppended(xs.seq.toTraversable).asInstanceOf[That]
// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
// else super.++[B, That](that)(bf)
}
@@ -215,7 +121,7 @@ trait TraversableViewLike[+A,
override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That =
filter(pf.isDefinedAt).map(pf)(bf)
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
newFlatMapped(f).asInstanceOf[That]
// was: val b = bf(repr)
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
@@ -223,6 +129,21 @@ trait TraversableViewLike[+A,
}
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
+ /** Boilerplate method, to override in each subclass
+ * This method could be eliminated if Scala had virtual classes
+ */
+ protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
+ protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
+ protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
+ protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
+ protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
+
+ protected def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
+ protected def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue))
+
override def filter(p: A => Boolean): This = newFiltered(p)
override def withFilter(p: A => Boolean): This = newFiltered(p)
override def partition(p: A => Boolean): (This, This) = (newFiltered(p), newFiltered(!p(_)))
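With the transformer traits now delegating to their `super` counterparts from `GenTraversableViewLike`, view composition stays lazy, and `++` on a view only takes the `seq` conversion of its `GenTraversableOnce` argument before appending; a minimal sketch:

  import scala.collection._

  val v = List(1, 2, 3).view
  val appended = v ++ parallel.ParSeq(4, 5)   // still a lazy view
  appended.toList                             // List(1, 2, 3, 4, 5)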
diff --git a/src/library/scala/collection/generic/Addable.scala b/src/library/scala/collection/generic/Addable.scala
index 5be428305e..0e903ac2a1 100644
--- a/src/library/scala/collection/generic/Addable.scala
+++ b/src/library/scala/collection/generic/Addable.scala
@@ -52,5 +52,5 @@ trait Addable[A, +Repr <: Addable[A, Repr]] { self =>
* @param elems the collection containing the added elements.
* @return a new $coll with the given elements added.
*/
- def ++ (xs: TraversableOnce[A]): Repr = (repr /: xs) (_ + _)
+ def ++ (xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ + _)
}
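
A sketch of what the fold above does: the (possibly parallel) argument is normalized with `seq` and then folded in, one element at a time, with `+`.

    import scala.collection._

    val base = immutable.Set(1, 2)
    val all  = (base /: parallel.ParSeq(3, 4).seq)(_ + _)   // same shape as ++ above
    // all == Set(1, 2, 3, 4)
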
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index b56dab9794..ad2381a571 100644
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection
package generic
@@ -21,3 +29,4 @@ trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] wit
+
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index 7e468a9c96..4d6d9ec6a3 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -1,11 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
+
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `TraversableLike`.
*/
trait FilterMonadic[+A, +Repr] {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
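
Because for-comprehensions desugar to exactly these four methods, a nested generator may now range over a general (possibly parallel) collection; a hedged sketch:

    import scala.collection._

    val xs = List(1, 2, 3)
    val ys = for (x <- xs; y <- parallel.ParSeq(x, x * 10)) yield y
    // desugars to xs.flatMap(x => parallel.ParSeq(x, x * 10).map(y => y)),
    // which type-checks because flatMap now accepts A => GenTraversableOnce[B]
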
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 084f884ebb..353ab9980f 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -21,7 +21,7 @@ import mutable.Builder
* @define coll collection
* @define Coll CC
*/
-abstract class GenericCompanion[+CC[X] <: Traversable[X]] {
+abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** The underlying collection type with unknown element type */
type Coll = CC[_]
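
With the bound relaxed to `GenTraversable`, one piece of generic code can range over both sequential and parallel companion objects. A minimal sketch; the helper `fill3` is hypothetical, not part of the library:

    import scala.collection._
    import scala.collection.generic.GenericCompanion

    // hypothetical helper: build a three-element collection from any companion
    def fill3[CC[X] <: GenTraversable[X]](companion: GenericCompanion[CC]): CC[Int] =
      companion(1, 2, 3)

    val xs = fill3(List)              // List[Int]
    val ps = fill3(parallel.ParSeq)   // parallel.ParSeq[Int]
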
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index 9e151e7543..783a4878f6 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
import scala.collection.parallel.Combiner
@@ -26,3 +34,4 @@ trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] {
}
+
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 0d87a2d548..1e3f7b5e40 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -45,8 +53,13 @@ extends GenericTraversableTemplate[A, CC]
}
-trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]]
+trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable]
{
+ protected[this] override def newCombiner: Combiner[(K, V), CC[K, V]] = {
+ val cb = mapCompanion.newCombiner[K, V]
+ cb
+ }
+
def mapCompanion: GenericParMapCompanion[CC]
def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = {
@@ -59,3 +72,4 @@ trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]]
+
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 839d61bfc3..9e1a04179b 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -14,7 +14,7 @@ package generic
/**
* @since 2.8
*/
-trait GenericSetTemplate[A, +CC[X] <: Set[X]] extends GenericTraversableTemplate[A, CC] {
+trait GenericSetTemplate[A, +CC[X] <: GenSet[X]] extends GenericTraversableTemplate[A, CC] {
def empty: CC[A] = companion.empty[A]
}
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 46b39bbf8b..37d86a0aa9 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -25,7 +25,7 @@ import annotation.unchecked.uncheckedVariance
* @define coll collection
* @define Coll CC
*/
-trait GenericTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
+trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
/** Applies a function `f` to all elements of this $coll.
*
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index 2c24b437d8..cc69faa270 100644
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -24,3 +32,4 @@ trait HasNewCombiner[+T, +Repr] {
+
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index fa5c87ad9f..8f9c4c9542 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -41,5 +41,5 @@ trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] {
// Iterable methods could be printed by cat IterableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def'
override def iterator: Iterator[A] = underlying.iterator
- override def sameElements[B >: A](that: Iterable[B]): Boolean = underlying.sameElements(that)
+ override def sameElements[B >: A](that: GenIterable[B]): Boolean = underlying.sameElements(that)
}
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index 1fdee065f6..122d1c9313 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -6,13 +6,13 @@
** |/ **
\* */
-
-
package scala.collection
package generic
+
import mutable.{Builder, MapBuilder}
+
/** A template for companion objects of `Map` and subclasses thereof.
*
* @define coll map
@@ -31,7 +31,7 @@ import mutable.{Builder, MapBuilder}
* @see CanBuildFrom
* @see GenericCanBuildFrom
*/
-abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] {
+abstract class MapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A, B]]] {
/** The type constructor of the collection that can be built by this factory */
type Coll = CC[_, _]
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index 6130ef2042..6151da6cb3 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -11,7 +11,7 @@ package generic
import mutable.{ Builder, GrowingBuilder }
-abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]]
+abstract class MutableSetFactory[CC[X] <: mutable.GenSet[X] with GenSetLike[X, CC[X]]]
extends SetFactory[CC] {
def newBuilder[A]: Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty[A])
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index b4da60a133..e8c2b239c3 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -16,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]
extends TraversableFactory[CC]
with GenericParCompanion[CC] {
- type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
/**
* A generic implementation of the `CanCombineFrom` trait, which forwards all calls to
@@ -40,4 +48,3 @@ extends TraversableFactory[CC]
-
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index dda49e1354..1a9efdf7a7 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
import scala.collection.parallel.ParMap
@@ -35,3 +43,4 @@ extends MapFactory[CC]
}
}
+
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 7c43b29bf4..4cf39f9041 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
@@ -31,3 +39,4 @@ abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] wit
+
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index f763e5fba6..5e733d8444 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -15,7 +15,7 @@ package generic
*
* @since 2.8
*/
-abstract class SeqFactory[CC[X] <: Seq[X] with GenericTraversableTemplate[X, CC]] extends TraversableFactory[CC] {
+abstract class SeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]] extends TraversableFactory[CC] {
/** This method is called in a pattern match { case Seq(...) => }.
*
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index 87760b1b82..2624e63a90 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -45,15 +45,15 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
override def lastIndexWhere(p: A => Boolean): Int = underlying.lastIndexWhere(p)
override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end)
override def reverseIterator: Iterator[A] = underlying.reverseIterator
- override def startsWith[B](that: Seq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
- override def startsWith[B](that: Seq[B]): Boolean = underlying.startsWith(that)
- override def endsWith[B](that: Seq[B]): Boolean = underlying.endsWith(that)
- override def indexOfSlice[B >: A](that: Seq[B]): Int = underlying.indexOfSlice(that)
- override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = underlying.indexOfSlice(that, from)
- override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = underlying.lastIndexOfSlice(that)
- override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
- override def containsSlice[B](that: Seq[B]): Boolean = underlying.containsSlice(that)
+ override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
+ override def startsWith[B](that: GenSeq[B]): Boolean = underlying.startsWith(that)
+ override def endsWith[B](that: GenSeq[B]): Boolean = underlying.endsWith(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B]): Int = underlying.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = underlying.indexOfSlice(that, from)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: GenSeq[B]): Boolean = underlying.containsSlice(that)
override def contains(elem: Any): Boolean = underlying.contains(elem)
- override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
+ override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
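
Since the forwarded predicates now take `GenSeq`, a sequence (or a forwarder wrapping one) can be tested directly against a parallel sequence; a hedged sketch:

    import scala.collection._

    val xs = List(1, 2, 3, 4)
    xs.startsWith(parallel.ParSeq(1, 2))                      // true
    xs.containsSlice(parallel.ParSeq(2, 3))                   // true
    xs.corresponds(parallel.ParSeq(2, 4, 6, 8))(_ * 2 == _)   // true
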
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index 8f42589500..3b8b5d3d47 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -30,7 +30,7 @@ import mutable.Builder
* @see CanBuildFrom
* @see GenericCanBuildFrom
*/
-abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
+abstract class SetFactory[CC[X] <: GenSet[X] with GenSetLike[X, CC[X]]]
extends GenericCompanion[CC] {
def newBuilder[A]: Builder[A, CC[A]]
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index e29e4fcdc9..cab38027f6 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index 1b3425d792..1e151f9212 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -55,5 +55,5 @@ trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
* @return a new $coll that contains all elements of the current $coll
* except one less occurrence of each of the elements of `elems`.
*/
- def --(xs: TraversableOnce[A]): Repr = (repr /: xs) (_ - _)
+ def --(xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ - _)
}
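
A sketch of the `--` fold above: the elements to subtract may come from a parallel collection, which is sequentialized with `seq` before the left fold removes them one by one.

    import scala.collection._

    val m  = immutable.Map(1 -> "a", 2 -> "b", 3 -> "c")
    val m2 = m -- parallel.ParSeq(1, 3)   // folds (_ - _) over ParSeq(1, 3).seq
    // m2 == Map(2 -> "b")
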
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index de42737e44..66e5d2db77 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -33,7 +33,7 @@ package generic
* @see CanBuildFrom
* @see GenericCanBuildFrom
*/
-abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]]
+abstract class TraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
/** A generic implementation of the `CanBuildFrom` trait, which forwards
diff --git a/src/library/scala/collection/immutable/GenIterable.scala b/src/library/scala/collection/immutable/GenIterable.scala
new file mode 100644
index 0000000000..217d3f9496
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenIterable.scala
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A base trait for immutable iterable collections.
+ *
+ * $possiblyparinfo
+ *
+ * $iterableInfo
+ */
+trait GenIterable[+A] extends GenTraversable[A]
+ with scala.collection.GenIterable[A]
+ with scala.collection.GenIterableLike[A, GenIterable[A]]
+ with GenericTraversableTemplate[A, GenIterable]
+{
+ def seq: Iterable[A]
+ override def companion: GenericCompanion[GenIterable] = GenIterable
+}
+
+
+object GenIterable extends TraversableFactory[GenIterable] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
+}
+
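
Every sequential immutable `Iterable` now conforms to this trait, and the parallel immutable collections are intended to as well, so `immutable.GenIterable` can serve as the common immutable abstraction; a minimal sketch of the sequential side:

    import scala.collection._

    val g: immutable.GenIterable[Int] = immutable.List(1, 2, 3)
    val s: immutable.Iterable[Int]    = g.seq   // recovers the sequential view
    (0 /: g.seq)(_ + _)                         // 6
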
diff --git a/src/library/scala/collection/immutable/GenMap.scala b/src/library/scala/collection/immutable/GenMap.scala
new file mode 100644
index 0000000000..71417d82a4
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenMap.scala
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A base trait for immutable maps.
+ * $possiblyparinfo
+ * $mapNote
+ * $mapTags
+ * @since 1.0
+ * @author Matthias Zenger
+ */
+trait GenMap[A, +B]
+extends GenIterable[(A, B)]
+ with scala.collection.GenMap[A, B]
+ with scala.collection.GenMapLike[A, B, GenMap[A, B]]
+{
+ def seq: Map[A, B]
+}
+
+
+object GenMap extends MapFactory[GenMap] {
+ def empty[A, B]: Map[A, B] = Map.empty
+
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+}
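
A sketch of using `immutable.GenMap` as a parameter type, so callers may pass a sequential immutable map today or, once the parallel immutable maps adopt the trait, a parallel one; `get` is assumed to be among the `GenMapLike` operations:

    import scala.collection._

    def lookUp(m: immutable.GenMap[String, Int], k: String): Option[Int] = m.get(k)

    lookUp(immutable.Map("a" -> 1, "b" -> 2), "b")   // Some(2)
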
diff --git a/src/library/scala/collection/immutable/GenSeq.scala b/src/library/scala/collection/immutable/GenSeq.scala
new file mode 100644
index 0000000000..d710954a18
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenSeq.scala
@@ -0,0 +1,49 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A subtrait of `collection.GenSeq` which represents immutable
+ * sequences.
+ *
+ * $possiblyparinfo
+ *
+ * $seqInfo
+ *
+ * @define Coll immutable.Seq
+ * @define coll immutable sequence
+ */
+trait GenSeq[+A] extends GenIterable[A]
+ with scala.collection.GenSeq[A]
+ with scala.collection.GenSeqLike[A, GenSeq[A]]
+ with GenericTraversableTemplate[A, GenSeq]
+{
+ def seq: Seq[A]
+ override def companion: GenericCompanion[GenSeq] = GenSeq
+}
+
+
+object GenSeq extends SeqFactory[GenSeq] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
+}
+
+
+
+
+
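
A sketch of code written once against `immutable.GenSeq`; sequential immutable sequences conform today, and parallel immutable sequences are meant to conform through the same trait:

    import scala.collection._

    def describe(xs: immutable.GenSeq[Int]): String =
      if (xs.isEmpty) "empty" else "length " + xs.length

    describe(immutable.Vector(1, 2, 3))   // "length 3"
    describe(immutable.Nil)               // "empty"
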
diff --git a/src/library/scala/collection/immutable/GenSet.scala b/src/library/scala/collection/immutable/GenSet.scala
new file mode 100644
index 0000000000..b222db90f6
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenSet.scala
@@ -0,0 +1,43 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A generic trait for immutable sets.
+ *
+ * $possiblyparinfo
+ * $setNote
+ * $setTags
+ *
+ * @since 1.0
+ * @author Matthias Zenger
+ * @define Coll immutable.Set
+ * @define coll immutable set
+ */
+trait GenSet[A] extends GenIterable[A]
+ with scala.collection.GenSet[A]
+ with scala.collection.GenSetLike[A, GenSet[A]]
+ with GenericSetTemplate[A, GenSet]
+{
+ override def companion: GenericCompanion[GenSet] = GenSet
+ def seq: Set[A]
+}
+
+
+object GenSet extends TraversableFactory[GenSet] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Set.newBuilder
+}
diff --git a/src/library/scala/collection/immutable/GenTraversable.scala b/src/library/scala/collection/immutable/GenTraversable.scala
new file mode 100644
index 0000000000..69b4d652fb
--- /dev/null
+++ b/src/library/scala/collection/immutable/GenTraversable.scala
@@ -0,0 +1,41 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+
+import generic._
+import mutable.Builder
+
+
+/** A trait for immutable traversable collections.
+ *
+ * $possiblyparinfo
+ *
+ * $traversableInfo
+ * @define mutability immutable
+ */
+trait GenTraversable[+A] extends scala.collection.GenTraversable[A]
+ with scala.collection.GenTraversableLike[A, GenTraversable[A]]
+ with GenericTraversableTemplate[A, GenTraversable]
+ with Immutable
+{
+ def seq: Traversable[A]
+ override def companion: GenericCompanion[GenTraversable] = GenTraversable
+}
+
+
+object GenTraversable extends TraversableFactory[GenTraversable] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder
+}
+
+
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index cf7b0d423a..e253cd6a05 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -33,7 +33,7 @@ import parallel.immutable.ParHashMap
* @define willNotTerminateInf
*/
@SerialVersionUID(2L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with CustomParallelizable[(A, B), ParHashMap[A, B]] with Serializable {
+class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Serializable with CustomParallelizable[(A, B), ParHashMap[A, B]] {
override def size: Int = 0
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index 7f5cb055f4..411ff38095 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -22,12 +22,15 @@ import parallel.immutable.ParIterable
* @define coll immutable iterable collection
*/
trait Iterable[+A] extends Traversable[A]
+ with GenIterable[A]
with scala.collection.Iterable[A]
with GenericTraversableTemplate[A, Iterable]
with IterableLike[A, Iterable[A]]
- with Parallelizable[A, ParIterable[A]] {
+ with Parallelizable[A, ParIterable[A]]
+{
override def companion: GenericCompanion[Iterable] = Iterable
protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `immutable.IterableLike` gets introduced, please move this there!
+ override def seq: Iterable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 84b62e7e29..cb67d22fd0 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -135,9 +135,9 @@ sealed abstract class List[+A] extends LinearSeq[A]
// Overridden methods from IterableLike and SeqLike or overloaded variants of such methods
- override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
- if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That]
+ if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.seq.toList).asInstanceOf[That]
else super.++(that)
}
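
A sketch of the optimized branch above: when the builder is a `ListBuffer`, the general argument is sequentialized and converted to a list, then spliced in with `:::`.

    import scala.collection._

    val front  = List(1, 2, 3)
    val joined = front ++ parallel.ParSeq(4, 5)   // effectively front ::: ParSeq(4, 5).seq.toList
    // joined == List(1, 2, 3, 4, 5)
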
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 660e78040e..a0f20c6e96 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -97,8 +97,8 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
*
* @param xs the traversable object.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): ListMap[A, B1] =
- ((repr: ListMap[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] =
+ ((repr: ListMap[A, B1]) /: xs.seq) (_ + _)
/** This creates a new mapping without the given <code>key</code>.
* If the map does not contain a mapping for the given key, the
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 0f040b157b..057b68d280 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -99,9 +99,9 @@ class ListSet[A] extends Set[A]
* so we take the easy way out and add ourselves and the argument to
* a new builder.
*/
- override def ++(xs: TraversableOnce[A]): ListSet[A] =
+ override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else new ListSet.ListSetBuilder(this) ++= xs result
+ else new ListSet.ListSetBuilder(this) ++= xs.seq result
private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
private[ListSet] def unchecked_outer: ListSet[A] =
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2c7c1023ed..bae686bf5b 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -26,6 +26,7 @@ import generic._
* @since 1
*/
trait Map[A, +B] extends Iterable[(A, B)]
+ with GenMap[A, B]
with scala.collection.Map[A, B]
with MapLike[A, B, Map[A, B]] { self =>
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 13054b9f83..d22adc03bc 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -82,8 +82,8 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param xs the traversable object consisting of key-value pairs.
* @return a new immutable map with the bindings of this map and those from `xs`.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
+ ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index 29beecdcc9..b08b3c9664 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -33,7 +33,7 @@ trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
override def -(key: A) = newProxy(self - key)
override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]) = newProxy(self ++ xs)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq)
override def keySet: immutable.Set[A] = new SetProxy[A] { val self = MapProxy.this.self.keySet }
override def filterKeys(p: A => Boolean) = self.filterKeys(p)
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 435959a645..2a5ba9839f 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -246,7 +246,7 @@ object Range {
NumericRange.count[Long](start, end, step, isInclusive)
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
- override def par = new ParRange(this)
+// override def par = new ParRange(this)
override def isInclusive = true
override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step)
}
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 0a370615fc..39b99e9e8b 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -23,14 +23,16 @@ import parallel.immutable.ParSeq
* @define coll immutable sequence
*/
trait Seq[+A] extends Iterable[A]
+ with GenSeq[A]
with scala.collection.Seq[A]
with GenericTraversableTemplate[A, Seq]
with SeqLike[A, Seq[A]]
- with Parallelizable[A, ParSeq[A]] {
+ with Parallelizable[A, ParSeq[A]]
+{
override def companion: GenericCompanion[Seq] = Seq
override def toSeq: Seq[A] = this
- protected[this] override def parCombiner = ParSeq.newCombiner[A] // if `immutable.SeqLike` gets introduced, please move this there!
override def seq: Seq[A] = this
+ protected[this] override def parCombiner = ParSeq.newCombiner[A] // if `immutable.SeqLike` gets introduced, please move this there!
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 33bc50ab56..e332902eb8 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -25,14 +25,16 @@ import parallel.immutable.ParSet
* @define coll immutable set
*/
trait Set[A] extends Iterable[A]
+ with GenSet[A]
with scala.collection.Set[A]
with GenericSetTemplate[A, Set]
with SetLike[A, Set[A]]
- with Parallelizable[A, ParSet[A]] {
+ with Parallelizable[A, ParSet[A]]
+{
override def companion: GenericCompanion[Set] = Set
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
- protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
override def seq: Set[A] = this
+ protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 85798f9cc9..64fa06bf2e 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -61,8 +61,8 @@ trait SortedMap[A, +B] extends Map[A, B]
*
* @param xs the traversable object.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] =
- ((repr: SortedMap[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 2a4c29c05e..7c362571c4 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -138,7 +138,7 @@ self =>
* then StreamBuilder will be chosen for the implicit.
* we recognize that fact and optimize to get more laziness.
*/
- override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[A]
if (isStreamBuilder(bf)) asThat(
if (isEmpty) that.toStream
@@ -197,7 +197,7 @@ self =>
* @return <code>f(a<sub>0</sub>) ::: ... ::: f(a<sub>n</sub>)</code> if
* this stream is <code>[a<sub>0</sub>, ..., a<sub>n</sub>]</code>.
*/
- override final def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ override final def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[B]
// optimisations are not for speed, but for functionality
// see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala)
@@ -251,7 +251,7 @@ self =>
else super.map(f)(bf)
}
- override def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
def tailFlatMap = asStream[B](tail withFilter p flatMap f)
if (isStreamBuilder(bf)) asThat(
if (isEmpty) Stream.Empty
@@ -332,7 +332,7 @@ self =>
* <code>Stream(a<sub>0</sub>, ..., a<sub>m</sub>)
* zip Stream(b<sub>0</sub>, ..., b<sub>n</sub>)</code> is invoked.
*/
- override final def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
+ override final def zip[A1 >: A, B, That](that: collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)]
if (isStreamBuilder(bf)) asThat(
if (this.isEmpty || that.isEmpty) Stream.Empty
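
A hedged sketch of the relaxed `zip` signature: the stream can be paired with any `GenIterable`, including a parallel sequence, while keeping its usual laziness on the stream side.

    import scala.collection._

    val zipped = Stream(1, 2, 3) zip parallel.ParSeq(10, 20, 30)
    zipped.toList   // List((1,10), (2,20), (3,30))
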
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 31b9284a86..7c44c1e019 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -47,20 +47,20 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.Seq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: collection.Traversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
+ protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
+ protected override def newAppended[B >: A](that: collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.TraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newZipped[B](that: collection.Iterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: collection.Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
+ protected override def newZipped[B](that: collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with ZippedAll[A1, B]
}
protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: collection.Seq[B], _replaced: Int): Transformed[B] = {
+ protected override def newPatched[B >: A](_from: Int, _patch: collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with Patched[B]
}
protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with Prepended[B]
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 06a62c2604..63d5984b11 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -37,4 +37,6 @@ final class StringOps(override val repr: String) extends StringLike[String] {
override protected[this] def newBuilder = StringBuilder.newBuilder
override def toString = repr
+
+ def seq = this.iterator
}
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 32c19ab1cd..7228642d27 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -19,10 +19,12 @@ import mutable.Builder
* @define mutability immutable
*/
trait Traversable[+A] extends scala.collection.Traversable[A]
+ with GenTraversable[A]
with GenericTraversableTemplate[A, Traversable]
with TraversableLike[A, Traversable[A]]
with Immutable {
override def companion: GenericCompanion[Traversable] = Traversable
+ override def seq: Traversable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 5543e1cba3..7552e1983c 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -110,8 +110,8 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
*
* @param xs the traversable object.
*/
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): TreeMap[A, B1] =
- ((repr: TreeMap[A, B1]) /: xs) (_ + _)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): TreeMap[A, B1] =
+ ((repr: TreeMap[A, B1]) /: xs.seq) (_ + _)
/** A new TreeMap with the entry added is returned,
* assuming that key is <em>not</em> in the TreeMap.
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 3078a26411..62f9e1dac7 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -195,8 +195,8 @@ override def companion: GenericCompanion[Vector] = Vector
// concat (stub)
- override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- super.++(that)
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
+ super.++(that.seq)
}
diff --git a/src/library/scala/collection/interfaces/IterableMethods.scala b/src/library/scala/collection/interfaces/IterableMethods.scala
index 9cc3e2c1ac..f6941c8961 100644
--- a/src/library/scala/collection/interfaces/IterableMethods.scala
+++ b/src/library/scala/collection/interfaces/IterableMethods.scala
@@ -26,13 +26,13 @@ trait IterableMethods[+A, +This <: IterableLike[A, This] with Iterable[A]] exten
// concrete
def dropRight(n: Int): Iterable[A]
def grouped(size: Int): Iterator[Iterable[A]]
- def sameElements[B >: A](that: Iterable[B]): Boolean
+ def sameElements[B >: A](that: GenIterable[B]): Boolean
def sliding[B >: A](size: Int): Iterator[Iterable[A]]
def sliding[B >: A](size: Int, step: Int): Iterator[Iterable[A]]
def takeRight(n: Int): Iterable[A]
- def zipAll[B, A1 >: A, That](that: Iterable[B], e1: A1, e2: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That
+ def zipAll[B, A1 >: A, That](that: GenIterable[B], e1: A1, e2: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That
+ def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That
override def view: IterableView[A, This]
override def view(from: Int, until: Int): IterableView[A, This]
diff --git a/src/library/scala/collection/interfaces/MapMethods.scala b/src/library/scala/collection/interfaces/MapMethods.scala
index a36a1c08f4..bc38ccdd2e 100644
--- a/src/library/scala/collection/interfaces/MapMethods.scala
+++ b/src/library/scala/collection/interfaces/MapMethods.scala
@@ -41,5 +41,5 @@ trait MapMethods[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
def mapValues[C](f: B => C): Map[A, C]
def updated [B1 >: B](key: A, value: B1): Map[A, B1]
def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1]
- def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1]
+ def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1]
}
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
index 9d8bf13a20..1f5b08d036 100644
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ b/src/library/scala/collection/interfaces/SeqMethods.scala
@@ -50,7 +50,7 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def lastIndexWhere(p: A => Boolean, end: Int): Int
def lengthCompare(len: Int): Int
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
+ def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
def permutations: Iterator[This]
def prefixLength(p: A => Boolean): Int
def reverse: This
diff --git a/src/library/scala/collection/interfaces/TraversableMethods.scala b/src/library/scala/collection/interfaces/TraversableMethods.scala
index 4a34b0b1ed..8aba39093d 100644
--- a/src/library/scala/collection/interfaces/TraversableMethods.scala
+++ b/src/library/scala/collection/interfaces/TraversableMethods.scala
@@ -18,7 +18,7 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This]] extends Traversa
self: Traversable[A] =>
// maps/iteration
- def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That
@@ -26,7 +26,7 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This]] extends Traversa
// new collections
def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
// element retrieval
def head: A
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index b72206e6f3..dcba9a52a5 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -93,6 +93,9 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
for (b <- bs) bb += b.result
bb.result
}
+
+ def seq = this.iterator
+
}
/**
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index d29ab9cd48..9a641c452e 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -269,7 +269,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
"As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
"Use ++= instead if you intend to add by side effect to an existing collection.\n"
)
- def ++(xs: TraversableOnce[A]): This = clone() ++= xs
+ def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
/** Creates a new collection with all the elements of this collection except `elem`.
*
@@ -308,5 +308,5 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
"As of 2.8, -- always creates a new collection, even on Buffers.\n"+
"Use --= instead if you intend to remove by side effect from an existing collection.\n"
)
- override def --(xs: TraversableOnce[A]): This = clone() --= xs
+ override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
}
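
A sketch of the deprecated copying `++` above: the buffer is cloned first and only the clone is mutated, via `++=` on the sequentialized argument.

    import scala.collection._

    val buf   = mutable.ArrayBuffer(1, 2)
    val grown = buf.clone() ++= parallel.ParSeq(3, 4).seq   // what the copying ++ does internally
    // buf is unchanged; grown == ArrayBuffer(1, 2, 3, 4)
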
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 3222ad1e53..22ba9989eb 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -65,7 +65,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
"Use `clone() ++=` if you intend to create a new collection.")
- override def ++(xs: TraversableOnce[A]): Buffer[A] = self.++(xs)
+ override def ++(xs: GenTraversableOnce[A]): Buffer[A] = self.++(xs)
/** Appends a number of elements provided by a traversable object.
*
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index 28bf2ceb8e..e6fbce415a 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -20,5 +20,6 @@ package mutable
@cloneable
trait Cloneable[+A <: AnyRef] {
// !!! why doesn't this extend java.lang.Cloneable?
+ // because neither did @serializable, then we changed it to Serializable
override def clone: A = super.clone().asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/GenIterable.scala b/src/library/scala/collection/mutable/GenIterable.scala
new file mode 100644
index 0000000000..60caeb14f9
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenIterable.scala
@@ -0,0 +1,37 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A base trait for iterable collections that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $iterableInfo
+ */
+trait GenIterable[A] extends GenTraversable[A]
+ with scala.collection.GenIterable[A]
+ with scala.collection.GenIterableLike[A, GenIterable[A]]
+ with GenericTraversableTemplate[A, GenIterable]
+{
+ def seq: Iterable[A]
+ override def companion: GenericCompanion[GenIterable] = GenIterable
+}
+
+
+object GenIterable extends TraversableFactory[GenIterable] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
+}
+
+
diff --git a/src/library/scala/collection/mutable/GenMap.scala b/src/library/scala/collection/mutable/GenMap.scala
new file mode 100644
index 0000000000..810fa53926
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenMap.scala
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A base trait for maps that can be mutated.
+ * $possiblyparinfo
+ * $mapNote
+ * $mapTags
+ * @since 1.0
+ * @author Matthias Zenger
+ */
+trait GenMap[A, B]
+extends GenIterable[(A, B)]
+ with scala.collection.GenMap[A, B]
+ with scala.collection.GenMapLike[A, B, GenMap[A, B]]
+{
+ def seq: Map[A, B]
+}
+
+
+object GenMap extends MapFactory[GenMap] {
+ def empty[A, B]: Map[A, B] = Map.empty
+
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+}
+
diff --git a/src/library/scala/collection/mutable/GenSeq.scala b/src/library/scala/collection/mutable/GenSeq.scala
new file mode 100644
index 0000000000..8e1f727bbb
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenSeq.scala
@@ -0,0 +1,44 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A subtrait of `collection.GenSeq` which represents sequences
+ * that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $seqInfo
+ *
+ * The class adds an `update` method to `collection.Seq`.
+ *
+ * @define Coll mutable.Seq
+ * @define coll mutable sequence
+ */
+trait GenSeq[A] extends GenIterable[A]
+ with scala.collection.GenSeq[A]
+ with scala.collection.GenSeqLike[A, GenSeq[A]]
+ with GenericTraversableTemplate[A, GenSeq]
+{
+ override def companion: GenericCompanion[GenSeq] = GenSeq
+ def seq: Seq[A]
+}
+
+
+object GenSeq extends SeqFactory[GenSeq] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
+}
+
diff --git a/src/library/scala/collection/mutable/GenSet.scala b/src/library/scala/collection/mutable/GenSet.scala
new file mode 100644
index 0000000000..d8f591d0ab
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenSet.scala
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+
+import generic._
+
+
+/** A generic trait for mutable sets.
+ *
+ * $possiblyparinfo
+ * $setNote
+ * $setTags
+ *
+ * @since 1.0
+ * @author Matthias Zenger
+ * @define Coll mutable.Set
+ * @define coll mutable set
+ */
+trait GenSet[A] extends GenIterable[A]
+ with Growable[A]
+ with scala.collection.GenSet[A]
+ with scala.collection.GenSetLike[A, GenSet[A]]
+ with GenericSetTemplate[A, GenSet]
+{
+ override def companion: GenericCompanion[GenSet] = GenSet
+ def seq: Set[A]
+}
+
+
+object GenSet extends TraversableFactory[GenSet] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder
+}
+
+
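
Because the mutable `GenSet` also mixes in `Growable`, elements can be added through the general interface regardless of the concrete set behind it (parallel mutable sets are meant to join this hierarchy as well); a hedged sketch with a sequential set:

    import scala.collection._

    def register(s: mutable.GenSet[Int], x: Int): Unit = { s += x }

    val seen = mutable.HashSet(1, 2)
    register(seen, 3)   // seen now contains 1, 2, 3
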
diff --git a/src/library/scala/collection/mutable/GenTraversable.scala b/src/library/scala/collection/mutable/GenTraversable.scala
new file mode 100644
index 0000000000..b1eec9f471
--- /dev/null
+++ b/src/library/scala/collection/mutable/GenTraversable.scala
@@ -0,0 +1,38 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+
+
+import generic._
+
+
+/** A trait for traversable collections that can be mutated.
+ *
+ * $possiblyparinfo
+ *
+ * $traversableInfo
+ * @define mutability mutable
+ */
+trait GenTraversable[A] extends scala.collection.GenTraversable[A]
+ with scala.collection.GenTraversableLike[A, GenTraversable[A]]
+ with GenericTraversableTemplate[A, GenTraversable]
+ with Mutable
+{
+ def seq: Traversable[A]
+ override def companion: GenericCompanion[GenTraversable] = GenTraversable
+}
+
+object GenTraversable extends TraversableFactory[GenTraversable] {
+ implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ def newBuilder[A] = Traversable.newBuilder
+}
+
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index 8e244d980d..e9ec25ced3 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -15,12 +15,15 @@ import parallel.mutable.ParIterable
* $iterableInfo
*/
trait Iterable[A] extends Traversable[A]
+ with GenIterable[A]
with scala.collection.Iterable[A]
with GenericTraversableTemplate[A, Iterable]
with IterableLike[A, Iterable[A]]
- with Parallelizable[A, ParIterable[A]] {
+ with Parallelizable[A, ParIterable[A]]
+{
override def companion: GenericCompanion[Iterable] = Iterable
protected[this] override def parCombiner = ParIterable.newCombiner[A] // if `mutable.IterableLike` gets introduced, please move this there!
+ override def seq: Iterable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 28e0ca2bcf..e1e6115cfe 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -21,6 +21,7 @@ import generic._
*/
trait Map[A, B]
extends Iterable[(A, B)]
+ with GenMap[A, B]
with scala.collection.Map[A, B]
with MapLike[A, B, Map[A, B]] {
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index a759933926..174c3c6528 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -21,7 +21,7 @@ package mutable
*
* @since 2.8
*/
-class MapBuilder[A, B, Coll <: scala.collection.Map[A, B] with scala.collection.MapLike[A, B, Coll]](empty: Coll)
+class MapBuilder[A, B, Coll <: scala.collection.GenMap[A, B] with scala.collection.GenMapLike[A, B, Coll]](empty: Coll)
extends Builder[(A, B), Coll] {
protected var elems: Coll = empty
def +=(x: (A, B)): this.type = {
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index d0df44b69a..b97abd6a87 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -126,8 +126,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
"As of 2.8, this operation creates a new map. To add the elements as a\n"+
"side effect to an existing map and return that map itself, use ++=."
)
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
- clone().asInstanceOf[Map[A, B1]] ++= xs
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
+ clone().asInstanceOf[Map[A, B1]] ++= xs.seq
/** Removes a key from this map, returning the value associated previously
* with that key as an option.
@@ -246,5 +246,5 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
"As of 2.8, this operation creates a new map. To remove the elements as a\n"+
"side effect to an existing map and return that map itself, use --=."
)
- override def --(xs: TraversableOnce[A]): This = clone() --= xs
+ override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
}
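With `++` and `--` widened to GenTraversableOnce, a parallel argument is now accepted and is sequentialized through `.seq` before its elements are added or removed. An illustrative sketch:

  import scala.collection._
  val m = mutable.Map(1 -> "a")
  m ++ parallel.ParSeq(2 -> "b", 3 -> "c")  // a new Map(1 -> a, 2 -> b, 3 -> c); the argument is traversed via .seq
  m -- parallel.ParSeq(1)                   // a new empty Map; keys are removed after sequentializing the argument
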
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index 3e8f405476..d32f1559c0 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -32,7 +32,7 @@ trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
- override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]) = newProxy(self ++ xs)
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]) = newProxy(self ++ xs.seq)
override def -(key: A) = newProxy(self - key)
override def += (kv: (A, B)) = { self += kv ; this }
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 0513023bfa..5aa7a3794c 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -127,7 +127,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @param xs a traversable object.
* @return a new priority queue containing elements of both `xs` and `this`.
*/
- def ++(xs: TraversableOnce[A]) = { this.clone() ++= xs }
+ def ++(xs: GenTraversableOnce[A]) = { this.clone() ++= xs.seq }
/** Adds all elements to the queue.
*
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 8e7019c755..d7f50b73ea 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -25,9 +25,10 @@ import generic._
* @define coll mutable sequence
*/
trait Seq[A] extends Iterable[A]
- with scala.collection.Seq[A]
- with GenericTraversableTemplate[A, Seq]
- with SeqLike[A, Seq[A]] {
+ with GenSeq[A]
+ with scala.collection.Seq[A]
+ with GenericTraversableTemplate[A, Seq]
+ with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
override def seq: Seq[A] = this
}
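For sequential collections `seq` is simply the identity, mirroring `par` on the parallel side. Sketch:

  val xs = scala.collection.mutable.Seq(1, 2, 3)
  xs.seq eq xs  // true: seq returns this for a sequential collection
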
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 4d315b8256..ae70378427 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -20,7 +20,8 @@ import parallel.mutable.ParSeq
trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]]
extends scala.collection.SeqLike[A, This]
with Cloneable[This]
- with Parallelizable[A, ParSeq[A]] {
+ with Parallelizable[A, ParSeq[A]]
+{
self =>
protected[this] override def parCombiner = ParSeq.newCombiner[A]
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index dba629ae67..124af46f76 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -23,6 +23,7 @@ import generic._
* @define coll mutable set
*/
trait Set[A] extends Iterable[A]
+ with GenSet[A]
with scala.collection.Set[A]
with GenericSetTemplate[A, Set]
with SetLike[A, Set[A]] {
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 985e7a3b47..855ba74f8c 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -129,7 +129,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
*/
def clear() { foreach(-=) }
- override def clone(): This = empty ++= repr
+ override def clone(): This = empty ++= repr.seq
/** The result when this set is used as a builder
* @return the set representation itself.
@@ -179,7 +179,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
"As of 2.8, this operation creates a new set. To add the elements as a\n"+
"side effect to an existing set and return that set itself, use ++=."
)
- override def ++(xs: TraversableOnce[A]): This = clone() ++= xs
+ override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
/** Creates a new set consisting of all the elements of this set except `elem`.
*
@@ -219,7 +219,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
"As of 2.8, this operation creates a new set. To remove the elements as a\n"+
"side effect to an existing set and return that set itself, use --=."
)
- override def --(xs: TraversableOnce[A]): This = clone() --= xs
+ override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
/** Send a message to this scriptable object.
*
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index f336cc6b75..5b76d94517 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -63,7 +63,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param xs the traversable object.
*/
- override def ++(xs: TraversableOnce[A]): Self = synchronized {
+ override def ++(xs: GenTraversableOnce[A]): Self = synchronized {
super.++(xs)
}
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index e1ce39a272..c945a859f3 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -66,7 +66,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.remove(elem)
}
- override def intersect(that: scala.collection.Set[A]) = synchronized {
+ override def intersect(that: scala.collection.GenSet[A]) = synchronized {
super.intersect(that)
}
@@ -74,7 +74,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.clear
}
- override def subsetOf(that: scala.collection.Set[A]) = synchronized {
+ override def subsetOf(that: scala.collection.GenSet[A]) = synchronized {
super.subsetOf(that)
}
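Since `intersect` and `subsetOf` now take a GenSet, a parallel set can be passed to a synchronized sequential set directly. A rough sketch (the mixin composition below is only illustrative):

  import scala.collection._
  val s = new mutable.HashSet[Int] with mutable.SynchronizedSet[Int]
  s ++= Seq(1, 2, 3)
  s subsetOf parallel.immutable.ParSet(1, 2, 3, 4)  // true; the argument is a GenSet
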
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index d41db458d9..64c6fb42d9 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -18,10 +18,12 @@ import generic._
* @define mutability mutable
*/
trait Traversable[A] extends scala.collection.Traversable[A]
+ with GenTraversable[A]
with GenericTraversableTemplate[A, Traversable]
with TraversableLike[A, Traversable[A]]
with Mutable {
override def companion: GenericCompanion[Traversable] = Traversable
+ override def seq: Traversable[A] = this
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index ec45a5eb90..d1453c9ce9 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -33,7 +33,7 @@ import scala.collection.generic.Sizing
* @since 2.9
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-self: EnvironmentPassingCombiner[Elem, To] =>
+//self: EnvironmentPassingCombiner[Elem, To] =>
private[collection] final val tasksupport = getTaskSupport
/** Combines the contents of the receiver builder and the `other` builder,
@@ -66,13 +66,14 @@ self: EnvironmentPassingCombiner[Elem, To] =>
}
+/*
private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
abstract override def result = {
val res = super.result
res
}
}
-
+*/
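For reference, a small sketch of the combiner contract: `combine` merges two partial results so that parallel tasks can be merged pairwise. Illustrative only:

  import scala.collection.parallel.mutable.ParArrayCombiner
  val c1 = ParArrayCombiner[Int]
  c1 += 1; c1 += 2
  val c2 = ParArrayCombiner[Int]
  c2 += 3
  (c1 combine c2).result  // ParArray(1, 2, 3)
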
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index aeed6949c7..0b5faf15ee 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -8,6 +8,7 @@
package scala.collection.parallel
+import scala.collection.GenIterable
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArrayCombiner
import scala.collection.parallel.mutable.ParArray
@@ -26,10 +27,14 @@ import scala.collection.parallel.mutable.ParArray
* @define Coll ParIterable
* @define coll parallel iterable
*/
-trait ParIterable[+T] extends Iterable[T]
- with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]] {
+trait ParIterable[+T]
+extends GenIterable[T]
+ with GenericParTemplate[T, ParIterable]
+ with ParIterableLike[T, ParIterable[T], Iterable[T]] {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
+ //protected[this] override def newBuilder = ParIterable.newBuilder[T]
+
+ def stringPrefix = "ParIterable"
}
/** $factoryInfo
@@ -41,3 +46,4 @@ object ParIterable extends ParFactory[ParIterable] {
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
}
+
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 50a36fa8ed..35020dd7e6 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -16,8 +16,13 @@ import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
import scala.collection.IterableLike
import scala.collection.Parallel
+import scala.collection.Parallelizable
import scala.collection.CustomParallelizable
import scala.collection.generic._
+import scala.collection.GenIterableLike
+import scala.collection.GenIterable
+import scala.collection.GenTraversableOnce
+import scala.collection.GenTraversable
import immutable.HashMapCombiner
import java.util.concurrent.atomic.AtomicBoolean
@@ -49,10 +54,10 @@ import annotation.unchecked.uncheckedVariance
* on the concept of splitters, which extend iterators. Every parallel collection defines:
*
* {{{
- * def parallelIterator: ParIterableIterator[T]
+ * def splitter: IterableSplitter[T]
* }}}
*
- * which returns an instance of `ParIterableIterator[T]`, which is a subtype of `Splitter[T]`.
+ * which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`.
* Parallel iterators have a method `remaining` to check the remaining number of elements,
* and method `split` which is defined by splitters. Method `split` divides the elements the
* splitter iterates over into disjoint subsets:
@@ -103,7 +108,7 @@ import annotation.unchecked.uncheckedVariance
* be overridden in concrete implementations if necessary.
*
* Since this trait extends the `Iterable` trait, methods like `size` must also
- * be implemented in concrete collections, while `iterator` forwards to `parallelIterator` by
+ * be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
*
* Each parallel collection is bound to a specific fork/join pool, on which dormant worker
@@ -157,7 +162,7 @@ import annotation.unchecked.uncheckedVariance
*
*/
trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]]
-extends IterableLike[T, Repr]
+extends GenIterableLike[T, Repr]
with CustomParallelizable[T, Repr]
with Parallel
with HasNewCombiner[T, Repr]
@@ -166,7 +171,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
import tasksupport._
- override def seq: Sequential = throw new UnsupportedOperationException("not implemented.")
+ def seq: Sequential
+
+ def repr: Repr = this.asInstanceOf[Repr]
/** Parallel iterators are split iterators that have additional accessor and
* transformer methods defined in terms of methods `next` and `hasNext`.
@@ -179,11 +186,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
* The self-type ensures that signal context passing behaviour gets mixed in
* a concrete object instance.
*/
- trait ParIterator extends ParIterableIterator[T] {
+ trait ParIterator extends IterableSplitter[T] {
me: SignalContextPassingIterator[ParIterator] =>
var signalDelegate: Signalling = IdleSignalling
def repr = self.repr
- def split: Seq[ParIterableIterator[T]]
+ def split: Seq[IterableSplitter[T]]
}
/** A stackable modification that ensures signal contexts get passed along the iterators.
@@ -204,23 +211,25 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
+ def hasDefiniteSize = true
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
* This iterator is more specific than the iterator returned by `iterator`, and augmented
* with additional accessor and transformer methods.
*
* @return a parallel iterator
*/
- def parallelIterator: ParIterableIterator[T]
+ protected[parallel] def splitter: IterableSplitter[T]
/** Creates a new split iterator used to traverse the elements of this collection.
*
- * By default, this method is implemented in terms of the protected `parallelIterator` method.
+ * By default, this method is implemented in terms of the protected `splitter` method.
*
* @return a split iterator
*/
- def iterator: Splitter[T] = parallelIterator
+ def iterator: Splitter[T] = splitter
- override def par = repr
+ override def par: Repr = repr
/** Denotes whether this parallel collection has strict splitters.
*
@@ -249,7 +258,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
- protected[this] override def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
+ //protected[this] def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
/** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour.
* The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there
@@ -319,14 +328,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- override def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
+ protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] {
+ def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass`
+ def apply() = bf.apply()
+ }
+
+ protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr]
- override def mkString(sep: String): String = seq.mkString("", sep, "")
+ def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
- override def mkString: String = seq.mkString("")
+ def mkString(sep: String): String = seq.mkString("", sep, "")
+
+ def mkString: String = seq.mkString("")
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
+ def canEqual(other: Any) = true
+
/** Reduces the elements of this sequence using the specified associative binary operator.
*
* $undefinedorder
@@ -343,7 +361,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* if this $coll is empty.
*/
def reduce[U >: T](op: (U, U) => U): U = {
- executeAndWaitResult(new Reduce(op, parallelIterator) mapResult { _.get })
+ executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get })
}
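A usage sketch for the operation documented above (the operator must be associative, and the evaluation order is unspecified):

  scala.collection.parallel.ParSeq(1, 2, 3, 4).reduce(_ + _)  // 10, possibly computed as (1 + 2) + (3 + 4)
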
/** Optionally reduces the elements of this sequence using the specified associative binary operator.
@@ -362,10 +380,6 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op))
- override def reduceLeft[U >: T](op: (U, T) => U): U = iterator.reduceLeft(op)
-
- override def reduceRight[U >: T](op: (T, U) => U): U = iterator.reduceRight(op)
-
/** Folds the elements of this sequence using the specified associative binary operator.
* The order in which the elements are reduced is unspecified and may be nondeterministic.
*
@@ -382,13 +396,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the result of applying fold operator `op` between all the elements and `z`
*/
def fold[U >: T](z: U)(op: (U, U) => U): U = {
- executeAndWaitResult(new Fold(z, op, parallelIterator))
+ executeAndWaitResult(new Fold(z, op, splitter))
}
- override def foldLeft[S](z: S)(op: (S, T) => S): S = iterator.foldLeft(z)(op)
-
- override def foldRight[S](z: S)(op: (T, S) => S): S = iterator.foldRight(z)(op)
-
/** Aggregates the results of applying an operator to subsequent elements.
*
* This is a more general form of `fold` and `reduce`. It has similar semantics, but does
@@ -418,7 +428,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param combop an associative operator used to combine results from different partitions
*/
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- executeAndWaitResult(new Aggregate(z, seqop, combop, parallelIterator))
+ executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
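A sketch of `aggregate`: `seqop` folds within a partition and `combop` merges the per-partition results:

  val words = scala.collection.parallel.ParSeq("a", "bb", "ccc")
  words.aggregate(0)((len, w) => len + w.length, _ + _)  // 6: partition sums combined with +
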
/*
@@ -430,8 +440,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
*/
- def pforeach[U](f: T => U): Unit = {
- executeAndWaitResult(new Foreach(f, parallelIterator))
+ def pareach[U](f: T => U): Unit = {
+ executeAndWaitResult(new Foreach(f, splitter))
}
*/
@@ -440,53 +450,53 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
*/
- override def foreach[U](f: T => U) = {
- executeAndWaitResult(new Foreach(f, parallelIterator))
+ def foreach[U](f: T => U) = {
+ executeAndWaitResult(new Foreach(f, splitter))
}
- override def count(p: T => Boolean): Int = {
- executeAndWaitResult(new Count(p, parallelIterator))
+ def count(p: T => Boolean): Int = {
+ executeAndWaitResult(new Count(p, splitter))
}
- override def sum[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Sum[U](num, parallelIterator))
+ def sum[U >: T](implicit num: Numeric[U]): U = {
+ executeAndWaitResult(new Sum[U](num, splitter))
}
- override def product[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Product[U](num, parallelIterator))
+ def product[U >: T](implicit num: Numeric[U]): U = {
+ executeAndWaitResult(new Product[U](num, splitter))
}
- override def min[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Min(ord, parallelIterator) mapResult { _.get }).asInstanceOf[T]
+ def min[U >: T](implicit ord: Ordering[U]): T = {
+ executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
- override def max[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Max(ord, parallelIterator) mapResult { _.get }).asInstanceOf[T]
+ def max[U >: T](implicit ord: Ordering[U]): T = {
+ executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
- override def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
+ def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
if (isEmpty) throw new UnsupportedOperationException("empty.maxBy")
reduce((x, y) => if (cmp.gteq(f(x), f(y))) x else y)
}
- override def minBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
+ def minBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
if (isEmpty) throw new UnsupportedOperationException("empty.minBy")
reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
}
- override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Map[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.map(f)(bf)
+ def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.map(f)(bf2seq(bf))
- override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Collect[S, That](pf, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.collect(pf)(bf)
+ def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
+ } otherwise seq.collect(pf)(bf2seq(bf))
- override def flatMap[S, That](f: T => TraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new FlatMap[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.flatMap(f)(bf)
+ def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.flatMap(f)(bf2seq(bf))
/** Tests whether a predicate holds for all elements of this $coll.
*
@@ -495,8 +505,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param p a predicate used to test elements
* @return true if `p` holds for all elements, false otherwise
*/
- override def forall(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Forall(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
+ def forall(pred: T => Boolean): Boolean = {
+ executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Tests whether a predicate holds for some element of this $coll.
@@ -506,8 +516,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param p a predicate used to test elements
* @return true if `p` holds for some element, false otherwise
*/
- override def exists(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Exists(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
+ def exists(pred: T => Boolean): Boolean = {
+ executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Finds some element in the collection for which the predicate holds, if such
@@ -521,30 +531,30 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param p predicate used to test the elements
* @return an option value with the element if such an element exists, or `None` otherwise
*/
- override def find(pred: T => Boolean): Option[T] = {
- executeAndWaitResult(new Find(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
+ def find(pred: T => Boolean): Option[T] = {
+ executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
protected[this] def cbfactory = {
() => newCombiner
}
- override def filter(pred: T => Boolean): Repr = {
- executeAndWaitResult(new Filter(pred, cbfactory, parallelIterator) mapResult { _.result })
+ def filter(pred: T => Boolean): Repr = {
+ executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result })
}
- override def filterNot(pred: T => Boolean): Repr = {
- executeAndWaitResult(new FilterNot(pred, cbfactory, parallelIterator) mapResult { _.result })
+ def filterNot(pred: T => Boolean): Repr = {
+ executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result })
}
- override def ++[U >: T, That](that: TraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
if (that.isParallel && bf.isParallel) {
// println("case both are parallel")
val other = that.asParIterable
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), parallelIterator)
+ val copythis = new Copy(() => pbf(repr), splitter)
val copythat = wrap {
- val othtask = new other.Copy(() => pbf(self.repr), other.parallelIterator)
+ val othtask = new other.Copy(() => pbf(self.repr), other.splitter)
tasksupport.executeAndWaitResult(othtask)
}
val task = (copythis parallel copythat) { _ combine _ } mapResult {
@@ -554,37 +564,36 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else if (bf.isParallel) {
// println("case parallel builder, `that` not parallel")
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), parallelIterator)
+ val copythis = new Copy(() => pbf(repr), splitter)
val copythat = wrap {
val cb = pbf(repr)
- for (elem <- that) cb += elem
+ for (elem <- that.seq) cb += elem
cb
}
executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.result })
} else {
// println("case not a parallel builder")
val b = bf(repr)
- this.parallelIterator.copy2builder[U, That, Builder[U, That]](b)
- if (that.isInstanceOf[Parallel]) for (elem <- that.asInstanceOf[Iterable[U]].iterator) b += elem
- else for (elem <- that) b += elem
+ this.splitter.copy2builder[U, That, Builder[U, That]](b)
+ for (elem <- that.seq) b += elem
b.result
}
}
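A sketch of the three cases above: when both the builder and the argument are parallel, both sides are copied by parallel tasks; otherwise the argument is appended after going through `.seq`:

  import scala.collection.parallel.ParSeq
  ParSeq(1, 2) ++ ParSeq(3, 4)  // ParSeq(1, 2, 3, 4), assembled from two parallel Copy tasks
  ParSeq(1, 2) ++ List(3, 4)    // ParSeq(1, 2, 3, 4); the sequential argument is traversed via .seq
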
- override def partition(pred: T => Boolean): (Repr, Repr) = {
- executeAndWaitResult(new Partition(pred, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
+ def partition(pred: T => Boolean): (Repr, Repr) = {
+ executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
}
- override def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
- executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], parallelIterator) mapResult {
+ def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
+ executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
rcb => rcb.groupByKey(cbfactory)
})
}
- override def take(n: Int): Repr = {
+ def take(n: Int): Repr = {
val actualn = if (size > n) n else size
if (actualn < MIN_FOR_COPY) take_sequential(actualn)
- else executeAndWaitResult(new Take(actualn, cbfactory, parallelIterator) mapResult {
+ else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult {
_.result
})
}
@@ -592,7 +601,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
private def take_sequential(n: Int) = {
val cb = newCombiner
cb.sizeHint(n)
- val it = parallelIterator
+ val it = splitter
var left = n
while (left > 0) {
cb += it.next
@@ -601,14 +610,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb.result
}
- override def drop(n: Int): Repr = {
+ def drop(n: Int): Repr = {
val actualn = if (size > n) n else size
if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn)
- else executeAndWaitResult(new Drop(actualn, cbfactory, parallelIterator) mapResult { _.result })
+ else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result })
}
private def drop_sequential(n: Int) = {
- val it = parallelIterator drop n
+ val it = splitter drop n
val cb = newCombiner
cb.sizeHint(size - n)
while (it.hasNext) cb += it.next
@@ -619,13 +628,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
val from = unc_from min size max 0
val until = unc_until min size max from
if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until)
- else executeAndWaitResult(new Slice(from, until, cbfactory, parallelIterator) mapResult { _.result })
+ else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result })
}
private def slice_sequential(from: Int, until: Int): Repr = {
val cb = newCombiner
var left = until - from
- val it = parallelIterator drop from
+ val it = splitter drop from
while (left > 0) {
cb += it.next
left -= 1
@@ -633,8 +642,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb.result
}
- override def splitAt(n: Int): (Repr, Repr) = {
- executeAndWaitResult(new SplitAt(n, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
+ def splitAt(n: Int): (Repr, Repr) = {
+ executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
}
/** Computes a prefix scan of the elements of the collection.
@@ -652,13 +661,20 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* @return a new $coll containing the prefix scan of the elements in this $coll
*/
- def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[Repr, U, That]): That = if (parallelismLevel > 1) {
- if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, parallelIterator) mapResult {
- tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
- cb => cb.result
- })
- }) else (cbf(self.repr) += z).result
- } else super.scanLeft(z)(op)(cbf)
+ def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf.isParallel) {
+ val cbf = bf.asParallel
+ if (parallelismLevel > 1) {
+ if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
+ tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
+ cb => cb.result
+ })
+ }) else (cbf(self.repr) += z).result
+ } else seq.scan(z)(op)(bf2seq(bf))
+ } else seq.scan(z)(op)(bf2seq(bf))
+
+ def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanLeft(z)(op)(bf2seq(bf))
+
+ def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanRight(z)(op)(bf2seq(bf))
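A sketch of the new `scan` path: given a parallel builder and more than one worker it builds a scan tree in parallel, otherwise it falls back to `seq.scan`:

  scala.collection.parallel.ParSeq(1, 2, 3).scan(0)(_ + _)  // ParSeq(0, 1, 3, 6)
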
/** Takes the longest prefix of elements that satisfy the predicate.
*
@@ -668,10 +684,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param pred the predicate used to test the elements
* @return the longest prefix of this $coll of elements that satisfy the predicate `pred`
*/
- override def takeWhile(pred: T => Boolean): Repr = {
+ def takeWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new TakeWhile(0, pred, cbfactory, parallelIterator assign cntx) mapResult { _._1.result })
+ executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result })
}
/** Splits this $coll into a prefix/suffix pair according to a predicate.
@@ -683,10 +699,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return a pair consisting of the longest prefix of the collection for which all
* the elements satisfy `pred`, and the rest of the collection
*/
- override def span(pred: T => Boolean): (Repr, Repr) = {
+ def span(pred: T => Boolean): (Repr, Repr) = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, parallelIterator assign cntx) mapResult {
+ executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult {
p => (p._1.result, p._2.result)
})
}
@@ -701,42 +717,51 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return a collection composed of all the elements after the longest prefix of elements
* in this $coll that satisfy the predicate `pred`
*/
- override def dropWhile(pred: T => Boolean): Repr = {
+ def dropWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, parallelIterator assign cntx) mapResult { _._2.result })
+ executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result })
}
- override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
- executeAndWaitResult(new CopyToArray(start, len, xs, parallelIterator))
+ def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0)
+
+ def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start)
+
+ def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
+ executeAndWaitResult(new CopyToArray(start, len, xs, splitter))
}
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
+ def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that)
+
+ def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(pbf, parallelIterator, thatseq.parallelIterator) mapResult { _.result });
- } else super.zip(that)(bf)
+ executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result });
+ } else seq.zip(that)(bf2seq(bf))
- override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
+ def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
- override def zipAll[S, U >: T, That](that: Iterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
+ def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, parallelIterator, thatseq.parallelIterator) mapResult { _.result });
- } else super.zipAll(that, thisElem, thatElem)(bf)
+ executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result });
+ } else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf))
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- executeAndWaitResult(new ToParCollection(cbf, parallelIterator) mapResult { _.result });
+ executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result });
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
- executeAndWaitResult(new ToParMap(cbf, parallelIterator)(ev) mapResult { _.result })
+ executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result })
}
- override def view = new ParIterableView[T, Repr, Sequential] {
+ def view = new ParIterableView[T, Repr, Sequential] {
protected lazy val underlying = self.repr
+ protected[this] def viewIdentifier = ""
+ protected[this] def viewIdString = ""
override def seq = self.seq.view
- def parallelIterator = self.parallelIterator
+ def splitter = self.splitter
+ def size = splitter.remaining
}
override def toArray[U >: T: ClassManifest]: Array[U] = {
@@ -751,13 +776,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toStream: Stream[T] = seq.toStream
- override def toIterator: Iterator[T] = parallelIterator
+ override def toIterator: Iterator[T] = splitter
// the methods below are overridden
override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
- override def toTraversable: Traversable[T] = this.asInstanceOf[Traversable[T]] // TODO add ParTraversable[T]
+ override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T]
override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]]
@@ -782,8 +807,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
protected trait Accessor[R, Tp]
extends StrictSplitterCheckTask[R, Tp] {
- protected[this] val pit: ParIterableIterator[T]
- protected[this] def newSubtask(p: ParIterableIterator[T]): Accessor[R, Tp]
+ protected[this] val pit: IterableSplitter[T]
+ protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel)
def split = pit.split.map(newSubtask(_)) // default split procedure
private[parallel] override def signalAbort = pit.abort
@@ -851,102 +876,102 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
- protected[this] class Foreach[S](op: T => S, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Unit, Foreach[S]] {
+ protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
def leaf(prevr: Option[Unit]) = pit.foreach(op)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Foreach[S](op, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p)
}
- protected[this] class Count(pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Int, Count] {
+ protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] {
// val pittxt = pit.toString
@volatile var result: Int = 0
def leaf(prevr: Option[Int]) = result = pit.count(pred)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Count(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Count(pred, p)
override def merge(that: Count) = result = result + that.result
// override def toString = "CountTask(" + pittxt + ")"
}
- protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Reduce[U]] {
+ protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Reduce(op, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p)
override def merge(that: Reduce[U]) =
if (this.result == None) result = that.result
else if (that.result != None) result = Some(op(result.get, that.result.get))
override def requiresStrictSplitters = true
}
- protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: ParIterableIterator[T]) extends Accessor[U, Fold[U]] {
+ protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Fold(z, op, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p)
override def merge(that: Fold[U]) = result = op(result, that.result)
}
- protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[S, Aggregate[S]] {
@volatile var result: S = null.asInstanceOf[S]
def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Aggregate(z, seqop, combop, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p)
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
- protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[U, Sum[U]] {
+ protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.sum(num)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Sum(num, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p)
override def merge(that: Sum[U]) = result = num.plus(result, that.result)
}
- protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[U, Product[U]] {
+ protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.product(num)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Product(num, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p)
override def merge(that: Product[U]) = result = num.times(result, that.result)
}
- protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Min[U]] {
+ protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Min(ord, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p)
override def merge(that: Min[U]) =
if (this.result == None) result = that.result
else if (that.result != None) result = if (ord.lteq(result.get, that.result.get)) result else that.result
override def requiresStrictSplitters = true
}
- protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Max[U]] {
+ protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Max(ord, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p)
override def merge(that: Max[U]) =
if (this.result == None) result = that.result
else if (that.result != None) result = if (ord.gteq(result.get, that.result.get)) result else that.result
override def requiresStrictSplitters = true
}
- protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Map[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf(self.repr)))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Map(f, pbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p)
override def merge(that: Map[S, That]) = result = result combine that.result
}
protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParIterableIterator[T])
+ (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Collect[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Collect(pf, pbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Collect(pf, pbf, p)
override def merge(that: Collect[S, That]) = result = result combine that.result
}
- protected[this] class FlatMap[S, That](f: T => TraversableOnce[S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class FlatMap[S, That](f: T => GenTraversableOnce[S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], FlatMap[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new FlatMap(f, pbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new FlatMap(f, pbf, p)
override def merge(that: FlatMap[S, That]) = {
//debuglog("merging " + result + " and " + that.result)
result = result combine that.result
@@ -954,67 +979,67 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Forall(pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Boolean, Forall] {
+ protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Forall(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
- protected[this] class Exists(pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Boolean, Exists] {
+ protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Exists(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
- protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: ParIterableIterator[T]) extends Accessor[Option[U], Find[U]] {
+ protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Find(pred, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
- protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Filter[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
result = pit.filter2combiner(pred, reuse(prev, cbf()))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Filter(pred, cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Filter(pred, cbf, p)
override def merge(that: Filter[U, This]) = result = result combine that.result
}
- protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], FilterNot[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
result = pit.filterNot2combiner(pred, reuse(prev, cbf()))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new FilterNot(pred, cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new FilterNot(pred, cbf, p)
override def merge(that: FilterNot[U, This]) = result = result combine that.result
}
- protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: ParIterableIterator[T])
+ protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Copy[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Copy[U, That](cfactory, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Copy[U, That](cfactory, p)
override def merge(that: Copy[U, That]) = result = result combine that.result
}
- protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Partition(pred, cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p)
override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
}
protected[this] class GroupBy[K, U >: T](
f: U => K,
mcf: () => HashMapCombiner[K, U],
- protected[this] val pit: ParIterableIterator[T]
+ protected[this] val pit: IterableSplitter[T]
) extends Transformer[HashMapCombiner[K, U], GroupBy[K, U]] {
@volatile var result: Result = null
final def leaf(prev: Option[Result]) = {
@@ -1026,7 +1051,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
result = cb
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new GroupBy(f, mcf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new GroupBy(f, mcf, p)
override def merge(that: GroupBy[K, U]) = {
// note: this works because we know that a HashMapCombiner doesn't merge same keys until evaluation
// --> we know we're not dropping any mappings
@@ -1034,13 +1059,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Take[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
result = pit.take2combiner(n, reuse(prev, cbf()))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1053,11 +1078,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Drop[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1070,11 +1095,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Slice[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1088,11 +1113,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
val sizes = pits.scanLeft(0)(_ + _.remaining)
@@ -1103,14 +1128,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class TakeWhile[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
@volatile var result: (Combiner[U, This], Boolean) = null
def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
result = pit.takeWhile2combiner(pred, reuse(prev.map(_._1), cbf()))
if (!result._2) pit.setIndexFlagIfLesser(pos)
} else result = (reuse(prev.map(_._1), cbf()), false)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p)
@@ -1122,20 +1147,20 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class Span[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: ParIterableIterator[T])
+ (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
// val lst = pit.toList
// val pa = mutable.ParArray(lst: _*)
- // val str = "At leaf we will iterate: " + pa.parallelIterator.toList
+ // val str = "At leaf we will iterate: " + pa.splitter.toList
result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised
// println("\nAt leaf result is: " + result)
if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
} else {
result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = throw new UnsupportedOperationException
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p)
@@ -1148,11 +1173,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: ParIterableIterator[T], val othpit: ParSeqIterator[S])
+ protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
val pits = pit.split
val sizes = pits.map(_.remaining)
@@ -1164,11 +1189,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class ZipAll[U >: T, S, That]
- (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: ParIterableIterator[T], val othpit: ParSeqIterator[S])
+ (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = if (pit.remaining <= len) {
val pits = pit.split
val sizes = pits.map(_.remaining)
@@ -1179,18 +1204,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
val diff = len - pit.remaining
Seq(
new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed
- new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).parallelIterator.asInstanceOf[ParIterableIterator[T]], opits(1))
+ new ZipAll(diff, thiselem, thatelem, pbf, immutable.repetition(thiselem, diff).splitter.asInstanceOf[IterableSplitter[T]], opits(1))
)
}
override def merge(that: ZipAll[U, S, That]) = result = result combine that.result
override def requiresStrictSplitters = true
}
- protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], protected[this] val pit: IterableSplitter[T])
extends Accessor[Unit, CopyToArray[U, This]] {
@volatile var result: Unit = ()
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- protected[this] def newSubtask(p: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
@@ -1201,29 +1226,29 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], ToParCollection[U, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[U, That]]) {
result = cbf()
while (pit.hasNext) result += pit.next
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new ToParCollection[U, That](cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParCollection[U, That](cbf, p)
override def merge(that: ToParCollection[U, That]) = result = result combine that.result
}
- protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: ParIterableIterator[T])(implicit ev: T <:< (K, V))
+ protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[(K, V), That]]) {
result = cbf()
while (pit.hasNext) result += pit.next
}
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new ToParMap[K, V, That](cbf, p)(ev)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new ToParMap[K, V, That](cbf, p)(ev)
override def merge(that: ToParMap[K, V, That]) = result = result combine that.result
}
- protected[this] class CreateScanTree[U >: T](from: Int, len: Int, z: U, op: (U, U) => U, protected[this] val pit: ParIterableIterator[T])
+ protected[this] class CreateScanTree[U >: T](from: Int, len: Int, z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
extends Transformer[ScanTree[U], CreateScanTree[U]] {
@volatile var result: ScanTree[U] = null
def leaf(prev: Option[ScanTree[U]]) = if (pit.remaining > 0) {
@@ -1247,7 +1272,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val half = howmany / 2
ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half))
} else trees(from)
- protected[this] def newSubtask(pit: ParIterableIterator[T]) = unsupported
+ protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
@@ -1325,7 +1350,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] case class ScanLeaf[U >: T]
- (pit: ParIterableIterator[U], op: (U, U) => U, from: Int, len: Int, var prev: Option[ScanLeaf[U]], var acc: U)
+ (pit: IterableSplitter[U], op: (U, U) => U, from: Int, len: Int, var prev: Option[ScanLeaf[U]], var acc: U)
extends ScanTree[U] {
def beginsAt = from
def pushdown(v: U) = {
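The `split` overrides above all rely on the same offset idiom: `pits zip pits.scanLeft(0)(_ + _.remaining)` pairs each subsplitter with the number of elements that precede it, which is how the position-sensitive tasks (TakeWhile, Span, CopyToArray, CreateScanTree) learn their absolute starting position. A minimal sketch of that computation using plain collections; the `Chunk` class is a made-up stand-in for a splitter, and only its remaining count matters here:

// Toy stand-in for a splitter; only the `remaining` count is relevant.
case class Chunk(remaining: Int)

object SplitOffsets {
  def main(args: Array[String]): Unit = {
    val pits = Seq(Chunk(4), Chunk(3), Chunk(5))
    // scanLeft yields the running prefix sums 0, 4, 7, 12; zipping with the
    // chunks drops the final total and leaves each chunk's start offset.
    val withOffsets = pits zip pits.scanLeft(0)(_ + _.remaining)
    withOffsets foreach { case (p, untilp) =>
      println("chunk of " + p.remaining + " elements starts at offset " + untilp)
    }
    // offsets printed: 0, 4, 7 - the same `untilp` values that the subtasks
    // above add to `pos` or `from` when they are created.
  }
}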
diff --git a/src/library/scala/collection/parallel/ParIterableView.scala b/src/library/scala/collection/parallel/ParIterableView.scala
index f8e0f9909c..50f0e4f869 100644
--- a/src/library/scala/collection/parallel/ParIterableView.scala
+++ b/src/library/scala/collection/parallel/ParIterableView.scala
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.IterableView
+import scala.collection.GenIterableView
import scala.collection.generic.CanCombineFrom
@@ -29,13 +30,14 @@ import scala.collection.generic.CanCombineFrom
*/
trait ParIterableView[+T, +Coll <: Parallel, +CollSeq]
extends ParIterableViewLike[T, Coll, CollSeq, ParIterableView[T, Coll, CollSeq], IterableView[T, CollSeq]]
+ with GenIterableView[T, Coll]
object ParIterableView {
abstract class NoCombiner[T] extends Combiner[T, Nothing] {
- self: EnvironmentPassingCombiner[T, Nothing] =>
+// self: EnvironmentPassingCombiner[T, Nothing] =>
def +=(elem: T): this.type = this
def iterator: Iterator[T] = Iterator.empty
def result() = throw new UnsupportedOperationException("ParIterableView.Combiner.result")
@@ -49,8 +51,8 @@ object ParIterableView {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterableView[T, ParIterable[T], Iterable[T]]] =
new CanCombineFrom[Coll, T, ParIterableView[T, ParIterable[T], Iterable[T]]] {
- def apply(from: Coll) = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
- def apply() = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
+ def apply(from: Coll) = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
+ def apply() = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
}
}
@@ -63,3 +65,4 @@ object ParIterableView {
+
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 41ba0624bb..1d7659922c 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -10,10 +10,17 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.{ IterableView, IterableViewLike }
+import scala.collection.{ GenIterableView, GenIterableViewLike }
+import scala.collection.GenTraversableOnce
+import scala.collection.GenTraversable
+import scala.collection.GenIterable
+import scala.collection.GenSeq
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
+
+
/** A template view of a non-strict view of parallel iterable collection.
*
* '''Note:''' Regular view traits have type parameters used to carry information
@@ -34,58 +41,64 @@ trait ParIterableViewLike[+T,
+CollSeq,
+This <: ParIterableView[T, Coll, CollSeq] with ParIterableViewLike[T, Coll, CollSeq, This, ThisSeq],
+ThisSeq <: IterableView[T, CollSeq] with IterableViewLike[T, CollSeq, ThisSeq]]
-extends IterableView[T, Coll]
- with IterableViewLike[T, Coll, This]
+extends GenIterableView[T, Coll]
+ with GenIterableViewLike[T, Coll, This]
with ParIterable[T]
with ParIterableLike[T, This, ThisSeq]
{
self =>
import tasksupport._
+ override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
+ protected[this] def viewIdentifier: String
+ protected[this] def viewIdString: String
+
+ protected def underlying: Coll
/* wrappers */
trait Transformed[+S] extends ParIterableView[S, Coll, CollSeq] with super.Transformed[S] {
- override def parallelIterator: ParIterableIterator[S]
- override def iterator = parallelIterator
+ override def splitter: IterableSplitter[S]
+ override def iterator = splitter
+ def size = splitter.remaining
}
trait Sliced extends super.Sliced with Transformed[T] {
// override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
- def parallelIterator: ParIterableIterator[T] = self.parallelIterator.slice(from, until)
+ def splitter: IterableSplitter[T] = self.splitter.slice(from, until)
override def seq = self.seq.slice(from, until)
}
trait Mapped[S] extends super.Mapped[S] with Transformed[S]{
- def parallelIterator: ParIterableIterator[S] = self.parallelIterator.map(mapping)
+ def splitter: IterableSplitter[S] = self.splitter.map(mapping)
override def seq = self.seq.map(mapping).asInstanceOf[IterableView[S, CollSeq]]
}
// only use if other is a ParIterable, otherwise force
trait Appended[U >: T] extends super.Appended[U] with Transformed[U] {
def restPar: ParIterable[U] = rest.asParIterable
- def parallelIterator: ParIterableIterator[U] = self.parallelIterator.appendParIterable[U, ParIterableIterator[U]](restPar.parallelIterator)
+ def splitter: IterableSplitter[U] = self.splitter.appendParIterable[U, IterableSplitter[U]](restPar.splitter)
override def seq = self.seq.++(rest).asInstanceOf[IterableView[U, CollSeq]]
}
trait Forced[S] extends super.Forced[S] with Transformed[S] {
def forcedPar: ParIterable[S] = forced.asParIterable
- def parallelIterator: ParIterableIterator[S] = forcedPar.parallelIterator
+ def splitter: IterableSplitter[S] = forcedPar.splitter
override def seq = forcedPar.seq.view.asInstanceOf[IterableView[S, CollSeq]]
}
// only use if other is a ParSeq, otherwise force
trait Zipped[S] extends super.Zipped[S] with Transformed[(T, S)] {
def otherPar: ParSeq[S] = other.asParSeq
- def parallelIterator: ParIterableIterator[(T, S)] = self.parallelIterator zipParSeq otherPar.parallelIterator
+ def splitter: IterableSplitter[(T, S)] = self.splitter zipParSeq otherPar.splitter
override def seq = (self.seq zip other).asInstanceOf[IterableView[(T, S), CollSeq]]
}
// only use if other is a ParSeq, otherwise force
trait ZippedAll[U >: T, S] extends super.ZippedAll[U, S] with Transformed[(U, S)] {
def otherPar: ParSeq[S] = other.asParSeq
- def parallelIterator: ParIterableIterator[(U, S)] = self.parallelIterator.zipAllParSeq(otherPar.parallelIterator, thisElem, thatElem)
+ def splitter: IterableSplitter[(U, S)] = self.splitter.zipAllParSeq(otherPar.splitter, thisElem, thatElem)
override def seq = (self.seq.zipAll(other, thisElem, thatElem)).asInstanceOf[IterableView[(U, S), CollSeq]]
}
@@ -95,11 +108,11 @@ self =>
/* operation overrides */
override def take(n: Int): This = newSliced(SliceInterval(0, n))
- override def drop(n: Int): This = newSliced(SliceInterval(n, parallelIterator.remaining))
+ override def drop(n: Int): This = newSliced(SliceInterval(n, splitter.remaining))
override def splitAt(n: Int): (This, This) = (take(n), drop(n))
override def slice(from: Int, until: Int): This = newSliced(SliceInterval(from, until))
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
- override def ++[U >: T, That](xs: TraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppendedTryParIterable(xs.toTraversable).asInstanceOf[That]
+ override def ++[U >: T, That](xs: GenTraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppendedTryParIterable(xs.toTraversable).asInstanceOf[That]
override def filter(p: T => Boolean): This = newForced(thisParSeq.filter(p)).asInstanceOf[This]
override def filterNot(p: T => Boolean): This = newForced(thisParSeq.filterNot(p)).asInstanceOf[This]
@@ -113,16 +126,16 @@ self =>
val (pref, suff) = thisParSeq.span(p)
(newForced(pref).asInstanceOf[This], newForced(suff).asInstanceOf[This])
}
- override def flatMap[S, That](f: T => TraversableOnce[S])(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.flatMap(f)).asInstanceOf[That]
+ override def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.flatMap(f)).asInstanceOf[That]
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, parallelIterator.remaining, 1, false)).asInstanceOf[That]
- override def zipAll[S, U >: T, That](that: Iterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
+ newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
+ override def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, parallelIterator).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
+ executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
@@ -131,16 +144,16 @@ self =>
/* wrapper virtual ctors */
- protected override def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
- protected override def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
- protected override def newForced[S](xs: => Seq[S]): Transformed[S] = new Forced[S] { val forced = xs }
- protected override def newAppended[U >: T](that: Traversable[U]): Transformed[U] = new Appended[U] { val rest = that }
- protected override def newDroppedWhile(p: T => Boolean) = unsupported
- protected override def newTakenWhile(p: T => Boolean) = unsupported
- protected override def newFlatMapped[S](f: T => TraversableOnce[S]) = unsupported
- protected override def newFiltered(p: T => Boolean) = unsupported
- protected override def newZipped[S](that: Iterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
- protected override def newZippedAll[U >: T, S](that: Iterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
+ protected def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
+ protected def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
+ protected def newForced[S](xs: => GenSeq[S]): Transformed[S] = new Forced[S] { val forced = xs }
+ protected def newAppended[U >: T](that: GenTraversable[U]): Transformed[U] = new Appended[U] { val rest = that }
+ protected def newDroppedWhile(p: T => Boolean) = unsupported
+ protected def newTakenWhile(p: T => Boolean) = unsupported
+ protected def newFlatMapped[S](f: T => GenTraversableOnce[S]) = unsupported
+ protected def newFiltered(p: T => Boolean) = unsupported
+ protected def newZipped[S](that: GenIterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
+ protected def newZippedAll[U >: T, S](that: GenIterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
@@ -148,31 +161,31 @@ self =>
/* argument sequence dependent ctors */
- protected def newForcedTryParIterable[S](xs: => Seq[S]): Transformed[S] = {
+ protected def newForcedTryParIterable[S](xs: => GenSeq[S]): Transformed[S] = {
if (xs.isParIterable) newForced[S](xs)
else newForced(mutable.ParArray.fromTraversables(xs))
}
- protected def newAppendedTryParIterable[U >: T](that: Traversable[U]): Transformed[U] = {
+ protected def newAppendedTryParIterable[U >: T](that: GenTraversable[U]): Transformed[U] = {
// we only append if `that` is a parallel iterable, i.e. it has a splitter
if (that.isParIterable) newAppended(that)
else newAppended(mutable.ParArray.fromTraversables(that))
}
- protected def newZippedTryParSeq[S](that: Iterable[S]): Transformed[(T, S)] = {
+ protected def newZippedTryParSeq[S](that: GenIterable[S]): Transformed[(T, S)] = {
if (that.isParSeq) newZipped[S](that)
else newZipped[S](mutable.ParArray.fromTraversables(that))
}
- protected def newZippedAllTryParSeq[S, U >: T](that: Iterable[S], thisElem: U, thatElem: S): Transformed[(U, S)] = {
+ protected def newZippedAllTryParSeq[S, U >: T](that: GenIterable[S], thisElem: U, thatElem: S): Transformed[(U, S)] = {
if (that.isParSeq) newZippedAll(that, thisElem, thatElem)
else newZippedAll(mutable.ParArray.fromTraversables(that), thisElem, thatElem)
}
/* tasks */
- protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: ParIterableIterator[T])
+ protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Force[U, That]] {
var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
- protected[this] def newSubtask(p: ParIterableIterator[T]) = new Force(cbf, p)
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Force(cbf, p)
override def merge(that: Force[U, That]) = result = result combine that.result
}
@@ -187,3 +200,4 @@ self =>
+
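The Transformed wrappers above keep parallel views non-strict: each one defines `splitter` by delegating to `self.splitter` (Mapped maps it, Sliced slices it, Zipped zips it), so no elements are computed until the view is traversed or forced. A rough sketch of the same deferral, with ordinary iterators standing in for splitters:

object NonStrictMapping {
  def main(args: Array[String]): Unit = {
    var evaluated = 0
    val source = Iterator(1, 2, 3, 4)

    // Like Mapped.splitter: wrap the underlying iterator without running the
    // mapping function yet.
    val mapped = source.map { x => evaluated += 1; x * 10 }

    println("evaluated before traversal: " + evaluated)  // 0 - nothing computed yet
    println("first element: " + mapped.next())           // 10 - forces one element
    println("evaluated after one element: " + evaluated) // 1
  }
}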
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index e6763ef83c..c696099007 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Map
+import scala.collection.GenMap
import scala.collection.mutable.Builder
import scala.collection.generic.ParMapFactory
import scala.collection.generic.GenericParMapTemplate
@@ -35,7 +36,7 @@ import scala.collection.generic.CanCombineFrom
* @since 2.9
*/
trait ParMap[K, +V]
-extends Map[K, V]
+extends GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with ParIterable[(K, V)]
with ParMapLike[K, V, ParMap[K, V], Map[K, V]]
@@ -44,7 +45,9 @@ self =>
def mapCompanion: GenericParMapCompanion[ParMap] = ParMap
- override def empty: ParMap[K, V] = new mutable.ParHashMap[K, V]
+ //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
+
+ def empty: ParMap[K, V] = new mutable.ParHashMap[K, V]
override def stringPrefix = "ParMap"
}
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 1cd5ad02d7..5b855468c4 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -13,6 +13,7 @@ package scala.collection.parallel
import scala.collection.MapLike
+import scala.collection.GenMapLike
import scala.collection.Map
import scala.collection.mutable.Builder
@@ -38,15 +39,19 @@ trait ParMapLike[K,
+V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
+Sequential <: Map[K, V] with MapLike[K, V, Sequential]]
-extends MapLike[K, V, Repr]
+extends GenMapLike[K, V, Repr]
with ParIterableLike[(K, V), Repr, Sequential]
-{ self =>
+{
+self =>
- protected[this] override def newBuilder: Builder[(K, V), Repr] = newCombiner
+ def default(key: K): V = throw new NoSuchElementException("key not found: " + key)
- protected[this] override def newCombiner: Combiner[(K, V), Repr] = unsupportedop("Must implement `newCombiner` in concrete collections.")
+ def empty: Repr
- override def empty: Repr
+ def apply(key: K) = get(key) match {
+ case Some(v) => v
+ case None => default(key)
+ }
// note - should not override toMap (could be mutable)
}
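`ParMapLike` now defines the `apply`/`default` pair itself instead of inheriting it from `MapLike`: `apply` consults `get` and falls back to `default`, which throws `NoSuchElementException`. A small sketch of that lookup contract, run against an ordinary `Map` standing in for the parallel map:

object ApplyWithDefault {
  // Mirrors the ParMapLike logic: try get, fall back to default on a miss.
  def default[K, V](key: K): V =
    throw new NoSuchElementException("key not found: " + key)

  def applyLike[K, V](m: Map[K, V], key: K): V = m.get(key) match {
    case Some(v) => v
    case None    => default(key)
  }

  def main(args: Array[String]): Unit = {
    val m = Map(1 -> "one", 2 -> "two")
    println(applyLike(m, 1))                                   // one
    try applyLike(m, 3) catch {
      case e: NoSuchElementException => println(e.getMessage)  // key not found: 3
    }
  }
}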
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index 89e77b24d7..eefd0a727b 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -16,6 +16,7 @@ import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericParTemplate
import scala.collection.generic.ParFactory
import scala.collection.generic.CanCombineFrom
+import scala.collection.GenSeq
import scala.collection.parallel.mutable.ParArrayCombiner
import scala.collection.parallel.mutable.ParArray
@@ -31,15 +32,19 @@ import scala.collection.parallel.mutable.ParArray
*
* @author Aleksandar Prokopec
*/
-trait ParSeq[+T] extends Seq[T]
+trait ParSeq[+T] extends GenSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], Seq[T]]
+{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
+ //protected[this] override def newBuilder = ParSeq.newBuilder[T]
def apply(i: Int): T
override def toString = super[ParIterable].toString
+
+ override def stringPrefix = getClass.getSimpleName
}
@@ -49,10 +54,8 @@ object ParSeq extends ParFactory[ParSeq] {
def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
-}
-
-
+}
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 189db237b9..3081acdc18 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -6,12 +6,14 @@
** |/ **
\* */
-
package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.SeqLike
+import scala.collection.GenSeqLike
+import scala.collection.GenSeq
+import scala.collection.GenIterable
import scala.collection.generic.DefaultSignalling
import scala.collection.generic.AtomicIndexFlag
import scala.collection.generic.CanBuildFrom
@@ -46,12 +48,12 @@ import scala.collection.generic.VolatileAbort
* @since 2.9
*/
trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, Sequential]]
-extends scala.collection.SeqLike[T, Repr]
+extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
import tasksupport._
- type SuperParIterator = ParIterableIterator[T]
+ type SuperParIterator = IterableSplitter[T]
/** An iterator that can be split into arbitrary subsets of iterators.
* The self-type requirement ensures that the signal context passing behaviour gets mixed in
@@ -60,14 +62,14 @@ self =>
* '''Note:''' In concrete collection classes, collection implementers might want to override the iterator
* `reverse2builder` method to ensure higher efficiency.
*/
- trait ParIterator extends ParSeqIterator[T] with super.ParIterator {
+ trait ParIterator extends SeqSplitter[T] with super.ParIterator {
me: SignalContextPassingIterator[ParIterator] =>
def split: Seq[ParIterator]
def psplit(sizes: Int*): Seq[ParIterator]
}
/** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParallelIterator` ensures that this trait gets mixed into
+ * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
* concrete iterators.
*/
trait SignalContextPassingIterator[+IterRepr <: ParIterator]
@@ -87,9 +89,9 @@ self =>
*
* @return an iterator that can be split into subsets of precise size
*/
- def parallelIterator: ParSeqIterator[T]
+ protected[parallel] def splitter: SeqSplitter[T]
- override def iterator: PreciseSplitter[T] = parallelIterator
+ override def iterator: PreciseSplitter[T] = splitter
override def size = length
@@ -139,14 +141,14 @@ self =>
* @return the length of the longest segment of elements starting at `from` and
* satisfying the predicate
*/
- override def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else {
+ def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else {
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new SegmentLength(p, 0, parallelIterator.psplit(realfrom, length - realfrom)(1) assign ctx))._1
+ executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1
}
- override def prefixLength(p: T => Boolean) = segmentLength(p, 0)
+ def prefixLength(p: T => Boolean) = segmentLength(p, 0)
/** Finds the first element satisfying some predicate.
*
@@ -159,20 +161,20 @@ self =>
* @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists
*/
- override def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else {
+ def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else {
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new IndexWhere(p, realfrom, parallelIterator.psplit(realfrom, length - realfrom)(1) assign ctx))
+ executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))
}
- override def indexWhere(p: T => Boolean): Int = indexWhere(p, 0)
+ def indexWhere(p: T => Boolean): Int = indexWhere(p, 0)
- override def findIndexOf(p: T => Boolean): Int = indexWhere(p, 0)
+ def findIndexOf(p: T => Boolean): Int = indexWhere(p, 0)
- override def indexOf[U >: T](elem: U): Int = indexOf(elem, 0)
+ def indexOf[U >: T](elem: U): Int = indexOf(elem, 0)
- override def indexOf[U >: T](elem: U, from: Int): Int = indexWhere(elem ==, from)
+ def indexOf[U >: T](elem: U, from: Int): Int = indexWhere(elem ==, from)
/** Finds the last element satisfying some predicate.
*
@@ -185,22 +187,22 @@ self =>
* @return the index `<= end` of the first element of this $coll that satisfies the predicate `p`,
* or `-1`, if none exists
*/
- override def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else {
+ def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else {
val until = if (end >= length) length else end + 1
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MinValue)
- executeAndWaitResult(new LastIndexWhere(p, 0, parallelIterator.psplit(until, length - until)(0) assign ctx))
+ executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx))
}
- override def reverse: Repr = {
- executeAndWaitResult(new Reverse(() => newCombiner, parallelIterator) mapResult { _.result })
+ def reverse: Repr = {
+ executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.result })
}
- override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new ReverseMap[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.reverseMap(f)(bf)
+ def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.reverseMap(f)(bf2seq(bf))
- override def startsWith[S](that: Seq[S]): Boolean = startsWith(that, 0)
+ def startsWith[S](that: GenSeq[S]): Boolean = startsWith(that, 0)
/** Tests whether this $coll contains the given sequence at a given index.
*
@@ -211,20 +213,20 @@ self =>
* @param offset the starting offset for the search
* @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise
*/
- override def startsWith[S](that: Seq[S], offset: Int): Boolean = that ifParSeq { pthat =>
+ def startsWith[S](that: GenSeq[S], offset: Int): Boolean = that ifParSeq { pthat =>
if (offset < 0 || offset >= length) offset == length && pthat.length == 0
else if (pthat.length == 0) true
else if (pthat.length > length - offset) false
else {
val ctx = new DefaultSignalling with VolatileAbort
- executeAndWaitResult(new SameElements(parallelIterator.psplit(offset, pthat.length)(1) assign ctx, pthat.parallelIterator))
+ executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter))
}
- } otherwise super.startsWith(that, offset)
+ } otherwise seq.startsWith(that, offset)
- override def sameElements[U >: T](that: Iterable[U]): Boolean = that ifParSeq { pthat =>
+ override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new SameElements(parallelIterator assign ctx, pthat.parallelIterator))
- } otherwise super.sameElements(that)
+ length == pthat.length && executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
+ } otherwise seq.sameElements(that)
/** Tests whether this $coll ends with the given parallel sequence
*
@@ -234,65 +236,65 @@ self =>
* @param that the sequence to test
* @return `true` if this $coll has `that` as a suffix, `false` otherwise
*/
- override def endsWith[S](that: Seq[S]): Boolean = that ifParSeq { pthat =>
+ def endsWith[S](that: GenSeq[S]): Boolean = that ifParSeq { pthat =>
if (that.length == 0) true
else if (that.length > length) false
else {
val ctx = new DefaultSignalling with VolatileAbort
val tlen = that.length
- executeAndWaitResult(new SameElements(parallelIterator.psplit(length - tlen, tlen)(1) assign ctx, pthat.parallelIterator))
+ executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter))
}
- } otherwise super.endsWith(that)
+ } otherwise seq.endsWith(that)
- override def patch[U >: T, That](from: Int, patch: Seq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
val realreplaced = replaced min (length - from)
if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
val that = patch.asParSeq
val pbf = bf.asParallel
- val pits = parallelIterator.psplit(from, replaced, length - from - realreplaced)
+ val pits = splitter.psplit(from, replaced, length - from - realreplaced)
val copystart = new Copy[U, That](() => pbf(repr), pits(0))
val copymiddle = wrap {
- val tsk = new that.Copy[U, That](() => pbf(repr), that.parallelIterator)
+ val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter)
tasksupport.executeAndWaitResult(tsk)
}
val copyend = new Copy[U, That](() => pbf(repr), pits(2))
executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
_.result
})
- } else patch_sequential(from, patch, replaced)
+ } else patch_sequential(from, patch.seq, replaced)
}
private def patch_sequential[U >: T, That](fromarg: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
val from = 0 max fromarg
val b = bf(repr)
val repl = (r min (length - from)) max 0
- val pits = parallelIterator.psplit(from, repl, length - from - repl)
+ val pits = splitter.psplit(from, repl, length - from - repl)
b ++= pits(0)
- b ++= patch.iterator
+ b ++= patch
b ++= pits(2)
b.result
}
- override def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Updated(index, elem, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.updated(index, elem)
+ def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = bf ifParallel { pbf =>
+ executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
+ } otherwise seq.updated(index, elem)(bf2seq(bf))
- override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
patch(0, mutable.ParArray(elem), 0)
}
- override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
+ def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
patch(length, mutable.ParArray(elem), 0)
}
- override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
+ def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
patch(length, new immutable.Repetition(elem, len - length), 0)
} else patch(length, Nil, 0);
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(length min thatseq.length, pbf, parallelIterator, thatseq.parallelIterator) mapResult { _.result });
+ executeAndWaitResult(new Zip(length min thatseq.length, pbf, splitter, thatseq.splitter) mapResult { _.result });
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
@@ -307,10 +309,46 @@ self =>
* `p(x, y)` is `true` for all corresponding elements `x` of this $coll
* and `y` of `that`, otherwise `false`
*/
- override def corresponds[S](that: Seq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
+ def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new Corresponds(p, parallelIterator assign ctx, pthat.parallelIterator))
- } otherwise super.corresponds(that)(p)
+ length == pthat.length && executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
+ } otherwise seq.corresponds(that)(p)
+
+ def diff[U >: T](that: GenSeq[U]): Repr = sequentially {
+ _ diff that
+ }
+
+ /** Computes the multiset intersection between this $coll and another sequence.
+ * $mayNotTerminateInf
+ *
+ * @param that the sequence of elements to intersect with.
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ * @usecase def intersect(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ */
+ def intersect[U >: T](that: GenSeq[U]) = sequentially {
+ _ intersect that
+ }
+
+ /** Builds a new $coll from this $coll without any duplicate elements.
+ * $willNotTerminateInf
+ *
+ * @return A new $coll which contains the first occurrence of every element of this $coll.
+ */
+ def distinct: Repr = sequentially {
+ _.distinct
+ }
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
@@ -318,25 +356,25 @@ self =>
override def view = new ParSeqView[T, Repr, Sequential] {
protected lazy val underlying = self.repr
+ protected[this] def viewIdentifier = ""
+ protected[this] def viewIdString = ""
def length = self.length
def apply(idx: Int) = self(idx)
override def seq = self.seq.view
- def parallelIterator = self.parallelIterator
+ def splitter = self.splitter
}
- override def view(from: Int, until: Int) = view.slice(from, until)
-
/* tasks */
- protected[this] def down(p: ParIterableIterator[_]) = p.asInstanceOf[ParSeqIterator[T]]
+ protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]]
protected trait Accessor[R, Tp] extends super.Accessor[R, Tp] {
- protected[this] val pit: ParSeqIterator[T]
+ protected[this] val pit: SeqSplitter[T]
}
protected trait Transformer[R, Tp] extends Accessor[R, Tp] with super.Transformer[R, Tp]
- protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: ParSeqIterator[T])
+ protected[this] class SegmentLength(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T])
extends Accessor[(Int, Boolean), SegmentLength] {
@volatile var result: (Int, Boolean) = null
def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) {
@@ -354,7 +392,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: ParSeqIterator[T])
+ protected[this] class IndexWhere(pred: T => Boolean, from: Int, protected[this] val pit: SeqSplitter[T])
extends Accessor[Int, IndexWhere] {
@volatile var result: Int = -1
def leaf(prev: Option[Int]) = if (from < pit.indexFlag) {
@@ -375,7 +413,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: ParSeqIterator[T])
+ protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, protected[this] val pit: SeqSplitter[T])
extends Accessor[Int, LastIndexWhere] {
@volatile var result: Int = -1
def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) {
@@ -396,7 +434,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, This], Reverse[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf()))
@@ -404,7 +442,7 @@ self =>
override def merge(that: Reverse[U, This]) = result = that.result combine result
}
- protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[S, That], ReverseMap[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf(self.repr))
@@ -412,7 +450,7 @@ self =>
override def merge(that: ReverseMap[S, That]) = result = that.result combine result
}
- protected[this] class SameElements[U >: T](protected[this] val pit: ParSeqIterator[T], val otherpit: PreciseSplitter[U])
+ protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U])
extends Accessor[Boolean, SameElements[U]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -429,7 +467,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf(self.repr))
@@ -442,7 +480,7 @@ self =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](len: Int, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: ParSeqIterator[T], val otherpit: ParSeqIterator[S])
+ protected[this] class Zip[U >: T, S, That](len: Int, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, pbf(self.repr))
@@ -460,7 +498,7 @@ self =>
override def merge(that: Zip[U, S, That]) = result = result combine that.result
}
- protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: ParSeqIterator[T], val otherpit: PreciseSplitter[S])
+ protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S])
extends Accessor[Boolean, Corresponds[S]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
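`diff`, `intersect` and `distinct` above are not parallelized; they are routed through `sequentially { ... }`, which, judging from its use here, applies the corresponding operation of the sequential counterpart and rebuilds a collection of the same kind. The helper itself is not shown in this hunk, so the following is only a conceptual sketch of that fallback shape, with `Vector` standing in for the parallel collection and `sequentiallyLike` being an illustrative name:

object SequentialFallback {
  // Illustrative only: run an order-sensitive operation on the sequential
  // form of the data and return a collection of the same kind.
  def sequentiallyLike[T](elems: Vector[T])(op: Seq[T] => Seq[T]): Vector[T] =
    Vector(op(elems): _*)

  def main(args: Array[String]): Unit = {
    val xs = Vector(1, 2, 2, 3, 4, 4)
    println(sequentiallyLike(xs)(_.distinct))                // Vector(1, 2, 3, 4)
    println(sequentiallyLike(xs)(_ diff Seq(2, 4)))          // Vector(1, 2, 3, 4)
    println(sequentiallyLike(xs)(_ intersect Seq(2, 4, 4)))  // Vector(2, 4, 4)
  }
}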
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
index 6ac24668ca..8461390839 100644
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ b/src/library/scala/collection/parallel/ParSeqView.scala
@@ -36,7 +36,7 @@ extends ParSeqViewLike[T, Coll, CollSeq, ParSeqView[T, Coll, CollSeq], SeqView[T
object ParSeqView {
abstract class NoCombiner[T] extends Combiner[T, Nothing] {
- self: EnvironmentPassingCombiner[T, Nothing] =>
+// self: EnvironmentPassingCombiner[T, Nothing] =>
def +=(elem: T): this.type = this
def iterator: Iterator[T] = Iterator.empty
def result() = throw new UnsupportedOperationException("ParSeqView.Combiner.result")
@@ -50,8 +50,8 @@ object ParSeqView {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeqView[T, ParSeq[T], Seq[T]]] =
new CanCombineFrom[Coll, T, ParSeqView[T, ParSeq[T], Seq[T]]] {
- def apply(from: Coll) = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
- def apply() = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
+ def apply(from: Coll) = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
+ def apply() = new NoCombiner[T] {} // was: with EnvironmentPassingCombiner[T, Nothing]
}
}
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 3b57e2009d..b7ad227f11 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -10,6 +10,11 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.{ SeqView, SeqViewLike }
+import scala.collection.{ GenSeqView, GenSeqViewLike }
+import scala.collection.GenSeq
+import scala.collection.GenIterable
+import scala.collection.GenTraversable
+import scala.collection.GenTraversableOnce
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
@@ -29,8 +34,8 @@ trait ParSeqViewLike[+T,
+CollSeq,
+This <: ParSeqView[T, Coll, CollSeq] with ParSeqViewLike[T, Coll, CollSeq, This, ThisSeq],
+ThisSeq <: SeqView[T, CollSeq] with SeqViewLike[T, CollSeq, ThisSeq]]
-extends SeqView[T, Coll]
- with SeqViewLike[T, Coll, This]
+extends GenSeqView[T, Coll]
+ with GenSeqViewLike[T, Coll, This]
with ParIterableView[T, Coll, CollSeq]
with ParIterableViewLike[T, Coll, CollSeq, This, ThisSeq]
with ParSeq[T]
@@ -40,52 +45,54 @@ self =>
import tasksupport._
trait Transformed[+S] extends ParSeqView[S, Coll, CollSeq]
- with super[ParIterableView].Transformed[S] with super[SeqView].Transformed[S] {
- override def parallelIterator: ParSeqIterator[S]
- override def iterator = parallelIterator
+ with super[ParIterableView].Transformed[S] with super[GenSeqViewLike].Transformed[S] {
+ override def splitter: SeqSplitter[S]
+ override def iterator = splitter
+ override def size = length
}
- trait Sliced extends super[SeqViewLike].Sliced with super[ParIterableViewLike].Sliced with Transformed[T] {
+ trait Sliced extends super[GenSeqViewLike].Sliced with super[ParIterableViewLike].Sliced with Transformed[T] {
// override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
- override def parallelIterator = self.parallelIterator.psplit(from, until - from)(1)
+ override def splitter = self.splitter.psplit(from, until - from)(1)
+ override def seq = self.seq.slice(from, until)
}
- trait Mapped[S] extends super[SeqViewLike].Mapped[S] with super[ParIterableViewLike].Mapped[S] with Transformed[S] {
- override def parallelIterator = self.parallelIterator.map(mapping)
+ trait Mapped[S] extends super[GenSeqViewLike].Mapped[S] with super[ParIterableViewLike].Mapped[S] with Transformed[S] {
+ override def splitter = self.splitter.map(mapping)
override def seq = self.seq.map(mapping).asInstanceOf[SeqView[S, CollSeq]]
}
- trait Appended[U >: T] extends super[SeqViewLike].Appended[U] with super[ParIterableViewLike].Appended[U] with Transformed[U] {
+ trait Appended[U >: T] extends super[GenSeqViewLike].Appended[U] with super[ParIterableViewLike].Appended[U] with Transformed[U] {
override def restPar: ParSeq[U] = rest.asParSeq
- override def parallelIterator = self.parallelIterator.appendParSeq[U, ParSeqIterator[U]](restPar.parallelIterator)
+ override def splitter = self.splitter.appendParSeq[U, SeqSplitter[U]](restPar.splitter)
override def seq = self.seq.++(rest).asInstanceOf[SeqView[U, CollSeq]]
}
- trait Forced[S] extends super[SeqViewLike].Forced[S] with super[ParIterableViewLike].Forced[S] with Transformed[S] {
+ trait Forced[S] extends super[GenSeqViewLike].Forced[S] with super[ParIterableViewLike].Forced[S] with Transformed[S] {
override def forcedPar: ParSeq[S] = forced.asParSeq
- override def parallelIterator: ParSeqIterator[S] = forcedPar.parallelIterator
+ override def splitter: SeqSplitter[S] = forcedPar.splitter
override def seq = forcedPar.seq.view.asInstanceOf[SeqView[S, CollSeq]]
}
- trait Zipped[S] extends super[SeqViewLike].Zipped[S] with super[ParIterableViewLike].Zipped[S] with Transformed[(T, S)] {
- override def parallelIterator = self.parallelIterator zipParSeq otherPar.parallelIterator
+ trait Zipped[S] extends super[GenSeqViewLike].Zipped[S] with super[ParIterableViewLike].Zipped[S] with Transformed[(T, S)] {
+ override def splitter = self.splitter zipParSeq otherPar.splitter
override def seq = (self.seq zip other).asInstanceOf[SeqView[(T, S), CollSeq]]
}
- trait ZippedAll[U >: T, S] extends super[SeqViewLike].ZippedAll[U, S] with super[ParIterableViewLike].ZippedAll[U, S] with Transformed[(U, S)] {
- override def parallelIterator: ParSeqIterator[(U, S)] = self.parallelIterator.zipAllParSeq(otherPar.parallelIterator, thisElem, thatElem)
+ trait ZippedAll[U >: T, S] extends super[GenSeqViewLike].ZippedAll[U, S] with super[ParIterableViewLike].ZippedAll[U, S] with Transformed[(U, S)] {
+ override def splitter: SeqSplitter[(U, S)] = self.splitter.zipAllParSeq(otherPar.splitter, thisElem, thatElem)
override def seq = (self.seq.zipAll(other, thisElem, thatElem)).asInstanceOf[SeqView[(U, S), CollSeq]]
}
trait Reversed extends super.Reversed with Transformed[T] {
- override def parallelIterator: ParSeqIterator[T] = self.parallelIterator.reverse
+ override def splitter: SeqSplitter[T] = self.splitter.reverse
override def seq = self.seq.reverse.asInstanceOf[SeqView[T, CollSeq]]
}
// use only with ParSeq patches, otherwise force
trait Patched[U >: T] extends super.Patched[U] with Transformed[U] {
def patchPar: ParSeq[U] = patch.asInstanceOf[ParSeq[U]]
- override def parallelIterator: ParSeqIterator[U] = self.parallelIterator.patchParSeq[U](from, patchPar.parallelIterator, replaced)
+ override def splitter: SeqSplitter[U] = self.splitter.patchParSeq[U](from, patchPar.splitter, replaced)
override def seq = self.seq.patch(from, patch, replaced).asInstanceOf[SeqView[U, CollSeq]]
}
@@ -97,29 +104,29 @@ self =>
/* wrapper virtual ctors */
protected override def newSliced(_endpoints: SliceInterval): Transformed[T] = new { val endpoints = _endpoints } with Sliced
- protected override def newAppended[U >: T](that: Traversable[U]): Transformed[U] = {
+ protected override def newAppended[U >: T](that: GenTraversable[U]): Transformed[U] = {
// we only append if `that` is a parallel sequence, i.e. it has a precise splitter
if (that.isParSeq) new Appended[U] { val rest = that }
else newForced(mutable.ParArray.fromTraversables(this, that))
}
- protected override def newForced[S](xs: => Seq[S]): Transformed[S] = {
+ protected override def newForced[S](xs: => GenSeq[S]): Transformed[S] = {
if (xs.isParSeq) new Forced[S] { val forced = xs }
else new Forced[S] { val forced = mutable.ParArray.fromTraversables(xs) }
}
protected override def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
- protected override def newZipped[S](that: Iterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
- protected override def newZippedAll[U >: T, S](that: Iterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
+ protected override def newZipped[S](that: GenIterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
+ protected override def newZippedAll[U >: T, S](that: GenIterable[S], _thisElem: U, _thatElem: S): Transformed[(U, S)] = new ZippedAll[U, S] {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
}
- protected override def newReversed: Transformed[T] = new Reversed { }
- protected override def newPatched[U >: T](_from: Int, _patch: Seq[U], _replaced: Int): Transformed[U] = new {
+ protected def newReversed: Transformed[T] = new Reversed { }
+ protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new {
val from = _from;
val patch = _patch;
val replaced = _replaced
} with Patched[U]
- protected override def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
+ protected def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
/* operation overrides */
@@ -130,15 +137,15 @@ self =>
override def splitAt(n: Int): (This, This) = (take(n), drop(n))
/* appended */
- override def ++[U >: T, That](xs: TraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppended(xs.toTraversable).asInstanceOf[That]
+ override def ++[U >: T, That](xs: GenTraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppended(xs.toTraversable).asInstanceOf[That]
override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = ++(Iterator.single(elem))(bf)
- override def union[U >: T, That](that: Seq[U])(implicit bf: CanBuildFrom[This, U, That]): That = this ++ that
+ //override def union[U >: T, That](that: GenSeq[U])(implicit bf: CanBuildFrom[This, U, That]): That = this ++ that
/* misc */
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
- override def zip[U >: T, S, That](that: Iterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, parallelIterator.remaining, 1, false)).asInstanceOf[That]
+ newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
override def reverse: This = newReversed.asInstanceOf[This]
override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f)
@@ -149,18 +156,18 @@ self =>
}
override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(length, Seq.fill(len - length)(elem), 0)
override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(0, mutable.ParArray.fromTraversables(Iterator.single(elem)), 0)
- override def patch[U >: T, That](from: Int, patch: Seq[U], replace: Int)(implicit bf: CanBuildFrom[This, U, That]): That = newPatched(from, patch, replace).asInstanceOf[That]
+ override def patch[U >: T, That](from: Int, patch: GenSeq[U], replace: Int)(implicit bf: CanBuildFrom[This, U, That]): That = newPatched(from, patch, replace).asInstanceOf[That]
/* forced */
- override def diff[U >: T](that: Seq[U]): This = newForced(thisParSeq diff that).asInstanceOf[This]
- override def intersect[U >: T](that: Seq[U]): This = newForced(thisParSeq intersect that).asInstanceOf[This]
- override def sorted[U >: T](implicit ord: Ordering[U]): This = newForced(thisParSeq sorted ord).asInstanceOf[This]
+ // override def diff[U >: T](that: GenSeq[U]): This = newForced(thisParSeq diff that).asInstanceOf[This]
+ // override def intersect[U >: T](that: GenSeq[U]): This = newForced(thisParSeq intersect that).asInstanceOf[This]
+ // override def sorted[U >: T](implicit ord: Ordering[U]): This = newForced(thisParSeq sorted ord).asInstanceOf[This]
override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[This, S, That]): That = filter(pf.isDefinedAt).map(pf)(bf)
override def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanLeft(z)(op)).asInstanceOf[That]
override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanRight(z)(op)).asInstanceOf[That]
override def groupBy[K](f: T => K): immutable.ParMap[K, This] = thisParSeq.groupBy(f).map(kv => (kv._1, newForced(kv._2).asInstanceOf[This]))
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, parallelIterator).mapResult(_.result).asInstanceOf[Task[That, _]])
+ executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
@@ -169,7 +176,7 @@ self =>
/* tasks */
- protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: ParSeqIterator[T])
+ protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, That], Force[U, That]] {
var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index f3dab8faa9..151433405e 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -16,6 +16,7 @@ package scala.collection.parallel
import scala.collection.Set
+import scala.collection.GenSet
import scala.collection.mutable.Builder
import scala.collection.generic._
@@ -34,7 +35,7 @@ import scala.collection.generic._
* @since 2.9
*/
trait ParSet[T]
-extends Set[T]
+extends GenSet[T]
with GenericParTemplate[T, ParSet]
with ParIterable[T]
with ParSetLike[T, ParSet[T], Set[T]]
@@ -42,6 +43,8 @@ extends Set[T]
self =>
override def empty: ParSet[T] = mutable.ParHashSet[T]()
+ //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T]
+
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def stringPrefix = "ParSet"
@@ -83,19 +86,3 @@ object ParSet extends ParSetFactory[ParSet] {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 9e769f425b..3728158c27 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -12,6 +12,8 @@ package scala.collection.parallel
import scala.collection.SetLike
+import scala.collection.GenSetLike
+import scala.collection.GenSet
import scala.collection.Set
import scala.collection.mutable.Builder
@@ -35,49 +37,23 @@ import scala.collection.mutable.Builder
trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: Set[T] with SetLike[T, Sequential]]
-extends SetLike[T, Repr]
+extends GenSetLike[T, Repr]
with ParIterableLike[T, Repr, Sequential]
{ self =>
- protected[this] override def newBuilder: Builder[T, Repr] = newCombiner
-
- protected[this] override def newCombiner: Combiner[T, Repr]
-
- override def empty: Repr
+ def empty: Repr
// note: should not override toSet (could be mutable)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ def union(that: GenSet[T]): Repr = sequentially {
+ _ union that
+ }
+ def diff(that: GenSet[T]): Repr = sequentially {
+ _ diff that
+ }
+}
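The union and diff definitions above delegate to the sequential counterpart through a sequentially block: convert to the sequential set, apply the operation, and wrap the result up as a parallel collection again. A minimal standalone sketch of that delegation pattern, with a plain immutable HashSet standing in for both representations (ParLike and its sequentially helper are invented names, not library API):

import scala.collection.immutable.HashSet

final case class ParLike[T](elems: HashSet[T]) {
  // run an operation on the sequential view, then wrap the result again
  private def sequentially(op: HashSet[T] => HashSet[T]): ParLike[T] =
    ParLike(op(elems))

  def union(that: Set[T]): ParLike[T] = sequentially { _ union that }
  def diff(that: Set[T]): ParLike[T]  = sequentially { _ diff that }
}

object ParLikeDemo {
  def main(args: Array[String]): Unit = {
    val ps = ParLike(HashSet(1, 2, 3))
    println(ps.union(Set(3, 4)).elems) // contains 1, 2, 3, 4
    println(ps.diff(Set(2)).elems)     // contains 1, 3
  }
}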
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
new file mode 100644
index 0000000000..6a652bbeca
--- /dev/null
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -0,0 +1,64 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.parallel
+
+
+import scala.collection.Seq
+
+
+/** A precise splitter (or a precise split iterator) can be split into an arbitrary number of splitters
+ * that traverse disjoint subsets of arbitrary sizes.
+ *
+ * Implementors might want to override the parameterless `split` method for efficiency.
+ *
+ * @tparam T type of the elements this splitter traverses
+ *
+ * @since 2.9
+ * @author Aleksandar Prokopec
+ */
+trait PreciseSplitter[+T] extends Splitter[T] {
+
+ /** Splits the splitter into disjoint views.
+ *
+ * This overloaded version of the `split` method is specific to precise splitters.
+ * It returns a sequence of splitters, each iterating some subset of the
+ * elements in this splitter. The sizes of the subsplitters in the partition are equal to
+ * the corresponding arguments, as long as there are enough elements in this
+ * splitter to split it that way.
+ *
+ * If there aren't enough elements, an empty splitter is appended for each additional argument.
+ * If there are leftover elements, one additional splitter is appended at the end to hold them.
+ *
+ * For example, say we have a splitter `ps` with 100 elements. Invoking:
+ * {{{
+ * ps.split(50, 25, 25, 10, 5)
+ * }}}
+ * will return a sequence of five splitters, the last two of them being empty. On the other hand, calling:
+ * {{{
+ * ps.split(50, 40)
+ * }}}
+ * will return a sequence of three splitters, the last of them containing ten elements.
+ *
+ * '''Note:''' this method actually invalidates the current splitter.
+ *
+ * Unlike the splitters returned by the parameterless `split`, the splitters returned by this method can be empty.
+ *
+ * @param sizes the sizes used to split this split iterator into iterators that traverse disjoint subsets
+ * @return a sequence of disjoint subsequence iterators of this parallel iterator
+ */
+ def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
+
+ def split: Seq[PreciseSplitter[T]]
+
+}
+
+
+
+
+
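The size bookkeeping described in the psplit scaladoc above can be stated as a small standalone function. psplitSizes is an invented helper used only to illustrate the documented contract: missing elements produce trailing empty splitters, surplus elements produce one extra splitter at the end.

object PsplitSizes {
  // sizes of the subsplitters that psplit(sizes: _*) would produce for a
  // splitter with `total` remaining elements
  def psplitSizes(total: Int, sizes: Int*): Seq[Int] = {
    var left = total
    val taken = sizes.toVector.map { s =>
      val t = math.min(s, math.max(left, 0))
      left -= s
      t
    }
    if (left > 0) taken :+ left else taken
  }

  def main(args: Array[String]): Unit = {
    println(psplitSizes(100, 50, 25, 25, 10, 5)) // Vector(50, 25, 25, 0, 0)
    println(psplitSizes(100, 50, 40))            // Vector(50, 40, 10)
  }
}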
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 508bc46a72..e04e0e9c72 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -17,6 +17,7 @@ import scala.collection.generic.DelegatedSignalling
import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
import scala.collection.Iterator.empty
+import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
@@ -45,13 +46,13 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
i
}
- def reduce[U >: T](op: (U, U) => U): U = {
+ override def reduce[U >: T](op: (U, U) => U): U = {
var r: U = next
while (hasNext) r = op(r, next)
r
}
- def fold[U >: T](z: U)(op: (U, U) => U): U = {
+ override def fold[U >: T](z: U)(op: (U, U) => U): U = {
var r = z
while (hasNext) r = op(r, next)
r
@@ -124,10 +125,10 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
cb
}
- def flatmap2combiner[S, That](f: T => TraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
+ def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
while (hasNext) {
- val traversable = f(next)
+ val traversable = f(next).seq
if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
else cb ++= traversable
}
@@ -279,7 +280,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
-trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] {
+private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] {
/** The exact number of elements this iterator has yet to iterate.
* This method doesn't change the state of the iterator.
@@ -372,7 +373,7 @@ trait AugmentedSeqIterator[+T] extends AugmentedIterableIterator[T] {
*
* @param T type of the elements iterated.
*/
-trait ParIterableIterator[+T]
+trait IterableSplitter[+T]
extends AugmentedIterableIterator[T]
with Splitter[T]
with Signalling
@@ -381,9 +382,9 @@ extends AugmentedIterableIterator[T]
self =>
/** Creates a copy of this iterator. */
- def dup: ParIterableIterator[T]
+ def dup: IterableSplitter[T]
- def split: Seq[ParIterableIterator[T]]
+ def split: Seq[IterableSplitter[T]]
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
@@ -419,14 +420,14 @@ self =>
/* iterator transformers */
- class Taken(taken: Int) extends ParIterableIterator[T] {
+ class Taken(taken: Int) extends IterableSplitter[T] {
var signalDelegate = self.signalDelegate
var remaining = taken min self.remaining
def hasNext = remaining > 0
def next = { remaining -= 1; self.next }
- def dup: ParIterableIterator[T] = self.dup.take(taken)
- def split: Seq[ParIterableIterator[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
- protected[this] def takeSeq[PI <: ParIterableIterator[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
+ def dup: IterableSplitter[T] = self.dup.take(taken)
+ def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
+ protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
val sizes = sq.scanLeft(0)(_ + _.remaining)
val shortened = for ((it, (from, until)) <- sq zip (sizes.init zip sizes.tail)) yield
if (until < remaining) it else taker(it, remaining - from)
@@ -445,23 +446,23 @@ self =>
}
it
}
- override def take(n: Int): ParIterableIterator[T] = newTaken(n)
- override def slice(from1: Int, until1: Int): ParIterableIterator[T] = newSliceInternal(newTaken(until1), from1)
+ override def take(n: Int): IterableSplitter[T] = newTaken(n)
+ override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
- class Mapped[S](f: T => S) extends ParIterableIterator[S] {
+ class Mapped[S](f: T => S) extends IterableSplitter[S] {
var signalDelegate = self.signalDelegate
def hasNext = self.hasNext
def next = f(self.next)
def remaining = self.remaining
- def dup: ParIterableIterator[S] = self.dup map f
- def split: Seq[ParIterableIterator[S]] = self.split.map { _ map f }
+ def dup: IterableSplitter[S] = self.dup map f
+ def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
}
override def map[S](f: T => S) = new Mapped(f)
- class Appended[U >: T, PI <: ParIterableIterator[U]](protected val that: PI) extends ParIterableIterator[U] {
+ class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
var signalDelegate = self.signalDelegate
- protected var curr: ParIterableIterator[U] = self
+ protected var curr: IterableSplitter[U] = self
def hasNext = if (curr.hasNext) true else if (curr eq self) {
curr = that
curr.hasNext
@@ -472,19 +473,19 @@ self =>
} else curr.next
def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining
protected def firstNonEmpty = (curr eq self) && curr.hasNext
- def dup: ParIterableIterator[U] = self.dup.appendParIterable[U, PI](that)
- def split: Seq[ParIterableIterator[U]] = if (firstNonEmpty) Seq(curr, that) else curr.split
+ def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that)
+ def split: Seq[IterableSplitter[U]] = if (firstNonEmpty) Seq(curr, that) else curr.split
}
- def appendParIterable[U >: T, PI <: ParIterableIterator[U]](that: PI) = new Appended[U, PI](that)
+ def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that)
- class Zipped[S](protected val that: ParSeqIterator[S]) extends ParIterableIterator[(T, S)] {
+ class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
var signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
def remaining = self.remaining min that.remaining
- def dup: ParIterableIterator[(T, S)] = self.dup.zipParSeq(that)
- def split: Seq[ParIterableIterator[(T, S)]] = {
+ def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that)
+ def split: Seq[IterableSplitter[(T, S)]] = {
val selfs = self.split
val sizes = selfs.map(_.remaining)
val thats = that.psplit(sizes: _*)
@@ -492,10 +493,10 @@ self =>
}
}
- def zipParSeq[S](that: ParSeqIterator[S]) = new Zipped(that)
+ def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that)
- class ZippedAll[U >: T, S](protected val that: ParSeqIterator[S], protected val thiselem: U, protected val thatelem: S)
- extends ParIterableIterator[(U, S)] {
+ class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
+ extends IterableSplitter[(U, S)] {
var signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
@@ -503,18 +504,18 @@ self =>
else (self.next, thatelem)
} else (thiselem, that.next);
def remaining = self.remaining max that.remaining
- def dup: ParIterableIterator[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
- def split: Seq[ParIterableIterator[(U, S)]] = {
+ def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
+ def split: Seq[IterableSplitter[(U, S)]] = {
val selfrem = self.remaining
val thatrem = that.remaining
- val thisit = if (selfrem < thatrem) self.appendParIterable[U, ParSeqIterator[U]](repetition[U](thiselem, thatrem - selfrem).parallelIterator) else self
- val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).parallelIterator) else that
+ val thisit = if (selfrem < thatrem) self.appendParIterable[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self
+ val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that
val zipped = thisit zipParSeq thatit
zipped.split
}
}
- def zipAllParSeq[S, U >: T, R >: S](that: ParSeqIterator[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
+ def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
}
@@ -523,15 +524,15 @@ self =>
*
* @param T type of the elements iterated.
*/
-trait ParSeqIterator[+T]
-extends ParIterableIterator[T]
+trait SeqSplitter[+T]
+extends IterableSplitter[T]
with AugmentedSeqIterator[T]
with PreciseSplitter[T]
{
self =>
- def dup: ParSeqIterator[T]
- def split: Seq[ParSeqIterator[T]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[T]]
+ def dup: SeqSplitter[T]
+ def split: Seq[SeqSplitter[T]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[T]]
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
@@ -544,27 +545,27 @@ self =>
/* iterator transformers */
- class Taken(tk: Int) extends super.Taken(tk) with ParSeqIterator[T] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[T]]
- override def split: Seq[ParSeqIterator[T]] = super.split.asInstanceOf[Seq[ParSeqIterator[T]]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[T]] = takeSeq(self.psplit(sizes: _*)) { (p, n) => p.take(n) }
+ class Taken(tk: Int) extends super.Taken(tk) with SeqSplitter[T] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[T]]
+ override def split: Seq[SeqSplitter[T]] = super.split.asInstanceOf[Seq[SeqSplitter[T]]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[T]] = takeSeq(self.psplit(sizes: _*)) { (p, n) => p.take(n) }
}
override private[collection] def newTaken(until: Int): Taken = new Taken(until)
- override def take(n: Int): ParSeqIterator[T] = newTaken(n)
- override def slice(from1: Int, until1: Int): ParSeqIterator[T] = newSliceInternal(newTaken(until1), from1)
+ override def take(n: Int): SeqSplitter[T] = newTaken(n)
+ override def slice(from1: Int, until1: Int): SeqSplitter[T] = newSliceInternal(newTaken(until1), from1)
- class Mapped[S](f: T => S) extends super.Mapped[S](f) with ParSeqIterator[S] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[S]]
- override def split: Seq[ParSeqIterator[S]] = super.split.asInstanceOf[Seq[ParSeqIterator[S]]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[S]] = self.psplit(sizes: _*).map { _ map f }
+ class Mapped[S](f: T => S) extends super.Mapped[S](f) with SeqSplitter[S] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[S]]
+ override def split: Seq[SeqSplitter[S]] = super.split.asInstanceOf[Seq[SeqSplitter[S]]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[S]] = self.psplit(sizes: _*).map { _ map f }
}
override def map[S](f: T => S) = new Mapped(f)
- class Appended[U >: T, PI <: ParSeqIterator[U]](it: PI) extends super.Appended[U, PI](it) with ParSeqIterator[U] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[U]]
- override def split: Seq[ParSeqIterator[U]] = super.split.asInstanceOf[Seq[ParSeqIterator[U]]]
- def psplit(sizes: Int*): Seq[ParSeqIterator[U]] = if (firstNonEmpty) {
+ class Appended[U >: T, PI <: SeqSplitter[U]](it: PI) extends super.Appended[U, PI](it) with SeqSplitter[U] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[U]]
+ override def split: Seq[SeqSplitter[U]] = super.split.asInstanceOf[Seq[SeqSplitter[U]]]
+ def psplit(sizes: Int*): Seq[SeqSplitter[U]] = if (firstNonEmpty) {
val selfrem = self.remaining
// split sizes
@@ -585,56 +586,56 @@ self =>
val thats = that.psplit(thatsizes: _*)
// appended last in self with first in rest if necessary
- if (appendMiddle) selfs.init ++ Seq(selfs.last.appendParSeq[U, ParSeqIterator[U]](thats.head)) ++ thats.tail
+ if (appendMiddle) selfs.init ++ Seq(selfs.last.appendParSeq[U, SeqSplitter[U]](thats.head)) ++ thats.tail
else selfs ++ thats
- } else curr.asInstanceOf[ParSeqIterator[U]].psplit(sizes: _*)
+ } else curr.asInstanceOf[SeqSplitter[U]].psplit(sizes: _*)
}
- def appendParSeq[U >: T, PI <: ParSeqIterator[U]](that: PI) = new Appended[U, PI](that)
+ def appendParSeq[U >: T, PI <: SeqSplitter[U]](that: PI) = new Appended[U, PI](that)
- class Zipped[S](ti: ParSeqIterator[S]) extends super.Zipped[S](ti) with ParSeqIterator[(T, S)] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[(T, S)]]
- override def split: Seq[ParSeqIterator[(T, S)]] = super.split.asInstanceOf[Seq[ParSeqIterator[(T, S)]]]
+ class Zipped[S](ti: SeqSplitter[S]) extends super.Zipped[S](ti) with SeqSplitter[(T, S)] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[(T, S)]]
+ override def split: Seq[SeqSplitter[(T, S)]] = super.split.asInstanceOf[Seq[SeqSplitter[(T, S)]]]
def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => p._1 zipParSeq p._2 }
}
- override def zipParSeq[S](that: ParSeqIterator[S]) = new Zipped(that)
+ override def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that)
- class ZippedAll[U >: T, S](ti: ParSeqIterator[S], thise: U, thate: S) extends super.ZippedAll[U, S](ti, thise, thate) with ParSeqIterator[(U, S)] {
- override def dup = super.dup.asInstanceOf[ParSeqIterator[(U, S)]]
+ class ZippedAll[U >: T, S](ti: SeqSplitter[S], thise: U, thate: S) extends super.ZippedAll[U, S](ti, thise, thate) with SeqSplitter[(U, S)] {
+ override def dup = super.dup.asInstanceOf[SeqSplitter[(U, S)]]
private def patchem = {
val selfrem = self.remaining
val thatrem = that.remaining
- val thisit = if (selfrem < thatrem) self.appendParSeq[U, ParSeqIterator[U]](repetition[U](thiselem, thatrem - selfrem).parallelIterator) else self
- val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).parallelIterator) else that
+ val thisit = if (selfrem < thatrem) self.appendParSeq[U, SeqSplitter[U]](repetition[U](thiselem, thatrem - selfrem).splitter) else self
+ val thatit = if (selfrem > thatrem) that.appendParSeq(repetition(thatelem, selfrem - thatrem).splitter) else that
(thisit, thatit)
}
- override def split: Seq[ParSeqIterator[(U, S)]] = {
+ override def split: Seq[SeqSplitter[(U, S)]] = {
val (thisit, thatit) = patchem
val zipped = thisit zipParSeq thatit
zipped.split
}
- def psplit(sizes: Int*): Seq[ParSeqIterator[(U, S)]] = {
+ def psplit(sizes: Int*): Seq[SeqSplitter[(U, S)]] = {
val (thisit, thatit) = patchem
val zipped = thisit zipParSeq thatit
zipped.psplit(sizes: _*)
}
}
- override def zipAllParSeq[S, U >: T, R >: S](that: ParSeqIterator[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
+ override def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
- def reverse: ParSeqIterator[T] = {
+ def reverse: SeqSplitter[T] = {
val pa = mutable.ParArray.fromTraversables(self).reverse
new pa.ParArrayIterator with pa.SCPI {
override def reverse = self
}
}
- class Patched[U >: T](from: Int, patch: ParSeqIterator[U], replaced: Int) extends ParSeqIterator[U] {
+ class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] {
var signalDelegate = self.signalDelegate
private[this] val trio = {
val pits = self.psplit(from, replaced, self.remaining - from - replaced)
- (pits(0).appendParSeq[U, ParSeqIterator[U]](patch)) appendParSeq pits(2)
+ (pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2)
}
def hasNext = trio.hasNext
def next = trio.next
@@ -644,7 +645,7 @@ self =>
def psplit(sizes: Int*) = trio.psplit(sizes: _*)
}
- def patchParSeq[U >: T](from: Int, patchElems: ParSeqIterator[U], replaced: Int) = new Patched(from, patchElems, replaced)
+ def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced)
}
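The renamed IterableSplitter/SeqSplitter hierarchy above boils down to an iterator that additionally knows how many elements it has left (remaining), can copy itself (dup), and can partition its remaining elements (split, plus psplit for sequences). A stripped-down standalone sketch over an IndexedSeq shows the protocol; RangeSplitter is an invented name and carries none of the signalling or transformer machinery of the real splitters.

class RangeSplitter[T](xs: IndexedSeq[T], private var i: Int, end: Int) extends Iterator[T] {
  def this(xs: IndexedSeq[T]) = this(xs, 0, xs.length)

  def hasNext: Boolean = i < end
  def next(): T = { val x = xs(i); i += 1; x }

  // elements not yet traversed; does not advance the iterator
  def remaining: Int = end - i

  // a copy of this splitter positioned at the same element
  def dup: RangeSplitter[T] = new RangeSplitter(xs, i, end)

  // split the remaining elements into two roughly equal halves
  def split: Seq[RangeSplitter[T]] = {
    val mid = i + remaining / 2
    Seq(new RangeSplitter(xs, i, mid), new RangeSplitter(xs, mid, end))
  }

  // split the remaining elements into pieces of the requested sizes
  def psplit(sizes: Int*): Seq[RangeSplitter[T]] = {
    var from = i
    sizes.toVector.map { sz =>
      val until = math.min(from + sz, end)
      val s = new RangeSplitter(xs, from, until)
      from = until
      s
    }
  }
}

object RangeSplitterDemo {
  def main(args: Array[String]): Unit = {
    val s = new RangeSplitter((1 to 10).toVector)
    println(s.split.map(_.remaining))        // List(5, 5)
    println(s.psplit(3, 3, 4).map(_.toList)) // Vector(List(1, 2, 3), List(4, 5, 6), List(7, 8, 9, 10))
  }
}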
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index 7328f3db6c..568a7c8e0d 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.collection.parallel
@@ -63,53 +62,3 @@ object Splitter {
}
-/** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters
- * that traverse disjoint subsets of arbitrary sizes.
- *
- * Implementors might want to override the parameterless `split` method for efficiency.
- *
- * @tparam T type of the elements this splitter traverses
- *
- * @since 2.9
- * @author Aleksandar Prokopec
- */
-trait PreciseSplitter[+T] extends Splitter[T] {
-
- /** Splits the splitter into disjunct views.
- *
- * This overloaded version of the `split` method is specific to precise splitters.
- * It returns a sequence of splitters, each iterating some subset of the
- * elements in this splitter. The sizes of the subsplitters in the partition is equal to
- * the size in the corresponding argument, as long as there are enough elements in this
- * splitter to split it that way.
- *
- * If there aren't enough elements, a zero element splitter is appended for each additional argument.
- * If there are additional elements, an additional splitter is appended at the end to compensate.
- *
- * For example, say we have a splitter `ps` with 100 elements. Invoking:
- * {{{
- * ps.split(50, 25, 25, 10, 5)
- * }}}
- * will return a sequence of five splitters, last two views being empty. On the other hand, calling:
- * {{{
- * ps.split(50, 40)
- * }}}
- * will return a sequence of three splitters, last of them containing ten elements.
- *
- * '''Note:''' this method actually invalidates the current splitter.
- *
- * Unlike the case with `split` found in splitters, views returned by this method can be empty.
- *
- * @param sizes the sizes used to split this split iterator into iterators that traverse disjunct subsets
- * @return a sequence of disjunct subsequence iterators of this parallel iterator
- */
- def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
-
- def split: Seq[PreciseSplitter[T]]
-
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 80cdd31fa1..497e0c638a 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -44,7 +44,7 @@ trait Tasks {
def leaf(result: Option[R])
/** A result that can be accessed once the task is completed. */
- @volatile var result: R
+ var result: R
/** Decides whether or not this task should be split further. */
def shouldSplitFurther: Boolean
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 11d4a02e18..c6006b54f6 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -17,8 +17,7 @@ package scala.collection.parallel.immutable
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParIterableIterator
-import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.IterableSplitter
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.mutable.UnrolledBuffer
import scala.collection.generic.ParMapFactory
@@ -64,7 +63,7 @@ self =>
protected[this] override def newCombiner = HashMapCombiner[K, V]
- def parallelIterator: ParIterableIterator[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
override def seq = trie
@@ -167,9 +166,9 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[parallel] abstract class HashMapCombiner[K, V]
extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
-self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
+//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
import HashMapCombiner._
- import tasksupport._
+ import collection.parallel.tasksupport._
val emptyTrie = HashMap.empty[K, V]
def +=(elem: (K, V)) = {
@@ -337,7 +336,7 @@ self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
private[parallel] object HashMapCombiner {
- def apply[K, V] = new HashMapCombiner[K, V] with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
+ def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
private[immutable] val rootbits = 5
private[immutable] val rootsize = 1 << 5
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index e292a3ef72..c67fad867d 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -17,8 +17,7 @@ package scala.collection.parallel.immutable
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParIterableIterator
-import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.IterableSplitter
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.mutable.UnrolledBuffer
import scala.collection.generic.ParSetFactory
@@ -62,7 +61,7 @@ self =>
override def empty: ParHashSet[T] = new ParHashSet[T]
- def parallelIterator: ParIterableIterator[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
override def seq = trie
@@ -143,9 +142,9 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[immutable] abstract class HashSetCombiner[T]
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
-self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
+//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
import HashSetCombiner._
- import tasksupport._
+ import collection.parallel.tasksupport._
val emptyTrie = HashSet.empty[T]
def +=(elem: T) = {
@@ -227,7 +226,7 @@ self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
object HashSetCombiner {
- def apply[T] = new HashSetCombiner[T] with EnvironmentPassingCombiner[T, ParHashSet[T]] {}
+ def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {}
private[immutable] val rootbits = 5
private[immutable] val rootsize = 1 << 5
@@ -274,18 +273,3 @@ object HashSetCombiner {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 085f5220dd..8a03c8a57a 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -15,6 +15,7 @@ import scala.collection.generic._
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
+import scala.collection.GenIterable
/** A template trait for immutable parallel iterable collections.
@@ -29,10 +30,10 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParIterable[+T]
-extends collection.immutable.Iterable[T]
+extends collection.immutable.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 5db07a6a3b..5dd4c3f35f 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -21,6 +21,8 @@ import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
+import scala.collection.GenMapLike
+
@@ -36,7 +38,7 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, +V]
-extends Map[K, V]
+extends collection.immutable.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 157f6746fa..198274f4e2 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -14,7 +14,7 @@ package scala.collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.ParIterableIterator
+import scala.collection.parallel.IterableSplitter
@@ -45,7 +45,7 @@ self =>
@inline final def apply(idx: Int) = range.apply(idx);
- def parallelIterator = new ParRangeIterator with SCPI
+ def splitter = new ParRangeIterator with SCPI
type SCPI = SignalContextPassingIterator[ParRangeIterator]
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index a922642022..901d4f204d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -18,7 +18,7 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-
+import scala.collection.GenSeq
@@ -28,7 +28,7 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection.immutable.Seq[T]
+extends collection.immutable.GenSeq[T]
with collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
@@ -44,11 +44,11 @@ extends collection.immutable.Seq[T]
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
- def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
+ def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
- def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
+ def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
}
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 73d27df994..64fb826d35 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -15,6 +15,7 @@ package parallel.immutable
+import scala.collection.GenSet
import scala.collection.immutable.Set
import scala.collection.generic._
import scala.collection.parallel.ParSetLike
@@ -29,7 +30,7 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends Set[T]
+extends collection.immutable.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 05e057a2a4..d1cf3d58ec 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -16,8 +16,7 @@ package parallel.immutable
import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory}
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParSeqIterator
-import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.SeqSplitter
import mutable.ArrayBuffer
import immutable.Vector
import immutable.VectorBuilder
@@ -55,7 +54,7 @@ extends ParSeq[T]
def length = vector.length
- def parallelIterator: ParSeqIterator[T] = {
+ def splitter: SeqSplitter[T] = {
val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI
vector.initIterator(pit)
pit
@@ -66,7 +65,7 @@ extends ParSeq[T]
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator {
self: SCPI =>
def remaining: Int = remainingElementCount
- def dup: ParSeqIterator[T] = (new ParVector(remainingVector)).parallelIterator
+ def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
def split: Seq[ParVectorIterator] = {
val rem = remaining
if (rem >= 2) psplit(rem / 2, rem - rem / 2)
@@ -79,7 +78,7 @@ extends ParSeq[T]
splitted += remvector.take(sz)
remvector = remvector.drop(sz)
}
- splitted.map(v => new ParVector(v).parallelIterator.asInstanceOf[ParVectorIterator])
+ splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
}
}
@@ -95,15 +94,15 @@ object ParVector extends ParFactory[ParVector] {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParVector[T]] =
new GenericCanCombineFrom[T]
- def newBuilder[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] with EPC[T, ParVector[T]]
+ def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T]
- def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] with EPC[T, ParVector[T]]
+ def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]]
}
private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] {
-self: EnvironmentPassingCombiner[T, ParVector[T]] =>
+//self: EnvironmentPassingCombiner[T, ParVector[T]] =>
var sz = 0
val vectors = new ArrayBuffer[VectorBuilder[T]] += new VectorBuilder[T]
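ParVectorIterator.psplit above realizes the requested sizes by repeatedly taking and dropping from the not-yet-traversed vector and wrapping each slice in a new iterator. The same slicing outside the iterator machinery, where psplitVector is an illustrative helper rather than library code:

object VectorPsplitSketch {
  def psplitVector[T](v: Vector[T], sizes: Int*): Seq[Vector[T]] = {
    var rem = v
    sizes.toVector.map { sz =>
      val piece = rem.take(sz)
      rem = rem.drop(sz)
      piece
    }
  }

  def main(args: Array[String]): Unit = {
    println(psplitVector(Vector(1, 2, 3, 4, 5), 2, 2, 1))
    // Vector(Vector(1, 2), Vector(3, 4), Vector(5))
  }
}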
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 87d5bfc9f9..c62459deeb 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -43,7 +43,7 @@ package object immutable {
type SCPI = SignalContextPassingIterator[ParIterator]
class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
+ me: SignalContextPassingIterator[ParIterator] =>
def remaining = until - i
def hasNext = i < until
def next = { i += 1; elem }
@@ -55,7 +55,7 @@ package object immutable {
def split = psplit(remaining / 2, remaining - remaining / 2)
}
- def parallelIterator = new ParIterator with SCPI
+ def splitter = new ParIterator with SCPI
}
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 8f7a62aa4f..def6fa7742 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.collection.parallel.mutable
@@ -30,7 +29,7 @@ import scala.collection.parallel.Combiner
*/
trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To]
{
-self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
+//self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
val chain: ArrayBuffer[Buff]
val lastbuff = chain.last
def +=(elem: Elem) = { lastbuff += elem; this }
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 0ac009d58c..a1eb3beb0c 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -23,6 +23,7 @@ import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.CHECK_RATE
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.Builder
+import scala.collection.GenTraversableOnce
@@ -75,7 +76,7 @@ self =>
type SCPI = SignalContextPassingIterator[ParArrayIterator]
- def parallelIterator: ParArrayIterator = {
+ protected[parallel] def splitter: ParArrayIterator = {
val pit = new ParArrayIterator with SCPI
pit
}
@@ -178,7 +179,7 @@ self =>
override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op)
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
+ override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
override def sum[U >: T](implicit num: Numeric[U]): U = {
var s = sum_quick(num, arr, until, i, num.zero)
@@ -409,12 +410,12 @@ self =>
}
}
- override def flatmap2combiner[S, That](f: T => TraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
+ override def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(self.repr)
while (i < until) {
val traversable = f(arr(i).asInstanceOf[T])
if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
- else cb ++= traversable
+ else cb ++= traversable.seq
i += 1
}
cb
@@ -592,7 +593,7 @@ self =>
(new ParArray[S](targarrseq)).asInstanceOf[That]
} else super.map(f)(bf)
- override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[ParArray[T], U, That]): That =
+ override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That =
if (parallelismLevel > 1 && buildsArray(cbf(repr))) {
// reserve an array
val targarrseq = new ArraySeq[U](length + 1)
@@ -600,7 +601,7 @@ self =>
targetarr(0) = z
// do a parallel prefix scan
- if (length > 0) executeAndWaitResult(new CreateScanTree[U](0, size, z, op, parallelIterator) mapResult {
+ if (length > 0) executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult {
tree => executeAndWaitResult(new ScanToArray(tree, z, op, targetarr))
})
@@ -710,11 +711,10 @@ object ParArray extends ParFactory[ParArray] {
handoff(newarr)
}
- def fromTraversables[T](xss: TraversableOnce[T]*) = {
+ def fromTraversables[T](xss: GenTraversableOnce[T]*) = {
val cb = ParArrayCombiner[T]()
for (xs <- xss) {
- val it = xs.toIterator
- while (it.hasNext) cb += it.next
+ cb ++= xs.seq
}
cb.result
}
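Both flatmap2combiner and fromTraversables above now accept GenTraversableOnce and call .seq before appending, so whatever the function returns (possibly a parallel collection) is traversed through its sequential view while filling the combiner. A small sketch against the 2.9-era API, with an ArrayBuffer standing in for the combiner (flatmapInto is an invented name; GenTraversableOnce does not exist in 2.13):

import scala.collection.GenTraversableOnce
import scala.collection.mutable.ArrayBuffer

object FlatmapSeqSketch {
  def flatmapInto[T, S](it: Iterator[T], f: T => GenTraversableOnce[S],
                        buf: ArrayBuffer[S]): ArrayBuffer[S] = {
    // .seq downgrades a possibly parallel result to its sequential view
    while (it.hasNext) buf ++= f(it.next()).seq
    buf
  }

  def main(args: Array[String]): Unit = {
    val out = flatmapInto(Iterator(1, 2, 3), (n: Int) => 1 to n, new ArrayBuffer[Int])
    println(out) // ArrayBuffer(1, 1, 2, 1, 2, 3)
  }
}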
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index f2205fbb17..35c748916c 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -9,7 +9,7 @@
package scala.collection
package parallel.mutable
-import collection.parallel.ParIterableIterator
+import collection.parallel.IterableSplitter
/** Parallel flat hash table.
*
@@ -24,7 +24,7 @@ trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
override def alwaysInitSizeMap = true
abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int)
- extends ParIterableIterator[T] with SizeMapUtils {
+ extends IterableSplitter[T] with SizeMapUtils {
import collection.DebugUtils._
private var traversed = 0
@@ -42,7 +42,7 @@ trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
throw new IndexOutOfBoundsException(idx.toString)
}
- def newIterator(index: Int, until: Int, totalsize: Int): ParIterableIterator[T]
+ def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T]
def remaining = totalsize - traversed
def hasNext = traversed < totalsize
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index a3f3c33af5..37065e32fc 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -56,7 +56,7 @@ self =>
override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
- def parallelIterator = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
+ def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
override def size = tableSize
@@ -68,15 +68,15 @@ self =>
else Some(e.value)
}
- override def put(key: K, value: V): Option[V] = {
+ def put(key: K, value: V): Option[V] = {
val e = findEntry(key)
if (e == null) { addEntry(new Entry(key, value)); None }
else { val v = e.value; e.value = value; Some(v) }
}
- override def update(key: K, value: V): Unit = put(key, value)
+ def update(key: K, value: V): Unit = put(key, value)
- override def remove(key: K): Option[V] = {
+ def remove(key: K): Option[V] = {
val e = removeEntry(key)
if (e ne null) Some(e.value)
else None
@@ -160,8 +160,8 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa
extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with collection.mutable.HashTable.HashUtils[K]
{
-self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
- import tasksupport._
+//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
+ import collection.parallel.tasksupport._
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
@@ -315,7 +315,7 @@ private[parallel] object ParHashMapCombiner {
private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
private[mutable] val nonmasklength = 32 - discriminantbits
- def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
+ def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 6d82e1b6aa..0e48995cbe 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -12,7 +12,6 @@ import collection.generic._
import collection.mutable.HashSet
import collection.mutable.FlatHashTable
import collection.parallel.Combiner
-import collection.parallel.EnvironmentPassingCombiner
import collection.mutable.UnrolledBuffer
/** A parallel hash set.
@@ -46,11 +45,11 @@ extends ParSet[T]
override def empty = new ParHashSet
- override def iterator = parallelIterator
+ override def iterator = splitter
override def size = tableSize
- override def clear() = clearTable()
+ def clear() = clearTable()
override def seq = new HashSet(hashTableContents)
@@ -68,7 +67,7 @@ extends ParSet[T]
def contains(elem: T) = containsEntry(elem)
- def parallelIterator = new ParHashSetIterator(0, table.length, size) with SCPI
+ def splitter = new ParHashSetIterator(0, table.length, size) with SCPI
type SCPI = SignalContextPassingIterator[ParHashSetIterator]
@@ -117,8 +116,8 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
with collection.mutable.FlatHashTable.HashUtils[T] {
-self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- import tasksupport._
+//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
+ import collection.parallel.tasksupport._
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
@@ -316,6 +315,6 @@ private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
private[mutable] val nonmasklength = 32 - discriminantbits
- def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) with EnvironmentPassingCombiner[T, ParHashSet[T]]
+ def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index be1cbf1333..e5c0be36a1 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -14,7 +14,7 @@ package parallel.mutable
import collection.mutable.HashEntry
-import collection.parallel.ParIterableIterator
+import collection.parallel.IterableSplitter
@@ -28,9 +28,9 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
/** A parallel iterator returning all the entries.
*/
- abstract class EntryIterator[T, +IterRepr <: ParIterableIterator[T]]
+ abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]]
(private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
- extends ParIterableIterator[T] with SizeMapUtils {
+ extends IterableSplitter[T] with SizeMapUtils {
private val itertable = table
private var traversed = 0
scan()
@@ -78,7 +78,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
def dup = newIterator(idx, until, totalsize, es)
- def split: Seq[ParIterableIterator[T]] = if (remaining > 1) {
+ def split: Seq[IterableSplitter[T]] = if (remaining > 1) {
if (until > idx) {
// there is at least one more slot for the next iterator
// divide the rest of the table
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferIterator[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index cd0c45bd0d..c07ed69ed5 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -6,14 +6,13 @@
** |/ **
\* */
-
package scala.collection.parallel.mutable
import scala.collection.generic._
-
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
+import scala.collection.GenIterable
/** A template trait for mutable parallel iterable collections.
@@ -27,16 +26,19 @@ import scala.collection.parallel.Combiner
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection.mutable.Iterable[T]
+trait ParIterable[T] extends collection.mutable.GenIterable[T]
with collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]] {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
+ //protected[this] override def newBuilder = ParIterable.newBuilder[T]
- // if `mutable.ParIterableLike` is introduced, please move these 4 methods there
+ // if `mutable.ParIterableLike` is introduced, please move these methods there
override def toIterable: ParIterable[T] = this
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
+
+ def seq: collection.mutable.Iterable[T]
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index f4fa7303c4..477e111024 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -28,17 +28,21 @@ import collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection.mutable.Map[K, V]
+extends collection.mutable.GenMap[K, V]
with collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
{
+ protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
+
override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap
override def empty: ParMap[K, V] = new ParHashMap[K, V]
+ def seq: collection.mutable.Map[K, V]
+
}
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 52e11c6db3..aff590d5bb 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.collection.parallel
package mutable
@@ -14,6 +13,7 @@ package mutable
import collection.generic._
import collection.mutable.Builder
+import collection.mutable.Cloneable
@@ -32,12 +32,23 @@ trait ParMapLike[K,
V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
+Sequential <: collection.mutable.Map[K, V] with collection.mutable.MapLike[K, V, Sequential]]
-extends collection.mutable.MapLike[K, V, Repr]
- with collection.parallel.ParMapLike[K, V, Repr, Sequential] {
+extends collection.GenMapLike[K, V, Repr]
+ with collection.parallel.ParMapLike[K, V, Repr, Sequential]
+ with Cloneable[Repr] {
// note: should not override toMap
- override def clear(): Unit = throw new UnsupportedOperationException("Must be reimplemented for parallel map implementations.")
+ def put(key: K, value: V): Option[V]
+
+ def +=(kv: (K, V)): this.type
+
+ def -=(key: K): this.type
+
+ def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[ParMap[K, U]] += kv
+
+ def -(key: K) = this.clone() -= key
+
+ def clear(): Unit
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 26320c1bc3..401e4f900d 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -17,7 +17,7 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-
+import scala.collection.GenSeq
@@ -29,19 +29,20 @@ import scala.collection.parallel.Combiner
* @define Coll mutable.ParSeq
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection.mutable.Seq[T]
+trait ParSeq[T] extends collection.mutable.GenSeq[T] // was: collection.mutable.Seq[T]
with ParIterable[T]
with collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
- self =>
+self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
+ //protected[this] override def newBuilder = ParSeq.newBuilder[T]
def update(i: Int, elem: T): Unit
- override def toSeq: ParSeq[T] = this
+ def seq: collection.mutable.Seq[T]
- override def transform(f: T => T): this.type = throw new UnsupportedOperationException("Not supported for parallel sequences.")
+ override def toSeq: ParSeq[T] = this
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index b7f4b61d66..39e6f827cd 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -13,7 +13,7 @@ package scala.collection.parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
-
+import scala.collection.GenSet
@@ -23,17 +23,20 @@ import scala.collection.parallel.Combiner
*
* @define Coll mutable.ParSet
* @define coll mutable parallel set
+ *
+ * @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection.mutable.Set[T]
+extends collection.mutable.GenSet[T]
with ParIterable[T]
with collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
{
self =>
- override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet;
+ override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def empty: ParSet[T] = ParHashSet()
+ def seq: collection.mutable.Set[T]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 68f142cda7..7c9767befd 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -14,8 +14,8 @@ package parallel.mutable
import scala.collection.mutable.Set
import scala.collection.mutable.Builder
-
-
+import scala.collection.mutable.Cloneable
+import scala.collection.GenSetLike
@@ -35,16 +35,21 @@ import scala.collection.mutable.Builder
trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]]
-extends mutable.SetLike[T, Repr]
+extends GenSetLike[T, Repr]
with collection.parallel.ParIterableLike[T, Repr, Sequential]
with collection.parallel.ParSetLike[T, Repr, Sequential]
-{ self =>
+ with Cloneable[Repr]
+{
+self =>
+ override def empty: Repr
- protected[this] override def newBuilder: Builder[T, Repr] = newCombiner
+ def +=(elem: T): this.type
- protected[this] override def newCombiner: parallel.Combiner[T, Repr]
+ def -=(elem: T): this.type
- override def empty: Repr
+ def +(elem: T) = this.clone() += elem
+
+ def -(elem: T) = this.clone() -= elem
// note: should not override toSet
}
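Both the mutable ParMapLike and ParSetLike changes above mix in Cloneable and define the non-destructive + and - in terms of the destructive += and -= on a clone. The same pattern on a tiny standalone wrapper (TinySet is an invented class, not library code):

import scala.collection.mutable

class TinySet[T](private val elems: mutable.HashSet[T]) extends Cloneable {
  def +=(elem: T): this.type = { elems += elem; this }
  def -=(elem: T): this.type = { elems -= elem; this }

  override def clone(): TinySet[T] = new TinySet(elems.clone())

  // non-destructive variants: copy first, then mutate the copy
  def +(elem: T): TinySet[T] = this.clone() += elem
  def -(elem: T): TinySet[T] = this.clone() -= elem

  override def toString = elems.mkString("TinySet(", ", ", ")")
}

object TinySetDemo {
  def main(args: Array[String]): Unit = {
    val a = new TinySet(mutable.HashSet(1, 2))
    val b = a + 3
    println(a) // TinySet(1, 2)    -- original unchanged (element order unspecified)
    println(b) // TinySet(1, 2, 3)
  }
}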
diff --git a/src/library/scala/collection/parallel/mutable/ResizeableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizeableParArrayCombiner.scala
new file mode 100644
index 0000000000..8290438c10
--- /dev/null
+++ b/src/library/scala/collection/parallel/mutable/ResizeableParArrayCombiner.scala
@@ -0,0 +1,111 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.parallel.mutable
+
+
+
+import scala.collection.generic.Sizing
+import scala.collection.mutable.ArraySeq
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.parallel.TaskSupport
+//import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.parallel.unsupportedop
+import scala.collection.parallel.Combiner
+
+
+
+/** An array combiner that uses a chain of arraybuffers to store elements. */
+trait ResizableParArrayCombiner[T]
+extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]]
+{
+//self: EnvironmentPassingCombiner[T, ParArray[T]] =>
+ import collection.parallel.tasksupport._
+
+ override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
+
+ def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
+
+ def allocateAndCopy = if (chain.size > 1) {
+ val arrayseq = new ArraySeq[T](size)
+ val array = arrayseq.array.asInstanceOf[Array[Any]]
+
+ executeAndWaitResult(new CopyChainToArray(array, 0, size))
+
+ new ParArray(arrayseq)
+ } else { // optimisation if there is only 1 array
+ val pa = new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size))
+ pa
+ }
+
+ override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain
+
+ /* tasks */
+
+ class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] {
+ var result = ()
+ def leaf(prev: Option[Unit]) = if (howmany > 0) {
+ var totalleft = howmany
+ val (stbuff, stind) = findStart(offset)
+ var buffind = stbuff
+ var ind = stind
+ var arrayIndex = offset
+ while (totalleft > 0) {
+ val currbuff = chain(buffind)
+ val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind
+ val until = ind + chunksize
+
+ copyChunk(currbuff.internalArray, ind, array, arrayIndex, until)
+ arrayIndex += chunksize
+ ind += chunksize
+
+ totalleft -= chunksize
+ buffind += 1
+ ind = 0
+ }
+ }
+ private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) {
+ Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart)
+ }
+ private def findStart(pos: Int) = {
+ var left = pos
+ var buffind = 0
+ while (left >= chain(buffind).size) {
+ left -= chain(buffind).size
+ buffind += 1
+ }
+ (buffind, left)
+ }
+ def split = {
+ val fp = howmany / 2
+ List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
+ }
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
+ }
+
+}
+
+
+object ResizableParArrayCombiner {
+ def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = {
+ new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]]
+ }
+ def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T])
+}
+
+
+
+
+
+
+
+
+
+
+
+
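
The CopyChainToArray task in the new file walks the buffer chain, locating the starting buffer for a given offset with findStart and then copying one contiguous chunk per buffer. A sequential sketch of the same index arithmetic, written against plain Int arrays instead of ExposedArrayBuffer (the real task additionally splits itself and runs the halves in parallel):

  // Sequential sketch of the chain-to-array copy above; Array[Int] chunks stand
  // in for ExposedArrayBuffer.
  def findStart(chain: IndexedSeq[Array[Int]], pos: Int): (Int, Int) = {
    var left = pos
    var buffind = 0
    while (left >= chain(buffind).length) {
      left -= chain(buffind).length
      buffind += 1
    }
    (buffind, left)                          // (buffer index, index inside that buffer)
  }

  def copyChain(chain: IndexedSeq[Array[Int]], dest: Array[Int], offset: Int, howmany: Int) {
    var totalleft = howmany
    var (buffind, ind) = findStart(chain, offset)
    var destIndex = offset
    while (totalleft > 0) {
      val curr = chain(buffind)
      val chunksize = math.min(totalleft, curr.length - ind)
      Array.copy(curr, ind, dest, destIndex, chunksize)
      destIndex += chunksize
      totalleft -= chunksize
      buffind += 1                           // move on to the next buffer in the chain
      ind = 0
    }
  }

  val chain = IndexedSeq(Array(1, 2, 3), Array(4, 5), Array(6, 7, 8, 9))
  val dest = new Array[Int](9)
  copyChain(chain, dest, 0, 9)               // dest now holds 1 to 9 in order
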
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 9e27f6c58c..a4dc9b4a14 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.parallel.mutable
@@ -10,7 +18,7 @@ import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.UnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.EnvironmentPassingCombiner
+//import scala.collection.parallel.EnvironmentPassingCombiner
import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
@@ -28,11 +36,11 @@ private[mutable] class DoublingUnrolledBuffer[T](implicit m: ClassManifest[T]) e
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
-self: EnvironmentPassingCombiner[T, ParArray[T]] =>
+//self: EnvironmentPassingCombiner[T, ParArray[T]] =>
// because size is doubling, random access is O(logn)!
val buff = new DoublingUnrolledBuffer[Any]
- import tasksupport._
+ import collection.parallel.tasksupport._
def +=(elem: T) = {
buff += elem
@@ -109,96 +117,6 @@ self: EnvironmentPassingCombiner[T, ParArray[T]] =>
object UnrolledParArrayCombiner {
- def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] with EnvironmentPassingCombiner[T, ParArray[T]]
-}
-
-
-/** An array combiner that uses a chain of arraybuffers to store elements. */
-trait ResizableParArrayCombiner[T]
-extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]]
-{
-self: EnvironmentPassingCombiner[T, ParArray[T]] =>
- import tasksupport._
-
- override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
-
- def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
-
- def allocateAndCopy = if (chain.size > 1) {
- val arrayseq = new ArraySeq[T](size)
- val array = arrayseq.array.asInstanceOf[Array[Any]]
-
- executeAndWaitResult(new CopyChainToArray(array, 0, size))
-
- new ParArray(arrayseq)
- } else { // optimisation if there is only 1 array
- val pa = new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size))
- pa
- }
-
- override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain
-
- /* tasks */
-
- class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] {
- var result = ()
- def leaf(prev: Option[Unit]) = if (howmany > 0) {
- var totalleft = howmany
- val (stbuff, stind) = findStart(offset)
- var buffind = stbuff
- var ind = stind
- var arrayIndex = offset
- while (totalleft > 0) {
- val currbuff = chain(buffind)
- val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind
- val until = ind + chunksize
-
- copyChunk(currbuff.internalArray, ind, array, arrayIndex, until)
- arrayIndex += chunksize
- ind += chunksize
-
- totalleft -= chunksize
- buffind += 1
- ind = 0
- }
- }
- private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) {
- Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart)
- }
- private def findStart(pos: Int) = {
- var left = pos
- var buffind = 0
- while (left >= chain(buffind).size) {
- left -= chain(buffind).size
- buffind += 1
- }
- (buffind, left)
- }
- def split = {
- val fp = howmany / 2
- List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
- }
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
- }
-
+ def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
-
-object ResizableParArrayCombiner {
- def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = {
- new { val chain = c } with ResizableParArrayCombiner[T] with EnvironmentPassingCombiner[T, ParArray[T]]
- }
- def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T])
-}
-
-
-
-
-
-
-
-
-
-
-
-
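
The `// because size is doubling, random access is O(logn)!` comment above is worth spelling out: if the k-th chunk holds 2^k elements, index i falls into chunk floor(log2(i + 1)), so a lookup skips at most about log2(n) chunks. A hypothetical DoublingChunks class (not the actual UnrolledBuffer internals) sketching that lookup:

  // Hypothetical DoublingChunks: chunks of size 1, 2, 4, ... chained together.
  // apply(i) skips whole chunks, and because the sizes double there are at most
  // about log2(n) chunks to skip -- the O(log n) random access noted above.
  class DoublingChunks[T: ClassManifest] {
    private var chunks = List.empty[Array[T]]   // newest chunk first
    private var filled = 0                      // elements used in the newest chunk
    def +=(elem: T): this.type = {
      if (chunks.isEmpty || filled == chunks.head.length) {
        chunks ::= new Array[T](if (chunks.isEmpty) 1 else chunks.head.length * 2)
        filled = 0
      }
      chunks.head(filled) = elem
      filled += 1
      this
    }
    def apply(i: Int): T = {
      var idx = i
      for (chunk <- chunks.reverse) {           // oldest (smallest) chunk first
        val len = if (chunk eq chunks.head) filled else chunk.length
        if (idx < len) return chunk(idx)
        idx -= len
      }
      throw new IndexOutOfBoundsException(i.toString)
    }
  }

  val dc = new DoublingChunks[Int]
  for (i <- 0 until 10) dc += i
  dc(7)   // == 7, found after skipping the 1-, 2- and 4-element chunks
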
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 6efff70606..decae62dd2 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -88,7 +88,7 @@ package object parallel {
def toParArray: ParArray[T]
}
- implicit def traversable2ops[T](t: TraversableOnce[T]) = new TraversableOps[T] {
+ implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
def isParallel = t.isInstanceOf[Parallel]
def isParIterable = t.isInstanceOf[ParIterable[_]]
def asParIterable = t.asInstanceOf[ParIterable[T]]
@@ -128,9 +128,9 @@ package object parallel {
/** A helper iterator for iterating very small array buffers.
* Automatically forwards the signal delegate when splitting.
*/
- private[parallel] class BufferIterator[T]
+ private[parallel] class BufferSplitter[T]
(private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling)
- extends ParIterableIterator[T] {
+ extends IterableSplitter[T] {
def hasNext = index < until
def next = {
val r = buffer(index)
@@ -138,12 +138,12 @@ package object parallel {
r
}
def remaining = until - index
- def dup = new BufferIterator(buffer, index, until, signalDelegate)
- def split: Seq[ParIterableIterator[T]] = if (remaining > 1) {
+ def dup = new BufferSplitter(buffer, index, until, signalDelegate)
+ def split: Seq[IterableSplitter[T]] = if (remaining > 1) {
val divsz = (until - index) / 2
Seq(
- new BufferIterator(buffer, index, index + divsz, signalDelegate),
- new BufferIterator(buffer, index + divsz, until, signalDelegate)
+ new BufferSplitter(buffer, index, index + divsz, signalDelegate),
+ new BufferSplitter(buffer, index + divsz, until, signalDelegate)
)
} else Seq(this)
private[parallel] override def debugInformation = {
@@ -186,7 +186,7 @@ package object parallel {
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
(private val bucketnumber: Int)
extends Combiner[Elem, To] {
- self: EnvironmentPassingCombiner[Elem, To] =>
+ //self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
protected var sz: Int = 0
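
BufferSplitter above (renamed from BufferIterator) implements the splitter contract: next/hasNext walk the range [index, until), remaining reports what is left, and split divides the remainder into two disjoint halves over the same underlying buffer, both inheriting the signal delegate. A stripped-down sketch of that halving scheme without the Signalling plumbing, using a hypothetical SimpleSplitter:

  import scala.collection.mutable.ArrayBuffer

  // Hypothetical SimpleSplitter: same halving scheme as BufferSplitter above,
  // minus the signal delegate and the IterableSplitter machinery.
  class SimpleSplitter[T](buffer: ArrayBuffer[T], private var index: Int, until: Int)
  extends Iterator[T] {
    def hasNext = index < until
    def next() = { val r = buffer(index); index += 1; r }
    def remaining = until - index
    def split: Seq[SimpleSplitter[T]] =
      if (remaining > 1) {
        val mid = index + remaining / 2
        Seq(new SimpleSplitter(buffer, index, mid), new SimpleSplitter(buffer, mid, until))
      } else Seq(this)
  }

  val buf = ArrayBuffer(1, 2, 3, 4, 5, 6)
  val s = new SimpleSplitter(buf, 0, buf.length)
  s.next()                                 // consume one element
  val Seq(left, right) = s.split
  left.toList                              // List(2, 3)
  right.toList                             // List(4, 5, 6)
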
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index b6efde6c89..f1f6e08254 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -176,10 +176,10 @@ object MurmurHash {
* where the order of appearance of elements does not matter.
* This is useful for hashing sets, for example.
*/
- def symmetricHash[T](xs: TraversableOnce[T], seed: Int) = {
+ def symmetricHash[T](xs: collection.TraversableOnce[T], seed: Int) = {
var a,b,n = 0
var c = 1
- xs.foreach(i => {
+ xs.seq.foreach(i => {
val h = i.##
a += h
b ^= h
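
The doc comment above promises a hash that ignores element order, so the same elements always hash the same way regardless of traversal order. A quick sanity check against the symmetricHash signature shown in this hunk (the seed value 42 is an arbitrary choice for the example):

  import scala.util.MurmurHash

  // Same elements, different traversal order: the commutative combination of
  // element hashes (the sums and xors in the loop above) yields the same result.
  val h1 = MurmurHash.symmetricHash(List(1, 2, 3), 42)
  val h2 = MurmurHash.symmetricHash(List(3, 2, 1), 42)
  assert(h1 == h2)
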
diff --git a/test/disabled/scalacheck/HashTrieSplit.scala b/test/disabled/scalacheck/HashTrieSplit.scala
new file mode 100644
index 0000000000..6b20efe12b
--- /dev/null
+++ b/test/disabled/scalacheck/HashTrieSplit.scala
@@ -0,0 +1,47 @@
+
+
+
+
+
+import collection._
+
+
+
+
+// checks whether hash tries split their iterators correctly
+// even after some elements have been traversed
+object Test {
+ def main(args: Array[String]) {
+ doesSplitOk
+ }
+
+ def doesSplitOk = {
+ val sz = 2000
+ var ht = new parallel.immutable.ParHashMap[Int, Int]
+ // println("creating trie")
+ for (i <- 0 until sz) ht += ((i + sz, i))
+ // println("created trie")
+ for (n <- 0 until (sz - 1)) {
+ // println("---------> n = " + n)
+ val pit = ht.parallelIterator
+ val pit2 = ht.parallelIterator
+ var i = 0
+ while (i < n) {
+ pit.next
+ pit2.next
+ i += 1
+ }
+ // println("splitting")
+ val pits = pit.split
+ val fst = pits(0).toSet
+ val snd = pits(1).toSet
+ val orig = pit2.toSet
+ if (orig.size != (fst.size + snd.size) || orig != (fst ++ snd)) {
+ println("Original: " + orig)
+ println("First: " + fst)
+ println("Second: " + snd)
+ assert(false)
+ }
+ }
+ }
+}
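
The loop above encodes one invariant: after n elements have been consumed, the pieces returned by split must still partition exactly the elements that remain. Restated as a small standalone helper (not part of the test file):

  // Restatement of the check performed above: the two halves returned by split
  // must neither overlap nor drop elements relative to an un-split iterator
  // positioned at the same point.
  def splitsPartition[T](halves: Seq[Iterator[T]], unsplit: Iterator[T]): Boolean = {
    val fst = halves(0).toSet
    val snd = halves(1).toSet
    val orig = unsplit.toSet
    orig.size == fst.size + snd.size && orig == (fst ++ snd)
  }
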
diff --git a/test/files/neg/t3774.check b/test/files/neg/t3774.check
index 59c63c4ee8..a3953133bc 100644
--- a/test/files/neg/t3774.check
+++ b/test/files/neg/t3774.check
@@ -1,6 +1,6 @@
t3774.scala:4: error: overloaded method value ++ with alternatives:
- [B1 >: List[Int]](xs: scala.collection.TraversableOnce[((Int, Int), B1)])scala.collection.immutable.Map[(Int, Int),B1] <and>
- [B >: ((Int, Int), List[Int]),That](that: scala.collection.TraversableOnce[B])(implicit bf: scala.collection.generic.CanBuildFrom[scala.collection.immutable.Map[(Int, Int),List[Int]],B,That])That
+ [B1 >: List[Int]](xs: scala.collection.AnyTraversableOnce[((Int, Int), B1)])scala.collection.immutable.Map[(Int, Int),B1] <and>
+ [B >: ((Int, Int), List[Int]),That](that: scala.collection.AnyTraversableOnce[B])(implicit bf: scala.collection.generic.CanBuildFrom[scala.collection.immutable.Map[(Int, Int),List[Int]],B,That])That
cannot be applied to (scala.collection.immutable.IndexedSeq[((Int, Int), scala.collection.immutable.Range.Inclusive)])
Map[(Int,Int),List[Int]]() ++ (for(x <- 0 to 1 ; y <- 0 to 1) yield {(x,y)-> (0 to 1)})
^
diff --git a/test/files/pos/spec-List.scala b/test/files/pos/spec-List.scala
index e3055f3051..6a39632451 100644
--- a/test/files/pos/spec-List.scala
+++ b/test/files/pos/spec-List.scala
@@ -144,7 +144,7 @@ sealed trait List[@specialized +A] extends LinearSeq[A]
/** Create a new list which contains all elements of this list
* followed by all elements of Traversable `that'
*/
- override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](xs: AnyTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
if (b.isInstanceOf[ListBuffer[_]]) (this ::: xs.toList).asInstanceOf[That]
else super.++(xs)
diff --git a/test/files/run/pc-conversions.scala b/test/files/run/pc-conversions.scala
index 3121d82944..64e0720572 100644
--- a/test/files/run/pc-conversions.scala
+++ b/test/files/run/pc-conversions.scala
@@ -7,7 +7,7 @@ import collection._
object Test {
def main(args: Array[String]) {
- // disabled
+ testConversions
}
def testConversions {
@@ -53,9 +53,9 @@ object Test {
def assertSeq[T](pc: parallel.ParIterable[T]) = assert(pc.seq == pc)
- def assertPar[T, P <: Parallel](xs: Iterable[T]) = assert(xs == xs.par)
+ def assertPar[T, P <: Parallel](xs: AnyIterable[T]) = assert(xs == xs.par)
- def assertToPar[K, V](xs: Traversable[(K, V)]) {
+ def assertToPar[K, V](xs: AnyTraversable[(K, V)]) {
xs match {
case _: Seq[_] =>
assert(xs.toIterable.par == xs)
@@ -73,7 +73,7 @@ object Test {
assert(xs.par.toMap == xs.toMap)
}
- def assertToParWoMap[T](xs: Seq[T]) {
+ def assertToParWoMap[T](xs: AnySeq[T]) {
assert(xs.toIterable.par == xs.toIterable)
assert(xs.par.toIterable == xs.toIterable)
diff --git a/test/files/scalacheck/HashTrieSplit.scala b/test/files/scalacheck/HashTrieSplit.scala
index 6b20efe12b..e959a3d535 100644
--- a/test/files/scalacheck/HashTrieSplit.scala
+++ b/test/files/scalacheck/HashTrieSplit.scala
@@ -23,8 +23,8 @@ object Test {
// println("created trie")
for (n <- 0 until (sz - 1)) {
// println("---------> n = " + n)
- val pit = ht.parallelIterator
- val pit2 = ht.parallelIterator
+ val pit = ht.splitter
+ val pit2 = ht.splitter
var i = 0
while (i < n) {
pit.next
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 744d22f05c..bdab66449d 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -71,11 +71,11 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
(inst, fromTraversable(inst), modif)
}
- def areEqual(t1: Traversable[T], t2: Traversable[T]) = if (hasStrictOrder) {
+ def areEqual(t1: AnyTraversable[T], t2: AnyTraversable[T]) = if (hasStrictOrder) {
t1 == t2 && t2 == t1
} else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
- case (m1: Map[_, _], m2: Map[_, _]) => m1 == m2 && m2 == m1
- case (i1: Iterable[_], i2: Iterable[_]) =>
+ case (m1: AnyMap[_, _], m2: AnyMap[_, _]) => m1 == m2 && m2 == m1
+ case (i1: AnyIterable[_], i2: AnyIterable[_]) =>
val i1s = i1.toSet
val i2s = i2.toSet
i1s == i2s && i2s == i1s
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 4be7b0ec4d..103b5e2993 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -30,16 +30,6 @@ class ParCollProperties extends Properties("Parallel collections") {
// parallel vectors
include(immutable.IntParallelVectorCheck)
-
- /* Views */
-
- // parallel array views
-
- // parallel immutable hash map views
-
- // parallel mutable hash map views
-
- // parallel vector views
}