Diffstat (limited to 'src/library/scala/collection')
-rw-r--r--  src/library/scala/collection/BitSetLike.scala | 21
-rw-r--r--  src/library/scala/collection/IndexedSeqLike.scala | 3
-rw-r--r--  src/library/scala/collection/IndexedSeqOptimized.scala | 1
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 11
-rw-r--r--  src/library/scala/collection/IterableProxyLike.scala | 1
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 5
-rw-r--r--  src/library/scala/collection/Iterator.scala | 131
-rw-r--r--  src/library/scala/collection/JavaConversions.scala | 10
-rw-r--r--  src/library/scala/collection/JavaConverters.scala | 44
-rw-r--r--  src/library/scala/collection/LinearSeqOptimized.scala | 6
-rw-r--r--  src/library/scala/collection/MapLike.scala | 23
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 5
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/SetLike.scala | 17
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 4
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/concurrent/Map.scala | 11
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 19
-rw-r--r--  src/library/scala/collection/convert/DecorateAsJava.scala | 44
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 56
-rw-r--r--  src/library/scala/collection/convert/WrapAsScala.scala | 47
-rw-r--r--  src/library/scala/collection/convert/Wrappers.scala | 9
-rw-r--r--  src/library/scala/collection/generic/GenericParTemplate.scala | 1
-rw-r--r--  src/library/scala/collection/generic/MapFactory.scala | 2
-rw-r--r--  src/library/scala/collection/generic/MutableSortedMapFactory.scala | 24
-rw-r--r--  src/library/scala/collection/generic/ParFactory.scala | 1
-rw-r--r--  src/library/scala/collection/generic/ParSetFactory.scala | 1
-rw-r--r--  src/library/scala/collection/generic/SetFactory.scala | 1
-rw-r--r--  src/library/scala/collection/generic/package.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 31
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 7
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 37
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 17
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 28
-rw-r--r--  src/library/scala/collection/immutable/ListSet.scala | 10
-rw-r--r--  src/library/scala/collection/immutable/Map.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/Set.scala | 9
-rw-r--r--  src/library/scala/collection/immutable/SortedMap.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/SortedSet.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 114
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 89
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 250
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 26
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 13
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuilder.scala | 154
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/ArraySeq.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/BitSet.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/BufferLike.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/BufferProxy.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/Builder.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/GrowingBuilder.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/LazyBuilder.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 25
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala | 26
-rw-r--r--  src/library/scala/collection/mutable/MapBuilder.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 12
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/RedBlackTree.scala | 580
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ReusableBuilder.scala | 51
-rw-r--r--  src/library/scala/collection/mutable/SetBuilder.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/SetLike.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/SortedMap.scala | 57
-rw-r--r--  src/library/scala/collection/mutable/SortedSet.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/StringBuilder.scala | 8
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedStack.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/TreeMap.scala | 185
-rw-r--r--  src/library/scala/collection/mutable/TreeSet.scala | 181
-rw-r--r--  src/library/scala/collection/mutable/WrappedArrayBuilder.scala | 13
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 20
-rw-r--r--  src/library/scala/collection/parallel/ParMap.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 15
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 12
-rw-r--r--  src/library/scala/collection/parallel/TaskSupport.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParMap.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParRange.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala | 8
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 12
93 files changed, 1862 insertions(+), 858 deletions(-)
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 29369447d1..209b00ebf9 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -204,6 +204,27 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
def subsetOf(other: BitSet): Boolean =
(0 until nwords) forall (idx => (this.word(idx) & ~ other.word(idx)) == 0L)
+ override def head: Int = {
+ val n = nwords
+ var i = 0
+ while (i < n) {
+ val wi = word(i)
+ if (wi != 0L) return WordLength*i + java.lang.Long.numberOfTrailingZeros(wi)
+ i += 1
+ }
+ throw new NoSuchElementException("Empty BitSet")
+ }
+
+ override def last: Int = {
+ var i = nwords - 1
+ while (i >= 0) {
+ val wi = word(i)
+ if (wi != 0L) return WordLength*i + 63 - java.lang.Long.numberOfLeadingZeros(wi)
+ i -= 1
+ }
+ throw new NoSuchElementException("Empty BitSet")
+ }
+
override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
sb append start
var pre = ""
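The new head/last overrides scan the bit words directly instead of going through the generic iterator. A minimal usage sketch (illustrative only, not part of the patch):

    val bs = scala.collection.immutable.BitSet(5, 64, 130)
    bs.head   // 5   -- lowest set bit, via numberOfTrailingZeros of the first non-zero word
    bs.last   // 130 -- highest set bit, via numberOfLeadingZeros of the last non-zero word
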
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 18c9175ee1..f4bf58ffe3 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -9,9 +9,6 @@
package scala
package collection
-import mutable.ArrayBuffer
-import scala.annotation.tailrec
-
/** A template trait for indexed sequences of type `IndexedSeq[A]`.
*
* $indexedSeqInfo
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index a7e06b4d1a..3765b2fff0 100644
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -10,7 +10,6 @@ package scala
package collection
import generic._
-import mutable.ArrayBuffer
import scala.annotation.tailrec
/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index ecf64624e8..b89720da78 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -10,8 +10,7 @@ package scala
package collection
import generic._
-import immutable.{ List, Stream }
-import scala.annotation.unchecked.uncheckedVariance
+import immutable.Stream
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
@@ -83,8 +82,8 @@ self =>
iterator.foldRight(z)(op)
override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B =
iterator.reduceRight(op)
-
-
+
+
/** Returns this $coll as an iterable collection.
*
* A new collection will not be built; lazy collections will stay lazy.
@@ -94,7 +93,7 @@ self =>
*/
override /*TraversableLike*/ def toIterable: Iterable[A] =
thisCollection
-
+
/** Returns an Iterator over the elements in this $coll. Produces the same
* result as `iterator`.
* $willNotTerminateInf
@@ -102,7 +101,7 @@ self =>
*/
@deprecatedOverriding("toIterator should stay consistent with iterator for all Iterables: override iterator instead.", "2.11.0")
override def toIterator: Iterator[A] = iterator
-
+
override /*TraversableLike*/ def head: A =
iterator.next()
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 90e630ee28..334b511fb9 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -12,7 +12,6 @@ package scala
package collection
import generic._
-import mutable.Buffer
// Methods could be printed by cat IterableLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index b84d90c51b..c254ed7480 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -69,6 +69,10 @@ trait IterableViewLike[+A,
trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
def iterator = self.iterator ++ rest
}
+
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ def iterator = fst.toIterator ++ self
+ }
trait Filtered extends super.Filtered with Transformed[A] {
def iterator = self.iterator filter pred
@@ -110,6 +114,7 @@ trait IterableViewLike[+A,
} with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 8d88b1c6b1..518bba6b6d 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -12,8 +12,6 @@ package collection
import mutable.ArrayBuffer
import scala.annotation.{tailrec, migration}
import immutable.Stream
-import scala.collection.generic.CanBuildFrom
-import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -235,6 +233,50 @@ object Iterator {
override def ++[B >: A](that: => GenTraversableOnce[B]) =
new ConcatIterator(this, Vector(() => that.toIterator))
}
+
+ /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded.
+ * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing.
+ */
+ private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] {
+ private var remaining = limit
+ private var dropping = start
+ @inline private def unbounded = remaining < 0
+ private def skip(): Unit =
+ while (dropping > 0) {
+ if (underlying.hasNext) {
+ underlying.next()
+ dropping -= 1
+ } else
+ dropping = 0
+ }
+ def hasNext = { skip(); remaining != 0 && underlying.hasNext }
+ def next() = {
+ skip()
+ if (remaining > 0) {
+ remaining -= 1
+ underlying.next()
+ }
+ else if (unbounded) underlying.next()
+ else empty.next()
+ }
+ override protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
+ val lo = from max 0
+ def adjustedBound =
+ if (unbounded) -1
+ else 0 max (remaining - lo)
+ val rest =
+ if (until < 0) adjustedBound // respect current bound, if any
+ else if (until <= lo) 0 // empty
+ else if (unbounded) until - lo // now finite
+ else adjustedBound min (until - lo) // keep lesser bound
+ if (rest == 0) empty
+ else {
+ dropping += lo
+ remaining = rest
+ this
+ }
+ }
+ }
}
import Iterator.empty
@@ -346,11 +388,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Selects first ''n'' values of this iterator.
*
* @param n the number of values to take
- * @return an iterator producing only of the first `n` values of this iterator, or else the
+ * @return an iterator producing only the first `n` values of this iterator, or else the
* whole iterator, if it produces fewer than `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def take(n: Int): Iterator[A] = slice(0, n)
+ def take(n: Int): Iterator[A] = sliceIterator(0, n max 0)
/** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller.
*
@@ -371,29 +413,24 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Creates an iterator returning an interval of the values produced by this iterator.
*
* @param from the index of the first element in this iterator which forms part of the slice.
- * @param until the index of the first element following the slice.
+ * If negative, the slice starts at zero.
+ * @param until the index of the first element following the slice. If negative, the slice is empty.
* @return an iterator which advances this iterator past the first `from` elements using `drop`,
* and then takes `until - from` elements, using `take`.
* @note Reuse: $consumesAndProducesIterator
*/
- def slice(from: Int, until: Int): Iterator[A] = {
+ def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0)
+
+ /** Creates an optionally bounded slice, unbounded if `until` is negative. */
+ protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
val lo = from max 0
- var toDrop = lo
- while (toDrop > 0 && self.hasNext) {
- self.next()
- toDrop -= 1
- }
+ val rest =
+ if (until < 0) -1 // unbounded
+ else if (until <= lo) 0 // empty
+ else until - lo // finite
- new AbstractIterator[A] {
- private var remaining = until - lo
- def hasNext = remaining > 0 && self.hasNext
- def next(): A =
- if (remaining > 0) {
- remaining -= 1
- self.next()
- }
- else empty.next()
- }
+ if (rest == 0) empty
+ else new Iterator.SliceIterator(this, lo, rest)
}
/** Creates a new iterator that maps all produced values of this iterator
@@ -521,13 +558,13 @@ trait Iterator[+A] extends TraversableOnce[A] {
def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] {
// Manually buffer to avoid extra layer of wrapping with buffered
private[this] var hd: A = _
-
+
// Little state machine to keep track of where we are
// Seek = 0; Found = 1; Empty = -1
// Not in vals because scalac won't make them static (@inline def only works with -optimize)
// BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC!
private[this] var status = 0/*Seek*/
-
+
def hasNext = {
while (status == 0/*Seek*/) {
if (self.hasNext) {
@@ -700,9 +737,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
}
-
+
val leading = new Leading
-
+
val trailing = new AbstractIterator[A] {
private[this] var myLeading = leading
/* Status flags meanings:
@@ -738,7 +775,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
else Iterator.empty.next()
}
-
+
override def toString = "unknown-if-empty iterator"
}
@@ -772,7 +809,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
status = 1
false
}
- def next() =
+ def next() =
if (hasNext) {
if (status == 1) self.next()
else {
@@ -955,8 +992,25 @@ trait Iterator[+A] extends TraversableOnce[A] {
* or -1 if such an element does not exist until the end of the iterator is reached.
* @note Reuse: $consumesIterator
*/
- def indexWhere(p: A => Boolean): Int = {
+ def indexWhere(p: A => Boolean): Int = indexWhere(p, 0)
+
+ /** Returns the index of the first produced value satisfying a predicate, or -1, after or at
+ * some start index.
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate to test values
+ * @param from the start index
+ * @return the index `>= from` of the first produced value satisfying `p`,
+ * or -1 if such an element does not exist until the end of the iterator is reached.
+ * @note Reuse: $consumesIterator
+ */
+ def indexWhere(p: A => Boolean, from: Int): Int = {
var i = 0
+ while (i < from && hasNext) {
+ next()
+ i += 1
+ }
+
while (hasNext) {
if (p(next())) return i
i += 1
@@ -973,8 +1027,26 @@ trait Iterator[+A] extends TraversableOnce[A] {
* or -1 if such an element does not exist until the end of the iterator is reached.
* @note Reuse: $consumesIterator
*/
- def indexOf[B >: A](elem: B): Int = {
+ def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
+
+ /** Returns the index of the first occurrence of the specified object in this iterable object
+ * after or at some start index.
+ * $mayNotTerminateInf
+ *
+ * @param elem element to search for.
+ * @param from the start index
+ * @return the index `>= from` of the first occurrence of `elem` in the values produced by this
+ * iterator, or -1 if such an element does not exist until the end of the iterator is
+ * reached.
+ * @note Reuse: $consumesIterator
+ */
+ def indexOf[B >: A](elem: B, from: Int): Int = {
var i = 0
+ while (i < from && hasNext) {
+ next()
+ i += 1
+ }
+
while (hasNext) {
if (next() == elem) return i
i += 1
@@ -1289,7 +1361,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
* $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
- require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}")
var i = start
val end = start + math.min(len, xs.length - start)
while (i < end && hasNext) {
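How the new SliceIterator and the from-based indexWhere/indexOf overloads behave; a hedged sketch of expected results, not code from the patch:

    val s = Iterator.from(1).slice(10, 20).take(5)   // slicing a slice now reuses one SliceIterator
    s.toList                                         // List(11, 12, 13, 14, 15)
    Iterator(1, 2, 3, 2).indexOf(2, 2)               // 3 -- search starts at index 2
    Iterator(1, 2, 3, 2).indexWhere(_ > 1, 2)        // 2
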
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 7bfa60771f..f9a34f78d5 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -16,12 +16,12 @@ import convert._
*
* The following conversions are supported:
*{{{
- * scala.collection.Iterable <=> java.lang.Iterable
- * scala.collection.Iterable <=> java.util.Collection
- * scala.collection.Iterator <=> java.util.{ Iterator, Enumeration }
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterable <=> java.util.Collection
+ * scala.collection.Iterator <=> java.util.{ Iterator, Enumeration }
* scala.collection.mutable.Buffer <=> java.util.List
- * scala.collection.mutable.Set <=> java.util.Set
- * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
* scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
*}}}
* In all cases, converting from a source type to a target type and back
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index 86e86d4584..58fbc5afa5 100644
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -19,14 +19,14 @@ import convert._
* Scala and Java collections using `asScala` and `asJava` methods.
*
* The following conversions are supported via `asJava`, `asScala`
- *
- * - `scala.collection.Iterable` <=> `java.lang.Iterable`
- * - `scala.collection.Iterator` <=> `java.util.Iterator`
- * - `scala.collection.mutable.Buffer` <=> `java.util.List`
- * - `scala.collection.mutable.Set` <=> `java.util.Set`
- * - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
- *
+ *{{{
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterator <=> java.util.Iterator
+ * scala.collection.mutable.Buffer <=> java.util.List
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.Map
+ * scala.collection.mutable.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *}}}
* In all cases, converting from a source type to a target type and back
* again will return the original source object, e.g.
* {{{
@@ -40,22 +40,22 @@ import convert._
* The following conversions are also supported, but the
* direction from Scala to Java is done by the more specifically named methods:
* `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
- *
- * - `scala.collection.Iterable` <=> `java.util.Collection`
- * - `scala.collection.Iterator` <=> `java.util.Enumeration`
- * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
- *
+ *{{{
+ * scala.collection.Iterable <=> java.util.Collection
+ * scala.collection.Iterator <=> java.util.Enumeration
+ * scala.collection.mutable.Map <=> java.util.Dictionary
+ *}}}
* In addition, the following one way conversions are provided via `asJava`:
- *
- * - `scala.collection.Seq` => `java.util.List`
- * - `scala.collection.mutable.Seq` => `java.util.List`
- * - `scala.collection.Set` => `java.util.Set`
- * - `scala.collection.Map` => `java.util.Map`
- *
+ *{{{
+ * scala.collection.Seq => java.util.List
+ * scala.collection.mutable.Seq => java.util.List
+ * scala.collection.Set => java.util.Set
+ * scala.collection.Map => java.util.Map
+ *}}}
* The following one way conversion is provided via `asScala`:
- *
- * - `java.util.Properties` => `scala.collection.mutable.Map`
- *
+ *{{{
+ * java.util.Properties => scala.collection.mutable.Map
+ *}}}
* @since 2.8.1
*/
object JavaConverters extends DecorateAsJava with DecorateAsScala
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index b7af8840a9..a3860f10a4 100644
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -9,8 +9,6 @@
package scala
package collection
-import mutable.ListBuffer
-import immutable.List
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
@@ -133,9 +131,9 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
else op(head, tail.foldRight(z)(op))
override /*TraversableLike*/
- def reduceLeft[B >: A](f: (B, A) => B): B =
+ def reduceLeft[B >: A](@deprecatedName('f) op: (B, A) => B): B =
if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
- else tail.foldLeft[B](head)(f)
+ else tail.foldLeft[B](head)(op)
override /*IterableLike*/
def reduceRight[B >: A](op: (A, B) => B): B =
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 99ed67325c..4ac87b29a9 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder, MapBuilder }
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.ParMap
/** A template trait for maps, which associate keys with values.
@@ -230,11 +230,15 @@ self =>
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[U](f: ((A, B)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
+ override def contains(key: A) = p(key) && self.contains(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
/** Filters this map by retaining only keys satisfying a predicate.
+ *
+ * '''Note''': the predicate must accept any key of type `A`, not just those already
+ * present in the map, as the predicate is tested before the underlying map is queried.
+ *
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
@@ -319,11 +323,20 @@ self =>
res
}
- /* Overridden for efficiency. */
- override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
+ override def toSeq: Seq[(A, B)] = {
+ if (isEmpty) Vector.empty[(A, B)]
+ else {
+ // Default appropriate for immutable collections; mutable collections override this
+ val vb = Vector.newBuilder[(A, B)]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
- copyToBuffer(result)
+ // Faster to let the map iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
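Two user-visible consequences of the MapLike changes, sketched under the assumption of an ordinary immutable Map (not code from the patch):

    val m  = Map(1 -> "a", 2 -> "b")
    val fk = m.filterKeys(_ < 10)   // the predicate is now tested before the map is queried,
    fk.contains(42)                 // so it must accept arbitrary keys; here: false
    m.toSeq                         // now backed by a Vector rather than an ArrayBuffer
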
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index b775480532..a26765027c 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -9,11 +9,10 @@
package scala
package collection
-import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.{ min, max, Ordering }
+import scala.math.Ordering
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
@@ -146,7 +145,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* more than one way to generate the same subsequence, only one will be returned.
*
* For example, `"xyyy"` has three different ways to generate `"xy"` depending on
- * whether the first, second, or third `"y"` is selected. However, since all are
+ * whether the first, second, or third `"y"` is selected. However, since all are
* identical, only one will be chosen. Which of the three will be taken is an
* implementation detail that is not defined.
*
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 3473c8aff1..1fbcb6531e 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -96,6 +96,14 @@ trait SeqViewLike[+A,
if (idx < self.length) self(idx) else restSeq(idx - self.length)
}
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ protected[this] lazy val fstSeq = fst.toSeq
+ def length: Int = fstSeq.length + self.length
+ def apply(idx: Int): B =
+ if (idx < fstSeq.length) fstSeq(idx)
+ else self.apply(idx - fstSeq.length)
+ }
+
trait Filtered extends super.Filtered with Transformed[A] {
protected[this] lazy val index = {
var len = 0
@@ -179,21 +187,12 @@ trait SeqViewLike[+A,
final override protected[this] def viewIdentifier = "P"
}
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
-
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -212,7 +211,6 @@ trait SeqViewLike[+A,
val patch = _patch
val replaced = _replaced
} with AbstractTransformed[B] with Patched[B]
- protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
// see comment in IterableViewLike.
protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
@@ -242,7 +240,7 @@ trait SeqViewLike[+A,
}
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
- newPrepended(elem).asInstanceOf[That]
+ newPrepended(elem :: Nil).asInstanceOf[That]
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
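With Prepended generalized to take a whole collection, prepending onto a view stays lazy and composes with the other transformers. A small sketch (values are illustrative):

    val v = (1 to 5).view
    (0 +: v)(0)                  // 0 -- +: now routes through the collection-based Prepended
    (List(-1, 0) ++: v).toList   // List(-1, 0, 1, 2, 3, 4, 5)
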
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index f8ac1d754d..9143c40870 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder, SetBuilder }
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.ParSet
/** A template trait for sets.
@@ -77,11 +77,20 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /* Overridden for efficiency. */
- override def toSeq: Seq[A] = toBuffer[A]
+ // Default collection type appropriate for immutable collections; mutable collections override this
+ override def toSeq: Seq[A] = {
+ if (isEmpty) Vector.empty[A]
+ else {
+ val vb = Vector.newBuilder[A]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
- copyToBuffer(result)
+ // Faster to let the map iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index bbbc33b3f5..d914f2e0ff 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder }
-import scala.annotation.{tailrec, migration, bridge}
+import scala.annotation.migration
import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
import scala.language.higherKinds
@@ -242,7 +242,7 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
- private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+ private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
val b = newBuilder
for (x <- this)
if (p(x) != isFlipped) b += x
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 75c0d82922..b87fcd166e 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,7 +9,7 @@
package scala
package collection
-import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import mutable.{ Buffer, Builder, ArrayBuffer }
import generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
import scala.language.{implicitConversions, higherKinds}
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 5926c69ebf..0901d749c3 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -189,6 +189,15 @@ trait TraversableViewLike[+A,
}
final override protected[this] def viewIdentifier = "A"
}
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ fst foreach f
+ self foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
@@ -222,11 +231,15 @@ trait TraversableViewLike[+A,
final override protected[this] def viewIdentifier = "D"
}
- override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newAppended(xs.seq.toTraversable).asInstanceOf[That]
-// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
-// else super.++[B, That](that)(bf)
- }
+
+ override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs.seq.toTraversable).asInstanceOf[That]
+
+ // Need second one because of optimization in TraversableLike
+ override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs).asInstanceOf[That]
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
@@ -253,6 +266,7 @@ trait TraversableViewLike[+A,
*/
protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index cfb567abe9..f27dfd57fc 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -86,4 +86,15 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
* @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
*/
def replace(k: A, v: B): Option[B]
+
+ override def getOrElseUpdate(key: A, op: =>B): B = get(key) match {
+ case Some(v) => v
+ case None =>
+ val v = op
+ putIfAbsent(key, v) match {
+ case Some(nv) => nv
+ case None => v
+ }
+ }
+
}
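The getOrElseUpdate default is now expressed via putIfAbsent, so callers racing on the same key all observe the single value that won the insert. A sketch, assuming TrieMap as the concrete concurrent.Map:

    import scala.collection.concurrent.TrieMap
    val cache = TrieMap.empty[String, Int]
    cache.getOrElseUpdate("k", 1)   // 1 -- inserted via putIfAbsent
    cache.getOrElseUpdate("k", 2)   // 1 -- existing value is returned; op is not evaluated
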
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index bcfea7a463..d113cbfcdf 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -11,13 +11,11 @@ package collection
package concurrent
import java.util.concurrent.atomic._
-import scala.collection.immutable.{ ListMap => ImmutableListMap }
import scala.collection.parallel.mutable.ParTrieMap
import scala.util.hashing.Hashing
import scala.util.control.ControlThrowable
import generic._
import scala.annotation.tailrec
-import scala.annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
@@ -471,7 +469,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba
val offset =
if (array.length > 0)
//util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
- scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length)
else 0
while (i < array.length) {
val pos = (i + offset) % array.length
@@ -641,7 +639,8 @@ extends scala.collection.concurrent.Map[K, V]
private var rootupdater = rtupd
def hashing = hashingobj
def equality = equalityobj
- @volatile var root = r
+ @deprecated("This field will be made private", "2.12.0")
+ @volatile /*private*/ var root = r
def this(hashf: Hashing[K], ef: Equiv[K]) = this(
INode.newRootNode,
@@ -685,11 +684,14 @@ extends scala.collection.concurrent.Map[K, V]
} while (obj != TrieMapSerializationEnd)
}
- def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+ @deprecated("This method will be made private", "2.12.0")
+ /*private*/ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
- def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+ @deprecated("This method will be made private", "2.12.0")
+ /*private[collection]*/ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
- def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ @deprecated("This method will be made private", "2.12.0")
+ /*private[concurrent]*/ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
val r = /*READ*/root
r match {
case in: INode[K, V] => in
@@ -884,7 +886,7 @@ extends scala.collection.concurrent.Map[K, V]
*
* If the specified mapping function throws an exception,
* that exception is rethrown.
- *
+ *
* Note: This method will invoke op at most once.
* However, `op` may be invoked without the result being added to the map if
* a concurrent process is also trying to add a value corresponding to the
@@ -1083,6 +1085,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
Seq(this)
}
+ @deprecated("This method will be removed", "2.12.0")
def printDebug() {
println("ctrie iterator")
println(stackpos.mkString(","))
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index e6aa5da067..0ed67a86d7 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -20,14 +20,14 @@ import scala.language.implicitConversions
* Scala and Java collections using `asScala` and `asJava` methods.
*
* The following conversions are supported via `asJava`, `asScala`
- *
- * - `scala.collection.Iterable` <=> `java.lang.Iterable`
- * - `scala.collection.Iterator` <=> `java.util.Iterator`
- * - `scala.collection.mutable.Buffer` <=> `java.util.List`
- * - `scala.collection.mutable.Set` <=> `java.util.Set`
- * - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
- *
+ *{{{
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterator <=> java.util.Iterator
+ * scala.collection.mutable.Buffer <=> java.util.List
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.Map
+ * scala.collection.mutable.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *}}}
* In all cases, converting from a source type to a target type and back
* again will return the original source object, e.g.
* {{{
@@ -41,22 +41,22 @@ import scala.language.implicitConversions
* The following conversions are also supported, but the
* direction from Scala to Java is done by the more specifically named methods:
* `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
- *
- * - `scala.collection.Iterable` <=> `java.util.Collection`
- * - `scala.collection.Iterator` <=> `java.util.Enumeration`
- * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
- *
+ *{{{
+ * scala.collection.Iterable <=> java.util.Collection
+ * scala.collection.Iterator <=> java.util.Enumeration
+ * scala.collection.mutable.Map <=> java.util.Dictionary
+ *}}}
* In addition, the following one way conversions are provided via `asJava`:
- *
- * - `scala.collection.Seq` => `java.util.List`
- * - `scala.collection.mutable.Seq` => `java.util.List`
- * - `scala.collection.Set` => `java.util.Set`
- * - `scala.collection.Map` => `java.util.Map`
- *
+ *{{{
+ * scala.collection.Seq => java.util.List
+ * scala.collection.mutable.Seq => java.util.List
+ * scala.collection.Set => java.util.Set
+ * scala.collection.Map => java.util.Map
+ *}}}
* The following one way conversion is provided via `asScala`:
- *
- * - `java.util.Properties` => `scala.collection.mutable.Map`
- *
+ *{{{
+ * java.util.Properties => scala.collection.mutable.Map
+ *}}}
* @since 2.8.1
*/
trait DecorateAsJava {
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 9916fe9843..e97a2ff1fc 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -30,8 +30,9 @@ trait WrapAsJava {
* @return A Java Iterator view of the argument.
*/
implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
- case _ => IteratorWrapper(it)
+ case null => null
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
}
/**
@@ -48,8 +49,9 @@ trait WrapAsJava {
* @return A Java Enumeration view of the argument.
*/
implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case null => null
case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
- case _ => IteratorWrapper(it)
+ case _ => IteratorWrapper(it)
}
/**
@@ -66,8 +68,9 @@ trait WrapAsJava {
* @return A Java Iterable view of the argument.
*/
implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
- case _ => IterableWrapper(i)
+ case null => null
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
}
/**
@@ -82,8 +85,9 @@ trait WrapAsJava {
* @return A Java Collection view of the argument.
*/
implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
- case _ => new IterableWrapper(it)
+ case null => null
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
}
/**
@@ -100,8 +104,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
}
/**
@@ -118,8 +123,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
}
/**
@@ -136,8 +142,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
- case _ => new SeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
}
/**
@@ -154,8 +161,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
+ case _ => new MutableSetWrapper(s)
}
/**
@@ -172,8 +180,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
+ case _ => new SetWrapper(s)
}
/**
@@ -190,9 +199,9 @@ trait WrapAsJava {
* @return A Java Map view of the argument.
*/
implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
+ case _ => new MutableMapWrapper(m)
}
/**
@@ -210,9 +219,9 @@ trait WrapAsJava {
* @return A Java `Dictionary` view of the argument.
*/
implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
+ case null => null
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
}
/**
@@ -230,9 +239,9 @@ trait WrapAsJava {
* @return A Java `Map` view of the argument.
*/
implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
- case _ => new MapWrapper(m)
+ case _ => new MapWrapper(m)
}
/**
@@ -251,8 +260,9 @@ trait WrapAsJava {
* @return A Java `ConcurrentMap` view of the argument.
*/
implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case null => null
case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
+ case _ => new ConcurrentMapWrapper(m)
}
}
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ab151a6778..ee161dcc87 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -30,8 +30,9 @@ trait WrapAsScala {
* @return A Scala `Iterator` view of the argument.
*/
implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(it)
+ case _ => JIteratorWrapper(it)
}
/**
@@ -48,8 +49,9 @@ trait WrapAsScala {
* @return A Scala Iterator view of the argument.
*/
implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
+ case _ => JEnumerationWrapper(i)
}
/**
@@ -67,8 +69,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
+ case _ => JIterableWrapper(i)
}
/**
@@ -82,8 +85,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
+ case _ => JCollectionWrapper(i)
}
/**
@@ -101,8 +105,9 @@ trait WrapAsScala {
* @return A Scala mutable `Buffer` view of the argument.
*/
implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
+ case null => null
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ => new JListWrapper(l)
}
/**
@@ -119,8 +124,9 @@ trait WrapAsScala {
* @return A Scala mutable Set view of the argument.
*/
implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case null => null
case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
+ case _ => new JSetWrapper(s)
}
/**
@@ -133,20 +139,20 @@ trait WrapAsScala {
* If the Java `Map` was previously obtained from an implicit or
* explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
* the original Scala Map will be returned.
- *
+ *
* If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
- * it is your responsibility to wrap all
+ * it is your responsibility to wrap all
* non-atomic operations with `underlying.synchronized`.
* This includes `get`, as `java.util.Map`'s API does not allow for an
* atomic `get` when `null` values may be present.
- *
+ *
* @param m The Map to be converted.
* @return A Scala mutable Map view of the argument.
*/
implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
+ case _ => new JMapWrapper(m)
}
/**
@@ -163,24 +169,26 @@ trait WrapAsScala {
* @return A Scala mutable ConcurrentMap view of the argument.
*/
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
+ case null => null
+ case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
}
/**
* Implicitly converts a Java `Dictionary` to a Scala mutable
- * `Map[String, String]`.
+ * `Map`.
*
- * The returned Scala `Map[String, String]` is backed by the provided Java
+ * The returned Scala `Map` is backed by the provided Java
* `Dictionary` and any side-effects of using it via the Scala interface
* will be visible via the Java interface and vice versa.
*
* @param p The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
+ * @return A Scala mutable Map view of the argument.
*/
implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case null => null
case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
+ case _ => new JDictionaryWrapper(p)
}
/**
@@ -194,7 +202,8 @@ trait WrapAsScala {
* @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
+ case null => null
+ case _ => new JPropertiesWrapper(p)
}
}
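Both converter directions now pass null through instead of wrapping it. A hedged sketch of the new behaviour:

    import scala.collection.JavaConversions._
    val jl: java.util.List[String] = null
    val buf: scala.collection.mutable.Buffer[String] = jl   // implicit asScalaBuffer now yields null
    assert(buf == null)                                      // previously: a wrapper around null, failing later on use
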
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index e829a0215b..3edc5ba1b4 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -102,9 +102,9 @@ private[collection] trait Wrappers {
override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
- // Note various overrides to avoid performance gotchas.
- class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
- self =>
+ @SerialVersionUID(1L)
+ class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self =>
+ // Note various overrides to avoid performance gotchas.
override def contains(o: Object): Boolean = {
try { underlying.contains(o.asInstanceOf[A]) }
catch { case cce: ClassCastException => false }
@@ -165,7 +165,8 @@ private[collection] trait Wrappers {
new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))
}
- class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
+ @SerialVersionUID(1L)
+ class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] with Serializable { self =>
override def size = underlying.size
override def get(key: AnyRef): B = try {
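Marking SetWrapper and MapWrapper as Serializable means asJava views of serializable Scala collections can themselves be serialized. A sketch, assuming the wrapped elements are serializable:

    import scala.collection.JavaConverters._
    import java.io.{ByteArrayOutputStream, ObjectOutputStream}
    val jset = Set(1, 2, 3).asJava                                         // a SetWrapper
    new ObjectOutputStream(new ByteArrayOutputStream).writeObject(jset)    // no NotSerializableException
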
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index b9b7043270..44a778a953 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -13,7 +13,6 @@ package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
-import scala.collection.parallel.TaskSupport
import scala.annotation.unchecked.uncheckedVariance
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index b9f3d4b010..255d695303 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -10,8 +10,6 @@ package scala
package collection
package generic
-
-import mutable.{Builder, MapBuilder}
import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
diff --git a/src/library/scala/collection/generic/MutableSortedMapFactory.scala b/src/library/scala/collection/generic/MutableSortedMapFactory.scala
new file mode 100644
index 0000000000..b6fa933ca8
--- /dev/null
+++ b/src/library/scala/collection/generic/MutableSortedMapFactory.scala
@@ -0,0 +1,24 @@
+package scala
+package collection
+package generic
+
+import scala.language.higherKinds
+
+/**
+ * A template for companion objects of `SortedMap` and subclasses thereof.
+ *
+ * @tparam CC the type of the collection.
+ *
+ * @author Rui Gonçalves
+ * @since 2.12
+ * @version 2.12
+ *
+ * @define Coll `mutable.SortedMap`
+ * @define coll mutable sorted map
+ * @define factoryInfo
+ * This object provides a set of operations needed to create sorted maps of type `$Coll`.
+ * @define sortedMapCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for sorted maps
+ */
+abstract class MutableSortedMapFactory[CC[A, B] <: mutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]]
+ extends SortedMapFactory[CC]
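This factory is what backs the companion object of the new scala.collection.mutable.SortedMap / TreeMap listed in the diffstat above. A usage sketch, assuming that companion:

    import scala.collection.mutable
    val m = mutable.SortedMap(3 -> "c", 1 -> "a")
    m += (2 -> "b")
    m.keys.toList   // List(1, 2, 3) -- iteration order follows the key ordering
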
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 4486cea419..901e9fc239 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -11,7 +11,6 @@ package collection
package generic
import scala.collection.parallel.ParIterable
-import scala.collection.parallel.Combiner
import scala.language.higherKinds
/** A template class for companion objects of `ParIterable` and subclasses
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 4320635ae6..1341ddcb38 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -10,7 +10,6 @@ package scala
package collection
package generic
-import scala.collection.mutable.Builder
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParSet
import scala.collection.parallel.ParSetLike
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index fcd8d00c18..5e50844cc9 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -12,7 +12,6 @@ package scala
package collection
package generic
-import mutable.Builder
import scala.language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 1beb4a8599..015c3455db 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,6 +1,5 @@
package scala
package collection
-import generic.CanBuildFrom
import scala.language.higherKinds
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 70543aa3a6..6bb1f116fe 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -14,7 +14,7 @@ package immutable
import generic._
import BitSetLike.{LogWL, updateArray}
-import mutable.{ Builder, SetBuilder }
+import mutable.Builder
/** A class for immutable bitsets.
* $bitsetinfo
@@ -103,6 +103,7 @@ object BitSet extends BitSetFactory[BitSet] {
else new BitSetN(elems)
}
+ @SerialVersionUID(2260107458435649300L)
class BitSet1(val elems: Long) extends BitSet {
protected def nwords = 1
protected def word(idx: Int) = if (idx == 0) elems else 0L
@@ -110,6 +111,12 @@ object BitSet extends BitSetFactory[BitSet] {
if (idx == 0) new BitSet1(w)
else if (idx == 1) new BitSet2(elems, w)
else fromBitMaskNoCopy(updateArray(Array(elems), idx, w))
+ override def head: Int =
+ if (elems == 0L) throw new NoSuchElementException("Empty BitSet")
+ else java.lang.Long.numberOfTrailingZeros(elems)
+ override def tail: BitSet =
+ if (elems == 0L) throw new NoSuchElementException("Empty BitSet")
+ else new BitSet1(elems - java.lang.Long.lowestOneBit(elems))
}
class BitSet2(val elems0: Long, elems1: Long) extends BitSet {
@@ -119,6 +126,18 @@ object BitSet extends BitSetFactory[BitSet] {
if (idx == 0) new BitSet2(w, elems1)
else if (idx == 1) new BitSet2(elems0, w)
else fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w))
+ override def head: Int =
+ if (elems0 == 0L) {
+ if (elems1 == 0) throw new NoSuchElementException("Empty BitSet")
+ 64 + java.lang.Long.numberOfTrailingZeros(elems1)
+ }
+ else java.lang.Long.numberOfTrailingZeros(elems0)
+ override def tail: BitSet =
+ if (elems0 == 0L) {
+ if (elems1 == 0L) throw new NoSuchElementException("Empty BitSet")
+ new BitSet2(elems0, elems1 - java.lang.Long.lowestOneBit(elems1))
+ }
+ else new BitSet2(elems0 - java.lang.Long.lowestOneBit(elems0), elems1)
}
/** The implementing class for bit sets with elements >= 128 (exceeding
@@ -131,5 +150,15 @@ object BitSet extends BitSetFactory[BitSet] {
protected def nwords = elems.length
protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L
protected def updateWord(idx: Int, w: Long): BitSet = fromBitMaskNoCopy(updateArray(elems, idx, w))
+ override def tail: BitSet = {
+ val n = nwords
+ var i = 0
+ while (i < n) {
+ val wi = word(i)
+ if (wi != 0L) return fromBitMaskNoCopy(updateArray(elems, i, wi - java.lang.Long.lowestOneBit(wi)))
+ i += 1
+ }
+ throw new NoSuchElementException("Empty BitSet")
+ }
}
}
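A minimal sketch of what the new `head`/`tail` overrides buy, assuming the word-based layout shown above (REPL values are illustrative):

{{{
import scala.collection.immutable.BitSet

val bs = BitSet(3, 7, 64)
bs.head   // 3: numberOfTrailingZeros of the first non-empty word
bs.tail   // BitSet(7, 64): the lowest set bit cleared, no full rebuild for BitSet1/BitSet2
// BitSet.empty.tail now throws NoSuchElementException("Empty BitSet")
}}}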
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 92d915fe8b..3c41e1ba11 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -65,6 +65,8 @@ class HashMap[A, +B] extends AbstractMap[A, B]
def - (key: A): HashMap[A, B] =
removed0(key, computeHash(key), 0)
+ override def tail: HashMap[A, B] = this - head._1
+
override def filter(p: ((A, B)) => Boolean) = {
val buffer = new Array[HashMap[A, B]](bufferSize(size))
nullToEmpty(filter0(p, false, 0, buffer, 0))
@@ -156,7 +158,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
- private object EmptyHashMap extends HashMap[Any, Nothing] { }
+ private object EmptyHashMap extends HashMap[Any, Nothing] {
+ override def head: (Any, Nothing) = throw new NoSuchElementException("Empty Map")
+ override def tail: HashMap[Any, Nothing] = throw new NoSuchElementException("Empty Map")
+ }
  // utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash codes
private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = {
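A hedged illustration of the two changes in this file: `tail` is defined in terms of `head`, and the shared empty instance now fails fast instead of falling back to the generic implementation.

{{{
import scala.collection.immutable.HashMap

val m = HashMap("a" -> 1, "b" -> 2)
m.tail == m - m.head._1              // true by definition of the new override
// HashMap.empty[String, Int].head   // NoSuchElementException("Empty Map")
// HashMap.empty[String, Int].tail   // NoSuchElementException("Empty Map")
}}}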
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 050e90b49b..2d8cc0b386 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -162,6 +162,8 @@ class HashSet[A] extends AbstractSet[A]
def - (e: A): HashSet[A] =
nullToEmpty(removed0(e, computeHash(e), 0))
+ override def tail: HashSet[A] = this - head
+
override def filter(p: A => Boolean) = {
val buffer = new Array[HashSet[A]](bufferSize(size))
nullToEmpty(filter0(p, false, 0, buffer, 0))
@@ -187,7 +189,7 @@ class HashSet[A] extends AbstractSet[A]
protected def get0(key: A, hash: Int, level: Int): Boolean = false
- def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
new HashSet.HashSet1(key, hash)
protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this
@@ -213,7 +215,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
/** $setCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
- private object EmptyHashSet extends HashSet[Any] { }
+ private object EmptyHashSet extends HashSet[Any] {
+ override def head: Any = throw new NoSuchElementException("Empty Set")
+ override def tail: HashSet[Any] = throw new NoSuchElementException("Empty Set")
+ }
private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet
  // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash codes
@@ -250,10 +255,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] {
override def size = 1
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
(hash == this.hash && key == this.key)
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// check if that contains this.key
// we use get0 with our key and hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -261,7 +266,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
that.get0(key, hash, level)
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
@@ -306,7 +311,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
if (that.get0(key, hash, level)) null else this
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) null else this
override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
@@ -320,10 +325,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = ks.size
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
if (hash == this.hash) ks.contains(key) else false
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// we have to check each element
// we use get0 with our hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -331,11 +336,11 @@ object HashSet extends ImmutableSetFactory[HashSet] {
ks.forall(key => that.get0(key, hash, level))
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
- override def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
case that if that.hash != this.hash =>
// different hash code, so there is no need to investigate further.
// Just create a branch node containing the two.
@@ -368,7 +373,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
case that: LeafHashSet[A] =>
// switch to the simpler Tree/Leaf implementation
this.union0(that, level)
@@ -425,7 +430,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
val ks1 = ks - key
ks1.size match {
@@ -522,7 +527,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = size0
- override def get0(key: A, hash: Int, level: Int): Boolean = {
+ override protected def get0(key: A, hash: Int, level: Int): Boolean = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
if (bitmap == - 1) {
@@ -534,7 +539,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
false
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -836,7 +841,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
case _ => this
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -873,7 +878,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
+ override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
case that: HashTrieSet[A] if this.size0 <= that.size0 =>
// create local mutable copies of members
var abm = this.bitmap
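The same pattern for sets, sketched under the same assumptions; the remaining hunks in this file only tighten the visibility of the internal `get0`/`updated0`/`removed0`/`union0` family.

{{{
import scala.collection.immutable.HashSet

val s = HashSet(1, 2, 3)
s.tail == s - s.head       // true: tail just removes whatever head returns
// HashSet.empty[Int].tail // NoSuchElementException("Empty Set")
}}}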
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 75ddded6d2..c09328cae6 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
import scala.annotation.tailrec
-import java.io._
+import java.io.{ObjectOutputStream, ObjectInputStream}
/** A class for immutable linked lists representing ordered collections
* of elements of type `A`.
@@ -86,11 +86,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with Product
with GenericTraversableTemplate[A, List]
with LinearSeqOptimized[A, List[A]]
- with Serializable {
+ with scala.Serializable {
override def companion: GenericCompanion[List] = List
- import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
-
def isEmpty: Boolean
def head: A
def tail: List[A]
@@ -265,8 +263,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
(b.toList, these)
}
-
- @noinline // TODO - fix optimizer bug that requires noinline (see SI-8334)
+
final override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -284,8 +281,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
else super.map(f)
}
-
- @noinline // TODO - fix optimizer bug that requires noinline for map; applied here to be safe (see SI-8334)
+
final override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -314,8 +310,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
else super.collect(pf)
}
-
- @noinline // TODO - fix optimizer bug that requires noinline for map; applied here to be safe (see SI-8334)
+
final override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -455,7 +450,7 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
-
+
private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this }
@SerialVersionUID(1L)
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 1eedf93269..5bbcf44201 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -13,13 +13,16 @@ package collection
package immutable
import generic._
-import scala.annotation.{tailrec, bridge}
+import scala.annotation.tailrec
/** $factoryInfo
* @since 1
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]]
* section on `List Maps` for more information.
*
+ * Note that `ListMap` is built in reverse order to canonical traversal order (traversal order is oldest first),
+ * so `head` and `tail` are O(n). To decompose a `ListMap` one element at a time efficiently, use `last` and `init` instead; these are O(1).
+ *
* @define Coll immutable.ListMap
* @define coll immutable list map
*/
@@ -29,7 +32,13 @@ object ListMap extends ImmutableMapFactory[ListMap] {
new MapCanBuildFrom[A, B]
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
- private object EmptyListMap extends ListMap[Any, Nothing] { }
+ @SerialVersionUID(-8256686706655863282L)
+ private object EmptyListMap extends ListMap[Any, Nothing] {
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
+ override def last: (Any, Nothing) = throw new NoSuchElementException("Empty ListMap")
+ override def init: ListMap[Any, Nothing] = throw new NoSuchElementException("Empty ListMap")
+ }
}
/** This class implements immutable maps using a list-based data structure, which preserves insertion order.
@@ -159,7 +168,6 @@ extends AbstractMap[A, B]
*/
override def apply(k: A): B1 = apply0(this, k)
-
@tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
@@ -177,6 +185,15 @@ extends AbstractMap[A, B]
if (k == cur.key) Some(cur.value)
else if (cur.next.nonEmpty) get0(cur.next, k) else None
+
+ override def contains(key: A): Boolean = contains0(this, key)
+
+ @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean =
+ if (k == cur.key) true
+ else if (cur.next.nonEmpty) contains0(cur.next, k)
+ else false
+
+
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
@@ -186,6 +203,7 @@ extends AbstractMap[A, B]
new m.Node[B2](k, v)
}
+
/** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
@@ -203,5 +221,9 @@ extends AbstractMap[A, B]
remove0(k, cur.next, cur::acc)
override protected def next: ListMap[A, B1] = ListMap.this
+
+ override def last: (A, B1) = (key, value)
+
+ override def init: ListMap[A, B1] = next
}
}
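A small usage sketch of the complexity note added above (the values shown follow from oldest-first traversal order):

{{{
import scala.collection.immutable.ListMap

val lm = ListMap(1 -> "a", 2 -> "b", 3 -> "c")
lm.last          // (3, "c"): the outermost node, O(1)
lm.init          // ListMap(1 -> "a", 2 -> "b"), also O(1)
lm.head          // (1, "a"), but this walks the whole list, O(n)
lm.contains(2)   // true, via the new @tailrec contains0 (no Option allocated)
}}}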
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index adc975479a..98b91f7c84 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -11,8 +11,8 @@ package collection
package immutable
import generic._
-import scala.annotation.{tailrec, bridge}
-import mutable.{ ListBuffer, Builder }
+import scala.annotation.tailrec
+import mutable.{Builder, ReusableBuilder}
/** $factoryInfo
* @define Coll immutable.ListSet
@@ -32,8 +32,10 @@ object ListSet extends ImmutableSetFactory[ListSet] {
* a time to a list backed set puts the "squared" in N^2. There is a
* temporary space cost, but it's improbable a list backed set could
* become large enough for this to matter given its pricy element lookup.
+ *
+ * This builder is reusable.
*/
- class ListSetBuilder[Elem](initial: ListSet[Elem]) extends Builder[Elem, ListSet[Elem]] {
+ class ListSetBuilder[Elem](initial: ListSet[Elem]) extends ReusableBuilder[Elem, ListSet[Elem]] {
def this() = this(empty[Elem])
protected val elems = (new mutable.ListBuffer[Elem] ++= initial).reverse
protected val seen = new mutable.HashSet[Elem] ++= initial
@@ -179,6 +181,6 @@ class ListSet[A] extends AbstractSet[A]
override def tail: ListSet[A] = self
}
-
+
override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
}
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2c5b444c70..6f135cd35f 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -94,6 +94,8 @@ object Map extends ImmutableMapFactory[Map] {
private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
override def size: Int = 0
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
def get(key: Any): Option[Nothing] = None
def iterator: Iterator[(Any, Nothing)] = Iterator.empty
override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
@@ -103,6 +105,8 @@ object Map extends ImmutableMapFactory[Map] {
class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 1
+ override def apply(key: A) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = key == key1
def get(key: A): Option[B] =
if (key == key1) Some(value1) else None
def iterator = Iterator((key1, value1))
@@ -119,6 +123,11 @@ object Map extends ImmutableMapFactory[Map] {
class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 2
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -140,6 +149,12 @@ object Map extends ImmutableMapFactory[Map] {
class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 3
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -164,6 +179,13 @@ object Map extends ImmutableMapFactory[Map] {
class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 4
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) || (key == key4)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
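A minimal sketch of the effect of the new `apply`/`contains` overrides on the small-map classes: lookups compare keys directly instead of going through `get` and an `Option`.

{{{
val m = Map("a" -> 1, "b" -> 2)   // a Map.Map2 under the hood
m("a")                            // 1
m.contains("c")                   // false
// m("c")                         // NoSuchElementException("key not found: c")
}}}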
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 11603a118b..c8d7519254 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -10,8 +10,6 @@ package scala
package collection
package immutable
-import mutable.{ Builder, ListBuffer }
-
// TODO: Now the specialization exists there is no clear reason to have
// separate classes for Range/NumericRange. Investigate and consolidate.
@@ -193,7 +191,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// Either numRangeElements or (head + last) must be even, so divide the even one before multiplying
val a = head.toLong
val b = last.toLong
- val ans =
+ val ans =
if ((numRangeElements & 1) == 0) (numRangeElements / 2) * (a + b)
else numRangeElements * {
// Sum is even, but we might overflow it, so divide in pieces and add back remainder
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 982c10687c..fab5ad47eb 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -12,8 +12,7 @@ package scala
package collection
package immutable
-import java.io._
-import scala.util.matching.Regex
+import java.io.{File, FileReader, Reader}
import scala.reflect.ClassTag
/** The `PagedSeq` object defines a lazy implementation of
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 53af3ce158..70b51f8251 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -12,7 +12,6 @@ package immutable
import generic._
import mutable.{ Builder, ListBuffer }
-import scala.annotation.tailrec
/** `Queue` objects implement data structures that allow to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index ca6720da19..fb7dd4cfbf 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -198,7 +198,24 @@ extends scala.collection.AbstractSeq[Int]
copy(locationAfterN(n), end, step)
}
)
-
+
+  /** Creates a new range containing the elements at indices `from` up to but not including `until`.
+   *
+   *  $doesNotUseBuilders
+   *
+   *  @param from   the index of the first element to include
+   *  @param until  the index one past the last element to include
+ * @return a new range consisting of a contiguous interval of values in the old range
+ */
+ override def slice(from: Int, until: Int): Range =
+ if (from <= 0) take(until)
+ else if (until >= numRangeElements && numRangeElements >= 0) drop(from)
+ else {
+ val fromValue = locationAfterN(from)
+ if (from >= until) newEmptyRange(fromValue)
+ else new Range.Inclusive(fromValue, locationAfterN(until-1), step)
+ }
+
/** Creates a new range containing all the elements of this range except the last one.
*
* $doesNotUseBuilders
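A sketch of the new `slice`, which composes the drop/take logic without a builder and keeps the result a `Range` (the arguments are indices, not values):

{{{
val r = 1 to 10
r.slice(2, 5)    // contains 3, 4, 5: the elements at indices 2, 3 and 4
r.slice(-3, 3)   // same as r.take(3)
r.slice(5, 3)    // an empty range positioned where index 5 would be
}}}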
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 031e5248c1..3a8ee8b0be 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -103,10 +103,11 @@ object Set extends ImmutableSetFactory[Set] {
if (p(elem1)) Some(elem1)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = Set.empty
// Why is Set1 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set1[B]]
-
}
/** An optimized representation for immutable sets of size 2 */
@@ -138,6 +139,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (p(elem2)) Some(elem2)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = new Set1(elem2)
// Why is Set2 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set2[B]]
@@ -174,6 +177,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (p(elem3)) Some(elem3)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = new Set2(elem2, elem3)
// Why is Set3 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set3[B]]
@@ -212,6 +217,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (p(elem4)) Some(elem4)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = new Set3(elem2, elem3, elem4)
// Why is Set4 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set4[B]]
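For the small immutable sets, `head`/`tail` now just shuffle the stored fields; a sketch:

{{{
val s = Set(1, 2, 3)   // a Set.Set3
s.head                 // 1 (elem1)
s.tail                 // Set(2, 3), represented as a Set.Set2
Set(1).tail            // the empty set
}}}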
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 682788e18e..0f3bd2e195 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -14,7 +14,6 @@ package immutable
import generic._
import mutable.Builder
-import scala.annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 4a8859a7ab..107f77f287 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -13,7 +13,6 @@ package collection
package immutable
import generic._
-import mutable.Builder
/** A subtrait of `collection.SortedSet` which represents sorted sets
* which cannot be mutated.
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index d3be809255..d92b159f3b 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -11,7 +11,7 @@ package collection
package immutable
import generic._
-import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
+import mutable.{Builder, StringBuilder, LazyBuilder}
import scala.annotation.tailrec
import Stream.cons
import scala.language.implicitConversions
@@ -203,11 +203,9 @@ abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
with LinearSeqOptimized[A, Stream[A]]
- with Serializable {
-self =>
- override def companion: GenericCompanion[Stream] = Stream
+ with Serializable { self =>
- import scala.collection.{Traversable, Iterable, Seq, IndexedSeq}
+ override def companion: GenericCompanion[Stream] = Stream
/** Indicates whether or not the `Stream` is empty.
*
@@ -499,80 +497,19 @@ self =>
)
else super.flatMap(f)(bf)
- /** Returns all the elements of this `Stream` that satisfy the predicate `p`
- * in a new `Stream` - i.e., it is still a lazy data structure. The order of
- * the elements is preserved
- *
- * @param p the predicate used to filter the stream.
- * @return the elements of this stream satisfying `p`.
- *
- * @example {{{
- * $naturalsEx
- * naturalsFrom(1) filter { _ % 5 == 0 } take 10 mkString(", ")
- * // produces "5, 10, 15, 20, 25, 30, 35, 40, 45, 50"
- * }}}
- */
- override def filter(p: A => Boolean): Stream[A] = {
+ override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = {
// optimization: drop leading prefix of elems for which f returns false
// var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
var rest = this
- while (!rest.isEmpty && !p(rest.head)) rest = rest.tail
+ while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail
// private utility func to avoid `this` on stack (would be needed for the lazy arg)
- if (rest.nonEmpty) Stream.filteredTail(rest, p)
+ if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped)
else Stream.Empty
}
- override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
-
- /** A lazier implementation of WithFilter than TraversableLike's.
- */
- final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
-
- override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return cons(f(head), tailMap(tail))
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailMap(Stream.this))
- else super.map(f)(bf)
- }
-
- override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailFlatMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return f(head).toStream append tailFlatMap(tail)
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailFlatMap(Stream.this))
- else super.flatMap(f)(bf)
- }
-
- override def foreach[U](f: A => U) =
- for (x <- self)
- if (p(x)) f(x)
-
- override def withFilter(q: A => Boolean): StreamWithFilter =
- new StreamWithFilter(x => p(x) && q(x))
- }
+  /** A FilterMonadic which allows GC of the head of the stream during processing */
+ @noinline // Workaround SI-9137, see https://github.com/scala/scala/pull/4284#issuecomment-73180791
+ override final def withFilter(p: A => Boolean): FilterMonadic[A, Stream[A]] = new Stream.StreamWithFilter(this, p)
/** A lazier Iterator than LinearSeqLike's. */
override def iterator: Iterator[A] = new StreamIterator(self)
@@ -1152,14 +1089,12 @@ object Stream extends SeqFactory[Stream] {
/** Creates a new builder for a stream */
def newBuilder[A]: Builder[A, Stream[A]] = new StreamBuilder[A]
- import scala.collection.{Iterable, Seq, IndexedSeq}
-
/** A builder for streams
* @note This builder is lazy only in the sense that it does not go down the spine
* of traversables that are added as a whole. If more laziness can be achieved,
* this builder should be bypassed.
*/
- class StreamBuilder[A] extends scala.collection.mutable.LazyBuilder[A, Stream[A]] {
+ class StreamBuilder[A] extends LazyBuilder[A, Stream[A]] {
def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
@@ -1295,13 +1230,36 @@ object Stream extends SeqFactory[Stream] {
else cons(start, range(start + step, end, step))
}
- private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = {
- cons(stream.head, stream.tail filter p)
+ private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = {
+ cons(stream.head, stream.tail.filterImpl(p, isFlipped))
}
private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
-}
+ /** An implementation of `FilterMonadic` allowing GC of the filtered-out elements of
+ * the `Stream` as it is processed.
+ *
+ * Because this is not an inner class of `Stream` with a reference to the original
+  * head, it is now possible for GC to collect any leading elements that do not
+  * satisfy the filter while the rest of the `Stream` is still being processed (see SI-8990).
+ */
+ private[immutable] final class StreamWithFilter[A](sl: => Stream[A], p: A => Boolean) extends FilterMonadic[A, Stream[A]] {
+ private var s = sl // set to null to allow GC after filtered
+    private lazy val filtered = { val f = s filter p; s = null; f } // s is not cleared if `filter` throws
+
+ def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered map f
+
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered flatMap f
+
+ def foreach[U](f: A => U): Unit =
+ filtered foreach f
+ def withFilter(q: A => Boolean): FilterMonadic[A, Stream[A]] =
+ new StreamWithFilter[A](filtered, q)
+ }
+
+}
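A hedged sketch of what the rewritten `withFilter` changes in practice (SI-8990): the returned `FilterMonadic` no longer holds the original stream's head, so a long filtered-out prefix can be collected while the rest is still being forced.

{{{
// Lazily doubles the even numbers of an (infinite) Stream.
def doubledEvens(s: Stream[Int]): Stream[Int] =
  s.withFilter(_ % 2 == 0).map(_ * 2)

doubledEvens(Stream.from(1)).take(3).toList   // List(4, 8, 12)
}}}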
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index c2eb85815d..4d7eaeff2a 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -53,6 +53,7 @@ extends SeqView[A, Coll]
/** boilerplate */
protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -67,7 +68,6 @@ extends SeqView[A, Coll]
protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
override def stringPrefix = "StreamView"
}
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 1b52e40b72..d92db68912 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -10,7 +10,7 @@ package scala
package collection
package immutable
-import mutable.{ ArrayBuilder, Builder }
+import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
import scala.reflect.ClassTag
@@ -201,35 +201,64 @@ self =>
*/
def stripMargin: String = stripMargin('|')
- private def escape(ch: Char): String = "\\Q" + ch + "\\E"
-
- def split(separator: Char): Array[String] = {
- val thisString = toString
- var pos = thisString.indexOf(separator)
-
- if (pos != -1) {
- val res = new ArrayBuilder.ofRef[String]
-
- var prev = 0
- do {
- res += thisString.substring(prev, pos)
- prev = pos + 1
- pos = thisString.indexOf(separator, prev)
- } while (pos != -1)
-
- if (prev != thisString.length)
- res += thisString.substring(prev, thisString.length)
-
- val initialResult = res.result()
- pos = initialResult.length
- while (pos > 0 && initialResult(pos - 1).isEmpty) pos = pos - 1
- if (pos != initialResult.length) {
- val trimmed = new Array[String](pos)
- Array.copy(initialResult, 0, trimmed, 0, pos)
- trimmed
- } else initialResult
- } else Array[String](thisString)
- }
+ private def escape(ch: Char): String = if (
+ (ch >= 'a') && (ch <= 'z') ||
+ (ch >= 'A') && (ch <= 'Z') ||
+ (ch >= '0' && ch <= '9')) ch.toString
+ else "\\" + ch
+
+ /** Split this string around the separator character
+ *
+ * If this string is the empty string, returns an array of strings
+ * that contains a single empty string.
+ *
+   *  If this string is not the empty string, returns an array containing
+   *  the substrings terminated by the start of the string, the end of the
+   *  string or the separator character, excluding empty trailing substrings.
+   *
+   *  If the separator character is a surrogate character, only split on
+   *  matching surrogate characters if they are not part of a surrogate pair.
+   *
+   *  The behaviour matches, and is implemented in terms of, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html#split%28java.lang.String%29">String.split(re: String)</a>.
+ *
+ *
+ * @example {{{
+ * "a.b".split('.') //returns Array("a", "b")
+ *
+ * //splitting the empty string always returns the array with a single
+ * //empty string
+ * "".split('.') //returns Array("")
+ *
+ * //only trailing empty substrings are removed
+ * "a.".split('.') //returns Array("a")
+ * ".a.".split('.') //returns Array("", "a")
+ * "..a..".split('.') //returns Array("", "", "a")
+ *
+ * //all parts are empty and trailing
+ * ".".split('.') //returns Array()
+ * "..".split('.') //returns Array()
+ *
+ * //surrogate pairs
+ * val high = 0xD852.toChar
+ * val low = 0xDF62.toChar
+ * val highstring = high.toString
+ * val lowstring = low.toString
+ *
+ * //well-formed surrogate pairs are not split
+ * val highlow = highstring + lowstring
+ * highlow.split(high) //returns Array(highlow)
+ *
+ * //bare surrogate characters are split
+ * val bare = "_" + highstring + "_"
+ * bare.split(high) //returns Array("_", "_")
+ *
+ * }}}
+ *
+ * @param separator the character used as a delimiter
+ */
+ def split(separator: Char): Array[String] =
+ toString.split(escape(separator))
+
@throws(classOf[java.util.regex.PatternSyntaxException])
def split(separators: Array[Char]): Array[String] = {
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 5a9734a99e..539ae9c387 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -13,7 +13,7 @@ package immutable
import scala.annotation.unchecked.uncheckedVariance
import scala.compat.Platform
import scala.collection.generic._
-import scala.collection.mutable.Builder
+import scala.collection.mutable.{Builder, ReusableBuilder}
import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
@@ -704,8 +704,8 @@ extends AbstractIterator[A]
}
}
-
-final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A @uncheckedVariance] {
+/** A class to build instances of `Vector`. This builder is reusable. */
+final class VectorBuilder[A]() extends ReusableBuilder[A,Vector[A]] with VectorPointer[A @uncheckedVariance] {
// possible alternative: start with display0 = null, blockIndex = -32, lo = 32
// to avoid allocating initial array if the result will be empty anyways
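Because `VectorBuilder` is now a `ReusableBuilder`, the `result()`/`clear()` cycle is part of its contract; a minimal sketch:

{{{
val b = Vector.newBuilder[Int]
b += 1; b += 2
val v1 = b.result()   // Vector(1, 2)
b.clear()
b += 3
val v2 = b.result()   // Vector(3); v1 is left untouched
}}}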
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
deleted file mode 100644
index b63d0aae33..0000000000
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package collection
-package mutable
-
-/**
- * An immutable AVL Tree implementation formerly used by mutable.TreeSet
- *
- * @author Lucien Pereira
- */
-@deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.2")
-private[mutable] sealed trait AVLTree[+A] extends Serializable {
- def balance: Int
-
- def depth: Int
-
- def iterator[B >: A]: Iterator[B] = Iterator.empty
-
- def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false
-
- /**
- * Returns a new tree containing the given element.
- * Throws an IllegalArgumentException if element is already present.
- *
- */
- def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
-
- /**
- * Return a new tree which not contains given element.
- *
- */
- def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
- throw new NoSuchElementException(String.valueOf(value))
-
- /**
- * Return a tuple containing the smallest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
- /**
- * Return a tuple containing the biggest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
- def rebalance[B >: A]: AVLTree[B] = this
-
- def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private case object Leaf extends AVLTree[Nothing] {
- override val balance: Int = 0
-
- override val depth: Int = -1
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] {
- override val balance: Int = right.depth - left.depth
-
- override val depth: Int = math.max(left.depth, right.depth) + 1
-
- override def iterator[B >: A]: Iterator[B] = new AVLIterator(this)
-
- override def contains[B >: A](value: B, ordering: Ordering[B]) = {
- val ord = ordering.compare(value, data)
- if (0 == ord)
- true
- else if (ord < 0)
- left.contains(value, ordering)
- else
- right.contains(value, ordering)
- }
-
- /**
- * Returns a new tree containing the given element.
- * Throws an IllegalArgumentException if element is already present.
- *
- */
- override def insert[B >: A](value: B, ordering: Ordering[B]) = {
- val ord = ordering.compare(value, data)
- if (0 == ord)
- throw new IllegalArgumentException()
- else if (ord < 0)
- Node(data, left.insert(value, ordering), right).rebalance
- else
- Node(data, left, right.insert(value, ordering)).rebalance
- }
-
- /**
- * Return a new tree which not contains given element.
- *
- */
- override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
- val ord = ordering.compare(value, data)
- if(ord == 0) {
- if (Leaf == left) {
- if (Leaf == right) {
- Leaf
- } else {
- val (min, newRight) = right.removeMin
- Node(min, left, newRight).rebalance
- }
- } else {
- val (max, newLeft) = left.removeMax
- Node(max, newLeft, right).rebalance
- }
- } else if (ord < 0) {
- Node(data, left.remove(value, ordering), right).rebalance
- } else {
- Node(data, left, right.remove(value, ordering)).rebalance
- }
- }
-
- /**
- * Return a tuple containing the smallest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- override def removeMin[B >: A]: (B, AVLTree[B]) = {
- if (Leaf == left)
- (data, right)
- else {
- val (min, newLeft) = left.removeMin
- (min, Node(data, newLeft, right).rebalance)
- }
- }
-
- /**
- * Return a tuple containing the biggest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- override def removeMax[B >: A]: (B, AVLTree[B]) = {
- if (Leaf == right)
- (data, left)
- else {
- val (max, newRight) = right.removeMax
- (max, Node(data, left, newRight).rebalance)
- }
- }
-
- override def rebalance[B >: A] = {
- if (-2 == balance) {
- if (1 == left.balance)
- doubleRightRotation
- else
- rightRotation
- } else if (2 == balance) {
- if (-1 == right.balance)
- doubleLeftRotation
- else
- leftRotation
- } else {
- this
- }
- }
-
- override def leftRotation[B >: A] = {
- if (Leaf != right) {
- val r: Node[A] = right.asInstanceOf[Node[A]]
- Node(r.data, Node(data, left, r.left), r.right)
- } else sys.error("Should not happen.")
- }
-
- override def rightRotation[B >: A] = {
- if (Leaf != left) {
- val l: Node[A] = left.asInstanceOf[Node[A]]
- Node(l.data, l.left, Node(data, l.right, right))
- } else sys.error("Should not happen.")
- }
-
- override def doubleLeftRotation[B >: A] = {
- if (Leaf != right) {
- val r: Node[A] = right.asInstanceOf[Node[A]]
- // Let's save an instanceOf by 'inlining' the left rotation
- val rightRotated = r.rightRotation
- Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right)
- } else sys.error("Should not happen.")
- }
-
- override def doubleRightRotation[B >: A] = {
- if (Leaf != left) {
- val l: Node[A] = left.asInstanceOf[Node[A]]
- // Let's save an instanceOf by 'inlining' the right rotation
- val leftRotated = l.leftRotation
- Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right))
- } else sys.error("Should not happen.")
- }
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
- val stack = mutable.ArrayStack[Node[A]](root)
- diveLeft()
-
- private def diveLeft(): Unit = {
- if (Leaf != stack.head.left) {
- val left: Node[A] = stack.head.left.asInstanceOf[Node[A]]
- stack.push(left)
- diveLeft()
- }
- }
-
- private def engageRight(): Unit = {
- if (Leaf != stack.head.right) {
- val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
- stack.pop()
- stack.push(right)
- diveLeft()
- } else
- stack.pop()
- }
-
- override def hasNext: Boolean = !stack.isEmpty
-
- override def next(): A = {
- if (stack.isEmpty)
- throw new NoSuchElementException()
- else {
- val result = stack.head.data
- // Let's maintain stack for the next invocation
- engageRight()
- result
- }
- }
-}
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 369d596ec3..2ed5bbea60 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -27,10 +27,12 @@ import generic.CanBuildFrom
* rapidly as 2^30^ is approached.
*
*/
+@SerialVersionUID(1L)
final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean)
extends AbstractMap[K, V]
with Map[K, V]
with MapLike[K, V, AnyRefMap[K, V]]
+ with Serializable
{
import AnyRefMap._
def this() = this(AnyRefMap.exceptionDefault, 16, true)
@@ -335,6 +337,24 @@ extends AbstractMap[K, V]
arm
}
+ override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += kv
+ arm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ xs.foreach(kv => arm += kv)
+ arm
+ }
+
+ override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += (key, value)
+ arm
+ }
+
private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) {
var i,j = 0
while (i < _hashes.length & j < _size) {
@@ -407,7 +427,11 @@ object AnyRefMap {
def apply(): AnyRefMapBuilder[J, U] = new AnyRefMapBuilder[J, U]
}
- final class AnyRefMapBuilder[K <: AnyRef, V] extends Builder[(K, V), AnyRefMap[K, V]] {
+ /** A builder for instances of `AnyRefMap`.
+ *
+ * This builder can be reused to create multiple instances.
+ */
+ final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] {
private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V]
def +=(entry: (K, V)): this.type = {
elems += entry
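A sketch of the new non-destructive operations: each one clones the receiver first, so the original map is untouched and the result keeps the `AnyRefMap` type instead of widening to `Map`.

{{{
import scala.collection.mutable.AnyRefMap

val arm = new AnyRefMap[String, Int]()
arm += ("a" -> 1)
val plusOne  = arm + ("b" -> 2)                 // a fresh AnyRefMap[String, Int]
val plusMany = arm ++ List("c" -> 3, "d" -> 4)
val upd      = arm.updated("a", 42)
arm.size                                        // still 1
}}}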
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 011fd415ee..167e04ccbd 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -149,13 +149,16 @@ class ArrayBuffer[A](override protected val initialSize: Int)
/** Removes the element on a given index position. It takes time linear in
* the buffer size.
*
- * @param n the index which refers to the first element to delete.
- * @param count the number of elements to delete
- * @throws IndexOutOfBoundsException if `n` is out of bounds.
+ * @param n the index which refers to the first element to remove.
+ * @param count the number of elements to remove.
+ * @throws IndexOutOfBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length - count` (with `count > 0`).
+ * @throws IllegalArgumentException if `count < 0`.
*/
override def remove(n: Int, count: Int) {
- require(count >= 0, "removing negative number of elements")
- if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException(n.toString)
+ if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
+ else if (count == 0) return // Did nothing
+ if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString)
copy(n + count, n, size0 - (n + count))
reduceToSize(size0 - count)
}
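A sketch of the clarified `remove(n, count)` contract documented above:

{{{
import scala.collection.mutable.ArrayBuffer

val buf = ArrayBuffer(1, 2, 3, 4, 5)
buf.remove(1, 2)      // buf is now ArrayBuffer(1, 4, 5)
buf.remove(3, 0)      // count == 0 is a no-op, even at the end of the buffer
// buf.remove(2, 2)   // IndexOutOfBoundsException: 2 > length - count
// buf.remove(0, -1)  // IllegalArgumentException
}}}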
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 6e53824cbe..549ffef565 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -11,7 +11,6 @@ package collection
package mutable
import scala.reflect.ClassTag
-import scala.runtime.ScalaRunTime
/** A builder class for arrays.
*
@@ -19,7 +18,7 @@ import scala.runtime.ScalaRunTime
*
* @tparam T the type of the elements for the builder.
*/
-abstract class ArrayBuilder[T] extends Builder[T, Array[T]] with Serializable
+abstract class ArrayBuilder[T] extends ReusableBuilder[T, Array[T]] with Serializable
/** A companion object for array builders.
*
@@ -50,6 +49,8 @@ object ArrayBuilder {
/** A class for array builders for arrays of reference types.
*
+ * This builder can be reused.
+ *
* @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound.
*/
@deprecatedInheritance("ArrayBuilder.ofRef is an internal implementation not intended for subclassing.", "2.11.0")
@@ -99,12 +100,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -116,7 +118,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofRef"
}
- /** A class for array builders for arrays of `byte`s. */
+ /** A class for array builders for arrays of `byte`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofByte is an internal implementation not intended for subclassing.", "2.11.0")
class ofByte extends ArrayBuilder[Byte] {
@@ -164,12 +166,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -181,7 +184,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofByte"
}
- /** A class for array builders for arrays of `short`s. */
+ /** A class for array builders for arrays of `short`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofShort is an internal implementation not intended for subclassing.", "2.11.0")
class ofShort extends ArrayBuilder[Short] {
@@ -229,12 +232,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -246,7 +250,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofShort"
}
- /** A class for array builders for arrays of `char`s. */
+ /** A class for array builders for arrays of `char`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofChar is an internal implementation not intended for subclassing.", "2.11.0")
class ofChar extends ArrayBuilder[Char] {
@@ -294,12 +298,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -311,7 +316,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofChar"
}
- /** A class for array builders for arrays of `int`s. */
+ /** A class for array builders for arrays of `int`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofInt is an internal implementation not intended for subclassing.", "2.11.0")
class ofInt extends ArrayBuilder[Int] {
@@ -359,12 +364,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -376,7 +382,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofInt"
}
- /** A class for array builders for arrays of `long`s. */
+ /** A class for array builders for arrays of `long`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofLong is an internal implementation not intended for subclassing.", "2.11.0")
class ofLong extends ArrayBuilder[Long] {
@@ -424,12 +430,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -441,7 +448,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofLong"
}
- /** A class for array builders for arrays of `float`s. */
+ /** A class for array builders for arrays of `float`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofFloat is an internal implementation not intended for subclassing.", "2.11.0")
class ofFloat extends ArrayBuilder[Float] {
@@ -489,12 +496,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -506,7 +514,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofFloat"
}
- /** A class for array builders for arrays of `double`s. */
+ /** A class for array builders for arrays of `double`s. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofDouble is an internal implementation not intended for subclassing.", "2.11.0")
class ofDouble extends ArrayBuilder[Double] {
@@ -554,12 +562,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -571,7 +580,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofDouble"
}
- /** A class for array builders for arrays of `boolean`s. */
+ /** A class for array builders for arrays of `boolean`s. It can be reused. */
class ofBoolean extends ArrayBuilder[Boolean] {
private var elems: Array[Boolean] = _
@@ -618,12 +627,13 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
@@ -635,65 +645,33 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofBoolean"
}
- /** A class for array builders for arrays of `Unit` type. */
+ /** A class for array builders for arrays of `Unit` type. It can be reused. */
@deprecatedInheritance("ArrayBuilder.ofUnit is an internal implementation not intended for subclassing.", "2.11.0")
class ofUnit extends ArrayBuilder[Unit] {
- private var elems: Array[Unit] = _
- private var capacity: Int = 0
private var size: Int = 0
- private def mkArray(size: Int): Array[Unit] = {
- val newelems = new Array[Unit](size)
- if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
- newelems
- }
-
- private def resize(size: Int) {
- elems = mkArray(size)
- capacity = size
- }
-
- override def sizeHint(size: Int) {
- if (capacity < size) resize(size)
- }
-
- private def ensureSize(size: Int) {
- if (capacity < size || capacity == 0) {
- var newsize = if (capacity == 0) 16 else capacity * 2
- while (newsize < size) newsize *= 2
- resize(newsize)
- }
- }
-
def +=(elem: Unit): this.type = {
- ensureSize(size + 1)
- elems(size) = elem
size += 1
this
}
- override def ++=(xs: TraversableOnce[Unit]): this.type = xs match {
- case xs: WrappedArray.ofUnit =>
- ensureSize(this.size + xs.length)
- Array.copy(xs.array, 0, elems, this.size, xs.length)
- size += xs.length
- this
- case _ =>
- super.++=(xs)
+ override def ++=(xs: TraversableOnce[Unit]): this.type = {
+ size += xs.size
+ this
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
- else mkArray(size)
+ val ans = new Array[Unit](size)
+ var i = 0
+ while (i < size) { ans(i) = (); i += 1 }
+ ans
}
override def equals(other: Any): Boolean = other match {
- case x: ofUnit => (size == x.size) && (elems == x.elems)
+ case x: ofUnit => (size == x.size)
case _ => false
}
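
A minimal sketch, not part of the patch, of the reuse cycle the `result()`/`clear()` hunks above enable for the primitive array builders; the object and value names are illustrative:

    import scala.collection.mutable.ArrayBuilder

    object ArrayBuilderReuseSketch {
      def main(args: Array[String]): Unit = {
        val b = ArrayBuilder.make[Int]()
        b ++= Array(1, 2, 3)
        val first = b.result()        // may hand back the internal array, so capacity is reset to 0
        b.clear()                     // size back to 0; the builder can now be reused
        b += 4
        val second = b.result()
        println(first.mkString(","))  // 1,2,3 -- unaffected by the later += because of the capacity reset
        println(second.mkString(",")) // 4
      }
    }
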
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 00491ef20e..5144db7de3 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -10,7 +10,6 @@ package scala
package collection
package mutable
-import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime._
import parallel.mutable.ParArray
@@ -40,9 +39,8 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
arrayElementClass(repr.getClass)
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
- var l = math.min(len, repr.length)
- if (xs.length - start < l) l = xs.length - start max 0
- Array.copy(repr, 0, xs, start, l)
+ val l = len min repr.length min (xs.length - start)
+ if (l > 0) Array.copy(repr, 0, xs, start, l)
}
override def toArray[U >: T : ClassTag]: Array[U] = {
@@ -107,9 +105,9 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
bb.result()
}
}
-
+
/** Converts an array of pairs into an array of first elements and an array of second elements.
- *
+ *
* @tparam T1 the type of the first half of the element pairs
* @tparam T2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
@@ -135,9 +133,9 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
}
(a1, a2)
}
-
+
/** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
- *
+ *
* @tparam T1 the type of the first of three elements in the triple
* @tparam T2 the type of the second of three elements in the triple
* @tparam T3 the type of the third of three elements in the triple
@@ -169,7 +167,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
}
(a1, a2, a3)
}
-
+
def seq = thisCollection
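
An illustrative check, not from the patch, of the clamped `copyToArray` above; with the `l > 0` guard an out-of-range destination offset becomes a no-op instead of reaching `Array.copy` with impossible bounds:

    object CopyToArrayGuardSketch {
      def main(args: Array[String]): Unit = {
        val src = Array(1, 2, 3)
        val dst = new Array[Int](2)

        src.copyToArray(dst, 0, 10)   // length is clamped: min(10, 3, 2 - 0) = 2
        println(dst.mkString(","))    // 1,2

        src.copyToArray(dst, 5, 1)    // computed length is 2 - 5 = -3, so nothing is copied
        println(dst.mkString(","))    // still 1,2
      }
    }
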
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index ddb48627af..1e82096baf 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -87,7 +87,7 @@ extends AbstractSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
override def clone(): ArraySeq[A] = {
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 8ff128c026..951a90b084 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -64,9 +64,10 @@ object ArrayStack extends SeqFactory[ArrayStack] {
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int)
extends AbstractSeq[T]
- with Seq[T]
- with SeqLike[T, ArrayStack[T]]
+ with IndexedSeq[T]
+ with IndexedSeqLike[T, ArrayStack[T]]
with GenericTraversableTemplate[T, ArrayStack]
+ with IndexedSeqOptimized[T, ArrayStack[T]]
with Cloneable[ArrayStack[T]]
with Builder[T, ArrayStack[T]]
with Serializable
@@ -224,7 +225,7 @@ extends AbstractSeq[T]
/** Creates an iterator over the stack in LIFO order.
* @return an iterator over the elements of the stack.
*/
- def iterator: Iterator[T] = new AbstractIterator[T] {
+ override def iterator: Iterator[T] = new AbstractIterator[T] {
var currentIndex = index
def hasNext = currentIndex > 0
def next() = {
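
A small sketch, not part of the patch, of what the new `IndexedSeq`/`IndexedSeqOptimized` parents above buy callers (names are illustrative):

    import scala.collection.mutable.ArrayStack

    object ArrayStackAsIndexedSeqSketch {
      def main(args: Array[String]): Unit = {
        val s = new ArrayStack[Int]
        s.push(1); s.push(2); s.push(3)

        // ArrayStack can now be used wherever an IndexedSeq is expected,
        // and it inherits the IndexedSeqOptimized implementations.
        val ix: collection.IndexedSeq[Int] = s
        println(ix.length)            // 3
        println(s.top)                // 3
        println(s.iterator.toList)    // LIFO order: List(3, 2, 1)
      }
    }
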
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index e92d48cfeb..feef694e01 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -13,7 +13,7 @@ package collection
package mutable
import generic._
-import BitSetLike.{LogWL, MaxSize, updateArray}
+import BitSetLike.{LogWL, MaxSize}
/** A class for mutable bitsets.
*
@@ -56,7 +56,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def nwords = elems.length
-
+
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def word(idx: Int): Long =
if (idx < nwords) elems(idx) else 0L
@@ -100,7 +100,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
@deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
def += (elem: Int): this.type = { add(elem); this }
-
+
@deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
def -= (elem: Int): this.type = { remove(elem); this }
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3c57387c03..98c9771a05 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
import script._
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
/** A template trait for buffers of type `Buffer[A]`.
*
@@ -105,15 +105,18 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
def remove(n: Int): A
- /** Removes a number of elements from a given index position.
+ /** Removes a number of elements from a given index position. Subclasses of `BufferLike`
+ * will typically override this method to provide better performance than `count`
+ * successive calls to single-element `remove`.
*
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
* @throws IndexOutOfBoundsException if the index `n` is not in the valid range
- * `0 <= n <= length - count`.
+ * `0 <= n <= length - count` (with `count > 0`).
* @throws IllegalArgumentException if `count < 0`.
*/
def remove(n: Int, count: Int) {
+ if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
for (i <- 0 until count) remove(n)
}
@@ -211,13 +214,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
override def stringPrefix: String = "Buffer"
- /** Returns the current evolving(!) state of this buffer as a read-only sequence.
- *
- * @return A sequence that forwards to this buffer for all its operations.
- */
- @deprecated("The returned sequence changes as this buffer is mutated. For an immutable copy, use, e.g., toList.", "2.11.0")
- def readOnly: scala.collection.Seq[A] = toSeq
-
/** Creates a new collection containing both the elements of this collection and the provided
* traversable object.
*
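
A short sketch, not from the patch, of the stricter `remove(n, count)` contract documented above, shown with `ArrayBuffer` under the assumption that concrete buffers enforce the same negative-count rejection as the `BufferLike` default:

    import scala.collection.mutable.ArrayBuffer

    object RemoveRangeSketch {
      def main(args: Array[String]): Unit = {
        val buf = ArrayBuffer(1, 2, 3, 4, 5)
        buf.remove(1, 2)                   // drops the elements at indices 1 and 2
        println(buf)                       // ArrayBuffer(1, 4, 5)

        try buf.remove(0, -1)              // negative counts are rejected up front
        catch { case e: IllegalArgumentException => println(e.getClass.getSimpleName) }
      }
    }
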
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index d9632cce91..2d52831d37 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -43,8 +43,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
def +=(elem: A): this.type = { self.+=(elem); this }
- override def readOnly = self.readOnly
-
/** Appends a number of elements provided by a traversable object.
*
* @param xs the traversable object.
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 75560580cc..8d6a0ec69d 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -18,6 +18,14 @@ import generic._
* elements to the builder with `+=` and then converting to the required
* collection type with `result`.
*
+ * One cannot assume that a single `Builder` can build more than one
+ * instance of the desired collection. Particular subclasses may allow
+ * such behavior. Otherwise, `result` should be treated as a terminal
+ * operation: after it is called, no further methods should be called on
+ * the builder. Extend the [[collection.mutable.ReusableBuilder]] trait
+ * instead of `Builder` for builders that may be reused to build multiple
+ * instances.
+ *
* @tparam Elem the type of elements that get added to the builder.
* @tparam To the type of collection that it produces.
*
@@ -36,8 +44,10 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
*/
def clear()
- /** Produces a collection from the added elements.
- * The builder's contents are undefined after this operation.
+ /** Produces a collection from the added elements. This is a terminal operation:
+ * the builder's contents are undefined after this operation, and no further
+ * methods should be called.
+ *
* @return a collection containing the elements added to this builder.
*/
def result(): To
@@ -112,6 +122,8 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* @tparam NewTo the type of collection returned by `f`.
* @return a new builder which is the same as the current builder except
* that a transformation function is applied to this builder's result.
+ *
+ * @note The original builder should no longer be used after `mapResult` is called.
*/
def mapResult[NewTo](f: To => NewTo): Builder[Elem, NewTo] =
new Builder[Elem, NewTo] with Proxy {
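
An illustrative sketch, not part of the patch, of the one-shot contract the new scaladoc spells out: build, call `result()`, and discard the builder; `mapResult` wraps a builder that likewise should not be used again afterwards (names are illustrative):

    object BuilderContractSketch {
      def main(args: Array[String]): Unit = {
        val b = Vector.newBuilder[Int]
        b += 1
        b ++= Seq(2, 3)
        println(b.result())                     // Vector(1, 2, 3); generic code should treat b as spent now

        val sb = new StringBuilder              // alias for scala.collection.mutable.StringBuilder
        val lengths = sb.mapResult(_.length)    // Builder[Char, Int]; sb itself should not be touched again
        lengths ++= Seq('a', 'b', 'c')
        println(lengths.result())               // 3
      }
    }
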
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index c4b5e546aa..27d554d98e 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -15,6 +15,8 @@ import generic._
/** The canonical builder for collections that are growable, i.e. that support an
* efficient `+=` method which adds an element to the collection.
*
+ * A GrowingBuilder can produce only a single instance of the collection it is growing.
+ *
* @author Paul Phillips
* @version 2.8
* @since 2.8
@@ -25,6 +27,6 @@ import generic._
class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] {
protected var elems: To = empty
def +=(x: Elem): this.type = { elems += x; this }
- def clear() { elems = empty }
+ def clear() { empty.clear }
def result: To = elems
}
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index ebee38b77f..f0a5e6971a 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -13,12 +13,14 @@ package mutable
/** A builder that constructs its result lazily. Iterators or iterables to
* be added to this builder with `++=` are not evaluated until `result` is called.
*
+ * This builder can be reused.
+ *
* @since 2.8
*
* @tparam Elem type of the elements for this builder.
* @tparam To type of the collection this builder builds.
*/
-abstract class LazyBuilder[Elem, +To] extends Builder[Elem, To] {
+abstract class LazyBuilder[Elem, +To] extends ReusableBuilder[Elem, To] {
/** The different segments of elements to be added to the builder, represented as iterators */
protected var parts = new ListBuffer[TraversableOnce[Elem]]
def +=(x: Elem): this.type = { parts += List(x); this }
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index f9bab40a1e..02fcced3ac 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -12,8 +12,7 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
-import java.io._
-import scala.annotation.migration
+import java.io.{ObjectOutputStream, ObjectInputStream}
/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
@@ -47,7 +46,7 @@ final class ListBuffer[A]
with Buffer[A]
with GenericTraversableTemplate[A, ListBuffer]
with BufferLike[A, ListBuffer[A]]
- with Builder[A, List[A]]
+ with ReusableBuilder[A, List[A]]
with SeqForwarder[A]
with Serializable
{
@@ -262,13 +261,14 @@ final class ListBuffer[A]
*
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
+ * @throws IndexOutOfBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length - count` (with `count > 0`).
+ * @throws IllegalArgumentException if `count < 0`.
*/
- @migration("Invalid input values will be rejected in future releases.", "2.11")
override def remove(n: Int, count: Int) {
- if (n >= len)
- return
- if (count < 0)
- throw new IllegalArgumentException(s"removing negative number ($count) of elements")
+ if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
+ else if (count == 0) return // Nothing to do
+ if (n < 0 || n > len - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString)
if (exported) copy()
val n1 = n max 0
val count1 = count min (len - n1)
@@ -297,6 +297,10 @@ final class ListBuffer[A]
// Implementation of abstract method in Builder
+ /** Returns the accumulated `List`.
+ *
+ * This method may be called multiple times to obtain snapshots of the list in different stages of construction.
+ */
def result: List[A] = toList
/** Converts this buffer to a list. Takes constant time. The buffer is
@@ -408,9 +412,6 @@ final class ListBuffer[A]
}
}
- @deprecated("The result of this method will change along with this buffer, which is often not what's expected.", "2.11.0")
- override def readOnly: List[A] = start
-
// Private methods
/** Copy contents of this buffer */
@@ -426,7 +427,7 @@ final class ListBuffer[A]
}
override def equals(that: Any): Boolean = that match {
- case that: ListBuffer[_] => this.readOnly equals that.readOnly
+ case that: ListBuffer[_] => this.start equals that.start
case _ => super.equals(that)
}
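
A sketch, not from the patch, of the snapshot behaviour the new `result` scaladoc above describes (constant-time `toList`, copy-on-write after export); names are illustrative:

    import scala.collection.mutable.ListBuffer

    object ListBufferSnapshotSketch {
      def main(args: Array[String]): Unit = {
        val buf = new ListBuffer[Int]
        buf += 1
        buf += 2
        val early = buf.result()   // List(1, 2) -- constant time, shares the buffer's cells
        buf += 3                   // the buffer copies its exported prefix before appending
        val late = buf.result()    // List(1, 2, 3)
        println(early)             // List(1, 2) -- the earlier snapshot is unaffected
        println(late)
      }
    }
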
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index 198e34bd29..ecbb1952af 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -415,6 +415,24 @@ extends AbstractMap[Long, V]
lm
}
+ override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += kv
+ lm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ xs.foreach(kv => lm += kv)
+ lm
+ }
+
+ override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += (key, value)
+ lm
+ }
+
/** Applies a function to all keys of this map. */
def foreachKey[A](f: Long => A) {
if ((extraKeys & 1) == 1) f(0L)
@@ -501,7 +519,11 @@ object LongMap {
def apply(): LongMapBuilder[U] = new LongMapBuilder[U]
}
- final class LongMapBuilder[V] extends Builder[(Long, V), LongMap[V]] {
+ /** A builder for instances of `LongMap`.
+ *
+ * This builder can be reused to create multiple instances.
+ */
+ final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] {
private[collection] var elems: LongMap[V] = new LongMap[V]
def +=(entry: (Long, V)): this.type = {
elems += entry
@@ -541,7 +563,7 @@ object LongMap {
/** Creates a new `LongMap` from keys and values.
* Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
*/
- def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = {
+ def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = {
val sz = math.min(keys.size, values.size)
val lm = new LongMap[V](sz * 2)
val ki = keys.iterator
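
A small sketch, not part of the patch, of the newly overridden `+`, `++` and `updated`, which now keep the static type `LongMap` instead of widening to a generic `Map` (value names are illustrative):

    import scala.collection.mutable.LongMap

    object LongMapOpsSketch {
      def main(args: Array[String]): Unit = {
        val m = LongMap(1L -> "one", 2L -> "two")

        val plus: LongMap[String] = m + (3L -> "three")
        val many: LongMap[String] = m ++ List(4L -> "four", 5L -> "five")
        val upd:  LongMap[String] = m.updated(6L, "six")

        println(m.size)      // 2 -- each override works on a clone, the original is untouched
        println(plus.size)   // 3
        println(many.size)   // 4
        println(upd.size)    // 3
      }
    }
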
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index a5a6b12ea9..cfc3079f41 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -23,7 +23,7 @@ package mutable
* @since 2.8
*/
class MapBuilder[A, B, Coll <: scala.collection.GenMap[A, B] with scala.collection.GenMapLike[A, B, Coll]](empty: Coll)
-extends Builder[(A, B), Coll] {
+extends ReusableBuilder[(A, B), Coll] {
protected var elems: Coll = empty
def +=(x: (A, B)): this.type = {
elems = (elems + x).asInstanceOf[Coll]
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 44af886cf5..949e5e3536 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -61,6 +61,18 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
override protected[this] def newBuilder: Builder[(A, B), This] = empty
protected[this] override def parCombiner = ParMap.newCombiner[A, B]
+
+ /** Converts this $coll to a sequence.
+ *
+ * '''Note:''' assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[(A, B)] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[(A, B)](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds a new key/value pair to this map and optionally returns previously bound value.
* If the map already contains a
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 646023f469..a333eedb1a 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -11,7 +11,7 @@ package collection
package mutable
import generic._
-import immutable.{List, Nil}
+import immutable.List
/**
* This class is used internally to represent mutable lists. It is the
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 2562f60355..e6889da3b5 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -16,7 +16,7 @@ import generic._
* To prioritize elements of type A there must be an implicit
* Ordering[A] available at creation.
*
- * Only the `dequeue` and `dequeueAll` methods will return methods in priority
+ * Only the `dequeue` and `dequeueAll` methods will return elements in priority
* order (while removing elements from the heap). Standard collection methods
* including `drop`, `iterator`, and `toString` will remove or traverse the heap
* in whichever order seems most convenient.
diff --git a/src/library/scala/collection/mutable/RedBlackTree.scala b/src/library/scala/collection/mutable/RedBlackTree.scala
new file mode 100644
index 0000000000..e4793242bf
--- /dev/null
+++ b/src/library/scala/collection/mutable/RedBlackTree.scala
@@ -0,0 +1,580 @@
+package scala.collection.mutable
+
+import scala.annotation.tailrec
+import scala.collection.Iterator
+
+/**
+ * An object containing the red-black tree implementation used by mutable `TreeMaps`.
+ *
+ * The trees implemented in this object are *not* thread safe.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.12
+ */
+private[collection] object RedBlackTree {
+
+ // ---- class structure ----
+
+ // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node.
+ // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size.
+ // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n)
+ // in the size of the range.
+
+ @SerialVersionUID(21575944040195605L)
+ final class Tree[A, B](var root: Node[A, B], var size: Int) extends Serializable
+
+ @SerialVersionUID(1950599696441054720L)
+ final class Node[A, B](var key: A, var value: B, var red: Boolean,
+ var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) extends Serializable {
+
+ override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")"
+ }
+
+ object Tree {
+ def empty[A, B]: Tree[A, B] = new Tree(null, 0)
+ }
+
+ object Node {
+
+ @inline def apply[A, B](key: A, value: B, red: Boolean,
+ left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] =
+ new Node(key, value, red, left, right, parent)
+
+ @inline def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] =
+ new Node(key, value, red, null, null, parent)
+
+ def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent))
+ }
+
+ // ---- getters ----
+
+ def isRed(node: Node[_, _]) = (node ne null) && node.red
+ def isBlack(node: Node[_, _]) = (node eq null) || !node.red
+
+ // ---- size ----
+
+ def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right)
+ def size(tree: Tree[_, _]): Int = tree.size
+ def isEmpty(tree: Tree[_, _]) = tree.root eq null
+ def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 }
+
+ // ---- search ----
+
+ def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match {
+ case null => None
+ case node => Some(node.value)
+ }
+
+ @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] =
+ if (node eq null) null
+ else {
+ val cmp = ord.compare(key, node.key)
+ if (cmp < 0) getNode(node.left, key)
+ else if (cmp > 0) getNode(node.right, key)
+ else node
+ }
+
+ def contains[A: Ordering](tree: Tree[A, _], key: A) = getNode(tree.root, key) ne null
+
+ def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private def minNode[A, B](node: Node[A, B]): Node[A, B] =
+ if (node eq null) null else minNodeNonNull(node)
+
+ @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] =
+ if (node.left eq null) node else minNodeNonNull(node.left)
+
+ def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private def maxNode[A, B](node: Node[A, B]): Node[A, B] =
+ if (node eq null) null else maxNodeNonNull(node)
+
+ @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] =
+ if (node.right eq null) node else maxNodeNonNull(node.right)
+
+ /**
+ * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such
+ * node.
+ */
+ def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] =
+ minNodeAfter(tree.root, key) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] =
+ minNodeAfter(tree.root, key) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = {
+ if (node eq null) null
+ else {
+ var y: Node[A, B] = null
+ var x = node
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+ if (cmp <= 0) y else successor(y)
+ }
+ }
+
+ /**
+ * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node.
+ */
+ def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] =
+ maxNodeBefore(tree.root, key) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] =
+ maxNodeBefore(tree.root, key) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = {
+ if (node eq null) null
+ else {
+ var y: Node[A, B] = null
+ var x = node
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+ if (cmp > 0) y else predecessor(y)
+ }
+ }
+
+ // ---- insertion ----
+
+ def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = {
+ var y: Node[A, B] = null
+ var x = tree.root
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+
+ if (cmp == 0) y.value = value
+ else {
+ val z = Node.leaf(key, value, red = true, y)
+
+ if (y eq null) tree.root = z
+ else if (cmp < 0) y.left = z
+ else y.right = z
+
+ fixAfterInsert(tree, z)
+ tree.size += 1
+ }
+ }
+
+ private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = {
+ var z = node
+ while (isRed(z.parent)) {
+ if (z.parent eq z.parent.parent.left) {
+ val y = z.parent.parent.right
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.right) {
+ z = z.parent
+ rotateLeft(tree, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ rotateRight(tree, z.parent.parent)
+ }
+ } else { // symmetric cases
+ val y = z.parent.parent.left
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.left) {
+ z = z.parent
+ rotateRight(tree, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ rotateLeft(tree, z.parent.parent)
+ }
+ }
+ }
+ tree.root.red = false
+ }
+
+ // ---- deletion ----
+
+ def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = {
+ val z = getNode(tree.root, key)
+ if (z ne null) {
+ var y = z
+ var yIsRed = y.red
+ var x: Node[A, B] = null
+ var xParent: Node[A, B] = null
+
+ if (z.left eq null) {
+ x = z.right
+ transplant(tree, z, z.right)
+ xParent = z.parent
+ }
+ else if (z.right eq null) {
+ x = z.left
+ transplant(tree, z, z.left)
+ xParent = z.parent
+ }
+ else {
+ y = minNodeNonNull(z.right)
+ yIsRed = y.red
+ x = y.right
+
+ if (y.parent eq z) xParent = y
+ else {
+ xParent = y.parent
+ transplant(tree, y, y.right)
+ y.right = z.right
+ y.right.parent = y
+ }
+ transplant(tree, z, y)
+ y.left = z.left
+ y.left.parent = y
+ y.red = z.red
+ }
+
+ if (!yIsRed) fixAfterDelete(tree, x, xParent)
+ tree.size -= 1
+ }
+ }
+
+ private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = {
+ var x = node
+ var xParent = parent
+ while ((x ne tree.root) && isBlack(x)) {
+ if (x eq xParent.left) {
+ var w = xParent.right
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ rotateLeft(tree, xParent)
+ w = xParent.right
+ }
+ if (isBlack(w.left) && isBlack(w.right)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.right)) {
+ w.left.red = false
+ w.red = true
+ rotateRight(tree, w)
+ w = xParent.right
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.right.red = false
+ rotateLeft(tree, xParent)
+ x = tree.root
+ }
+ } else { // symmetric cases
+ var w = xParent.left
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ rotateRight(tree, xParent)
+ w = xParent.left
+ }
+ if (isBlack(w.right) && isBlack(w.left)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.left)) {
+ w.right.red = false
+ w.red = true
+ rotateLeft(tree, w)
+ w = xParent.left
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.left.red = false
+ rotateRight(tree, xParent)
+ x = tree.root
+ }
+ }
+ xParent = x.parent
+ }
+ if (x ne null) x.red = false
+ }
+
+ // ---- helpers ----
+
+ /**
+ * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is,
+ * therefore, the last node), this method returns `null`.
+ */
+ private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = {
+ if (node.right ne null) minNodeNonNull(node.right)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.right)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ /**
+ * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is,
+ * therefore, the first node), this method returns `null`.
+ */
+ private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = {
+ if (node.left ne null) maxNodeNonNull(node.left)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.left)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
+ // assert(x.right ne null)
+ val y = x.right
+ x.right = y.left
+
+ if (y.left ne null) y.left.parent = x
+ y.parent = x.parent
+
+ if (x.parent eq null) tree.root = y
+ else if (x eq x.parent.left) x.parent.left = y
+ else x.parent.right = y
+
+ y.left = x
+ x.parent = y
+ }
+
+ private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
+ // assert(x.left ne null)
+ val y = x.left
+ x.left = y.right
+
+ if (y.right ne null) y.right.parent = x
+ y.parent = x.parent
+
+ if (x.parent eq null) tree.root = y
+ else if (x eq x.parent.right) x.parent.right = y
+ else x.parent.left = y
+
+ y.right = x
+ x.parent = y
+ }
+
+ /**
+ * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous
+ * parent and setting `from`'s parent to `to`'s previous parent. The children of `from` are left unchanged.
+ */
+ private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = {
+ if (to.parent eq null) tree.root = from
+ else if (to eq to.parent.left) to.parent.left = from
+ else to.parent.right = from
+
+ if (from ne null) from.parent = to.parent
+ }
+
+ // ---- tree traversal ----
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f)
+
+ private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit =
+ if (node ne null) foreachNodeNonNull(node, f)
+
+ private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = {
+ if (node.left ne null) foreachNodeNonNull(node.left, f)
+ f((node.key, node.value))
+ if (node.right ne null) foreachNodeNonNull(node.right, f)
+ }
+
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = foreachNodeKey(tree.root, f)
+
+ private[this] def foreachNodeKey[A, U](node: Node[A, _], f: A => U): Unit =
+ if (node ne null) foreachNodeKeyNonNull(node, f)
+
+ private[this] def foreachNodeKeyNonNull[A, U](node: Node[A, _], f: A => U): Unit = {
+ if (node.left ne null) foreachNodeKeyNonNull(node.left, f)
+ f(node.key)
+ if (node.right ne null) foreachNodeKeyNonNull(node.right, f)
+ }
+
+ def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f)
+
+ private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit =
+ if (node ne null) transformNodeNonNull(node, f)
+
+ private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = {
+ if (node.left ne null) transformNodeNonNull(node.left, f)
+ node.value = f(node.key, node.value)
+ if (node.right ne null) transformNodeNonNull(node.right, f)
+ }
+
+ def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] =
+ new EntriesIterator(tree, start, end)
+
+ def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] =
+ new KeysIterator(tree, start, end)
+
+ def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] =
+ new ValuesIterator(tree, start, end)
+
+ private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A])
+ (implicit ord: Ordering[A]) extends Iterator[R] {
+
+ protected[this] def nextResult(node: Node[A, B]): R
+
+ def hasNext: Boolean = nextNode ne null
+
+ def next(): R = nextNode match {
+ case null => throw new NoSuchElementException("next on empty iterator")
+ case node =>
+ nextNode = successor(node)
+ setNullIfAfterEnd()
+ nextResult(node)
+ }
+
+ private[this] var nextNode: Node[A, B] = start match {
+ case None => minNode(tree.root)
+ case Some(from) => minNodeAfter(tree.root, from)
+ }
+
+ private[this] def setNullIfAfterEnd(): Unit =
+ if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0)
+ nextNode = null
+
+ setNullIfAfterEnd()
+ }
+
+ private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, (A, B)](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = (node.key, node.value)
+ }
+
+ private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, A](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = node.key
+ }
+
+ private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, B](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = node.value
+ }
+
+ // ---- debugging ----
+
+ /**
+ * Checks if the tree is in a valid state. That happens if:
+ * - It is a valid binary search tree;
+ * - All red-black properties are satisfied;
+ * - All non-null nodes have their `parent` reference correct;
+ * - The size variable in `tree` corresponds to the actual size of the tree.
+ */
+ def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean =
+ isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size
+
+ /**
+ * Returns true if all non-null nodes have their `parent` reference correct.
+ */
+ private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = {
+
+ def hasProperParentRefs(node: Node[A, B]): Boolean = {
+ if (node eq null) true
+ else {
+ if ((node.left ne null) && (node.left.parent ne node) ||
+ (node.right ne null) && (node.right.parent ne node)) false
+ else hasProperParentRefs(node.left) && hasProperParentRefs(node.right)
+ }
+ }
+
+ if(tree.root eq null) true
+ else (tree.root.parent eq null) && hasProperParentRefs(tree.root)
+ }
+
+ /**
+ * Returns true if this node follows the properties of a binary search tree.
+ */
+ private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = {
+ if (node eq null) true
+ else {
+ if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) ||
+ (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false
+ else isValidBST(node.left) && isValidBST(node.right)
+ }
+ }
+
+ /**
+ * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red
+ * nodes are black, and if every path from a node to any of its null descendants contains the same number of black nodes.
+ */
+ private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = {
+
+ def noRedAfterRed(node: Node[A, B]): Boolean = {
+ if (node eq null) true
+ else if (node.red && (isRed(node.left) || isRed(node.right))) false
+ else noRedAfterRed(node.left) && noRedAfterRed(node.right)
+ }
+
+ def blackHeight(node: Node[A, B]): Int = {
+ if (node eq null) 1
+ else {
+ val lh = blackHeight(node.left)
+ val rh = blackHeight(node.right)
+
+ if (lh == -1 || lh != rh) -1
+ else if (isRed(node)) lh
+ else lh + 1
+ }
+ }
+
+ isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0
+ }
+}
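
An illustrative exercise of the new internal API, not part of the patch. `RedBlackTree` is `private[collection]`, so this sketch only compiles when placed inside `scala.collection.mutable` (as the `TreeMap`/`TreeSet` changes below are); it is shown purely to make the operation set concrete, and the object name is illustrative:

    package scala.collection.mutable

    object RedBlackTreeSketch {
      import RedBlackTree._

      def main(args: Array[String]): Unit = {
        val t = Tree.empty[Int, String]
        insert(t, 2, "two")
        insert(t, 1, "one")
        insert(t, 3, "three")

        println(size(t))              // 3 -- tracked on the Tree object, O(1)
        println(get(t, 2))            // Some(two)
        println(iterator(t).toList)   // in key order: List((1,one), (2,two), (3,three))

        delete(t, 2)
        println(contains(t, 2))       // false
        println(isValid(t))           // true -- BST, red-black and parent invariants still hold
      }
    }
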
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c3047522e2..85a299216e 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -74,7 +74,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
//##########################################################################
diff --git a/src/library/scala/collection/mutable/ReusableBuilder.scala b/src/library/scala/collection/mutable/ReusableBuilder.scala
new file mode 100644
index 0000000000..caab3071b6
--- /dev/null
+++ b/src/library/scala/collection/mutable/ReusableBuilder.scala
@@ -0,0 +1,51 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala
+package collection
+package mutable
+
+import generic._
+
+/** `ReusableBuilder` is a marker trait that indicates that a `Builder`
+ * can be reused to build more than one instance of a collection. In
+ * particular, calling `result` followed by `clear` will produce a
+ * collection and reset the builder to begin building a new collection
+ * of the same type.
+ *
+ * It is up to subclasses to implement this behavior, and to document any
+ * other behavior that varies from standard `ReusableBuilder` usage
+ * (e.g. operations being well-defined after a call to `result`, or allowing
+ * multiple calls to result to obtain different snapshots of a collection under
+ * construction).
+ *
+ * @tparam Elem the type of elements that get added to the builder.
+ * @tparam To the type of collection that it produces.
+ *
+ * @since 2.12
+ */
+trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] {
+ /** Clears the contents of this builder.
+ * After execution of this method, the builder will contain no elements.
+ *
+ * If executed immediately after a call to `result`, this allows a new
+ * instance of the same type of collection to be built.
+ */
+ override def clear(): Unit // Note: overriding for scaladoc only!
+
+ /** Produces a collection from the added elements.
+ *
+ * After a call to `result`, the behavior of all other methods is undefined
+ * save for `clear`. If `clear` is called, then the builder is reset and
+ * may be used to build another instance.
+ *
+ * @return a collection containing the elements added to this builder.
+ */
+ override def result(): To // Note: overriding for scaladoc only!
+}
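
A sketch of what the new trait asks of implementers, not part of the patch: after `result()`, a `clear()` must fully reset the builder so it can produce another, independent collection. The class and value names are illustrative:

    import scala.collection.mutable.ReusableBuilder

    final class VectorReBuilder[A] extends ReusableBuilder[A, Vector[A]] {
      private var elems = Vector.empty[A]
      def +=(x: A): this.type = { elems = elems :+ x; this }
      def clear(): Unit = { elems = Vector.empty }   // reset: enables building a fresh Vector
      def result(): Vector[A] = elems                // safe to call more than once here
    }

    object VectorReBuilderSketch {
      def main(args: Array[String]): Unit = {
        val b = new VectorReBuilder[Int]
        b += 1; b += 2
        println(b.result())   // Vector(1, 2)
        b.clear()
        b += 3
        println(b.result())   // Vector(3)
      }
    }
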
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 01bfdc96ed..5d1e9ffc3a 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -17,7 +17,9 @@ package mutable
* @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: scala.collection.Set[A] with scala.collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
+class SetBuilder[A, Coll <: scala.collection.Set[A]
+with scala.collection.SetLike[A, Coll]](empty: Coll)
+extends ReusableBuilder[A, Coll] {
protected var elems: Coll = empty
def +=(x: A): this.type = { elems = elems + x; this }
def clear() { elems = empty }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 01075a2633..a19130e742 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -72,6 +72,17 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
protected[this] override def parCombiner = ParSet.newCombiner[A]
+ /** Converts this $coll to a sequence.
+ *
+ * '''Note:''' assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[A] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[A](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds an element to this $coll.
*
* @param elem the element to be added
diff --git a/src/library/scala/collection/mutable/SortedMap.scala b/src/library/scala/collection/mutable/SortedMap.scala
new file mode 100644
index 0000000000..806b30e79a
--- /dev/null
+++ b/src/library/scala/collection/mutable/SortedMap.scala
@@ -0,0 +1,57 @@
+package scala
+package collection
+package mutable
+
+import generic._
+
+/**
+ * A mutable map whose keys are sorted.
+ *
+ * @tparam A the type of the keys contained in this sorted map.
+ * @tparam B the type of the values associated with the keys.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.12
+ *
+ * @define Coll mutable.SortedMap
+ * @define coll mutable sorted map
+ */
+trait SortedMap[A, B]
+ extends Map[A, B]
+ with collection.SortedMap[A, B]
+ with MapLike[A, B, SortedMap[A, B]]
+ with SortedMapLike[A, B, SortedMap[A, B]] {
+
+ override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] = SortedMap.newBuilder[A, B]
+
+ override def empty: SortedMap[A, B] = SortedMap.empty
+
+ override def updated[B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value))
+
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = clone().asInstanceOf[SortedMap[A, B1]] += kv
+
+ override def +[B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1)*): SortedMap[A, B1] =
+ clone().asInstanceOf[SortedMap[A, B1]] += elem1 += elem2 ++= elems
+
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ clone().asInstanceOf[SortedMap[A, B1]] ++= xs.seq
+}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll mutable.SortedMap
+ * @define coll mutable sorted map
+ */
+object SortedMap extends MutableSortedMapFactory[SortedMap] {
+
+ def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
+
+ /** $sortedMapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] =
+ new SortedMapCanBuildFrom[A, B]
+}
+
+/** Explicit instantiation of the `SortedMap` trait to reduce class file size in subclasses. */
+abstract class AbstractSortedMap[A, B] extends scala.collection.mutable.AbstractMap[A, B] with SortedMap[A, B]
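
A brief usage sketch, not from the patch, of the new `mutable.SortedMap` entry point, which defaults to the mutable `TreeMap` added further below (names are illustrative):

    import scala.collection.mutable

    object MutableSortedMapSketch {
      def main(args: Array[String]): Unit = {
        val m = mutable.SortedMap(3 -> "c", 1 -> "a", 2 -> "b")
        m += (0 -> "z")                            // in-place update; key ordering is maintained
        println(m.keys.toList)                     // List(0, 1, 2, 3)
        println(s"${m.firstKey} .. ${m.lastKey}")  // 0 .. 3
        println(m.range(1, 3))                     // TreeMap(1 -> a, 2 -> b)
      }
    }
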
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 0f2fa75abd..304469916d 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -43,8 +43,13 @@ trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.S
*
*/
object SortedSet extends MutableSortedSetFactory[SortedSet] {
- implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
+ def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
+ // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific
+ override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
+
+/** Explicit instantiation of the `SortedSet` trait to reduce class file size in subclasses. */
+abstract class AbstractSortedSet[A] extends scala.collection.mutable.AbstractSet[A] with SortedSet[A]
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index c56d40786e..b5b9498374 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -33,7 +33,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
with java.lang.CharSequence
with IndexedSeq[Char]
with StringLike[StringBuilder]
- with Builder[Char, String]
+ with ReusableBuilder[Char, String]
with Serializable {
override protected[this] def thisCollection: StringBuilder = this
@@ -435,7 +435,11 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
*/
override def mkString = toString
- /** Returns the result of this Builder (a String)
+ /** Returns the result of this Builder (a String).
+ *
+ * If this method is called multiple times, each call will result in a snapshot of the buffer at that point in time.
+ * In particular, a `StringBuilder` can be used to build multiple independent strings by emptying the buffer with `clear`
+ * after each call to `result`.
*
* @return the string assembled by this StringBuilder
*/
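
A sketch, not from the patch, of the snapshot and reuse behaviour the updated `result` scaladoc above describes (names are illustrative):

    object StringBuilderReuseSketch {
      def main(args: Array[String]): Unit = {
        val sb = new StringBuilder      // alias for scala.collection.mutable.StringBuilder
        sb ++= "foo"
        val a = sb.result()             // "foo" -- a snapshot; the buffer keeps its contents
        sb ++= "bar"
        val b = sb.result()             // "foobar"
        sb.clear()                      // empty the buffer to build an independent string
        sb ++= "baz"
        println(Seq(a, b, sb.result())) // List(foo, foobar, baz)
      }
    }
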
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index bbb6f5a9bb..c77a6fad62 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -27,7 +27,6 @@ package mutable
*/
@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0")
class SynchronizedStack[A] extends Stack[A] {
- import scala.collection.Traversable
/** Checks if the stack is empty.
*
diff --git a/src/library/scala/collection/mutable/TreeMap.scala b/src/library/scala/collection/mutable/TreeMap.scala
new file mode 100644
index 0000000000..dc7d5d750e
--- /dev/null
+++ b/src/library/scala/collection/mutable/TreeMap.scala
@@ -0,0 +1,185 @@
+package scala
+package collection
+package mutable
+
+import scala.collection.generic._
+import scala.collection.mutable.{RedBlackTree => RB}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll mutable.TreeMap
+ * @define coll mutable tree map
+ */
+object TreeMap extends MutableSortedMapFactory[TreeMap] {
+
+ def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord)
+
+ /** $sortedMapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] =
+ new SortedMapCanBuildFrom[A, B]
+}
+
+/**
+ * A mutable sorted map implemented using a mutable red-black tree as underlying data structure.
+ *
+ * @param ordering the implicit ordering used to compare objects of type `A`.
+ * @tparam A the type of the keys contained in this tree map.
+ * @tparam B the type of the values associated with the keys.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.12
+ *
+ * @define Coll mutable.TreeMap
+ * @define coll mutable tree map
+ */
+@SerialVersionUID(-2558985573956740112L)
+sealed class TreeMap[A, B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
+ extends AbstractSortedMap[A, B]
+ with SortedMap[A, B]
+ with MapLike[A, B, TreeMap[A, B]]
+ with SortedMapLike[A, B, TreeMap[A, B]]
+ with Serializable {
+
+ /**
+ * Creates an empty `TreeMap`.
+ * @param ord the implicit ordering used to compare objects of type `A`.
+ * @return an empty `TreeMap`.
+ */
+ def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord)
+
+ override def empty = TreeMap.empty
+ override protected[this] def newBuilder = TreeMap.newBuilder[A, B]
+
+ /**
+ * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and
+ * vice versa.
+ *
+ * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+ * of whether the entries are added through the original map or through this view. That means that if one inserts a
+ * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+ * newly added entry. Mutations are always reflected in the original map, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMapView(from, until)
+
+ def -=(key: A): this.type = { RB.delete(tree, key); this }
+ def +=(kv: (A, B)): this.type = { RB.insert(tree, kv._1, kv._2); this }
+
+ def get(key: A) = RB.get(tree, key)
+
+ def iterator = RB.iterator(tree)
+ def iteratorFrom(start: A) = RB.iterator(tree, Some(start))
+ def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start))
+ def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, Some(start))
+
+ override def size = RB.size(tree)
+ override def isEmpty = RB.isEmpty(tree)
+ override def contains(key: A) = RB.contains(tree, key)
+
+ override def head = RB.min(tree).get
+ override def headOption = RB.min(tree)
+ override def last = RB.max(tree).get
+ override def lastOption = RB.max(tree)
+
+ override def keysIterator = RB.keysIterator(tree)
+ override def valuesIterator = RB.valuesIterator(tree)
+
+ override def foreach[U](f: ((A, B)) => U): Unit = RB.foreach(tree, f)
+ override def transform(f: (A, B) => B) = { RB.transform(tree, f); this }
+ override def clear(): Unit = RB.clear(tree)
+
+ override def stringPrefix = "TreeMap"
+
+ /**
+ * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa.
+ *
+ * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+ * of whether the entries are added through the original map or through this view. That means that if one inserts a
+ * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+ * newly added entry. Mutations are always reflected in the original map, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ @SerialVersionUID(2219159283273389116L)
+ private[this] final class TreeMapView(from: Option[A], until: Option[A]) extends TreeMap[A, B](tree) {
+
+ /**
+ * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+ */
+ private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match {
+ case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+ case (None, _) => newFrom
+ case _ => from
+ }
+
+ /**
+ * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+ */
+ private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match {
+ case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+ case (None, _) => newUntil
+ case _ => until
+ }
+
+ /**
+ * Returns true if the argument is inside the view bounds (between `from` and `until`).
+ */
+ private[this] def isInsideViewBounds(key: A): Boolean = {
+ val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0
+ val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0
+ afterFrom && beforeUntil
+ }
+
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] =
+ new TreeMapView(pickLowerBound(from), pickUpperBound(until))
+
+ override def get(key: A) = if (isInsideViewBounds(key)) RB.get(tree, key) else None
+
+ override def iterator = RB.iterator(tree, from, until)
+ override def iteratorFrom(start: A) = RB.iterator(tree, pickLowerBound(Some(start)), until)
+ override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
+ override def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, pickLowerBound(Some(start)), until)
+
+ override def size = iterator.length
+ override def isEmpty = !iterator.hasNext
+ override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key)
+
+ override def head = headOption.get
+ override def headOption = {
+ val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree)
+ (entry, until) match {
+ case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None
+ case _ => entry
+ }
+ }
+
+ override def last = lastOption.get
+ override def lastOption = {
+ val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree)
+ (entry, from) match {
+ case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None
+ case _ => entry
+ }
+ }
+
+ // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized
+ // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See
+ // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this.
+ override def foreach[U](f: ((A, B)) => U): Unit = iterator.foreach(f)
+ override def transform(f: (A, B) => B) = {
+ iterator.foreach { case (key, value) => update(key, f(key, value)) }
+ this
+ }
+
+ override def clone() = super.clone().rangeImpl(from, until)
+ }
+}
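
An illustrative sketch, not part of the patch, of the `TreeMapView` semantics documented above: the view and the original map share one tree, but the view filters what is visible by its bounds (names are illustrative):

    import scala.collection.mutable

    object TreeMapRangeSketch {
      def main(args: Array[String]): Unit = {
        val m = mutable.TreeMap(1 -> "a", 5 -> "e", 9 -> "i")
        val view = m.rangeImpl(Some(3), Some(8))   // keys in [3, 8)

        view += (4 -> "d")            // mutations through the view land in the shared tree
        println(m(4))                 // d

        view += (100 -> "x")          // outside the view's bounds...
        println(view.contains(100))   // false -- ...invisible through the view
        println(m.contains(100))      // true  -- ...but present in the original map

        println(view.toList)          // List((4,d), (5,e))
      }
    }
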
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index f849eea569..ada6f145ad 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -11,8 +11,7 @@ package collection
package mutable
import generic._
-import scala.collection.immutable.{RedBlackTree => RB}
-import scala.runtime.ObjectRef
+import scala.collection.mutable.{RedBlackTree => RB}
/**
* @define Coll `mutable.TreeSet`
@@ -29,88 +28,162 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
*/
def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]()
+ /** $sortedMapCanBuildFromInfo */
+ implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, TreeSet[A]] =
+ new SortedSetCanBuildFrom[A]
}
/**
- * A mutable SortedSet using an immutable RedBlack Tree as underlying data structure.
+ * A mutable sorted set implemented using a mutable red-black tree as underlying data structure.
*
- * @author Lucien Pereira
+ * @param ordering the implicit ordering used to compare objects of type `A`.
+ * @tparam A the type of the keys contained in this tree set.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.10
*
+ * @define Coll mutable.TreeSet
+ * @define coll mutable tree set
*/
-@deprecatedInheritance("TreeSet is not designed to enable meaningful subclassing.", "2.11.0")
-class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], until: Option[A])(implicit val ordering: Ordering[A])
- extends SortedSet[A] with SetLike[A, TreeSet[A]]
- with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
+// Original API designed in part by Lucien Pereira
+@SerialVersionUID(-3642111301929493640L)
+sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A])
+ extends AbstractSortedSet[A]
+ with SortedSet[A]
+ with SetLike[A, TreeSet[A]]
+ with SortedSetLike[A, TreeSet[A]]
+ with Serializable {
if (ordering eq null)
throw new NullPointerException("ordering must not be null")
- def this()(implicit ordering: Ordering[A]) = this(new ObjectRef(null), None, None)
+ /**
+ * Creates an empty `TreeSet`.
+ * @param ord the implicit ordering used to compare objects of type `A`.
+ * @return an empty `TreeSet`.
+ */
+ def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord)
- override def size: Int = RB.countInRange(treeRef.elem, from, until)
+ override def empty = TreeSet.empty
+ override protected[this] def newBuilder = TreeSet.newBuilder[A]
- override def stringPrefix = "TreeSet"
+ /**
+ * Creates a ranged projection of this set. Any mutations in the ranged projection will update the original set
+ * and vice versa.
+ *
+ * Only keys within this projection's key range will ever appear as elements of this set, independently of whether
+ * the elements are added through the original set or through this view. That means that if one inserts into a view
+ * an element whose key is outside the view's bounds, calls to `contains` will _not_ see the newly added element.
+ * Mutations are always reflected in the original set, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetView(from, until)
- override def empty: TreeSet[A] = TreeSet.empty
+ def -=(key: A): this.type = { RB.delete(tree, key); this }
+ def +=(elem: A): this.type = { RB.insert(tree, elem, null); this }
- private def pickBound(comparison: (A, A) => A, oldBound: Option[A], newBound: Option[A]) = (newBound, oldBound) match {
- case (Some(newB), Some(oldB)) => Some(comparison(newB, oldB))
- case (None, _) => oldBound
- case _ => newBound
- }
+ def contains(elem: A) = RB.contains(tree, elem)
- override def rangeImpl(fromArg: Option[A], untilArg: Option[A]): TreeSet[A] = {
- val newFrom = pickBound(ordering.max, fromArg, from)
- val newUntil = pickBound(ordering.min, untilArg, until)
+ def iterator = RB.keysIterator(tree)
+ def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start))
+ override def iteratorFrom(start: A) = RB.keysIterator(tree, Some(start))
- new TreeSet(treeRef, newFrom, newUntil)
- }
+ override def size = RB.size(tree)
+ override def isEmpty = RB.isEmpty(tree)
- override def -=(elem: A): this.type = {
- treeRef.elem = RB.delete(treeRef.elem, elem)
- this
- }
+ override def head = RB.minKey(tree).get
+ override def headOption = RB.minKey(tree)
+ override def last = RB.maxKey(tree).get
+ override def lastOption = RB.maxKey(tree)
- override def +=(elem: A): this.type = {
- treeRef.elem = RB.update(treeRef.elem, elem, null, overwrite = false)
- this
- }
+ override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f)
+ override def clear(): Unit = RB.clear(tree)
+
+ override def stringPrefix = "TreeSet"
/**
- * Thanks to the immutable nature of the
- * underlying Tree, we can share it with
- * the clone. So clone complexity in time is O(1).
+ * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa.
*
+ * Only keys within this projection's key range will ever appear as elements of this set, independently of whether
+ * the elements are added through the original set or through this view. That means that if one inserts into a view
+ * an element whose key is outside the view's bounds, calls to `contains` will _not_ see the newly added element.
+ * Mutations are always reflected in the original set, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
*/
- override def clone(): TreeSet[A] =
- new TreeSet[A](new ObjectRef(treeRef.elem), from, until)
-
- private val notProjection = !(from.isDefined || until.isDefined)
+ @SerialVersionUID(7087824939194006086L)
+ private[this] final class TreeSetView(from: Option[A], until: Option[A]) extends TreeSet[A](tree) {
+
+ /**
+ * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+ */
+ private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match {
+ case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+ case (None, _) => newFrom
+ case _ => from
+ }
- override def contains(elem: A): Boolean = {
- def leftAcceptable: Boolean = from match {
- case Some(lb) => ordering.gteq(elem, lb)
- case _ => true
+ /**
+ * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+ */
+ private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match {
+ case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+ case (None, _) => newUntil
+ case _ => until
}
- def rightAcceptable: Boolean = until match {
- case Some(ub) => ordering.lt(elem, ub)
- case _ => true
+ /**
+ * Returns true if the argument is inside the view bounds (between `from` and `until`).
+ */
+ private[this] def isInsideViewBounds(key: A): Boolean = {
+ val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0
+ val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0
+ afterFrom && beforeUntil
}
- (notProjection || (leftAcceptable && rightAcceptable)) &&
- RB.contains(treeRef.elem, elem)
- }
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] =
+ new TreeSetView(pickLowerBound(from), pickUpperBound(until))
+
+ override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key)
+
+ override def iterator = RB.keysIterator(tree, from, until)
+ override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
+ override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
- override def iterator: Iterator[A] = iteratorFrom(None)
+ override def size = iterator.length
+ override def isEmpty = !iterator.hasNext
- override def keysIteratorFrom(start: A) = iteratorFrom(Some(start))
+ override def head = headOption.get
+ override def headOption = {
+ val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree)
+ (elem, until) match {
+ case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None
+ case _ => elem
+ }
+ }
- private def iteratorFrom(start: Option[A]) = {
- val it = RB.keysIterator(treeRef.elem, pickBound(ordering.max, from, start))
- until match {
- case None => it
- case Some(ub) => it takeWhile (k => ordering.lt(k, ub))
+ override def last = lastOption.get
+ override def lastOption = {
+ val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree)
+ (elem, from) match {
+ case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None
+ case _ => elem
+ }
}
+
+ // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized
+ // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See
+ // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this.
+ override def foreach[U](f: A => U): Unit = iterator.foreach(f)
+
+ override def clone() = super.clone().rangeImpl(from, until)
}
}
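`TreeSetView` likewise filters reads by its bounds, and nested calls to `rangeImpl` compose bounds through `pickLowerBound`/`pickUpperBound`, always keeping the most constraining one. A minimal sketch of what that means in practice (element values are illustrative):

    import scala.collection.mutable

    val s = mutable.TreeSet(1, 3, 5, 7, 9)
    val v = s.rangeImpl(Some(3), Some(8))  // elements in [3, 8)
    v.toList                               // List(3, 5, 7)
    v += 2                                 // written to s, but filtered out here
    v.contains(2)                          // false
    s.contains(2)                          // true
    val w = v.rangeImpl(Some(1), Some(6))  // bounds compose to [3, 6)
    w.toList                               // List(3, 5)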
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index bfe95a11ab..c4781321d7 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -17,12 +17,14 @@ import scala.runtime.ScalaRunTime._
/** A builder class for arrays.
*
+ * This builder can be reused.
+ *
* @tparam A type of elements that can be added to this builder.
* @param tag class tag for objects of type `A`.
*
* @since 2.8
*/
-class WrappedArrayBuilder[A](tag: ClassTag[A]) extends Builder[A, WrappedArray[A]] {
+class WrappedArrayBuilder[A](tag: ClassTag[A]) extends ReusableBuilder[A, WrappedArray[A]] {
@deprecated("use tag instead", "2.10.0")
val manifest: ClassTag[A] = tag
@@ -73,12 +75,13 @@ class WrappedArrayBuilder[A](tag: ClassTag[A]) extends Builder[A, WrappedArray[A
this
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) elems
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ elems
+ }
else mkArray(size)
}
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 8c9b959569..2ed7bc075e 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -9,6 +9,8 @@
package scala
package collection.parallel
+import scala.language.{ higherKinds, implicitConversions }
+
import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
import scala.collection.IterableLike
@@ -21,13 +23,9 @@ import scala.collection.GenIterable
import scala.collection.GenTraversableOnce
import scala.collection.GenTraversable
import immutable.HashMapCombiner
-import scala.reflect.{ClassTag, classTag}
-
-import java.util.concurrent.atomic.AtomicBoolean
+import scala.reflect.ClassTag
import scala.annotation.unchecked.uncheckedVariance
-import scala.annotation.unchecked.uncheckedStable
-import scala.language.{ higherKinds, implicitConversions }
import scala.collection.parallel.ParallelCollectionImplicits._
@@ -195,7 +193,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* import scala.collection.parallel._
* val pc = mutable.ParArray(1, 2, 3)
* pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * new java.util.concurrent.ForkJoinPool(2))
* }}}
*
* @see [[scala.collection.parallel.TaskSupport]]
@@ -1284,7 +1282,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf())
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
@@ -1300,7 +1298,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf())
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = if (pit.remaining <= len) {
val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
@@ -1322,7 +1320,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Accessor[Unit, CopyToArray[U, This]] {
@volatile var result: Unit = ()
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
@@ -1379,7 +1377,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val half = howmany / 2
ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half))
} else trees(from)
- protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(pit: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
@@ -1416,7 +1414,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
new FromScanTree(left, z, op, cbf),
new FromScanTree(right, z, op, cbf)
)
- case _ => unsupportedop("Cannot be split further")
+ case _ => throw new UnsupportedOperationException("Cannot be split further")
}
def shouldSplitFurther = tree match {
case ScanNode(_, _) => true
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 9f92e6c1e8..70afe5174b 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -11,7 +11,6 @@ package collection.parallel
import scala.collection.Map
import scala.collection.GenMap
-import scala.collection.mutable.Builder
import scala.collection.generic.ParMapFactory
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 0a671fb085..a3ac388587 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -12,10 +12,8 @@ package collection.parallel
import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
-import scala.collection.mutable.Builder
+
import scala.annotation.unchecked.uncheckedVariance
-import scala.collection.generic.IdleSignalling
-import scala.collection.generic.Signalling
/** A template trait for mutable parallel maps. This trait is to be mixed in
* with concrete parallel maps to override the representation type.
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 0b6fec364e..60fa1858e7 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -9,11 +9,10 @@
package scala
package collection.parallel
-import scala.collection.{ Parallel, SeqLike, GenSeqLike, GenSeq, GenIterable, Iterator }
+import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator }
import scala.collection.generic.DefaultSignalling
import scala.collection.generic.AtomicIndexFlag
import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanCombineFrom
import scala.collection.generic.VolatileAbort
import scala.collection.parallel.ParallelCollectionImplicits._
@@ -365,7 +364,7 @@ self =>
pit.setIndexFlagIfLesser(from)
}
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
@@ -386,7 +385,7 @@ self =>
pit.setIndexFlagIfGreater(pos)
}
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
@@ -420,7 +419,7 @@ self =>
result = pit.sameElements(otherpit)
if (!result) pit.abort()
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
@@ -434,7 +433,7 @@ self =>
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf())
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
@@ -447,7 +446,7 @@ self =>
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf())
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = len / 2
val sp = len - len / 2
@@ -468,7 +467,7 @@ self =>
result = pit.corresponds(corr)(otherpit)
if (!result) pit.abort()
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 5f2ceac0e0..63d63d9ef3 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -9,13 +9,10 @@
package scala
package collection.parallel
-import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
import scala.collection.generic.IdleSignalling
-import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
-import scala.collection.Iterator.empty
import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
@@ -456,6 +453,15 @@ self =>
}
it
}
+ /** Drop implemented as simple eager consumption. */
+ override def drop(n: Int): IterableSplitter[T] = {
+ var i = 0
+ while (i < n && hasNext) {
+ next()
+ i += 1
+ }
+ this
+ }
override def take(n: Int): IterableSplitter[T] = newTaken(n)
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 9064018d46..6ab694de04 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -10,7 +10,7 @@ package scala
package collection.parallel
import java.util.concurrent.ThreadPoolExecutor
-import scala.concurrent.forkjoin.ForkJoinPool
+import java.util.concurrent.ForkJoinPool
import scala.concurrent.ExecutionContext
/** A trait implementing the scheduling of a parallel collection operation.
@@ -41,7 +41,7 @@ import scala.concurrent.ExecutionContext
* import scala.collection.parallel._
* val pc = mutable.ParArray(1, 2, 3)
* pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * new java.util.concurrent.ForkJoinPool(2))
* }}}
*
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section
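With the imports and Scaladoc now pointing at `java.util.concurrent.ForkJoinPool` rather than the `scala.concurrent.forkjoin` aliases, configuring a custom pool looks as follows; this is a self-contained version of the snippet from the updated Scaladoc:

    import scala.collection.parallel._
    import java.util.concurrent.ForkJoinPool

    val pc = mutable.ParArray(1, 2, 3)
    pc.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(2))
    pc.map(_ + 1)  // operations now run on the two-thread pool configured above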
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index fcf0dff846..c9a75752df 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -10,7 +10,7 @@ package scala
package collection.parallel
import java.util.concurrent.ThreadPoolExecutor
-import scala.concurrent.forkjoin._
+import java.util.concurrent.{ForkJoinPool, RecursiveAction, ForkJoinWorkerThread}
import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
import scala.annotation.unchecked.uncheckedVariance
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 65a632470e..3a1ec7fff8 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -197,7 +197,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
while (i < chunksz) {
val v = chunkarr(i).asInstanceOf[T]
val hc = trie.computeHash(v)
- trie = trie.updated0(v, hc, rootbits)
+ trie = trie.updated0(v, hc, rootbits) // internal API, private[collection]
i += 1
}
i = 0
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 2956c2a883..65bb2e12c5 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -16,7 +16,6 @@ import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
-import scala.collection.GenMapLike
/** A template trait for immutable parallel maps.
*
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index ec90de3a7d..8fd5382ce9 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -12,7 +12,6 @@ package collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
-import scala.collection.generic.CanCombineFrom
import scala.collection.Iterator
/** Parallel ranges.
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index d0d022db4b..8a2cf2716a 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -18,7 +18,6 @@ import scala.collection.generic.GenericParCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParFactory
-import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
import scala.collection.parallel.ParSeqLike
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 79322c85b1..6883457fef 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -9,18 +9,10 @@
package scala
package collection.parallel.mutable
-
-
-import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.unsupportedop
-import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
-
-
/** An array combiner that uses a chain of arraybuffers to store elements. */
trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] {
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index d1379cde11..5e4572da60 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -9,13 +9,9 @@
package scala
package collection.parallel.mutable
-import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
-import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.UnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
import scala.reflect.ClassTag
@@ -62,7 +58,7 @@ extends Combiner[T, ParArray[T]] {
case that: UnrolledParArrayCombiner[t] =>
buff concat that.buff
this
- case _ => unsupportedop("Cannot combine with combiner of different type.")
+ case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.")
}
def size = buff.size
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index d77dcb0658..ba64ca505b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -35,15 +35,7 @@ package object parallel {
else sz
}
- private[parallel] def unsupported = throw new UnsupportedOperationException
-
- private[parallel] def unsupportedop(msg: String) = throw new UnsupportedOperationException(msg)
-
- private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
-
- private[parallel] def getTaskSupport: TaskSupport = new ExecutionContextTaskSupport
-
- val defaultTaskSupport: TaskSupport = getTaskSupport
+ val defaultTaskSupport: TaskSupport = new ExecutionContextTaskSupport
def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
c match {
@@ -98,7 +90,7 @@ package parallel {
}
}
}
-
+
trait FactoryOps[From, Elem, To] {
trait Otherwise[R] {
def otherwise(notbody: => R): R