Diffstat (limited to 'src/library')
-rw-r--r--  src/library/scala/Enumeration.scala | 14
-rw-r--r--  src/library/scala/PartialFunction.scala | 13
-rw-r--r--  src/library/scala/Predef.scala | 3
-rw-r--r--  src/library/scala/annotation/elidable.scala | 4
-rw-r--r--  src/library/scala/annotation/implicitAmbiguous.scala | 4
-rw-r--r--  src/library/scala/collection/BitSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenMapLike.scala | 3
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 20
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 86
-rw-r--r--  src/library/scala/collection/IndexedSeqLike.scala | 3
-rw-r--r--  src/library/scala/collection/IndexedSeqOptimized.scala | 1
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 11
-rw-r--r--  src/library/scala/collection/IterableProxyLike.scala | 1
-rw-r--r--  src/library/scala/collection/Iterator.scala | 18
-rw-r--r--  src/library/scala/collection/LinearSeqOptimized.scala | 4
-rw-r--r--  src/library/scala/collection/MapLike.scala | 14
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 5
-rw-r--r--  src/library/scala/collection/SetLike.scala | 4
-rw-r--r--  src/library/scala/collection/Traversable.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 38
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 15
-rw-r--r--  src/library/scala/collection/TraversableProxyLike.scala | 2
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 2
-rw-r--r--  src/library/scala/collection/convert/WrapAsScala.scala | 14
-rw-r--r--  src/library/scala/collection/generic/GenericParTemplate.scala | 1
-rw-r--r--  src/library/scala/collection/generic/MapFactory.scala | 2
-rw-r--r--  src/library/scala/collection/generic/ParFactory.scala | 1
-rw-r--r--  src/library/scala/collection/generic/ParSetFactory.scala | 1
-rw-r--r--  src/library/scala/collection/generic/SetFactory.scala | 1
-rw-r--r--  src/library/scala/collection/generic/TraversableForwarder.scala | 2
-rw-r--r--  src/library/scala/collection/generic/package.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 36
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 14
-rw-r--r--  src/library/scala/collection/immutable/ListSet.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Map.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/SortedMap.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/SortedSet.scala | 1
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 18
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 32
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 122
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuilder.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/ArraySeq.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 9
-rw-r--r--  src/library/scala/collection/mutable/BitSet.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/BufferLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/HashMap.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/HashSet.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ImmutableSetAdaptor.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedListLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala | 126
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/SetLike.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedSet.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedStack.scala | 1
-rw-r--r--  src/library/scala/collection/package.scala | 38
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 19
-rw-r--r--  src/library/scala/collection/parallel/ParMap.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 12
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 15
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParMap.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParRange.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 1
-rw-r--r--  src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala | 8
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 12
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 3
-rw-r--r--  src/library/scala/concurrent/Future.scala | 15
-rw-r--r--  src/library/scala/concurrent/FutureTaskRunner.scala | 39
-rw-r--r--  src/library/scala/concurrent/ManagedBlocker.scala | 34
-rw-r--r--  src/library/scala/concurrent/SyncVar.scala | 43
-rw-r--r--  src/library/scala/concurrent/TaskRunner.scala | 27
-rw-r--r--  src/library/scala/concurrent/ThreadPoolRunner.scala | 51
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 45
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 8
-rw-r--r--  src/library/scala/deprecatedInheritance.scala | 1
-rw-r--r--  src/library/scala/deprecatedOverriding.scala | 1
-rw-r--r--  src/library/scala/io/BufferedSource.scala | 2
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 103
-rw-r--r--  src/library/scala/reflect/ScalaLongSignature.java | 2
-rw-r--r--  src/library/scala/reflect/ScalaSignature.java | 2
-rw-r--r--  src/library/scala/runtime/AbstractPartialFunction.scala | 2
-rw-r--r--  src/library/scala/runtime/ScalaNumberProxy.scala | 4
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 8
-rw-r--r--  src/library/scala/runtime/SeqCharSequence.scala | 2
-rw-r--r--  src/library/scala/sys/SystemProperties.scala | 36
-rw-r--r--  src/library/scala/sys/process/Process.scala | 4
-rw-r--r--  src/library/scala/sys/process/ProcessImpl.scala | 19
-rw-r--r--  src/library/scala/util/Sorting.scala | 18
-rw-r--r--  src/library/scala/util/Try.scala | 5
-rw-r--r--  src/library/scala/util/control/Exception.scala | 2
-rw-r--r--  src/library/scala/util/control/TailCalls.scala | 2
105 files changed, 539 insertions, 796 deletions
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index c4aa511cd7..9d9a3f849b 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -9,7 +9,7 @@
package scala
import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet }
-import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
+import java.lang.reflect.{ Method => JMethod, Field => JField }
import scala.reflect.NameTransformer._
import scala.util.matching.Regex
@@ -154,14 +154,14 @@ abstract class Enumeration (initial: Int) extends Serializable {
protected final def Value(i: Int, name: String): Value = new Val(i, name)
private def populateNameMap() {
- val fields = getClass.getDeclaredFields
- def isValDef(m: JMethod) = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
+ val fields: Array[JField] = getClass.getDeclaredFields
+ def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
// The list of possible Value methods: 0-args which return a conforming type
- val methods = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
- classOf[Value].isAssignableFrom(m.getReturnType) &&
- m.getDeclaringClass != classOf[Enumeration] &&
- isValDef(m))
+ val methods: Array[JMethod] = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
+ classOf[Value].isAssignableFrom(m.getReturnType) &&
+ m.getDeclaringClass != classOf[Enumeration] &&
+ isValDef(m))
methods foreach { m =>
val name = m.getName
// invoke method to obtain actual `Value` instance
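Note: the hunk above only adds explicit types in `populateNameMap`, the reflective helper that discovers the zero-argument `Value` methods so each enumeration value can report its Scala identifier as its name. A minimal sketch of the behaviour this supports (the `Weekday` object below is illustrative, not part of the patch):
{{{
object Weekday extends Enumeration {
  val Mon, Tue, Wed = Value   // zero-arg vals of type Value, discovered via populateNameMap
}

println(Weekday.Mon)          // prints "Mon" - the name is recovered from the val definition
println(Weekday.values)       // the set of all defined values, in id order
}}}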
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 98dd35d306..c1a413d516 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -20,7 +20,7 @@ package scala
* {{{
* val f: PartialFunction[Int, Any] = { case _ => 1/0 }
* }}}
- *
+ *
* It is the responsibility of the caller to call `isDefinedAt` before
* calling `apply`, because if `isDefinedAt` is false, it is not guaranteed
* `apply` will throw an exception to indicate an error condition. If an
@@ -161,7 +161,8 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
object PartialFunction {
/** Composite function produced by `PartialFunction#orElse` method
*/
- private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends scala.runtime.AbstractPartialFunction[A, B] {
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B])
+ extends scala.runtime.AbstractPartialFunction[A, B] with Serializable {
def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
override def apply(x: A): B = f1.applyOrElse(x, f2)
@@ -180,7 +181,7 @@ object PartialFunction {
/** Composite function produced by `PartialFunction#andThen` method
*/
- private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
+ private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] with Serializable {
def isDefinedAt(x: A) = pf.isDefinedAt(x)
def apply(x: A): C = k(pf(x))
@@ -217,7 +218,7 @@ object PartialFunction {
private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
private class Lifted[-A, +B] (val pf: PartialFunction[A, B])
- extends scala.runtime.AbstractFunction1[A, Option[B]] {
+ extends scala.runtime.AbstractFunction1[A, Option[B]] with Serializable {
def apply(x: A): Option[B] = {
val z = pf.applyOrElse(x, checkFallback[B])
@@ -225,7 +226,7 @@ object PartialFunction {
}
}
- private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
+ private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] with Serializable {
def isDefinedAt(x: A): Boolean = f(x).isDefined
override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
@@ -248,7 +249,7 @@ object PartialFunction {
private[this] val constFalse: Any => Boolean = { _ => false}
- private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
+ private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] with Serializable {
def isDefinedAt(x: Any) = false
def apply(x: Any) = throw new MatchError(x)
override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
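Note: this change mixes `Serializable` into the composite partial functions produced by `orElse`, `andThen` and `lift`, so composing serializable partial functions no longer yields a non-serializable wrapper. A minimal check of the new behaviour (sketch, not part of the patch):
{{{
val one: PartialFunction[Int, String] = { case 1 => "one" }
val two: PartialFunction[Int, String] = { case 2 => "two" }

val combined = one orElse two                       // PartialFunction.OrElse, now Serializable
assert(combined.isInstanceOf[Serializable])
assert(combined.lift.isInstanceOf[Serializable])    // Lifted now mixes in Serializable too
}}}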
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index a6f0fa9e78..50a53732f1 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -8,13 +8,14 @@
package scala
+import scala.language.implicitConversions
+
import scala.collection.{ mutable, immutable, generic }
import immutable.StringOps
import mutable.ArrayOps
import generic.CanBuildFrom
import scala.annotation.{ elidable, implicitNotFound }
import scala.annotation.elidable.ASSERTION
-import scala.language.{implicitConversions, existentials}
import scala.io.StdIn
/** The `Predef` object provides definitions that are accessible in all Scala
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index f9c5e8a744..dd0d9b511c 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -8,8 +8,6 @@
package scala.annotation
-import java.util.logging.Level
-
/** An annotation for methods whose bodies may be excluded
* from compiler-generated bytecode.
*
@@ -62,7 +60,7 @@ import java.util.logging.Level
* @author Paul Phillips
* @since 2.8
*/
-final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation {}
+final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation
/** This useless appearing code was necessary to allow people to use
* named constants for the elidable annotation. This is what it takes
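Note: for context, `@elidable` marks methods whose call sites the compiler may drop entirely below a chosen level, and the named constants mentioned above exist so the level can be given symbolically on the command line. A typical use, assuming compilation with `-Xelide-below` (sketch):
{{{
import scala.annotation.elidable
import scala.annotation.elidable._

object Logging {
  @elidable(FINE) def debug(msg: String): Unit = println(msg)
}

// Compiled with e.g. `scalac -Xelide-below INFO`, calls to Logging.debug
// are removed from the generated bytecode.
Logging.debug("only present when the elision level permits")
}}}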
diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala
index f190865623..44e8d23085 100644
--- a/src/library/scala/annotation/implicitAmbiguous.scala
+++ b/src/library/scala/annotation/implicitAmbiguous.scala
@@ -1,7 +1,5 @@
package scala.annotation
-import scala.annotation.meta._
-
/**
* To customize the error message that's emitted when an implicit search finds
* multiple ambiguous values, annotate at least one of the implicit values
@@ -31,4 +29,4 @@ import scala.annotation.meta._
* @author Brian McKenna
* @since 2.12.0
*/
-final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation {}
+final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation
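Note: `@implicitAmbiguous` customizes the error reported when an implicit search is ambiguous. Roughly the use case its documentation describes is deliberate ambiguity to encode type inequality (sketch based on that documentation):
{{{
import scala.annotation.implicitAmbiguous

trait =!=[A, B]
object =!= {
  implicit def neq[A, B]: A =!= B = null
  @implicitAmbiguous("Could not prove ${A} =!= ${A}")
  implicit def neqAmbig1[A]: A =!= A = null
  implicit def neqAmbig2[A]: A =!= A = null
}

// implicitly[Int =!= String]   // compiles
// implicitly[Int =!= Int]      // fails with "Could not prove Int =!= Int"
}}}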
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 8a8af79151..29369447d1 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -115,7 +115,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
else Iterator.empty.next()
}
- override def foreach[B](f: Int => B) {
+ override def foreach[U](f: Int => U) {
/* NOTE: while loops are significantly faster as of 2.11 and
one major use case of bitsets is performance. Also, there
is nothing to do when all bits are clear, so use that as
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index bce9740522..2b39fa2289 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -124,8 +124,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
}
}
} catch {
- case ex: ClassCastException =>
- println("class cast "); false
+ case ex: ClassCastException => false
}}
case _ =>
false
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 8b9d3e7a17..d730996be2 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -158,18 +158,6 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
@migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0")
def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- /** Applies a function `f` to all elements of this $coll.
- *
- * @param f the function that is applied for its side-effect to every element.
- * The result of function `f` is discarded.
- *
- * @tparam U the type parameter describing the result of function `f`.
- * This result will always be ignored. Typically `U` is `Unit`,
- * but this is not necessary.
- *
- * @usecase def foreach(f: A => Unit): Unit
- * @inheritdoc
- */
def foreach[U](f: A => U): Unit
/** Builds a new collection by applying a function to all elements of this $coll.
@@ -269,16 +257,16 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* {{{
* scala> val a = List(1)
* a: List[Int] = List(1)
- *
+ *
* scala> val b = List(2)
* b: List[Int] = List(2)
- *
+ *
* scala> val c = a ++ b
* c: List[Int] = List(1, 2)
- *
+ *
* scala> val d = List('a')
* d: List[Char] = List(a)
- *
+ *
* scala> val e = c ++ d
* e: List[AnyVal] = List(1, 2, a)
* }}}
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index a45ec965f5..244ff26397 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -49,6 +49,22 @@ import scala.language.higherKinds
*/
trait GenTraversableOnce[+A] extends Any {
+ /** Applies a function `f` to all elements of this $coll.
+ *
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
+ *
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ *
+ * @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
+ *
+ * Note: this method underlies the implementation of most other bulk operations.
+ * It's important to implement this method in an efficient way.
+ *
+ */
def foreach[U](f: A => U): Unit
def hasDefiniteSize: Boolean
@@ -110,13 +126,14 @@ trait GenTraversableOnce[+A] extends Any {
* binary operator.
*
* $undefinedorder
+ * $willNotTerminateInf
*
* @tparam A1 a type parameter for the binary operator, a supertype of `A`.
* @param z a neutral element for the fold operation; may be added to the result
* an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation,
- * 0 for addition, or 1 for multiplication.)
- * @param op a binary operator that must be associative
- * @return the result of applying fold operator `op` between all the elements and `z`
+ * 0 for addition, or 1 for multiplication).
+ * @param op a binary operator that must be associative.
+ * @return the result of applying the fold operator `op` between all the elements and `z`, or `z` if this $coll is empty.
*/
def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1
@@ -205,6 +222,7 @@ trait GenTraversableOnce[+A] extends Any {
* op(...op(z, x_1), x_2, ..., x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * Returns `z` if this $coll is empty.
*/
def foldLeft[B](z: B)(op: (B, A) => B): B
@@ -222,30 +240,32 @@ trait GenTraversableOnce[+A] extends Any {
* op(x_1, op(x_2, ... op(x_n, z)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * Returns `z` if this $coll is empty.
*/
def foldRight[B](z: B)(op: (A, B) => B): B
/** Aggregates the results of applying an operator to subsequent elements.
*
- * This is a more general form of `fold` and `reduce`. It has similar
- * semantics, but does not require the result to be a supertype of the
- * element type. It traverses the elements in different partitions
- * sequentially, using `seqop` to update the result, and then applies
- * `combop` to results from different partitions. The implementation of
- * this operation may operate on an arbitrary number of collection
- * partitions, so `combop` may be invoked an arbitrary number of times.
- *
- * For example, one might want to process some elements and then produce
- * a `Set`. In this case, `seqop` would process an element and append it
- * to the list, while `combop` would concatenate two lists from different
- * partitions together. The initial value `z` would be an empty set.
+ * This is a more general form of `fold` and `reduce`. It is similar to
+ * `foldLeft` in that it doesn't require the result to be a supertype of the
+ * element type. In addition, it allows parallel collections to be processed
+ * in chunks, and then combines the intermediate results.
+ *
+ * `aggregate` splits the $coll into partitions and processes each
+ * partition by sequentially applying `seqop`, starting with `z` (like
+ * `foldLeft`). Those intermediate results are then combined by using
+ * `combop` (like `fold`). The implementation of this operation may operate
+ * on an arbitrary number of collection partitions (even 1), so `combop` may
+ * be invoked an arbitrary number of times (even 0).
+ *
+ * As an example, consider summing up the integer values of a list of chars.
+ * The initial value for the sum is 0. First, `seqop` transforms each input
+ * character to an Int and adds it to the sum (of the partition). Then,
+ * `combop` just needs to sum up the intermediate results of the partitions:
* {{{
- * pc.aggregate(Set[Int]())(_ += process(_), _ ++ _)
+ * List('a', 'b', 'c').aggregate(0)({ (sum, ch) => sum + ch.toInt }, { (p1, p2) => p1 + p2 })
* }}}
*
- * Another example is calculating geometric mean from a collection of doubles
- * (one would typically require big doubles for this).
- *
* @tparam B the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
@@ -423,13 +443,13 @@ trait GenTraversableOnce[+A] extends Any {
*/
def find(@deprecatedName('pred) p: A => Boolean): Option[A]
- /** Copies values of this $coll to an array.
+ /** Copies the elements of this $coll to an array.
* Fills the given array `xs` with values of this $coll.
* Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
+ * or the end of the target array is reached.
*
* @param xs the array to fill.
- * @tparam B the type of the elements of the array.
+ * @tparam B the type of the elements of the target array.
*
* @usecase def copyToArray(xs: Array[A]): Unit
* @inheritdoc
@@ -438,14 +458,14 @@ trait GenTraversableOnce[+A] extends Any {
*/
def copyToArray[B >: A](xs: Array[B]): Unit
- /** Copies values of this $coll to an array.
+ /** Copies the elements of this $coll to an array.
* Fills the given array `xs` with values of this $coll, beginning at index `start`.
* Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
+ * or the end of the target array is reached.
*
* @param xs the array to fill.
* @param start the starting index.
- * @tparam B the type of the elements of the array.
+ * @tparam B the type of the elements of the target array.
*
* @usecase def copyToArray(xs: Array[A], start: Int): Unit
* @inheritdoc
@@ -454,6 +474,22 @@ trait GenTraversableOnce[+A] extends Any {
*/
def copyToArray[B >: A](xs: Array[B], start: Int): Unit
+ /** Copies the elements of this $coll to an array.
+ * Fills the given array `xs` with at most `len` elements of
+ * this $coll, starting at position `start`.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the target array is reached, or `len` elements have been copied.
+ *
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @param len the maximal number of elements to copy.
+ * @tparam B the type of the elements of the target array.
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ */
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
/** Displays all elements of this $coll in a string using start, end, and
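Note: the three-argument `copyToArray` documented above copies at most `len` elements into `xs` starting at `start`, stopping early at the end of either the collection or the array. A quick illustration (sketch, not part of the patch):
{{{
val xs  = List(1, 2, 3, 4, 5)
val arr = new Array[Int](6)       // initialised to zeros

xs.copyToArray(arr, 2, 3)         // copy at most 3 elements, starting at index 2
// arr is now Array(0, 0, 1, 2, 3, 0)

xs.copyToArray(arr, 4, 10)        // stops at the end of arr: only 2 elements copied
// arr is now Array(0, 0, 1, 2, 1, 2)
}}}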
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 18c9175ee1..f4bf58ffe3 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -9,9 +9,6 @@
package scala
package collection
-import mutable.ArrayBuffer
-import scala.annotation.tailrec
-
/** A template trait for indexed sequences of type `IndexedSeq[A]`.
*
* $indexedSeqInfo
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index a7e06b4d1a..3765b2fff0 100644
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -10,7 +10,6 @@ package scala
package collection
import generic._
-import mutable.ArrayBuffer
import scala.annotation.tailrec
/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index ecf64624e8..b89720da78 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -10,8 +10,7 @@ package scala
package collection
import generic._
-import immutable.{ List, Stream }
-import scala.annotation.unchecked.uncheckedVariance
+import immutable.Stream
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
@@ -83,8 +82,8 @@ self =>
iterator.foldRight(z)(op)
override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B =
iterator.reduceRight(op)
-
-
+
+
/** Returns this $coll as an iterable collection.
*
* A new collection will not be built; lazy collections will stay lazy.
@@ -94,7 +93,7 @@ self =>
*/
override /*TraversableLike*/ def toIterable: Iterable[A] =
thisCollection
-
+
/** Returns an Iterator over the elements in this $coll. Produces the same
* result as `iterator`.
* $willNotTerminateInf
@@ -102,7 +101,7 @@ self =>
*/
@deprecatedOverriding("toIterator should stay consistent with iterator for all Iterables: override iterator instead.", "2.11.0")
override def toIterator: Iterator[A] = iterator
-
+
override /*TraversableLike*/ def head: A =
iterator.next()
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 90e630ee28..334b511fb9 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -12,7 +12,6 @@ package scala
package collection
import generic._
-import mutable.Buffer
// Methods could be printed by cat IterableLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index ff10fb44d7..17bb83e52e 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -12,8 +12,6 @@ package collection
import mutable.ArrayBuffer
import scala.annotation.migration
import immutable.Stream
-import scala.collection.generic.CanBuildFrom
-import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -521,13 +519,13 @@ trait Iterator[+A] extends TraversableOnce[A] {
def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] {
// Manually buffer to avoid extra layer of wrapping with buffered
private[this] var hd: A = _
-
+
// Little state machine to keep track of where we are
// Seek = 0; Found = 1; Empty = -1
// Not in vals because scalac won't make them static (@inline def only works with -optimize)
// BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC!
private[this] var status = 0/*Seek*/
-
+
def hasNext = {
while (status == 0/*Seek*/) {
if (self.hasNext) {
@@ -700,9 +698,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
}
-
+
val leading = new Leading
-
+
val trailing = new AbstractIterator[A] {
private[this] var myLeading = leading
/* Status flags meanings:
@@ -738,7 +736,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
else Iterator.empty.next()
}
-
+
override def toString = "unknown-if-empty iterator"
}
@@ -772,7 +770,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
status = 1
false
}
- def next() =
+ def next() =
if (hasNext) {
if (status == 1) self.next()
else {
@@ -890,7 +888,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @usecase def foreach(f: A => Unit): Unit
* @inheritdoc
*/
- def foreach[U](f: A => U) { while (hasNext) f(next()) }
+ def foreach[U](f: A => U) { while (hasNext) f(next()) }
/** Tests whether a predicate holds for all values produced by this iterator.
* $mayNotTerminateInf
@@ -1325,7 +1323,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
var i = start
- val end = start + math.min(len, xs.length - start)
+ val end = start + math.min(len, xs.length - start)
while (i < end && hasNext) {
xs(i) = next()
i += 1
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 571d58a3f3..a3860f10a4 100644
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -9,8 +9,6 @@
package scala
package collection
-import mutable.ListBuffer
-import immutable.List
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
@@ -67,7 +65,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
override /*IterableLike*/
- def foreach[B](f: A => B) {
+ def foreach[U](f: A => U) {
var these = this
while (!these.isEmpty) {
f(these.head)
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index b474abc12a..4ac87b29a9 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder, MapBuilder }
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.ParMap
/** A template trait for maps, which associate keys with values.
@@ -171,7 +171,7 @@ self =>
def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
override def size = self.size
- override def foreach[C](f: A => C) = self.keysIterator foreach f
+ override def foreach[U](f: A => U) = self.keysIterator foreach f
}
/** Creates an iterator for all keys.
@@ -203,7 +203,7 @@ self =>
protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] with Serializable {
def iterator = valuesIterator
override def size = self.size
- override def foreach[C](f: B => C) = self.valuesIterator foreach f
+ override def foreach[U](f: B => U) = self.valuesIterator foreach f
}
/** Creates an iterator for all values in this map.
@@ -228,7 +228,7 @@ self =>
throw new NoSuchElementException("key not found: " + key)
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
- override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ override def foreach[U](f: ((A, B)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = p(key) && self.contains(key)
def get(key: A) = if (!p(key)) None else self.get(key)
@@ -246,7 +246,7 @@ self =>
def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
- override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
+ override def foreach[U](g: ((A, C)) => U): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
override def contains(key: A) = self.contains(key)
@@ -331,8 +331,8 @@ self =>
foreach(vb += _)
vb.result
}
- }
-
+ }
+
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
// Faster to let the map iterate itself than to defer through copyToBuffer
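Note: the `FilteredKeys` and `MappedValues` wrappers touched above back the `filterKeys` and `mapValues` views: they copy nothing, forwarding `foreach`, `iterator` and lookups to the original map and applying the predicate or function on the fly. For example (sketch):
{{{
val m = Map(1 -> "one", 2 -> "two", 3 -> "three")

val evenKeys = m.filterKeys(_ % 2 == 0)   // a FilteredKeys view over m
val lengths  = m.mapValues(_.length)      // a MappedValues view; f is re-applied on access

evenKeys.foreach { case (k, v) => println(s"$k -> $v") }   // prints "2 -> two"
println(lengths(3))                                        // prints 5
}}}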
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index b775480532..a26765027c 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -9,11 +9,10 @@
package scala
package collection
-import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.{ min, max, Ordering }
+import scala.math.Ordering
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
@@ -146,7 +145,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* more than one way to generate the same subsequence, only one will be returned.
*
* For example, `"xyyy"` has three different ways to generate `"xy"` depending on
- * whether the first, second, or third `"y"` is selected. However, since all are
+ * whether the first, second, or third `"y"` is selected. However, since all are
* identical, only one will be chosen. Which of the three will be taken is an
* implementation detail that is not defined.
*
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index d03c808c2c..9143c40870 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder, SetBuilder }
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.ParSet
/** A template trait for sets.
@@ -86,7 +86,7 @@ self =>
vb.result
}
}
-
+
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
// Faster to let the map iterate itself than to defer through copyToBuffer
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index a35750a35f..8145eaa204 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -38,7 +38,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def remove(p: A => Boolean): Traversable[A]
override def partition(p: A => Boolean): (Traversable[A], Traversable[A])
override def groupBy[K](f: A => K): Map[K, Traversable[A]]
- override def foreach[U](f: A => U): Unit
+ override def foreach[U](f: A => U): Unit
override def forall(p: A => Boolean): Boolean
override def exists(p: A => Boolean): Boolean
override def count(p: A => Boolean): Int
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index a0b1430d17..fa9a3a7482 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder }
-import scala.annotation.{tailrec, migration, bridge}
+import scala.annotation.migration
import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
import scala.language.higherKinds
@@ -340,14 +340,6 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
- /** Tests whether a predicate holds for all elements of this $coll.
- *
- * $mayNotTerminateInf
- *
- * @param p the predicate used to test elements.
- * @return `true` if this $coll is empty, otherwise `true` if the given predicate `p`
- * holds for all elements of this $coll, otherwise `false`.
- */
def forall(p: A => Boolean): Boolean = {
var result = true
breakable {
@@ -374,15 +366,6 @@ trait TraversableLike[+A, +Repr] extends Any
result
}
- /** Finds the first element of the $coll satisfying a predicate, if any.
- *
- * $mayNotTerminateInf
- * $orderDependent
- *
- * @param p the predicate used to test elements.
- * @return an option value containing the first element in the $coll
- * that satisfies `p`, or `None` if none exists.
- */
def find(p: A => Boolean): Option[A] = {
var result: Option[A] = None
breakable {
@@ -594,23 +577,6 @@ trait TraversableLike[+A, +Repr] extends Any
*/
def inits: Iterator[Repr] = iterateUntilEmpty(_.init)
- /** Copies elements of this $coll to an array.
- * Fills the given array `xs` with at most `len` elements of
- * this $coll, starting at position `start`.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached, or `len` elements have been copied.
- *
- * @param xs the array to fill.
- * @param start the starting index.
- * @param len the maximal number of elements to copy.
- * @tparam B the type of the elements of the array.
- *
- *
- * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
- * @inheritdoc
- *
- * $willNotTerminateInf
- */
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
var i = start
val end = (start + len) min xs.length
@@ -625,7 +591,7 @@ trait TraversableLike[+A, +Repr] extends Any
@deprecatedOverriding("Enforce contract of toTraversable that if it is Traversable it returns itself.", "2.11.0")
def toTraversable: Traversable[A] = thisCollection
-
+
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
// Override to provide size hint.
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index c5b0d0f085..41362e8dd7 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,7 +9,7 @@
package scala
package collection
-import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import mutable.{ Buffer, Builder, ArrayBuffer }
import generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
import scala.language.{implicitConversions, higherKinds}
@@ -61,7 +61,8 @@ import scala.reflect.ClassTag
trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
self =>
- /** Self-documenting abstract methods. */
+ /* Self-documenting abstract methods. */
+
def foreach[U](f: A => U): Unit
def isEmpty: Boolean
def hasDefiniteSize: Boolean
@@ -334,10 +335,10 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* {{{
* scala> val a = List(1,2,3,4)
* a: List[Int] = List(1, 2, 3, 4)
- *
+ *
* scala> val b = new StringBuilder()
- * b: StringBuilder =
- *
+ * b: StringBuilder =
+ *
* scala> a.addString(b , "List(" , ", " , ")")
* res5: StringBuilder = List(1, 2, 3, 4)
* }}}
@@ -376,7 +377,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* {{{
* scala> val a = List(1,2,3,4)
* a: List[Int] = List(1, 2, 3, 4)
- *
+ *
* scala> val b = new StringBuilder()
* b: StringBuilder =
*
@@ -399,7 +400,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* {{{
* scala> val a = List(1,2,3,4)
* a: List[Int] = List(1, 2, 3, 4)
- *
+ *
* scala> val b = new StringBuilder()
* b: StringBuilder =
*
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 4399dbc289..fa470ea238 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -28,7 +28,7 @@ import scala.reflect.ClassTag
trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy {
def self: Repr
- override def foreach[B](f: A => B): Unit = self.foreach(f)
+ override def foreach[U](f: A => U): Unit = self.foreach(f)
override def isEmpty: Boolean = self.isEmpty
override def nonEmpty: Boolean = self.nonEmpty
override def size: Int = self.size
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 426f9bf864..d113cbfcdf 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -11,13 +11,11 @@ package collection
package concurrent
import java.util.concurrent.atomic._
-import scala.collection.immutable.{ ListMap => ImmutableListMap }
import scala.collection.parallel.mutable.ParTrieMap
import scala.util.hashing.Hashing
import scala.util.control.ControlThrowable
import generic._
import scala.annotation.tailrec
-import scala.annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index 7332b71af1..ee161dcc87 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -139,13 +139,13 @@ trait WrapAsScala {
* If the Java `Map` was previously obtained from an implicit or
* explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
* the original Scala Map will be returned.
- *
+ *
* If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
- * it is your responsibility to wrap all
+ * it is your responsibility to wrap all
* non-atomic operations with `underlying.synchronized`.
* This includes `get`, as `java.util.Map`'s API does not allow for an
* atomic `get` when `null` values may be present.
- *
+ *
* @param m The Map to be converted.
* @return A Scala mutable Map view of the argument.
*/
@@ -170,20 +170,20 @@ trait WrapAsScala {
*/
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
case null => null
- case cmw: ConcurrentMapWrapper[A, B] => cmw.underlying
+ case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying
case _ => new JConcurrentMapWrapper(m)
}
/**
* Implicitly converts a Java `Dictionary` to a Scala mutable
- * `Map[String, String]`.
+ * `Map`.
*
- * The returned Scala `Map[String, String]` is backed by the provided Java
+ * The returned Scala `Map` is backed by the provided Java
* `Dictionary` and any side-effects of using it via the Scala interface
* will be visible via the Java interface and vice versa.
*
* @param p The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
+ * @return A Scala mutable Map view of the argument.
*/
implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
case null => null
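Note: the doc fix above generalises the description of `dictionaryAsScalaMap` from `Map[String, String]` to its actual `Map[A, B]` signature. As a usage sketch (bringing the implicit into scope via `JavaConversions` is one way to use it):
{{{
import scala.collection.JavaConversions.dictionaryAsScalaMap
import java.util.{Hashtable => JHashtable}

val dict = new JHashtable[String, Int]()                   // a java.util.Dictionary
dict.put("answer", 42)

val m: scala.collection.mutable.Map[String, Int] = dict    // implicit conversion
m("answer")                                                // 42
m += ("pi" -> 3)                                           // mutation is visible through dict
assert(dict.get("pi") == 3)
}}}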
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index b9b7043270..44a778a953 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -13,7 +13,6 @@ package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
-import scala.collection.parallel.TaskSupport
import scala.annotation.unchecked.uncheckedVariance
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index b9f3d4b010..255d695303 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -10,8 +10,6 @@ package scala
package collection
package generic
-
-import mutable.{Builder, MapBuilder}
import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 4486cea419..901e9fc239 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -11,7 +11,6 @@ package collection
package generic
import scala.collection.parallel.ParIterable
-import scala.collection.parallel.Combiner
import scala.language.higherKinds
/** A template class for companion objects of `ParIterable` and subclasses
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 4320635ae6..1341ddcb38 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -10,7 +10,6 @@ package scala
package collection
package generic
-import scala.collection.mutable.Builder
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParSet
import scala.collection.parallel.ParSetLike
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index fcd8d00c18..5e50844cc9 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -12,7 +12,6 @@ package scala
package collection
package generic
-import mutable.Builder
import scala.language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 1d7974f7a4..359ea402b6 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -32,7 +32,7 @@ trait TraversableForwarder[+A] extends Traversable[A] {
/** The traversable object to which calls are forwarded. */
protected def underlying: Traversable[A]
- override def foreach[B](f: A => B): Unit = underlying foreach f
+ override def foreach[U](f: A => U): Unit = underlying foreach f
override def isEmpty: Boolean = underlying.isEmpty
override def nonEmpty: Boolean = underlying.nonEmpty
override def size: Int = underlying.size
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 1beb4a8599..015c3455db 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,6 +1,5 @@
package scala
package collection
-import generic.CanBuildFrom
import scala.language.higherKinds
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 70543aa3a6..846bc22182 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -14,7 +14,7 @@ package immutable
import generic._
import BitSetLike.{LogWL, updateArray}
-import mutable.{ Builder, SetBuilder }
+import mutable.Builder
/** A class for immutable bitsets.
* $bitsetinfo
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 3b3e65ea61..92d915fe8b 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -48,7 +48,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
def iterator: Iterator[(A,B)] = Iterator.empty
- override def foreach[U](f: ((A, B)) => U): Unit = { }
+ override def foreach[U](f: ((A, B)) => U): Unit = ()
def get(key: A): Option[B] =
get0(key, computeHash(key), 0)
@@ -422,7 +422,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
final override def getElem(cc: AnyRef): (A, B) = cc.asInstanceOf[HashMap1[A, B]].ensurePair
}
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((A, B)) => U): Unit = {
var i = 0
while (i < elems.length) {
elems(i).foreach(f)
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 603d97c3ad..07758bf5a2 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -53,7 +53,7 @@ class HashSet[A] extends AbstractSet[A]
def iterator: Iterator[A] = Iterator.empty
- override def foreach[U](f: A => U): Unit = { }
+ override def foreach[U](f: A => U): Unit = ()
def contains(e: A): Boolean = get0(e, computeHash(e), 0)
@@ -215,7 +215,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
private object EmptyHashSet extends HashSet[Any] { }
private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet
-
+
// utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code)
private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = {
val index0 = (hash0 >>> level) & 0x1f
@@ -966,7 +966,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key
}
- override def foreach[U](f: A => U): Unit = {
+ override def foreach[U](f: A => U): Unit = {
var i = 0
while (i < elems.length) {
elems(i).foreach(f)
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index cb6196e130..c6bf6a77e8 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -184,7 +184,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f: ((Int, T)) => U): Unit = this match {
+ override final def foreach[U](f: ((Int, T)) => U): Unit = this match {
case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
case IntMap.Tip(key, value) => f((key, value))
case IntMap.Nil =>
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 66150c1a13..c09328cae6 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -25,6 +25,19 @@ import java.io.{ObjectOutputStream, ObjectInputStream}
* This class is optimal for last-in-first-out (LIFO), stack-like access patterns. If you need another access
* pattern, for example, random access or FIFO, consider using a collection more suited to this than `List`.
*
+ * ==Performance==
+ * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list.
+ * This includes the index-based lookup of elements, `length`, `append` and `reverse`.
+ *
+ * '''Space:''' `List` implements '''structural sharing''' of the tail list. This means that many operations are either
+ * zero- or constant-memory cost.
+ * {{{
+ * val mainList = List(3, 2, 1)
+ * val with4 = 4 :: mainList // re-uses mainList, costs one :: instance
+ * val with42 = 42 :: mainList // also re-uses mainList, cost one :: instance
+ * val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList
+ * }}}
+ *
* @example {{{
* // Make a list via the companion object factory
* val days = List("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday")
@@ -41,19 +54,6 @@ import java.io.{ObjectOutputStream, ObjectInputStream}
* }
* }}}
*
- * ==Performance==
- * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list.
- * This includes the index-based lookup of elements, `length`, `append` and `reverse`.
- *
- * '''Space:''' `List` implements '''structural sharing''' of the tail list. This means that many operations are either
- * zero- or constant-memory cost.
- * {{{
- * val mainList = List(3, 2, 1)
- * val with4 = 4 :: mainList // re-uses mainList, costs one :: instance
- * val with42 = 42 :: mainList // also re-uses mainList, cost one :: instance
- * val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList
- * }}}
- *
* @note The functional list is characterized by persistence and structural sharing, thus offering considerable
* performance and space consumption benefits in some scenarios if used correctly.
* However, note that objects having multiple references into the same functional list (that is,
@@ -89,8 +89,6 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with scala.Serializable {
override def companion: GenericCompanion[List] = List
- import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
-
def isEmpty: Boolean
def head: A
def tail: List[A]
@@ -265,7 +263,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
(b.toList, these)
}
-
+
final override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -283,7 +281,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
else super.map(f)
}
-
+
final override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -312,7 +310,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
else super.collect(pf)
}
-
+
final override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -452,7 +450,7 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
-
+
private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this }
@SerialVersionUID(1L)
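Note: the performance section moved above (now ahead of the usage example) describes structural sharing of the tail. That sharing is directly observable with reference equality (sketch):
{{{
val mainList = List(3, 2, 1)
val with4    = 4 :: mainList      // one new :: cell, tail shared with mainList
val shorter  = mainList.tail      // no allocation at all

assert(with4.tail eq mainList)    // the prepended list reuses mainList as-is
assert(shorter eq mainList.tail)  // tail returns the existing 2 :: 1 :: Nil instances
}}}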
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index c59f4f7436..f30c0cbf8e 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -13,7 +13,7 @@ package collection
package immutable
import generic._
-import scala.annotation.{tailrec, bridge}
+import scala.annotation.tailrec
/** $factoryInfo
* @since 1
@@ -30,7 +30,7 @@ object ListMap extends ImmutableMapFactory[ListMap] {
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
@SerialVersionUID(-8256686706655863282L)
- private object EmptyListMap extends ListMap[Any, Nothing] {
+ private object EmptyListMap extends ListMap[Any, Nothing] {
override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
override def contains(key: Any) = false
}
@@ -179,16 +179,16 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
else if (cur.next.nonEmpty) get0(cur.next, k) else None
-
-
+
+
override def contains(key: A): Boolean = contains0(this, key)
-
+
@tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean =
if (k == cur.key) true
else if (cur.next.nonEmpty) contains0(cur.next, k)
else false
-
+
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
@@ -198,7 +198,7 @@ extends AbstractMap[A, B]
new m.Node[B2](k, v)
}
-
+
/** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index adc975479a..a65e25ed6e 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -11,8 +11,8 @@ package collection
package immutable
import generic._
-import scala.annotation.{tailrec, bridge}
-import mutable.{ ListBuffer, Builder }
+import scala.annotation.tailrec
+import mutable.Builder
/** $factoryInfo
* @define Coll immutable.ListSet
@@ -179,6 +179,6 @@ class ListSet[A] extends AbstractSet[A]
override def tail: ListSet[A] = self
}
-
+
override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
}
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 868c0c0f47..173d912fe5 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -176,7 +176,7 @@ extends AbstractMap[Long, T]
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
+ override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
case LongMap.Tip(key, value) => f((key, value))
case LongMap.Nil =>
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 63ddcb18cf..6f135cd35f 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -116,7 +116,7 @@ object Map extends ImmutableMapFactory[Map] {
def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
def - (key: A): Map[A, B] =
if (key == key1) Map.empty else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((A, B)) => U): Unit = {
f((key1, value1))
}
}
@@ -142,7 +142,7 @@ object Map extends ImmutableMapFactory[Map] {
if (key == key1) new Map1(key2, value2)
else if (key == key2) new Map1(key1, value1)
else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((A, B)) => U): Unit = {
f((key1, value1)); f((key2, value2))
}
}
@@ -172,7 +172,7 @@ object Map extends ImmutableMapFactory[Map] {
else if (key == key2) new Map2(key1, value1, key3, value3)
else if (key == key3) new Map2(key1, value1, key2, value2)
else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((A, B)) => U): Unit = {
f((key1, value1)); f((key2, value2)); f((key3, value3))
}
}
@@ -206,7 +206,7 @@ object Map extends ImmutableMapFactory[Map] {
else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4)
else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3)
else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((A, B)) => U): Unit = {
f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4))
}
}
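
The hunks above touch the `foreach` overrides of the specialized `Map1`..`Map4` classes, which apply `f` directly to their fixed fields instead of building an iterator. A hedged sketch of the same idea with a hypothetical two-entry container (not the library's classes):

    // Sketch: a fixed two-entry, map-like value whose foreach applies f
    // straight to its fields instead of going through an iterator.
    final class Pair2[A, B](k1: A, v1: B, k2: A, v2: B) {
      def foreach[U](f: ((A, B)) => U): Unit = {
        f((k1, v1)); f((k2, v2))
      }
    }

    new Pair2(1, "one", 2, "two").foreach { case (k, v) => println(s"$k -> $v") }
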
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 11603a118b..c8d7519254 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -10,8 +10,6 @@ package scala
package collection
package immutable
-import mutable.{ Builder, ListBuffer }
-
// TODO: Now the specialization exists there is no clear reason to have
// separate classes for Range/NumericRange. Investigate and consolidate.
@@ -193,7 +191,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// Either numRangeElements or (head + last) must be even, so divide the even one before multiplying
val a = head.toLong
val b = last.toLong
- val ans =
+ val ans =
if ((numRangeElements & 1) == 0) (numRangeElements / 2) * (a + b)
else numRangeElements * {
// Sum is even, but we might overflow it, so divide in pieces and add back remainder
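
The `NumericRange` hunk above is part of the overflow-aware `sum`: for an arithmetic series either the element count or `head + last` is even, so the even factor is divided before multiplying. A REPL-style check of that identity (plain arithmetic, not the library code, which additionally splits the odd case into pieces to guard against overflow):

    // Gauss's identity: n evenly spaced integer terms from a to b sum to n*(a+b)/2,
    // and n*(a+b) is always even, so one of the two factors can be halved first.
    def rangeSum(a: Long, b: Long, n: Long): Long =
      if (n % 2 == 0) (n / 2) * (a + b)
      else n * ((a + b) / 2)   // n odd implies (a + b) even for an integer range

    assert(rangeSum(1, 100, 100) == 5050L)   // 50 * 101
    assert(rangeSum(1, 101, 101) == 5151L)   // 101 * 51
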
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index c8d0b00327..fab5ad47eb 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -13,7 +13,6 @@ package collection
package immutable
import java.io.{File, FileReader, Reader}
-import scala.util.matching.Regex
import scala.reflect.ClassTag
/** The `PagedSeq` object defines a lazy implementations of
@@ -23,6 +22,7 @@ import scala.reflect.ClassTag
* `fromIterator` and `fromIterable` provide generalised instances of `PagedSeq`
* @since 2.7
*/
+@deprecated("This object will be moved to the scala-parser-combinators module", "2.11.8")
object PagedSeq {
final val UndeterminedEnd = Int.MaxValue
@@ -126,7 +126,7 @@ import PagedSeq._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecatedInheritance("The implementation details of paged sequences make inheriting from them unwise.", "2.11.0")
+@deprecated("This class will be moved to the scala-parser-combinators module", "2.11.8")
class PagedSeq[T: ClassTag] protected(
more: (Array[T], Int, Int) => Int,
first1: Page[T],
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 53af3ce158..70b51f8251 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -12,7 +12,6 @@ package immutable
import generic._
import mutable.{ Builder, ListBuffer }
-import scala.annotation.tailrec
/** `Queue` objects implement data structures that allow one to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 682788e18e..0f3bd2e195 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -14,7 +14,6 @@ package immutable
import generic._
import mutable.Builder
-import scala.annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 4a8859a7ab..107f77f287 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -13,7 +13,6 @@ package collection
package immutable
import generic._
-import mutable.Builder
/** A subtrait of `collection.SortedSet` which represents sorted sets
* which cannot be mutated.
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 5989517532..d92b159f3b 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -11,7 +11,7 @@ package collection
package immutable
import generic._
-import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
+import mutable.{Builder, StringBuilder, LazyBuilder}
import scala.annotation.tailrec
import Stream.cons
import scala.language.implicitConversions
@@ -176,9 +176,9 @@ import scala.language.implicitConversions
* loop(1, 1)
* }
* }}}
- *
+ *
* Note that `mkString` forces evaluation of a `Stream`, but `addString` does
- * not. In both cases, a `Stream` that is or ends in a cycle
+ * not. In both cases, a `Stream` that is or ends in a cycle
* (e.g. `lazy val s: Stream[Int] = 0 #:: s`) will convert additional trips
* through the cycle to `...`. Additionally, `addString` will display an
* un-memoized tail as `?`.
@@ -203,11 +203,9 @@ abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
with LinearSeqOptimized[A, Stream[A]]
- with Serializable {
-self =>
- override def companion: GenericCompanion[Stream] = Stream
+ with Serializable { self =>
- import scala.collection.{Traversable, Iterable, Seq, IndexedSeq}
+ override def companion: GenericCompanion[Stream] = Stream
/** Indicates whether or not the `Stream` is empty.
*
@@ -528,7 +526,7 @@ self =>
* unless the `f` throws an exception.
*/
@tailrec
- override final def foreach[B](f: A => B) {
+ override final def foreach[U](f: A => U) {
if (!this.isEmpty) {
f(head)
tail.foreach(f)
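
The only change in this hunk is the type parameter name (`B` to `U`), but note that `Stream.foreach` stays `@tailrec`: the recursive call on `tail` is in tail position, so traversing a long finite stream runs in constant stack space. A small REPL-style sketch:

    // foreach is tail-recursive, so this does not overflow the stack.
    val s: Stream[Int] = Stream.range(0, 1000000)
    var sum = 0L
    s.foreach(sum += _)
    assert(sum == 499999500000L)
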
@@ -1091,14 +1089,12 @@ object Stream extends SeqFactory[Stream] {
/** Creates a new builder for a stream */
def newBuilder[A]: Builder[A, Stream[A]] = new StreamBuilder[A]
- import scala.collection.{Iterable, Seq, IndexedSeq}
-
/** A builder for streams
* @note This builder is lazy only in the sense that it does not go down the spine
* of traversables that are added as a whole. If more laziness can be achieved,
* this builder should be bypassed.
*/
- class StreamBuilder[A] extends scala.collection.mutable.LazyBuilder[A, Stream[A]] {
+ class StreamBuilder[A] extends LazyBuilder[A, Stream[A]] {
def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
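
`StreamBuilder` now refers to the imported `LazyBuilder` directly; as the note above says, it is lazy only in that whole collections added with `++=` are stored as parts and not traversed until the result is forced. A hedged REPL-style illustration:

    // Elements added as a whole collection are only flattened when the
    // resulting stream is actually traversed.
    val b = Stream.newBuilder[Int]
    b ++= Stream.from(1).take(5)
    val s = b.result()
    assert(s.take(3).toList == List(1, 2, 3))
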
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 7b3da1e2ea..d92db68912 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -10,7 +10,7 @@ package scala
package collection
package immutable
-import mutable.{ ArrayBuilder, Builder }
+import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
import scala.reflect.ClassTag
@@ -208,32 +208,32 @@ self =>
else "\\" + ch
/** Split this string around the separator character
- *
+ *
* If this string is the empty string, returns an array of strings
* that contains a single empty string.
- *
+ *
* If this string is not the empty string, returns an array containing
* the substrings terminated by the start of the string, the end of the
* string or the separator character, excluding empty trailing substrings
- *
- * If the separator character is a surrogate character, only split on
+ *
+ * If the separator character is a surrogate character, only split on
* matching surrogate characters if they are not part of a surrogate pair
- *
+ *
* The behaviour follows, and is implemented in terms of <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html#split%28java.lang.String%29">String.split(re: String)</a>
- *
- *
+ *
+ *
* @example {{{
* "a.b".split('.') //returns Array("a", "b")
- *
+ *
* //splitting the empty string always returns the array with a single
* //empty string
- * "".split('.') //returns Array("")
- *
+ * "".split('.') //returns Array("")
+ *
* //only trailing empty substrings are removed
* "a.".split('.') //returns Array("a")
* ".a.".split('.') //returns Array("", "a")
* "..a..".split('.') //returns Array("", "", "a")
- *
+ *
* //all parts are empty and trailing
* ".".split('.') //returns Array()
* "..".split('.') //returns Array()
@@ -247,16 +247,16 @@ self =>
* //well-formed surrogate pairs are not split
* val highlow = highstring + lowstring
* highlow.split(high) //returns Array(highlow)
- *
+ *
* //bare surrogate characters are split
* val bare = "_" + highstring + "_"
* bare.split(high) //returns Array("_", "_")
- *
+ *
* }}}
- *
+ *
* @param separator the character used as a delimiter
*/
- def split(separator: Char): Array[String] =
+ def split(separator: Char): Array[String] =
toString.split(escape(separator))
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 662075cd93..b845b76026 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -200,5 +200,5 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
override def contains(key: A): Boolean = RB.contains(tree, key)
override def isDefinedAt(key: A): Boolean = RB.contains(tree, key)
- override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f)
+ override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f)
}
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 7378211db0..2800030d67 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -151,7 +151,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
def iterator: Iterator[A] = RB.keysIterator(tree)
override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
- override def foreach[U](f: A => U) = RB.foreachKey(tree, f)
+ override def foreach[U](f: A => U) = RB.foreachKey(tree, f)
override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSet(RB.rangeImpl(tree, from, until))
override def range(from: A, until: A): TreeSet[A] = newSet(RB.range(tree, from, until))
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 8bb581d44c..cd2d3f843b 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -951,8 +951,6 @@ private[immutable] trait VectorPointer[T] {
// STUFF BELOW USED BY APPEND / UPDATE
private[immutable] final def copyOf(a: Array[AnyRef]) = {
- //println("copy")
- if (a eq null) println ("NULL")
val b = new Array[AnyRef](a.length)
Platform.arraycopy(a, 0, b, 0, a.length)
b
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 46991ac101..af7600ad3d 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -5,23 +5,23 @@ package mutable
import generic.CanBuildFrom
/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing.
- *
- * Basic map operations on single entries, including `contains` and `get`,
+ *
+ * Basic map operations on single entries, including `contains` and `get`,
* are typically significantly faster with `AnyRefMap` than [[HashMap]].
* Note that numbers and characters are not handled specially in AnyRefMap;
* only plain `equals` and `hashCode` are used in comparisons.
- *
+ *
* Methods that traverse or regenerate the map, including `foreach` and `map`,
* are not in general faster than with `HashMap`. The methods `foreachKey`,
* `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster
* than alternative ways to achieve the same functionality.
- *
+ *
* Maps with open addressing may become less efficient at lookup after
* repeated addition/removal of elements. Although `AnyRefMap` makes a
* decent attempt to remain efficient regardless, calling `repack`
* on a map that will no longer have elements removed but will be
* used heavily may save both time and storage space.
- *
+ *
* This map is not intended to contain more than 2^29^ entries (approximately
* 500 million). The maximum capacity is 2^30^, but performance will degrade
* rapidly as 2^30^ is approached.
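
The class documentation above recommends `repack()` once a map that saw heavy addition and removal settles down. A hedged usage sketch (REPL-style):

    import scala.collection.mutable.AnyRefMap

    val m = AnyRefMap.empty[String, Int]
    (1 to 10000).foreach(i => m(i.toString) = i)   // update is the fast insertion path
    (1 to 5000).foreach(i => m -= i.toString)
    m.repack()                                     // compact the table after the churn
    assert(m.contains("7500"))
    assert(m.getOrNull("1") == 0)                  // missing Int key unboxes to 0
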
@@ -36,50 +36,50 @@ extends AbstractMap[K, V]
{
import AnyRefMap._
def this() = this(AnyRefMap.exceptionDefault, 16, true)
-
+
/** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */
def this(defaultEntry: K => V) = this(defaultEntry, 16, true)
/** Creates a new `AnyRefMap` with an initial buffer of specified size.
- *
+ *
* An `AnyRefMap` can typically contain half as many elements as its buffer size
* before it requires resizing.
*/
def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true)
-
+
/** Creates a new `AnyRefMap` with specified default values and initial buffer size. */
def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true)
-
+
private[this] var mask = 0
private[this] var _size = 0
private[this] var _vacant = 0
private[this] var _hashes: Array[Int] = null
private[this] var _keys: Array[AnyRef] = null
private[this] var _values: Array[AnyRef] = null
-
+
if (initBlank) defaultInitialize(initialBufferSize)
-
+
private[this] def defaultInitialize(n: Int) {
- mask =
+ mask =
if (n<0) 0x7
else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7
_hashes = new Array[Int](mask+1)
_keys = new Array[AnyRef](mask+1)
_values = new Array[AnyRef](mask+1)
}
-
+
private[collection] def initializeTo(
m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef]
) {
mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz
}
-
+
override def size: Int = _size
override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry)
-
- private def imbalanced: Boolean =
+
+ private def imbalanced: Boolean =
(_size + _vacant) > 0.5*mask || _vacant > _size
-
+
private def hashOf(key: K): Int = {
if (key eq null) 0x41081989
else {
@@ -90,7 +90,7 @@ extends AbstractMap[K, V]
if (j==0) 0x41081989 else j & 0x7FFFFFFF
}
}
-
+
private def seekEntry(h: Int, k: AnyRef): Int = {
var e = h & mask
var x = 0
@@ -102,7 +102,7 @@ extends AbstractMap[K, V]
}
e | MissingBit
}
-
+
private def seekEntryOrOpen(h: Int, k: AnyRef): Int = {
var e = h & mask
var x = 0
@@ -116,19 +116,19 @@ extends AbstractMap[K, V]
}
if (o >= 0) o | MissVacant else e | MissingBit
}
-
+
override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0
-
+
override def get(key: K): Option[V] = {
val i = seekEntry(hashOf(key), key)
if (i < 0) None else Some(_values(i).asInstanceOf[V])
}
-
+
override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
val i = seekEntry(hashOf(key), key)
if (i < 0) default else _values(i).asInstanceOf[V]
}
-
+
override def getOrElseUpdate(key: K, defaultValue: => V): V = {
val h = hashOf(key)
var i = seekEntryOrOpen(h, key)
@@ -156,10 +156,10 @@ extends AbstractMap[K, V]
}
else _values(i).asInstanceOf[V]
}
-
+
/** Retrieves the value associated with a key, or the default for that type if none exists
* (null for AnyRef, 0 for floats and integers).
- *
+ *
* Note: this is the fastest way to retrieve a value that may or
* may not exist, if the default null/zero is acceptable. For key/value
* pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
@@ -168,22 +168,22 @@ extends AbstractMap[K, V]
val i = seekEntry(hashOf(key), key)
(if (i < 0) null else _values(i)).asInstanceOf[V]
}
-
- /** Retrieves the value associated with a key.
+
+ /** Retrieves the value associated with a key.
* If the key does not exist in the map, the `defaultEntry` for that key
- * will be returned instead; an exception will be thrown if no
+ * will be returned instead; an exception will be thrown if no
* `defaultEntry` was supplied.
*/
override def apply(key: K): V = {
val i = seekEntry(hashOf(key), key)
if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
}
-
+
/** Defers to defaultEntry to find a default value for the key. Throws an
* exception if no other default behavior was specified.
*/
override def default(key: K) = defaultEntry(key)
-
+
private def repack(newMask: Int) {
val oh = _hashes
val ok = _keys
@@ -207,9 +207,9 @@ extends AbstractMap[K, V]
i += 1
}
}
-
+
/** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup.
- *
+ *
* For maps that undergo a complex creation process with both addition and
* removal of keys, and then are used heavily with no further removal of
* elements, calling `repack` after the end of the creation can result in
@@ -222,7 +222,7 @@ extends AbstractMap[K, V]
while (m > 8 && 8*_size < m) m = m >>> 1
repack(m)
}
-
+
override def put(key: K, value: V): Option[V] = {
val h = hashOf(key)
val k = key
@@ -245,9 +245,9 @@ extends AbstractMap[K, V]
ans
}
}
-
+
/** Updates the map to include a new key-value pair.
- *
+ *
* This is the fastest way to add an entry to an `AnyRefMap`.
*/
override def update(key: K, value: V): Unit = {
@@ -269,12 +269,12 @@ extends AbstractMap[K, V]
_values(i) = value.asInstanceOf[AnyRef]
}
}
-
+
/** Adds a new key/value pair to this map and returns the map. */
def +=(key: K, value: V): this.type = { update(key, value); this }
def +=(kv: (K, V)): this.type = { update(kv._1, kv._2); this }
-
+
def -=(key: K): this.type = {
val i = seekEntry(hashOf(key), key)
if (i >= 0) {
@@ -286,14 +286,14 @@ extends AbstractMap[K, V]
}
this
}
-
+
def iterator: Iterator[(K, V)] = new Iterator[(K, V)] {
private[this] val hz = _hashes
private[this] val kz = _keys
private[this] val vz = _values
-
+
private[this] var index = 0
-
+
def hasNext: Boolean = index<hz.length && {
var h = hz(index)
while (h+h == 0) {
@@ -303,7 +303,7 @@ extends AbstractMap[K, V]
}
true
}
-
+
def next: (K, V) = {
if (hasNext) {
val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V])
@@ -313,8 +313,8 @@ extends AbstractMap[K, V]
else throw new NoSuchElementException("next")
}
}
-
- override def foreach[A](f: ((K,V)) => A) {
+
+ override def foreach[U](f: ((K,V)) => U) {
var i = 0
var e = _size
while (e > 0) {
@@ -327,7 +327,7 @@ extends AbstractMap[K, V]
else return
}
}
-
+
override def clone(): AnyRefMap[K, V] = {
val hz = java.util.Arrays.copyOf(_hashes, _hashes.length)
val kz = java.util.Arrays.copyOf(_keys, _keys.length)
@@ -336,25 +336,25 @@ extends AbstractMap[K, V]
arm.initializeTo(mask, _size, _vacant, hz, kz, vz)
arm
}
-
+
override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = {
val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
arm += kv
arm
}
-
+
override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = {
val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
xs.foreach(kv => arm += kv)
arm
}
-
+
override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = {
val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
arm += (key, value)
arm
}
-
+
private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) {
var i,j = 0
while (i < _hashes.length & j < _size) {
@@ -366,13 +366,13 @@ extends AbstractMap[K, V]
i += 1
}
}
-
+
/** Applies a function to all keys of this map. */
def foreachKey[A](f: K => A) { foreachElement[K,A](_keys, f) }
/** Applies a function to all values of this map. */
def foreachValue[A](f: V => A) { foreachElement[V,A](_values, f) }
-
+
/** Creates a new `AnyRefMap` with different values.
* Unlike `mapValues`, this method generates a new
* collection immediately.
@@ -394,8 +394,8 @@ extends AbstractMap[K, V]
arm.initializeTo(mask, _size, _vacant, hz, kz, vz)
arm
}
-
- /** Applies a transformation function to all values stored in this map.
+
+ /** Applies a transformation function to all values stored in this map.
* Note: the default, if any, is not transformed.
*/
def transformValues(f: V => V): this.type = {
@@ -418,15 +418,15 @@ object AnyRefMap {
private final val MissingBit = 0x80000000
private final val VacantBit = 0x40000000
private final val MissVacant = 0xC0000000
-
+
private val exceptionDefault = (k: Any) => throw new NoSuchElementException(if (k == null) "(null)" else k.toString)
-
+
implicit def canBuildFrom[K <: AnyRef, V, J <: AnyRef, U]: CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] =
new CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] {
def apply(from: AnyRefMap[K,V]): AnyRefMapBuilder[J, U] = apply()
def apply(): AnyRefMapBuilder[J, U] = new AnyRefMapBuilder[J, U]
}
-
+
final class AnyRefMapBuilder[K <: AnyRef, V] extends Builder[(K, V), AnyRefMap[K, V]] {
private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V]
def +=(entry: (K, V)): this.type = {
@@ -445,14 +445,14 @@ object AnyRefMap {
if (arm.size < (sz>>3)) arm.repack()
arm
}
-
+
/** Creates a new empty `AnyRefMap`. */
def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V]
-
+
/** Creates a new empty `AnyRefMap` with the supplied default */
def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default)
-
- /** Creates a new `AnyRefMap` from arrays of keys and values.
+
+ /** Creates a new `AnyRefMap` from arrays of keys and values.
* Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`.
*/
def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = {
@@ -463,8 +463,8 @@ object AnyRefMap {
if (arm.size < (sz>>3)) arm.repack()
arm
}
-
- /** Creates a new `AnyRefMap` from keys and values.
+
+ /** Creates a new `AnyRefMap` from keys and values.
* Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`.
*/
def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = {
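
Both `fromZip` overloads build the map directly from parallel key and value collections, so no intermediate collection of tuples is created. A hedged usage sketch:

    import scala.collection.mutable.AnyRefMap

    val keys   = Array("a", "b", "c")
    val values = Array(1, 2, 3)
    val m = AnyRefMap.fromZip(keys, values)   // no Array[(String, Int)] allocated
    assert(m("b") == 2)
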
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 6e53824cbe..ac78ab823b 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -11,7 +11,6 @@ package collection
package mutable
import scala.reflect.ClassTag
-import scala.runtime.ScalaRunTime
/** A builder class for arrays.
*
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 2bc41b5802..5144db7de3 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -10,7 +10,6 @@ package scala
package collection
package mutable
-import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime._
import parallel.mutable.ParArray
@@ -106,9 +105,9 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
bb.result()
}
}
-
+
/** Converts an array of pairs into an array of first elements and an array of second elements.
- *
+ *
* @tparam T1 the type of the first half of the element pairs
* @tparam T2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
@@ -134,9 +133,9 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
}
(a1, a2)
}
-
+
/** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
- *
+ *
* @tparam T1 the type of the first of three elements in the triple
* @tparam T2 the type of the second of three elements in the triple
* @tparam T3 the type of the third of three elements in the triple
@@ -168,7 +167,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
}
(a1, a2, a3)
}
-
+
def seq = thisCollection
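
The hunks above sit on `ArrayOps.unzip` and `unzip3`, which split an array of pairs or triples into parallel arrays using the implicit pair/triple evidence plus `ClassTag`s for the result element types. A REPL-style usage sketch:

    val pairs = Array("a" -> 1, "b" -> 2, "c" -> 3)
    val (names, counts) = pairs.unzip          // two parallel arrays
    assert(names.toList == List("a", "b", "c"))
    assert(counts.toList == List(1, 2, 3))

    val triples = Array((1, 'x', true), (2, 'y', false))
    val (is, cs, bs) = triples.unzip3
    assert(cs.toList == List('x', 'y'))
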
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 5a50f4fb27..1e82096baf 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -68,7 +68,7 @@ extends AbstractSeq[A]
array(idx) = elem.asInstanceOf[AnyRef]
}
- override def foreach[U](f: A => U) {
+ override def foreach[U](f: A => U) {
var i = 0
while (i < length) {
f(array(i).asInstanceOf[A])
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index fec2da8839..951a90b084 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -64,9 +64,10 @@ object ArrayStack extends SeqFactory[ArrayStack] {
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int)
extends AbstractSeq[T]
- with Seq[T]
- with SeqLike[T, ArrayStack[T]]
+ with IndexedSeq[T]
+ with IndexedSeqLike[T, ArrayStack[T]]
with GenericTraversableTemplate[T, ArrayStack]
+ with IndexedSeqOptimized[T, ArrayStack[T]]
with Cloneable[ArrayStack[T]]
with Builder[T, ArrayStack[T]]
with Serializable
@@ -224,7 +225,7 @@ extends AbstractSeq[T]
/** Creates an iterator over the stack in LIFO order.
* @return an iterator over the elements of the stack.
*/
- def iterator: Iterator[T] = new AbstractIterator[T] {
+ override def iterator: Iterator[T] = new AbstractIterator[T] {
var currentIndex = index
def hasNext = currentIndex > 0
def next() = {
@@ -233,7 +234,7 @@ extends AbstractSeq[T]
}
}
- override def foreach[U](f: T => U) {
+ override def foreach[U](f: T => U) {
var currentIndex = index
while (currentIndex > 0) {
currentIndex -= 1
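
The mix-in change above turns `ArrayStack` into an `IndexedSeq` with `IndexedSeqOptimized`, so it picks up the index-based implementations, and `iterator` needs `override` because `IndexedSeqLike` already provides one. The stack behaviour itself is unchanged; a REPL-style sketch:

    import scala.collection.mutable.ArrayStack

    val s = new ArrayStack[Int]
    s.push(1); s.push(2); s.push(3)
    assert(s.top == 3)                          // LIFO: last pushed is on top
    assert(s.iterator.toList == List(3, 2, 1))  // iteration is in LIFO order, as documented above
    assert(s.length == 3)
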
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index e92d48cfeb..feef694e01 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -13,7 +13,7 @@ package collection
package mutable
import generic._
-import BitSetLike.{LogWL, MaxSize, updateArray}
+import BitSetLike.{LogWL, MaxSize}
/** A class for mutable bitsets.
*
@@ -56,7 +56,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def nwords = elems.length
-
+
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def word(idx: Int): Long =
if (idx < nwords) elems(idx) else 0L
@@ -100,7 +100,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
@deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
def += (elem: Int): this.type = { add(elem); this }
-
+
@deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
def -= (elem: Int): this.type = { remove(elem); this }
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index bd9c61ae6a..98c9771a05 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
import script._
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
/** A template trait for buffers of type `Buffer[A]`.
*
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 6fca75ffea..eab4202353 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -96,16 +96,16 @@ extends AbstractMap[A, B]
def iterator = entriesIterator map (e => ((e.key, e.value)))
- override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value)))
+ override def foreach[U](f: ((A, B)) => U): Unit = foreachEntry(e => f((e.key, e.value)))
/* Override to avoid tuple allocation in foreach */
override def keySet: scala.collection.Set[A] = new DefaultKeySet {
- override def foreach[C](f: A => C) = foreachEntry(e => f(e.key))
+ override def foreach[U](f: A => U) = foreachEntry(e => f(e.key))
}
/* Override to avoid tuple allocation in foreach */
override def values: scala.collection.Iterable[B] = new DefaultValuesIterable {
- override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
+ override def foreach[U](f: B => U) = foreachEntry(e => f(e.value))
}
/* Override to avoid tuple allocation */
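
The renamed type parameters above belong to the `foreach` overrides that `HashMap` installs on itself and on its `keySet`/`values` views; as the comments in the hunk note, they walk the entry table directly so that iterating keys or values allocates no `(key, value)` tuple per element. A REPL-style usage sketch:

    import scala.collection.mutable.HashMap

    val m = HashMap("a" -> 1, "b" -> 2)

    var keyChars = 0
    m.keySet.foreach(keyChars += _.length)   // specialized foreach, no tuples built
    var total = 0
    m.values.foreach(total += _)
    assert(keyChars == 2 && total == 3)
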
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 886fee5a59..3a16e4efa5 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -70,7 +70,7 @@ extends AbstractSet[A]
override def iterator: Iterator[A] = super[FlatHashTable].iterator
- override def foreach[U](f: A => U) {
+ override def foreach[U](f: A => U) {
var i = 0
val len = table.length
while (i < len) {
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 730b22227d..d7eec70b15 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -32,7 +32,7 @@ extends AbstractSet[A]
def contains(elem: A): Boolean = set.contains(elem)
- override def foreach[U](f: A => U): Unit = set.foreach(f)
+ override def foreach[U](f: A => U): Unit = set.foreach(f)
override def exists(p: A => Boolean): Boolean = set.exists(p)
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index a9d385bc5b..d0748b8a9f 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -172,7 +172,7 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
}
}
- override def foreach[B](f: A => B) {
+ override def foreach[U](f: A => U) {
var these = this
while (these.nonEmpty) {
f(these.elem)
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index eece557fe8..0a483ceb86 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -13,7 +13,6 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
import java.io.{ObjectOutputStream, ObjectInputStream}
-import scala.annotation.migration
/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index 1eb12d817c..f39a6ba634 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -5,20 +5,20 @@ package mutable
import generic.CanBuildFrom
/** This class implements mutable maps with `Long` keys based on a hash table with open addressing.
- *
- * Basic map operations on single entries, including `contains` and `get`,
+ *
+ * Basic map operations on single entries, including `contains` and `get`,
* are typically substantially faster with `LongMap` than [[HashMap]]. Methods
* that act on the whole map, including `foreach` and `map` are not in
* general expected to be faster than with a generic map, save for those
* that take particular advantage of the internal structure of the map:
* `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`.
- *
+ *
* Maps with open addressing may become less efficient at lookup after
* repeated addition/removal of elements. Although `LongMap` makes a
* decent attempt to remain efficient regardless, calling `repack`
* on a map that will no longer have elements removed but will be
* used heavily may save both time and storage space.
- *
+ *
* This map is not intended to contain more than 2^29 entries (approximately
* 500 million). The maximum capacity is 2^30, but performance will degrade
* rapidly as 2^30 is approached.
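
Not spelled out in the class documentation above, but visible in the hunks below: the two keys that are their own negation (`0L` and `Long.MinValue`) are held in the `extraKeys`/`zeroValue`/`minValue` fields rather than in the open-addressing arrays, since `0L` is also the empty-slot marker in `_keys`. A REPL-style check of the arithmetic that the `key == -key` test relies on:

    // In two's complement only 0 and Long.MinValue equal their own negation.
    val selfNegating = List(0L, 1L, -1L, 42L, Long.MinValue, Long.MaxValue).filter(k => k == -k)
    assert(selfNegating == List(0L, Long.MinValue))
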
@@ -33,20 +33,20 @@ extends AbstractMap[Long, V]
import LongMap._
def this() = this(LongMap.exceptionDefault, 16, true)
-
+
/** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */
def this(defaultEntry: Long => V) = this(defaultEntry, 16, true)
-
+
/** Creates a new `LongMap` with an initial buffer of specified size.
- *
+ *
* A LongMap can typically contain half as many elements as its buffer size
* before it requires resizing.
*/
def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true)
-
+
/** Creates a new `LongMap` with specified default values and initial buffer size. */
def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true)
-
+
private[this] var mask = 0
private[this] var extraKeys: Int = 0
private[this] var zeroValue: AnyRef = null
@@ -55,43 +55,43 @@ extends AbstractMap[Long, V]
private[this] var _vacant = 0
private[this] var _keys: Array[Long] = null
private[this] var _values: Array[AnyRef] = null
-
+
if (initBlank) defaultInitialize(initialBufferSize)
-
+
private[this] def defaultInitialize(n: Int) = {
- mask =
+ mask =
if (n<0) 0x7
else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7
_keys = new Array[Long](mask+1)
_values = new Array[AnyRef](mask+1)
}
-
+
private[collection] def initializeTo(
m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef]
) {
mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz
}
-
+
override def size: Int = _size + (extraKeys+1)/2
override def empty: LongMap[V] = new LongMap()
-
- private def imbalanced: Boolean =
+
+ private def imbalanced: Boolean =
(_size + _vacant) > 0.5*mask || _vacant > _size
-
+
private def toIndex(k: Long): Int = {
// Part of the MurmurHash3 32 bit finalizer
val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt
val x = (h ^ (h >>> 16)) * 0x85EBCA6B
(x ^ (x >>> 13)) & mask
}
-
+
private def seekEmpty(k: Long): Int = {
var e = toIndex(k)
var x = 0
while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
e
}
-
+
private def seekEntry(k: Long): Int = {
var e = toIndex(k)
var x = 0
@@ -99,7 +99,7 @@ extends AbstractMap[Long, V]
while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
e | MissingBit
}
-
+
private def seekEntryOrOpen(k: Long): Int = {
var e = toIndex(k)
var x = 0
@@ -116,12 +116,12 @@ extends AbstractMap[Long, V]
}
o
}
-
+
override def contains(key: Long): Boolean = {
if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0
else seekEntry(key) >= 0
}
-
+
override def get(key: Long): Option[V] = {
if (key == -key) {
if ((((key>>>63).toInt+1) & extraKeys) == 0) None
@@ -133,7 +133,7 @@ extends AbstractMap[Long, V]
if (i < 0) None else Some(_values(i).asInstanceOf[V])
}
}
-
+
override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = {
if (key == -key) {
if ((((key>>>63).toInt+1) & extraKeys) == 0) default
@@ -145,7 +145,7 @@ extends AbstractMap[Long, V]
if (i < 0) default else _values(i).asInstanceOf[V1]
}
}
-
+
override def getOrElseUpdate(key: Long, defaultValue: => V): V = {
if (key == -key) {
val kbits = (key>>>63).toInt + 1
@@ -185,10 +185,10 @@ extends AbstractMap[Long, V]
else _values(i).asInstanceOf[V]
}
}
-
+
/** Retrieves the value associated with a key, or the default for that type if none exists
* (null for AnyRef, 0 for floats and integers).
- *
+ *
* Note: this is the fastest way to retrieve a value that may or
* may not exist, if the default null/zero is acceptable. For key/value
* pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
@@ -204,8 +204,8 @@ extends AbstractMap[Long, V]
if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
}
}
-
- /** Retrieves the value associated with a key.
+
+ /** Retrieves the value associated with a key.
* If the key does not exist in the map, the `defaultEntry` for that key
* will be returned instead.
*/
@@ -220,12 +220,12 @@ extends AbstractMap[Long, V]
if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
}
}
-
+
/** The user-supplied default value for the key. Throws an exception
* if no other default behavior was specified.
*/
override def default(key: Long) = defaultEntry(key)
-
+
private def repack(newMask: Int) {
val ok = _keys
val ov = _values
@@ -244,9 +244,9 @@ extends AbstractMap[Long, V]
i += 1
}
}
-
+
/** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
- *
+ *
* For maps that undergo a complex creation process with both addition and
* removal of keys, and then are used heavily with no further removal of
* elements, calling `repack` after the end of the creation can result in
@@ -259,7 +259,7 @@ extends AbstractMap[Long, V]
while (m > 8 && 8*_size < m) m = m >>> 1
repack(m)
}
-
+
override def put(key: Long, value: V): Option[V] = {
if (key == -key) {
if (key == 0) {
@@ -294,9 +294,9 @@ extends AbstractMap[Long, V]
}
}
}
-
+
/** Updates the map to include a new key-value pair.
- *
+ *
* This is the fastest way to add an entry to a `LongMap`.
*/
override def update(key: Long, value: V): Unit = {
@@ -326,12 +326,12 @@ extends AbstractMap[Long, V]
}
}
}
-
+
/** Adds a new key/value pair to this map and returns the map. */
def +=(key: Long, value: V): this.type = { update(key, value); this }
-
+
def +=(kv: (Long, V)): this.type = { update(kv._1, kv._2); this }
-
+
def -=(key: Long): this.type = {
if (key == -key) {
if (key == 0L) {
@@ -354,22 +354,22 @@ extends AbstractMap[Long, V]
}
this
}
-
+
def iterator: Iterator[(Long, V)] = new Iterator[(Long, V)] {
private[this] val kz = _keys
private[this] val vz = _values
-
- private[this] var nextPair: (Long, V) =
+
+ private[this] var nextPair: (Long, V) =
if (extraKeys==0) null
else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V])
else (Long.MinValue, minValue.asInstanceOf[V])
- private[this] var anotherPair: (Long, V) =
+ private[this] var anotherPair: (Long, V) =
if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V])
else null
-
+
private[this] var index = 0
-
+
def hasNext: Boolean = nextPair != null || (index < kz.length && {
var q = kz(index)
while (q == -q) {
@@ -392,8 +392,8 @@ extends AbstractMap[Long, V]
ans
}
}
-
- override def foreach[A](f: ((Long,V)) => A) {
+
+ override def foreach[U](f: ((Long,V)) => U) {
if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V]))
if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V]))
var i,j = 0
@@ -406,7 +406,7 @@ extends AbstractMap[Long, V]
i += 1
}
}
-
+
override def clone(): LongMap[V] = {
val kz = java.util.Arrays.copyOf(_keys, _keys.length)
val vz = java.util.Arrays.copyOf(_values, _values.length)
@@ -414,19 +414,19 @@ extends AbstractMap[Long, V]
lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz)
lm
}
-
+
override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = {
val lm = clone().asInstanceOf[LongMap[V1]]
lm += kv
lm
}
-
+
override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = {
val lm = clone().asInstanceOf[LongMap[V1]]
xs.foreach(kv => lm += kv)
lm
}
-
+
override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = {
val lm = clone().asInstanceOf[LongMap[V1]]
lm += (key, value)
@@ -462,7 +462,7 @@ extends AbstractMap[Long, V]
i += 1
}
}
-
+
/** Creates a new `LongMap` with different values.
* Unlike `mapValues`, this method generates a new
* collection immediately.
@@ -485,8 +485,8 @@ extends AbstractMap[Long, V]
lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz)
lm
}
-
- /** Applies a transformation function to all values stored in this map.
+
+ /** Applies a transformation function to all values stored in this map.
* Note: the default, if any, is not transformed.
*/
def transformValues(f: V => V): this.type = {
@@ -510,15 +510,15 @@ object LongMap {
private final val MissingBit = 0x80000000
private final val VacantBit = 0x40000000
private final val MissVacant = 0xC0000000
-
+
private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString)
-
- implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] =
+
+ implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] =
new CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] {
def apply(from: LongMap[V]): LongMapBuilder[U] = apply()
def apply(): LongMapBuilder[U] = new LongMapBuilder[U]
}
-
+
final class LongMapBuilder[V] extends Builder[(Long, V), LongMap[V]] {
private[collection] var elems: LongMap[V] = new LongMap[V]
def +=(entry: (Long, V)): this.type = {
@@ -537,14 +537,14 @@ object LongMap {
if (lm.size < (sz>>3)) lm.repack()
lm
}
-
+
/** Creates a new empty `LongMap`. */
def empty[V]: LongMap[V] = new LongMap[V]
-
+
/** Creates a new empty `LongMap` with the supplied default */
def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default)
-
- /** Creates a new `LongMap` from arrays of keys and values.
+
+ /** Creates a new `LongMap` from arrays of keys and values.
* Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
*/
def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = {
@@ -555,8 +555,8 @@ object LongMap {
if (lm.size < (sz>>3)) lm.repack()
lm
}
-
- /** Creates a new `LongMap` from keys and values.
+
+ /** Creates a new `LongMap` from keys and values.
* Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
*/
def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = {
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 646023f469..a333eedb1a 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -11,7 +11,7 @@ package collection
package mutable
import generic._
-import immutable.{List, Nil}
+import immutable.List
/**
* This class is used internally to represent mutable lists. It is the
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 40a5c93064..a19130e742 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -140,7 +140,9 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
/** Removes all elements from the set. After this operation is completed,
* the set will be empty.
*/
- def clear() { foreach(-=) }
+ def clear(): Unit =
+ for (elem <- this.toList)
+ this -= elem
override def clone(): This = empty ++= repr.seq
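
The new `clear()` above first snapshots the elements with `toList` and only then removes them, because removing entries while `foreach` is still walking the set's internal storage can skip elements. A hedged sketch of the same snapshot-then-mutate pattern:

    import scala.collection.mutable

    def drainAll[A](s: mutable.Set[A]): Unit =
      for (elem <- s.toList)   // iterate over an immutable snapshot
        s -= elem              // now the set can be mutated safely

    val s = mutable.Set(1, 2, 3, 4, 5)
    drainAll(s)
    assert(s.isEmpty)
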
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index 60e2e79d3f..dd842f26ce 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -78,7 +78,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.subsetOf(that)
}
- override def foreach[U](f: A => U) = synchronized {
+ override def foreach[U](f: A => U) = synchronized {
super.foreach(f)
}
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index bbb6f5a9bb..c77a6fad62 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -27,7 +27,6 @@ package mutable
*/
@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0")
class SynchronizedStack[A] extends Stack[A] {
- import scala.collection.Traversable
/** Checks if the stack is empty.
*
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index 13fe7a79c4..856f901b77 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -13,8 +13,11 @@ package scala
*
* == Guide ==
*
- * A detailed guide for the collections library is available
+ * A detailed guide for using the collections library is available
* at [[http://docs.scala-lang.org/overviews/collections/introduction.html]].
+ * Developers looking to extend the collections library can find a description
+ * of its architecture at
+ * [[http://docs.scala-lang.org/overviews/core/architecture-of-scala-collections.html]].
*
* == Using Collections ==
*
@@ -31,24 +34,25 @@ package scala
* array: Array[Int] = Array(1, 2, 3, 4, 5, 6)
*
* scala> array map { _.toString }
- * res0: Array[java.lang.String] = Array(1, 2, 3, 4, 5, 6)
+ * res0: Array[String] = Array(1, 2, 3, 4, 5, 6)
*
* scala> val list = List(1,2,3,4,5,6)
* list: List[Int] = List(1, 2, 3, 4, 5, 6)
*
* scala> list map { _.toString }
- * res1: List[java.lang.String] = List(1, 2, 3, 4, 5, 6)
+ * res1: List[String] = List(1, 2, 3, 4, 5, 6)
*
* }}}
*
* == Creating Collections ==
*
- * The most common way to create a collection is to use the companion objects as factories.
- * Of these, the three most common
- * are [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
- * companion objects are all available
- * as type aliases the either the [[scala]] package or in `scala.Predef`, and can be used
- * like so:
+ * The most common way to create a collection is to use its companion object as
+ * a factory. The three most commonly used collections are
+ * [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and
+ * [[scala.collection.immutable.Map]].
+ * They can be used directly as shown below since their companion objects are
+ * all available as type aliases in either the [[scala]] package or in
+ * `scala.Predef`. New collections are created like this:
* {{{
* scala> val seq = Seq(1,2,3,4,1)
* seq: Seq[Int] = List(1, 2, 3, 4, 1)
@@ -56,12 +60,12 @@ package scala
* scala> val set = Set(1,2,3,4,1)
* set: scala.collection.immutable.Set[Int] = Set(1, 2, 3, 4)
*
- * scala> val map = Map(1 -> "one",2 -> "two", 3 -> "three",2 -> "too")
- * map: scala.collection.immutable.Map[Int,java.lang.String] = Map((1,one), (2,too), (3,three))
+ * scala> val map = Map(1 -> "one", 2 -> "two", 3 -> "three", 2 -> "too")
+ * map: scala.collection.immutable.Map[Int,String] = Map(1 -> one, 2 -> too, 3 -> three)
* }}}
*
- * It is also typical to use the [[scala.collection.immutable]] collections over those
- * in [[scala.collection.mutable]]; The types aliased in
+ * It is also typical to prefer the [[scala.collection.immutable]] collections
+ * over those in [[scala.collection.mutable]]; the types aliased in
* the `scala.Predef` object are the immutable versions.
*
* Also note that the collections library was carefully designed to include several implementations of
@@ -74,9 +78,13 @@ package scala
*
* === Converting between Java Collections ===
*
- * The `JavaConversions` object provides implicit defs that will allow mostly seamless integration
- * between Java Collections-based APIs and the Scala collections library.
+ * The [[scala.collection.JavaConversions]] object provides implicit defs that
+ * will allow mostly seamless integration between APIs using Java Collections
+ * and the Scala collections library.
*
+ * Alternatively the [[scala.collection.JavaConverters]] object provides a collection
+ * of decorators that allow converting between Scala and Java collections using `asScala`
+ * and `asJava` methods.
*/
package object collection {
import scala.collection.generic.CanBuildFrom
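
The doc addition above points readers at `JavaConverters` as the explicit, decorator-based alternative to the `JavaConversions` implicits. A REPL-style usage sketch:

    import scala.collection.JavaConverters._

    val jlist = new java.util.ArrayList[Int]()
    jlist.add(1); jlist.add(2)

    val slist: Seq[Int] = jlist.asScala.toSeq       // Java to Scala, explicit
    val back: java.util.List[Int] = slist.asJava    // Scala to Java, explicit
    assert(slist == Seq(1, 2) && back.size == 2)
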
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a5a82764d6..2ed7bc075e 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -9,6 +9,8 @@
package scala
package collection.parallel
+import scala.language.{ higherKinds, implicitConversions }
+
import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
import scala.collection.IterableLike
@@ -21,13 +23,9 @@ import scala.collection.GenIterable
import scala.collection.GenTraversableOnce
import scala.collection.GenTraversable
import immutable.HashMapCombiner
-import scala.reflect.{ClassTag, classTag}
-
-import java.util.concurrent.atomic.AtomicBoolean
+import scala.reflect.ClassTag
import scala.annotation.unchecked.uncheckedVariance
-import scala.annotation.unchecked.uncheckedStable
-import scala.language.{ higherKinds, implicitConversions }
import scala.collection.parallel.ParallelCollectionImplicits._
@@ -1284,7 +1282,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf())
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
@@ -1300,7 +1298,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf())
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = if (pit.remaining <= len) {
val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
@@ -1322,7 +1320,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Accessor[Unit, CopyToArray[U, This]] {
@volatile var result: Unit = ()
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
@@ -1379,7 +1377,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val half = howmany / 2
ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half))
} else trees(from)
- protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(pit: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
@@ -1416,7 +1414,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
new FromScanTree(left, z, op, cbf),
new FromScanTree(right, z, op, cbf)
)
- case _ => unsupportedop("Cannot be split further")
+ case _ => throw new UnsupportedOperationException("Cannot be split further")
}
def shouldSplitFurther = tree match {
case ScanNode(_, _) => true
@@ -1499,5 +1497,4 @@ self: ParIterableLike[T, Repr, Sequential] =>
append(s)
}
})
-
}
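
The replacements of the removed `unsupported` helper with a plain `throw new UnsupportedOperationException` all sit on `newSubtask` hooks of tasks that always provide their own `split`, so the hook is never expected to run. A hedged, self-contained sketch of that shape (hypothetical types, not the parallel-collections framework):

    // A task that cannot be split element-wise: the per-splitter hook fails
    // fast, and the class supplies its own split instead.
    trait Task { def split: Seq[Task] }

    final class ZipLikeTask(parts: Seq[Int]) extends Task {
      protected def newSubtask(p: Int): Task =
        throw new UnsupportedOperationException("use split instead")
      def split: Seq[Task] = parts.map(p => new ZipLikeTask(Seq(p)))
    }
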
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 9f92e6c1e8..70afe5174b 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -11,7 +11,6 @@ package collection.parallel
import scala.collection.Map
import scala.collection.GenMap
-import scala.collection.mutable.Builder
import scala.collection.generic.ParMapFactory
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index ee1334ba55..a3ac388587 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -12,10 +12,8 @@ package collection.parallel
import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
-import scala.collection.mutable.Builder
+
import scala.annotation.unchecked.uncheckedVariance
-import scala.collection.generic.IdleSignalling
-import scala.collection.generic.Signalling
/** A template trait for mutable parallel maps. This trait is to be mixed in
* with concrete parallel maps to override the representation type.
@@ -99,14 +97,14 @@ self =>
def - (elem: K): ParSet[K] =
(ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem
override def size = self.size
- override def foreach[S](f: K => S) = for ((k, v) <- self) f(k)
+ override def foreach[U](f: K => U) = for ((k, v) <- self) f(k)
override def seq = self.seq.keySet
}
protected class DefaultValuesIterable extends ParIterable[V] {
def splitter = valuesIterator(self.splitter)
override def size = self.size
- override def foreach[S](f: V => S) = for ((k, v) <- self) f(v)
+ override def foreach[U](f: V => U) = for ((k, v) <- self) f(v)
def seq = self.seq.values
}
@@ -118,7 +116,7 @@ self =>
def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] {
lazy val filtered = self.filter(kv => p(kv._1))
- override def foreach[S](f: ((K, V)) => S): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def splitter = filtered.splitter
override def contains(key: K) = self.contains(key) && p(key)
def get(key: K) = if (!p(key)) None else self.get(key)
@@ -129,7 +127,7 @@ self =>
}
def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] {
- override def foreach[Q](g: ((K, S)) => Q): Unit = for ((k, v) <- self) g((k, f(v)))
+ override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v)))
def splitter = self.splitter.map(kv => (kv._1, f(kv._2)))
override def size = self.size
override def contains(key: K) = self.contains(key)
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 0b6fec364e..60fa1858e7 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -9,11 +9,10 @@
package scala
package collection.parallel
-import scala.collection.{ Parallel, SeqLike, GenSeqLike, GenSeq, GenIterable, Iterator }
+import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator }
import scala.collection.generic.DefaultSignalling
import scala.collection.generic.AtomicIndexFlag
import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanCombineFrom
import scala.collection.generic.VolatileAbort
import scala.collection.parallel.ParallelCollectionImplicits._
@@ -365,7 +364,7 @@ self =>
pit.setIndexFlagIfLesser(from)
}
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
@@ -386,7 +385,7 @@ self =>
pit.setIndexFlagIfGreater(pos)
}
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
@@ -420,7 +419,7 @@ self =>
result = pit.sameElements(otherpit)
if (!result) pit.abort()
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
@@ -434,7 +433,7 @@ self =>
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf())
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
@@ -447,7 +446,7 @@ self =>
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf())
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = len / 2
val sp = len - len / 2
@@ -468,7 +467,7 @@ self =>
result = pit.corresponds(corr)(otherpit)
if (!result) pit.abort()
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
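Note: with the private `unsupported` helper removed from the `parallel` package object (see the package.scala hunk below), these accessor tasks throw `UnsupportedOperationException` directly; they are only ever split through their overridden `split`, never via `newSubtask`. A hypothetical reduction of that pattern (the names here are not the library's):

    abstract class MiniTask[R] {
      // splitting goes through split; newSubtask is deliberately unsupported
      protected[this] def newSubtask(remaining: Int): MiniTask[R] =
        throw new UnsupportedOperationException("use split instead")
      def split: Seq[MiniTask[R]]
      def leaf(): R
    }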
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 7bb278b038..63d63d9ef3 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -9,13 +9,10 @@
package scala
package collection.parallel
-import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
import scala.collection.generic.IdleSignalling
-import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
-import scala.collection.Iterator.empty
import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 2956c2a883..65bb2e12c5 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -16,7 +16,6 @@ import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
-import scala.collection.GenMapLike
/** A template trait for immutable parallel maps.
*
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index ec90de3a7d..8fd5382ce9 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -12,7 +12,6 @@ package collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
-import scala.collection.generic.CanCombineFrom
import scala.collection.Iterator
/** Parallel ranges.
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index d0d022db4b..8a2cf2716a 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -18,7 +18,6 @@ import scala.collection.generic.GenericParCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParFactory
-import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
import scala.collection.parallel.ParSeqLike
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 79322c85b1..6883457fef 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -9,18 +9,10 @@
package scala
package collection.parallel.mutable
-
-
-import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.unsupportedop
-import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
-
-
/** An array combiner that uses a chain of arraybuffers to store elements. */
trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] {
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index d1379cde11..5e4572da60 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -9,13 +9,9 @@
package scala
package collection.parallel.mutable
-import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
-import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.UnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
import scala.reflect.ClassTag
@@ -62,7 +58,7 @@ extends Combiner[T, ParArray[T]] {
case that: UnrolledParArrayCombiner[t] =>
buff concat that.buff
this
- case _ => unsupportedop("Cannot combine with combiner of different type.")
+ case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.")
}
def size = buff.size
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index d77dcb0658..ba64ca505b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -35,15 +35,7 @@ package object parallel {
else sz
}
- private[parallel] def unsupported = throw new UnsupportedOperationException
-
- private[parallel] def unsupportedop(msg: String) = throw new UnsupportedOperationException(msg)
-
- private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
-
- private[parallel] def getTaskSupport: TaskSupport = new ExecutionContextTaskSupport
-
- val defaultTaskSupport: TaskSupport = getTaskSupport
+ val defaultTaskSupport: TaskSupport = new ExecutionContextTaskSupport
def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
c match {
@@ -98,7 +90,7 @@ package parallel {
}
}
}
-
+
trait FactoryOps[From, Elem, To] {
trait Otherwise[R] {
def otherwise(notbody: => R): R
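Note: `defaultTaskSupport` is now initialized directly with `new ExecutionContextTaskSupport` instead of going through the removed `getTaskSupport` indirection; observable behaviour should be unchanged. A usage sketch, assuming the standard parallel-collections API:

    import scala.collection.parallel._
    import scala.collection.parallel.immutable.ParVector
    import scala.concurrent.ExecutionContext

    val pv = ParVector(1, 2, 3)
    pv.tasksupport = new ExecutionContextTaskSupport(ExecutionContext.global)  // explicit choice
    pv.tasksupport = defaultTaskSupport                                        // back to the ExecutionContext-backed default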
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index df2d68c9c6..e022b94ea8 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -11,7 +11,6 @@ package scala.concurrent
import java.util.concurrent.{ ExecutorService, Executor }
import scala.annotation.implicitNotFound
-import scala.util.Try
/**
* An `ExecutionContext` can execute program logic asynchronously,
@@ -75,7 +74,7 @@ trait ExecutionContext {
/** Prepares for the execution of a task. Returns the prepared
* execution context. The recommended implementation of
* `prepare` is to return `this`.
- *
+ *
* This method should no longer be overridden or called. It was
* originally expected that `prepare` would be called by
* all libraries that consume ExecutionContexts, in order to
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index c8ababbcab..f49536d351 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -10,15 +10,12 @@ package scala.concurrent
import scala.language.higherKinds
-import java.util.concurrent.{ CountDownLatch, TimeUnit, Callable }
-import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
+import java.util.concurrent.{CountDownLatch, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
import scala.util.control.NonFatal
import scala.util.{Try, Success, Failure}
import scala.concurrent.duration._
-import scala.annotation.tailrec
-import scala.collection.mutable.Builder
import scala.collection.generic.CanBuildFrom
import scala.reflect.ClassTag
@@ -215,7 +212,7 @@ trait Future[+T] extends Awaitable[T] {
* @param f function that transforms a failure of the receiver into a failure of the returned future
* @return a `Future` that will be completed with the transformed value
*/
- def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] =
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] =
transform {
case Success(r) => Try(s(r))
case Failure(t) => Try(throw f(t)) // will throw fatal errors!
@@ -348,7 +345,7 @@ trait Future[+T] extends Awaitable[T] {
* @param pf the `PartialFunction` to apply if this `Future` fails
* @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction`
*/
- def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] =
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] =
transform { _ recover pf }
/** Creates a new future that will handle any matching throwable that this
@@ -429,7 +426,7 @@ trait Future[+T] extends Awaitable[T] {
* @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] =
- if (this eq that) this
+ if (this eq that) this
else {
implicit val ec = internalExecutor
recoverWith { case _ => that } recoverWith { case _ => this }
@@ -532,7 +529,7 @@ object Future {
ready(atMost)
throw new TimeoutException(s"Future timed out after [$atMost]")
}
-
+
override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = ()
override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = ()
@@ -597,7 +594,7 @@ object Future {
* @return the `Future` holding the result of the computation
*/
def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] =
- unit.map(_ => body)
+ unit.map(_ => body)
/** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]`
* into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`.
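Note: `Future.apply` is now expressed as `unit.map(_ => body)`; callers see no difference. A brief usage sketch (using the global ExecutionContext only as an example):

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    val f: Future[Int] = Future(40 + 2)                          // body runs on the implicit executor
    val g: Future[Int] = f.recover { case _: ArithmeticException => -1 }
    val h: Future[Int] = f.fallbackTo(Future.successful(0))      // used only if f fails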
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
deleted file mode 100644
index 089e67cedd..0000000000
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2009-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import scala.language.{implicitConversions, higherKinds}
-
-/** The `FutureTaskRunner` trait is a base trait of task runners
- * that provide some sort of future abstraction.
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-private[scala] trait FutureTaskRunner extends TaskRunner {
-
- /** The type of the futures that the underlying task runner supports.
- */
- type Future[T]
-
- /** An implicit conversion from futures to zero-parameter functions.
- */
- implicit def futureAsFunction[S](x: Future[S]): () => S
-
- /** Submits a task to run which returns its result in a future.
- */
- def submit[S](task: Task[S]): Future[S]
-
- /* Possibly blocks the current thread, for example, waiting for
- * a lock or condition.
- */
- @deprecated("Use `blocking` instead.", "2.10.0")
- def managedBlock(blocker: ManagedBlocker): Unit
-
-}
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
deleted file mode 100644
index b5a6e21893..0000000000
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-/** The `ManagedBlocker` trait...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `blocking` instead.", "2.10.0")
-private[scala] trait ManagedBlocker {
-
- /**
- * Possibly blocks the current thread, for example waiting for
- * a lock or condition.
- *
- * @return true if no additional blocking is necessary (i.e.,
- * if `isReleasable` would return `true`).
- * @throws InterruptedException if interrupted while waiting
- * (the method is not required to do so, but is allowed to).
- */
- def block(): Boolean
-
- /**
- * Returns `true` if blocking is unnecessary.
- */
- def isReleasable: Boolean
-
-}
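Note: `ManagedBlocker` (like the task-runner traits deleted elsewhere in this diff) has been deprecated since 2.10; as its deprecation message says, `scala.concurrent.blocking` is the replacement. A usage sketch:

    import scala.concurrent.{Future, blocking}
    import scala.concurrent.ExecutionContext.Implicits.global

    val slow: Future[String] = Future {
      blocking {                  // hints the pool that the enclosed code may block
        Thread.sleep(100)
      }
      "done"
    }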
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 1ee27b0f36..127e6b58d2 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -19,17 +19,17 @@ import java.util.concurrent.TimeUnit
*/
class SyncVar[A] {
private var isDefined: Boolean = false
- private var value: Option[A] = None
+ private var value: A = _
/**
- * Waits for this SyncVar to become defined and returns
- * the result, without modifying the stored value.
+ * Wait for this SyncVar to become defined and then get
+ * the stored value without modifying it.
*
* @return value that is held in this container
*/
def get: A = synchronized {
while (!isDefined) wait()
- value.get
+ value
}
/** Waits `timeout` millis. If `timeout <= 0` just returns 0.
@@ -44,11 +44,10 @@ class SyncVar[A] {
if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed)
}
- /** Waits for this SyncVar to become defined at least for
- * `timeout` milliseconds (possibly more), and gets its
- * value.
+ /** Wait at least `timeout` milliseconds (possibly more) for this `SyncVar`
+ * to become defined and then get its value.
*
- * @param timeout the amount of milliseconds to wait, 0 means forever
+ * @param timeout time in milliseconds to wait
* @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise
*/
def get(timeout: Long): Option[A] = synchronized {
@@ -61,12 +60,12 @@ class SyncVar[A] {
val elapsed = waitMeasuringElapsed(rest)
rest -= elapsed
}
- value
+ if (isDefined) Some(value) else None
}
/**
- * Waits for this SyncVar to become defined and returns
- * the result, unsetting the stored value before returning.
+ * Wait for this SyncVar to become defined and then get
+ * the stored value, unsetting it as a side effect.
*
* @return value that was held in this container
*/
@@ -75,12 +74,11 @@ class SyncVar[A] {
finally unsetVal()
}
- /** Waits for this SyncVar to become defined at least for
- * `timeout` milliseconds (possibly more), and takes its
- * value by first reading and then removing the value from
- * the SyncVar.
+ /** Wait at least `timeout` milliseconds (possibly more) for this `SyncVar`
+ * to become defined and then get the stored value, unsetting it
+ * as a side effect.
*
- * @param timeout the amount of milliseconds to wait, 0 means forever
+ * @param timeout the amount of milliseconds to wait
   * @return the value, or throws an exception if the timeout occurs
* @throws NoSuchElementException on timeout
*/
@@ -97,14 +95,14 @@ class SyncVar[A] {
// NOTE: Used by SBT 0.13.0-M2 and below
def set(x: A): Unit = setVal(x)
- /** Places a value in the SyncVar. If the SyncVar already has a stored value,
- * it waits until another thread takes it */
+ /** Place a value in the SyncVar. If the SyncVar already has a stored value,
+ * wait until another thread takes it. */
def put(x: A): Unit = synchronized {
while (isDefined) wait()
setVal(x)
}
- /** Checks whether a value is stored in the synchronized variable */
+ /** Check whether a value is stored in the synchronized variable. */
def isSet: Boolean = synchronized {
isDefined
}
@@ -117,7 +115,7 @@ class SyncVar[A] {
// NOTE: Used by SBT 0.13.0-M2 and below
def unset(): Unit = synchronized {
isDefined = false
- value = None
+ value = null.asInstanceOf[A]
notifyAll()
}
@@ -126,7 +124,7 @@ class SyncVar[A] {
// implementation of `set` was moved to `setVal` to achieve this
private def setVal(x: A): Unit = synchronized {
isDefined = true
- value = Some(x)
+ value = x
notifyAll()
}
@@ -135,8 +133,7 @@ class SyncVar[A] {
// implementation of `unset` was moved to `unsetVal` to achieve this
private def unsetVal(): Unit = synchronized {
isDefined = false
- value = None
+ value = null.asInstanceOf[A]
notifyAll()
}
-
}
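Note: the value is now stored as a bare `A` guarded by `isDefined` rather than as an `Option[A]`; the public behaviour of `get`, `take` and `put` is unchanged. A short usage sketch:

    import scala.concurrent.SyncVar

    val box = new SyncVar[Int]
    new Thread(() => box.put(42)).start()   // producer: put blocks while a value is already stored
    val x: Int = box.take()                 // consumer: waits until defined, reads and unsets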
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
deleted file mode 100644
index 1ea23b35e8..0000000000
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import scala.language.{higherKinds, implicitConversions}
-
-/** The `TaskRunner` trait...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-private[scala] trait TaskRunner {
-
- type Task[T]
-
- implicit def functionAsTask[S](fun: () => S): Task[S]
-
- def execute[S](task: Task[S]): Unit
-
- def shutdown(): Unit
-}
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
deleted file mode 100644
index 7784681f71..0000000000
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
-import scala.language.implicitConversions
-
-/** The `ThreadPoolRunner` trait uses a `java.util.concurrent.ExecutorService`
- * to run submitted tasks.
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-private[scala] trait ThreadPoolRunner extends FutureTaskRunner {
-
- type Task[T] = Callable[T] with Runnable
- type Future[T] = java.util.concurrent.Future[T]
-
- private class RunCallable[S](fun: () => S) extends Runnable with Callable[S] {
- def run() = fun()
- def call() = fun()
- }
-
- implicit def functionAsTask[S](fun: () => S): Task[S] =
- new RunCallable(fun)
-
- implicit def futureAsFunction[S](x: Future[S]): () => S =
- () => x.get()
-
- protected def executor: ExecutorService
-
- def submit[S](task: Task[S]): Future[S] = {
- executor.submit[S](task)
- }
-
- def execute[S](task: Task[S]) {
- executor execute task
- }
-
- @deprecated("Use `blocking` instead.", "2.10.0")
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
-}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index c98746a98d..7bf5cc5729 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -8,13 +8,10 @@
package scala.concurrent.impl
-
-
-import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, ForkJoinTask, LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor }
+import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, ForkJoinTask, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit }
import java.util.concurrent.atomic.AtomicInteger
import java.util.Collection
-import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
-import scala.util.control.NonFatal
+import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
import scala.annotation.tailrec
@@ -57,7 +54,7 @@ private[concurrent] object ExecutionContextImpl {
}
// As per ThreadFactory contract newThread should return `null` if cannot create new thread.
- def newThread(runnable: Runnable): Thread =
+ def newThread(runnable: Runnable): Thread =
if (reserveThread())
wire(new Thread(new Runnable {
// We have to decrement the current thread count when the thread exits
@@ -80,7 +77,7 @@ private[concurrent] object ExecutionContextImpl {
} finally {
isdone = true
}
-
+
true
}
override def isReleasable = isdone
@@ -123,33 +120,17 @@ private[concurrent] object ExecutionContextImpl {
prefix = "scala-execution-context-global",
uncaught = uncaughtExceptionHandler)
- try {
- new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) {
- override def execute(runnable: Runnable): Unit = {
- val fjt: ForkJoinTask[_] = runnable match {
- case t: ForkJoinTask[_] => t
- case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
- }
- Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork()
- case _ => super.execute(fjt)
- }
+ new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) {
+ override def execute(runnable: Runnable): Unit = {
+ val fjt: ForkJoinTask[_] = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork()
+ case _ => super.execute(fjt)
}
}
- } catch {
- case NonFatal(t) =>
- System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to ThreadPoolExecutor")
- t.printStackTrace(System.err)
- val exec = new ThreadPoolExecutor(
- desiredParallelism,
- desiredParallelism,
- 5L,
- TimeUnit.MINUTES,
- new LinkedBlockingQueue[Runnable],
- threadFactory
- )
- exec.allowCoreThreadTimeOut(true)
- exec
}
}
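Note: the `try`/`catch` fallback to a `ThreadPoolExecutor` is gone, so the default context is always backed by a `ForkJoinPool`. Code that wants its own pool-backed context goes through the public factories; a sketch (the parallelism value is illustrative):

    import java.util.concurrent.ForkJoinPool
    import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService}

    val pool = new ForkJoinPool(Runtime.getRuntime.availableProcessors)
    val ec: ExecutionContextExecutorService = ExecutionContext.fromExecutorService(pool)
    // ... submit work via ec ...
    ec.shutdown()   // the caller owns the pool's lifecycle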
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 3538ac6b94..178cd6d912 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,13 +8,13 @@
package scala.concurrent.impl
-import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
import scala.concurrent.Future.InternalCallbackExecutor
-import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
+import scala.concurrent.duration.{ Duration, FiniteDuration }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
-import java.io.ObjectInputStream
+
import java.util.concurrent.locks.AbstractQueuedSynchronizer
import java.util.concurrent.atomic.AtomicReference
@@ -393,7 +393,7 @@ private[concurrent] object Promise {
override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)]
override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R]
- override def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] =
if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor)
override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S]
}
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
index 7d20219d4d..7614a96f95 100644
--- a/src/library/scala/deprecatedInheritance.scala
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -19,5 +19,4 @@ package scala
* @since 2.10
* @see [[scala.deprecatedOverriding]]
*/
-private[scala] // for now, this needs to be generalized to communicate other modifier deltas
class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala
index 04bce343a0..26a9d9ee7d 100644
--- a/src/library/scala/deprecatedOverriding.scala
+++ b/src/library/scala/deprecatedOverriding.scala
@@ -17,5 +17,4 @@ package scala
* @since 2.10
* @see [[scala.deprecatedInheritance]]
*/
-private[scala] // for the same reasons as deprecatedInheritance
class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
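Note: dropping `private[scala]` makes both annotations usable outside the standard library. A usage sketch with hypothetical class and method names; the warnings appear under -deprecation:

    @deprecatedInheritance("extend Base2 instead", "1.0")
    class Base {
      @deprecatedOverriding("overriding hook changes threading assumptions", "1.0")
      def hook(): Unit = ()
    }

    class Leaf extends Base {           // warns: inheriting from Base is deprecated
      override def hook(): Unit = ()    // warns: overriding hook is deprecated
    }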
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 52fa525b24..33b5a1468e 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -8,11 +8,9 @@
package scala.io
-import java.util.Arrays
import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader }
import Source.DefaultBufSize
import scala.collection.{ Iterator, AbstractIterator }
-import scala.collection.mutable.ArrayBuffer
/** This object provides convenience methods to create an iterable
* representation of a source file.
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index bb337e7a1d..371fd59a93 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -10,13 +10,12 @@
package scala
package math
-import java.{ lang => jl }
-import java.math.{ MathContext, BigDecimal => BigDec }
-import scala.collection.immutable.NumericRange
import scala.language.implicitConversions
+import java.math.{ MathContext, BigDecimal => BigDec }
+import scala.collection.immutable.NumericRange
-/**
+/**
* @author Stephane Micheloud
* @author Rex Kerr
* @version 1.1
@@ -44,17 +43,17 @@ object BigDecimal {
val HALF_UP = Value(RM.HALF_UP.ordinal)
val HALF_DOWN = Value(RM.HALF_DOWN.ordinal)
val HALF_EVEN = Value(RM.HALF_EVEN.ordinal)
- val UNNECESSARY = Value(RM.UNNECESSARY.ordinal)
+ val UNNECESSARY = Value(RM.UNNECESSARY.ordinal)
}
-
+
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
def decimal(d: Double, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
-
- /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary.
+
+ /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
* `0.1 != 0.1f`.
*/
@@ -66,18 +65,18 @@ object BigDecimal {
* `0.1 != 0.1f`.
*/
def decimal(f: Float): BigDecimal = decimal(f, defaultMathContext)
-
+
// This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
/** Constructs a `BigDecimal` from a `Long`, rounding if necessary. This is identical to `BigDecimal(l, mc)`. */
def decimal(l: Long, mc: MathContext): BigDecimal = apply(l, mc)
-
+
// This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
/** Constructs a `BigDecimal` from a `Long`. This is identical to `BigDecimal(l)`. */
def decimal(l: Long): BigDecimal = apply(l)
-
+
/** Constructs a `BigDecimal` using a `java.math.BigDecimal`, rounding if necessary. */
def decimal(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd.round(mc), mc)
-
+
/** Constructs a `BigDecimal` by expanding the binary fraction
* contained by `Double` value `d` into a decimal representation,
* rounding if necessary. When a `Float` is converted to a
@@ -85,50 +84,50 @@ object BigDecimal {
* also works for converted `Float`s.
*/
def binary(d: Double, mc: MathContext): BigDecimal = new BigDecimal(new BigDec(d, mc), mc)
-
+
/** Constructs a `BigDecimal` by expanding the binary fraction
* contained by `Double` value `d` into a decimal representation.
* Note: this also works correctly on converted `Float`s.
*/
def binary(d: Double): BigDecimal = binary(d, defaultMathContext)
-
+
/** Constructs a `BigDecimal` from a `java.math.BigDecimal`. The
* precision is the default for `BigDecimal` or enough to represent
* the `java.math.BigDecimal` exactly, whichever is greater.
*/
def exact(repr: BigDec): BigDecimal = {
- val mc =
+ val mc =
if (repr.precision <= defaultMathContext.getPrecision) defaultMathContext
else new MathContext(repr.precision, java.math.RoundingMode.HALF_EVEN)
new BigDecimal(repr, mc)
}
-
+
/** Constructs a `BigDecimal` by fully expanding the binary fraction
* contained by `Double` value `d`, adjusting the precision as
* necessary. Note: this works correctly on converted `Float`s also.
*/
def exact(d: Double): BigDecimal = exact(new BigDec(d))
-
+
/** Constructs a `BigDecimal` that exactly represents a `BigInt`.
*/
def exact(bi: BigInt): BigDecimal = exact(new BigDec(bi.bigInteger))
-
+
/** Constructs a `BigDecimal` that exactly represents a `Long`. Note that
* all creation methods for `BigDecimal` that do not take a `MathContext`
* represent a `Long`; this is equivalent to `apply`, `valueOf`, etc..
*/
def exact(l: Long): BigDecimal = apply(l)
-
+
/** Constructs a `BigDecimal` that exactly represents the number
* specified in a `String`.
*/
def exact(s: String): BigDecimal = exact(new BigDec(s))
-
+
/** Constructs a `BigDecimal` that exactly represents the number
* specified in base 10 in a character array.
*/
def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs))
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. Equivalent to `BigDecimal.decimal`.
@@ -137,7 +136,7 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor, specifying a `MathContext` that is
* used for computations but isn't used for rounding. Use
@@ -151,7 +150,7 @@ object BigDecimal {
*/
@deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.","2.11")
def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor.
*
@@ -159,14 +158,14 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def valueOf(x: Long): BigDecimal = apply(x)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. This is unlikely to do what you want;
* use `valueOf(f.toDouble)` or `decimal(f)` instead.
*/
@deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).","2.11")
def valueOf(f: Float): BigDecimal = valueOf(f.toDouble)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. This is unlikely to do what you want;
* use `valueOf(f.toDouble)` or `decimal(f)` instead.
@@ -174,7 +173,7 @@ object BigDecimal {
@deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).","2.11")
def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc)
-
+
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `Integer` value.
*
@@ -247,7 +246,7 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def apply(d: Double): BigDecimal = decimal(d, defaultMathContext)
-
+
// note we don't use the static valueOf because it doesn't let us supply
// a MathContext, but we should be duplicating its logic, modulo caching.
/** Constructs a `BigDecimal` whose value is equal to that of the
@@ -281,7 +280,7 @@ object BigDecimal {
* into a `BigDecimal`.
*/
def apply(x: String): BigDecimal = exact(x)
-
+
/** Translates the decimal String representation of a `BigDecimal`
* into a `BigDecimal`, rounding if necessary.
*/
@@ -295,12 +294,12 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def apply(x: BigInt): BigDecimal = exact(x)
-
+
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `BigInt` value, rounding if necessary.
*
* @param x the specified `BigInt` value
- * @param mc the precision and rounding mode for creation of this value and future operations on it
+ * @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(x: BigInt, mc: MathContext): BigDecimal =
@@ -315,13 +314,13 @@ object BigDecimal {
*/
def apply(unscaledVal: BigInt, scale: Int): BigDecimal =
exact(new BigDec(unscaledVal.bigInteger, scale))
-
+
/** Constructs a `BigDecimal` whose unscaled value is equal to that
* of the specified `BigInt` value.
*
* @param unscaledVal the specified `BigInt` value
* @param scale the scale
- * @param mc the precision and rounding mode for creation of this value and future operations on it
+ * @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(unscaledVal: BigInt, scale: Int, mc: MathContext): BigDecimal =
@@ -329,7 +328,7 @@ object BigDecimal {
/** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */
def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext)
-
+
@deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11")
def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc)
@@ -402,7 +401,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext)
import BigDecimal.RoundingMode._
import BigDecimal.{decimal, binary, exact}
-
+
if (bigDecimal eq null) throw new IllegalArgumentException("null value for BigDecimal")
if (mc eq null) throw new IllegalArgumentException("null MathContext for BigDecimal")
@@ -423,7 +422,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
}
}
-
+
/** Returns the hash code for this BigDecimal.
* Note that this does not merely use the underlying java object's
* `hashCode` because we compare `BigDecimal`s with `compareTo`
@@ -444,15 +443,15 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
*/
override def equals (that: Any): Boolean = that match {
case that: BigDecimal => this equals that
- case that: BigInt =>
- that.bitLength > (precision-scale-2)*BigDecimal.deci2binary &&
+ case that: BigInt =>
+ that.bitLength > (precision-scale-2)*BigDecimal.deci2binary &&
this.toBigIntExact.exists(that equals _)
- case that: Double =>
+ case that: Double =>
!that.isInfinity && {
val d = toDouble
!d.isInfinity && d == that && equals(decimal(d))
}
- case that: Float =>
+ case that: Float =>
!that.isInfinity && {
val f = toFloat
!f.isInfinity && f == that && equals(decimal(f.toDouble))
@@ -482,43 +481,43 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
val d = toDouble
!d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0
}
-
+
/** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. */
def isDecimalDouble = {
val d = toDouble
!d.isInfinity && equals(decimal(d))
}
-
+
/** Tests whether this `BigDecimal` holds the decimal representation of a `Float`. */
def isDecimalFloat = {
val f = toFloat
!f.isInfinity && equals(decimal(f))
}
-
+
/** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Double`. */
def isBinaryDouble = {
val d = toDouble
!d.isInfinity && equals(binary(d,mc))
}
-
+
/** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Float`. */
def isBinaryFloat = {
val f = toFloat
!f.isInfinity && equals(binary(f,mc))
}
-
+
/** Tests whether this `BigDecimal` holds the exact expansion of a `Double`'s binary fractional form into base 10. */
def isExactDouble = {
val d = toDouble
!d.isInfinity && equals(exact(d))
}
-
+
/** Tests whether this `BigDecimal` holds the exact expansion of a `Float`'s binary fractional form into base 10. */
def isExactFloat = {
val f = toFloat
!f.isInfinity && equals(exact(f.toDouble))
}
-
+
private def noArithmeticException(body: => Unit): Boolean = {
try { body ; true }
@@ -526,9 +525,9 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
}
def isWhole() = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0
-
+
def underlying = bigDecimal
-
+
/** Compares this BigDecimal with the specified BigDecimal for equality.
*/
@@ -589,14 +588,14 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
case x if x <= 0 => this
case _ => that
}
-
+
/** Returns the maximum of this and that, or this if the two are equal
*/
def max (that: BigDecimal): BigDecimal = (this compare that) match {
case x if x >= 0 => this
case _ => that
}
-
+
/** Remainder after dividing this by that.
*/
def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal remainder that.bigDecimal, mc)
@@ -635,7 +634,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
val r = this.bigDecimal round mc
if (r eq bigDecimal) this else new BigDecimal(r, this.mc)
}
-
+
/** Returns a `BigDecimal` rounded according to its own `MathContext` */
def rounded: BigDecimal = {
val r = bigDecimal round mc
@@ -657,7 +656,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns a `BigDecimal` whose scale is the specified value, and whose value is
* numerically equal to this BigDecimal's.
*/
- def setScale(scale: Int): BigDecimal =
+ def setScale(scale: Int): BigDecimal =
if (this.scale == scale) this
else new BigDecimal(this.bigDecimal setScale scale, mc)
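Note: the BigDecimal.scala hunks are whitespace and doc cleanups, but they pass through the three constructor families (`decimal`, `binary`, `exact`) whose behaviour the comments describe. A quick illustration:

    import scala.math.BigDecimal

    BigDecimal.decimal(0.1f)   // parses the Float's decimal text form, i.e. 0.1
    BigDecimal.binary(0.1)     // expands the Double's binary fraction, rounded to the default MathContext
    BigDecimal.exact(0.1)      // the full, unrounded expansion of that binary fraction
    BigDecimal.exact("0.1")    // exact value of the decimal string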
diff --git a/src/library/scala/reflect/ScalaLongSignature.java b/src/library/scala/reflect/ScalaLongSignature.java
index 5b6d78f446..94cf504aa4 100644
--- a/src/library/scala/reflect/ScalaLongSignature.java
+++ b/src/library/scala/reflect/ScalaLongSignature.java
@@ -8,5 +8,5 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ScalaLongSignature {
- public String[] bytes();
+ String[] bytes();
}
diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java
index a8af554d2b..217ae8e8f7 100644
--- a/src/library/scala/reflect/ScalaSignature.java
+++ b/src/library/scala/reflect/ScalaSignature.java
@@ -8,5 +8,5 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ScalaSignature {
- public String bytes();
+ String bytes();
}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 986cd0390f..630966d0d4 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -9,8 +9,6 @@
package scala
package runtime
-import scala.annotation.unspecialized
-
/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
* in terms of `isDefinedAt` and `applyOrElse`.
*
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index 5e4da24c0d..9b4899aef6 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -9,8 +9,8 @@
package scala
package runtime
-import scala.collection.{ mutable, immutable }
-import scala.math.{ ScalaNumericConversions, ScalaNumericAnyConversions }
+import scala.collection.immutable
+import scala.math.ScalaNumericAnyConversions
import immutable.NumericRange
import Proxy.Typed
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 026d5edd29..1195f95093 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -9,16 +9,14 @@
package scala
package runtime
-import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator, GenIterable }
+import scala.collection.{ TraversableView, AbstractIterator, GenIterable }
import scala.collection.mutable.WrappedArray
-import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
+import scala.collection.immutable.{ StringLike, NumericRange }
import scala.collection.generic.{ Sorted, IsTraversableLike }
import scala.reflect.{ ClassTag, classTag }
-import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
-import java.lang.Double.doubleToLongBits
-import java.lang.reflect.{ Modifier, Method => JMethod }
+import java.lang.reflect.{ Method => JMethod }
/** The object ScalaRunTime provides support methods required by
* the scala runtime. All these methods should be considered
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index 74e67bb9e7..293bf950db 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -9,8 +9,6 @@
package scala
package runtime
-import java.util.Arrays.copyOfRange
-
@deprecated("Use Predef.SeqCharSequence", "2.11.0")
final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = xs.length
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 3ee4f6c708..ebe94651f9 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -32,7 +32,7 @@ class SystemProperties
extends mutable.AbstractMap[String, String]
with mutable.Map[String, String] {
- override def empty = new SystemProperties
+ override def empty = mutable.Map[String, String]()
override def default(key: String): String = null
def iterator: Iterator[(String, String)] = wrapAccess {
@@ -69,24 +69,24 @@ object SystemProperties {
def exclusively[T](body: => T) = this synchronized body
implicit def systemPropertiesToCompanion(p: SystemProperties): SystemProperties.type = this
- private lazy val propertyHelp = mutable.Map[String, String]()
- private def addHelp[P <: Prop[_]](p: P, helpText: String): P = {
- propertyHelp(p.key) = helpText
- p
+
+ private final val HeadlessKey = "java.awt.headless"
+ private final val PreferIPv4StackKey = "java.net.preferIPv4Stack"
+ private final val PreferIPv6AddressesKey = "java.net.preferIPv6Addresses"
+ private final val NoTraceSuppressionKey = "scala.control.noTraceSuppression"
+
+ def help(key: String): String = key match {
+ case HeadlessKey => "system should not utilize a display device"
+ case PreferIPv4StackKey => "system should prefer IPv4 sockets"
+ case PreferIPv6AddressesKey => "system should prefer IPv6 addresses"
+ case NoTraceSuppressionKey => "scala should not suppress any stack trace creation"
+ case _ => ""
}
- private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp](
- if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key),
- helpText
- )
- def help(key: String) = propertyHelp.getOrElse(key, "")
-
- // Todo: bring some sanity to the intersection of system properties aka "mutable
- // state shared by everyone and everything" and the reality that there is no other
- // mechanism for accomplishing some things on the jvm.
- lazy val headless = bool("java.awt.headless", "system should not utilize a display device")
- lazy val preferIPv4Stack = bool("java.net.preferIPv4Stack", "system should prefer IPv4 sockets")
- lazy val preferIPv6Addresses = bool("java.net.preferIPv6Addresses", "system should prefer IPv6 addresses")
- lazy val noTraceSuppression = bool("scala.control.noTraceSuppression", "scala should not suppress any stack trace creation")
+
+ lazy val headless: BooleanProp = BooleanProp.keyExists(HeadlessKey)
+ lazy val preferIPv4Stack: BooleanProp = BooleanProp.keyExists(PreferIPv4StackKey)
+ lazy val preferIPv6Addresses: BooleanProp = BooleanProp.keyExists(PreferIPv6AddressesKey)
+ lazy val noTraceSuppression: BooleanProp = BooleanProp.valueIsTrue(NoTraceSuppressionKey)
@deprecated("Use noTraceSuppression", "2.12.0")
def noTraceSupression = noTraceSuppression
}
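Note: the lazily built help table is replaced by key constants plus a pattern match, and the `headless`/`preferIPv4Stack`/`preferIPv6Addresses`/`noTraceSuppression` props are constructed directly. Usage stays the same; a sketch:

    import scala.sys.SystemProperties

    val props = new SystemProperties                             // mutable Map view over the JVM system properties
    val isHeadless: Boolean = SystemProperties.headless.value    // backed by java.awt.headless
    println(SystemProperties.help("java.net.preferIPv4Stack"))   // "system should prefer IPv4 sockets"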
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index 06b9967908..0ec749e78a 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -26,11 +26,11 @@ import scala.language.implicitConversions
* make it possible for one to block until the process exits and get the exit value,
* or destroy the process altogether.
*
- * Presently, one cannot poll the `Process` to see if it has finished.
- *
* @see [[scala.sys.process.ProcessBuilder]]
*/
trait Process {
+ /** Returns this process alive status */
+ def isAlive(): Boolean
/** Blocks until this process exits and returns the exit code.*/
def exitValue(): Int
/** Destroys this process. */
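Note: `Process` gains `isAlive()`, so callers can poll a running process instead of blocking in `exitValue()`. A usage sketch (the command is illustrative and platform-dependent):

    import scala.sys.process._

    val p: Process = Process("sleep 1").run()
    while (p.isAlive()) Thread.sleep(50)     // poll without blocking on exitValue()
    println(s"exit code: ${p.exitValue()}")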
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index d15f1a2b3d..559c0ee525 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -27,18 +27,18 @@ private[process] trait ProcessImpl {
}
}
private[process] object Future {
- def apply[T](f: => T): () => T = {
+ def apply[T](f: => T): (Thread, () => T) = {
val result = new SyncVar[Either[Throwable, T]]
def run(): Unit =
try result set Right(f)
catch { case e: Exception => result set Left(e) }
- Spawn(run())
+ val t = Spawn(run())
- () => result.get match {
+ (t, () => result.get match {
case Right(value) => value
case Left(exception) => throw exception
- }
+ })
}
}
@@ -84,16 +84,18 @@ private[process] trait ProcessImpl {
}
private[process] abstract class CompoundProcess extends BasicProcess {
+ def isAlive() = processThread.isAlive()
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
+ def exitValue() = getExitValue._2() getOrElse scala.sys.error("No exit code: process destroyed.")
def start() = getExitValue
- protected lazy val (getExitValue, destroyer) = {
+ protected lazy val (processThread, getExitValue, destroyer) = {
val code = new SyncVar[Option[Int]]()
code set None
val thread = Spawn(code set runAndExitValue())
(
+ thread,
Future { thread.join(); code.get },
() => thread.interrupt()
)
@@ -216,7 +218,8 @@ private[process] trait ProcessImpl {
* The implementation of `exitValue` waits until these threads die before returning. */
private[process] class DummyProcess(action: => Int) extends Process {
private[this] val exitCode = Future(action)
- override def exitValue() = exitCode()
+ override def isAlive() = exitCode._1.isAlive()
+ override def exitValue() = exitCode._2()
override def destroy() { }
}
/** A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the
@@ -225,6 +228,7 @@ private[process] trait ProcessImpl {
* The implementation of `exitValue` interrupts `inputThread` and then waits until all I/O threads die before
* returning. */
private[process] class SimpleProcess(p: JProcess, inputThread: Thread, outputThreads: List[Thread]) extends Process {
+ override def isAlive() = p.isAlive()
override def exitValue() = {
try p.waitFor() // wait for the process to terminate
finally inputThread.interrupt() // we interrupt the input thread to notify it that it can terminate
@@ -241,6 +245,7 @@ private[process] trait ProcessImpl {
}
}
private[process] final class ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process {
+ override def isAlive() = thread.isAlive()
override def exitValue() = {
thread.join()
if (success.get) 0 else 1
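Note: the private `Future` helper now returns the spawned `Thread` together with the result thunk, which is what lets `DummyProcess` and `CompoundProcess` answer `isAlive()`. A reduced, hypothetical sketch of that pairing (`spawnWithHandle` and its names are not the library's):

    import scala.concurrent.SyncVar

    def spawnWithHandle[T](body: => T): (Thread, () => T) = {
      val result = new SyncVar[Either[Throwable, T]]
      val t = new Thread(() => try result.put(Right(body)) catch { case e: Exception => result.put(Left(e)) })
      t.start()
      lazy val outcome = result.take()   // taken once, memoized for repeated calls to the thunk
      (t, () => outcome match { case Right(v) => v; case Left(e) => throw e })
    }

    val (worker, await) = spawnWithHandle { Thread.sleep(100); 7 }
    val stillRunning = worker.isAlive()   // liveness without consuming the result
    val seven = await()                   // blocks for the result, rethrows on failure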
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index b4f965f69b..3bda7c0d39 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -45,7 +45,7 @@ object Sorting {
/** Sort an array of Floats using `java.util.Arrays.sort`. */
def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a)
-
+
private final val qsortThreshold = 16
/** Sort array `a` with quicksort, using the Ordering on its elements.
@@ -57,9 +57,9 @@ object Sorting {
def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = {
if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord)
else {
- var iK = (i0 + iN) >>> 1 // Unsigned div by 2
+ val iK = (i0 + iN) >>> 1 // Unsigned div by 2
// Find index of median of first, central, and last elements
- var pL =
+ var pL =
if (ord.compare(a(i0), a(iN - 1)) <= 0)
if (ord.compare(a(i0), a(iK)) < 0)
if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK
@@ -140,9 +140,9 @@ object Sorting {
}
inner(a, 0, a.length, implicitly[Ordering[K]])
}
-
+
private final val mergeThreshold = 32
-
+
// Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort
// Caller must pass iN >= i0 or math will fail. Also, i0 >= 0.
private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = {
@@ -176,7 +176,7 @@ object Sorting {
m += 1
}
}
-
+
// Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0.
private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = {
if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord)
@@ -188,7 +188,7 @@ object Sorting {
mergeSorted(a, i0, iK, iN, ord, sc)
}
}
-
+
// Must have 0 <= i0 < iK < iN
private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = {
// Check to make sure we're not already in order
@@ -212,7 +212,7 @@ object Sorting {
// Don't need to finish a(i) because it's already in place, k = i
}
}
-
+
// Why would you even do this?
private def booleanSort(a: Array[Boolean]): Unit = {
var i = 0
@@ -235,7 +235,7 @@ object Sorting {
// TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible)
// Maybe also rename all these methods to `sort`.
@inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match {
- case _: Array[AnyRef] =>
+ case _: Array[AnyRef] =>
// Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes)
if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering")
java.util.Arrays.sort(a, ord)
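Note: the touched code is the internal quicksort/mergesort machinery (median-of-three pivot selection, insertion-sort thresholds); the public entry points are unchanged. A usage sketch:

    import scala.util.Sorting

    val nums = Array(3.0, 1.0, 2.0)
    Sorting.quickSort(nums)        // primitive overloads delegate to java.util.Arrays.sort

    val words = Array("pear", "apple", "fig")
    Sorting.quickSort(words)       // Ordering-based overload: the insertion/quick sort code above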
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index b1b9965614..84d170249d 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -9,7 +9,6 @@
package scala
package util
-import scala.collection.Seq
import scala.util.control.NonFatal
import scala.language.implicitConversions
@@ -218,12 +217,12 @@ final case class Failure[+T](exception: Throwable) extends Try[T] {
override def isSuccess: Boolean = false
override def get: T = throw exception
override def getOrElse[U >: T](default: => U): U = default
- override def orElse[U >: T](default: => Try[U]): Try[U] =
+ override def orElse[U >: T](default: => Try[U]): Try[U] =
try default catch { case NonFatal(e) => Failure(e) }
override def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
override def foreach[U](f: T => U): Unit = ()
- override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
try f(exception) catch { case NonFatal(e) => Failure(e) }
override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]]
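Note: only whitespace changes land in `Failure`, but the combinators shown in context behave as follows: on a `Failure`, `orElse` and `transform` run the fallback/handler and wrap any non-fatal throw. A usage sketch:

    import scala.util.{Try, Success, Failure}

    val failed: Try[Int] = Failure(new RuntimeException("boom"))
    failed.orElse(Try(42))                                   // Success(42)
    failed.transform(i => Success(i + 1), _ => Success(0))   // Success(0): the failure handler runs
    failed.recover { case _: RuntimeException => -1 }        // Success(-1)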
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index 24c297a2fc..702d3b5e5e 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -10,9 +10,7 @@ package scala
package util
package control
-import scala.collection.immutable.List
import scala.reflect.{ ClassTag, classTag }
-import java.lang.reflect.InvocationTargetException
import scala.language.implicitConversions
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index 953d5b407e..c7fefb1eba 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -55,7 +55,7 @@ object TailCalls {
case Done(a) => Call(() => f(a))
case c@Call(_) => Cont(c, f)
// Take advantage of the monad associative law to optimize the size of the required stack
- case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c f x flatMap f)
+ case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c.f(x) flatMap f)
}
/** Returns either the next step of the tailcalling computation,