author     Paul Phillips <paulp@improving.org>    2012-02-29 11:13:44 -0800
committer  Paul Phillips <paulp@improving.org>    2012-02-29 11:13:44 -0800
commit     a183c6ad31011b4fb1785655dd3d671b8f5bb519 (patch)
tree       bbedd1f23b437d36e4796f1ca7ec40dcc96c18e1 /src/library/scala
parent     fc2866efee1bcf17aee18d427ed41e172f440f62 (diff)
download   scala-a183c6ad31011b4fb1785655dd3d671b8f5bb519.tar.gz
           scala-a183c6ad31011b4fb1785655dd3d671b8f5bb519.tar.bz2
           scala-a183c6ad31011b4fb1785655dd3d671b8f5bb519.zip
Whitespace commit.
Removed all the trailing whitespace to make eugene happier. Will try to keep it that way by protecting at the merge level. Left the tabs in place because they can't be uniformly changed to spaces, some are 2, some are 4, some are 8, whee.
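
The "protecting at the merge level" remark above presumably refers to an automated guard (a hook or pre-merge check) rather than anything contained in this commit. Purely as an illustrative sketch, and not part of the patch below, a minimal trailing-whitespace check of that kind could look like the following Scala snippet; the object name WhitespaceCheck and the convention of passing candidate file paths as arguments are assumptions made for the example.

import java.io.File
import scala.io.Source

object WhitespaceCheck {
  // A line "offends" if stripping a trailing run of whitespace changes it.
  private def hasTrailingWhitespace(line: String): Boolean =
    line != line.replaceAll("""\s+$""", "")

  // Returns the 1-based numbers of the lines in `file` that end in spaces or tabs.
  def offendingLines(file: File): List[Int] = {
    val src = Source.fromFile(file)
    try src.getLines().zipWithIndex.collect {
      case (line, idx) if hasTrailingWhitespace(line) => idx + 1
    }.toList
    finally src.close()
  }

  def main(args: Array[String]): Unit = {
    val problems = for {
      path   <- args.toList
      lineNo <- offendingLines(new File(path))
    } yield s"$path:$lineNo: trailing whitespace"

    problems foreach println
    if (problems.nonEmpty) sys.exit(1) // non-zero exit lets a hook reject the change
  }
}

A hook could run such a check over the changed .scala files and refuse the commit or merge whenever it exits non-zero; the actual mechanism used for the Scala repository is not specified here.
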
Diffstat (limited to 'src/library/scala')
-rw-r--r--  src/library/scala/Enumeration.scala | 6
-rw-r--r--  src/library/scala/Function0.scala | 8
-rw-r--r--  src/library/scala/Function1.scala | 8
-rw-r--r--  src/library/scala/Function2.scala | 6
-rw-r--r--  src/library/scala/PartialFunction.scala | 10
-rw-r--r--  src/library/scala/Product1.scala | 4
-rw-r--r--  src/library/scala/Product10.scala | 4
-rw-r--r--  src/library/scala/Product11.scala | 4
-rw-r--r--  src/library/scala/Product12.scala | 4
-rw-r--r--  src/library/scala/Product13.scala | 4
-rw-r--r--  src/library/scala/Product14.scala | 4
-rw-r--r--  src/library/scala/Product15.scala | 4
-rw-r--r--  src/library/scala/Product16.scala | 4
-rw-r--r--  src/library/scala/Product17.scala | 4
-rw-r--r--  src/library/scala/Product18.scala | 4
-rw-r--r--  src/library/scala/Product19.scala | 4
-rw-r--r--  src/library/scala/Product2.scala | 4
-rw-r--r--  src/library/scala/Product20.scala | 4
-rw-r--r--  src/library/scala/Product21.scala | 4
-rw-r--r--  src/library/scala/Product22.scala | 4
-rw-r--r--  src/library/scala/Product3.scala | 4
-rw-r--r--  src/library/scala/Product4.scala | 4
-rw-r--r--  src/library/scala/Product5.scala | 4
-rw-r--r--  src/library/scala/Product6.scala | 4
-rw-r--r--  src/library/scala/Product7.scala | 4
-rw-r--r--  src/library/scala/Product8.scala | 4
-rw-r--r--  src/library/scala/Product9.scala | 4
-rw-r--r--  src/library/scala/Specializable.scala | 2
-rw-r--r--  src/library/scala/StringContext.scala | 16
-rw-r--r--  src/library/scala/Tuple1.scala | 2
-rw-r--r--  src/library/scala/Tuple10.scala | 2
-rw-r--r--  src/library/scala/Tuple11.scala | 2
-rw-r--r--  src/library/scala/Tuple12.scala | 2
-rw-r--r--  src/library/scala/Tuple13.scala | 2
-rw-r--r--  src/library/scala/Tuple14.scala | 2
-rw-r--r--  src/library/scala/Tuple15.scala | 2
-rw-r--r--  src/library/scala/Tuple16.scala | 2
-rw-r--r--  src/library/scala/Tuple17.scala | 2
-rw-r--r--  src/library/scala/Tuple18.scala | 2
-rw-r--r--  src/library/scala/Tuple19.scala | 2
-rw-r--r--  src/library/scala/Tuple2.scala | 2
-rw-r--r--  src/library/scala/Tuple20.scala | 2
-rw-r--r--  src/library/scala/Tuple21.scala | 2
-rw-r--r--  src/library/scala/Tuple22.scala | 2
-rw-r--r--  src/library/scala/Tuple3.scala | 2
-rw-r--r--  src/library/scala/Tuple4.scala | 2
-rw-r--r--  src/library/scala/Tuple5.scala | 2
-rw-r--r--  src/library/scala/Tuple6.scala | 2
-rw-r--r--  src/library/scala/Tuple7.scala | 2
-rw-r--r--  src/library/scala/Tuple8.scala | 2
-rw-r--r--  src/library/scala/Tuple9.scala | 2
-rw-r--r--  src/library/scala/annotation/elidable.scala | 4
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/MutableSortedSetFactory.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 10
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 13
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 26
-rw-r--r--  src/library/scala/collection/mutable/Ctrie.scala | 248
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 40
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/SortedSet.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/TreeSet.scala | 14
-rw-r--r--  src/library/scala/collection/parallel/Combiner.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 50
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 16
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 10
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParCtrie.scala | 78
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 14
-rw-r--r--  src/library/scala/concurrent/Channel.scala | 6
-rw-r--r--  src/library/scala/concurrent/ConcurrentPackageObject.scala | 30
-rw-r--r--  src/library/scala/concurrent/DelayedLazyVal.scala | 8
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 56
-rw-r--r--  src/library/scala/concurrent/Future.scala | 194
-rw-r--r--  src/library/scala/concurrent/JavaConversions.scala | 6
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 64
-rw-r--r--  src/library/scala/concurrent/Task.scala | 6
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 34
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 20
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 74
-rw-r--r--  src/library/scala/concurrent/package.scala | 14
-rw-r--r--  src/library/scala/reflect/ReflectionUtils.scala | 4
-rw-r--r--  src/library/scala/reflect/api/Mirror.scala | 4
-rw-r--r--  src/library/scala/reflect/api/Modifier.scala | 2
-rwxr-xr-x  src/library/scala/reflect/api/Names.scala | 4
-rwxr-xr-x  src/library/scala/reflect/api/Symbols.scala | 10
-rw-r--r--  src/library/scala/reflect/api/TreePrinters.scala | 8
-rwxr-xr-x  src/library/scala/reflect/api/Types.scala | 2
-rw-r--r--  src/library/scala/reflect/macro/Context.scala | 4
-rw-r--r--  src/library/scala/specialized.scala | 2
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/util/Properties.scala | 2
-rw-r--r--  src/library/scala/util/Try.scala | 32
-rw-r--r--  src/library/scala/util/parsing/combinator/Parsers.scala | 2
97 files changed, 679 insertions, 680 deletions
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 3d85f2f52f..80571943e5 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -55,7 +55,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
thisenum =>
def this() = this(0)
-
+
@deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
def this(initial: Int, names: String*) = {
this(initial)
@@ -201,7 +201,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
case _ => false
}
override def hashCode: Int = id.##
-
+
/** Create a ValueSet which contains this value and another one */
def + (v: Value) = ValueSet(this, v)
}
@@ -266,7 +266,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
* new array of longs */
def toBitMask: Array[Long] = nnIds.toBitMask
}
-
+
/** A factory object for value sets */
object ValueSet {
import generic.CanBuildFrom
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index 508ef25e81..dceed26439 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -12,12 +12,12 @@ package scala
/** A function of 0 parameters.
- *
+ *
* In the following example, the definition of javaVersion is a
* shorthand for the anonymous class definition anonfun0:
*
* {{{
- * object Main extends App {
+ * object Main extends App {
* val javaVersion = () => sys.props("java.version")
*
* val anonfun0 = new Function0[String] {
@@ -31,13 +31,13 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
-
+
*/
trait Function0[@specialized +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(): R
-
+
override def toString() = "<function0>"
}
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 06936e54cb..8995ef912b 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -11,12 +11,12 @@ package scala
/** A function of 1 parameter.
- *
+ *
* In the following example, the definition of succ is a
* shorthand for the anonymous class definition anonfun1:
*
* {{{
- * object Main extends App {
+ * object Main extends App {
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
* def apply(x: Int): Int = x + 1
@@ -29,7 +29,7 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
-
+
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends AnyRef { self =>
@@ -37,7 +37,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, s
* @return the result of function application.
*/
def apply(v1: T1): R
-
+
/** Composes two instances of Function1 in a new Function1, with this function applied last.
*
* @tparam A the type to which function `g` can be applied
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 1812f042e0..cacb96ef5d 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -11,12 +11,12 @@ package scala
/** A function of 2 parameters.
- *
+ *
* In the following example, the definition of max is a
* shorthand for the anonymous class definition anonfun2:
*
* {{{
- * object Main extends App {
+ * object Main extends App {
* val max = (x: Int, y: Int) => if (x < y) y else x
*
* val anonfun2 = new Function2[Int, Int, Int] {
@@ -30,7 +30,7 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
-
+
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 70caff0221..3c5d6d0d23 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -26,18 +26,18 @@ package scala
*
* {{{
* val sample = 1 to 10
- * val isEven: PartialFunction[Int, String] = {
- * case x if x % 2 == 0 => x+" is even"
+ * val isEven: PartialFunction[Int, String] = {
+ * case x if x % 2 == 0 => x+" is even"
* }
*
* // the method collect can use isDefinedAt to select which members to collect
* val evenNumbers = sample collect isEven
*
- * val isOdd: PartialFunction[Int, String] = {
- * case x if x % 2 == 1 => x+" is odd"
+ * val isOdd: PartialFunction[Int, String] = {
+ * case x if x % 2 == 1 => x+" is odd"
* }
*
- * // the method orElse allows chaining another partial function to handle
+ * // the method orElse allows chaining another partial function to handle
* // input outside the declared domain
* val numbers = sample map (isEven orElse isOdd)
* }}}
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index 0106ad34ee..ab8b0a4505 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -23,7 +23,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
*/
override def productArity = 1
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case _ => throw new IndexOutOfBoundsException(n.toString())
}
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index ca53b580c0..536fb2fed9 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -23,7 +23,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ
*/
override def productArity = 10
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 3d5942f3fa..7d49eccc5e 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -23,7 +23,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
*/
override def productArity = 11
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index 803193793c..0e9c4a01a2 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -23,7 +23,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
*/
override def productArity = 12
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index 0c1d889624..a0629201d0 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -23,7 +23,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 13
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 0222309a0a..32dda81c3e 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -23,7 +23,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 14
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 41be7ec504..57851f9870 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -23,7 +23,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 15
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index accee3f965..75076f3b3c 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -23,7 +23,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 16
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index da80ae9a6b..9ee6072ffe 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -23,7 +23,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 17
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index ea25647762..25d0839af1 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -23,7 +23,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 18
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index 5d4347c1a8..5464de7264 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -23,7 +23,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 19
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index 4e6c70f463..8097245926 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -23,7 +23,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
*/
override def productArity = 2
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case _ => throw new IndexOutOfBoundsException(n.toString())
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index f23a0dee3a..b094e09aca 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -23,7 +23,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 20
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index 4a4fe0697f..fa06cfb438 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -23,7 +23,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 21
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 7ee01b85ae..46038bf1a2 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -23,7 +23,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 22
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 23563c9e23..3a4cd8fc5e 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -23,7 +23,7 @@ trait Product3[+T1, +T2, +T3] extends Product {
*/
override def productArity = 3
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product3[+T1, +T2, +T3] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 4abaa9051b..a4d47457fa 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -23,7 +23,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product {
*/
override def productArity = 4
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 9aa4af58b7..9f25e70af0 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -23,7 +23,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
*/
override def productArity = 5
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 2ca1d7c31e..87fd318c68 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -23,7 +23,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
*/
override def productArity = 6
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index b7af2d3e32..d074503315 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -23,7 +23,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
*/
override def productArity = 7
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 17b5e48512..bd6150c235 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -23,7 +23,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
*/
override def productArity = 8
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index 784e9a7029..1f042944cc 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -23,7 +23,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
*/
override def productArity = 9
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index 811a735110..67126b3069 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -16,7 +16,7 @@ trait Specializable extends SpecializableCompanion
object Specializable {
// No type parameter in @specialized annotation.
trait SpecializedGroup { }
-
+
// Smuggle a list of types by way of a tuple upon which Group is parameterized.
class Group[T >: Null](value: T) extends SpecializedGroup { }
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 6116547aa2..8ca312afc5 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -13,7 +13,7 @@ import collection.mutable.ArrayBuffer
/** A class to support string interpolation.
* This class supports string interpolation as outlined in Scala SIP-11.
* It needs to be fully documented once the SIP is accepted.
- *
+ *
* @param parts The parts that make up the interpolated string,
* without the expressions that get inserted by interpolation.
*/
@@ -26,13 +26,13 @@ case class StringContext(parts: String*) {
* @param `args` The arguments to be checked.
* @throws An `IllegalArgumentException` if this is not the case.
*/
- def checkLengths(args: Any*): Unit =
+ def checkLengths(args: Any*): Unit =
if (parts.length != args.length + 1)
throw new IllegalArgumentException("wrong number of arguments for interpolated string")
/** The simple string interpolator.
- *
+ *
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
* @param `args` The arguments to be inserted into the resulting string.
@@ -55,21 +55,21 @@ case class StringContext(parts: String*) {
}
/** The formatted string interpolator.
- *
+ *
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
* Finally, if an interpolated expression is followed by a `parts` string
* that starts with a formatting specifier, the expression is formatted according to that
* specifier. All specifiers allowed in Java format strings are handled, and in the same
* way they are treated in Java.
- *
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
* @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
- *
+ *
* Note: The `f` method works by assembling a format string from all the `parts` strings and using
* `java.lang.String.format` to format all arguments with that format string. The format string is
* obtained by concatenating all `parts` strings, and performing two transformations:
@@ -125,14 +125,14 @@ object StringContext {
* @param idx The index of the offending backslash character in `str`.
*/
class InvalidEscapeException(str: String, idx: Int)
- extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
+ extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
/** Expands standard Scala escape sequences in a string.
* Escape sequences are:
* control: `\b`, `\t`, `\n`, `\f`, `\r`
* escape: `\\`, `\"`, `\'`
* octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`.
- *
+ *
* @param A string that may contain escape sequences
* @return The string with all escape sequences expanded.
*/
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 02fdd0cba5..6d31d35e51 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -19,5 +19,5 @@ case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
extends Product1[T1]
{
override def toString() = "(" + _1 + ")"
-
+
}
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index ba2a02a8b2..10d554d467 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -28,5 +28,5 @@ case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2
extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")"
-
+
}
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 7f51d172d4..2065e4f017 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -29,5 +29,5 @@ case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1:
extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")"
-
+
}
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index 4bbc6a0eab..a463986752 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -31,5 +31,5 @@ case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")"
-
+
}
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 77bd59bf2e..2bee0d69ad 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -32,5 +32,5 @@ case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")"
-
+
}
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index bf7a4ce016..60f7c51e64 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -33,5 +33,5 @@ case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")"
-
+
}
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 582c359bc6..fc8e30580b 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -34,5 +34,5 @@ case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")"
-
+
}
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index a1e9a790ff..80181f6648 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -35,5 +35,5 @@ case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")"
-
+
}
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index f531766c18..6236122be2 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -36,5 +36,5 @@ case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")"
-
+
}
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index a96db25e4b..dd6a819ac5 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -37,5 +37,5 @@ case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")"
-
+
}
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 718280d68a..65f0fd22cf 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -38,5 +38,5 @@ case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")"
-
+
}
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index b1befca4fa..684d2266e8 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -23,7 +23,7 @@ case class Tuple2[@specialized(Int, Long, Double, Char, Boolean, AnyRef) +T1, @s
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
-
+
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
* second element is the first element of this Tuple.
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index 4a44c0bb89..cf3626909d 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -39,5 +39,5 @@ case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")"
-
+
}
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 580a169e39..78b9c585c6 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -40,5 +40,5 @@ case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")"
-
+
}
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index fd3392ddea..0993dfbbc3 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -41,5 +41,5 @@ case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 +
"," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")"
-
+
}
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 0d5399308b..dfa0c962a2 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -24,7 +24,7 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-
+
@deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a859078bcf..a919072c88 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -22,5 +22,5 @@ case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
extends Product4[T1, T2, T3, T4]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")"
-
+
}
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 1edfb673ee..6a94f48ab4 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -23,5 +23,5 @@ case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T
extends Product5[T1, T2, T3, T4, T5]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")"
-
+
}
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 5b74937e58..34f8224627 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -24,5 +24,5 @@ case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4,
extends Product6[T1, T2, T3, T4, T5, T6]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")"
-
+
}
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index a7f572e9f0..6fc3477ba2 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -25,5 +25,5 @@ case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4:
extends Product7[T1, T2, T3, T4, T5, T6, T7]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")"
-
+
}
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 9bb427d689..1e21b684fc 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -26,5 +26,5 @@ case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3
extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")"
-
+
}
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 4d50539e0c..453cea31a1 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -27,5 +27,5 @@ case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _
extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")"
-
+
}
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 053cdba220..880b645daa 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -52,8 +52,8 @@ import java.util.logging.Level
// INFO lies between WARNING and FINE
% scalac -Xelide-below INFO example.scala && scala Test
Warning! Danger! Warning!
- I computed a value: 0
- }}}
+ I computed a value: 0
+ }}}
*
* @author Paul Phillips
* @since 2.8
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 02298ef096..b51a37cf9e 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -151,7 +151,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
def next(): Repr = {
if (!hasNext)
Iterator.empty.next
-
+
val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
val result = (self.newBuilder ++= forcedElms).result
var i = idxs.length - 2
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index b235379575..cbbedc0231 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -11,12 +11,12 @@ package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
-/**
+/**
* @define Coll mutable.SortedSet
* @define coll mutable sorted
*
* @author Lucien Pereira
- *
+ *
*/
abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] {
@@ -26,7 +26,7 @@ abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with Sorted
* is evaluated elems is cloned (which is O(n)).
*
* Fortunately GrowingBuilder comes to rescue.
- *
+ *
*/
override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty)
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index abccd91f9c..870d5534dc 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -131,7 +131,7 @@ object BitSet extends BitSetFactory[BitSet] {
* the capacity of two long values). The constructor wraps an existing
* bit mask without copying, thus exposing a mutable part of the internal
* implementation. Care needs to be taken not to modify the exposed
- * array.
+ * array.
*/
class BitSetN(val elems: Array[Long]) extends BitSet {
protected def nwords = elems.length
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index e2a4a09938..381fcf3117 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -326,13 +326,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def head : B = hd
override def tail : List[B] = tl
override def isEmpty: Boolean = false
-
+
private def writeObject(out: ObjectOutputStream) {
out.writeObject(ListSerializeStart) // needed to differentiate with the legacy `::` serialization
out.writeObject(this.hd)
out.writeObject(this.tl)
}
-
+
private def readObject(in: ObjectInputStream) {
val obj = in.readObject()
if (obj == ListSerializeStart) {
@@ -340,7 +340,7 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
this.tl = in.readObject().asInstanceOf[List[B]]
} else oldReadObject(in, obj)
}
-
+
/* The oldReadObject method exists here for compatibility reasons.
* :: objects used to be serialized by serializing all the elements to
* the output stream directly, but this was broken (see SI-5374).
@@ -359,13 +359,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
current = list
}
}
-
+
private def oldWriteObject(out: ObjectOutputStream) {
var xs: List[B] = this
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
}
-
+
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 7537558f0b..b72d83f896 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -77,9 +77,9 @@ extends collection.AbstractSeq[Int]
}
final val lastElement = start + (numRangeElements - 1) * step
final val terminalElement = start + numRangeElements * step
-
+
override def last = if (isEmpty) Nil.last else lastElement
-
+
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Create a new range with the `start` and `end` values of this range and
@@ -93,14 +93,14 @@ extends collection.AbstractSeq[Int]
override def size = length
override def length = if (numRangeElements < 0) fail() else numRangeElements
-
+
private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step)
private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.")
private def validateMaxLength() {
if (numRangeElements < 0)
fail()
}
-
+
def validateRangeBoundaries(f: Int => Any): Boolean = {
validateMaxLength()
@@ -121,7 +121,7 @@ extends collection.AbstractSeq[Int]
if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
else start + (step * idx)
}
-
+
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
if (validateRangeBoundaries(f)) {
var i = start
@@ -309,7 +309,7 @@ object Range {
// number of full-sized jumps.
val hasStub = isInclusive || (gap % step != 0)
val result: Long = jumps + ( if (hasStub) 1 else 0 )
-
+
if (result > scala.Int.MaxValue) -1
else result.toInt
}
@@ -405,4 +405,3 @@ object Range {
// super.foreach(f)
}
}
- \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index ba2af8f120..9aea25f330 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -12,9 +12,9 @@ package mutable
/**
* An immutable AVL Tree implementation used by mutable.TreeSet
- *
+ *
* @author Lucien Pereira
- *
+ *
*/
private[mutable] sealed trait AVLTree[+A] extends Serializable {
def balance: Int
@@ -28,28 +28,28 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
/**
* Returns a new tree containing the given element.
* Thows an IllegalArgumentException if element is already present.
- *
+ *
*/
def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
/**
* Return a new tree which not contains given element.
- *
+ *
*/
def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
throw new NoSuchElementException(String.valueOf(value))
-
+
/**
* Return a tuple containing the smallest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
+
/**
* Return a tuple containing the biggest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
@@ -90,7 +90,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Returns a new tree containing the given element.
* Thows an IllegalArgumentException if element is already present.
- *
+ *
*/
override def insert[B >: A](value: B, ordering: Ordering[B]) = {
val ord = ordering.compare(value, data)
@@ -104,7 +104,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a new tree which not contains given element.
- *
+ *
*/
override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
val ord = ordering.compare(value, data)
@@ -130,7 +130,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a tuple containing the smallest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
override def removeMin[B >: A]: (B, AVLTree[B]) = {
if (Leaf == left)
@@ -144,7 +144,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a tuple containing the biggest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
override def removeMax[B >: A]: (B, AVLTree[B]) = {
if (Leaf == right)
@@ -154,7 +154,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
(max, Node(data, left, newRight).rebalance)
}
}
-
+
override def rebalance[B >: A] = {
if (-2 == balance) {
if (1 == left.balance)
diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala
index 699b96b87c..cbec118aa9 100644
--- a/src/library/scala/collection/mutable/Ctrie.scala
+++ b/src/library/scala/collection/mutable/Ctrie.scala
@@ -22,29 +22,29 @@ import annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
-
+
WRITE(bn)
-
+
def this(g: Gen) = this(null, g)
-
+
@inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
-
+
@inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
-
+
final def gcasRead(ct: Ctrie[K, V]): MainNode[K, V] = GCAS_READ(ct)
-
+
@inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = {
val m = /*READ*/mainnode
val prevval = /*READ*/m.prev
if (prevval eq null) m
else GCAS_Complete(m, ct)
}
-
+
@tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else {
// complete the GCAS
val prev = /*READ*/m.prev
val ctr = ct.readRoot(true)
-
+
prev match {
case null =>
m
@@ -71,7 +71,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
}
-
+
@inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = {
n.WRITE_PREV(old)
if (CAS(old, n)) {
@@ -79,27 +79,27 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
/*READ*/n.prev eq null
} else false
}
-
+
@inline private def inode(cn: MainNode[K, V]) = {
val nin = new INode[K, V](gen)
nin.WRITE(cn)
nin
}
-
+
final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = {
val nin = new INode[K, V](ngen)
val main = GCAS_READ(ct)
nin.WRITE(main)
nin
}
-
+
/** Inserts a key value pair, overwriting the old pair if the keys match.
- *
+ *
* @return true if successful, false otherwise
*/
@tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] => // 1) a multiway node
val idx = (hc >>> lev) & 0x1f
@@ -137,7 +137,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
GCAS(ln, nn, ct)
}
}
-
+
/** Inserts a new key value pair, given that a specific condition is met.
*
* @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
@@ -145,7 +145,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*/
@tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] => // 1) a multiway node
val idx = (hc >>> lev) & 0x1f
@@ -228,14 +228,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
}
-
+
/** Looks up the value associated with the key.
- *
+ *
* @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
*/
@tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] => // 1) a multinode
val idx = (hc >>> lev) & 0x1f
@@ -270,15 +270,15 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
ln.get(k).asInstanceOf[Option[AnyRef]].orNull
}
}
-
+
/** Removes the key associated with the given value.
- *
+ *
* @param v if null, will remove the key irregardless of the value; otherwise removes only if binding contains that exact key and value
* @return null if not successful, an Option[V] indicating the previous value otherwise
*/
final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] =>
val idx = (hc >>> lev) & 0x1f
@@ -289,7 +289,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
val pos = Integer.bitCount(bmp & (flag - 1))
val sub = cn.array(pos)
val res = sub match {
- case in: INode[K, V] =>
+ case in: INode[K, V] =>
if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct)
else {
if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct)
@@ -301,7 +301,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
if (GCAS(cn, ncn, ct)) Some(sn.v) else null
} else None
}
-
+
if (res == None || (res eq null)) res
else {
@tailrec def cleanParent(nonlive: AnyRef) {
@@ -325,13 +325,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case _ => // parent is no longer a cnode, we're done
}
}
-
+
if (parent ne null) { // never tomb at root
val n = GCAS_READ(ct)
if (n.isInstanceOf[TNode[_, _]])
cleanParent(n)
}
-
+
res
}
}
@@ -351,7 +351,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
}
-
+
private def clean(nd: INode[K, V], ct: Ctrie[K, V], lev: Int) {
val m = nd.GCAS_READ(ct)
m match {
@@ -359,14 +359,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case _ =>
}
}
-
+
final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null
-
+
final def cachedSize(ct: Ctrie[K, V]): Int = {
val m = GCAS_READ(ct)
m.cachedSize(ct)
}
-
+
/* this is a quiescent method! */
def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match {
case null => "<null>"
@@ -375,14 +375,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case ln: LNode[_, _] => ln.string(lev)
case x => "<elem: %s>".format(x)
})
-
+
}
private[mutable] object INode {
val KEY_PRESENT = new AnyRef
val KEY_ABSENT = new AnyRef
-
+
def newRootNode[K, V] = {
val gen = new Gen
val cn = new CNode[K, V](0, new Array(0), gen)
@@ -393,11 +393,11 @@ private[mutable] object INode {
private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] {
WRITE_PREV(p)
-
+
def string(lev: Int) = throw new UnsupportedOperationException
-
+
def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException
-
+
override def toString = "FailedNode(%s)".format(p)
}
@@ -449,7 +449,7 @@ extends MainNode[K, V] {
private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen)
extends CNodeBase[K, V] {
-
+
// this should only be called from within read-only snapshots
final def cachedSize(ct: AnyRef) = {
val currsz = READ_SIZE()
@@ -460,7 +460,7 @@ extends CNodeBase[K, V] {
READ_SIZE()
}
}
-
+
// lends itself towards being parallelizable by choosing
// a random starting offset in the array
// => if there are concurrent size computations, they start
@@ -480,7 +480,7 @@ extends CNodeBase[K, V] {
}
sz
}
-
+
final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val narr = new Array[BasicNode](len)
@@ -488,7 +488,7 @@ extends CNodeBase[K, V] {
narr(pos) = nn
new CNode[K, V](bitmap, narr, gen)
}
-
+
final def removedAt(pos: Int, flag: Int, gen: Gen) = {
val arr = array
val len = arr.length
@@ -497,7 +497,7 @@ extends CNodeBase[K, V] {
Array.copy(arr, pos + 1, narr, pos, len - pos - 1)
new CNode[K, V](bitmap ^ flag, narr, gen)
}
-
+
final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val bmp = bitmap
@@ -507,7 +507,7 @@ extends CNodeBase[K, V] {
Array.copy(array, pos, narr, pos + 1, len - pos)
new CNode[K, V](bmp | flag, narr, gen)
}
-
+
/** Returns a copy of this cnode such that all the i-nodes below it are copied
* to the specified generation `ngen`.
*/
@@ -525,17 +525,17 @@ extends CNodeBase[K, V] {
}
new CNode[K, V](bitmap, narr, ngen)
}
-
+
private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match {
case tn: TNode[_, _] => tn.copyUntombed
case _ => inode
}
-
+
final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
case sn: SNode[K, V] => sn.copyTombed
case _ => this
} else this
-
+
// - if the branching factor is 1 for this CNode, and the child
// is a tombed SNode, returns its tombed version
// - otherwise, if there is at least one non-null node below,
@@ -559,12 +559,12 @@ extends CNodeBase[K, V] {
}
i += 1
}
-
+
new CNode[K, V](bmp, tmparray, gen).toContracted(lev)
}
-
+
private[mutable] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
-
+
/* quiescently consistent - don't call concurrently with anything involving a GCAS!! */
protected def collectElems: Seq[(K, V)] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair)
@@ -574,12 +574,12 @@ extends CNodeBase[K, V] {
case cn: CNode[K, V] => cn.collectElems
}
}
-
+
protected def collectLocalElems: Seq[String] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
}
-
+
override def toString = {
val elems = collectLocalElems
"CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", "))
@@ -588,7 +588,7 @@ extends CNodeBase[K, V] {
private[mutable] object CNode {
-
+
def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) {
val xidx = (xhc >>> lev) & 0x1f
val yidx = (yhc >>> lev) & 0x1f
@@ -604,7 +604,7 @@ private[mutable] object CNode {
} else {
new LNode(x.k, x.v, y.k, y.v)
}
-
+
}
@@ -620,9 +620,9 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai
* lock-free snapshots which are used to implement linearizable lock-free size,
* iterator and clear operations. The cost of evaluating the (lazy) snapshot is
* distributed across subsequent updates, thus making snapshot evaluation horizontally scalable.
- *
+ *
* For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf
- *
+ *
* @author Aleksandar Prokopec
* @since 2.10
*/
@@ -634,17 +634,17 @@ extends ConcurrentMap[K, V]
with Serializable
{
import Ctrie.computeHash
-
+
private var rootupdater = rtupd
@volatile var root = r
-
+
def this() = this(
INode.newRootNode,
AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
)
-
+
/* internal methods */
-
+
private def writeObject(out: java.io.ObjectOutputStream) {
val it = iterator
while (it.hasNext) {
@@ -654,11 +654,11 @@ extends ConcurrentMap[K, V]
}
out.writeObject(CtrieSerializationEnd)
}
-
+
private def readObject(in: java.io.ObjectInputStream) {
root = INode.newRootNode
rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
-
+
var obj: AnyRef = null
do {
obj = in.readObject()
@@ -669,11 +669,11 @@ extends ConcurrentMap[K, V]
}
} while (obj != CtrieSerializationEnd)
}
-
+
@inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
-
+
final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
-
+
@inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
val r = /*READ*/root
r match {
@@ -681,7 +681,7 @@ extends ConcurrentMap[K, V]
case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort)
}
}
-
+
@tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = {
val v = /*READ*/root
v match {
@@ -705,7 +705,7 @@ extends ConcurrentMap[K, V]
}
}
}
-
+
private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
val desc = RDCSS_Descriptor(ov, expectedmain, nv)
if (CAS_ROOT(ov, desc)) {
@@ -713,27 +713,27 @@ extends ConcurrentMap[K, V]
/*READ*/desc.committed
} else false
}
-
+
@tailrec private def inserthc(k: K, hc: Int, v: V) {
val r = RDCSS_READ_ROOT()
if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v)
}
-
+
@tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = {
val r = RDCSS_READ_ROOT()
-
+
val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this)
if (ret eq null) insertifhc(k, hc, v, cond)
else ret
}
-
+
@tailrec private def lookuphc(k: K, hc: Int): AnyRef = {
val r = RDCSS_READ_ROOT()
val res = r.rec_lookup(k, hc, 0, null, r.gen, this)
if (res eq INodeBase.RESTART) lookuphc(k, hc)
else res
}
-
+
/* slower:
//@tailrec
private def lookuphc(k: K, hc: Int): AnyRef = {
@@ -746,31 +746,31 @@ extends ConcurrentMap[K, V]
}
}
*/
-
+
@tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = {
val r = RDCSS_READ_ROOT()
val res = r.rec_remove(k, v, hc, 0, null, r.gen, this)
if (res ne null) res
else removehc(k, v, hc)
}
-
+
def string = RDCSS_READ_ROOT().string(0)
-
+
/* public methods */
-
+
override def seq = this
-
+
override def par = new ParCtrie(this)
-
+
override def empty: Ctrie[K, V] = new Ctrie[K, V]
-
+
final def isReadOnly = rootupdater eq null
-
+
final def nonReadOnly = rootupdater ne null
-
+
/** Returns a snapshot of this Ctrie.
* This operation is lock-free and linearizable.
- *
+ *
* The snapshot is lazily updated - the first time some branch
* in the snapshot or this Ctrie is accessed, it is rewritten.
* This means that the work of rebuilding both the snapshot and this
@@ -783,17 +783,17 @@ extends ConcurrentMap[K, V]
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater)
else snapshot()
}
-
+
/** Returns a read-only snapshot of this Ctrie.
* This operation is lock-free and linearizable.
- *
+ *
* The snapshot is lazily updated - the first time some branch
* of this Ctrie is accessed, it is rewritten. The work of creating
* the snapshot is thus distributed across subsequent updates
* and accesses on this Ctrie by all threads.
* Note that the snapshot itself is never rewritten unlike when calling
* the `snapshot` method, but the obtained snapshot cannot be modified.
- *
+ *
* This method is used by other methods such as `size` and `iterator`.
*/
@tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
@@ -802,106 +802,106 @@ extends ConcurrentMap[K, V]
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null)
else readOnlySnapshot()
}
-
+
@tailrec final override def clear() {
val r = RDCSS_READ_ROOT()
if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
}
-
+
final def lookup(k: K): V = {
val hc = computeHash(k)
lookuphc(k, hc).asInstanceOf[V]
}
-
+
final override def apply(k: K): V = {
val hc = computeHash(k)
val res = lookuphc(k, hc)
if (res eq null) throw new NoSuchElementException
else res.asInstanceOf[V]
}
-
+
final def get(k: K): Option[V] = {
val hc = computeHash(k)
Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
}
-
+
override def put(key: K, value: V): Option[V] = {
val hc = computeHash(key)
insertifhc(key, hc, value, null)
}
-
+
final override def update(k: K, v: V) {
val hc = computeHash(k)
inserthc(k, hc, v)
}
-
+
final def +=(kv: (K, V)) = {
update(kv._1, kv._2)
this
}
-
+
final override def remove(k: K): Option[V] = {
val hc = computeHash(k)
removehc(k, null.asInstanceOf[V], hc)
}
-
+
final def -=(k: K) = {
remove(k)
this
}
-
+
def putIfAbsent(k: K, v: V): Option[V] = {
val hc = computeHash(k)
insertifhc(k, hc, v, INode.KEY_ABSENT)
}
-
+
def remove(k: K, v: V): Boolean = {
val hc = computeHash(k)
removehc(k, v, hc).nonEmpty
}
-
+
def replace(k: K, oldvalue: V, newvalue: V): Boolean = {
val hc = computeHash(k)
insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty
}
-
+
def replace(k: K, v: V): Option[V] = {
val hc = computeHash(k)
insertifhc(k, hc, v, INode.KEY_PRESENT)
}
-
+
def iterator: Iterator[(K, V)] =
if (nonReadOnly) readOnlySnapshot().iterator
else new CtrieIterator(0, this)
-
+
private def cachedSize() = {
val r = RDCSS_READ_ROOT()
r.cachedSize(this)
}
-
+
override def size: Int =
if (nonReadOnly) readOnlySnapshot().size
else cachedSize()
-
+
override def stringPrefix = "Ctrie"
-
+
}
object Ctrie extends MutableMapFactory[Ctrie] {
val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
-
+
implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V]
-
+
def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V]
-
+
@inline final def computeHash[K](k: K): Int = {
var hcode = k.hashCode
hcode = hcode * 0x9e3775cd
hcode = java.lang.Integer.reverseBytes(hcode)
hcode * 0x9e3775cd
}
-
+
}
@@ -911,11 +911,11 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
var depth = -1
var subiter: Iterator[(K, V)] = null
var current: KVNode[K, V] = null
-
+
if (mustInit) initialize()
-
+
def hasNext = (current ne null) || (subiter ne null)
-
+
def next() = if (hasNext) {
var r: (K, V) = null
if (subiter ne null) {
@@ -927,7 +927,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
}
r
} else Iterator.empty.next()
-
+
private def readin(in: INode[K, V]) = in.gcasRead(ct) match {
case cn: CNode[K, V] =>
depth += 1
@@ -942,19 +942,19 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
case null =>
current = null
}
-
+
@inline private def checkSubiter() = if (!subiter.hasNext) {
subiter = null
advance()
}
-
+
@inline private def initialize() {
assert(ct.isReadOnly)
-
+
val r = ct.RDCSS_READ_ROOT()
readin(r)
}
-
+
def advance(): Unit = if (depth >= 0) {
val npos = stackpos(depth) + 1
if (npos < stack(depth).length) {
@@ -970,19 +970,19 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
advance()
}
} else current = null
-
+
protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit)
-
+
protected def dupTo(it: CtrieIterator[K, V]) = {
it.level = this.level
it.ct = this.ct
it.depth = this.depth
it.current = this.current
-
+
// these need a deep copy
Array.copy(this.stack, 0, it.stack, 0, 7)
Array.copy(this.stackpos, 0, it.stackpos, 0, 7)
-
+
// this one needs to be evaluated
if (this.subiter == null) it.subiter = null
else {
@@ -991,7 +991,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
it.subiter = lst.iterator
}
}
-
+
/** Returns a sequence of iterators over subsets of this iterator.
* It's used to ease the implementation of splitters for a parallel version of the Ctrie.
*/
@@ -1026,7 +1026,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
this.level += 1
Seq(this)
}
-
+
def printDebug {
println("ctrie iterator")
println(stackpos.mkString(","))
@@ -1034,7 +1034,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
println("curr.: " + current)
println(stack.mkString("\n"))
}
-
+
}
@@ -1048,20 +1048,20 @@ private[mutable] case object CtrieSerializationEnd
private[mutable] object Debug {
import collection._
-
+
lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
-
+
def log(s: AnyRef) = logbuffer.add(s)
-
+
def flush() {
for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString)
logbuffer.clear()
}
-
+
def clear() {
logbuffer.clear()
}
-
+
}
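
The scaladoc in the hunks above describes lock-free, constant-time snapshots whose rebuilding cost is amortized over later updates, together with the `put`/`get`/`readOnlySnapshot`/`size` operations touched by this diff. A minimal usage sketch, assuming the 2.10-era `scala.collection.mutable.Ctrie` from this file is on the classpath (keys and values are illustrative):

    import scala.collection.mutable.Ctrie

    object CtrieSnapshotDemo {
      def main(args: Array[String]) {
        val m = new Ctrie[String, Int]()
        m.put("a", 1)
        m += (("b", 2))

        // readOnlySnapshot is O(1); the rebuilding work is spread over later updates.
        val snap = m.readOnlySnapshot()
        m.put("c", 3)            // visible in m, not in snap

        println(snap.size)       // 2
        println(m.size)          // 3
        println(m.get("c"))      // Some(3)
      }
    }

The mutable `snapshot()` variant documented above behaves the same way, but the returned copy can itself be updated afterwards.
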
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f3fb6738eb..ee6d4d1d22 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -43,19 +43,19 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** The array keeping track of number of elements in 32 element blocks.
*/
@transient protected var sizemap: Array[Int] = null
-
+
@transient var seedvalue: Int = tableSizeSeed
-
+
import HashTable.powerOfTwo
-
+
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
-
+
private def initialCapacity = capacity(initialSize)
-
+
protected def randomSeed = seedGenerator.get.nextInt()
-
+
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
-
+
/**
* Initializes the collection from the input stream. `f` will be called for each element
* read from the input stream in the order determined by the stream. This is useful for
@@ -65,22 +65,22 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
in.defaultReadObject
-
+
_loadFactor = in.readInt()
assert(_loadFactor > 0)
-
+
val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
+
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
threshold = newThreshold(_loadFactor, table.size)
-
+
seedvalue = in.readInt()
-
+
val smDefined = in.readBoolean()
if (smDefined) sizeMapInit(table.length) else sizemap = null
-
+
var index = 0
while (index < size) {
val elem = in.readObject().asInstanceOf[A]
@@ -295,12 +295,12 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
protected final def index(hcode: Int) = {
// version 1 (no longer used - did not work with parallel hash tables)
// improve(hcode) & (table.length - 1)
-
+
// version 2 (allows for parallel hash table construction)
val improved = improve(hcode, seedvalue)
val ones = table.length - 1
(improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
-
+
// version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables)
// val hc = improve(hcode)
// val bbp = blockbitpos
@@ -345,17 +345,17 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
private[collection] object FlatHashTable {
-
+
/** Creates a specific seed to improve the hashcode of a hash table instance
* and ensure that iteration order vulnerabilities are not 'felt' in other
* hash tables.
- *
+ *
* See SI-5293.
*/
final def seedGenerator = new ThreadLocal[util.Random] {
override def initialValue = new util.Random
}
-
+
/** The load factor for the hash table; must be < 500 (0.5)
*/
def defaultLoadFactor: Int = 450
@@ -396,11 +396,11 @@ private[collection] object FlatHashTable {
//h = h ^ (h >>> 14)
//h = h + (h << 4)
//h ^ (h >>> 10)
-
+
var i = hcode * 0x9e3775cd
i = java.lang.Integer.reverseBytes(i)
val improved = i * 0x9e3775cd
-
+
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
val rotation = seed % 32
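
`index` above selects a bucket from the high bits of an improved hash code; `improve` multiplies by 0x9e3775cd, byte-reverses, multiplies again, and then rotates by a per-table seed so that different tables do not share an iteration order (SI-5293). A standalone sketch of that arithmetic; the method names and the example seed are stand-ins for the trait members, and the rotation step is an approximation of the partially shown code (see also the `HashTable.improve` hunk further down):

    object IndexSketch {
      // Approximation of `improve`: multiply, byte-reverse, multiply, then rotate
      // by a per-table seed (the SI-5293 fix).
      def improve(hcode: Int, seed: Int): Int = {
        var i = hcode * 0x9e3775cd
        i = java.lang.Integer.reverseBytes(i)
        i *= 0x9e3775cd
        val rotation = seed % 32
        (i >>> rotation) | (i << (32 - rotation))
      }

      // `index` keeps the high bits: table.length is a power of two, so
      // `ones` is a mask and bitCount(ones) is the number of useful bits.
      def index(hcode: Int, tableLength: Int, seed: Int): Int = {
        val improved = improve(hcode, seed)
        val ones = tableLength - 1
        (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
      }

      def main(args: Array[String]) {
        println(index("scala".hashCode, tableLength = 64, seed = 27))   // a bucket in [0, 63]
      }
    }
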
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 5b3e07b826..cc0aed6963 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -53,9 +53,9 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
@transient protected var sizemap: Array[Int] = null
@transient var seedvalue: Int = tableSizeSeed
-
+
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
-
+
protected def initialSize: Int = HashTable.initialSize
private def lastPopulatedIndex = {
@@ -80,9 +80,9 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
+
seedvalue = in.readInt()
-
+
val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
@@ -429,7 +429,7 @@ private[collection] object HashTable {
// h = h ^ (h >>> 14)
// h = h + (h << 4)
// h ^ (h >>> 10)
-
+
// the rest of the computation is due to SI-5293
val rotation = seed % 32
val rotated = (i >>> rotation) | (i << (32 - rotation))
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 53c876ec08..037f3b2939 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -62,22 +62,22 @@ final class ListBuffer[A]
private var len = 0
protected def underlying: immutable.Seq[A] = start
-
+
private def writeObject(out: ObjectOutputStream) {
// write start
var xs: List[A] = start
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
-
+
// no need to write last0
-
+
// write if exported
out.writeBoolean(exported)
-
+
// write the length
out.writeInt(len)
}
-
+
private def readObject(in: ObjectInputStream) {
// read start, set last0 appropriately
var elem: A = in.readObject.asInstanceOf[A]
@@ -97,14 +97,14 @@ final class ListBuffer[A]
last0 = current
start
}
-
+
// read if exported
exported = in.readBoolean()
-
+
// read the length
len = in.readInt()
}
-
+
/** The current length of the buffer.
*
* This operation takes constant time.
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index d87fc0b4a2..f41a51d3ef 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -13,12 +13,12 @@ import generic._
/**
* Base trait for mutable sorted set.
- *
+ *
* @define Coll mutable.SortedSet
* @define coll mutable sorted set
*
* @author Lucien Pereira
- *
+ *
*/
trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike[A,SortedSet[A]]
with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
@@ -39,11 +39,11 @@ trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike
* Standard `CanBuildFrom` instance for sorted sets.
*
* @author Lucien Pereira
- *
+ *
*/
object SortedSet extends MutableSortedSetFactory[SortedSet] {
implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
-
+
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
-
+
}
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index e0f1c3adfe..02ee811193 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -11,14 +11,14 @@ package mutable
import generic._
-/**
+/**
* @define Coll mutable.TreeSet
* @define coll mutable tree set
* @factoryInfo
* Companion object of TreeSet providing factory related utilities.
- *
+ *
* @author Lucien Pereira
- *
+ *
*/
object TreeSet extends MutableSortedSetFactory[TreeSet] {
/**
@@ -32,7 +32,7 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
* A mutable SortedSet using an immutable AVL Tree as underlying data structure.
*
* @author Lucien Pereira
- *
+ *
*/
class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
@@ -67,7 +67,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* Cardinality stores the set size; unfortunately a
* set view (given by rangeImpl)
* cannot take advantage of this optimisation
- *
+ *
*/
override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
@@ -101,7 +101,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* Thanks to the immutable nature of the
* underlying AVL Tree, we can share it with
* the clone. So clone complexity in time is O(1).
- *
+ *
*/
override def clone: TreeSet[A] = {
val clone = new TreeSet[A](base, from, until)
@@ -119,5 +119,5 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
override def iterator: Iterator[A] = resolve.avl.iterator
.dropWhile(e => !isLeftAcceptable(from, ordering)(e))
.takeWhile(e => isRightAcceptable(until, ordering)(e))
-
+
}
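
The `clone` comment above relies on the immutable AVL tree being shared, which makes cloning O(1) in time while the copies remain observably independent. A small sketch (values are illustrative):

    import scala.collection.mutable.TreeSet

    object TreeSetCloneDemo {
      def main(args: Array[String]) {
        val s = TreeSet(3, 1, 2)
        val c = s.clone()        // shares the underlying immutable AVL tree
        s += 4
        println(s)               // TreeSet(1, 2, 3, 4)
        println(c)               // TreeSet(1, 2, 3)
      }
    }
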
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index e304be92ae..54cdf25804 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -61,14 +61,14 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
* @return the parallel builder containing both the elements of this and the `other` builder
*/
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
-
+
/** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared
* across several threads constructing the collection.
- *
+ *
* By default, this method returns `false`.
*/
def canBeShared: Boolean = false
-
+
}
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 7c5a835e56..c0fc906ad9 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -154,9 +154,9 @@ extends GenIterableLike[T, Repr]
with HasNewCombiner[T, Repr]
{
self: ParIterableLike[T, Repr, Sequential] =>
-
+
import tasksupport._
-
+
def seq: Sequential
def repr: Repr = this.asInstanceOf[Repr]
@@ -164,7 +164,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def hasDefiniteSize = true
def nonEmpty = size != 0
-
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
* This iterator is more specific than the one returned by `iterator`, and augmented
* with additional accessor and transformer methods.
@@ -234,7 +234,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
trait SignallingOps[PI <: DelegatedSignalling] {
def assign(cntx: Signalling): PI
}
-
+
/* convenience task operations wrapper */
protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]) = new TaskOps[R, Tp] {
def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) {
@@ -262,7 +262,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
it
}
}
-
+
protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] {
def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] {
def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) {
@@ -272,12 +272,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
def isCombiner = cb.isInstanceOf[Combiner[_, _]]
def asCombiner = cb.asInstanceOf[Combiner[Elem, To]]
}
-
+
protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] {
def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass`
def apply() = bf.apply()
}
-
+
protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr]
def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
@@ -287,7 +287,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def mkString: String = seq.mkString("")
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
-
+
def canEqual(other: Any) = true
/** Reduces the elements of this sequence using the specified associative binary operator.
@@ -324,7 +324,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* the elements if the collection is nonempty, and `None` otherwise.
*/
def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op))
-
+
/** Folds the elements of this sequence using the specified associative binary operator.
* The order in which the elements are reduced is unspecified and may be nondeterministic.
*
@@ -375,11 +375,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
-
+
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
-
+
def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op)
-
+
def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op)
def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op)
@@ -428,7 +428,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
}
-
+
def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result })
} else seq.map(f)(bf2seq(bf))
@@ -486,11 +486,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def find(pred: T => Boolean): Option[T] = {
executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
-
+
/** Creates a combiner factory. Each combiner factory instance is used
* once per invocation of a parallel transformer method for a single
* collection.
- *
+ *
* The default combiner factory creates a new combiner every time it
* is requested, unless the combiner is thread-safe as indicated by its
* `canBeShared` method. In this case, the method returns a factory which
@@ -509,7 +509,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def doesShareCombiners = false
}
}
-
+
protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = {
val combiner = cbf()
if (combiner.canBeShared) new CombinerFactory[S, That] {
@@ -521,7 +521,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def doesShareCombiners = false
}
}
-
+
def filter(pred: T => Boolean): Repr = {
executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.result })
}
@@ -875,9 +875,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
override def requiresStrictSplitters = inner.requiresStrictSplitters
}
-
+
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
-
+
protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
@@ -894,7 +894,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Count) = result = result + that.result
// override def toString = "CountTask(" + pittxt + ")"
}
-
+
protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
@@ -1303,7 +1303,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else result = that.result
override def requiresStrictSplitters = true
}
-
+
protected[this] class FromScanTree[U >: T, That]
(tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That])
extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] {
@@ -1379,13 +1379,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
def rightmost = this
def print(depth: Int) = println((" " * depth) + this)
}
-
+
/* alias methods */
-
+
def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
-
+
def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
-
+
/* debug information */
private[parallel] def debugInformation = "Parallel collection: " + this.getClass
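
`aggregate`, documented above, folds each partition with `seqop` and then merges the partial results with `combop`; the order in which partitions are combined is unspecified. A short sketch on a parallel range (the operands are illustrative):

    object AggregateDemo {
      def main(args: Array[String]) {
        val xs = (1 to 100).par
        // seqop adds an element to a partial sum, combop merges partial sums.
        val sum = xs.aggregate(0)(_ + _, _ + _)
        println(sum)                       // 5050
        println(xs.foldLeft(0)(_ + _))     // same result, but delegated to the sequential fold
      }
    }
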
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 6a5ee5c69b..70529229ec 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -182,9 +182,9 @@ self =>
} otherwise seq.sameElements(that)
/** Tests whether this $coll ends with the given parallel sequence.
- *
+ *
* $abortsignalling
- *
+ *
* @tparam S the type of the elements of `that` sequence
* @param that the sequence to test
* @return `true` if this $coll has `that` as a suffix, `false` otherwise
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 8ed4583419..c5910ff2c8 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -28,7 +28,7 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
* This method doesn't change the state of the iterator.
*/
def remaining: Int
-
+
/** For most collections, this is a cheap operation.
* Exceptions can override this method.
*/
@@ -386,22 +386,22 @@ extends AugmentedIterableIterator[T]
with DelegatedSignalling
{
self =>
-
+
var signalDelegate: Signalling = IdleSignalling
-
+
/** Creates a copy of this iterator. */
def dup: IterableSplitter[T]
def split: Seq[IterableSplitter[T]]
-
+
def splitWithSignalling: Seq[IterableSplitter[T]] = {
val pits = split
pits foreach { _.signalDelegate = signalDelegate }
pits
}
-
+
def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel)
-
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
*
@@ -554,13 +554,13 @@ self =>
pits foreach { _.signalDelegate = signalDelegate }
pits
}
-
+
def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = {
val pits = psplit(sizes: _*)
pits foreach { _.signalDelegate = signalDelegate }
pits
}
-
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
* method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index b705909cad..e643444638 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -171,12 +171,12 @@ trait AdaptiveWorkStealingTasks extends Tasks {
def internal() = {
var last = spawnSubtasks()
-
+
last.body.tryLeaf(None)
last.release()
body.result = last.body.result
body.throwable = last.body.throwable
-
+
while (last.next != null) {
// val lastresult = Option(last.body.result)
val beforelast = last
@@ -193,7 +193,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
body.tryMerge(last.body.repr)
}
}
-
+
def spawnSubtasks() = {
var last: TaskImpl[R, Tp] = null
var head: TaskImpl[R, Tp] = this
@@ -237,7 +237,7 @@ trait ThreadPoolTasks extends Tasks {
// utb: var future: Future[_] = null
@volatile var owned = false
@volatile var completed = false
-
+
def start() = synchronized {
// debuglog("Starting " + body)
// utb: future = executor.submit(this)
@@ -326,7 +326,7 @@ trait ThreadPoolTasks extends Tasks {
// debuglog("-----------> Executing with wait: " + task)
t.start()
-
+
t.sync()
t.body.forwardThrowable
t.body.result
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
index cec2e6886d..80add2407b 100644
--- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala
+++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
@@ -25,11 +25,11 @@ import scala.collection.mutable.CtrieIterator
/** Parallel Ctrie collection.
- *
+ *
* It has its bulk operations parallelized, but uses the snapshot operation
* to create the splitter. This means that parallel bulk operations can be
* called concurrently with the modifications.
- *
+ *
* @author Aleksandar Prokopec
* @since 2.10
*/
@@ -41,41 +41,41 @@ extends ParMap[K, V]
with Serializable
{
import collection.parallel.tasksupport._
-
+
def this() = this(new Ctrie)
-
+
override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie
-
+
override def empty: ParCtrie[K, V] = ParCtrie.empty
-
+
protected[this] override def newCombiner = ParCtrie.newCombiner
-
+
override def seq = ctrie
-
+
def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true)
-
+
override def clear() = ctrie.clear()
-
+
def result = this
-
+
def get(key: K): Option[V] = ctrie.get(key)
-
+
def put(key: K, value: V): Option[V] = ctrie.put(key, value)
-
+
def update(key: K, value: V): Unit = ctrie.update(key, value)
-
+
def remove(key: K): Option[V] = ctrie.remove(key)
-
+
def +=(kv: (K, V)): this.type = {
ctrie.+=(kv)
this
}
-
+
def -=(key: K): this.type = {
ctrie.-=(key)
this
}
-
+
override def size = {
val in = ctrie.readRoot()
val r = in.gcasRead(ctrie)
@@ -87,11 +87,11 @@ extends ParMap[K, V]
cn.cachedSize(ctrie)
}
}
-
+
override def stringPrefix = "ParCtrie"
-
+
/* tasks */
-
+
/** Computes Ctrie size in parallel. */
class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] {
var result = -1
@@ -115,7 +115,7 @@ extends ParMap[K, V]
def shouldSplitFurther = howmany > 1
override def merge(that: Size) = result = result + that.result
}
-
+
}
@@ -126,63 +126,63 @@ extends CtrieIterator[K, V](lev, ct, mustInit)
// only evaluated if `remaining` is invoked (which is not used by most tasks)
lazy val totalsize = ct.par.size
var iterated = 0
-
+
protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit)
-
+
override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
level < maxsplits
}
-
+
def dup = {
val it = newIterator(0, ct, false)
dupTo(it)
it.iterated = this.iterated
it
}
-
+
override def next() = {
iterated += 1
super.next()
}
-
+
def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]]
-
+
override def isRemainingCheap = false
-
+
def remaining: Int = totalsize - iterated
}
/** Only used within the `ParCtrie`. */
private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] {
-
+
def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
-
+
val thiz = this.asInstanceOf[ParCtrie[K, V]]
val that = other.asInstanceOf[ParCtrie[K, V]]
val result = new ParCtrie[K, V]
-
+
result ++= thiz.iterator
result ++= that.iterator
-
+
result
}
-
+
override def canBeShared = true
-
+
}
-
+
object ParCtrie extends ParMapFactory[ParCtrie] {
-
+
def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V]
-
+
def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V]
-
+
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V]
-
+
}
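
As the scaladoc above explains, `ParCtrie` builds its splitter from a read-only snapshot, so bulk operations can run in parallel while the underlying `Ctrie` is being modified. A minimal sketch, again assuming the 2.10-era classes from this diff:

    import scala.collection.parallel.mutable.ParCtrie

    object ParCtrieDemo {
      def main(args: Array[String]) {
        val pm = new ParCtrie[Int, String]()
        pm.put(1, "one")
        pm.put(2, "two")
        pm.put(3, "three")

        // The bulk map works on a snapshot; concurrent puts would not disturb it.
        val keys = pm.map { case (k, _) => k }
        println(keys.seq.toSet)            // Set(1, 2, 3)
        println(pm.size)                   // 3, computed via the parallel Size task
      }
    }
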
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 6c5f513ad0..0217d0bfa8 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -117,7 +117,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
private var seedvalue = 27
-
+
def +=(elem: T) = {
sz += 1
val hc = improve(elemHashCode(elem), seedvalue)
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 8f19d0ecdb..f5e05ffefb 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -114,7 +114,7 @@ package parallel {
}
/* classes */
-
+
trait CombinerFactory[U, Repr] {
/** Provides a combiner used to construct a collection. */
def apply(): Combiner[U, Repr]
@@ -126,7 +126,7 @@ package parallel {
*/
def doesShareCombiners: Boolean
}
-
+
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
final case class CompositeThrowable(
val throwables: Set[Throwable]
@@ -201,18 +201,18 @@ package parallel {
//self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
protected var sz: Int = 0
-
+
def size = sz
-
+
def clear() = {
buckets = new Array[UnrolledBuffer[Buck]](bucketnumber)
sz = 0
}
-
+
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
-
+
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
-
+
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
if (this eq other) this
else other match {
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index e79f76430f..f6d6341151 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -23,7 +23,7 @@ class Channel[A] {
private var written = new LinkedList[A] // FIFO buffer, realized through
private var lastWritten = written // aliasing of a linked list
private var nreaders = 0
-
+
/**
* @param x ...
*/
@@ -33,7 +33,7 @@ class Channel[A] {
lastWritten = lastWritten.next
if (nreaders > 0) notify()
}
-
+
def read: A = synchronized {
while (written.next == null) {
try {
@@ -46,5 +46,5 @@ class Channel[A] {
written = written.next
x
}
-
+
}
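
`Channel` above is a small synchronized FIFO: `write` appends to the linked list and wakes a waiting reader, while `read` blocks until a value is available. A producer/consumer sketch (the thread handling is illustrative):

    import scala.concurrent.Channel

    object ChannelDemo {
      def main(args: Array[String]) {
        val ch = new Channel[Int]
        val producer = new Thread(new Runnable {
          def run() { for (i <- 1 to 3) ch.write(i) }
        })
        producer.start()
        // read blocks until the producer has written something
        println(ch.read + ch.read + ch.read)   // 6
        producer.join()
      }
    }
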
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index 6aacd53de2..ae17c7e032 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -18,16 +18,16 @@ abstract class ConcurrentPackageObject {
*/
lazy val executionContext =
new impl.ExecutionContextImpl(java.util.concurrent.Executors.newCachedThreadPool())
-
+
/** A global service for scheduling tasks for execution.
*/
// lazy val scheduler =
// new default.SchedulerImpl
-
+
val handledFutureException: PartialFunction[Throwable, Throwable] = {
case t: Throwable if isFutureThrowable(t) => t
}
-
+
// TODO rename appropriately and make public
private[concurrent] def isFutureThrowable(t: Throwable) = t match {
case e: Error => false
@@ -35,7 +35,7 @@ abstract class ConcurrentPackageObject {
case i: InterruptedException => false
case _ => true
}
-
+
private[concurrent] def resolve[T](source: Try[T]): Try[T] = source match {
case Failure(t: scala.runtime.NonLocalReturnControl[_]) => Success(t.value.asInstanceOf[T])
case Failure(t: scala.util.control.ControlThrowable) => Failure(new ExecutionException("Boxed ControlThrowable", t))
@@ -46,24 +46,24 @@ abstract class ConcurrentPackageObject {
private[concurrent] def resolver[T] =
resolverFunction.asInstanceOf[PartialFunction[Throwable, Try[T]]]
-
+
/* concurrency constructs */
-
+
def future[T](body: =>T)(implicit execCtx: ExecutionContext = executionContext): Future[T] =
execCtx future body
-
+
def promise[T]()(implicit execCtx: ExecutionContext = executionContext): Promise[T] =
execCtx promise
-
+
/** Wraps a block of code into an awaitable object. */
def body2awaitable[T](body: =>T) = new Awaitable[T] {
def await(atMost: Duration)(implicit cb: CanAwait) = body
}
-
+
/** Used to block on a piece of code which potentially blocks.
- *
+ *
* @param body A piece of code which contains potentially blocking or long running calls.
- *
+ *
* Calling this method may throw the following exceptions:
* - CancellationException - if the computation was cancelled
* - InterruptedException - in the case that a wait within the blockable object was interrupted
@@ -71,11 +71,11 @@ abstract class ConcurrentPackageObject {
*/
def blocking[T](atMost: Duration)(body: =>T)(implicit execCtx: ExecutionContext): T =
executionContext.blocking(atMost)(body)
-
+
/** Blocks on an awaitable object.
- *
+ *
* @param awaitable An object with a `block` method which runs potentially blocking or long running calls.
- *
+ *
* Calling this method may throw the following exceptions:
* - CancellationException - if the computation was cancelled
* - InterruptedException - in the case that a wait within the blockable object was interrupted
@@ -83,7 +83,7 @@ abstract class ConcurrentPackageObject {
*/
def blocking[T](awaitable: Awaitable[T], atMost: Duration)(implicit execCtx: ExecutionContext = executionContext): T =
executionContext.blocking(awaitable, atMost)
-
+
@inline implicit final def int2durationops(x: Int): DurationOps = new DurationOps(x)
}
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index 0b7f54a27a..a17153bad5 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -26,23 +26,23 @@ package scala.concurrent
class DelayedLazyVal[T](f: () => T, body: => Unit) {
@volatile private[this] var _isDone = false
private[this] lazy val complete = f()
-
+
/** Whether the computation is complete.
*
* @return true if the computation is complete.
*/
def isDone = _isDone
-
+
/** The current result of f(), or the final result if complete.
*
* @return the current value
*/
def apply(): T = if (isDone) complete else f()
-
+
// TODO replace with scala.concurrent.future { ... }
ops.future {
body
_isDone = true
}
-
+
}
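
`DelayedLazyVal` above runs `body` in the background and lets `apply()` return the current value of `f()` until `isDone`, after which the final result is cached. A sketch using an atomic accumulator (the 1-to-100 sum is illustrative):

    import java.util.concurrent.atomic.AtomicInteger
    import scala.concurrent.DelayedLazyVal

    object DelayedLazyValDemo {
      def main(args: Array[String]) {
        val acc = new AtomicInteger(0)
        val partial = new DelayedLazyVal(() => acc.get, {
          var i = 1
          while (i <= 100) { acc.addAndGet(i); i += 1 }
        })
        // Before completion, partial() returns whatever has been accumulated so far.
        while (!partial.isDone) Thread.sleep(1)
        println(partial())                 // 5050 once the body has finished
      }
    }
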
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 99cd264ac5..eb1b3355c0 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -21,41 +21,41 @@ import collection._
trait ExecutionContext {
-
+
protected implicit object CanAwaitEvidence extends CanAwait
-
+
def execute(runnable: Runnable): Unit
-
+
def execute[U](body: () => U): Unit
-
+
def promise[T]: Promise[T]
-
+
def future[T](body: Callable[T]): Future[T] = future(body.call())
-
+
def future[T](body: => T): Future[T]
-
+
def blocking[T](atMost: Duration)(body: =>T): T
-
+
def blocking[T](awaitable: Awaitable[T], atMost: Duration): T
-
+
def reportFailure(t: Throwable): Unit
-
+
/* implementations follow */
-
+
private implicit val executionContext = this
-
+
def keptPromise[T](result: T): Promise[T] = {
val p = promise[T]
p success result
}
-
+
def brokenPromise[T](t: Throwable): Promise[T] = {
val p = promise[T]
p failure t
}
-
+
/** TODO some docs
- *
+ *
*/
def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]]): Future[Coll[T]] = {
import nondeterministic._
@@ -63,13 +63,13 @@ trait ExecutionContext {
val counter = new AtomicInteger(1) // how else could we do this?
val p: Promise[Coll[T]] = promise[Coll[T]] // we need an implicit execctx in the signature
var idx = 0
-
+
def tryFinish() = if (counter.decrementAndGet() == 0) {
val builder = cbf(futures)
builder ++= buffer
p success builder.result
}
-
+
for (f <- futures) {
val currentIndex = idx
buffer += null.asInstanceOf[T]
@@ -83,46 +83,46 @@ trait ExecutionContext {
}
idx += 1
}
-
+
tryFinish()
-
+
p.future
}
-
+
/** TODO some docs
- *
+ *
*/
def any[T](futures: Traversable[Future[T]]): Future[T] = {
val p = promise[T]
val completeFirst: Try[T] => Unit = elem => p tryComplete elem
-
+
futures foreach (_ onComplete completeFirst)
-
+
p.future
}
-
+
/** TODO some docs
- *
+ *
*/
def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean): Future[Option[T]] = {
if (futures.isEmpty) Promise.kept[Option[T]](None).future
else {
val result = promise[Option[T]]
val count = new AtomicInteger(futures.size)
- val search: Try[T] => Unit = {
+ val search: Try[T] => Unit = {
v => v match {
case Success(r) => if (predicate(r)) result trySuccess Some(r)
case _ =>
}
if (count.decrementAndGet() == 0) result trySuccess None
}
-
+
futures.foreach(_ onComplete search)
result.future
}
}
-
+
}
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 73f76bbbfb..eb54b61db0 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -28,9 +28,9 @@ import scala.collection.generic.CanBuildFrom
/** The trait that represents futures.
- *
+ *
* Asynchronous computations that yield futures are created with the `future` call:
- *
+ *
* {{{
* val s = "Hello"
* val f: Future[String] = future {
@@ -40,9 +40,9 @@ import scala.collection.generic.CanBuildFrom
* case msg => println(msg)
* }
* }}}
- *
+ *
* @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang
- *
+ *
* @define multipleCallbacks
* Multiple callbacks may be registered; there is no guarantee that they will be
* executed in a particular order.
@@ -54,18 +54,18 @@ import scala.collection.generic.CanBuildFrom
* - `Error` - errors are not contained within futures
* - `InterruptedException` - not contained within futures
* - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures
- *
+ *
* Instead, the future is completed with an ExecutionException with one of the exceptions above
* as the cause.
* If a future is failed with a `scala.runtime.NonLocalReturnControl`,
* it is completed with the value from that throwable instead.
- *
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
- *
+ *
* @define forComprehensionExamples
* Example:
- *
+ *
* {{{
* val f = future { 5 }
* val g = future { 3 }
@@ -74,116 +74,116 @@ import scala.collection.generic.CanBuildFrom
* y: Int <- g // returns Future(5)
* } yield x + y
* }}}
- *
+ *
* is translated to:
- *
+ *
* {{{
* f flatMap { (x: Int) => g map { (y: Int) => x + y } }
* }}}
*/
trait Future[+T] extends Awaitable[T] {
self =>
-
+
/* Callbacks */
-
+
/** When this future is completed successfully (i.e. with a value),
* apply the provided partial function to the value if the partial function
* is defined at that value.
- *
+ *
* If the future has already been completed with a value,
* this will either be applied immediately or be scheduled asynchronously.
- *
+ *
* $multipleCallbacks
*/
def onSuccess[U](pf: PartialFunction[T, U]): this.type = onComplete {
case Failure(t) => // do nothing
case Success(v) => if (pf isDefinedAt v) pf(v) else { /*do nothing*/ }
}
-
+
/** When this future is completed with a failure (i.e. with a throwable),
* apply the provided callback to the throwable.
- *
+ *
* $caughtThrowables
- *
+ *
* If the future has already been completed with a failure,
* this will either be applied immediately or be scheduled asynchronously.
- *
+ *
* Will not be called in case that the future is completed with a value.
- *
+ *
* $multipleCallbacks
*/
def onFailure[U](callback: PartialFunction[Throwable, U]): this.type = onComplete {
case Failure(t) => if (isFutureThrowable(t) && callback.isDefinedAt(t)) callback(t) else { /*do nothing*/ }
case Success(v) => // do nothing
}
-
+
/** When this future is completed, either through an exception, a timeout, or a value,
* apply the provided function.
- *
+ *
* If the future has already been completed,
* this will either be applied immediately or be scheduled asynchronously.
- *
+ *
* $multipleCallbacks
*/
def onComplete[U](func: Try[T] => U): this.type
-
-
+
+
/* Miscellaneous */
-
+
/** Creates a new promise.
*/
def newPromise[S]: Promise[S]
-
-
+
+
/* Projections */
-
+
/** Returns a failed projection of this future.
- *
+ *
* The failed projection is a future holding a value of type `Throwable`.
- *
+ *
* It is completed with a value which is the throwable of the original future
* in case the original future is failed.
- *
+ *
* It is failed with a `NoSuchElementException` if the original future is completed successfully.
- *
+ *
* Blocking on this future returns a value if the original future is completed with an exception
* and throws a `NoSuchElementException` if the original future is completed successfully.
*/
def failed: Future[Throwable] = {
- def noSuchElem(v: T) =
+ def noSuchElem(v: T) =
new NoSuchElementException("Future.failed not completed with a throwable. Instead completed with: " + v)
-
+
val p = newPromise[Throwable]
-
+
onComplete {
case Failure(t) => p success t
case Success(v) => p failure noSuchElem(v)
}
-
+
p.future
}
-
-
+
+
/* Monadic operations */
-
+
/** Asynchronously processes the value in the future once the value becomes available.
- *
+ *
* Will not be called if the future fails.
*/
def foreach[U](f: T => U): Unit = onComplete {
case Success(r) => f(r)
case Failure(_) => // do nothing
}
-
+
/** Creates a new future by applying a function to the successful result of
* this future. If this future is completed with an exception then the new
* future will also contain this exception.
- *
+ *
* $forComprehensionExamples
*/
def map[S](f: T => S): Future[S] = {
val p = newPromise[S]
-
+
onComplete {
case Failure(t) => p failure t
case Success(v) =>
@@ -192,23 +192,23 @@ self =>
case t => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future by applying a function to the successful result of
* this future, and returns the result of the function as the new future.
* If this future is completed with an exception then the new future will
* also contain this exception.
- *
+ *
* $forComprehensionExamples
*/
def flatMap[S](f: T => Future[S]): Future[S] = {
val p = newPromise[S]
-
+
onComplete {
case Failure(t) => p failure t
- case Success(v) =>
+ case Success(v) =>
try {
f(v) onComplete {
case Failure(t) => p failure t
@@ -218,15 +218,15 @@ self =>
case t: Throwable => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future by filtering the value of the current future with a predicate.
- *
+ *
* If the current future contains a value which satisfies the predicate, the new future will also hold that value.
* Otherwise, the resulting future will fail with a `NoSuchElementException`.
- *
+ *
* If the current future fails or times out, the resulting future also fails or times out, respectively.
*
* Example:
@@ -240,7 +240,7 @@ self =>
*/
def filter(pred: T => Boolean): Future[T] = {
val p = newPromise[T]
-
+
onComplete {
case Failure(t) => p failure t
case Success(v) =>
@@ -251,12 +251,12 @@ self =>
case t: Throwable => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future by mapping the value of the current future if the given partial function is defined at that value.
- *
+ *
* If the current future contains a value for which the partial function is defined, the new future will also hold that value.
* Otherwise, the resulting future will fail with a `NoSuchElementException`.
*
@@ -277,7 +277,7 @@ self =>
*/
def collect[S](pf: PartialFunction[T, S]): Future[S] = {
val p = newPromise[S]
-
+
onComplete {
case Failure(t) => p failure t
case Success(v) =>
@@ -288,16 +288,16 @@ self =>
case t: Throwable => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future that will handle any matching throwable that this
* future might contain. If there is no match, or if this future contains
* a valid result then the new future will contain the same.
- *
+ *
* Example:
- *
+ *
* {{{
* future (6 / 0) recover { case e: ArithmeticException ⇒ 0 } // result: 0
* future (6 / 0) recover { case e: NotFoundException ⇒ 0 } // result: exception
@@ -306,25 +306,25 @@ self =>
*/
def recover[U >: T](pf: PartialFunction[Throwable, U]): Future[U] = {
val p = newPromise[U]
-
+
onComplete {
case Failure(t) if pf isDefinedAt t =>
try { p success pf(t) }
catch { case t: Throwable => p complete resolver(t) }
case otherwise => p complete otherwise
}
-
+
p.future
}
-
+
/** Creates a new future that will handle any matching throwable that this
* future might contain by assigning it a value of another future.
- *
+ *
* If there is no match, or if this future contains
* a valid result then the new future will contain the same result.
- *
+ *
* Example:
- *
+ *
* {{{
* val f = future { Int.MaxValue }
* future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
@@ -332,7 +332,7 @@ self =>
*/
def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]]): Future[U] = {
val p = newPromise[U]
-
+
onComplete {
case Failure(t) if pf isDefinedAt t =>
try {
@@ -342,13 +342,13 @@ self =>
}
case otherwise => p complete otherwise
}
-
+
p.future
}
-
+
/** Zips the values of `this` and `that` future, and creates
* a new future holding the tuple of their results.
- *
+ *
* If `this` future fails, the resulting future is failed
* with the throwable stored in `this`.
* Otherwise, if `that` future fails, the resulting future is failed
@@ -356,27 +356,27 @@ self =>
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
val p = newPromise[(T, U)]
-
+
this onComplete {
case Failure(t) => p failure t
case Success(r) => that onSuccess {
case r2 => p success ((r, r2))
}
}
-
+
that onFailure {
case f => p failure f
}
-
+
p.future
}
-
+
/** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
* the result of the `that` future if `that` is completed successfully.
* If both futures are failed, the resulting future holds the throwable object of the first future.
- *
+ *
* Using this method will not cause concurrent programs to become nondeterministic.
- *
+ *
* Example:
* {{{
* val f = future { sys.error("failed") }
@@ -387,7 +387,7 @@ self =>
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
val p = newPromise[U]
-
+
onComplete {
case Failure(t) => that onComplete {
case Failure(_) => p failure t
@@ -395,23 +395,23 @@ self =>
}
case Success(v) => p success v
}
-
+
p.future
}
-
+
/** Applies the side-effecting function to the result of this future, and returns
* a new future with the result of this future.
- *
+ *
* This method allows one to enforce that the callbacks are executed in a
* specified order.
- *
+ *
* Note that if one of the chained `andThen` callbacks throws
* an exception, that exception is not propagated to the subsequent `andThen`
* callbacks. Instead, the subsequent `andThen` callbacks are given the original
* value of this future.
- *
+ *
* The following example prints out `5`:
- *
+ *
* {{{
* val f = future { 5 }
* f andThen {
@@ -424,21 +424,21 @@ self =>
*/
def andThen[U](pf: PartialFunction[Try[T], U]): Future[T] = {
val p = newPromise[T]
-
+
onComplete {
case r =>
try if (pf isDefinedAt r) pf(r)
finally p complete r
}
-
+
p.future
}
-
+
/** Creates a new future which holds the result of either this future or `that` future, depending on
* which future was completed first.
- *
+ *
* $nonDeterministic
- *
+ *
* Example:
* {{{
* val f = future { sys.error("failed") }
@@ -449,42 +449,42 @@ self =>
*/
def either[U >: T](that: Future[U]): Future[U] = {
val p = self.newPromise[U]
-
+
val completePromise: PartialFunction[Try[U], _] = {
case Failure(t) => p tryFailure t
case Success(v) => p trySuccess v
}
-
+
self onComplete completePromise
that onComplete completePromise
-
+
p.future
}
-
+
}
/** TODO some docs
- *
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
*/
object Future {
-
+
// TODO make more modular by encoding all other helper methods within the execution context
/** TODO some docs
*/
def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]], ec: ExecutionContext): Future[Coll[T]] =
ec.all[T, Coll](futures)
-
+
// move this to future companion object
@inline def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = executor.future(body)
def any[T](futures: Traversable[Future[T]])(implicit ec: ExecutionContext): Future[T] = ec.any(futures)
def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean)(implicit ec: ExecutionContext): Future[Option[T]] = ec.find(futures)(predicate)
-
+
}
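The companion object only forwards to the execution context, so a brief sketch of the intended usage of `all`, `any` and `find` may help (hypothetical; the collection type, the `future { ... }` constructor and the implicit context are assumptions, and the TODO notes say these helpers may still move):

  val futures = List(future { 1 }, future { 2 }, future { 3 })

  val all3: Future[List[Int]]        = Future.all(futures)                // List(1, 2, 3) once all complete
  val first: Future[Int]             = Future.any(futures)                // whichever completes first
  val firstEven: Future[Option[Int]] = Future.find(futures)(_ % 2 == 0)   // Some(2) if found, else None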
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index bac9d4f558..127a0e0055 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -48,9 +48,9 @@ object JavaConversions {
// do nothing
}
}
-
+
implicit def asExecutionContext(exec: ExecutorService): ExecutionContext = null // TODO
-
+
implicit def asExecutionContext(exec: Executor): ExecutionContext = null // TODO
-
+
}
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index f26deb77ab..4404e90971 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -24,36 +24,36 @@ import scala.util.{ Try, Success, Failure }
* If the throwable used to fail this promise is an error, a control exception
* or an interrupted exception, it will be wrapped as a cause within an
* `ExecutionException` which will fail the promise.
- *
+ *
* @define nonDeterministic
* Note: Using this method may result in non-deterministic concurrent programs.
*/
trait Promise[T] {
-
+
import nondeterministic._
-
+
/** Future containing the value of this promise.
*/
def future: Future[T]
-
+
/** Completes the promise with either an exception or a value.
- *
+ *
* @param result Either the value or the exception to complete the promise with.
- *
+ *
* $promiseCompletion
*/
def complete(result:Try[T]): this.type = if (tryComplete(result)) this else throwCompleted
-
+
/** Tries to complete the promise with either a value or the exception.
- *
+ *
* $nonDeterministic
- *
+ *
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def tryComplete(result: Try[T]): Boolean
-
+
/** Completes this promise with the specified future, once that future is completed.
- *
+ *
* @return This promise
*/
final def completeWith(other: Future[T]): this.type = {
@@ -62,64 +62,64 @@ trait Promise[T] {
}
this
}
-
+
/** Completes the promise with a value.
- *
+ *
* @param value The value to complete the promise with.
- *
+ *
* $promiseCompletion
*/
def success(v: T): this.type = if (trySuccess(v)) this else throwCompleted
-
+
/** Tries to complete the promise with a value.
- *
+ *
* $nonDeterministic
- *
+ *
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def trySuccess(value: T): Boolean = tryComplete(Success(value))
-
+
/** Completes the promise with an exception.
- *
+ *
* @param t The throwable to complete the promise with.
- *
+ *
* $allowedThrowables
- *
+ *
* $promiseCompletion
*/
def failure(t: Throwable): this.type = if (tryFailure(t)) this else throwCompleted
-
+
/** Tries to complete the promise with an exception.
- *
+ *
* $nonDeterministic
- *
+ *
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
-
+
/** Wraps a `Throwable` in an `ExecutionException` if necessary. TODO replace with `resolver` from scala.concurrent
- *
+ *
* $allowedThrowables
*/
protected def wrap(t: Throwable): Throwable = t match {
case t: Throwable if isFutureThrowable(t) => t
case _ => new ExecutionException(t)
}
-
+
private def throwCompleted = throw new IllegalStateException("Promise already completed.")
-
+
}
object Promise {
-
+
def kept[T](result: T)(implicit execctx: ExecutionContext): Promise[T] =
execctx keptPromise result
-
- def broken[T](t: Throwable)(implicit execctx: ExecutionContext): Promise[T] =
+
+ def broken[T](t: Throwable)(implicit execctx: ExecutionContext): Promise[T] =
execctx brokenPromise t
-
+
}
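A minimal producer/consumer sketch of the Promise operations documented above (illustrative; it assumes a promise factory on an implicit execution context `ec`, as provided by the implementation later in this patch):

  val p: Promise[Int] = ec.promise[Int]

  // Producer side: complete at most once. `success`/`failure` throw on a promise
  // that is already completed; the `try*` variants report it via their result.
  p success 42
  p trySuccess 43                               // returns false: already completed

  // Consumer side: observe the result through the associated future.
  p.future onSuccess { case x => println(x) }   // prints 42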
diff --git a/src/library/scala/concurrent/Task.scala b/src/library/scala/concurrent/Task.scala
index d6f86bac31..eb3efbb422 100644
--- a/src/library/scala/concurrent/Task.scala
+++ b/src/library/scala/concurrent/Task.scala
@@ -3,11 +3,11 @@ package scala.concurrent
trait Task[+T] {
-
+
def start(): Unit
-
+
def future: Future[T]
-
+
}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index af0eb66292..7b44d02612 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -19,7 +19,7 @@ import scala.collection.mutable.Stack
class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionContext {
import ExecutionContextImpl._
-
+
def execute(runnable: Runnable): Unit = executorService match {
// case fj: ForkJoinPool =>
// TODO fork if more applicable
@@ -27,16 +27,16 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
case _ =>
executorService execute runnable
}
-
+
def execute[U](body: () => U): Unit = execute(new Runnable {
def run() = body()
})
-
+
def promise[T]: Promise[T] = new Promise.DefaultPromise[T]()(this)
-
+
def future[T](body: =>T): Future[T] = {
val p = promise[T]
-
+
dispatchFuture {
() =>
p complete {
@@ -47,39 +47,39 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
}
}
}
-
+
p.future
}
-
+
def blocking[T](atMost: Duration)(body: =>T): T = blocking(body2awaitable(body), atMost)
-
+
def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = {
currentExecutionContext.get match {
case null => awaitable.await(atMost)(null) // outside - TODO - fix timeout case
case x => x.blockingCall(awaitable) // inside an execution context thread
}
}
-
+
def reportFailure(t: Throwable) = t match {
case e: Error => throw e // rethrow serious errors
case t => t.printStackTrace()
}
-
+
/** Only callable from the tasks running on the same execution context. */
private def blockingCall[T](body: Awaitable[T]): T = {
releaseStack()
-
+
// TODO see what to do with timeout
body.await(Duration.fromNanos(0))(CanAwaitEvidence)
}
-
+
// an optimization for batching futures
// TODO we should replace this with a public queue,
// so that it can be stolen from
// OR: a push to the local task queue should be so cheap that this is
// not even needed, but stealing is still possible
private val _taskStack = new ThreadLocal[Stack[() => Unit]]()
-
+
private def releaseStack(): Unit =
_taskStack.get match {
case stack if (stack ne null) && stack.nonEmpty =>
@@ -92,7 +92,7 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
case _ =>
_taskStack.remove()
}
-
+
private[impl] def dispatchFuture(task: () => Unit, force: Boolean = false): Unit =
_taskStack.get match {
case stack if (stack ne null) && !force => stack push task
@@ -119,16 +119,16 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
}
)
}
-
+
}
object ExecutionContextImpl {
-
+
private[concurrent] def currentExecutionContext: ThreadLocal[ExecutionContextImpl] = new ThreadLocal[ExecutionContextImpl] {
override protected def initialValue = null
}
-
+
}
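The thread-local `_taskStack` above batches callback dispatches: tasks scheduled while another task is already running on the same context thread are pushed onto a local stack and drained in place instead of being resubmitted to the executor. A stripped-down, self-contained sketch of that pattern (an illustration, not the class's actual code):

  import java.util.concurrent.Executor
  import scala.collection.mutable.Stack

  class BatchingExecutor(underlying: Executor) {
    private val local = new ThreadLocal[Stack[() => Unit]]()

    def dispatch(task: () => Unit): Unit = local.get match {
      case stack if stack ne null =>
        stack push task                     // already inside a batch: just enqueue locally
      case _ =>
        underlying.execute(new Runnable {   // start a new batch on the underlying executor
          def run() = {
            local.set(Stack(task))
            try while (local.get.nonEmpty) local.get.pop().apply()
            finally local.remove()
          }
        })
    }
  }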
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 24d0258cc8..9466761d4d 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -13,35 +13,35 @@ import scala.util.{ Try, Success, Failure }
//import scala.util.continuations._
trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
-
+
implicit def executor: ExecutionContextImpl
-
+
/** For use only within a Future.flow block or another compatible Delimited Continuations reset block.
- *
+ *
* Returns the result of this Future without blocking, by suspending execution and storing it as a
* continuation until the result is available.
*/
//def apply(): T @cps[Future[Any]] = shift(this flatMap (_: T => Future[Any]))
-
+
/** Tests whether this Future has been completed.
*/
final def isCompleted: Boolean = value.isDefined
-
+
/** The contained value of this Future. Before this Future is completed
* the value will be None. After completion the value will be Some(Success(t))
* if it contains a valid result, or Some(Failure(error)) if it contains
* an exception.
*/
def value: Option[Try[T]]
-
+
def onComplete[U](func: Try[T] => U): this.type
-
+
/** Creates a new Future[A] which is completed with this Future's result if
* that conforms to A's erased type or a ClassCastException otherwise.
*/
final def mapTo[T](implicit m: Manifest[T]) = {
val p = executor.promise[T]
-
+
onComplete {
case f @ Failure(t) => p complete f.asInstanceOf[Try[T]]
case Success(v) =>
@@ -51,7 +51,7 @@ trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
case e: ClassCastException ⇒ Failure(e)
})
}
-
+
p.future
}
@@ -65,7 +65,7 @@ trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
def flatMap[B](f: A => Future[B]) = self filter p flatMap f
def withFilter(q: A => Boolean): FutureWithFilter[A] = new FutureWithFilter[A](self, x ⇒ p(x) && q(x))
}
-
+
}
object Future {
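As a quick illustration of the `mapTo` defined above (hypothetical usage: it assumes a value of this impl trait obtained via `future { ... }` and an implicit Manifest, per the signature shown):

  val any: Future[Any] = future { "hello" }
  val ok  = any.mapTo[String]   // completes with Success("hello")
  val bad = any.mapTo[Int]      // completes with Failure(_: ClassCastException)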
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 7ef76e1501..585f71f3cf 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -23,11 +23,11 @@ import scala.annotation.tailrec
trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
-
+
def future = this
-
+
def newPromise[S]: Promise[S] = executor promise
-
+
// TODO refine answer and return types here from Any to type parameters
// then move this up in the hierarchy
/*
@@ -40,7 +40,7 @@ trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
cont: (Future[T] => Future[Any]) =>
val p = executor.promise[Any]
val thisPromise = this
-
+
thisPromise completeWith other
thisPromise onComplete { v =>
try {
@@ -49,12 +49,12 @@ trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
case e => p complete resolver(e)
}
}
-
+
p.future
}
*/
// TODO finish this once we introduce something like dataflow streams
-
+
/*
final def <<(stream: PromiseStreamOut[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] => Future[Any]) =>
val fr = executor.promise[Any]
@@ -70,40 +70,40 @@ trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
fr
}
*/
-
+
}
object Promise {
def dur2long(dur: Duration): Long = if (dur.isFinite) dur.toNanos else Long.MaxValue
-
+
def EmptyPending[T](): FState[T] = emptyPendingValue.asInstanceOf[FState[T]]
-
+
/** Represents the internal state.
*/
sealed trait FState[+T] { def value: Option[Try[T]] }
-
+
case class Pending[T](listeners: List[Try[T] => Any] = Nil) extends FState[T] {
def value: Option[Try[T]] = None
}
-
+
case class Success[T](value: Option[util.Success[T]] = None) extends FState[T] {
def result: T = value.get.get
}
-
+
case class Failure[T](value: Option[util.Failure[T]] = None) extends FState[T] {
def exception: Throwable = value.get.exception
}
-
+
private val emptyPendingValue = Pending[Nothing](Nil)
-
+
/** Default promise implementation.
*/
class DefaultPromise[T](implicit val executor: ExecutionContextImpl) extends AbstractPromise with Promise[T] {
self =>
-
+
updater.set(this, Promise.EmptyPending())
-
+
protected final def tryAwait(atMost: Duration): Boolean = {
@tailrec
def awaitUnsafe(waitTimeNanos: Long): Boolean = {
@@ -118,36 +118,36 @@ object Promise {
} catch {
case e: InterruptedException =>
}
-
+
awaitUnsafe(waitTimeNanos - (System.nanoTime() - start))
} else
value.isDefined
}
-
+
executor.blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), Duration.fromNanos(0))
}
-
+
private def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
if (value.isDefined || tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds")
-
+
def await(atMost: Duration)(implicit permit: CanAwait): T =
ready(atMost).value.get match {
case util.Failure(e) => throw e
case util.Success(r) => r
}
-
+
def value: Option[Try[T]] = getState.value
-
+
@inline
private[this] final def updater = AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]]
-
+
@inline
protected final def updateState(oldState: FState[T], newState: FState[T]): Boolean = updater.compareAndSet(this, oldState, newState)
-
+
@inline
protected final def getState: FState[T] = updater.get(this)
-
+
def tryComplete(value: Try[T]): Boolean = {
val callbacks: List[Try[T] => Any] = {
try {
@@ -165,7 +165,7 @@ object Promise {
synchronized { notifyAll() } // notify any blockers from `tryAwait`
}
}
-
+
callbacks match {
case null => false
case cs if cs.isEmpty => true
@@ -176,7 +176,7 @@ object Promise {
true
}
}
-
+
def onComplete[U](func: Try[T] => U): this.type = {
@tailrec // Returns whether the future has already been completed or not
def tryAddCallback(): Boolean = {
@@ -188,17 +188,17 @@ object Promise {
if (updateState(pt, pt.copy(listeners = func :: pt.listeners))) false else tryAddCallback()
}
}
-
+
if (tryAddCallback()) {
val result = value.get
executor dispatchFuture {
() => notifyCompleted(func, result)
}
}
-
+
this
}
-
+
private final def notifyCompleted(func: Try[T] => Any, result: Try[T]) {
try {
func(result)
@@ -207,16 +207,16 @@ object Promise {
}
}
}
-
+
/** An already completed Future is given its result at creation.
- *
+ *
* Useful in Future-composition when a value to contribute is already available.
*/
final class KeptPromise[T](suppliedValue: Try[T])(implicit val executor: ExecutionContextImpl) extends Promise[T] {
val value = Some(resolve(suppliedValue))
-
+
def tryComplete(value: Try[T]): Boolean = false
-
+
def onComplete[U](func: Try[T] => U): this.type = {
val completedAs = value.get
executor dispatchFuture {
@@ -224,15 +224,15 @@ object Promise {
}
this
}
-
+
private def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
-
+
def await(atMost: Duration)(implicit permit: CanAwait): T = value.get match {
case util.Failure(e) => throw e
case util.Success(r) => r
}
}
-
+
}
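The `FState` hierarchy together with the `AtomicReferenceFieldUpdater` above form a lock-free state machine: a promise stays pending, accumulating listeners, until a single `tryComplete` CAS wins and the collected callbacks are dispatched. A self-contained sketch of the same idea using a plain `AtomicReference` (an illustration of the technique, not the library's code):

  import java.util.concurrent.atomic.AtomicReference
  import scala.annotation.tailrec
  import scala.util.Try

  class MiniPromise[T] {
    private sealed trait State
    private case class Pending(listeners: List[Try[T] => Unit]) extends State
    private case class Done(result: Try[T]) extends State

    private val state = new AtomicReference[State](Pending(Nil))

    @tailrec final def tryComplete(result: Try[T]): Boolean = state.get match {
      case Done(_) => false                      // someone else already completed it
      case p @ Pending(listeners) =>
        if (state.compareAndSet(p, Done(result))) {
          listeners.reverse foreach (_(result))  // run callbacks in registration order
          true
        } else tryComplete(result)               // lost the race: retry
    }

    @tailrec final def onComplete(f: Try[T] => Unit): Unit = state.get match {
      case Done(r) => f(r)                       // already completed: run immediately
      case p @ Pending(listeners) =>
        if (!state.compareAndSet(p, Pending(f :: listeners))) onComplete(f)
    }
  }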
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 6a98fd50c2..7cc48c09b2 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -25,31 +25,31 @@ package concurrent {
catch { case _ => }
awaitable
}
-
+
def result[T](atMost: Duration)(awaitable: Awaitable[T])(implicit execCtx: ExecutionContext = executionContext): T = {
blocking(awaitable, atMost)
}
}
-
+
/** Importing this object allows using some concurrency primitives
* on futures and promises that can yield nondeterministic programs.
- *
+ *
* While program determinism is broken when using these primitives,
* some programs cannot be written without them (e.g. multiple client threads
* cannot send requests to a server thread through regular promises and futures).
*/
object nondeterministic { }
-
+
/** A timeout exception.
- *
+ *
* Futures are failed with a timeout exception when their timeout expires.
- *
+ *
* Each timeout exception contains an origin future which originally timed out.
*/
class FutureTimeoutException(origin: Future[_], message: String) extends TimeoutException(message) {
def this(origin: Future[_]) = this(origin, "Future timed out.")
}
-
+
final class DurationOps private[concurrent] (x: Int) {
// TODO ADD OTHERS
def ns = util.Duration.fromNanos(x)
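A sketch of how the `result` helper above is meant to be used to block for a value (illustrative only: the `future { ... }` constructor, the implicit execution context and the duration construction are assumptions, and durations are clearly still being stubbed out in this file):

  val f = future { 1 + 1 }
  // Block the calling thread for at most ~2 seconds waiting for the result.
  val v: Int = result(util.Duration.fromNanos(2000000000L))(f)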
diff --git a/src/library/scala/reflect/ReflectionUtils.scala b/src/library/scala/reflect/ReflectionUtils.scala
index dfadfb4976..510f0819c6 100644
--- a/src/library/scala/reflect/ReflectionUtils.scala
+++ b/src/library/scala/reflect/ReflectionUtils.scala
@@ -29,13 +29,13 @@ object ReflectionUtils {
def singletonInstance(className: String, cl: ClassLoader = getClass.getClassLoader): AnyRef = {
val name = if (className endsWith "$") className else className + "$"
- val clazz = java.lang.Class.forName(name, true, cl)
+ val clazz = java.lang.Class.forName(name, true, cl)
val singleton = clazz getField "MODULE$" get null
singleton
}
// Retrieves the MODULE$ field for the given class name.
- def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
+ def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
try Some(singletonInstance(className, cl))
catch { case _: ClassNotFoundException => None }
}
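For illustration, `singletonInstance` loads the module class (appending `$` if needed) and reads its static `MODULE$` field, so a Scala `object` can be looked up by name at runtime; a hypothetical use (class names chosen for the example):

  val none = ReflectionUtils.singletonInstance("scala.None")          // the scala.None singleton
  val some = ReflectionUtils.singletonInstanceOpt("scala.None$")      // Some(None): both spellings work
  val nope = ReflectionUtils.singletonInstanceOpt("no.such.Module")   // None: class not found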
diff --git a/src/library/scala/reflect/api/Mirror.scala b/src/library/scala/reflect/api/Mirror.scala
index 448dca752c..cea9e1a37d 100644
--- a/src/library/scala/reflect/api/Mirror.scala
+++ b/src/library/scala/reflect/api/Mirror.scala
@@ -13,11 +13,11 @@ trait Mirror extends Universe with RuntimeTypes with TreeBuildUtil {
* to do: throws anything else?
*/
def symbolForName(name: String): Symbol
-
+
/** Return a reference to the companion object of the given class symbol.
*/
def companionInstance(clazz: Symbol): AnyRef
-
+
/** The Scala class symbol corresponding to the runtime class of the given instance.
* @param instance The instance
* @return The class Symbol for the instance
diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala
index c0123ed955..cbfe91e59b 100644
--- a/src/library/scala/reflect/api/Modifier.scala
+++ b/src/library/scala/reflect/api/Modifier.scala
@@ -69,7 +69,7 @@ object Modifier extends immutable.Set[Modifier] {
val parameter = SymbolModifier("parameter")
val preSuper = SymbolModifier("preSuper")
val static = SymbolModifier("static")
-
+
val sourceModifiers: Set[SourceModifier] = SourceModifier.all.toSet
val symbolModifiers: Set[SymbolModifier] = SymbolModifier.all.toSet
val allModifiers: Set[Modifier] = sourceModifiers ++ symbolModifiers
diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index 3a00f21c8c..c72774dfc7 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -6,7 +6,7 @@ package api
* The same string can be a name in both universes.
* Two names are equal if they represent the same string and they are
* members of the same universe.
- *
+ *
* Names are interned. That is, for two names `name1` and `name2`,
* `name1 == name2` implies `name1 eq name2`.
*/
@@ -42,7 +42,7 @@ trait Names {
* Example: `foo_$plus$eq` becomes `foo_+=`
*/
def encoded: String
-
+
/** The decoded name, still represented as a name.
*/
def decodedName: Name
diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala
index 15d754b5b4..44dc2ce1c2 100755
--- a/src/library/scala/reflect/api/Symbols.scala
+++ b/src/library/scala/reflect/api/Symbols.scala
@@ -18,7 +18,7 @@ trait Symbols { self: Universe =>
/** A list of annotations attached to this Symbol.
*/
def annotations: List[self.AnnotationInfo]
-
+
/** Whether this symbol carries an annotation for which the given
* symbol is its typeSymbol.
*/
@@ -99,7 +99,7 @@ trait Symbols { self: Universe =>
* method, or `NoSymbol` if none exists.
*/
def enclosingMethod: Symbol
-
+
/** If this symbol is a package class, this symbol; otherwise the next enclosing
* package class, or `NoSymbol` if none exists.
*/
@@ -170,7 +170,7 @@ trait Symbols { self: Universe =>
* `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`.
*/
def asTypeConstructor: Type // needed by LiftCode
-
+
/** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
*/
def thisPrefix: Type
@@ -181,10 +181,10 @@ trait Symbols { self: Universe =>
def selfType: Type
/** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
- * the current symbol as its owner.
+ * the current symbol as its owner.
*/
def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode
-
+
/** Low-level operation to set the symbol's flags
* @return the symbol itself
*/
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 19bfd09b81..21b55e9c0e 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -55,7 +55,7 @@ trait TreePrinters { self: Universe =>
print(")")
if (typesPrinted)
print(".setType(", tree.tpe, ")")
- case list: List[_] =>
+ case list: List[_] =>
print("List(")
val it = list.iterator
while (it.hasNext) {
@@ -64,16 +64,16 @@ trait TreePrinters { self: Universe =>
}
print(")")
case mods: Modifiers =>
- val parts = collection.mutable.ListBuffer[String]()
+ val parts = collection.mutable.ListBuffer[String]()
parts += "Set(" + mods.modifiers.map(_.sourceString).mkString(", ") + ")"
parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")"
-
+
var keep = 3
if (keep == 3 && mods.annotations.isEmpty) keep -= 1
if (keep == 2 && mods.privateWithin == EmptyTypeName) keep -= 1
if (keep == 1 && mods.modifiers.isEmpty) keep -= 1
-
+
print("Modifiers(", parts.take(keep).mkString(", "), ")")
case name: Name =>
if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala
index 8a91956320..cc8e85b9c8 100755
--- a/src/library/scala/reflect/api/Types.scala
+++ b/src/library/scala/reflect/api/Types.scala
@@ -140,7 +140,7 @@ trait Types { self: Universe =>
* If this is not a singleton type, returns this type itself.
*
* Example:
- *
+ *
* class Outer { class C ; val x: C }
* val o: Outer
* <o.x.type>.widen = o.C
diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala
index ebbd4735e5..2fd9bb6484 100644
--- a/src/library/scala/reflect/macro/Context.scala
+++ b/src/library/scala/reflect/macro/Context.scala
@@ -2,11 +2,11 @@ package scala.reflect
package macro
trait Context extends api.Universe {
-
+
/** Mark a variable as captured; i.e. force boxing in a *Ref type.
*/
def captureVariable(vble: Symbol): Unit
-
+
/** Mark given identifier as a reference to a captured variable itself
* suppressing dereferencing with the `elem` field.
*/
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index b24474f35d..b876869afb 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -25,7 +25,7 @@ import Specializable._
* @since 2.8
*/
// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation {
-
+
class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation {
def this(types: Specializable*) = this(new Group(types.toList))
def this() = this(Everything)
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index edc60a1bb5..77e36f6196 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -97,7 +97,7 @@ object BasicIO {
*
* @param withIn True if the process input should be attached to stdin.
* @param buffer A `StringBuffer` which will receive the process normal
- * output.
+ * output.
* @param log An optional `ProcessLogger` to which the output should be
* sent. If `None`, output will be sent to stderr.
* @return A `ProcessIO` with the characteristics above.
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 22de5544a8..a62d74b1f6 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -142,7 +142,7 @@ private[scala] trait PropertiesTrait {
*/
def isWin = osName startsWith "Windows"
def isMac = javaVendor startsWith "Apple"
-
+
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
// and finally the system property based javaHome.
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index a05a75e0b7..c9bde81317 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -15,7 +15,7 @@ import collection.Seq
/**
- * The `Try` type represents a computation that may either result in an exception,
+ * The `Try` type represents a computation that may either result in an exception,
* or return a success value. It's analogous to the `Either` type.
*/
sealed abstract class Try[+T] {
@@ -55,9 +55,9 @@ sealed abstract class Try[+T] {
def map[U](f: T => U): Try[U]
def collect[U](pf: PartialFunction[T, U]): Try[U]
-
+
def exists(p: T => Boolean): Boolean
-
+
/**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
@@ -77,14 +77,14 @@ sealed abstract class Try[+T] {
* Calls the exceptionHandler with the exception if this is a `Failure`. This is like map for the exception.
*/
def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U]
-
+
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
def toOption = if (isSuccess) Some(get) else None
def toSeq = if (isSuccess) Seq(get) else Seq()
-
+
/**
* Returns the given function applied to the value from this Success or returns this if this is a `Failure`.
* Alias for `flatMap`.
@@ -92,11 +92,11 @@ sealed abstract class Try[+T] {
def andThen[U](f: T => Try[U]): Try[U] = flatMap(f)
/**
- * Transforms a nested `Try`, i.e., a `Try` of type `Try[Try[T]]`,
+ * Transforms a nested `Try`, i.e., a `Try` of type `Try[Try[T]]`,
* into an un-nested `Try`, i.e., a `Try` of type `Try[T]`.
*/
def flatten[U](implicit ev: T <:< Try[U]): Try[U]
-
+
def failed: Try[Throwable]
}
@@ -109,7 +109,7 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
if (rescueException.isDefinedAt(exception)) rescueException(exception) else this
} catch {
case e2 => Failure(e2)
- }
+ }
}
def get: T = throw exception
def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
@@ -118,7 +118,7 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
def map[U](f: T => U): Try[U] = Failure[U](exception)
def collect[U](pf: PartialFunction[T, U]): Try[U] = Failure[U](exception)
def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
if (rescueException.isDefinedAt(exception)) {
Try(rescueException(exception))
} else {
@@ -134,10 +134,10 @@ final case class Success[+T](r: T) extends Try[T] {
def isSuccess = true
def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = Success(r)
def get = r
- def flatMap[U](f: T => Try[U]): Try[U] =
- try f(r)
- catch {
- case e => Failure(e)
+ def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(r)
+ catch {
+ case e => Failure(e)
}
def flatten[U](implicit ev: T <:< Try[U]): Try[U] = r
def foreach[U](f: T => U): Unit = f(r)
@@ -145,7 +145,7 @@ final case class Success[+T](r: T) extends Try[T] {
def collect[U](pf: PartialFunction[T, U]): Try[U] =
if (pf isDefinedAt r) Success(pf(r))
else Failure[U](new NoSuchElementException("Partial function not defined at " + r))
- def filter(p: T => Boolean): Try[T] =
+ def filter(p: T => Boolean): Try[T] =
if (p(r)) this
else Failure(new NoSuchElementException("Predicate does not hold for " + r))
def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
@@ -155,11 +155,11 @@ final case class Success[+T](r: T) extends Try[T] {
object Try {
-
+
def apply[T](r: => T): Try[T] = {
try { Success(r) } catch {
case e => Failure(e)
}
}
-
+
}
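Since the Scaladoc above carries no usage example, here is a short sketch exercising the `Try` combinators defined in this file (illustrative; `parsePort` is a made-up helper):

  import scala.util.Try

  def parsePort(s: String): Try[Int] =
    Try(s.toInt) filter (p => p > 0 && p < 65536)

  parsePort("8080") map (_ + 1)                                       // Success(8081)
  parsePort("oops") recover { case _: NumberFormatException => 80 }   // Success(80)
  parsePort("-1").toOption                                            // None: the filter predicate failed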
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 4004a01ad9..27e9112fce 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -487,7 +487,7 @@ trait Parsers {
}
/** Changes the error message produced by a parser.
- *
+ *
* This doesn't change the behavior of a parser on either
* success or failure, just on error. The semantics are
* slightly different than those obtained by doing `| error(msg)`,