From f4267ccd96a9143c910c66a5b0436aaa64b7c9dc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 18 Sep 2013 00:09:46 -0700 Subject: Cull extraneous whitespace. One last flurry with the broom before I leave you slobs to code in your own filth. Eliminated all the trailing whitespace I could manage, with special prejudice reserved for the test cases which depended on the preservation of trailing whitespace. Was reminded I cannot figure out how to eliminate the trailing space on the "scala> " prompt in repl transcripts. At least reduced the number of such empty prompts by trimming transcript code on the way in. Routed ConsoleReporter's "printMessage" through a trailing whitespace stripping method which might help futureproof against the future of whitespace diseases. Deleted the up-to-40 lines of trailing whitespace found in various library files. It seems like only yesterday we performed whitespace surgery on the whole repo. Clearly it doesn't stick very well. I suggest it would work better to enforce a few requirements on the way in. --- src/actors/scala/actors/AbstractActor.scala | 1 - src/actors/scala/actors/KillActorControl.scala | 2 -- .../actors/scheduler/DrainableForkJoinPool.scala | 1 - .../scala/tools/nsc/OfflineCompilerCommand.scala | 2 +- .../tools/nsc/backend/icode/CheckerException.scala | 2 -- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 4 +-- .../tools/nsc/reporters/ConsoleReporter.scala | 6 +++- .../scala/tools/nsc/settings/FscSettings.scala | 2 +- .../scala/tools/reflect/ToolBoxFactory.scala | 4 +-- .../scala/tools/nsc/interactive/Response.scala | 4 --- src/library/scala/DelayedInit.scala | 4 +-- src/library/scala/Proxy.scala | 2 +- src/library/scala/annotation/migration.scala | 2 +- src/library/scala/collection/BitSetLike.scala | 2 +- src/library/scala/collection/GenMap.scala | 4 --- src/library/scala/collection/GenTraversable.scala | 7 ---- .../scala/collection/GenTraversableOnce.scala | 4 +-- src/library/scala/collection/IterableLike.scala | 2 +- .../scala/collection/LinearSeqOptimized.scala | 4 +-- src/library/scala/collection/Map.scala | 2 +- src/library/scala/collection/SortedMap.scala | 8 ++--- src/library/scala/collection/SortedMapLike.scala | 20 ++++------- src/library/scala/collection/SortedSetLike.scala | 2 +- src/library/scala/collection/TraversableOnce.scala | 2 +- .../scala/collection/TraversableViewLike.scala | 2 -- .../scala/collection/convert/WrapAsJava.scala | 8 ----- .../collection/generic/GenericSeqCompanion.scala | 1 - .../scala/collection/generic/IsSeqLike.scala | 2 +- .../scala/collection/generic/ParFactory.scala | 5 --- .../scala/collection/generic/Shrinkable.scala | 5 --- .../scala/collection/generic/Signalling.scala | 23 ------------ src/library/scala/collection/generic/Sorted.scala | 8 ++--- .../scala/collection/immutable/DefaultMap.scala | 10 ------ src/library/scala/collection/immutable/List.scala | 3 -- .../scala/collection/immutable/ListMap.scala | 8 ++--- .../scala/collection/immutable/MapLike.scala | 4 +-- src/library/scala/collection/immutable/Range.scala | 4 +-- .../scala/collection/immutable/RedBlackTree.scala | 10 +++--- .../scala/collection/immutable/SortedMap.scala | 4 +-- .../collection/immutable/StreamViewLike.scala | 6 ---- .../scala/collection/immutable/TreeMap.scala | 8 +---- .../scala/collection/mutable/FlatHashTable.scala | 22 ++++++------ .../collection/mutable/ImmutableSetAdaptor.scala | 5 --- .../scala/collection/mutable/LinkedHashMap.scala | 12 +++---- .../scala/collection/mutable/LinkedHashSet.scala | 2 +- 
.../scala/collection/mutable/MutableList.scala | 4 --- .../scala/collection/mutable/StackProxy.scala | 2 +- .../scala/collection/parallel/Combiner.scala | 15 -------- .../collection/parallel/ParIterableViewLike.scala | 14 -------- .../scala/collection/parallel/ParMapLike.scala | 20 ----------- src/library/scala/collection/parallel/ParSeq.scala | 3 -- .../scala/collection/parallel/ParSetLike.scala | 25 ------------- .../collection/parallel/PreciseSplitter.scala | 8 ----- .../scala/collection/parallel/TaskSupport.scala | 26 -------------- src/library/scala/collection/parallel/Tasks.scala | 25 ------------- .../collection/parallel/immutable/ParHashMap.scala | 26 -------------- .../collection/parallel/immutable/ParSeq.scala | 7 ---- .../collection/parallel/immutable/ParVector.scala | 18 ---------- .../collection/parallel/mutable/ParHashMap.scala | 24 ------------- .../collection/parallel/mutable/ParHashTable.scala | 13 ------- .../collection/parallel/mutable/ParMapLike.scala | 4 --- .../collection/parallel/mutable/ParSetLike.scala | 41 ---------------------- .../collection/parallel/mutable/ParTrieMap.scala | 20 ----------- src/library/scala/concurrent/Awaitable.scala | 12 +++---- .../scala/concurrent/ExecutionContext.scala | 12 +++---- src/library/scala/concurrent/Future.scala | 2 +- src/library/scala/concurrent/Promise.scala | 21 +++-------- src/library/scala/concurrent/TaskRunner.scala | 1 - .../scala/concurrent/duration/Duration.scala | 4 +-- src/library/scala/concurrent/impl/Promise.scala | 2 +- src/library/scala/concurrent/package.scala | 22 ++++++------ src/library/scala/io/BufferedSource.scala | 6 ++-- src/library/scala/ref/WeakReference.scala | 2 +- src/library/scala/reflect/ClassTag.scala | 2 +- src/library/scala/runtime/Boxed.scala | 9 +---- .../scala/runtime/NonLocalReturnControl.scala | 1 - src/library/scala/runtime/WorksheetSupport.scala | 1 - src/library/scala/transient.scala | 2 -- src/library/scala/util/Properties.scala | 2 +- src/library/scala/volatile.scala | 2 -- .../scala/tools/partest/IcodeComparison.scala | 3 +- src/reflect/scala/reflect/internal/Flags.scala | 4 +-- src/reflect/scala/reflect/internal/Scopes.scala | 2 -- .../reflect/internal/pickling/ByteCodecs.scala | 8 ----- src/reflect/scala/reflect/internal/util/Set.scala | 1 - .../scala/reflect/internal/util/StringOps.scala | 32 ++++++++++++----- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 7 ++-- src/repl/scala/tools/nsc/interpreter/IMain.scala | 6 ---- src/scaladoc/scala/tools/nsc/doc/Index.scala | 3 -- .../tools/nsc/doc/base/CommentFactoryBase.scala | 3 -- .../tools/nsc/doc/model/IndexModelFactory.scala | 3 -- src/scalap/scala/tools/scalap/Arguments.scala | 1 - 92 files changed, 141 insertions(+), 575 deletions(-) (limited to 'src') diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala index 3c6299aab4..28fe689e91 100644 --- a/src/actors/scala/actors/AbstractActor.scala +++ b/src/actors/scala/actors/AbstractActor.scala @@ -27,5 +27,4 @@ trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] { private[actors] def unlinkFrom(from: AbstractActor): Unit private[actors] def exit(from: AbstractActor, reason: AnyRef): Unit - } diff --git a/src/actors/scala/actors/KillActorControl.scala b/src/actors/scala/actors/KillActorControl.scala index 2f1f08e949..0f94bbc8dc 100644 --- a/src/actors/scala/actors/KillActorControl.scala +++ b/src/actors/scala/actors/KillActorControl.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala.actors import 
scala.util.control.ControlThrowable diff --git a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala index 15ce60566a..37710ec037 100644 --- a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala +++ b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala @@ -8,5 +8,4 @@ private class DrainableForkJoinPool(parallelism: Int, maxPoolSize: Int) extends override def drainTasksTo(c: Collection[ _ >: ForkJoinTask[_]]): Int = super.drainTasksTo(c) - } diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala index 2ce2fb3eaa..899aa93a3b 100644 --- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala @@ -27,7 +27,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext val baseDirectory = { val pwd = System.getenv("PWD") if (pwd == null || isWin) Directory.Current getOrElse Directory("/") - else Directory(pwd) + else Directory(pwd) } currentDir.value = baseDirectory.path } diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala index 0856f2f09d..8bcdb6dbd2 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala @@ -3,10 +3,8 @@ * @author Martin Odersky */ - package scala.tools.nsc package backend package icode class CheckerException(s: String) extends Exception(s) - diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 2c8fda85f4..633e71a756 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -93,7 +93,7 @@ trait TypeKinds { self: ICodes => /** * this is directly assignable to other if no coercion or * casting is needed to convert this to other. It's a distinct - * relationship from <:< because on the JVM, BOOL, BYTE, CHAR, + * relationship from <:< because on the JVM, BOOL, BYTE, CHAR, * SHORT need no coercion to INT even though JVM arrays * are covariant, ARRAY[SHORT] is not a subtype of ARRAY[INT] */ @@ -101,7 +101,7 @@ trait TypeKinds { self: ICodes => case INT => this.isIntSizedType case _ => this <:< other } - + /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */ def isWideType: Boolean = false diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index fdb5c72c3d..52c6ddc6ee 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -9,6 +9,7 @@ package reporters import java.io.{ BufferedReader, IOException, PrintWriter } import scala.reflect.internal.util._ +import StringOps._ /** * This class implements a Reporter that displays messages on a text @@ -40,7 +41,10 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr StringOps.countElementsAsString((severity).count, label(severity)) /** Prints the message. */ - def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() } + def printMessage(msg: String) { + writer print trimAllTrailingSpace(msg) + "\n" + writer.flush() + } /** Prints the message with the given position indication. 
*/ def printMessage(posIn: Position, msg: String) { diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala index 34c8e8df9a..8c2b510bfd 100644 --- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala @@ -44,7 +44,7 @@ class FscSettings(error: String => Unit) extends Settings(error) { // we need to ensure the files specified with relative locations are absolutized based on the currentDir (r, args map {a => absolutizePath(a)}) } - + /** * Take an individual path and if it's not absolute turns it into an absolute path based on currentDir. * If it's already absolute then it's left alone. diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index b77a536caf..2d905d5436 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -297,7 +297,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => // reporter doesn't accumulate errors, but the front-end does def throwIfErrors() = { if (frontEnd.hasErrors) throw ToolBoxError( - "reflective compilation has failed: " + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL) + "reflective compilation has failed:" + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL) ) } } @@ -319,7 +319,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings)) if (frontEnd.hasErrors) { throw ToolBoxError( - "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL + + "reflective compilation has failed: cannot initialize the compiler:" + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL) ) } diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala index f36f769ec9..0da400378e 100644 --- a/src/interactive/scala/tools/nsc/interactive/Response.scala +++ b/src/interactive/scala/tools/nsc/interactive/Response.scala @@ -99,7 +99,3 @@ class Response[T] { cancelled = false } } - - - - diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala index 12793e6aa1..cfbbf30793 100644 --- a/src/library/scala/DelayedInit.scala +++ b/src/library/scala/DelayedInit.scala @@ -8,7 +8,7 @@ package scala -/** Classes and objects (but note, not traits) inheriting the `DelayedInit` +/** Classes and objects (but note, not traits) inheriting the `DelayedInit` * marker trait will have their initialization code rewritten as follows: * `code` becomes `delayedInit(code)`. 
* @@ -32,7 +32,7 @@ package scala * val c = new C * } * }}} - * + * * Should result in the following being printed: * {{{ * dummy text, printed before initialization of C diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala index 07fa6e2e8d..7c28e6ea28 100644 --- a/src/library/scala/Proxy.scala +++ b/src/library/scala/Proxy.scala @@ -28,7 +28,7 @@ trait Proxy extends Any { override def hashCode: Int = self.hashCode override def equals(that: Any): Boolean = that match { case null => false - case _ => + case _ => val x = that.asInstanceOf[AnyRef] (x eq this.asInstanceOf[AnyRef]) || (x eq self.asInstanceOf[AnyRef]) || (x equals self) } diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala index adb6de6afd..65bee4c2cb 100644 --- a/src/library/scala/annotation/migration.scala +++ b/src/library/scala/annotation/migration.scala @@ -17,7 +17,7 @@ package scala.annotation * order between Scala 2.7 and 2.8. * * @param message A message describing the change, which is emitted - * by the compiler if the flag `-Xmigration` indicates a version + * by the compiler if the flag `-Xmigration` indicates a version * prior to the changedIn version. * * @param changedIn The version, in which the behaviour change was diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index 6592e49429..8a8af79151 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -102,7 +102,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe } def iterator: Iterator[Int] = iteratorFrom(0) - + override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] { private var current = start private val end = nwords * WordLength diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala index 3d7427981d..d17a2de179 100644 --- a/src/library/scala/collection/GenMap.scala +++ b/src/library/scala/collection/GenMap.scala @@ -11,7 +11,6 @@ package collection import generic._ - /** A trait for all traversable collections which may possibly * have their operations implemented in parallel. * @@ -28,12 +27,9 @@ extends GenMapLike[A, B, GenMap[A, B]] def updated [B1 >: B](key: A, value: B1): GenMap[A, B1] } - object GenMap extends GenMapFactory[GenMap] { def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty /** $mapCanBuildFromInfo */ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B] } - - diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala index b700f49cf6..8705965992 100644 --- a/src/library/scala/collection/GenTraversable.scala +++ b/src/library/scala/collection/GenTraversable.scala @@ -6,15 +6,11 @@ ** |/ ** \* */ - - package scala package collection - import generic._ - /** A trait for all traversable collections which may possibly * have their operations implemented in parallel. 
* @@ -31,10 +27,7 @@ extends GenTraversableLike[A, GenTraversable[A]] def companion: GenericCompanion[GenTraversable] = GenTraversable } - object GenTraversable extends GenTraversableFactory[GenTraversable] { implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] def newBuilder[A] = Traversable.newBuilder } - - diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index e4976d8f2c..a52f43bade 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -368,7 +368,7 @@ trait GenTraversableOnce[+A] extends Any { * @param cmp An ordering to be used for comparing elements. * @tparam B The result type of the function f. * @param f The measuring function. - * @return the first element of this $coll with the largest value measured by function f + * @return the first element of this $coll with the largest value measured by function f * with respect to the ordering `cmp`. * * @usecase def maxBy[B](f: A => B): A @@ -383,7 +383,7 @@ trait GenTraversableOnce[+A] extends Any { * @param cmp An ordering to be used for comparing elements. * @tparam B The result type of the function f. * @param f The measuring function. - * @return the first element of this $coll with the smallest value measured by function f + * @return the first element of this $coll with the smallest value measured by function f * with respect to the ordering `cmp`. * * @usecase def minBy[B](f: A => B): A diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index b043d1f2a6..f79b5afce9 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -171,7 +171,7 @@ self => * fewer elements than size. */ def sliding(size: Int): Iterator[Repr] = sliding(size, 1) - + /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) * @see [[scala.collection.Iterator]], method `sliding` diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index 21bfedf5de..8635b090b9 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -91,7 +91,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } false } - + override /*IterableLike*/ def find(p: A => Boolean): Option[A] = { var these = this @@ -112,7 +112,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } acc } - + override /*IterableLike*/ def foldRight[B](z: B)(f: (A, B) => B): B = if (this.isEmpty) z diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index f37c0993d4..761b65723c 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -52,7 +52,7 @@ object Map extends MapFactory[Map] { def iterator = underlying.iterator override def default(key: A): B = d(key) } - + } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 0705a1e9e0..36e7eae79c 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -34,7 +34,7 @@ object SortedMap extends SortedMapFactory[SortedMap] { def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord) implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B] - + private[collection] trait Default[A, +B] extends SortedMap[A, B] { self => override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = { @@ -43,15 +43,11 @@ object SortedMap extends SortedMapFactory[SortedMap] { b += ((kv._1, kv._2)) b.result() } - + override def - (key: A): SortedMap[A, B] = { val b = newBuilder for (kv <- this; if kv._1 != key) b += kv b.result() } } - } - - - diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala index 3fc8b0dadc..cf5e9c36c7 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala package collection @@ -74,7 +72,7 @@ self => for (e <- elems) m = m + e m } - + override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) @@ -82,7 +80,7 @@ self => override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v} } - + override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) @@ -90,7 +88,7 @@ self => override def keysIteratorFrom(start: A) = self keysIteratorFrom start override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f } - + /** Adds a number of elements provided by a traversable object * and returns a new collection with the added elements. * @@ -98,14 +96,14 @@ self => */ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) - + /** * Creates an iterator over all the key/value pairs * contained in this map having a key greater than or * equal to `start` according to the ordering of * this map. x.iteratorFrom(y) is equivalent * to but often more efficient than x.from(y).iterator. - * + * * @param start The lower bound (inclusive) * on the keys to be returned */ @@ -114,15 +112,11 @@ self => * Creates an iterator over all the values contained in this * map that are associated with a key greater than or equal to `start` * according to the ordering of this map. x.valuesIteratorFrom(y) is - * equivalent to but often more efficient than + * equivalent to but often more efficient than * x.from(y).valuesIterator. 
- * + * * @param start The lower bound (inclusive) * on the keys to be returned */ def valuesIteratorFrom(start: A): Iterator[B] } - - - - diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala index eb2ac38c59..c38ea1f3ce 100644 --- a/src/library/scala/collection/SortedSetLike.scala +++ b/src/library/scala/collection/SortedSetLike.scala @@ -47,7 +47,7 @@ self => * greater than or equal to `start` according to the ordering of * this collection. x.iteratorFrom(y) is equivalent to but will usually * be more efficient than x.from(y).iterator - * + * * @param start The lower-bound (inclusive) of the iterator */ def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start) diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 634807b29f..2fdad0f8f9 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -242,7 +242,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { var minF: B = null.asInstanceOf[B] var minElem: A = null.asInstanceOf[A] var first = true - + for (elem <- self) { val fx = f(elem) if (first || cmp.lt(fx, minF)) { diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index c507e000ee..ca1c450e3f 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -211,5 +211,3 @@ trait TraversableViewLike[+A, override def toString = viewToString } - - diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala index e75a0e2981..b6ebf2ff06 100644 --- a/src/library/scala/collection/convert/WrapAsJava.scala +++ b/src/library/scala/collection/convert/WrapAsJava.scala @@ -257,11 +257,3 @@ trait WrapAsJava { } object WrapAsJava extends WrapAsJava { } - - - - - - - - diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala index 34b20f23a2..fd1e18a029 100644 --- a/src/library/scala/collection/generic/GenericSeqCompanion.scala +++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala @@ -6,7 +6,6 @@ ** |/ ** \* */ - package scala package collection package generic diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala index 189aea4632..4c857ad1bb 100644 --- a/src/library/scala/collection/generic/IsSeqLike.scala +++ b/src/library/scala/collection/generic/IsSeqLike.scala @@ -50,7 +50,7 @@ object IsSeqLike { val conversion = implicitly[String => SeqLike[Char, String]] } - implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } = + implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } = new IsSeqLike[C[A0]] { type A = A0 val conversion = conv diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala index 486e2a115e..4486cea419 100644 --- a/src/library/scala/collection/generic/ParFactory.scala +++ b/src/library/scala/collection/generic/ParFactory.scala @@ -37,8 +37,3 @@ extends GenTraversableFactory[CC] override def apply() = newBuilder[A] } } - - - - - diff --git a/src/library/scala/collection/generic/Shrinkable.scala 
b/src/library/scala/collection/generic/Shrinkable.scala index b7412afde0..b5ec568667 100644 --- a/src/library/scala/collection/generic/Shrinkable.scala +++ b/src/library/scala/collection/generic/Shrinkable.scala @@ -6,7 +6,6 @@ ** |/ ** \* */ - package scala package collection package generic @@ -49,7 +48,3 @@ trait Shrinkable[-A] { */ def --=(xs: TraversableOnce[A]): this.type = { xs.seq foreach -= ; this } } - - - - diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala index e62eb6ff09..021d289c9d 100644 --- a/src/library/scala/collection/generic/Signalling.scala +++ b/src/library/scala/collection/generic/Signalling.scala @@ -10,13 +10,8 @@ package scala package collection package generic - import java.util.concurrent.atomic.AtomicInteger - - - - /** * A message interface serves as a unique interface to the * part of the collection capable of receiving messages from @@ -97,7 +92,6 @@ trait Signalling { def tag: Int } - /** * This signalling implementation returns default values and ignores received signals. */ @@ -110,13 +104,11 @@ class DefaultSignalling extends Signalling with VolatileAbort { def tag = -1 } - /** * An object that returns default values and ignores received signals. */ object IdleSignalling extends DefaultSignalling - /** * A mixin trait that implements abort flag behaviour using volatile variables. */ @@ -126,7 +118,6 @@ trait VolatileAbort extends Signalling { override def abort() = abortflag = true } - /** * A mixin trait that implements index flag behaviour using atomic integers. * The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater` @@ -154,7 +145,6 @@ trait AtomicIndexFlag extends Signalling { } } - /** * An implementation of the signalling interface using delegates. */ @@ -175,25 +165,12 @@ trait DelegatedSignalling extends Signalling { def tag = signalDelegate.tag } - /** * Class implementing delegated signalling. */ class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling - /** * Class implementing delegated signalling, but having its own distinct `tag`. */ class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg) - - - - - - - - - - - diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala index 3876da3275..ab0d443a03 100644 --- a/src/library/scala/collection/generic/Sorted.scala +++ b/src/library/scala/collection/generic/Sorted.scala @@ -79,14 +79,14 @@ trait Sorted[K, +This <: Sorted[K, This]] { else until(next) } - + /** * Creates an iterator over all the keys(or elements) contained in this * collection greater than or equal to `start` - * according to the ordering of this collection. x.keysIteratorFrom(y) - * is equivalent to but often more efficient than + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than * x.from(y).keysIterator. 
- * + * * @param start The lower bound (inclusive) * on the keys to be returned */ diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala index 42a03e90ee..ce34b84486 100755 --- a/src/library/scala/collection/immutable/DefaultMap.scala +++ b/src/library/scala/collection/immutable/DefaultMap.scala @@ -50,13 +50,3 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => b.result() } } - - - - - - - - - - diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index b11368acdf..57618d64a5 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala package collection package immutable @@ -402,4 +400,3 @@ object List extends SeqFactory[List] { /** Only used for list serialization */ @SerialVersionUID(0L - 8476791151975527571L) private[scala] case object ListSerializeEnd - diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 49295d92dd..59468a3186 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -157,12 +157,12 @@ extends AbstractMap[A, B] * @return the value associated with the given key. */ override def apply(k: A): B1 = apply0(this, k) - - - @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = + + + @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k) else if (k == cur.key) cur.value - else apply0(cur.tail, k) + else apply0(cur.tail, k) /** Checks if this map maps `key` to a value and return the * value if it exists. diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index f6041464e7..94a5b7929a 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -86,14 +86,14 @@ self => */ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] = ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _) - + /** Filters this map by retaining only keys satisfying a predicate. * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. */ override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B] - + /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. 
* @return a map view which maps every key of this map diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 72c40e889f..c234d35756 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -65,7 +65,7 @@ extends scala.collection.AbstractSeq[Int] || (start < end && step < 0) || (start == end && !isInclusive) ) - @deprecated("This method will be made private, use `length` instead.", "2.11") + @deprecated("This method will be made private, use `length` instead.", "2.11") final val numRangeElements: Int = { if (step == 0) throw new IllegalArgumentException("step cannot be 0.") else if (isEmpty) 0 @@ -77,7 +77,7 @@ extends scala.collection.AbstractSeq[Int] } @deprecated("This method will be made private, use `last` instead.", "2.11") final val lastElement = start + (numRangeElements - 1) * step - @deprecated("This method will be made private.", "2.11") + @deprecated("This method will be made private.", "2.11") final val terminalElement = start + numRangeElements * step override def last = if (isEmpty) Nil.last else lastElement diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 48bccde0e8..0dad106b29 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -367,7 +367,7 @@ object RedBlackTree { private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = { // This is like drop(n-1), but only counting black nodes @tailrec - def findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] = + def findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] = if (zipper eq null) { sys.error("Defect: unexpected empty zipper while computing range") } else if (isBlackTree(zipper.head)) { @@ -400,14 +400,14 @@ object RedBlackTree { zippedTree } } - + // Null optimized list implementation for tree rebalancing. null presents Nil. 
private[this] final class NList[A](val head: A, val tail: NList[A]) private[this] final object NList { - + def cons[B](x: B, xs: NList[B]): NList[B] = new NList(x, xs) - + def foldLeft[A, B](xs: NList[A], z: B)(f: (B, A) => B): B = { var acc = z var these = xs @@ -417,7 +417,7 @@ object RedBlackTree { } acc } - + } /* diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 4b9fa81a8c..f1493551ab 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -83,7 +83,7 @@ self => override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) - override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} + override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v} } @@ -91,7 +91,7 @@ self => override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) - override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))} + override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))} override def keysIteratorFrom(start : A) = self keysIteratorFrom start override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f } diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala index ccab032cfd..c2eb85815d 100644 --- a/src/library/scala/collection/immutable/StreamViewLike.scala +++ b/src/library/scala/collection/immutable/StreamViewLike.scala @@ -71,9 +71,3 @@ extends SeqView[A, Coll] override def stringPrefix = "StreamView" } - - - - - - diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 4d2ec579db..8416b72ede 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala package collection package immutable @@ -194,7 +192,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi override def keysIterator: Iterator[A] = RB.keysIterator(tree) override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - + override def valuesIterator: Iterator[B] = RB.valuesIterator(tree) override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start)) @@ -203,7 +201,3 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f) } - - - - diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 1cdf150cb8..293faeca2d 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ 
b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -107,7 +107,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { } /** Finds an entry in the hash table if such an element exists. */ - protected def findEntry(elem: A): Option[A] = + protected def findEntry(elem: A): Option[A] = findElemImpl(elem) match { case null => None case entry => Some(entryToElem(entry)) @@ -136,10 +136,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { protected def addElem(elem: A) : Boolean = { addEntry(elemToEntry(elem)) } - + /** * Add an entry (an elem converted to an entry via elemToEntry) if not yet in - * table. + * table. * @return Returns `true` if a new elem was added, `false` otherwise. */ protected def addEntry(newEntry : AnyRef) : Boolean = { @@ -156,10 +156,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { nnSizeMapAdd(h) if (tableSize >= threshold) growTable() true - + } - /** + /** * Removes an elem from the hash table returning true if the element was found (and thus removed) * or false if it didn't exist. */ @@ -231,7 +231,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { if (table(i) != null && !containsElem(entryToElem(table(i)))) assert(assertion = false, i+" "+table(i)+" "+table.mkString) } - + /* Size map handling code */ @@ -374,7 +374,7 @@ private[collection] object FlatHashTable { final def seedGenerator = new ThreadLocal[scala.util.Random] { override def initialValue = new scala.util.Random } - + private object NullSentinel { override def hashCode = 0 override def toString = "NullSentinel" @@ -421,18 +421,18 @@ private[collection] object FlatHashTable { val rotated = (improved >>> rotation) | (improved << (32 - rotation)) rotated } - + /** * Elems have type A, but we store AnyRef in the table. Plus we need to deal with * null elems, which need to be stored as NullSentinel */ - protected final def elemToEntry(elem : A) : AnyRef = + protected final def elemToEntry(elem : A) : AnyRef = if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef] - + /** * Does the inverse translation of elemToEntry */ - protected final def entryToElem(entry : AnyRef) : A = + protected final def entryToElem(entry : AnyRef) : A = (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A] } diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala index 3e64747832..dc6d319b45 100644 --- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala @@ -6,13 +6,10 @@ ** |/ ** \* */ - - package scala package collection package mutable - /** This class can be used as an adaptor to create mutable sets from * immutable set implementations. 
Only method `empty` has * to be redefined if the immutable set on which this mutable set is @@ -49,6 +46,4 @@ extends AbstractSet[A] def -=(elem: A): this.type = { set = set - elem; this } override def clear(): Unit = { set = set.empty } - } - diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index 536f320402..b54f11be6e 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -95,25 +95,25 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } else Iterator.empty.next() } - + protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) { override def empty = LinkedHashMap.empty } - + override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p) protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) { override def empty = LinkedHashMap.empty } - + override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f) - + protected class DefaultKeySet extends super.DefaultKeySet { override def empty = LinkedHashSet.empty } - + override def keySet: scala.collection.Set[A] = new DefaultKeySet - + override def keysIterator: Iterator[A] = new AbstractIterator[A] { private var cur = firstEntry def hasNext = cur ne null diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index d89566793f..cd51b79b65 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -81,7 +81,7 @@ class LinkedHashSet[A] extends AbstractSet[A] if (hasNext) { val res = cur.key; cur = cur.later; res } else Iterator.empty.next() } - + override def foreach[U](f: A => U) { var cur = firstEntry while (cur ne null) { diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index 5727b12975..a0d3ee0ef0 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala package collection package mutable @@ -151,10 +149,8 @@ extends AbstractSeq[A] bf ++= seq bf.result() } - } - object MutableList extends SeqFactory[MutableList] { implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala index 7d776b99c3..15b3a6ceca 100644 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ b/src/library/scala/collection/mutable/StackProxy.scala @@ -59,7 +59,7 @@ trait StackProxy[A] extends Stack[A] with Proxy { self.push(elem) this } - + /** Returns the top element of the stack. This method will not remove * the element from the stack. An error is signaled if there is no * element on the stack. 
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index 68df572517..abccf5d402 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -6,17 +6,13 @@ ** |/ ** \* */ - package scala package collection.parallel - import scala.collection.Parallel import scala.collection.mutable.Builder import scala.collection.generic.Sizing - - /** The base trait for all combiners. * A combiner incremental collection construction just like * a regular builder, but also implements an efficient merge operation of two builders @@ -90,10 +86,8 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { val res = result() setTaskSupport(res, combinerTaskSupport) } - } - /* private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] { abstract override def result = { @@ -102,12 +96,3 @@ private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combine } } */ - - - - - - - - - diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala index 1ec0ff9c32..5a7a5f5601 100644 --- a/src/library/scala/collection/parallel/ParIterableViewLike.scala +++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala @@ -21,8 +21,6 @@ import scala.collection.generic.CanCombineFrom import scala.collection.parallel.immutable.ParRange import scala.language.implicitConversions - - /** A template view of a non-strict view of parallel iterable collection. * * '''Note:''' Regular view traits have type parameters used to carry information @@ -190,16 +188,4 @@ self => protected[this] def newSubtask(p: IterableSplitter[T]) = new Force(cbf, p) override def merge(that: Force[U, That]) = result = result combine that.result } - } - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala index ee14324c41..d2b15c727a 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -6,13 +6,9 @@ ** |/ ** \* */ - package scala package collection.parallel - - - import scala.collection.MapLike import scala.collection.GenMapLike import scala.collection.Map @@ -21,10 +17,6 @@ import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.IdleSignalling import scala.collection.generic.Signalling - - - - /** A template trait for mutable parallel maps. This trait is to be mixed in * with concrete parallel maps to override the representation type. 
* @@ -147,15 +139,3 @@ self => // note - should not override toMap (could be mutable) } - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala index b4a30e5dc2..2c883ba8fe 100644 --- a/src/library/scala/collection/parallel/ParSeq.scala +++ b/src/library/scala/collection/parallel/ParSeq.scala @@ -42,12 +42,9 @@ trait ParSeq[+T] extends GenSeq[T] override def stringPrefix = getClass.getSimpleName } - object ParSeq extends ParFactory[ParSeq] { implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T] - } diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala index a50d2ae430..4e9a2e5751 100644 --- a/src/library/scala/collection/parallel/ParSetLike.scala +++ b/src/library/scala/collection/parallel/ParSetLike.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - package scala package collection.parallel - - import scala.collection.SetLike import scala.collection.GenSetLike import scala.collection.GenSet @@ -45,26 +42,4 @@ extends GenSetLike[T, Repr] def diff(that: GenSet[T]): Repr = sequentially { _ diff that } - } - - - - - - - - - - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala index 2eb202ce05..4b22934a29 100644 --- a/src/library/scala/collection/parallel/PreciseSplitter.scala +++ b/src/library/scala/collection/parallel/PreciseSplitter.scala @@ -9,10 +9,8 @@ package scala package collection.parallel - import scala.collection.Seq - /** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters * that traverse disjoint subsets of arbitrary sizes. * @@ -56,10 +54,4 @@ trait PreciseSplitter[+T] extends Splitter[T] { def psplit(sizes: Int*): Seq[PreciseSplitter[T]] def split: Seq[PreciseSplitter[T]] - } - - - - - diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala index 84bb5e425b..9064018d46 100644 --- a/src/library/scala/collection/parallel/TaskSupport.scala +++ b/src/library/scala/collection/parallel/TaskSupport.scala @@ -6,18 +6,13 @@ ** |/ ** \* */ - package scala package collection.parallel - - import java.util.concurrent.ThreadPoolExecutor import scala.concurrent.forkjoin.ForkJoinPool import scala.concurrent.ExecutionContext - - /** A trait implementing the scheduling of a parallel collection operation. * * Parallel collections are modular in the way operations are scheduled. Each @@ -54,7 +49,6 @@ import scala.concurrent.ExecutionContext */ trait TaskSupport extends Tasks - /** A task support that uses a fork join pool to schedule tasks. * * @see [[scala.collection.parallel.TaskSupport]] for more information. @@ -70,7 +64,6 @@ extends TaskSupport with AdaptiveWorkStealingForkJoinTasks class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool) extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks - /** A task support that uses an execution context to schedule tasks. 
* * It can be used with the default execution context implementation in the @@ -86,22 +79,3 @@ extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks */ class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global) extends TaskSupport with ExecutionContextTasks - - - - - - - - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 4aa11b25da..f8d0c6043a 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -9,18 +9,12 @@ package scala package collection.parallel - - import java.util.concurrent.ThreadPoolExecutor - import scala.concurrent.forkjoin._ import scala.concurrent.ExecutionContext import scala.util.control.Breaks._ - import scala.annotation.unchecked.uncheckedVariance - - trait Task[R, +Tp] { type Result = R @@ -436,17 +430,14 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool { } def parallelismLevel = forkJoinPool.getParallelism - } - object ForkJoinTasks { val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool() // scala.parallel.forkjoinpool // defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors) // defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors) } - /* Some boilerplate due to no deep mixin composition. Not sure if it can be done differently without them. */ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks { @@ -457,7 +448,6 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS } def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b) - } @deprecated("Use `AdaptiveWorkStealingForkJoinTasks` instead.", "2.11.0") @@ -469,12 +459,9 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW } def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b) - } - trait ExecutionContextTasks extends Tasks { - def executionContext = environment val environment: ExecutionContext @@ -494,16 +481,4 @@ trait ExecutionContextTasks extends Tasks { def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task def parallelismLevel = driver.parallelismLevel - } - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index 854d0ba918..06455ba006 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -9,8 +9,6 @@ package scala package collection.parallel.immutable - - import scala.collection.parallel.ParMapLike import scala.collection.parallel.Combiner import scala.collection.parallel.IterableSplitter @@ -24,8 +22,6 @@ import scala.collection.immutable.{ HashMap, TrieIterator } import scala.annotation.unchecked.uncheckedVariance import scala.collection.parallel.Task - - /** Immutable parallel hash map, based on hash tries. 
* * $paralleliterableinfo @@ -136,10 +132,8 @@ self => println("other kind of node") } } - } - /** $factoryInfo * @define Coll `immutable.ParHashMap` * @define coll immutable parallel hash map @@ -158,7 +152,6 @@ object ParHashMap extends ParMapFactory[ParHashMap] { var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0) } - private[parallel] abstract class HashMapCombiner[K, V] extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) { //self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] => @@ -331,30 +324,11 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V } def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) } - } - private[parallel] object HashMapCombiner { def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] private[immutable] val rootbits = 5 private[immutable] val rootsize = 1 << 5 } - - - - - - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala index 6e98b3102d..f0502fbbcb 100644 --- a/src/library/scala/collection/parallel/immutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala @@ -6,12 +6,10 @@ ** |/ ** \* */ - package scala package collection package parallel.immutable - import scala.collection.generic.GenericParTemplate import scala.collection.generic.GenericCompanion import scala.collection.generic.GenericParCompanion @@ -36,7 +34,6 @@ extends scala.collection/*.immutable*/.GenSeq[T] override def toSeq: ParSeq[T] = this } - /** $factoryInfo * @define Coll `mutable.ParSeq` * @define coll mutable parallel sequence @@ -45,9 +42,5 @@ object ParSeq extends ParFactory[ParSeq] { implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T] def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T] - def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T] } - - - diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala index 548e7112c7..c2c1d042e1 100644 --- a/src/library/scala/collection/parallel/immutable/ParVector.scala +++ b/src/library/scala/collection/parallel/immutable/ParVector.scala @@ -6,14 +6,10 @@ ** |/ ** \* */ - package scala package collection package parallel.immutable - - - import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory} import scala.collection.parallel.ParSeqLike import scala.collection.parallel.Combiner @@ -23,8 +19,6 @@ import immutable.Vector import immutable.VectorBuilder import immutable.VectorIterator - - /** Immutable parallel vectors, based on vectors. 
* * $paralleliterableinfo @@ -83,11 +77,8 @@ extends ParSeq[T] splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator]) } } - } - - /** $factoryInfo * @define Coll `immutable.ParVector` * @define coll immutable parallel vector @@ -101,8 +92,6 @@ object ParVector extends ParFactory[ParVector] { def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]] } - - private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] { //self: EnvironmentPassingCombiner[T, ParVector[T]] => var sz = 0 @@ -136,11 +125,4 @@ private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[ vectors ++= that.vectors this } - } - - - - - - diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 42a3302c91..bb3737f18e 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -6,13 +6,10 @@ ** |/ ** \* */ - package scala package collection.parallel package mutable - - import scala.collection.generic._ import scala.collection.mutable.DefaultEntry import scala.collection.mutable.HashEntry @@ -20,8 +17,6 @@ import scala.collection.mutable.HashTable import scala.collection.mutable.UnrolledBuffer import scala.collection.parallel.Task - - /** A parallel hash map. * * `ParHashMap` is a parallel map which internally keeps elements within a hash table. @@ -145,10 +140,8 @@ self => else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next) check(table(i)) } - } - /** $factoryInfo * @define Coll `mutable.ParHashMap` * @define coll parallel hash map @@ -163,7 +156,6 @@ object ParHashMap extends ParMapFactory[ParHashMap] { implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V] } - private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int) extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks) with scala.collection.mutable.HashTable.HashUtils[K] @@ -298,10 +290,8 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau } def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) } - } - private[parallel] object ParHashMapCombiner { private[mutable] val discriminantbits = 5 private[mutable] val numblocks = 1 << discriminantbits @@ -310,17 +300,3 @@ private[parallel] object ParHashMapCombiner { def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] } - - - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala index a6fada3d42..423b891d48 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala @@ -6,19 +6,13 @@ ** |/ ** \* */ - package scala package collection package parallel.mutable - - - import scala.collection.mutable.HashEntry import scala.collection.parallel.IterableSplitter - - /** Provides functionality for hash tables with linked list buckets, * enriching the data structure by 
fulfilling certain requirements * for their parallel construction and iteration. @@ -146,11 +140,4 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec c } } - } - - - - - - diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala index d96b5482fe..42027f5bac 100644 --- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala @@ -10,8 +10,6 @@ package scala package collection.parallel package mutable - - import scala.collection.generic._ import scala.collection.mutable.Cloneable import scala.collection.generic.Growable @@ -51,6 +49,4 @@ extends scala.collection.GenMapLike[K, V, Repr] def -(key: K) = this.clone() -= key def clear(): Unit - } - diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala index 1cfc14b094..13af5ed649 100644 --- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala @@ -6,7 +6,6 @@ ** |/ ** \* */ - package scala package collection package parallel.mutable @@ -49,43 +48,3 @@ self => // note: should not override toSet } - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala index 82f2717132..a1dc37cec9 100644 --- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala @@ -9,8 +9,6 @@ package scala package collection.parallel.mutable - - import scala.collection.generic._ import scala.collection.parallel.Combiner import scala.collection.parallel.IterableSplitter @@ -24,8 +22,6 @@ import scala.collection.concurrent.INode import scala.collection.concurrent.TrieMap import scala.collection.concurrent.TrieMapIterator - - /** Parallel TrieMap collection. * * It has its bulk operations parallelized, but uses the snapshot operation @@ -117,10 +113,8 @@ extends ParMap[K, V] def shouldSplitFurther = howmany > 1 override def merge(that: Size) = result = result + that.result } - } - private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean) extends TrieMapIterator[K, V](lev, ct, mustInit) with IterableSplitter[(K, V)] @@ -155,7 +149,6 @@ extends TrieMapIterator[K, V](lev, ct, mustInit) def remaining: Int = totalsize - iterated } - /** Only used within the `ParTrieMap`. 
*/ private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] { @@ -173,24 +166,11 @@ private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrie } override def canBeShared = true - } - object ParTrieMap extends ParMapFactory[ParTrieMap] { - def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V] - def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V] implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V] - } - - - - - - - - diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala index 652a23471f..dff83874ba 100644 --- a/src/library/scala/concurrent/Awaitable.scala +++ b/src/library/scala/concurrent/Awaitable.scala @@ -17,7 +17,7 @@ import scala.concurrent.duration.Duration /** * An object that may eventually be completed with a result value of type `T` which may be * awaited using blocking methods. - * + * * The [[Await]] object provides methods that allow accessing the result of an `Awaitable` * by blocking the current thread until the `Awaitable` has been completed or a timeout has * occurred. @@ -26,9 +26,9 @@ trait Awaitable[+T] { /** * Await the "completed" state of this `Awaitable`. - * + * * '''''This method should not be called directly; use [[Await.ready]] instead.''''' - * + * * @param atMost * maximum wait time, which may be negative (no waiting is done), * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive @@ -41,12 +41,12 @@ trait Awaitable[+T] { @throws(classOf[TimeoutException]) @throws(classOf[InterruptedException]) def ready(atMost: Duration)(implicit permit: CanAwait): this.type - + /** * Await and return the result (of type `T`) of this `Awaitable`. - * + * * '''''This method should not be called directly; use [[Await.result]] instead.''''' - * + * * @param atMost * maximum wait time, which may be negative (no waiting is done), * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index 68513f9c80..fa264e5d7f 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -18,15 +18,15 @@ import scala.util.Try */ @implicitNotFound("Cannot find an implicit ExecutionContext, either require one yourself or import ExecutionContext.Implicits.global") trait ExecutionContext { - + /** Runs a block of code on this execution context. */ def execute(runnable: Runnable): Unit - + /** Reports that an asynchronous computation failed. */ def reportFailure(@deprecatedName('t) cause: Throwable): Unit - + /** Prepares for the execution of a task. Returns the prepared * execution context. A valid implementation of `prepare` is one * that simply returns `this`. @@ -62,7 +62,7 @@ object ExecutionContext { */ implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor) } - + /** Creates an `ExecutionContext` from the given `ExecutorService`. */ def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService = @@ -71,7 +71,7 @@ object ExecutionContext { /** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter. 
*/ def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) - + /** Creates an `ExecutionContext` from the given `Executor`. */ def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor = @@ -80,7 +80,7 @@ object ExecutionContext { /** Creates an `ExecutionContext` from the given `Executor` with the default Reporter. */ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) - + /** The default reporter simply prints the stack trace of the `Throwable` to System.err. */ def defaultReporter: Throwable => Unit = _.printStackTrace() diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 411b89701b..ec6de84a9d 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -254,7 +254,7 @@ trait Future[+T] extends Awaitable[T] { case Success(v) => try f(v) match { // If possible, link DefaultPromises to avoid space leaks case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p) - case fut => fut onComplete p.complete + case fut => fut onComplete p.complete } catch { case NonFatal(t) => p failure t } } p.future diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala index cdde019cd0..cfb1dda01f 100644 --- a/src/library/scala/concurrent/Promise.scala +++ b/src/library/scala/concurrent/Promise.scala @@ -70,7 +70,7 @@ trait Promise[T] { other onComplete { this complete _ } this } - + /** Attempts to complete this promise with the specified future, once that future is completed. * * @return This promise @@ -115,38 +115,25 @@ trait Promise[T] { def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause)) } - - object Promise { - /** Creates a promise object which can be completed with a value. - * + * * @tparam T the type of the value in the promise * @return the newly created `Promise` object */ def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]() /** Creates an already completed Promise with the specified exception. - * + * * @tparam T the type of the value in the promise * @return the newly created `Promise` object */ def failed[T](exception: Throwable): Promise[T] = new impl.Promise.KeptPromise[T](Failure(exception)) /** Creates an already completed Promise with the specified result. 
- * + * * @tparam T the type of the value in the promise * @return the newly created `Promise` object */ def successful[T](result: T): Promise[T] = new impl.Promise.KeptPromise[T](Success(result)) - } - - - - - - - - - diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala index 98c212d9fa..1ea23b35e8 100644 --- a/src/library/scala/concurrent/TaskRunner.scala +++ b/src/library/scala/concurrent/TaskRunner.scala @@ -24,5 +24,4 @@ private[scala] trait TaskRunner { def execute[S](task: Task[S]): Unit def shutdown(): Unit - } diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala index 9a8844b489..1b50b7fa56 100644 --- a/src/library/scala/concurrent/duration/Duration.scala +++ b/src/library/scala/concurrent/duration/Duration.scala @@ -221,7 +221,7 @@ object Duration { final def toMinutes: Long = fail("toMinutes") final def toHours: Long = fail("toHours") final def toDays: Long = fail("toDays") - + final def toCoarsest: Duration = this } @@ -532,7 +532,7 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] { * Duration(48, HOURS).toCoarsest // Duration(2, DAYS) * Duration(5, SECONDS).toCoarsest // Duration(5, SECONDS) * }}} - */ + */ def toCoarsest: Duration } diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 35511856ee..418d859d79 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -155,7 +155,7 @@ private[concurrent] object Promise { /** Get the root promise for this promise, compressing the link chain to that * promise if necessary. - * + * * For promises that are not linked, the result of calling * `compressedRoot()` will the promise itself. However for linked promises, * this method will traverse each link until it locates the root promise at diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index 50a66a622a..2fe14a9c1a 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -19,9 +19,9 @@ package object concurrent { type TimeoutException = java.util.concurrent.TimeoutException /** Starts an asynchronous computation and returns a `Future` object with the result of that computation. - * + * * The result becomes available once the asynchronous computation is completed. - * + * * @tparam T the type of the result * @param body the asynchronous computation * @param executor the execution context on which the future is run @@ -30,7 +30,7 @@ package object concurrent { def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body) /** Creates a promise object which can be completed with a value or an exception. - * + * * @tparam T the type of the value in the promise * @return the newly created `Promise` object */ @@ -38,7 +38,7 @@ package object concurrent { /** Used to designate a piece of code which potentially blocks, allowing the current [[BlockContext]] to adjust * the runtime's behavior. - * Properly marking blocking code may improve performance or avoid deadlocks. + * Properly marking blocking code may improve performance or avoid deadlocks. * * Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`. 
* @@ -53,22 +53,22 @@ package object concurrent { package concurrent { @implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.") sealed trait CanAwait - + /** * Internal usage only, implementation detail. */ private[concurrent] object AwaitPermission extends CanAwait - + /** * `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances. */ object Await { /** * Await the "completed" state of an `Awaitable`. - * + * * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that * the underlying [[ExecutionContext]] is prepared to properly manage the blocking. - * + * * @param awaitable * the `Awaitable` to be awaited * @param atMost @@ -84,13 +84,13 @@ package concurrent { @throws(classOf[InterruptedException]) def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type = blocking(awaitable.ready(atMost)(AwaitPermission)) - + /** * Await and return the result (of type `T`) of an `Awaitable`. - * + * * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that * the underlying [[ExecutionContext]] to properly detect blocking and ensure that there are no deadlocks. - * + * * @param awaitable * the `Awaitable` to be awaited * @param atMost diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala index 832c7b23f9..1c87a1f421 100644 --- a/src/library/scala/io/BufferedSource.scala +++ b/src/library/scala/io/BufferedSource.scala @@ -41,7 +41,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod takeWhile (_ != -1) map (_.toChar) ) - + private def decachedReader: BufferedReader = { // Don't want to lose a buffered char sitting in iter either. Yes, // this is ridiculous, but if I can't get rid of Source, and all the @@ -61,7 +61,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod } else charReader } - + class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] { private val lineReader = decachedReader @@ -84,7 +84,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod } override def getLines(): Iterator[String] = new BufferedLineIterator - + /** Efficiently converts the entire remaining input into a string. */ override def mkString = { // Speed up slurping of whole data set in the simplest cases. 
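[editor's note] The scala.concurrent hunks above only strip whitespace, but they walk past the Promise factory methods and the Await entry points, so a minimal usage sketch of that documented API may help. This assumes a 2.11-era standard library; the object name and values are illustrative only.

    import scala.concurrent.{ Await, Future, Promise }
    import scala.concurrent.duration._

    object ConcurrentSketch {
      def main(args: Array[String]): Unit = {
        // Promise.successful creates an already completed promise,
        // as documented in the Promise.scala hunk above.
        val p: Promise[Int] = Promise.successful(42)
        val f: Future[Int]  = p.future

        // Await.result blocks (wrapped in `blocking` internally) until the
        // future completes or the timeout expires; Duration.Inf waits forever.
        val n: Int = Await.result(f, 1.second)
        println(n) // 42
      }
    }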
diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala index 6eb4899e3f..c8fb262a08 100644 --- a/src/library/scala/ref/WeakReference.scala +++ b/src/library/scala/ref/WeakReference.scala @@ -29,7 +29,7 @@ object WeakReference { /** Optionally returns the referenced value, or `None` if that value no longer exists */ def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = { - val x = wr.underlying.get + val x = wr.underlying.get if (x != null) Some(x) else None } } diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index d699e34ffc..33c5cee783 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -28,7 +28,7 @@ import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass } * scala> mkArray("Japan","Brazil","Germany") * res1: Array[String] = Array(Japan, Brazil, Germany) * }}} - * + * * See [[scala.reflect.api.TypeTags]] for more examples, or the * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]] * for more details. diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala index 855f0ff41a..933444773d 100644 --- a/src/library/scala/runtime/Boxed.scala +++ b/src/library/scala/runtime/Boxed.scala @@ -6,14 +6,7 @@ ** |/ ** \* */ - - package scala package runtime - -trait Boxed { - -} - - +trait Boxed { } diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala index 16b2fec6d7..a926956acf 100644 --- a/src/library/scala/runtime/NonLocalReturnControl.scala +++ b/src/library/scala/runtime/NonLocalReturnControl.scala @@ -9,7 +9,6 @@ package scala package runtime - import scala.util.control.ControlThrowable class NonLocalReturnControl[@specialized T](val key: AnyRef, val value: T) extends ControlThrowable { diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala index 2a0064494b..d86f8873aa 100644 --- a/src/library/scala/runtime/WorksheetSupport.scala +++ b/src/library/scala/runtime/WorksheetSupport.scala @@ -91,4 +91,3 @@ object WorksheetSupport { } class StopException extends Exception - diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala index 8ff7c582b4..ec87439093 100644 --- a/src/library/scala/transient.scala +++ b/src/library/scala/transient.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala import scala.annotation.meta._ diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index 8b63a73638..13f2362d00 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -147,7 +147,7 @@ private[scala] trait PropertiesTrait { // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110. /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */ - def isMac = osName startsWith "Mac OS X" + def isMac = osName startsWith "Mac OS X" /* Some runtime values. 
*/ private[scala] def isAvian = javaVmName contains "Avian" diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala index bea216eb17..c612732329 100644 --- a/src/library/scala/volatile.scala +++ b/src/library/scala/volatile.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala import scala.annotation.meta._ diff --git a/src/partest-extras/scala/tools/partest/IcodeComparison.scala b/src/partest-extras/scala/tools/partest/IcodeComparison.scala index 5da51c9d58..7122703918 100644 --- a/src/partest-extras/scala/tools/partest/IcodeComparison.scala +++ b/src/partest-extras/scala/tools/partest/IcodeComparison.scala @@ -6,7 +6,6 @@ package scala.tools.partest import scala.tools.partest.nest.FileManager.compareContents -import scala.compat.Platform.EOL /** A class for testing icode. All you need is this in a * partest source file -- @@ -41,7 +40,7 @@ abstract class IcodeComparison extends DirectTest { override def extraSettings: String = "-usejavacp" /** Compile the test code and return the contents of all - * (sorted) .icode files, which are immediately deleted. + * (sorted) .icode files, which are immediately deleted. * @param arg0 at least one arg is required * @param args must include -Xprint-icode:phase */ diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index b8e3407824..c286ea53c6 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -118,7 +118,7 @@ class ModifierFlags { final val PRESUPER = 1L << 37 // value is evaluated before super call final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode - final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method + final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method /** Symbols which are marked ARTIFACT. (Expand this list?) 
* @@ -440,7 +440,7 @@ class Flags extends ModifierFlags { case TRIEDCOOKING => "" // (1L << 44) case SYNCHRONIZED => "" // (1L << 45) case ARTIFACT => "" // (1L << 46) - case DEFAULTMETHOD => "" // (1L << 47) + case DEFAULTMETHOD => "" // (1L << 47) case 0x1000000000000L => "" // (1L << 48) case 0x2000000000000L => "" // (1L << 49) case 0x4000000000000L => "" // (1L << 50) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 8d20c8e546..485d4d5ddd 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -460,6 +460,4 @@ trait Scopes extends api.Scopes { self: SymbolTable => class ErrorScope(owner: Symbol) extends Scope private final val maxRecursions = 1000 - } - diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala index eb266e8125..8615e34fad 100644 --- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala +++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala @@ -212,11 +212,3 @@ object ByteCodecs { decode7to8(xs, len) } } - - - - - - - - diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala index 75dcfaa59b..635bfb05e4 100644 --- a/src/reflect/scala/reflect/internal/util/Set.scala +++ b/src/reflect/scala/reflect/internal/util/Set.scala @@ -23,5 +23,4 @@ abstract class Set[T <: AnyRef] { findEntry(x) ne null def toList = iterator.toList - } diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index 4d98a344d8..14f349f502 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -7,7 +7,11 @@ \* */ package scala -package reflect.internal.util +package reflect +package internal +package util + +import scala.compat.Platform.EOL /** This object provides utility methods to extract elements * from Strings. @@ -18,14 +22,26 @@ package reflect.internal.util trait StringOps { def oempty(xs: String*) = xs filterNot (x => x == null || x == "") def ojoin(xs: String*): String = oempty(xs: _*) mkString " " - def longestCommonPrefix(xs: List[String]): String = { - if (xs.isEmpty || xs.contains("")) "" - else xs.head.head match { - case ch => - if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail)) - else "" - } + def longestCommonPrefix(xs: List[String]): String = xs match { + case Nil => "" + case xs if xs contains "" => "" + case x :: xs => + val ch = x charAt 0 + if (xs exists (_.head != ch)) "" + else "" + ch + longestCommonPrefix(xs map (_ substring 1)) + } + /** Like String#trim, but trailing whitespace only. + */ + def trimTrailingSpace(s: String): String = { + var end = s.length + while (end > 0 && s.charAt(end - 1).isWhitespace) + end -= 1 + + if (end == s.length) s + else s.substring(0, end) } + /** Breaks the string into lines and strips each line before reassembling. 
*/ + def trimAllTrailingSpace(s: String): String = s.lines map trimTrailingSpace mkString EOL def decompose(str: String, sep: Char): List[String] = { def ws(start: Int): List[String] = diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index ed56016bce..984a752964 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -23,6 +23,7 @@ import scala.collection.generic.Clearable import scala.concurrent.{ ExecutionContext, Await, Future, future } import ExecutionContext.Implicits._ import java.io.{ BufferedReader, FileReader } +import scala.reflect.internal.util.StringOps._ /** The Scala interactive shell. It provides a read-eval-print loop * around the Interpreter class. @@ -548,7 +549,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } else try { val s = what // line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur) - val (start, len) = + val (start, len) = if ((s indexOf '+') > 0) { val (a,b) = s splitAt (s indexOf '+') (a.toInt, b.drop(1).toInt) @@ -885,12 +886,10 @@ object ILoop { override def write(str: String) = { // completely skip continuation lines if (str forall (ch => ch.isWhitespace || ch == '|')) () - // print a newline on empty scala prompts - else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n") else super.write(str) } } - val input = new BufferedReader(new StringReader(code)) { + val input = new BufferedReader(new StringReader(code.trim + "\n")) { override def readLine(): String = { val s = super.readLine() // helping out by printing the line being interpreted. diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index ee4ff59498..a60de01673 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -8,23 +8,17 @@ package tools.nsc package interpreter import PartialFunction.cond - import scala.language.implicitConversions - import scala.collection.mutable - import scala.concurrent.{ Future, ExecutionContext } - import scala.reflect.runtime.{ universe => ru } import scala.reflect.{ BeanProperty, ClassTag, classTag } import scala.reflect.internal.util.{ BatchSourceFile, SourceFile } - import scala.tools.util.PathResolver import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings } import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, StackTraceOps } import scala.tools.nsc.util.Exceptional.unwrap - import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable} /** An interpreter for Scala code. 
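[editor's note] The StringOps hunk above is where this commit adds behavior rather than merely deleting whitespace: trimTrailingSpace drops trailing whitespace from a single string, and trimAllTrailingSpace applies it line by line; the ILoop hunk that follows routes transcript input through the same cleanup by trimming pasted code on the way in. A standalone sketch of the two helpers, copied from the hunk rather than calling the internal scala.reflect.internal.util.StringOps API, and assuming the pre-2.13 String#lines iterator (the original joins with Platform.EOL; "\n" is used here for brevity):

    object TrimSketch {
      // Trailing-whitespace-only trim, as added to StringOps in the hunk above.
      def trimTrailingSpace(s: String): String = {
        var end = s.length
        while (end > 0 && s.charAt(end - 1).isWhitespace) end -= 1
        if (end == s.length) s else s.substring(0, end)
      }

      // Line-by-line variant, mirroring trimAllTrailingSpace.
      def trimAllTrailingSpace(s: String): String =
        s.lines.map(trimTrailingSpace).mkString("\n")

      def main(args: Array[String]): Unit = {
        assert(trimTrailingSpace("  indented code   ") == "  indented code")
        assert(trimAllTrailingSpace("a  \nb\t") == "a\nb")
      }
    }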
diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala index f9b9eecdb3..84545e9201 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Index.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala @@ -7,11 +7,8 @@ package scala.tools.nsc.doc import scala.collection._ - trait Index { - type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]] def firstLetterIndex: Map[Char, SymbolMap] - } diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index cd1d604843..a933c35c99 100755 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -925,11 +925,8 @@ trait CommentFactoryBase { this: MemberLookupBase => buffer.substring(start, offset) } - /* CHARS CLASSES */ def isWhitespace(c: Char) = c == ' ' || c == '\t' - } - } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala index 53410fd4ad..643a089aae 100755 --- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -51,9 +51,6 @@ object IndexModelFactory { gather(universe.rootPackage) result.toMap - } - } - } diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala index 123516bb2d..41346d13c0 100644 --- a/src/scalap/scala/tools/scalap/Arguments.scala +++ b/src/scalap/scala/tools/scalap/Arguments.scala @@ -163,5 +163,4 @@ class Arguments { bindings get option flatMap (_ get key) def getOthers: List[String] = others.toList - } -- cgit v1.2.3
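[editor's note] One further illustration, for the ClassTag.scala hunk earlier in this patch: its doc comment shows an mkArray REPL transcript, and the pattern it documents is generic array creation via a context-bound ClassTag. The mkArray definition below is reconstructed to match that transcript and is an assumption, not a quote from the file.

    import scala.reflect.ClassTag

    object ClassTagSketch {
      // Array[T] needs a ClassTag for T at runtime; the context bound supplies it.
      def mkArray[T : ClassTag](elems: T*): Array[T] = Array[T](elems: _*)

      def main(args: Array[String]): Unit = {
        val xs = mkArray("Japan", "Brazil", "Germany")
        println(xs.mkString(", ")) // Japan, Brazil, Germany
      }
    }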