Diffstat (limited to 'src/library')
-rw-r--r--  src/library/rootdoc.txt | 21
-rw-r--r--  src/library/scala/App.scala | 10
-rw-r--r--  src/library/scala/Array.scala | 10
-rw-r--r--  src/library/scala/BoxingConversions.scala | 5
-rw-r--r--  src/library/scala/Option.scala | 4
-rw-r--r--  src/library/scala/PartialFunction.scala | 18
-rw-r--r--  src/library/scala/Product.scala | 2
-rw-r--r--  src/library/scala/StringContext.scala | 73
-rw-r--r--  src/library/scala/annotation/migration.scala | 2
-rw-r--r--  src/library/scala/annotation/static.scala | 20
-rw-r--r--  src/library/scala/collection/GenMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 12
-rw-r--r--  src/library/scala/collection/GenSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 5
-rw-r--r--  src/library/scala/collection/IndexedSeqLike.scala | 4
-rwxr-xr-x  src/library/scala/collection/IndexedSeqOptimized.scala | 5
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 4
-rw-r--r--  src/library/scala/collection/Iterator.scala | 5
-rw-r--r--  src/library/scala/collection/LinearSeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/MapLike.scala | 2
-rw-r--r--  src/library/scala/collection/Searching.scala | 4
-rw-r--r--  src/library/scala/collection/SeqExtractors.scala | 2
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 9
-rw-r--r--  src/library/scala/collection/SetLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 4
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 6
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 99
-rw-r--r--  src/library/scala/collection/convert/DecorateAsJava.scala | 4
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 2
-rwxr-xr-x  src/library/scala/collection/generic/FilterMonadic.scala | 2
-rw-r--r--  src/library/scala/collection/generic/IsSeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/ParFactory.scala | 2
-rw-r--r--  src/library/scala/collection/generic/SliceInterval.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 11
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/RedBlack.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 50
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 26
-rw-r--r--  src/library/scala/collection/immutable/package.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/Builder.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 20
-rw-r--r--  src/library/scala/collection/mutable/HashMap.scala | 29
-rw-r--r--  src/library/scala/collection/mutable/HashSet.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 60
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 43
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashSet.scala | 87
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 44
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 22
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/Subscriber.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 13
-rw-r--r--  src/library/scala/collection/parallel/ParMap.scala | 37
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashMap.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParIterable.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParMap.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParSeq.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParSet.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 7
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala | 35
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 16
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashTable.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParIterable.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMap.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSeq.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSet.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 14
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 7
-rw-r--r--  src/library/scala/compat/Platform.scala | 2
-rw-r--r--  src/library/scala/concurrent/Awaitable.scala | 2
-rw-r--r--  src/library/scala/concurrent/duration/Deadline.scala | 81
-rw-r--r--  src/library/scala/concurrent/duration/Duration.scala (renamed from src/library/scala/concurrent/util/Duration.scala) | 253
-rw-r--r--  src/library/scala/concurrent/duration/DurationConversions.scala | 92
-rw-r--r--  src/library/scala/concurrent/duration/package.scala | 75
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 16
-rw-r--r--  src/library/scala/concurrent/package.scala | 2
-rw-r--r--  src/library/scala/concurrent/util/duration/Classifier.scala | 9
-rw-r--r--  src/library/scala/concurrent/util/duration/IntMult.scala | 18
-rw-r--r--  src/library/scala/concurrent/util/duration/package.scala | 31
-rw-r--r--  src/library/scala/io/Codec.scala | 2
-rw-r--r--  src/library/scala/io/Position.scala | 2
-rw-r--r--  src/library/scala/language.scala | 87
-rw-r--r--  src/library/scala/math/Ordering.scala | 3
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 30
-rw-r--r--  src/library/scala/reflect/Manifest.scala | 4
-rwxr-xr-x  src/library/scala/reflect/NameTransformer.scala | 3
-rw-r--r--  src/library/scala/reflect/base/AnnotationInfos.scala | 44
-rw-r--r--  src/library/scala/reflect/base/Attachments.scala | 42
-rw-r--r--  src/library/scala/reflect/base/Base.scala | 773
-rw-r--r--  src/library/scala/reflect/base/BuildUtils.scala | 72
-rw-r--r--  src/library/scala/reflect/base/Constants.scala | 20
-rw-r--r--  src/library/scala/reflect/base/Exprs.scala | 76
-rw-r--r--  src/library/scala/reflect/base/FlagSets.scala | 16
-rw-r--r--  src/library/scala/reflect/base/MirrorOf.scala | 88
-rw-r--r--  src/library/scala/reflect/base/Mirrors.scala | 12
-rw-r--r--  src/library/scala/reflect/base/Names.scala | 58
-rw-r--r--  src/library/scala/reflect/base/Positions.scala | 17
-rw-r--r--  src/library/scala/reflect/base/Scopes.scala | 41
-rw-r--r--  src/library/scala/reflect/base/StandardDefinitions.scala | 72
-rw-r--r--  src/library/scala/reflect/base/StandardNames.scala | 34
-rw-r--r--  src/library/scala/reflect/base/Symbols.scala | 289
-rw-r--r--  src/library/scala/reflect/base/TagInterop.scala | 15
-rw-r--r--  src/library/scala/reflect/base/TreeCreator.scala | 6
-rw-r--r--  src/library/scala/reflect/base/Trees.scala | 1455
-rw-r--r--  src/library/scala/reflect/base/TypeCreator.scala | 6
-rw-r--r--  src/library/scala/reflect/base/TypeTags.scala | 277
-rw-r--r--  src/library/scala/reflect/base/Types.scala | 426
-rw-r--r--  src/library/scala/reflect/base/Universe.scala | 66
-rw-r--r--  src/library/scala/reflect/macros/internal/package.scala | 13
-rw-r--r--  src/library/scala/reflect/package.scala | 13
-rw-r--r--  src/library/scala/runtime/RichDouble.scala | 3
-rw-r--r--  src/library/scala/runtime/RichFloat.scala | 3
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 43
-rw-r--r--  src/library/scala/runtime/SeqCharSequence.scala | 3
-rw-r--r--  src/library/scala/runtime/StringAdd.scala | 5
-rw-r--r--  src/library/scala/runtime/StringFormat.scala | 6
-rw-r--r--  src/library/scala/runtime/Tuple2Zipped.scala | 2
-rw-r--r--  src/library/scala/runtime/Tuple3Zipped.scala | 2
-rw-r--r--  src/library/scala/sys/Prop.scala | 3
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/sys/process/ProcessBuilderImpl.scala | 4
-rw-r--r--  src/library/scala/sys/process/ProcessImpl.scala | 2
-rw-r--r--  src/library/scala/testing/Show.scala | 2
-rw-r--r--  src/library/scala/throws.scala | 6
-rw-r--r--  src/library/scala/util/Either.scala | 2
-rw-r--r--  src/library/scala/util/Sorting.scala | 5
-rw-r--r--  src/library/scala/util/automata/SubsetConstruction.scala | 4
-rw-r--r--  src/library/scala/util/control/NoStackTrace.scala | 3
-rw-r--r--  src/library/scala/util/hashing/Hashing.scala | 15
-rw-r--r--  src/library/scala/util/parsing/combinator/Parsers.scala | 2
-rwxr-xr-x  src/library/scala/xml/Elem.scala | 2
-rwxr-xr-x  src/library/scala/xml/Utility.scala | 2
-rw-r--r--  src/library/scala/xml/dtd/ContentModelParser.scala | 15
-rw-r--r--  src/library/scala/xml/dtd/Scanner.scala | 4
-rw-r--r--  src/library/scala/xml/factory/NodeFactory.scala | 2
-rw-r--r--  src/library/scala/xml/include/sax/XIncluder.scala | 2
-rwxr-xr-x  src/library/scala/xml/parsing/MarkupParser.scala | 4
-rw-r--r--  src/library/scala/xml/parsing/MarkupParserCommon.scala | 4
155 files changed, 1061 insertions, 4720 deletions
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index da27a0084b..0722d808bf 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -4,24 +4,25 @@ This is the documentation for the Scala standard library.
The [[scala]] package contains core types.
-scala.[[scala.collection]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable]] and [[scala.collection.mutable]] data structures are available, with immutable as the default. The [[scala.collection.parallel]] collections provide automatic parallel operation.
+[[scala.collection `scala.collection`]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable `scala.collection.immutable`]] and [[scala.collection.mutable `scala.collection.mutable`]] data structures are available, with immutable as the default. The [[scala.collection.parallel `scala.collection.parallel`]] collections provide automatic parallel operation.
Other important packages include:
- - scala.[[scala.actors]] - Concurrency framework inspired by Erlang.
- - scala.[[scala.io]] - Input and output.
- - scala.[[scala.math]] - Basic math functions and additional numeric types.
- - scala.[[scala.sys]] - Interaction with other processes and the operating system.
- - scala.util.[[scala.util.matching]] - Pattern matching in text using regular expressions.
- - scala.util.parsing.[[scala.util.parsing.combinator]] - Composable combinators for parsing.
- - scala.[[scala.xml]] - XML parsing, manipulation, and serialization.
+ - [[scala.actors `scala.actors`]] - Concurrency framework inspired by Erlang.
+ - [[scala.io `scala.io`]] - Input and output.
+ - [[scala.math `scala.math`]] - Basic math functions and additional numeric types.
+ - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system.
+ - [[scala.util.matching `scala.util.matching`]] - Pattern matching in text using regular expressions.
+ - [[scala.util.parsing.combinator `scala.util.parsing.combinator`]] - Composable combinators for parsing.
+ - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization.
Many other packages exist. See the complete list on the left.
== Automatic imports ==
-Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default.
+Identifiers in the scala package and the [[scala.Predef `scala.Predef`]] object are always in scope by default.
-Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, `List` is an alias for scala.collection.immutable.[[scala.collection.immutable.List]].
+Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, `List` is an alias for
+[[scala.collection.immutable.List `scala.collection.immutable.List`]].
Other aliases refer to classes provided by the underlying platform. For example, on the JVM, `String` is an alias for `java.lang.String`.
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 85d2f9075e..a1e5e74e2f 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -22,6 +22,16 @@ import scala.collection.mutable.ListBuffer
*
* `args` returns the current command line arguments as an array.
*
+ * ==Caveats==
+ *
+ * '''''It should be noted that this trait is implemented using the [[DelayedInit]]
+ * functionality, which means that fields of the object will not have been initialized
+ * before the main method has been executed.'''''
+ *
+ * It should also be noted that the `main` method will not normally need to be overridden:
+ * the purpose is to turn the whole class body into the “main method”. You should only
+ * choose to override it if you know what you are doing.
+ *
* @author Martin Odersky
* @version 2.1, 15/02/2011
*/
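For illustration, a minimal sketch of the usage pattern the caveat above describes (the object name `Hello` is made up). The whole object body becomes the "main method" via DelayedInit, so `greeting` is only assigned when `main` actually runs, not at construction time.

  object Hello extends App {
    // Evaluated only once main() executes, because App relies on DelayedInit.
    val greeting = "Hello, " + args.mkString(" ")
    println(greeting)
  }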
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index c61a255e3b..0b8550be37 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -48,6 +48,16 @@ class FallbackArrayBuilding {
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
+ val emptyBooleanArray = new Array[Boolean](0)
+ val emptyByteArray = new Array[Byte](0)
+ val emptyCharArray = new Array[Char](0)
+ val emptyDoubleArray = new Array[Double](0)
+ val emptyFloatArray = new Array[Float](0)
+ val emptyIntArray = new Array[Int](0)
+ val emptyLongArray = new Array[Long](0)
+ val emptyShortArray = new Array[Short](0)
+ val emptyObjectArray = new Array[Object](0)
+
implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
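The new `empty*Array` values added above are preallocated zero-length arrays. A hedged sketch of how calling code might reuse one instead of allocating a fresh empty array each time (the helper name `charsOf` is hypothetical):

  def charsOf(s: String): Array[Char] =
    if (s.isEmpty) Array.emptyCharArray   // shared, allocation-free empty array
    else s.toCharArray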
diff --git a/src/library/scala/BoxingConversions.scala b/src/library/scala/BoxingConversions.scala
deleted file mode 100644
index fd1bd6c121..0000000000
--- a/src/library/scala/BoxingConversions.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package scala
-abstract class BoxingConversions[Boxed, Unboxed] {
- def box(x: Unboxed): Boxed
- def unbox(x: Boxed): Unboxed
-}
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index d241b86b91..755071a14f 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -196,7 +196,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
/** Necessary to keep $option from being implicitly converted to
* [[scala.collection.Iterable]] in `for` comprehensions.
*/
- def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+ @inline final def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
/** We need a whole WithFilter class to honor the "doesn't create a new
* collection" contract even though it seems unlikely to matter much in a
@@ -255,7 +255,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @return the result of applying `pf` to this $option's
* value (if possible), or $none.
*/
- def collect[B](pf: PartialFunction[A, B]): Option[B] =
+ @inline final def collect[B](pf: PartialFunction[A, B]): Option[B] =
if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
/** Returns this $option if it is nonempty,
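A small sketch of the two members marked `@inline final` above: `withFilter` backs the guard in a `for` comprehension over an Option without converting it to an Iterable, and `collect` combines a definedness test with a transformation.

  val opt: Option[Int] = Some(42)
  val doubledIfEven = for (n <- opt if n % 2 == 0) yield n * 2              // Some(84), via withFilter
  val described     = opt collect { case n if n > 0 => "positive: " + n }   // Some("positive: 42")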
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 7c6e2d2e3e..ce109626cc 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -156,7 +156,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
object PartialFunction {
/** Composite function produced by `PartialFunction#orElse` method
*/
- private final class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
def apply(x: A): B = f1.applyOrElse(x, f2)
@@ -175,7 +175,7 @@ object PartialFunction {
/** Composite function produced by `PartialFunction#andThen` method
*/
- private final class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
+ private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
def isDefinedAt(x: A) = pf.isDefinedAt(x)
def apply(x: A): C = k(pf(x))
@@ -207,11 +207,11 @@ object PartialFunction {
*
* Here `fallback_pf` is used as both unique marker object and special fallback function that returns it.
*/
- private[this] final val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf }
- @inline private final def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]]
- @inline private final def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
+ private[this] val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf }
+ private def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]]
+ private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
- private final class Lifted[-A, +B] (val pf: PartialFunction[A, B])
+ private class Lifted[-A, +B] (val pf: PartialFunction[A, B])
extends scala.runtime.AbstractFunction1[A, Option[B]] {
def apply(x: A): Option[B] = {
@@ -220,7 +220,7 @@ object PartialFunction {
}
}
- private final class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
+ private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
def isDefinedAt(x: A): Boolean = f(x).isDefined
override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
@@ -241,9 +241,9 @@ object PartialFunction {
*/
def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) }
- private[this] final val constFalse: Any => Boolean = { _ => false}
+ private[this] val constFalse: Any => Boolean = { _ => false}
- private[this] final val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
+ private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
def isDefinedAt(x: Any) = false
def apply(x: Any) = throw new MatchError(x)
override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
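A sketch of the combinators whose helper classes lose their `final` modifier above; the values shown are illustrative only.

  val one: PartialFunction[Int, String]   = { case 1 => "one" }
  val other: PartialFunction[Int, String] = { case _ => "other" }
  val combined = one orElse other             // built with the OrElse class
  val shouted  = one andThen (_.toUpperCase)  // built with the AndThen class
  val lifted   = one.lift                     // Int => Option[String], built with Lifted
  assert(combined(2) == "other" && lifted(2) == None)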
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 8c42c60d98..2c6838f6b3 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -35,7 +35,7 @@ trait Product extends Any with Equals {
/** An iterator over all the elements of this product.
* @return in the default implementation, an `Iterator[Any]`
*/
- def productIterator: Iterator[Any] = new collection.AbstractIterator[Any] {
+ def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] {
private var c: Int = 0
private val cmax = productArity
def hasNext = c < cmax
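For reference, a minimal example of the `productIterator` touched above, using a hypothetical case class (every case class is a Product):

  case class Point(x: Int, y: Int)
  val fields = Point(1, 2).productIterator.toList   // List(1, 2)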
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 678ab30168..4078168bb3 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -8,10 +8,43 @@
package scala
-/** A class to support string interpolation.
- * This class supports string interpolation as outlined in Scala SIP-11.
- * It needs to be fully documented once the SIP is accepted.
+/** This class provides the basic mechanism to do String Interpolation.
+ * String Interpolation allows users
+ * to embed variable references directly in *processed* string literals.
+ * Here's an example:
+ * {{{
+ * val name = "James"
+ * println(s"Hello, $name") // Hello, James
+ * }}}
*
+ * Any processed string literal is rewritten as an instantiation and
+ * method call against this class. For example:
+ * {{{
+ * s"Hello, $name"
+ * }}}
+ *
+ * is rewritten to be:
+ *
+ * {{{
+ * StringContext("Hello, ", "").s(name)
+ * }}}
+ *
+ * By default, this class provides the `raw`, `s` and `f` methods as
+ * available interpolators.
+ *
+ * To provide your own string interpolator, create an implicit class
+ * which adds a method to `StringContext`. Here's an example:
+ * {{{
+ * implicit class JsonHelper(val sc: StringContext) extends AnyVal {
+ * def json(args: Any*): JSONObject = ...
+ * }
+ * val x: JSONObject = json"{ a: $a }"
+ * }}}
+ *
+ * Here the `JsonHelper` extension class implicitly adds the `json` method to
+ * `StringContext` which can be used for `json` string literals.
+ *
+ * @since 2.10.0
* @param parts The parts that make up the interpolated string,
* without the expressions that get inserted by interpolation.
*/
@@ -33,6 +66,20 @@ case class StringContext(parts: String*) {
*
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
+ * Here's an example of usage:
+ * {{{
+ * val name = "James"
+ * println(s"Hello, $name") // Hello, James
+ * }}}
+ * In this example, the expression $name is replaced with the `toString` of the
+ * variable `name`.
+ * The `s` interpolator can take the `toString` of any arbitrary expression within
+ * a `${}` block, for example:
+ * {{{
+ * println(s"1 + 1 = ${1 + 1}")
+ * }}}
+ * will print the string `1 + 1 = 2`.
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
@@ -47,6 +94,16 @@ case class StringContext(parts: String*) {
* It inserts its arguments between corresponding parts of the string context.
* As opposed to the simple string interpolator `s`, this one does not treat
* standard escape sequences as defined in the Scala specification.
+ *
+ * For example, the raw processed string `raw"a\nb"` is equal to the scala string `"a\\nb"`.
+ *
+ * ''Note:'' Even when using the raw interpolator, Scala will preprocess unicode escapes.
+ * For example:
+ * {{{
+ * scala> raw"\u0123"
+ * res0: String = ģ
+ * }}}
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
@@ -77,6 +134,13 @@ case class StringContext(parts: String*) {
* specifier. All specifiers allowed in Java format strings are handled, and in the same
* way they are treated in Java.
*
+ * For example:
+ * {{{
+ * val height = 1.9d
+ * val name = "James"
+ * println(f"$name%s is $height%2.2f meters tall") // James is 1.90 meters tall
+ * }}}
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
@@ -96,7 +160,8 @@ case class StringContext(parts: String*) {
* string literally. This is achieved by replacing each such occurrence by the
* format specifier `%%`.
*/
- // The implementation is magically hardwired into `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ // Using the mechanism implemented in `scala.tools.reflect.FastTrack`
def f(args: Any*): String = ??? // macro
}
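A compilable variant of the custom-interpolator pattern shown in the new documentation above; the `shout` interpolator and the enclosing `Interp` object are made-up names for illustration.

  object Interp {
    implicit class ShoutHelper(val sc: StringContext) extends AnyVal {
      // Reuses the standard `s` interpolator, then upper-cases the result.
      def shout(args: Any*): String = sc.s(args: _*).toUpperCase
    }
  }
  import Interp._
  val who = "world"
  println(shout"hello, $who")   // HELLO, WORLD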
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index f60c827620..a55c7006a1 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -25,6 +25,6 @@ package scala.annotation
* @since 2.8
*/
private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation {
- @deprecated("Use the constructor taking two Strings instead.", "2.10")
+ @deprecated("Use the constructor taking two Strings instead.", "2.10.0")
def this(majorVersion: Int, minorVersion: Int, message: String) = this(message, majorVersion + "." + minorVersion)
}
diff --git a/src/library/scala/annotation/static.scala b/src/library/scala/annotation/static.scala
deleted file mode 100644
index f2955c756c..0000000000
--- a/src/library/scala/annotation/static.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that marks a member in the companion object as static
- * and ensures that the compiler generates static fields/methods for it.
- * This is important for Java interoperability and performance reasons.
- *
- * @since 2.10
- */
-final class static extends StaticAnnotation {
- // TODO document exact semantics above!
-}
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index b6c90d4d2a..3ea45e3810 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -31,7 +31,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
// This hash code must be symmetric in the contents but ought not
// collide trivially.
- override def hashCode() = util.hashing.MurmurHash3.mapHash(seq)
+ override def hashCode()= scala.util.hashing.MurmurHash3.mapHash(seq)
/** Returns the value associated with a key, or a default value if the key is not contained in the map.
* @param key the key.
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index a77cb05960..6380e9380a 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -116,7 +116,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * @return the index of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
@@ -132,12 +132,12 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
- * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
* @inheritdoc
- *
+ *
* $mayNotTerminateInf
*
*/
@@ -147,7 +147,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * @return the index of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
@@ -163,7 +163,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @param end the end index.
* @tparam B the type of the element `elem`.
- * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
@@ -465,7 +465,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Hashcodes for $Coll produce a value from the hashcodes of all the
* elements of the $coll.
*/
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq)
+ override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq)
/** The equals method for arbitrary sequences. Compares this sequence to
* some other object.
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index 18eb31da03..ef5f14ed55 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -127,5 +127,5 @@ extends GenIterableLike[A, Repr]
// Calling map on a set drops duplicates: any hashcode collisions would
// then be dropped before they can be added.
// Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.hashing.MurmurHash3.setHash(seq)
+ override def hashCode()= scala.util.hashing.MurmurHash3.setHash(seq)
}
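The MurmurHash3-based `hashCode` overrides above (and in GenMapLike) use hashes that are symmetric in the elements, so equal sets or maps built in different orders hash the same. A quick sketch of that property:

  import scala.util.hashing.MurmurHash3
  val h1 = MurmurHash3.setHash(Set(1, 2, 3))
  val h2 = MurmurHash3.setHash(Set(3, 2, 1))
  assert(h1 == h2)   // order-independent, as the comments above require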
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index a872bc0948..9167280910 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -261,11 +261,12 @@ trait GenTraversableOnce[+A] extends Any {
* @tparam B the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
+ * `Nil` for list concatenation or `0` for summation) and may be evaluated
+ * more than once
* @param seqop an operator used to accumulate results within a partition
* @param combop an associative operator used to combine results from different partitions
*/
- def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B
+ def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B
/** Applies a binary operator to all elements of this $coll, going right to left.
* $willNotTerminateInf
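A brief sketch of `aggregate` as redocumented above: `seqop` folds within a partition and `combop` merges partition results; making `z` by-name matters because a parallel collection may evaluate it once per partition.

  val xs = 1 to 100
  val sum    = xs.aggregate(0)(_ + _, _ + _)        // sequential: combop is never used
  val parSum = xs.par.aggregate(0)(_ + _, _ + _)    // parallel: z may be evaluated per partition
  assert(sum == 5050 && parSum == 5050)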
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index f79a9d2c66..7cac6154b9 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -41,7 +41,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
self =>
def seq: IndexedSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
+ override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
@@ -90,7 +90,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
override /*IterableLike*/
def iterator: Iterator[A] = new Elements(0, length)
- /** Overridden for efficiency */
+ /* Overridden for efficiency */
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
copyToBuffer(result)
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 9d03a11db9..b471c304ab 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.ArrayBuffer
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 7ef3b39576..ead5633e00 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-package scala.collection
-
+package scala
+package collection
import generic._
import immutable.{ List, Stream }
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 9989a8d9e8..e12b8d231c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ArrayBuffer
import scala.annotation.migration
@@ -758,7 +759,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*
* @param elem the element to test.
* @return `true` if this iterator produces some value that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * is equal (as determined by `==`) to `elem`, `false` otherwise.
* @note Reuse: $consumesIterator
*/
def contains(elem: Any): Boolean = exists(_ == elem)
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 8e72c5618c..b873ae964d 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -50,7 +50,7 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
def seq: LinearSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
+ override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
override /*IterableLike*/
def iterator: Iterator[A] = new AbstractIterator[A] {
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 3877f07089..adc92fa687 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -319,7 +319,7 @@ self =>
res
}
- /** Overridden for efficiency. */
+ /* Overridden for efficiency. */
override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala
index c1f7f4cae6..33e50365ee 100644
--- a/src/library/scala/collection/Searching.scala
+++ b/src/library/scala/collection/Searching.scala
@@ -39,7 +39,7 @@ object Searching {
* The sequence should be sorted with the same `Ordering` before calling; otherwise,
* the results are undefined.
*
- * @see [[scala.math.IndexedSeq]]
+ * @see [[scala.collection.IndexedSeq]]
* @see [[scala.math.Ordering]]
* @see [[scala.collection.SeqLike]], method `sorted`
*
@@ -63,7 +63,7 @@ object Searching {
* The sequence should be sorted with the same `Ordering` before calling; otherwise,
* the results are undefined.
*
- * @see [[scala.math.IndexedSeq]]
+ * @see [[scala.collection.IndexedSeq]]
* @see [[scala.math.Ordering]]
* @see [[scala.collection.SeqLike]], method `sorted`
*
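For context, a hedged sketch of the binary-search API these corrected `@see` links belong to, assuming the implicit wrapper exported by `scala.collection.Searching`:

  import scala.collection.Searching._
  val xs = Vector(1, 3, 5, 7, 9)   // must already be sorted by the implicit Ordering
  xs.search(5)                     // Found(2)
  xs.search(4)                     // InsertionPoint(2)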
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
index de9ff93521..20ea7f54b7 100644
--- a/src/library/scala/collection/SeqExtractors.scala
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -11,7 +11,7 @@ object +: {
/** An extractor used to init/last deconstruct sequences. */
object :+ {
/** Splits a sequence into init :+ tail.
- * @return Some(init, tail) if sequence is non-empty. None otherwise.
+ * @return Some((init, tail)) if sequence is non-empty. None otherwise.
*/
def unapply[T,Coll <: SeqLike[T, Coll]](
t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] =
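A short example of the `+:` and `:+` extractors whose documentation is corrected above (the matches are non-exhaustive for brevity):

  val headTail = List(1, 2, 3) match { case head +: tail => (head, tail) }     // (1, List(2, 3))
  val initLast = Vector(1, 2, 3) match { case init :+ last => (init, last) }   // (Vector(1, 2), 3)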
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 3388d584db..a1749a480b 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -6,13 +6,14 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.Ordering
+import scala.math.{ min, max, Ordering }
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
@@ -734,8 +735,8 @@ object SeqLike {
*/
private def kmpSearch[B](S: Seq[B], m0: Int, m1: Int, W: Seq[B], n0: Int, n1: Int, forward: Boolean): Int = {
// Check for redundant case when target has single valid element
- @inline def clipR(x: Int, y: Int) = if (x<y) x else -1
- @inline def clipL(x: Int, y: Int) = if (x>y) x else -1
+ def clipR(x: Int, y: Int) = if (x < y) x else -1
+ def clipL(x: Int, y: Int) = if (x > y) x else -1
if (n1 == n0+1) {
if (forward)
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index b359044f0d..a8ffa7b691 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -78,7 +78,7 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /** Overridden for efficiency. */
+ /* Overridden for efficiency. */
override def toSeq: Seq[A] = toBuffer[A]
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 411d17e935..ce0b130b86 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -40,7 +40,7 @@ import scala.language.higherKinds
* a non-strict collection class may defer computation of some of their
* elements until after the instance is available as a value.
* A typical example of a non-strict collection class is a
- * [[scala.collection.immutable/Stream]].
+ * [[scala.collection.immutable.Stream]].
* A more general class of examples are `TraversableViews`.
*
* If a collection is an instance of an ordered collection class, traversing
@@ -501,7 +501,7 @@ trait TraversableLike[+A, +Repr] extends Any
else sliceWithKnownDelta(n, Int.MaxValue, -n)
def slice(from: Int, until: Int): Repr =
- sliceWithKnownBound(math.max(from, 0), until)
+ sliceWithKnownBound(scala.math.max(from, 0), until)
// Precondition: from >= 0, until > 0, builder already configured for building.
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index d77d278fca..a61d1354dc 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -184,7 +184,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
- def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
+ def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
@@ -366,9 +366,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
object TraversableOnce {
- @deprecated("use OnceCanBuildFrom instead")
+ @deprecated("use OnceCanBuildFrom instead", "2.10.0")
def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
- @deprecated("use MonadOps instead")
+ @deprecated("use MonadOps instead", "2.10.0")
def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 7609910b65..82f62f3c85 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -25,13 +25,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
def this(g: Gen) = this(null, g)
- @inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
+ def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
- @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
+ def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
- final def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct)
+ def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct)
- @inline final def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = {
+ def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = {
val m = /*READ*/mainnode
val prevval = /*READ*/m.prev
if (prevval eq null) m
@@ -70,7 +70,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = {
+ def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = {
n.WRITE_PREV(old)
if (CAS(old, n)) {
GCAS_Complete(n, ct)
@@ -78,16 +78,15 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
} else false
}
- @inline
private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2)
- @inline private def inode(cn: MainNode[K, V]) = {
+ private def inode(cn: MainNode[K, V]) = {
val nin = new INode[K, V](gen)
nin.WRITE(cn)
nin
}
- final def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = {
+ def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = {
val nin = new INode[K, V](ngen)
val main = GCAS_READ(ct)
nin.WRITE(main)
@@ -98,7 +97,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*
* @return true if successful, false otherwise
*/
- @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = {
+ @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -144,7 +143,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
* @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
* @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key)
*/
- @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
+ @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -203,7 +202,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
clean(parent, ct, lev - 5)
null
case ln: LNode[K, V] => // 3) an l-node
- @inline def insertln() = {
+ def insertln() = {
val nn = ln.inserted(k, v)
GCAS(ln, nn, ct)
}
@@ -234,7 +233,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*
* @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
*/
- @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = {
+ @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -277,7 +276,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
* @param v if null, will remove the key regardless of the value; otherwise removes only if binding contains that exact key and value
* @return null if not successful, an Option[V] indicating the previous value otherwise
*/
- final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
+ def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -361,9 +360,9 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- final def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null
+ def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null
- final def cachedSize(ct: TrieMap[K, V]): Int = {
+ def cachedSize(ct: TrieMap[K, V]): Int = {
val m = GCAS_READ(ct)
m.cachedSize(ct)
}
@@ -448,11 +447,9 @@ extends MainNode[K, V] {
}
-private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen)
-extends CNodeBase[K, V] {
-
+private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] {
// this should only be called from within read-only snapshots
- final def cachedSize(ct: AnyRef) = {
+ def cachedSize(ct: AnyRef) = {
val currsz = READ_SIZE()
if (currsz != -1) currsz
else {
@@ -486,7 +483,7 @@ extends CNodeBase[K, V] {
sz
}
- final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
+ def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val narr = new Array[BasicNode](len)
Array.copy(array, 0, narr, 0, len)
@@ -494,7 +491,7 @@ extends CNodeBase[K, V] {
new CNode[K, V](bitmap, narr, gen)
}
- final def removedAt(pos: Int, flag: Int, gen: Gen) = {
+ def removedAt(pos: Int, flag: Int, gen: Gen) = {
val arr = array
val len = arr.length
val narr = new Array[BasicNode](len - 1)
@@ -503,7 +500,7 @@ extends CNodeBase[K, V] {
new CNode[K, V](bitmap ^ flag, narr, gen)
}
- final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
+ def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val bmp = bitmap
val narr = new Array[BasicNode](len + 1)
@@ -516,7 +513,7 @@ extends CNodeBase[K, V] {
/** Returns a copy of this cnode such that all the i-nodes below it are copied
* to the specified generation `ngen`.
*/
- final def renewed(ngen: Gen, ct: TrieMap[K, V]) = {
+ def renewed(ngen: Gen, ct: TrieMap[K, V]) = {
var i = 0
val arr = array
val len = arr.length
@@ -536,7 +533,7 @@ extends CNodeBase[K, V] {
case _ => inode
}
- final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
+ def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
case sn: SNode[K, V] => sn.copyTombed
case _ => this
} else this
@@ -547,7 +544,7 @@ extends CNodeBase[K, V] {
// returns the version of this node with at least some null-inodes
// removed (those existing when the op began)
// - if there are only null-i-nodes below, returns null
- final def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
+ def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
var bmp = bitmap
var i = 0
val arr = array
@@ -571,7 +568,7 @@ extends CNodeBase[K, V] {
private[concurrent] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
/* quiescently consistent - don't call concurrently to anything involving a GCAS!! */
- protected def collectElems: Seq[(K, V)] = array flatMap {
+ private def collectElems: Seq[(K, V)] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair)
case in: INode[K, V] => in.mainnode match {
case tn: TNode[K, V] => Some(tn.kvPair)
@@ -580,7 +577,7 @@ extends CNodeBase[K, V] {
}
}
- protected def collectLocalElems: Seq[String] = array flatMap {
+ private def collectLocalElems: Seq[String] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
}
@@ -687,11 +684,11 @@ extends scala.collection.concurrent.Map[K, V]
} while (obj != TrieMapSerializationEnd)
}
- @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
- final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
- @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
val r = /*READ*/root
r match {
case in: INode[K, V] => in
@@ -781,9 +778,9 @@ extends scala.collection.concurrent.Map[K, V]
override def empty: TrieMap[K, V] = new TrieMap[K, V]
- final def isReadOnly = rootupdater eq null
+ def isReadOnly = rootupdater eq null
- final def nonReadOnly = rootupdater ne null
+ def nonReadOnly = rootupdater ne null
/** Returns a snapshot of this TrieMap.
* This operation is lock-free and linearizable.
@@ -794,7 +791,7 @@ extends scala.collection.concurrent.Map[K, V]
* TrieMap is distributed across all the threads doing updates or accesses
* subsequent to the snapshot creation.
*/
- @tailrec final def snapshot(): TrieMap[K, V] = {
+ @tailrec def snapshot(): TrieMap[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality)
@@ -813,34 +810,34 @@ extends scala.collection.concurrent.Map[K, V]
*
* This method is used by other methods such as `size` and `iterator`.
*/
- @tailrec final def readOnlySnapshot(): scala.collection.Map[K, V] = {
+ @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality)
else readOnlySnapshot()
}
- @tailrec final override def clear() {
+ @tailrec override def clear() {
val r = RDCSS_READ_ROOT()
if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
}
- @inline
+
def computeHash(k: K) = hashingobj.hash(k)
- final def lookup(k: K): V = {
+ def lookup(k: K): V = {
val hc = computeHash(k)
lookuphc(k, hc).asInstanceOf[V]
}
- final override def apply(k: K): V = {
+ override def apply(k: K): V = {
val hc = computeHash(k)
val res = lookuphc(k, hc)
if (res eq null) throw new NoSuchElementException
else res.asInstanceOf[V]
}
- final def get(k: K): Option[V] = {
+ def get(k: K): Option[V] = {
val hc = computeHash(k)
Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
}
@@ -850,22 +847,22 @@ extends scala.collection.concurrent.Map[K, V]
insertifhc(key, hc, value, null)
}
- final override def update(k: K, v: V) {
+ override def update(k: K, v: V) {
val hc = computeHash(k)
inserthc(k, hc, v)
}
- final def +=(kv: (K, V)) = {
+ def +=(kv: (K, V)) = {
update(kv._1, kv._2)
this
}
- final override def remove(k: K): Option[V] = {
+ override def remove(k: K): Option[V] = {
val hc = computeHash(k)
removehc(k, null.asInstanceOf[V], hc)
}
- final def -=(k: K) = {
+ def -=(k: K) = {
remove(k)
this
}
@@ -916,18 +913,18 @@ object TrieMap extends MutableMapFactory[TrieMap] {
def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V]
class MangledHashing[K] extends Hashing[K] {
- def hash(k: K) = util.hashing.byteswap32(k.##)
+ def hash(k: K)= scala.util.hashing.byteswap32(k.##)
}
}
private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
- var stack = new Array[Array[BasicNode]](7)
- var stackpos = new Array[Int](7)
- var depth = -1
- var subiter: Iterator[(K, V)] = null
- var current: KVNode[K, V] = null
+ private var stack = new Array[Array[BasicNode]](7)
+ private var stackpos = new Array[Int](7)
+ private var depth = -1
+ private var subiter: Iterator[(K, V)] = null
+ private var current: KVNode[K, V] = null
if (mustInit) initialize()
@@ -960,12 +957,12 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
current = null
}
- @inline private def checkSubiter() = if (!subiter.hasNext) {
+ private def checkSubiter() = if (!subiter.hasNext) {
subiter = null
advance()
}
- @inline private def initialize() {
+ private def initialize() {
assert(ct.isReadOnly)
val r = ct.RDCSS_READ_ROOT()
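A small sketch of the lock-free snapshots described in the TrieMap comments above: `snapshot()` yields an independent map, so later updates to the original are not visible in it.

  import scala.collection.concurrent.TrieMap
  val m = TrieMap("a" -> 1, "b" -> 2)
  val snap = m.snapshot()          // constant-time; copying is deferred to later accesses
  m += ("c" -> 3)
  assert(m.contains("c") && !snap.contains("c"))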
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index e14769739d..f999d2fee8 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -313,6 +313,6 @@ trait DecorateAsJava {
* @return An object with an `asJava` method that returns a Java
* `ConcurrentMap` view of the argument.
*/
- implicit def asJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
+ implicit def mapAsJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(mapAsJavaConcurrentMap(m))
}
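Caller code going through `JavaConverters` is unaffected by the rename above, since the conversion is picked up implicitly; a sketch:

  import scala.collection.JavaConverters._
  import scala.collection.concurrent.TrieMap
  import java.util.concurrent.{ ConcurrentMap => JConcurrentMap }
  // asJava resolves via mapAsJavaConcurrentMapConverter for a Scala concurrent.Map
  val jmap: JConcurrentMap[String, Int] = TrieMap("a" -> 1).asJava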
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index c79c651e96..e427afbb33 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -269,7 +269,7 @@ trait WrapAsJava {
* @param m The Scala `concurrent.Map` to be converted.
* @return A Java `ConcurrentMap` view of the argument.
*/
- implicit def asJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
case JConcurrentMapWrapper(wrapped) => wrapped
case _ => new ConcurrentMapWrapper(m)
}
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index d79112d616..cebb4e69d3 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -14,7 +14,7 @@ package scala.collection.generic
*/
trait FilterMonadic[+A, +Repr] extends Any {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala
index 8eac025ed6..9467510a2c 100644
--- a/src/library/scala/collection/generic/IsSeqLike.scala
+++ b/src/library/scala/collection/generic/IsSeqLike.scala
@@ -30,7 +30,7 @@ package generic
* // == List(2, 4)
* }}}
*
- * @see [[scala.collection.generic.Seq]]
+ * @see [[scala.collection.Seq]]
* @see [[scala.collection.generic.IsTraversableLike]]
*/
trait IsSeqLike[Repr] {
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 9624aafd06..6b59b6671c 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -24,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]
extends GenTraversableFactory[CC]
with GenericParCompanion[CC] {
- //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C]
/** A generic implementation of the `CanCombineFrom` trait, which forwards
* all calls to `apply(from)` to the `genericParBuilder` method of the $coll
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 56033ca8d8..af56d06d60 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -32,7 +32,7 @@ private[collection] class SliceInterval private (val from: Int, val until: Int)
*/
def recalculate(_from: Int, _until: Int): SliceInterval = {
val lo = _from max 0
- val elems = math.min(_until - lo, width)
+ val elems = scala.math.min(_until - lo, width)
val start = from + lo
if (elems <= 0) new SliceInterval(from, from)
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 1b676e2d2f..d79e2adbda 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -31,7 +31,7 @@ abstract class BitSet extends scala.collection.AbstractSet[Int]
with Serializable {
override def empty = BitSet.empty
- @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10")
+ @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10.0")
def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems)
@@ -82,7 +82,7 @@ object BitSet extends BitSetFactory[BitSet] {
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
/** A bitset containing all the bits in an array */
- @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10")
+ @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10.0")
def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
/** A bitset containing all the bits in an array */
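The deprecation message points to `fromBitMask`/`fromBitMaskNoCopy`; a small sketch with an arbitrary mask:

    import scala.collection.immutable.BitSet

    // Bits 0 and 1 come from the first long, bit 64 from the second.
    val mask   = Array(3L, 1L)
    val copied = BitSet.fromBitMask(mask)        // copies the array
    val shared = BitSet.fromBitMaskNoCopy(mask)  // reuses the array; the caller must not mutate it
    // copied == BitSet(0, 1, 64)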
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 01372aa618..ee41e2aa3c 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -119,7 +120,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
*/
object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
- private abstract class Merger[A, B] {
+ private[collection] abstract class Merger[A, B] {
def apply(kv1: (A, B), kv2: (A, B)): (A, B)
def invert: Merger[A, B]
}
@@ -196,7 +197,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
// }
// }
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash && key == this.key ) {
if (merger eq null) {
if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this
@@ -237,7 +238,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def get0(key: A, hash: Int, level: Int): Option[B] =
if (hash == this.hash) kvs.get(key) else None
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash) {
if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value))
else new HashMapCollision1(hash, kvs + merger((key, kvs(key)), kv))
@@ -315,7 +316,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
None
}
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = {
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 03af4deaae..2ebeb044fc 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.{ uncheckedVariance => uV }
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index e895c94599..d0f6b4b3ac 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index d3402e16a2..83da68eb68 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -229,7 +229,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* }}}
*/
override def slice(from: Int, until: Int): List[A] = {
- val lo = math.max(from, 0)
+ val lo = scala.math.max(from, 0)
if (until <= lo || isEmpty) Nil
else this drop lo take (until - lo)
}
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 002027b162..4899b45d5f 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
@@ -298,7 +299,7 @@ extends AbstractMap[Long, T]
if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
- case LongMap.Tip(key2, value2) =>
+ case LongMap.Tip(key2, value2) =>
if (key == key2) LongMap.Tip(key, f(value2, value))
else join(key, LongMap.Tip(key, value), key2, this)
case LongMap.Nil => LongMap.Tip(key, value)
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 8975b440d2..3b4bfdc593 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -99,7 +99,7 @@ object PagedSeq {
/** Constructs a paged character sequence from a scala.io.Source value
*/
- def fromSource(source: io.Source) =
+ def fromSource(source: scala.io.Source) =
fromLines(source.getLines())
}
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index a2875ec3fb..92ea5d3f04 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -127,7 +127,7 @@ extends scala.collection.AbstractSeq[Int]
}
}
- @inline final def apply(idx: Int): Int = {
+ final def apply(idx: Int): Int = {
validateMaxLength()
if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
else start + (step * idx)
@@ -346,11 +346,11 @@ object Range {
/** Make an inclusive range from `start` to `end` with given step value.
* @note step != 0
*/
- @inline def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
+ def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
/** Make an inclusive range from `start` to `end` with step value 1.
*/
- @inline def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
+ def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
// BigInt and Long are straightforward generic ranges.
object BigInt {
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 83eeaa45ee..77e5a87e73 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
@@ -17,7 +18,7 @@ package immutable
*
* @since 2.3
*/
-@deprecated("use `TreeMap` or `TreeSet` instead", "2.10")
+@deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0")
@SerialVersionUID(8691885935445612921L)
abstract class RedBlack[A] extends Serializable {
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 9eeebb641e..328d929fb9 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.tailrec
@@ -73,17 +74,23 @@ object RedBlackTree {
result
}
- def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) {
- if (tree.left ne null) foreach(tree.left, f)
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) _foreach(tree, f)
+
+ private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U) {
+ if (tree.left ne null) _foreach(tree.left, f)
f((tree.key, tree.value))
- if (tree.right ne null) foreach(tree.right, f)
- }
- def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) {
- if (tree.left ne null) foreachKey(tree.left, f)
- f(tree.key)
- if (tree.right ne null) foreachKey(tree.right, f)
+ if (tree.right ne null) _foreach(tree.right, f)
}
+
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) _foreachKey(tree, f)
+ private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) {
+ if (tree.left ne null) _foreachKey(tree.left, f)
+ f(tree.key)
+ if (tree.right ne null) _foreachKey(tree.right, f)
+ }
+
def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree)
def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree)
def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree)
@@ -131,6 +138,15 @@ object RedBlackTree {
else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
else tree
}
+ private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1, overwrite: Boolean): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val rank = count(tree.left) + 1
+ if (idx < rank) balanceLeft(isBlackTree(tree), tree.key, tree.value, updNth(tree.left, idx, k, v, overwrite), tree.right)
+ else if (idx > rank) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, updNth(tree.right, idx - rank, k, v, overwrite))
+ else if (overwrite) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
+ }
/* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
* http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
@@ -248,27 +264,27 @@ object RedBlackTree {
else rebalance(tree, newLeft, newRight)
}
- private[this] def doDrop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
if (n <= 0) return tree
if (n >= this.count(tree)) return null
val count = this.count(tree.left)
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
- private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
if (n <= 0) return null
if (n >= this.count(tree)) return tree
val count = this.count(tree.left)
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
- private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
+ private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
if (tree eq null) return null
val count = this.count(tree.left)
if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1)
@@ -276,8 +292,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
- else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
@@ -379,7 +395,7 @@ object RedBlackTree {
@(inline @getter) final val left: Tree[A, B],
@(inline @getter) final val right: Tree[A, B])
extends Serializable {
- final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
+ @(inline @getter) final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
def black: Tree[A, B]
def red: Tree[A, B]
}
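The index-based `updNth` rebuilding above is exercised through the public `drop`/`take`/`slice` of the tree-backed collections; a small sanity-check sketch:

    import scala.collection.immutable.TreeMap

    val m = TreeMap((1 to 10).map(i => i -> i.toString): _*)
    val dropped = m.drop(3)      // keys 4..10, still sorted
    val taken   = m.take(3)      // keys 1..3
    val sliced  = m.slice(2, 5)  // keys 3..5
    assert(dropped.keys.toList == (4 to 10).toList)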
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index bac90341ec..461a375317 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -422,6 +422,9 @@ self =>
* // produces: 10, 10, 11, 10, 11, 11, 10, 11, 11, 12, 10, 11, 11, 12, 13
* }}}
*
+ * ''Note:'' Currently `flatMap` will evaluate as much of the Stream as is needed
+ * to find a non-empty element for the head, i.e. that part of the evaluation is not lazy.
+ *
* @tparam B The element type of the returned collection '''That'''.
* @param f the function to apply on each element.
* @return `f(a,,0,,) ::: ... ::: f(a,,n,,)` if
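A small sketch of the behaviour the added note describes, assuming an arbitrary function that yields empty results for the first few elements:

    var forcedThrough = 0
    val nats = Stream.from(1)
    // f returns an empty stream for 1, 2 and 3, and a singleton stream from 4 on.
    val fm = nats.flatMap { i => forcedThrough = i; if (i < 4) Stream.empty else Stream(i) }
    // Merely constructing fm has already passed 1, 2, 3 and 4 to f while
    // searching for a non-empty head, so forcedThrough == 4 and fm.head == 4.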
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index bb378bc337..236308da2e 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -50,10 +50,10 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 51bc76efc3..f7a24f3d66 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -51,7 +51,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
with MapLike[A, B, TreeMap[A, B]]
with Serializable {
- @deprecated("use `ordering.lt` instead", "2.10")
+ @deprecated("use `ordering.lt` instead", "2.10.0")
def isSmaller(x: A, y: A) = ordering.lt(x, y)
override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] =
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 697da2bc4b..0fdb16c23b 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -96,7 +96,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
override def takeWhile(p: A => Boolean) = take(countWhile(p))
override def span(p: A => Boolean) = splitAt(countWhile(p))
- @deprecated("use `ordering.lt` instead", "2.10")
+ @deprecated("use `ordering.lt` instead", "2.10.0")
def isSmaller(x: A, y: A) = compare(x,y) < 0
def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 49537df50d..895d073869 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
@@ -22,7 +23,7 @@ object Vector extends IndexedSeqFactory[Vector] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
- @inline override def empty[A]: Vector[A] = NIL
+ override def empty[A]: Vector[A] = NIL
}
// in principle, most members should be private. however, access privileges must
@@ -87,7 +88,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (s.depth > 1) s.gotoPos(startIndex, startIndex ^ focus)
}
- @inline override def iterator: VectorIterator[A] = {
+ override def iterator: VectorIterator[A] = {
val s = new VectorIterator[A](startIndex, endIndex)
initIterator(s)
s
@@ -113,16 +114,6 @@ override def companion: GenericCompanion[Vector] = Vector
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f)
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- val b = bf(repr)
- foreachFast(x => b += f(x))
- b.result
- }
-
-
def apply(index: Int): A = {
val idx = checkRangeConvert(index)
//println("get elem: "+index + "/"+idx + "(focus:" +focus+" xor:"+(idx^focus)+" depth:"+depth+")")
@@ -140,15 +131,15 @@ override def companion: GenericCompanion[Vector] = Vector
// SeqLike api
- @inline override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
if (bf eq IndexedSeq.ReusableCBF) updateAt(index, elem).asInstanceOf[That] // just ignore bf
else super.updated(index, elem)(bf)
- @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf
else super.+:(elem)(bf)
- @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf
else super.:+(elem)(bf)
@@ -686,9 +677,6 @@ extends AbstractIterator[A]
v.initFrom(this)
v
}
-
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) }
}
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
index eec5f04fff..647fc04310 100644
--- a/src/library/scala/collection/immutable/package.scala
+++ b/src/library/scala/collection/immutable/package.scala
@@ -69,9 +69,9 @@ package immutable {
private def locationAfterN(n: Int) = (
if (n > 0) {
if (step > 0)
- math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
else
- math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
}
else start
)
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 9aea25f330..ad52daaad4 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index c87feaddc5..397f5bbefa 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import scala.compat.Platform.arraycopy
@@ -60,7 +61,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index e408d74353..277d48c545 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index b6887df61e..6dec6b221e 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f6d4cc31b6..74f576b0f7 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -44,7 +44,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
import HashTable.powerOfTwo
@@ -109,7 +109,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- def findEntry(elem: A): Option[A] = {
+ protected def findEntry(elem: A): Option[A] = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -120,7 +120,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Checks whether an element is contained in the hash table. */
- def containsEntry(elem: A): Boolean = {
+ protected def containsEntry(elem: A): Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -133,7 +133,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** Add entry if not yet in table.
* @return Returns `true` if a new entry was added, `false` otherwise.
*/
- def addEntry(elem: A) : Boolean = {
+ protected def addEntry(elem: A) : Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry) {
@@ -150,7 +150,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- def removeEntry(elem: A) : Option[A] = {
+ protected def removeEntry(elem: A) : Option[A] = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
@@ -185,7 +185,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
None
}
- def iterator: Iterator[A] = new AbstractIterator[A] {
+ protected def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = {
while (i < table.length && (null == table(i))) i += 1
@@ -356,8 +356,8 @@ private[collection] object FlatHashTable {
*
* See SI-5293.
*/
- final def seedGenerator = new ThreadLocal[util.Random] {
- override def initialValue = new util.Random
+ final def seedGenerator = new ThreadLocal[scala.util.Random] {
+ override def initialValue = new scala.util.Random
}
/** The load factor for the hash table; must be < 500 (0.5)
@@ -365,7 +365,7 @@ private[collection] object FlatHashTable {
def defaultLoadFactor: Int = 450
final def loadFactorDenum = 1000
- def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
+ def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
@@ -397,7 +397,7 @@ private[collection] object FlatHashTable {
//h = h + (h << 4)
//h ^ (h >>> 10)
- val improved = util.hashing.byteswap32(hcode)
+ val improved = scala.util.hashing.byteswap32(hcode)
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index da486f4042..be85df3c28 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -49,7 +49,7 @@ extends AbstractMap[A, B]
type Entry = DefaultEntry[A, B]
override def empty: HashMap[A, B] = HashMap.empty[A, B]
- override def clear() = clearTable()
+ override def clear() { clearTable() }
override def size: Int = tableSize
def this() = this(null)
@@ -57,22 +57,23 @@ extends AbstractMap[A, B]
override def par = new ParHashMap[A, B](hashTableContents)
// contains and apply overridden to avoid option allocations.
- override def contains(key: A) = findEntry(key) != null
+ override def contains(key: A): Boolean = findEntry(key) != null
+
override def apply(key: A): B = {
val result = findEntry(key)
- if (result == null) default(key)
+ if (result eq null) default(key)
else result.value
}
def get(key: A): Option[B] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -85,9 +86,8 @@ extends AbstractMap[A, B]
}
def += (kv: (A, B)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -127,12 +127,19 @@ extends AbstractMap[A, B]
if (!isSizeMapDefined) sizeMapInitAndRebuild
} else sizeMapDisable
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ new Entry(key, value.asInstanceOf[B])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[B](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index b263b46d36..a5b636c83d 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -53,7 +53,7 @@ extends AbstractSet[A]
override def companion: GenericCompanion[HashSet] = HashSet
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = containsEntry(elem)
@@ -67,7 +67,9 @@ extends AbstractSet[A]
override def remove(elem: A): Boolean = removeEntry(elem).isDefined
- override def clear() = clearTable()
+ override def clear() { clearTable() }
+
+ override def iterator: Iterator[A] = super[FlatHashTable].iterator
override def foreach[U](f: A => U) {
var i = 0
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 67e7348672..eb6717393b 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -32,6 +32,9 @@ package mutable
* @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
+ // Replacing the Entry type parameter with an abstract type member here would allow us not to expose
+ // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry` publicly.
+ // However, I'm afraid it's too late now for such a breaking change.
import HashTable._
@transient protected var _loadFactor = defaultLoadFactor
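A hypothetical sketch of the alternative the comment describes, purely for illustration and not part of the library: with an abstract type member, no concrete entry class needs to appear in any public signature.

    trait HashTableAlt[A] {
      protected type Entry <: AnyRef           // fixed by each concrete implementation
      protected def createNewEntry(key: A, value: Any): Entry
      protected def findEntry(key: A): Entry   // may return null, as in the real trait
    }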
@@ -52,7 +55,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
@@ -75,11 +78,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/**
- * Initializes the collection from the input stream. `f` will be called for each key/value pair
- * read from the input stream in the order determined by the stream. This is useful for
- * structures where iteration order is important (e.g. LinkedHashMap).
+ * Initializes the collection from the input stream. `readEntry` will be called for each
+ * entry to be read from the input stream.
*/
- private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
+ private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) {
in.defaultReadObject
_loadFactor = in.readInt()
@@ -100,35 +102,34 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B]))
+ addEntry(readEntry)
index += 1
}
}
/**
* Serializes the collection to the output stream by saving the load factor, collection
- * size, collection keys and collection values. `value` is responsible for providing a value
- * from an entry.
+ * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
*
- * `foreach` determines the order in which the key/value pairs are saved to the stream. To
+ * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
* deserialize, `init` should be used.
*/
- private[collection] def serializeTo[B](out: java.io.ObjectOutputStream, value: Entry => B) {
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
- foreachEntry { entry =>
- out.writeObject(entry.key)
- out.writeObject(value(entry))
- }
+
+ foreachEntry(writeEntry)
}
/** Find entry with given key in table, null if not found.
*/
- protected def findEntry(key: A): Entry = {
- val h = index(elemHashCode(key))
+ protected def findEntry(key: A): Entry =
+ findEntry0(key, index(elemHashCode(key)))
+
+ private[this] def findEntry0(key: A, h: Int): Entry = {
var e = table(h).asInstanceOf[Entry]
while (e != null && !elemEquals(e.key, key)) e = e.next
e
@@ -138,7 +139,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* pre: no entry with same key exists
*/
protected def addEntry(e: Entry) {
- val h = index(elemHashCode(e.key))
+ addEntry0(e, index(elemHashCode(e.key)))
+ }
+
+ private[this] def addEntry0(e: Entry, h: Int) {
e.next = table(h).asInstanceOf[Entry]
table(h) = e
tableSize = tableSize + 1
@@ -147,6 +151,24 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
resize(2 * table.length)
}
+ /** Find entry with given key in table, or add new one if not found.
+ * May be somewhat faster than the `findEntry`/`addEntry` pair as it
+ * computes the entry's hash index only once.
+ * Returns the entry found in the table, or null.
+ * New entries are created by calling `createNewEntry` method.
+ */
+ protected def findOrAddEntry[B](key: A, value: B): Entry = {
+ val h = index(elemHashCode(key))
+ val e = findEntry0(key, h)
+ if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+ }
+
+ /** Creates a new entry to be immediately inserted into the hashtable.
+ * This method is guaranteed to be called at most once, and only when the entry
+ * will actually be added. In other words, an implementation may be side-effecting.
+ */
+ protected def createNewEntry[B](key: A, value: B): Entry
+
/** Remove entry from table if present.
*/
protected def removeEntry(key: A) : Entry = {
@@ -195,7 +217,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/** Avoid iterator for a 2x faster traversal. */
- protected def foreachEntry[C](f: Entry => C) {
+ protected def foreachEntry[U](f: Entry => U) {
val iterTable = table
var idx = lastPopulatedIndex
var es = iterTable(idx)
@@ -401,7 +423,7 @@ private[collection] object HashTable {
*
* For performance reasons, we avoid this improvement.
* */
- val i = util.hashing.byteswap32(hcode)
+ val i = scala.util.hashing.byteswap32(hcode)
/* Jenkins hash
* for range 0-10000, output has the msb set to zero */
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 5643e070f8..5028884a8e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -67,23 +67,9 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) {
- val e = new Entry(key, value)
- addEntry(e)
- updateLinkedEntries(e)
- None
- } else {
- val v = e.value
- e.value = value
- Some(v)
- }
- }
-
- private def updateLinkedEntries(e: Entry) {
- if (firstEntry == null) firstEntry = e
- else { lastEntry.later = e; e.earlier = lastEntry }
- lastEntry = e
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
+ else { val v = e.value; e.value = value; Some(v) }
}
override def remove(key: A): Option[B] = {
@@ -143,7 +129,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
else Iterator.empty.next
}
- override def foreach[U](f: ((A, B)) => U) = {
+ override def foreach[U](f: ((A, B)) => U) {
var cur = firstEntry
while (cur ne null) {
f((cur.key, cur.value))
@@ -151,7 +137,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
- protected override def foreachEntry[C](f: Entry => C) {
+ protected override def foreachEntry[U](f: Entry => U) {
var cur = firstEntry
while (cur ne null) {
f(cur)
@@ -159,22 +145,29 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ val e = new Entry(key, value.asInstanceOf[B])
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
+
override def clear() {
clearTable()
firstEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
firstEntry = null
lastEntry = null
- init[B](in, { (key, value) =>
- val entry = new Entry(key, value)
- updateLinkedEntries(entry)
- entry
- })
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 3f789f9fa2..26a428c259 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -15,10 +15,9 @@ import generic._
/** This class implements mutable sets using a hashtable.
* The iterator and all traversal methods of this class visit elements in the order they were inserted.
*
- * $cannotStoreNull
- *
* @author Matthias Zenger
* @author Martin Odersky
+ * @author Pavel Pavlov
* @version 2.0, 31/12/2006
* @since 1
*
@@ -43,46 +42,82 @@ class LinkedHashSet[A] extends AbstractSet[A]
with Set[A]
with GenericSetTemplate[A, LinkedHashSet]
with SetLike[A, LinkedHashSet[A]]
- with FlatHashTable[A]
+ with HashTable[A, LinkedHashSet.Entry[A]]
with Serializable
{
override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet
- @transient private[this] var ordered = new ListBuffer[A]
+ type Entry = LinkedHashSet.Entry[A]
+
+ @transient protected var firstEntry: Entry = null
+ @transient protected var lastEntry: Entry = null
- override def size = tableSize
+ override def size: Int = tableSize
- def contains(elem: A): Boolean = containsEntry(elem)
+ def contains(elem: A): Boolean = findEntry(elem) ne null
def += (elem: A): this.type = { add(elem); this }
def -= (elem: A): this.type = { remove(elem); this }
- override def add(elem: A): Boolean =
- if (addEntry(elem)) { ordered += elem; true }
- else false
+ override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null
+
+ override def remove(elem: A): Boolean = {
+ val e = removeEntry(elem)
+ if (e eq null) false
+ else {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ true
+ }
+ }
- override def remove(elem: A): Boolean =
- removeEntry(elem) match {
- case None => false
- case _ => ordered -= elem; true
+ def iterator: Iterator[A] = new AbstractIterator[A] {
+ private var cur = firstEntry
+ def hasNext = cur ne null
+ def next =
+ if (hasNext) { val res = cur.key; cur = cur.later; res }
+ else Iterator.empty.next
+ }
+
+ override def foreach[U](f: A => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key)
+ cur = cur.later
}
+ }
- override def clear() {
- ordered.clear()
- clearTable()
+ protected override def foreachEntry[U](f: Entry => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
}
- override def iterator: Iterator[A] = ordered.iterator
+ protected def createNewEntry[B](key: A, dummy: B): Entry = {
+ val e = new Entry(key)
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
- override def foreach[U](f: A => U) = ordered foreach f
+ override def clear() {
+ clearTable()
+ firstEntry = null
+ }
- private def writeObject(s: java.io.ObjectOutputStream) {
- serializeTo(s)
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, { e => out.writeObject(e.key) })
}
private def readObject(in: java.io.ObjectInputStream) {
- ordered = new ListBuffer[A]
- init(in, ordered += _)
+ firstEntry = null
+ lastEntry = null
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], null))
}
}
@@ -93,5 +128,13 @@ class LinkedHashSet[A] extends AbstractSet[A]
object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A]
override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A]
+
+ /** Class for the linked hash set entry, used internally.
+ * @since 2.10
+ */
+ private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable {
+ var earlier: Entry[A] = null
+ var later: Entry[A] = null
+ }
}
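Observable behaviour of the rewritten `LinkedHashSet`: iteration still follows insertion order, and removal unlinks the entry without disturbing the order of the others. A short sketch:

    import scala.collection.mutable.LinkedHashSet

    val s = LinkedHashSet("b", "a", "c")
    s += "d"
    s -= "a"
    assert(s.toList == List("b", "c", "d"))   // insertion order preserved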
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index cd743999bc..bced92e663 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -56,12 +56,18 @@ final class ListBuffer[A]
import scala.collection.Traversable
import scala.collection.immutable.ListSerializeEnd
+ /** Expected invariants:
+ * If start.isEmpty, last0 == null
+ * If start.nonEmpty, last0 != null
+ * If len == 0, start.isEmpty
+ * If len > 0, start.nonEmpty
+ */
private var start: List[A] = Nil
private var last0: ::[A] = _
private var exported: Boolean = false
private var len = 0
- protected def underlying: immutable.Seq[A] = start
+ protected def underlying: List[A] = start
private def writeObject(out: ObjectOutputStream) {
// write start
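The invariants above are what makes resetting `last0` in `clear()` (and the `isEmpty` checks further down) necessary; a small sketch of the user-visible behaviour:

    import scala.collection.mutable.ListBuffer

    val b = ListBuffer(1, 2, 3)
    b.clear()            // start == Nil, len == 0, and last0 is nulled out
    b += 4               // re-initializes last0 via the isEmpty check
    assert(b.toList == List(4))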
@@ -133,7 +139,7 @@ final class ListBuffer[A]
if (exported) copy()
if (n == 0) {
val newElem = new :: (x, start.tail);
- if (last0 eq start) {
+ if ((last0 eq null) || (last0 eq start)) {
last0 = newElem
}
start = newElem
@@ -162,7 +168,7 @@ final class ListBuffer[A]
*/
def += (x: A): this.type = {
if (exported) copy()
- if (start.isEmpty) {
+ if (isEmpty) {
last0 = new :: (x, Nil)
start = last0
} else {
@@ -184,6 +190,7 @@ final class ListBuffer[A]
*/
def clear() {
start = Nil
+ last0 = null
exported = false
len = 0
}
@@ -197,7 +204,7 @@ final class ListBuffer[A]
def +=: (x: A): this.type = {
if (exported) copy()
val newElem = new :: (x, start)
- if (start.isEmpty) last0 = newElem
+ if (isEmpty) last0 = newElem
start = newElem
len += 1
this
@@ -219,7 +226,7 @@ final class ListBuffer[A]
if (n == 0) {
while (!elems.isEmpty) {
val newElem = new :: (elems.head, start)
- if (start.isEmpty) last0 = newElem
+ if (isEmpty) last0 = newElem
start = newElem
elems = elems.tail
}
@@ -243,6 +250,15 @@ final class ListBuffer[A]
}
}
+ /** Reduce the length of the buffer, and null out last0
+ * if this reduces the length to 0.
+ */
+ private def reduceLengthBy(num: Int) {
+ len -= num
+ if (len <= 0) // obviously shouldn't be < 0, but still better not to leak
+ last0 = null
+ }
+
/** Removes a given number of elements on a given index position. May take
* time linear in the buffer size.
*
@@ -274,7 +290,7 @@ final class ListBuffer[A]
c -= 1
}
}
- len -= count1
+ reduceLengthBy(count1)
}
// Implementation of abstract method in Builder
@@ -285,7 +301,7 @@ final class ListBuffer[A]
* copied lazily, the first time it is mutated.
*/
override def toList: List[A] = {
- exported = !start.isEmpty
+ exported = !isEmpty
start
}
@@ -296,7 +312,7 @@ final class ListBuffer[A]
* @param xs the list to which elements are prepended
*/
def prependToList(xs: List[A]): List[A] = {
- if (start.isEmpty) xs
+ if (isEmpty) xs
else {
if (exported) copy()
last0.tl = xs
@@ -331,7 +347,7 @@ final class ListBuffer[A]
if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]]
cursor.asInstanceOf[::[A]].tl = cursor.tail.tail
}
- len -= 1
+ reduceLengthBy(1)
old
}
@@ -343,11 +359,12 @@ final class ListBuffer[A]
*/
override def -= (elem: A): this.type = {
if (exported) copy()
- if (start.isEmpty) {}
+ if (isEmpty) {}
else if (start.head == elem) {
start = start.tail
- len -= 1
- } else {
+ reduceLengthBy(1)
+ }
+ else {
var cursor = start
while (!cursor.tail.isEmpty && cursor.tail.head != elem) {
cursor = cursor.tail
@@ -357,7 +374,7 @@ final class ListBuffer[A]
if (z.tl == last0)
last0 = z
z.tl = cursor.tail.tail
- len -= 1
+ reduceLengthBy(1)
}
}
this
@@ -397,6 +414,7 @@ final class ListBuffer[A]
/** Copy contents of this buffer */
private def copy() {
+ if (isEmpty) return
var cursor = start
val limit = last0.tail
clear()
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index c33a7a906e..11055f8986 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index e37cbdc712..1fc3928531 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -43,12 +43,12 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
{
import ord._
- private final class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] {
- @inline def p_size0 = size0
- @inline def p_size0_=(s: Int) = size0 = s
- @inline def p_array = array
- @inline def p_ensureSize(n: Int) = super.ensureSize(n)
- @inline def p_swap(a: Int, b: Int) = super.swap(a, b)
+ private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] {
+ def p_size0 = size0
+ def p_size0_=(s: Int) = size0 = s
+ def p_array = array
+ def p_ensureSize(n: Int) = super.ensureSize(n)
+ def p_swap(a: Int, b: Int) = super.swap(a, b)
}
protected[this] override def newBuilder = new PriorityQueue[A]
@@ -166,7 +166,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* Note: The order of elements returned is undefined.
* If you want to traverse the elements in priority queue
* order, use `clone().dequeueAll.iterator`.
- *
+ *
* @return an iterator over all the elements.
*/
override def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -193,7 +193,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return A reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
def compare(x: A, y: A) = ord.compare(y, x)
})
for (i <- 1 until resarr.length) revq += resarr(i)
@@ -204,7 +204,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* than that returned by the method `iterator`.
*
* Note: The order of elements returned is undefined.
- *
+ *
* @return an iterator over all elements sorted in descending order.
*/
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
@@ -236,11 +236,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
-
+
/** Converts this $coll to a list.
*
* Note: the order of elements is undefined.
- *
+ *
* @return a list containing all elements of this $coll.
*/
override def toList = this.iterator.toList
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index 54bf93252f..d29ee67580 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index c9ae87d558..83192124af 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -11,7 +11,7 @@ package mutable
/** `Subscriber[A, B]` objects may subscribe to events of type `A`
* published by an object of type `B`. `B` is typically a subtype of
- * [[scala.collection.immutable.Publisher]].
+ * [[scala.collection.mutable.Publisher]].
*
* @author Matthias Zenger
* @author Martin Odersky
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 4e0818ff45..0c0ff2b027 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -433,12 +433,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam S the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
+ * `Nil` for list concatenation or `0` for summation) and may be evaluated
+ * more than once
* @param seqop an operator used to accumulate results within a partition
* @param combop an associative operator used to combine results from different partitions
*/
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
+ def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
+ tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter))
}
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
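Since `z` is now passed by name, each partition evaluates the zero element afresh; a rough sketch with arbitrary data:

    val xs = (1 to 100).par
    // seqop folds within a partition, combop merges partition results.
    val total = xs.aggregate(0)(_ + _, _ + _)   // 5050
    // A mutable or side-effecting z (say, a fresh StringBuilder) is therefore
    // created once per partition rather than shared across all of them.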
@@ -1006,10 +1007,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Fold[U]) = result = op(result, that.result)
}
- protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
+ protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[S, Aggregate[S]] {
@volatile var result: S = null.asInstanceOf[S]
- def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
+ def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p)
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
@@ -1368,7 +1369,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val until = from + len
val blocksize = scanBlockSize
while (i < until) {
- trees += scanBlock(i, math.min(blocksize, pit.remaining))
+ trees += scanBlock(i, scala.math.min(blocksize, pit.remaining))
i += blocksize
}
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 58197ab2c6..2bc5e783e6 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,13 +6,8 @@
** |/ **
\* */
-
package scala.collection.parallel
-
-
-
-
import scala.collection.Map
import scala.collection.GenMap
import scala.collection.mutable.Builder
@@ -21,10 +16,6 @@ import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
-
-
-
-
/** A template trait for parallel maps.
*
* $sideeffects
@@ -75,31 +66,3 @@ object ParMap extends ParMapFactory[ParMap] {
override def default(key: A): B = d(key)
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index a67a4d8eb7..9bf287cc39 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -190,7 +190,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(from)
- var left = math.max(until - from, 0)
+ var left = scala.math.max(until - from, 0)
cb.sizeHint(left)
while (left > 0) {
cb += next
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 67552e1c89..2556cd3f68 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -98,7 +98,7 @@ trait Task[R, +Tp] {
*/
trait Tasks {
- private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
+ private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]()
private[parallel] def debuglog(s: String) = synchronized {
debugMessages += s
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 7f5255f5a3..187e4aaf92 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -264,7 +264,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
@@ -328,7 +328,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
val fp = howmany / 2
List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 42d00623ab..85e2138c56 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -209,7 +209,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 4b0773ce7b..5854844a8f 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -30,10 +30,10 @@ import scala.collection.GenIterable
* @since 2.9
*/
trait ParIterable[+T]
-extends collection/*.immutable*/.GenIterable[T]
+extends scala.collection/*.immutable*/.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 74b2672e67..585e6bf541 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -28,11 +28,11 @@ import scala.collection.GenMapLike
* @since 2.9
*/
trait ParMap[K, +V]
-extends collection/*.immutable*/.GenMap[K, V]
+extends scala.collection/*.immutable*/.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
- with ParMapLike[K, V, ParMap[K, V], collection.immutable.Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]]
{
self =>
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index 300efe9a58..265121286d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -28,11 +28,11 @@ import scala.collection.GenSeq
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection/*.immutable*/.GenSeq[T]
+extends scala.collection/*.immutable*/.GenSeq[T]
with scala.collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.immutable.Seq[T]]
+ with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]]
{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
override def toSeq: ParSeq[T] = this
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 40429280ac..c8da509ef5 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -20,11 +20,11 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends collection/*.immutable*/.GenSet[T]
+extends scala.collection/*.immutable*/.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
- with ParSetLike[T, ParSet[T], collection.immutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]]
{
self =>
override def empty: ParSet[T] = ParHashSet[T]()
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 6889d8b472..deff9eda3b 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -180,7 +181,7 @@ self =>
override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op)
- override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
+ override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
override def sum[U >: T](implicit num: Numeric[U]): U = {
var s = sum_quick(num, arr, until, i, num.zero)
@@ -665,7 +666,7 @@ self =>
val fp = howmany / 2
List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
}
/* serialization */
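The second hunk changes `aggregate` to take its neutral element by name (`z: => S`), so each partition can evaluate a fresh zero of its own, which matters when the zero is a mutable accumulator. A usage sketch with arbitrary contents:

    // aggregate with a by-name zero: every partition folds into its own
    // ArrayBuffer instead of all partitions mutating one shared instance.
    import scala.collection.mutable.ArrayBuffer
    import scala.collection.parallel.mutable.ParArray

    val xs = ParArray(1, 2, 3, 4, 5)
    val gathered: ArrayBuffer[Int] = xs.aggregate(ArrayBuffer.empty[Int])(
      (buf, x) => buf += x,   // seqop: fold elements within one partition
      (a, b)   => a ++= b     // combop: merge the per-partition buffers
    )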
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 33a39e6038..fad7ddad59 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -40,14 +40,14 @@ import scala.collection.parallel.Task
class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
- with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]]
+ with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
with ParHashTable[K, DefaultEntry[K, V]]
with Serializable
{
self =>
initWithContents(contents)
- type Entry = collection.mutable.DefaultEntry[K, V]
+ type Entry = scala.collection.mutable.DefaultEntry[K, V]
def this() = this(null)
@@ -57,7 +57,7 @@ self =>
protected[this] override def newCombiner = ParHashMapCombiner[K, V]
- override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
+ override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents)
def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
@@ -67,13 +67,13 @@ self =>
def get(key: K): Option[V] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
def put(key: K, value: V): Option[V] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -86,9 +86,8 @@ self =>
}
def += (kv: (K, V)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -103,12 +102,19 @@ self =>
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
+ protected def createNewEntry[V1](key: K, value: V1): Entry = {
+ new Entry(key, value.asInstanceOf[V])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[V](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
}
private[parallel] override def brokenInvariants = {
@@ -190,7 +196,9 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
object table extends HashTable[K, DefaultEntry[K, V]] {
- def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+ type Entry = DefaultEntry[K, V]
+ def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+ def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
sizeMapInit(table.length)
}
var i = 0
@@ -251,6 +259,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
assert(h >= block * blocksize && h < (block + 1) * blocksize)
}
}
+ protected def createNewEntry[X](key: K, x: X) = ???
}
/* tasks */
@@ -302,7 +311,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
override def merge(that: FillBlocks) {
this.result += that.result
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
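The rewritten `put` and `+=` rely on the new `HashTable` contract: `findOrAddEntry` inserts an entry built by the subclass's `createNewEntry` and returns `null`, or returns the already present entry untouched. A toy, self-contained illustration of that contract (not the real `HashTable`):

    // Toy model of the findOrAddEntry/createNewEntry contract used above;
    // this is not the real HashTable implementation.
    class ToyTable[K, V] {
      final class Entry(val key: K, var value: V)
      private val buckets = scala.collection.mutable.Map.empty[K, Entry]

      protected def createNewEntry(key: K, value: V): Entry = new Entry(key, value)

      // null means "a fresh entry was inserted"; otherwise the existing entry
      // is returned untouched, which is exactly the distinction put/+= need.
      protected def findOrAddEntry(key: K, value: V): Entry =
        buckets.get(key) match {
          case Some(e) => e
          case None    => buckets(key) = createNewEntry(key, value); null
        }

      def put(key: K, value: V): Option[V] = {
        val e = findOrAddEntry(key, value)
        if (e eq null) None
        else { val old = e.value; e.value = value; Some(old) }
      }
    }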
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 870cae75de..aef9f6856b 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -36,7 +36,7 @@ import scala.collection.parallel.Task
class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
- with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]]
+ with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]]
with ParFlatHashTable[T]
with Serializable
{
@@ -57,7 +57,7 @@ extends ParSet[T]
def clear() = clearTable()
- override def seq = new collection.mutable.HashSet(hashTableContents)
+ override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
addEntry(elem)
@@ -158,12 +158,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
val tbl = new FlatHashTable[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
+ for {
+ buffer <- buckets;
+ if buffer ne null;
+ elem <- buffer
+ } addEntry(elem.asInstanceOf[T])
}
- for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } tbl.addEntry(elem.asInstanceOf[T])
tbl.hashTableContents
}
@@ -310,7 +310,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
// the total number of successfully inserted elements is adjusted accordingly
result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2)
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 7cf464c287..bb9a7b7823 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index a2847c3beb..9281e84c03 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -26,7 +26,7 @@ import scala.collection.GenIterable
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
+trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]]
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 9ad14f15f8..34b3d465d2 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -28,11 +28,11 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection/*.mutable*/.GenMap[K, V]
+extends scala.collection/*.mutable*/.GenMap[K, V]
with scala.collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
+ with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 15f8d1d0b5..7322d5236f 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -29,11 +29,11 @@ import scala.collection.GenSeq
* @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
+trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
with ParIterable[T]
with scala.collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] {
self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
//protected[this] override def newBuilder = ParSeq.newBuilder[T]
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 689ce3436f..540ecb8022 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -27,11 +27,11 @@ import scala.collection.GenSet
* @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection/*.mutable*/.GenSet[T]
+extends scala.collection/*.mutable*/.GenSet[T]
with ParIterable[T]
with scala.collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
- with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]]
{
self =>
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 01eb17024e..68f37137f8 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -81,7 +81,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
val fp = howmany / 2
List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 9648791502..5600d0f68c 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -8,10 +8,6 @@
package scala.collection.parallel.mutable
-
-
-
-
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
@@ -23,16 +19,12 @@ import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
import scala.reflect.ClassTag
-
-
-
private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
}
-
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
@@ -85,7 +77,7 @@ extends Combiner[T, ParArray[T]] {
var pos = startpos
var arroffset = offset
while (totalleft > 0) {
- val lefthere = math.min(totalleft, curr.size - pos)
+ val lefthere = scala.math.min(totalleft, curr.size - pos)
Array.copy(curr.array, pos, array, arroffset, lefthere)
// println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr)
totalleft -= lefthere
@@ -107,13 +99,11 @@ extends Combiner[T, ParArray[T]] {
val fp = howmany / 2
List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")"
}
}
-
-
object UnrolledParArrayCombiner {
def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 30b4c0c914..a95090c15b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.CanCombineFrom
@@ -41,8 +42,8 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
+ if (scala.util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = scala.util.Properties.javaVmVendor
if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
else new ThreadPoolTaskSupport
} else new ThreadPoolTaskSupport
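Only the qualification of `scala.util.Properties` changes here; the vendor check that picks fork/join over a plain thread pool is unchanged. If the default chosen by `getTaskSupport` is not wanted, it can be overridden per collection; a sketch, where the pool size 4 is an arbitrary example and `scala.concurrent.forkjoin` is the 2.10-era location of the fork/join classes:

    // Overriding the TaskSupport that getTaskSupport would otherwise provide.
    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.concurrent.forkjoin.ForkJoinPool

    val pc = scala.collection.parallel.mutable.ParArray(1, 2, 3, 4)
    pc.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))  // example pool size
    val doubled = pc.map(_ * 2)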
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index f18ce12e6c..77c12a8e58 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -109,7 +109,7 @@ object Platform {
* `System.getProperty("line.separator")`
* with a default value of "\n".
*/
- val EOL = util.Properties.lineSeparator
+ val EOL = scala.util.Properties.lineSeparator
/** The current time in milliseconds. The time is counted since 1 January 1970
* UTC.
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index 655115349a..3bd7617bce 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -10,7 +10,7 @@ package scala.concurrent
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala
new file mode 100644
index 0000000000..50e9a75ff7
--- /dev/null
+++ b/src/library/scala/concurrent/duration/Deadline.scala
@@ -0,0 +1,81 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+/**
+ * This class stores a deadline, as obtained via `Deadline.now` or the
+ * duration DSL:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * 3.seconds.fromNow
+ * }}}
+ *
+ * Its main purpose is to manage repeated attempts to achieve something (like
+ * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All
+ * durations are measured according to `System.nanoTime`, i.e. elapsed time on
+ * a monotonic clock; this does not take into account changes to the system
+ * clock (such as leap seconds).

+ */
+case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] {
+ /**
+ * Return a deadline advanced (i.e. moved into the future) by the given duration.
+ */
+ def +(other: FiniteDuration): Deadline = copy(time = time + other)
+ /**
+ * Return a deadline moved backwards (i.e. towards the past) by the given duration.
+ */
+ def -(other: FiniteDuration): Deadline = copy(time = time - other)
+ /**
+ * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative).
+ */
+ def -(other: Deadline): FiniteDuration = time - other.time
+ /**
+ * Calculate the time difference between this deadline and now; the result is negative if the deadline has passed.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def timeLeft: FiniteDuration = this - Deadline.now
+ /**
+ * Determine whether the deadline still lies in the future at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def hasTimeLeft(): Boolean = !isOverdue()
+ /**
+ * Determine whether the deadline lies in the past at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ def compare(other: Deadline) = time compare other.time
+}
+
+object Deadline {
+ /**
+ * Construct a deadline due exactly at the point where this method is called. Useful for then
+ * advancing it to obtain a future deadline, or for sampling the current time exactly once and
+ * then comparing it to multiple deadlines (using subtraction).
+ */
+ def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ implicit object DeadlineIsOrdered extends Ordering[Deadline] {
+ def compare(a: Deadline, b: Deadline) = a compare b
+ }
+
+}
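A brief sketch of the retry-until-deadline pattern this scaladoc describes; the 3-second budget and the 50 ms pause are arbitrary example values:

    // Poll a condition until it holds or the deadline passes.
    import scala.concurrent.duration._

    def awaitCondition(condition: () => Boolean,
                       atMost: FiniteDuration = 3.seconds): Boolean = {
      val deadline = atMost.fromNow
      while (!condition() && deadline.hasTimeLeft()) {
        val pause = deadline.timeLeft min 50.millis   // never sleep past the deadline
        if (pause > Duration.Zero) Thread.sleep(pause.toMillis)
      }
      condition()
    }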
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 3f8b98831e..79f9b4db86 100644
--- a/src/library/scala/concurrent/util/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -6,90 +6,12 @@
** |/ **
\* */
-package scala.concurrent.util
+package scala.concurrent.duration
-import java.util.concurrent.TimeUnit
-import TimeUnit._
import java.lang.{ Double => JDouble, Long => JLong }
import scala.language.implicitConversions
-/**
- * This class stores a deadline, as obtained via `Deadline.now` or the
- * duration DSL:
- *
- * {{{
- * import scala.concurrent.util.duration._
- * 3.seconds.fromNow
- * }}}
- *
- * Its main purpose is to manage repeated attempts to achieve something (like
- * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All
- * durations are measured according to `System.nanoTime` aka wall-time; this
- * does not take into account changes to the system clock (such as leap
- * seconds).
- */
-case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] {
- /**
- * Return a deadline advanced (i.e. moved into the future) by the given duration.
- */
- def +(other: FiniteDuration): Deadline = copy(time = time + other)
- /**
- * Return a deadline moved backwards (i.e. towards the past) by the given duration.
- */
- def -(other: FiniteDuration): Deadline = copy(time = time - other)
- /**
- * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative).
- */
- def -(other: Deadline): FiniteDuration = time - other.time
- /**
- * Calculate time difference between this duration and now; the result is negative if the deadline has passed.
- *
- * '''''Note that on some systems this operation is costly because it entails a system call.'''''
- * Check `System.nanoTime` for your platform.
- */
- def timeLeft: FiniteDuration = this - Deadline.now
- /**
- * Determine whether the deadline still lies in the future at the point where this method is called.
- *
- * '''''Note that on some systems this operation is costly because it entails a system call.'''''
- * Check `System.nanoTime` for your platform.
- */
- def hasTimeLeft(): Boolean = !isOverdue()
- /**
- * Determine whether the deadline lies in the past at the point where this method is called.
- *
- * '''''Note that on some systems this operation is costly because it entails a system call.'''''
- * Check `System.nanoTime` for your platform.
- */
- def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
- /**
- * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
- */
- def compare(other: Deadline) = time compare other.time
-}
-
-object Deadline {
- /**
- * Construct a deadline due exactly at the point where this method is called. Useful for then
- * advancing it to obtain a future deadline, or for sampling the current time exactly once and
- * then comparing it to multiple deadlines (using subtraction).
- */
- def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
-
- /**
- * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
- */
- implicit object DeadlineIsOrdered extends Ordering[Deadline] {
- def compare(a: Deadline, b: Deadline) = a compare b
- }
-}
-
object Duration {
- /**
- * This implicit conversion allows the use of a Deadline in place of a Duration, which will
- * insert the time left until the deadline in its place.
- */
- implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft
/**
* Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
@@ -102,11 +24,13 @@ object Duration {
* @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
*/
def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
+
/**
* Construct a finite duration from the given length and time unit. The unit given is retained
* throughout calculations as long as possible, so that it can be retrieved later.
*/
def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
+
/**
* Construct a finite duration from the given length and time unit, where the latter is
* looked up in a list of string representation. Valid choices are:
@@ -117,7 +41,7 @@ object Duration {
def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
// Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53
- final val maxPreciseDouble = 9007199254740992d
+ private[this] final val maxPreciseDouble = 9007199254740992d
/**
* Parse String into Duration. Format is `"<length><unit>"`, where
@@ -161,11 +85,11 @@ object Duration {
)
// TimeUnit => standard label
- protected[util] val timeUnitName: Map[TimeUnit, String] =
+ protected[duration] val timeUnitName: Map[TimeUnit, String] =
timeUnitLabels.toMap mapValues (s => words(s).last) toMap
// Label => TimeUnit
- protected[util] val timeUnit: Map[String, TimeUnit] =
+ protected[duration] val timeUnit: Map[String, TimeUnit] =
timeUnitLabels flatMap { case (unit, names) => expandLabels(names) map (_ -> unit) } toMap
/**
@@ -275,13 +199,13 @@ object Duration {
if (factor == 0d || factor.isNaN) Undefined
else if (factor < 0d) -this
else this
- def /(factor: Double): Duration =
- if (factor.isNaN || factor.isInfinite) Undefined
- else if ((factor compare 0d) < 0) -this
+ def /(divisor: Double): Duration =
+ if (divisor.isNaN || divisor.isInfinite) Undefined
+ else if ((divisor compare 0d) < 0) -this
else this
- def /(other: Duration): Double = other match {
+ def /(divisor: Duration): Double = divisor match {
case _: Infinite => Double.NaN
- case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ case x => Double.PositiveInfinity * (if ((this > Zero) ^ (divisor >= Zero)) -1 else 1)
}
final def isFinite() = false
@@ -380,8 +304,7 @@ object Duration {
* <p/>
* Examples:
* {{{
- * import scala.concurrent.util.Duration
- * import java.util.concurrent.TimeUnit
+ * import scala.concurrent.duration._
*
* val duration = Duration(100, MILLISECONDS)
* val duration = Duration(100, "millis")
@@ -396,7 +319,7 @@ object Duration {
* <p/>
* Implicits are also provided for Int, Long and Double. Example usage:
* {{{
- * import scala.concurrent.util.Duration._
+ * import scala.concurrent.duration._
*
* val duration = 100 millis
* }}}
@@ -529,12 +452,12 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] {
*
* $ovf
*/
- def /(factor: Double): Duration
+ def /(divisor: Double): Duration
/**
* Return the quotient of this and that duration as floating-point number. The semantics are
* determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
*/
- def /(other: Duration): Double
+ def /(divisor: Duration): Double
/**
* Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
*/
@@ -561,7 +484,7 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] {
*
* $ovf
*/
- def div(factor: Double) = this / factor
+ def div(divisor: Double) = this / divisor
/**
* Return the quotient of this and that duration as floating-point number. The semantics are
* determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
@@ -599,6 +522,7 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] {
}
object FiniteDuration {
+
implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] {
def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
}
@@ -691,29 +615,77 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
else if ((factor > 0) ^ (this < Zero)) Inf
else MinusInf
- def /(factor: Double) =
- if (!factor.isInfinite) fromNanos(toNanos / factor)
- else if (factor.isNaN) Undefined
+ def /(divisor: Double) =
+ if (!divisor.isInfinite) fromNanos(toNanos / divisor)
+ else if (divisor.isNaN) Undefined
else Zero
// if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
private[this] def minusZero = -0d
- def /(other: Duration): Double =
- if (other.isFinite) toNanos.toDouble / other.toNanos
- else if (other eq Undefined) Double.NaN
- else if ((length < 0) ^ (other > Zero)) 0d
+ def /(divisor: Duration): Double =
+ if (divisor.isFinite) toNanos.toDouble / divisor.toNanos
+ else if (divisor eq Undefined) Double.NaN
+ else if ((length < 0) ^ (divisor > Zero)) 0d
else minusZero
- // overridden methods taking FiniteDurations, so that you can calculate while statically staying finite
+ // overloaded methods taking FiniteDurations, so that you can calculate while statically staying finite
def +(other: FiniteDuration) = add(other.length, other.unit)
def -(other: FiniteDuration) = add(-other.length, other.unit)
def plus(other: FiniteDuration) = this + other
def minus(other: FiniteDuration) = this - other
- override def div(factor: Double) = this / factor
- override def mul(factor: Double) = this * factor
def min(other: FiniteDuration) = if (this < other) this else other
def max(other: FiniteDuration) = if (this > other) this else other
+ // overloaded methods taking Long so that you can calculate while statically staying finite
+
+ /**
+ * Return the quotient of this duration and the given integer divisor.
+ *
+ * @throws ArithmeticException if the divisor is 0
+ */
+ def /(divisor: Long) = fromNanos(toNanos / divisor)
+
+ /**
+ * Return the product of this duration and the given integer factor.
+ *
+ * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration
+ */
+ def *(factor: Long) = new FiniteDuration(safeMul(length, factor), unit)
+
+ /*
+ * This method avoids the use of Long division, which saves 95% of the time spent,
+ * by checking that there are enough leading zeros so that the result has a chance
+ * to fit into a Long again; the remaining edge cases are caught by using the sign
+ * of the product for overflow detection.
+ *
+ * This method is not general purpose because it disallows the (otherwise legal)
+ * case of Long.MinValue * 1, but that is okay for use in FiniteDuration, since
+ * Long.MinValue is not a legal `length` anyway.
+ */
+ private def safeMul(_a: Long, _b: Long): Long = {
+ val a = math.abs(_a)
+ val b = math.abs(_b)
+ import java.lang.Long.{ numberOfLeadingZeros => leading }
+ if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow")
+ val product = a * b
+ if (product < 0) throw new IllegalArgumentException("multiplication overflow")
+ if (a == _a ^ b == _b) -product else product
+ }
+
+ /**
+ * Return the quotient of this duration and the given integer divisor.
+ *
+ * @throws ArithmeticException if the divisor is 0
+ */
+ def div(divisor: Long) = this / divisor
+
+ /**
+ * Return the product of this duration and the given integer factor.
+ *
+ * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration
+ */
+ def mul(factor: Long) = this * factor
+
def unary_- = Duration(-length, unit)
final def isFinite() = true
@@ -724,78 +696,3 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
}
override def hashCode = toNanos.toInt
}
-
-trait DurationConversions extends Any {
- import duration.Classifier
- protected def durationIn(unit: TimeUnit): FiniteDuration
-
- def nanoseconds = durationIn(NANOSECONDS)
- def nanos = nanoseconds
- def nanosecond = nanoseconds
- def nano = nanoseconds
-
- def microseconds = durationIn(MICROSECONDS)
- def micros = microseconds
- def microsecond = microseconds
- def micro = microseconds
-
- def milliseconds = durationIn(MILLISECONDS)
- def millis = milliseconds
- def millisecond = milliseconds
- def milli = milliseconds
-
- def seconds = durationIn(SECONDS)
- def second = seconds
-
- def minutes = durationIn(MINUTES)
- def minute = minutes
-
- def hours = durationIn(HOURS)
- def hour = hours
-
- def days = durationIn(DAYS)
- def day = days
-
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(nanoseconds)
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = nanoseconds(c)
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = nanoseconds(c)
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = nanoseconds(c)
-
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(microseconds)
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = microseconds(c)
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = microseconds(c)
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = microseconds(c)
-
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(milliseconds)
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = milliseconds(c)
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = milliseconds(c)
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = milliseconds(c)
-
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(seconds)
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = seconds(c)
-
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(minutes)
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = minutes(c)
-
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(hours)
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = hours(c)
-
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(days)
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = days(c)
-}
-
-final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
- override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
-}
-
-final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
- override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
-}
-
-final class DurationDouble(val d: Double) extends AnyVal with DurationConversions {
- override protected def durationIn(unit: TimeUnit): FiniteDuration =
- Duration(d, unit) match {
- case f: FiniteDuration => f
- case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d)
- }
-}
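The new `Long` overloads keep results statically finite, and `safeMul` rejects products that would overflow. A small usage sketch with arbitrary values:

    // Long overloads preserve the static type FiniteDuration; the Double
    // overloads still return a possibly infinite Duration.
    import scala.concurrent.duration._

    val tick:   FiniteDuration = 250.millis
    val window: FiniteDuration = tick * 4     // Long overload, still finite (1000 ms)
    val slice:  FiniteDuration = window / 8   // Long overload, still finite (125 ms)
    val scaled: Duration       = tick * 4.0   // Double overload, may be infinite

    // safeMul guards the Long overload:
    // Long.MaxValue.nanos * 2  throws IllegalArgumentException("multiplication overflow")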
diff --git a/src/library/scala/concurrent/duration/DurationConversions.scala b/src/library/scala/concurrent/duration/DurationConversions.scala
new file mode 100644
index 0000000000..2c7e192a0e
--- /dev/null
+++ b/src/library/scala/concurrent/duration/DurationConversions.scala
@@ -0,0 +1,92 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+import DurationConversions._
+
+// Would be nice to limit the visibility of this trait a little bit,
+// but it crashes scalac to do so.
+trait DurationConversions extends Any {
+ protected def durationIn(unit: TimeUnit): FiniteDuration
+
+ def nanoseconds = durationIn(NANOSECONDS)
+ def nanos = nanoseconds
+ def nanosecond = nanoseconds
+ def nano = nanoseconds
+
+ def microseconds = durationIn(MICROSECONDS)
+ def micros = microseconds
+ def microsecond = microseconds
+ def micro = microseconds
+
+ def milliseconds = durationIn(MILLISECONDS)
+ def millis = milliseconds
+ def millisecond = milliseconds
+ def milli = milliseconds
+
+ def seconds = durationIn(SECONDS)
+ def second = seconds
+
+ def minutes = durationIn(MINUTES)
+ def minute = minutes
+
+ def hours = durationIn(HOURS)
+ def hour = hours
+
+ def days = durationIn(DAYS)
+ def day = days
+
+ def nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds)
+ def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+ def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+ def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+
+ def microseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(microseconds)
+ def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+ def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+ def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+
+ def milliseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(milliseconds)
+ def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+ def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+ def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+
+ def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds)
+ def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c)
+
+ def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes)
+ def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c)
+
+ def hours[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours)
+ def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c)
+
+ def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days)
+ def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c)
+}
+
+/**
+ * This object just holds some cogs which make the DSL machine work, not for direct consumption.
+ */
+object DurationConversions {
+ trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+ }
+
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+}
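The reworked `Classifier` exposes its result as the dependent type `ev.R`, so the same DSL word yields different result types depending on the closing token:

    // span keeps a FiniteDuration, fromNow turns the same phrase into a Deadline.
    import scala.concurrent.duration._

    val pause: FiniteDuration = 2 seconds span      // ev = spanConvert,    ev.R = FiniteDuration
    val limit: Deadline       = 30 seconds fromNow  // ev = fromNowConvert, ev.R = Deadline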
diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala
new file mode 100644
index 0000000000..2fd735f19e
--- /dev/null
+++ b/src/library/scala/concurrent/duration/package.scala
@@ -0,0 +1,75 @@
+package scala.concurrent
+
+import scala.language.implicitConversions
+
+package object duration {
+ /**
+ * This object can be used as closing token if you prefer dot-less style but do not want
+ * to enable language.postfixOps:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = 2 seconds span
+ * }}}
+ */
+ object span
+
+ /**
+ * This object can be used as closing token for declaring a deadline at some future point
+ * in time:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val deadline = 3 seconds fromNow
+ * }}}
+ */
+ object fromNow
+
+ type TimeUnit = java.util.concurrent.TimeUnit
+ final val DAYS = java.util.concurrent.TimeUnit.DAYS
+ final val HOURS = java.util.concurrent.TimeUnit.HOURS
+ final val MICROSECONDS = java.util.concurrent.TimeUnit.MICROSECONDS
+ final val MILLISECONDS = java.util.concurrent.TimeUnit.MILLISECONDS
+ final val MINUTES = java.util.concurrent.TimeUnit.MINUTES
+ final val NANOSECONDS = java.util.concurrent.TimeUnit.NANOSECONDS
+ final val SECONDS = java.util.concurrent.TimeUnit.SECONDS
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit)
+
+ implicit final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ }
+
+ implicit final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ }
+
+ implicit final class DurationDouble(val d: Double) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration =
+ Duration(d, unit) match {
+ case f: FiniteDuration => f
+ case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d)
+ }
+ }
+
+ /*
+ * Avoid reflection based invocation by using non-duck type
+ */
+ implicit final class IntMult(val i: Int) extends AnyVal {
+ def *(d: Duration) = d * i
+ def *(d: FiniteDuration) = d * i
+ }
+
+ implicit final class LongMult(val i: Long) extends AnyVal {
+ def *(d: Duration) = d * i
+ def *(d: FiniteDuration) = d * i
+ }
+
+ implicit final class DoubleMult(val f: Double) extends AnyVal {
+ def *(d: Duration) = d * f
+ }
+}
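Taken together, the package object wires up the value-class DSL words, left-hand multiplication and the tuple conversions; a brief tour with arbitrary values:

    // What the implicits above enable.
    import scala.concurrent.duration._

    val a: FiniteDuration = 500.millis     // DurationInt
    val b: FiniteDuration = 2L.minutes     // DurationLong
    val c: FiniteDuration = 1.5.hours      // DurationDouble (finite input)
    val d: FiniteDuration = 3 * a          // IntMult: Int on the left-hand side
    val e: Duration       = (10, SECONDS)  // pairIntToDuration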
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 9228872f2b..ff268d850c 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,16 +8,12 @@
package scala.concurrent.impl
-
-
-import java.util.concurrent.TimeUnit.NANOSECONDS
import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
-import scala.concurrent.util.{ Duration, Deadline, FiniteDuration }
+import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
-
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
}
@@ -48,7 +44,7 @@ private[concurrent] object Promise {
case Failure(t) => resolver(t)
case _ => source
}
-
+
private def resolver[T](throwable: Throwable): Try[T] = throwable match {
case t: scala.runtime.NonLocalReturnControl[_] => Success(t.value.asInstanceOf[T])
case t: scala.util.control.ControlThrowable => Failure(new ExecutionException("Boxed ControlThrowable", t))
@@ -56,12 +52,12 @@ private[concurrent] object Promise {
case e: Error => Failure(new ExecutionException("Boxed Error", e))
case t => Failure(t)
}
-
+
/** Default promise implementation.
*/
class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
updateState(null, Nil) // Start at "No callbacks"
-
+
protected final def tryAwait(atMost: Duration): Boolean = {
@tailrec
def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
@@ -111,7 +107,7 @@ private[concurrent] object Promise {
case _ => None
}
- override def isCompleted(): Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
+ override def isCompleted: Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
case _: Try[_] => true
case _ => false
}
@@ -160,7 +156,7 @@ private[concurrent] object Promise {
val value = Some(resolveTry(suppliedValue))
- override def isCompleted(): Boolean = true
+ override def isCompleted: Boolean = true
def tryComplete(value: Try[T]): Boolean = false
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 1d06341d4d..e683732e41 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -8,7 +8,7 @@
package scala
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
import scala.annotation.implicitNotFound
/** This package object contains primitives for concurrent and parallel programming.
diff --git a/src/library/scala/concurrent/util/duration/Classifier.scala b/src/library/scala/concurrent/util/duration/Classifier.scala
deleted file mode 100644
index 10faf0a5ce..0000000000
--- a/src/library/scala/concurrent/util/duration/Classifier.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.concurrent.util.duration
-
-import scala.concurrent.util.{ FiniteDuration }
-
-trait Classifier[C] {
- type R
- def convert(d: FiniteDuration): R
-}
-
diff --git a/src/library/scala/concurrent/util/duration/IntMult.scala b/src/library/scala/concurrent/util/duration/IntMult.scala
deleted file mode 100644
index 94c58fb8c2..0000000000
--- a/src/library/scala/concurrent/util/duration/IntMult.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.concurrent.util.duration
-
-import scala.concurrent.util.{ Duration }
-
-/*
- * Avoid reflection based invocation by using non-duck type
- */
-protected[duration] class IntMult(i: Int) {
- def *(d: Duration) = d * i
-}
-
-protected[duration] class LongMult(i: Long) {
- def *(d: Duration) = d * i
-}
-
-protected[duration] class DoubleMult(f: Double) {
- def *(d: Duration) = d * f
-}
diff --git a/src/library/scala/concurrent/util/duration/package.scala b/src/library/scala/concurrent/util/duration/package.scala
deleted file mode 100644
index d5ae3f1544..0000000000
--- a/src/library/scala/concurrent/util/duration/package.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.concurrent.util
-
-import java.util.concurrent.TimeUnit
-import scala.language.implicitConversions
-
-package object duration {
-
- object span
- implicit object spanConvert extends Classifier[span.type] {
- type R = FiniteDuration
- def convert(d: FiniteDuration) = d
- }
-
- object fromNow
- implicit object fromNowConvert extends Classifier[fromNow.type] {
- type R = Deadline
- def convert(d: FiniteDuration) = Deadline.now + d
- }
-
- implicit def intToDurationInt(n: Int) = new DurationInt(n)
- implicit def longToDurationLong(n: Long) = new DurationLong(n)
- implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
-
- implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
- implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
- implicit def durationToPair(d: Duration) = (d.length, d.unit)
-
- implicit def intMult(i: Int) = new IntMult(i)
- implicit def longMult(l: Long) = new LongMult(l)
- implicit def doubleMult(f: Double) = new DoubleMult(f)
-}
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index fa74be0f98..6522cd0cd8 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -91,7 +91,7 @@ object Codec extends LowPriorityCodecImplicits {
* as an accident, with any anomalies considered "not a bug".
*/
def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(util.Properties.encodingString)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 0d0d0d7648..dae478f31a 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -54,7 +54,7 @@ abstract class Position {
if (line >= LINE_MASK)
LINE_MASK << COLUMN_BITS
else
- (line << COLUMN_BITS) | math.min(COLUMN_MASK, column)
+ (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column)
}
/** Returns the line number of the encoded position. */
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index dfe27f8857..c638f531bb 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -1,5 +1,28 @@
package scala
+/**
+ * The `scala.language` object controls the language features available to the programmer, as proposed in the
+ * [[https://docs.google.com/document/d/1nlkvpoIRkx7at1qJEZafJwthZ3GeIklTFhqmXMvTX9Q/edit '''SIP-18 document''']].
+ *
+ * Each of these features has to be explicitly imported into the current scope to become available:
+ * {{{
+ * import language.postfixOps // or language._
+ * List(1, 2, 3) reverse
+ * }}}
+ *
+ * The language features are:
+ * - [[dynamics `dynamics`]] enables defining subtypes of [[scala.Dynamic `Dynamic`]], whose member selections are rewritten
+ * - [[postfixOps `postfixOps`]] enables postfix operators
+ * - [[reflectiveCalls `reflectiveCalls`]] enables using structural types
+ * - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members
+ * - [[higherKinds `higherKinds`]] enables writing higher-kinded types
+ * - [[existentials `existentials`]] enables writing existential types
+ * - [[experimental `experimental`]] contains newer features that have not yet been tested in production
+ *
+ * @groupname production Language Features
+ * @groupname experimental Experimental Language Features
+ * @groupprio experimental 10
+ */
object language {
import languageFeature._
@@ -10,37 +33,43 @@ object language {
* selection of existing subclasses of trait Dynamic are unaffected;
* they can be used anywhere.
*
- * _Why introduce the feature?_ To enable flexible DSLs and convenient interfacing
+ * '''Why introduce the feature?''' To enable flexible DSLs and convenient interfacing
* with dynamic languages.
*
- * _Why control it?_ Dynamic member selection can undermine static checkability
+ * '''Why control it?''' Dynamic member selection can undermine static checkability
* of programs. Furthermore, dynamic member selection often relies on reflection,
* which is not available on all platforms.
+ *
+ * @group production
*/
implicit lazy val dynamics: dynamics = languageFeature.dynamics
/** Only where enabled, postfix operator notation `(expr op)` will be allowed.
*
- * _Why keep the feature?_ Several DSLs written in Scala need the notation.
+ * '''Why keep the feature?''' Several DSLs written in Scala need the notation.
*
- * _Why control it?_ Postfix operators interact poorly with semicolon inference.
+ * '''Why control it?''' Postfix operators interact poorly with semicolon inference.
* Most programmers avoid them for this reason.
+ *
+ * @group production
*/
implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
/** Only where enabled, accesses to members of structural types that need
- * reflection are supported. Reminder: A structural type is a type of the form
+ * reflection are supported. Reminder: A structural type is a type of the form
* `Parents { Decls }` where `Decls` contains declarations of new members that do
* not override any member in `Parents`. To access one of these members, a
* reflective call is needed.
*
- * _Why keep the feature?_ Structural types provide great flexibility because
+ * '''Why keep the feature?''' Structural types provide great flexibility because
* they avoid the need to define inheritance hierarchies a priori. Besides,
* their definition falls out quite naturally from Scala’s concept of type refinement.
*
- * _Why control it?+ Reflection is not available on all platforms. Popular tools
+ * '''Why control it?''' Reflection is not available on all platforms. Popular tools
* such as ProGuard have problems dealing with it. Even where reflection is available,
* reflective dispatch can lead to surprising performance degradations.
+ *
+ * @group production
*/
implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls
@@ -49,32 +78,36 @@ object language {
* or an implicit method that has in its first parameter section a single,
* non-implicit parameter. Examples:
*
+ * {{{
* implicit def stringToInt(s: String): Int = s.length
* implicit val conv = (s: String) => s.length
- * implicit def listToX(xs: List[T])(implicit f: T => X): X = …
- *
+ * implicit def listToX(xs: List[T])(implicit f: T => X): X = ...
+ * }}}
+ *
* implicit values of other types are not affected, and neither are implicit
* classes.
*
- * _Why keep the feature?_ Implicit conversions are central to many aspects
+ * '''Why keep the feature?''' Implicit conversions are central to many aspects
* of Scala’s core libraries.
*
- * _Why control it?_ Implicit conversions are known to cause many pitfalls
+ * '''Why control it?''' Implicit conversions are known to cause many pitfalls
* if over-used. And there is a tendency to over-use them because they look
* very powerful and their effects seem to be easy to understand. Also, in
* most situations using implicit parameters leads to a better design than
* implicit conversions.
+ *
+ * @group production
*/
implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions
/** Only where this flag is enabled, higher-kinded types can be written.
*
- * _Why keep the feature?_ Higher-kinded types enable the definition of very general
+ * '''Why keep the feature?''' Higher-kinded types enable the definition of very general
* abstractions such as functor, monad, or arrow. A significant set of advanced
* libraries relies on them. Higher-kinded types are also at the core of the
* scala-virtualized effort to produce high-performance parallel DSLs through staging.
*
- * _Why control it?_ Higher kinded types in Scala lead to a Turing-complete
+ * '''Why control it?''' Higher kinded types in Scala lead to a Turing-complete
* type system, where compiler termination is no longer guaranteed. They tend
* to be useful mostly for type-level computation and for highly generic design
* patterns. The level of abstraction implied by these design patterns is often
@@ -85,6 +118,8 @@ object language {
* higher-kinded types will change in future versions of Scala. So an explicit
* enabling also serves as a warning that code involving higher-kinded types
* might have to be slightly revised in the future.
+ *
+ * @group production
*/
implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds
@@ -93,30 +128,44 @@ object language {
* types of methods. Existential types with wildcard type syntax such as `List[_]`,
* or `Map[String, _]` are not affected.
*
- * _Why keep the feature?_ Existential types are needed to make sense of Java’s wildcard
+ * '''Why keep the feature?''' Existential types are needed to make sense of Java’s wildcard
* types and raw types and the erased types of run-time values.
- *
- * Why control it? Having complex existential types in a code base usually makes
+ *
+ * '''Why control it?''' Having complex existential types in a code base usually makes
* application code very brittle, with a tendency to produce type errors with
* obscure error messages. Therefore, going overboard with existential types
* is generally perceived not to be a good idea. Also, complicated existential types
* might be no longer supported in a future simplification of the language.
+ *
+ * @group production
*/
implicit lazy val existentials: existentials = languageFeature.existentials
+ /** The experimental object contains features that have been recently added but have not
+ * been thoroughly tested in production yet.
+ *
+ * Experimental features '''may undergo API changes''' in future releases, so production
+ * code should not rely on them.
+ *
+ * Programmers are encouraged to try out experimental features and
+ * [[http://issues.scala-lang.org report any bugs or API inconsistencies]]
+ * they encounter so they can be improved in future releases.
+ *
+ * @group experimental
+ */
object experimental {
import languageFeature.experimental._
/** Where enabled, macro definitions are allowed. Macro implementations and
* macro applications are unaffected; they can be used anywhere.
- *
- * _Why introduce the feature?_ Macros promise to make the language more regular,
+ *
+ * '''Why introduce the feature?''' Macros promise to make the language more regular,
* replacing ad-hoc language constructs with a general powerful abstraction
* capability that can express them. Macros are also a more disciplined and
* powerful replacement for compiler plugins.
*
- * _Why control it?_ For their very power, macros can lead to code that is hard
+ * '''Why control it?''' For their very power, macros can lead to code that is hard
* to debug and understand.
*/
implicit lazy val macros: macros = languageFeature.experimental.macros
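// A minimal sketch of a macro definition plus its implementation, assuming
// the 2.10-era scala.reflect.macros.Context API; names are illustrative.
// (The implementation must be compiled before client code can call hello.)
object MacroExample {
  import scala.language.experimental.macros
  import scala.reflect.macros.Context

  // The macro definition: callers see an ordinary method signature.
  def hello: Unit = macro helloImpl

  // The macro implementation: runs inside the compiler at each call site.
  def helloImpl(c: Context): c.Expr[Unit] = {
    import c.universe._
    reify { println("hello from a macro") }
  }
}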
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 3ac255b57f..719f2e12a7 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
import scala.language.{implicitConversions, higherKinds}
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 6757a72053..5c2067a548 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -1,24 +1,25 @@
-package scala.reflect
+package scala
+package reflect
import java.lang.{ Class => jClass }
import scala.language.{implicitConversions, existentials}
import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
-/** A `ClassTag[T]` wraps a runtime class, which can be accessed via the `runtimeClass` method.
+/** A `ClassTag[T]` wraps a runtime class (the erasure) and can create array instances.
*
- * This is useful in itself, but also enables very important use case.
- * Having this knowledge ClassTag can instantiate `Arrays`
- * in those cases where the element type is unknown at compile time.
+ * If an implicit value of type ClassTag[T] is requested, the compiler will create one.
+ * The runtime class (i.e. the erasure, a java.lang.Class on the JVM) of T can be accessed
+ * via the `runtimeClass` field. References to type parameters or abstract type members are
+ * replaced by the concrete types if ClassTags are available for them.
*
- * If an implicit value of type u.ClassTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its `runtimeClass` field the runtime class that is the result of erasing type T.
- * In that value, any occurrences of type parameters or abstract types U which come themselves with a ClassTag
- * are represented by the type referenced by that tag.
- * If the type T contains unresolved references to type parameters or abstract types, a static error results.
+ * Besides accessing the erasure, a ClassTag knows how to instantiate single- and multi-
+ * dimensional `Arrays` where the element type is unknown at compile time.
*
- * @see [[scala.reflect.base.TypeTags]]
+ * [[scala.reflect.ClassTag]] corresponds to a previous concept of [[scala.reflect.ClassManifest]].
+ *
+ * @see [[scala.reflect.api.TypeTags]]
*/
-@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
+@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
// class tags, and all tags in general, should be as minimalistic as possible
@@ -75,7 +76,7 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
if (conforms) Some(x.asInstanceOf[T]) else None
}
- /** case class accessories */
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass
override def hashCode = scala.runtime.ScalaRunTime.hash(runtimeClass)
@@ -87,6 +88,9 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
}
}
+/**
+ * Class tags corresponding to primitive types and constructor/extractor for ClassTags.
+ */
object ClassTag {
private val ObjectTYPE = classOf[java.lang.Object]
private val NothingTYPE = classOf[scala.runtime.Nothing$]
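// A small usage sketch of the behaviour described above: the ClassTag
// context bound gives access to the erasure and enables generic Array
// creation even though T is erased at runtime. Names are illustrative.
object ClassTagExample {
  import scala.reflect.ClassTag

  def pair[T: ClassTag](a: T, b: T): Array[T] = Array(a, b)

  val ints: Array[Int] = pair(1, 2)                     // backed by int[]
  val erasure = implicitly[ClassTag[Int]].runtimeClass  // the erasure of Int, i.e. int
}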
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index f2a23f4372..8b021e0444 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -38,7 +38,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
}}}
*
*/
-@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
@deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -264,4 +264,4 @@ object ManifestFactory {
def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
-} \ No newline at end of file
+}
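// A brief sketch of the replacement suggested by the deprecation message:
// a ClassTag captures the erasure, a TypeTag the full (unerased) type.
// Method names here are illustrative.
object ManifestMigrationExample {
  import scala.reflect.ClassTag
  import scala.reflect.runtime.universe.TypeTag

  def erasureOf[T: ClassTag]: Class[_] = implicitly[ClassTag[T]].runtimeClass
  def fullTypeOf[T: TypeTag] = implicitly[TypeTag[T]].tpe

  // erasureOf[List[Int]] is the runtime class of List, while
  // fullTypeOf[List[Int]] still knows about the Int type argument.
}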
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index ff56e20d52..77cbd20321 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** Provides functions to encode and decode Scala symbolic names.
* Also provides some constants.
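// For example (a sketch; encode and decode are the two main entry points):
object NameTransformerExample {
  val enc = scala.reflect.NameTransformer.encode("::")  // "$colon$colon"
  val dec = scala.reflect.NameTransformer.decode(enc)   // "::"
}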
diff --git a/src/library/scala/reflect/base/AnnotationInfos.scala b/src/library/scala/reflect/base/AnnotationInfos.scala
deleted file mode 100644
index f03644deef..0000000000
--- a/src/library/scala/reflect/base/AnnotationInfos.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.reflect
-package base
-
-trait AnnotationInfos { self: Universe =>
-
- type AnnotationInfo >: Null <: AnyRef
- implicit val AnnotationInfoTag: ClassTag[AnnotationInfo]
- val AnnotationInfo: AnnotationInfoExtractor
-
- abstract class AnnotationInfoExtractor {
- def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
- def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
- }
-
- type ClassfileAnnotArg >: Null <: AnyRef
- implicit val ClassfileAnnotArgTag: ClassTag[ClassfileAnnotArg]
-
- type LiteralAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val LiteralAnnotArgTag: ClassTag[LiteralAnnotArg]
- val LiteralAnnotArg: LiteralAnnotArgExtractor
-
- abstract class LiteralAnnotArgExtractor {
- def apply(const: Constant): LiteralAnnotArg
- def unapply(arg: LiteralAnnotArg): Option[Constant]
- }
-
- type ArrayAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val ArrayAnnotArgTag: ClassTag[ArrayAnnotArg]
- val ArrayAnnotArg: ArrayAnnotArgExtractor
-
- abstract class ArrayAnnotArgExtractor {
- def apply(args: Array[ClassfileAnnotArg]): ArrayAnnotArg
- def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
- }
-
- type NestedAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val NestedAnnotArgTag: ClassTag[NestedAnnotArg]
- val NestedAnnotArg: NestedAnnotArgExtractor
-
- abstract class NestedAnnotArgExtractor {
- def apply(annInfo: AnnotationInfo): NestedAnnotArg
- def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Attachments.scala b/src/library/scala/reflect/base/Attachments.scala
deleted file mode 100644
index 43e870fc4f..0000000000
--- a/src/library/scala/reflect/base/Attachments.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.reflect
-package base
-
-/** Attachments is a generalisation of Position.
- * Typically it stores a Position of a tree, but this can be extended to encompass arbitrary payloads.
- *
- * Attachments have to carry positions, because we don't want to introduce even a single additional field in Tree
- * imposing an unnecessary memory tax because of something that will not be used in most cases.
- */
-abstract class Attachments { self =>
-
- type Pos >: Null
-
- /** Gets the underlying position */
- def pos: Pos
-
- /** Creates a copy of this attachment with its position updated */
- def withPos(newPos: Pos): Attachments { type Pos = self.Pos }
-
- /** Gets the underlying payload */
- def all: Set[Any] = Set.empty
-
- def get[T: ClassTag]: Option[T] =
- (all find (_.getClass == classTag[T].runtimeClass)).asInstanceOf[Option[T]]
-
- /** Creates a copy of this attachment with its payload updated */
- def add(attachment: Any): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, all + attachment)
-
- def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
- val newAll = all filterNot (_.getClass == classTag[T].runtimeClass)
- if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
- else new NonemptyAttachments(this.pos, newAll)
- }
-
- private class NonemptyAttachments(override val pos: Pos, override val all: Set[Any]) extends Attachments {
- type Pos = self.Pos
- def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
- }
-}
-
-
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
deleted file mode 100644
index 33582675bd..0000000000
--- a/src/library/scala/reflect/base/Base.scala
+++ /dev/null
@@ -1,773 +0,0 @@
-package scala.reflect
-package base
-
-import java.io.PrintWriter
-import scala.annotation.switch
-import scala.ref.WeakReference
-import scala.collection.mutable
-
-class Base extends Universe { self =>
-
- private var nextId = 0
-
- abstract class Symbol(val name: Name, val flags: FlagSet) extends SymbolBase {
- val id = { nextId += 1; nextId }
- def owner: Symbol
- def fullName: String =
- if (isEffectiveRoot || owner.isEffectiveRoot) name.toString else owner.fullName + "." + name
- private def isEffectiveRoot =
- this == NoSymbol || this == rootMirror.RootClass || this == rootMirror.EmptyPackageClass
-
- def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol =
- new TermSymbol(this, name, flags)
-
- def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = {
- val c = new ModuleClassSymbol(this, name.toTypeName, flags)
- val m = new ModuleSymbol(this, name.toTermName, flags, c)
- (m, c)
- }
-
- def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
- = new MethodSymbol(this, name, flags)
-
- def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol =
- new TypeSymbol(this, name, flags)
-
- def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol =
- new ClassSymbol(this, name, flags)
-
- def newFreeTermSymbol(name: TermName, value: => Any, flags: FlagSet = NoFlags, origin: String = null) =
- new FreeTermSymbol(this, name, flags)
-
- def newFreeTypeSymbol(name: TypeName, flags: FlagSet = NoFlags, origin: String = null) =
- new FreeTypeSymbol(this, name, flags)
-
- private def kindString: String =
- if (isModule) "module"
- else if (isClass) "class"
- else if (isFreeType) "free type"
- else if (isType) "type"
- else if (isMethod) "method"
- else if (isFreeTerm) "free term"
- else if (isTerm) "value"
- else "symbol"
- override def toString() = s"$kindString $name"
- }
- implicit val SymbolTag = ClassTag[Symbol](classOf[Symbol])
-
- class TermSymbol(val owner: Symbol, override val name: TermName, flags: FlagSet)
- extends Symbol(name, flags) with TermSymbolBase
- implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
-
- class TypeSymbol(val owner: Symbol, override val name: TypeName, flags: FlagSet)
- extends Symbol(name, flags) with TypeSymbolBase {
- override def toTypeConstructor = TypeRef(ThisType(owner), this, Nil)
- override def toType = TypeRef(ThisType(owner), this, Nil)
- override def toTypeIn(site: Type) = TypeRef(ThisType(owner), this, Nil)
- }
- implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
-
- class MethodSymbol(owner: Symbol, name: TermName, flags: FlagSet)
- extends TermSymbol(owner, name, flags) with MethodSymbolBase
- implicit val MethodSymbolTag = ClassTag[MethodSymbol](classOf[MethodSymbol])
-
- class ModuleSymbol(owner: Symbol, name: TermName, flags: FlagSet, override val moduleClass: Symbol)
- extends TermSymbol(owner, name, flags) with ModuleSymbolBase
- implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
-
- class ClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
- extends TypeSymbol(owner, name, flags) with ClassSymbolBase
- class ModuleClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
- extends ClassSymbol(owner, name, flags) { override def isModuleClass = true }
- implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
-
- class FreeTermSymbol(owner: Symbol, name: TermName, flags: FlagSet)
- extends TermSymbol(owner, name, flags) with FreeTermSymbolBase
- implicit val FreeTermSymbolTag = ClassTag[FreeTermSymbol](classOf[FreeTermSymbol])
-
- class FreeTypeSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
- extends TypeSymbol(owner, name, flags) with FreeTypeSymbolBase
- implicit val FreeTypeSymbolTag = ClassTag[FreeTypeSymbol](classOf[FreeTypeSymbol])
-
-
- object NoSymbol extends Symbol(nme.NO_NAME, NoFlags) {
- override def owner = throw new UnsupportedOperationException("NoSymbol.owner")
- }
-
- // todo. write a decent toString that doesn't crash on recursive types
- class Type extends TypeBase {
- def termSymbol: Symbol = NoSymbol
- def typeSymbol: Symbol = NoSymbol
- }
- implicit val TypeTagg = ClassTag[Type](classOf[Type])
-
- val NoType = new Type { override def toString = "NoType" }
- val NoPrefix = new Type { override def toString = "NoPrefix" }
-
- class SingletonType extends Type
- implicit val SingletonTypeTag = ClassTag[SingletonType](classOf[SingletonType])
-
- case class ThisType(sym: Symbol) extends SingletonType { override val typeSymbol = sym }
- object ThisType extends ThisTypeExtractor
- implicit val ThisTypeTag = ClassTag[ThisType](classOf[ThisType])
-
- case class SingleType(pre: Type, sym: Symbol) extends SingletonType { override val termSymbol = sym }
- object SingleType extends SingleTypeExtractor
- implicit val SingleTypeTag = ClassTag[SingleType](classOf[SingleType])
-
- case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType
- object SuperType extends SuperTypeExtractor
- implicit val SuperTypeTag = ClassTag[SuperType](classOf[SuperType])
-
- case class ConstantType(value: Constant) extends SingletonType
- object ConstantType extends ConstantTypeExtractor
- implicit val ConstantTypeTag = ClassTag[ConstantType](classOf[ConstantType])
-
- case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type { override val typeSymbol = sym }
- object TypeRef extends TypeRefExtractor
- implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
-
- abstract class CompoundType extends Type
- implicit val CompoundTypeTag = ClassTag[CompoundType](classOf[CompoundType])
-
- case class RefinedType(parents: List[Type], decls: Scope) extends CompoundType
- object RefinedType extends RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType =
- RefinedType(parents, decls)
- }
- implicit val RefinedTypeTag = ClassTag[RefinedType](classOf[RefinedType])
-
- case class ClassInfoType(parents: List[Type], decls: Scope, override val typeSymbol: Symbol) extends CompoundType
- object ClassInfoType extends ClassInfoTypeExtractor
- implicit val ClassInfoTypeTag = ClassTag[ClassInfoType](classOf[ClassInfoType])
-
- case class MethodType(params: List[Symbol], resultType: Type) extends Type
- object MethodType extends MethodTypeExtractor
- implicit val MethodTypeTag = ClassTag[MethodType](classOf[MethodType])
-
- case class NullaryMethodType(resultType: Type) extends Type
- object NullaryMethodType extends NullaryMethodTypeExtractor
- implicit val NullaryMethodTypeTag = ClassTag[NullaryMethodType](classOf[NullaryMethodType])
-
- case class PolyType(typeParams: List[Symbol], resultType: Type) extends Type
- object PolyType extends PolyTypeExtractor
- implicit val PolyTypeTag = ClassTag[PolyType](classOf[PolyType])
-
- case class ExistentialType(quantified: List[Symbol], underlying: Type) extends Type { override def typeSymbol = underlying.typeSymbol }
- object ExistentialType extends ExistentialTypeExtractor
- implicit val ExistentialTypeTag = ClassTag[ExistentialType](classOf[ExistentialType])
-
- case class AnnotatedType(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol) extends Type { override def typeSymbol = underlying.typeSymbol }
- object AnnotatedType extends AnnotatedTypeExtractor
- implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
-
- case class TypeBounds(lo: Type, hi: Type) extends Type
- object TypeBounds extends TypeBoundsExtractor
- implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
-
- val WildcardType = new Type
-
- case class BoundedWildcardType(bounds: TypeBounds) extends Type
- object BoundedWildcardType extends BoundedWildcardTypeExtractor
- implicit val BoundedWildcardTypeTag = ClassTag[BoundedWildcardType](classOf[BoundedWildcardType])
-
- class Scope(elems: Iterable[Symbol]) extends ScopeBase with MemberScopeBase {
- def iterator = elems.iterator
- def sorted = elems.toList
- }
- type MemberScope = Scope
- implicit val ScopeTag = ClassTag[Scope](classOf[Scope])
- implicit val MemberScopeTag = ClassTag[MemberScope](classOf[MemberScope])
-
- def newScope: Scope = newScopeWith()
- def newNestedScope(outer: Scope): Scope = newScope
- def newScopeWith(elems: Symbol*): Scope = new Scope(elems)
-
- abstract class Name(str: String) extends NameBase {
- override def toString = str
- }
- implicit val NameTag = ClassTag[Name](classOf[Name])
-
- class TermName(str: String) extends Name(str) {
- def isTermName = true
- def isTypeName = false
- def toTermName = this
- def toTypeName = new TypeName(str)
- }
- implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
-
- class TypeName(str: String) extends Name(str) {
- def isTermName = false
- def isTypeName = true
- def toTermName = new TermName(str)
- def toTypeName = this
- }
- implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
-
- def newTermName(str: String) = new TermName(str)
- def newTypeName(str: String) = new TypeName(str)
-
- object nme extends TermNamesBase {
- type NameType = TermName
- val WILDCARD = newTermName("_")
- val CONSTRUCTOR = newTermName("<init>")
- val ROOTPKG = newTermName("_root_")
- val EMPTY = newTermName("")
- val EMPTY_PACKAGE_NAME = newTermName("<empty>")
- val ROOT = newTermName("<root>")
- val NO_NAME = newTermName("<none>")
- }
-
- object tpnme extends TypeNamesBase {
- type NameType = TypeName
- val WILDCARD = nme.WILDCARD.toTypeName
- val EMPTY = nme.EMPTY.toTypeName
- val WILDCARD_STAR = newTypeName("_*")
- val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName
- val ROOT = nme.ROOT.toTypeName
- }
-
- type FlagSet = Long
- val NoFlags = 0L
- implicit val FlagSetTag = ClassTag[FlagSet](classOf[FlagSet])
-
- class Modifiers(override val flags: FlagSet,
- override val privateWithin: Name,
- override val annotations: List[Tree]) extends ModifiersBase {
- def hasFlag(flags: FlagSet) = (this.flags & flags) != 0
- }
-
- implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers])
-
- object Modifiers extends ModifiersCreator {
- def apply(flags: Long,
- privateWithin: Name,
- annotations: List[Tree]) = new Modifiers(flags, privateWithin, annotations)
- }
-
- case class Constant(value: Any)
- object Constant extends ConstantExtractor
- implicit val ConstantTag = ClassTag[Constant](classOf[Constant])
-
- case class AnnotationInfo(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)])
- object AnnotationInfo extends AnnotationInfoExtractor
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
-
- abstract class ClassfileAnnotArg
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
-
- case class LiteralAnnotArg(const: Constant) extends ClassfileAnnotArg
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
-
- case class ArrayAnnotArg(args: Array[ClassfileAnnotArg]) extends ClassfileAnnotArg
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
-
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg
- object NestedAnnotArg extends NestedAnnotArgExtractor
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
-
- class Position extends Attachments {
- override type Pos = Position
- def pos = this
- def withPos(newPos: Position) = newPos
- def isRange = false
- def focus = this
- }
- implicit val PositionTag = ClassTag[Position](classOf[Position])
-
- val NoPosition = new Position
-
- private val generated = new mutable.HashMap[String, WeakReference[Symbol]]
-
- private def cached(name: String)(symExpr: => Symbol): Symbol =
- generated get name match {
- case Some(WeakReference(sym)) =>
- sym
- case _ =>
- val sym = symExpr
- generated(name) = WeakReference(sym)
- sym
- }
-
- object build extends BuildBase {
- def selectType(owner: Symbol, name: String): TypeSymbol = {
- val clazz = new ClassSymbol(owner, newTypeName(name), NoFlags)
- cached(clazz.fullName)(clazz).asType
- }
-
- def selectTerm(owner: Symbol, name: String): TermSymbol = {
- val valu = new MethodSymbol(owner, newTermName(name), NoFlags)
- cached(valu.fullName)(valu).asTerm
- }
-
- def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol =
- selectTerm(owner, name).asMethod
-
- def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
- if (name.isTypeName)
- if (isClass) new ClassSymbol(owner, name.toTypeName, flags)
- else new TypeSymbol(owner, name.toTypeName, flags)
- else new TermSymbol(owner, name.toTermName, flags)
-
- def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- new FreeTermSymbol(rootMirror.RootClass, newTermName(name), flags)
-
- def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- new FreeTypeSymbol(rootMirror.RootClass, newTypeName(name), flags)
-
- def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = sym
-
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S = sym
-
- def flagsFromBits(bits: Long): FlagSet = bits
-
- object emptyValDef extends ValDef(NoMods, nme.WILDCARD, TypeTree(NoType), EmptyTree) {
- override def isEmpty = true
- }
-
- def This(sym: Symbol): Tree = self.This(sym.name.toTypeName)
-
- def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym.name)
-
- def Ident(sym: Symbol): Ident = self.Ident(sym.name)
-
- def TypeTree(tp: Type): TypeTree = self.TypeTree()
-
- def thisPrefix(sym: Symbol): Type = SingleType(NoPrefix, sym)
-
- def setType[T <: Tree](tree: T, tpe: Type): T = tree
-
- def setSymbol[T <: Tree](tree: T, sym: Symbol): T = tree
- }
-
- import build._
-
- class Mirror extends MirrorOf[self.type] {
- val universe: self.type = self
-
- lazy val RootClass = new ClassSymbol(NoSymbol, tpnme.ROOT, NoFlags) { override def isModuleClass = true }
- lazy val RootPackage = new ModuleSymbol(NoSymbol, nme.ROOT, NoFlags, RootClass)
- lazy val EmptyPackageClass = new ClassSymbol(RootClass, tpnme.EMPTY_PACKAGE_NAME, NoFlags) { override def isModuleClass = true }
- lazy val EmptyPackage = new ModuleSymbol(RootClass, nme.EMPTY_PACKAGE_NAME, NoFlags, EmptyPackageClass)
-
- def staticClass(fullName: String): ClassSymbol =
- mkStatic[ClassSymbol](fullName)
-
- def staticModule(fullName: String): ModuleSymbol =
- mkStatic[ModuleSymbol](fullName)
-
- def staticPackage(fullName: String): ModuleSymbol =
- staticModule(fullName) // this toy universe doesn't care about the distinction between packages and modules
-
- private def mkStatic[S <: Symbol : ClassTag](fullName: String): S =
- cached(fullName) {
- val point = fullName lastIndexOf '.'
- val owner =
- if (point > 0) staticModule(fullName take point).moduleClass
- else rootMirror.RootClass
- val name = fullName drop point + 1
- val symtag = implicitly[ClassTag[S]]
- if (symtag == ClassSymbolTag) new ClassSymbol(owner, newTypeName(name), NoFlags)
- else owner.newModuleAndClassSymbol(newTermName(name))._1
- }.asInstanceOf[S]
- }
-
- lazy val rootMirror = new Mirror
-
- import rootMirror._
-
- object definitions extends DefinitionsBase {
- lazy val ScalaPackage = staticModule("scala")
- lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass
-
- lazy val AnyClass = staticClass("scala.Any")
- lazy val AnyValClass = staticClass("scala.Any")
- lazy val ObjectClass = staticClass("java.lang.Object")
- lazy val AnyRefClass = ObjectClass
-
- lazy val NullClass = staticClass("scala.Null")
- lazy val NothingClass = staticClass("scala.Nothing")
-
- lazy val UnitClass = staticClass("scala.Unit")
- lazy val ByteClass = staticClass("scala.Byte")
- lazy val ShortClass = staticClass("scala.Short")
- lazy val CharClass = staticClass("scala.Char")
- lazy val IntClass = staticClass("scala.Int")
- lazy val LongClass = staticClass("scala.Long")
- lazy val FloatClass = staticClass("scala.Float")
- lazy val DoubleClass = staticClass("scala.Double")
- lazy val BooleanClass = staticClass("scala.Boolean")
-
- lazy val StringClass = staticClass("java.lang.String")
- lazy val ClassClass = staticClass("java.lang.Class")
- lazy val ArrayClass = staticClass("scala.Array")
- lazy val ListClass = staticClass("scala.List")
-
- lazy val PredefModule = staticModule("scala.Predef")
-
- lazy val ByteTpe = TypeRef(ScalaPrefix, ByteClass, Nil)
- lazy val ShortTpe = TypeRef(ScalaPrefix, ShortClass, Nil)
- lazy val CharTpe = TypeRef(ScalaPrefix, CharClass, Nil)
- lazy val IntTpe = TypeRef(ScalaPrefix, IntClass, Nil)
- lazy val LongTpe = TypeRef(ScalaPrefix, LongClass, Nil)
- lazy val FloatTpe = TypeRef(ScalaPrefix, FloatClass, Nil)
- lazy val DoubleTpe = TypeRef(ScalaPrefix, DoubleClass, Nil)
- lazy val BooleanTpe = TypeRef(ScalaPrefix, BooleanClass, Nil)
- lazy val UnitTpe = TypeRef(ScalaPrefix, UnitClass, Nil)
- lazy val AnyTpe = TypeRef(ScalaPrefix, AnyClass, Nil)
- lazy val AnyValTpe = TypeRef(ScalaPrefix, AnyValClass, Nil)
- lazy val NothingTpe = TypeRef(ScalaPrefix, NothingClass, Nil)
- lazy val NullTpe = TypeRef(ScalaPrefix, NullClass, Nil)
- lazy val ObjectTpe = TypeRef(JavaLangPrefix, ObjectClass, Nil)
- lazy val AnyRefTpe = ObjectTpe
- }
-
- import definitions._
-
- private def thisModuleType(fullName: String): Type = ThisType(staticModule(fullName).moduleClass)
- private lazy val ScalaPrefix = thisModuleType("scala")
- private lazy val JavaLangPrefix = thisModuleType("java.lang")
-
- private var nodeCount = 0 // not synchronized
-
- abstract class Tree extends TreeBase with Product {
- def isDef: Boolean = false
- def isEmpty: Boolean = false
-
- /** The canonical way to test if a Tree represents a term.
- */
- def isTerm: Boolean = this match {
- case _: TermTree => true
- case Bind(name, _) => name.isTermName
- case Select(_, name) => name.isTermName
- case Ident(name) => name.isTermName
- case Annotated(_, arg) => arg.isTerm
- case _ => false
- }
-
- /** The canonical way to test if a Tree represents a type.
- */
- def isType: Boolean = this match {
- case _: TypTree => true
- case Bind(name, _) => name.isTypeName
- case Select(_, name) => name.isTypeName
- case Ident(name) => name.isTypeName
- case Annotated(_, arg) => arg.isType
- case _ => false
- }
- }
-
- def treeToString(tree: Tree) = s"<tree ${tree.getClass}>"
-
- def treeType(tree: Tree) = NoType
-
- trait TermTree extends Tree
-
- trait TypTree extends Tree
-
- trait SymTree extends Tree
-
- trait NameTree extends Tree {
- def name: Name
- }
-
- trait RefTree extends SymTree with NameTree {
- def qualifier: Tree // empty for Idents
- def name: Name
- }
-
- abstract class DefTree extends SymTree with NameTree {
- def name: Name
- override def isDef = true
- }
-
- case object EmptyTree extends TermTree {
- val asList = List(this)
- override def isEmpty = true
- }
-
- abstract class MemberDef extends DefTree {
- def mods: Modifiers
- }
-
- case class PackageDef(pid: RefTree, stats: List[Tree])
- extends MemberDef {
- def name = pid.name
- def mods = NoMods
- }
- object PackageDef extends PackageDefExtractor
-
- abstract class ImplDef extends MemberDef {
- def impl: Template
- }
-
- case class ClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template)
- extends ImplDef
- object ClassDef extends ClassDefExtractor
-
- case class ModuleDef(mods: Modifiers, name: TermName, impl: Template)
- extends ImplDef
- object ModuleDef extends ModuleDefExtractor
-
- abstract class ValOrDefDef extends MemberDef {
- val name: Name
- val tpt: Tree
- val rhs: Tree
- }
-
- case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef
- object ValDef extends ValDefExtractor
-
- case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef
- object DefDef extends DefDefExtractor
-
- case class TypeDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree)
- extends MemberDef
- object TypeDef extends TypeDefExtractor
-
- case class LabelDef(name: TermName, params: List[Ident], rhs: Tree)
- extends DefTree with TermTree
- object LabelDef extends LabelDefExtractor
-
- case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- object ImportSelector extends ImportSelectorExtractor
-
- case class Import(expr: Tree, selectors: List[ImportSelector])
- extends SymTree
- object Import extends ImportExtractor
-
- case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
- extends SymTree
- object Template extends TemplateExtractor
-
- case class Block(stats: List[Tree], expr: Tree)
- extends TermTree
- object Block extends BlockExtractor
-
- case class CaseDef(pat: Tree, guard: Tree, body: Tree)
- extends Tree
- object CaseDef extends CaseDefExtractor
-
- case class Alternative(trees: List[Tree])
- extends TermTree
- object Alternative extends AlternativeExtractor
-
- case class Star(elem: Tree)
- extends TermTree
- object Star extends StarExtractor
-
- case class Bind(name: Name, body: Tree)
- extends DefTree
- object Bind extends BindExtractor
-
- case class UnApply(fun: Tree, args: List[Tree])
- extends TermTree
- object UnApply extends UnApplyExtractor
-
- case class ArrayValue(elemtpt: Tree, elems: List[Tree])
- extends TermTree
- object ArrayValue extends ArrayValueExtractor
-
- case class Function(vparams: List[ValDef], body: Tree)
- extends TermTree with SymTree
- object Function extends FunctionExtractor
-
- case class Assign(lhs: Tree, rhs: Tree)
- extends TermTree
- object Assign extends AssignExtractor
-
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
- object AssignOrNamedArg extends AssignOrNamedArgExtractor
-
- case class If(cond: Tree, thenp: Tree, elsep: Tree)
- extends TermTree
- object If extends IfExtractor
-
- case class Match(selector: Tree, cases: List[CaseDef])
- extends TermTree
- object Match extends MatchExtractor
-
- case class Return(expr: Tree)
- extends TermTree with SymTree
- object Return extends ReturnExtractor
-
- case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
- extends TermTree
- object Try extends TryExtractor
-
- case class Throw(expr: Tree)
- extends TermTree
- object Throw extends ThrowExtractor
-
- case class New(tpt: Tree) extends TermTree
- object New extends NewExtractor
-
- case class Typed(expr: Tree, tpt: Tree)
- extends TermTree
- object Typed extends TypedExtractor
-
- abstract class GenericApply extends TermTree {
- val fun: Tree
- val args: List[Tree]
- }
-
- case class TypeApply(fun: Tree, args: List[Tree])
- extends GenericApply
- object TypeApply extends TypeApplyExtractor
-
- case class Apply(fun: Tree, args: List[Tree])
- extends GenericApply
- object Apply extends ApplyExtractor
-
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- object ApplyDynamic extends ApplyDynamicExtractor
-
- case class Super(qual: Tree, mix: TypeName) extends TermTree
- object Super extends SuperExtractor
-
- case class This(qual: TypeName)
- extends TermTree with SymTree
- object This extends ThisExtractor
-
- case class Select(qualifier: Tree, name: Name)
- extends RefTree
- object Select extends SelectExtractor
-
- case class Ident(name: Name) extends RefTree {
- def qualifier: Tree = EmptyTree
- }
- object Ident extends IdentExtractor
-
- case class ReferenceToBoxed(ident: Ident) extends TermTree
- object ReferenceToBoxed extends ReferenceToBoxedExtractor
-
- case class Literal(value: Constant)
- extends TermTree {
- assert(value ne null)
- }
- object Literal extends LiteralExtractor
-
- case class Annotated(annot: Tree, arg: Tree) extends Tree
- object Annotated extends AnnotatedExtractor
-
- case class SingletonTypeTree(ref: Tree)
- extends TypTree
- object SingletonTypeTree extends SingletonTypeTreeExtractor
-
- case class SelectFromTypeTree(qualifier: Tree, name: TypeName)
- extends TypTree with RefTree
- object SelectFromTypeTree extends SelectFromTypeTreeExtractor
-
- case class CompoundTypeTree(templ: Template)
- extends TypTree
- object CompoundTypeTree extends CompoundTypeTreeExtractor
-
- case class AppliedTypeTree(tpt: Tree, args: List[Tree])
- extends TypTree
- object AppliedTypeTree extends AppliedTypeTreeExtractor
-
- case class TypeBoundsTree(lo: Tree, hi: Tree)
- extends TypTree
- object TypeBoundsTree extends TypeBoundsTreeExtractor
-
- case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
- extends TypTree
- object ExistentialTypeTree extends ExistentialTypeTreeExtractor
-
- case class TypeTree() extends TypTree {
- val original: Tree = null
- override def isEmpty = true
- }
- object TypeTree extends TypeTreeExtractor
-
- implicit val TreeTag = ClassTag[Tree](classOf[Tree])
- implicit val TermTreeTag = ClassTag[TermTree](classOf[TermTree])
- implicit val TypTreeTag = ClassTag[TypTree](classOf[TypTree])
- implicit val SymTreeTag = ClassTag[SymTree](classOf[SymTree])
- implicit val NameTreeTag = ClassTag[NameTree](classOf[NameTree])
- implicit val RefTreeTag = ClassTag[RefTree](classOf[RefTree])
- implicit val DefTreeTag = ClassTag[DefTree](classOf[DefTree])
- implicit val MemberDefTag = ClassTag[MemberDef](classOf[MemberDef])
- implicit val PackageDefTag = ClassTag[PackageDef](classOf[PackageDef])
- implicit val ImplDefTag = ClassTag[ImplDef](classOf[ImplDef])
- implicit val ClassDefTag = ClassTag[ClassDef](classOf[ClassDef])
- implicit val ModuleDefTag = ClassTag[ModuleDef](classOf[ModuleDef])
- implicit val ValOrDefDefTag = ClassTag[ValOrDefDef](classOf[ValOrDefDef])
- implicit val ValDefTag = ClassTag[ValDef](classOf[ValDef])
- implicit val DefDefTag = ClassTag[DefDef](classOf[DefDef])
- implicit val TypeDefTag = ClassTag[TypeDef](classOf[TypeDef])
- implicit val LabelDefTag = ClassTag[LabelDef](classOf[LabelDef])
- implicit val ImportSelectorTag = ClassTag[ImportSelector](classOf[ImportSelector])
- implicit val ImportTag = ClassTag[Import](classOf[Import])
- implicit val TemplateTag = ClassTag[Template](classOf[Template])
- implicit val BlockTag = ClassTag[Block](classOf[Block])
- implicit val CaseDefTag = ClassTag[CaseDef](classOf[CaseDef])
- implicit val AlternativeTag = ClassTag[Alternative](classOf[Alternative])
- implicit val StarTag = ClassTag[Star](classOf[Star])
- implicit val BindTag = ClassTag[Bind](classOf[Bind])
- implicit val UnApplyTag = ClassTag[UnApply](classOf[UnApply])
- implicit val ArrayValueTag = ClassTag[ArrayValue](classOf[ArrayValue])
- implicit val FunctionTag = ClassTag[Function](classOf[Function])
- implicit val AssignTag = ClassTag[Assign](classOf[Assign])
- implicit val AssignOrNamedArgTag = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg])
- implicit val IfTag = ClassTag[If](classOf[If])
- implicit val MatchTag = ClassTag[Match](classOf[Match])
- implicit val ReturnTag = ClassTag[Return](classOf[Return])
- implicit val TryTag = ClassTag[Try](classOf[Try])
- implicit val ThrowTag = ClassTag[Throw](classOf[Throw])
- implicit val NewTag = ClassTag[New](classOf[New])
- implicit val TypedTag = ClassTag[Typed](classOf[Typed])
- implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
- implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
- implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
- implicit val SuperTag = ClassTag[Super](classOf[Super])
- implicit val ThisTag = ClassTag[This](classOf[This])
- implicit val SelectTag = ClassTag[Select](classOf[Select])
- implicit val IdentTag = ClassTag[Ident](classOf[Ident])
- implicit val ReferenceToBoxedTag = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed])
- implicit val LiteralTag = ClassTag[Literal](classOf[Literal])
- implicit val AnnotatedTag = ClassTag[Annotated](classOf[Annotated])
- implicit val SingletonTypeTreeTag = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree])
- implicit val SelectFromTypeTreeTag = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree])
- implicit val CompoundTypeTreeTag = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree])
- implicit val AppliedTypeTreeTag = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree])
- implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
- implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
- implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
-
- def ClassDef(sym: Symbol, impl: Template): ClassDef = ???
- def ModuleDef(sym: Symbol, impl: Template): ModuleDef = ???
- def ValDef(sym: Symbol, rhs: Tree): ValDef = ???
- def ValDef(sym: Symbol): ValDef = ???
- def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = ???
- def TypeDef(sym: Symbol, rhs: Tree): TypeDef = ???
- def TypeDef(sym: Symbol): TypeDef = ???
- def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef = ???
- def CaseDef(pat: Tree, body: Tree): CaseDef = ???
- def Bind(sym: Symbol, body: Tree): Bind = ???
- def Try(body: Tree, cases: (Tree, Tree)*): Try = ???
- def Throw(tpe: Type, args: Tree*): Throw = ???
- def Apply(sym: Symbol, args: Tree*): Tree = ???
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = ???
- def New(tpe: Type, args: Tree*): Tree = ???
- def New(sym: Symbol, args: Tree*): Tree = ???
- def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree = ???
- def Super(sym: Symbol, mix: TypeName): Tree = ???
- def This(sym: Symbol): Tree = ???
- def Select(qualifier: Tree, name: String): Select = ???
- def Select(qualifier: Tree, sym: Symbol): Select = ???
- def Ident(name: String): Ident = ???
- def Ident(sym: Symbol): Ident = ???
- def Block(stats: Tree*): Block = ???
- def TypeTree(tp: Type): TypeTree = ???
-}
diff --git a/src/library/scala/reflect/base/BuildUtils.scala b/src/library/scala/reflect/base/BuildUtils.scala
deleted file mode 100644
index c4231dd515..0000000000
--- a/src/library/scala/reflect/base/BuildUtils.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package scala.reflect
-package base
-
-trait BuildUtils { self: Universe =>
-
- val build: BuildBase
-
- // this API abstracts away the functionality necessary for reification
- // it's too gimmicky and unstructured to be exposed directly in the universe
- // but we need it in a publicly available place for reification to work
-
- abstract class BuildBase {
- /** Selects type symbol with given simple name `name` from the defined members of `owner`.
- */
- def selectType(owner: Symbol, name: String): TypeSymbol
-
- /** Selects term symbol with given name and type from the defined members of prefix type
- */
- def selectTerm(owner: Symbol, name: String): TermSymbol
-
- /** Selects overloaded method symbol with given name and index
- */
- def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol
-
- /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
- * the current symbol as its owner.
- */
- def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: FlagSet, isClass: Boolean): Symbol
-
- /** Create a fresh free term symbol.
- * @param name the name of the free variable
- * @param value the value of the free variable at runtime
- * @param flags (optional) flags of the free variable
- * @param origin debug information that tells where this symbol comes from
- */
- def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
-
- /** Create a fresh free type symbol.
- * @param name the name of the free variable
- * @param flags (optional) flags of the free variable
- * @param origin debug information that tells where this symbol comes from
- */
- def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
-
- /** Set symbol's type signature to given type.
- * @return the symbol itself
- */
- def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S
-
- /** Set symbol's annotations to given annotations `annots`.
- */
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S
-
- def flagsFromBits(bits: Long): FlagSet
-
- def emptyValDef: ValDef
-
- def This(sym: Symbol): Tree
-
- def Select(qualifier: Tree, sym: Symbol): Select
-
- def Ident(sym: Symbol): Ident
-
- def TypeTree(tp: Type): TypeTree
-
- def thisPrefix(sym: Symbol): Type
-
- def setType[T <: Tree](tree: T, tpe: Type): T
-
- def setSymbol[T <: Tree](tree: T, sym: Symbol): T
- }
-}
diff --git a/src/library/scala/reflect/base/Constants.scala b/src/library/scala/reflect/base/Constants.scala
deleted file mode 100644
index ba12b02e92..0000000000
--- a/src/library/scala/reflect/base/Constants.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-trait Constants {
- self: Universe =>
-
- type Constant >: Null <: AnyRef
- implicit val ConstantTag: ClassTag[Constant]
- val Constant: ConstantExtractor
-
- abstract class ConstantExtractor {
- def apply(value: Any): Constant
- def unapply(arg: Constant): Option[Any]
- }
-}
diff --git a/src/library/scala/reflect/base/Exprs.scala b/src/library/scala/reflect/base/Exprs.scala
deleted file mode 100644
index 45598c03e2..0000000000
--- a/src/library/scala/reflect/base/Exprs.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-trait Exprs { self: Universe =>
-
- /** An expression tree tagged with its type */
- trait Expr[+T] extends Equals with Serializable {
- val mirror: Mirror
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T]
-
- def tree: Tree
- def staticType: Type
- def actualType: Type
-
- def splice: T
- val value: T
-
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
- override def equals(x: Any) = x.isInstanceOf[Expr[_]] && this.mirror == x.asInstanceOf[Expr[_]].mirror && this.tree == x.asInstanceOf[Expr[_]].tree
- override def hashCode = mirror.hashCode * 31 + tree.hashCode
- override def toString = "Expr["+staticType+"]("+tree+")"
- }
-
- object Expr {
- def apply[T: WeakTypeTag](mirror: MirrorOf[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
- def unapply[T](expr: Expr[T]): Option[Tree] = Some(expr.tree)
- }
-
- private class ExprImpl[+T: WeakTypeTag](val mirror: Mirror, val treec: TreeCreator) extends Expr[T] {
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T] = {
- val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- val tag1 = (implicitly[WeakTypeTag[T]] in otherMirror).asInstanceOf[otherMirror.universe.WeakTypeTag[T]]
- otherMirror.universe.Expr[T](otherMirror1, treec)(tag1)
- }
-
- lazy val tree: Tree = treec(mirror)
- lazy val staticType: Type = implicitly[WeakTypeTag[T]].tpe
- def actualType: Type = treeType(tree)
-
- def splice: T = throw new UnsupportedOperationException("""
- |the function you're calling has not been spliced by the compiler.
- |this means there is a cross-stage evaluation involved, and it needs to be invoked explicitly.
- |if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
- |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
- lazy val value: T = throw new UnsupportedOperationException("""
- |the value you're calling is only meant to be used in cross-stage path-dependent types.
- |if you want to splice the underlying expression, use `<your expr>.splice`.
- |if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath,
- |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
-
- private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[WeakTypeTag[T]].in(scala.reflect.basis.rootMirror))
- }
-}
-
-private[scala] class SerializedExpr(var treec: TreeCreator, var tag: scala.reflect.basis.WeakTypeTag[_]) extends Serializable {
- private def writeObject(out: java.io.ObjectOutputStream): Unit = {
- out.writeObject(treec)
- out.writeObject(tag)
- }
-
- private def readObject(in: java.io.ObjectInputStream): Unit = {
- treec = in.readObject().asInstanceOf[TreeCreator]
- tag = in.readObject().asInstanceOf[scala.reflect.basis.WeakTypeTag[_]]
- }
-
- private def readResolve(): AnyRef = {
- import scala.reflect.basis._
- Expr(rootMirror, treec)(tag)
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/FlagSets.scala b/src/library/scala/reflect/base/FlagSets.scala
deleted file mode 100644
index 96cdbe894c..0000000000
--- a/src/library/scala/reflect/base/FlagSets.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.reflect
-package base
-
-trait FlagSets { self: Universe =>
-
- /** An abstract type representing sets of flags that apply to definition trees and symbols */
- type FlagSet
-
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FlagSetTag: ClassTag[FlagSet]
-
- /** The empty set of flags */
- val NoFlags: FlagSet
-}
diff --git a/src/library/scala/reflect/base/MirrorOf.scala b/src/library/scala/reflect/base/MirrorOf.scala
deleted file mode 100644
index 1e9619d062..0000000000
--- a/src/library/scala/reflect/base/MirrorOf.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-package scala.reflect
-package base
-
-abstract class MirrorOf[U <: base.Universe with Singleton] {
- /** .. */
- val universe: U
-
- /** .. */
- def RootClass: U#ClassSymbol
- def RootPackage: U#ModuleSymbol
- def EmptyPackageClass: U#ClassSymbol
- def EmptyPackage: U#ModuleSymbol
-
- /** The symbol corresponding to the globally accessible class with the
- * given fully qualified name `fullName`.
- *
- * If the name points to a type alias, it's recursively dealiased and its target is returned.
- * If you need a symbol that corresponds to the type alias itself, load it directly from the package class:
- *
- * scala> cm.staticClass("scala.List")
- * res0: reflect.runtime.universe.ClassSymbol = class List
- *
- * scala> res0.fullName
- * res1: String = scala.collection.immutable.List
- *
- * scala> cm.staticPackage("scala")
- * res2: reflect.runtime.universe.ModuleSymbol = package scala
- *
- * scala> res2.moduleClass.typeSignature member newTypeName("List")
- * res3: reflect.runtime.universe.Symbol = type List
- *
- * scala> res3.fullName
- * res4: String = scala.List
- *
- * To be consistent with Scala name resolution rules, in case of ambiguity between
- * a package and an object, the object is never been considered.
- *
- * For example for the following code:
- *
- * package foo {
- * class B
- * }
- *
- * object foo {
- * class A
- * class B
- * }
- *
- * staticClass("foo.B") will resolve to the symbol corresponding to the class B declared in the package foo, and
- * staticClass("foo.A") will throw a MissingRequirementException (which is exactly what scalac would do if this
- * fully qualified class name is written inside any package in a Scala program).
- *
- * In the example above, to load a symbol that corresponds to the class B declared in the object foo,
- * use staticModule("foo") to load the module symbol and then navigate typeSignature.members of its moduleClass.
- */
- def staticClass(fullName: String): U#ClassSymbol
-
- /** The symbol corresponding to the globally accessible object with the
- * given fully qualified name `fullName`.
- *
- * To be consistent with Scala name resolution rules, in case of ambiguity between
- * a package and an object, the object is never been considered.
- *
- * For example for the following code:
- *
- * package foo {
- * object B
- * }
- *
- * object foo {
- * object A
- * object B
- * }
- *
- * staticModule("foo.B") will resolve to the symbol corresponding to the object B declared in the package foo, and
- * staticModule("foo.A") will throw a MissingRequirementException (which is exactly what scalac would do if this
- * fully qualified class name is written inside any package in a Scala program).
- *
- * In the example above, to load a symbol that corresponds to the object B declared in the object foo,
- * use staticModule("foo") to load the module symbol and then navigate typeSignature.members of its moduleClass.
- */
- def staticModule(fullName: String): U#ModuleSymbol
-
- /** The symbol corresponding to a package with the
- * given fully qualified name `fullName`.
- */
- def staticPackage(fullName: String): U#ModuleSymbol
-}
diff --git a/src/library/scala/reflect/base/Mirrors.scala b/src/library/scala/reflect/base/Mirrors.scala
deleted file mode 100644
index 50866ef000..0000000000
--- a/src/library/scala/reflect/base/Mirrors.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package scala.reflect
-package base
-
-trait Mirrors {
- self: Universe =>
-
- /** .. */
- type Mirror >: Null <: MirrorOf[self.type]
-
- /** .. */
- val rootMirror: Mirror
-}
diff --git a/src/library/scala/reflect/base/Names.scala b/src/library/scala/reflect/base/Names.scala
deleted file mode 100644
index ad99f54fb3..0000000000
--- a/src/library/scala/reflect/base/Names.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package scala.reflect
-package base
-
-import scala.language.implicitConversions
-
-/** A trait that manages names.
- * A name is a string in one of two name universes: terms and types.
- * The same string can be a name in both universes.
- * Two names are equal if they represent the same string and they are
- * members of the same universe.
- *
- * Names are interned. That is, for two names `name11 and `name2`,
- * `name1 == name2` implies `name1 eq name2`.
- */
-trait Names {
- /** Intentionally no implicit from String => Name. */
- implicit def stringToTermName(s: String): TermName = newTermName(s)
- implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
-
- /** The abstract type of names */
- type Name >: Null <: NameBase
- implicit val NameTag: ClassTag[Name]
-
- /** The abstract type of names representing terms */
- type TypeName >: Null <: Name
- implicit val TypeNameTag: ClassTag[TypeName]
-
- /** The abstract type of names representing types */
- type TermName >: Null <: Name
- implicit val TermNameTag: ClassTag[TermName]
-
- /** The base API that all names support */
- abstract class NameBase {
- /** Is this name a term name? */
- def isTermName: Boolean
-
- /** Is this name a type name? */
- def isTypeName: Boolean
-
- /** Returns a term name that represents the same string as this name */
- def toTermName: TermName
-
- /** Returns a type name that represents the same string as this name */
- def toTypeName: TypeName
- }
-
- /** Create a new term name.
- */
- def newTermName(s: String): TermName
-
- /** Creates a new type name.
- */
- def newTypeName(s: String): TypeName
-
- def EmptyTermName: TermName = newTermName("")
-
- def EmptyTypeName: TypeName = EmptyTermName.toTypeName
-}
diff --git a/src/library/scala/reflect/base/Positions.scala b/src/library/scala/reflect/base/Positions.scala
deleted file mode 100644
index 76a7382e9e..0000000000
--- a/src/library/scala/reflect/base/Positions.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.reflect
-package base
-
-trait Positions {
- self: Universe =>
-
- /** .. */
- type Position >: Null <: Attachments { type Pos = Position }
-
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val PositionTag: ClassTag[Position]
-
- /** .. */
- val NoPosition: Position
-}
diff --git a/src/library/scala/reflect/base/Scopes.scala b/src/library/scala/reflect/base/Scopes.scala
deleted file mode 100644
index a388fdc392..0000000000
--- a/src/library/scala/reflect/base/Scopes.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package scala.reflect
-package base
-
-trait Scopes { self: Universe =>
-
- type Scope >: Null <: ScopeBase
-
- /** The base API that all scopes support */
- trait ScopeBase extends Iterable[Symbol]
-
- /** A tag that preserves the identity of the `Scope` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ScopeTag: ClassTag[Scope]
-
- type MemberScope >: Null <: Scope with MemberScopeBase
-
- /** The base API that all member scopes support */
- trait MemberScopeBase extends ScopeBase {
- /** Sorts the symbols included in this scope so that:
- * 1) Symbols appear the linearization order of their owners.
- * 2) Symbols with the same owner appear in reverse order of their declarations.
- * 3) Synthetic members (e.g. getters/setters for vals/vars) might appear in arbitrary order.
- */
- def sorted: List[Symbol]
- }
-
- /** A tag that preserves the identity of the `MemberScope` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MemberScopeTag: ClassTag[MemberScope]
-
- /** Create a new scope */
- def newScope: Scope
-
- /** Create a new scope nested in another one with which it shares its elements */
- def newNestedScope(outer: Scope): Scope
-
- /** Create a new scope with given initial elements */
- def newScopeWith(elems: Symbol*): Scope
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/StandardDefinitions.scala b/src/library/scala/reflect/base/StandardDefinitions.scala
deleted file mode 100644
index 8f1c96ea3f..0000000000
--- a/src/library/scala/reflect/base/StandardDefinitions.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-trait StandardDefinitions {
- self: Universe =>
-
- val definitions: DefinitionsBase
-
- trait DefinitionsBase extends StandardTypes {
- // packages
- def ScalaPackageClass: ClassSymbol
- def ScalaPackage: ModuleSymbol
-
- // top types
- def AnyClass : ClassSymbol
- def AnyValClass: ClassSymbol
- def ObjectClass: ClassSymbol
- def AnyRefClass: TypeSymbol
-
- // bottom types
- def NullClass : ClassSymbol
- def NothingClass: ClassSymbol
-
- // the scala value classes
- def UnitClass : ClassSymbol
- def ByteClass : ClassSymbol
- def ShortClass : ClassSymbol
- def CharClass : ClassSymbol
- def IntClass : ClassSymbol
- def LongClass : ClassSymbol
- def FloatClass : ClassSymbol
- def DoubleClass : ClassSymbol
- def BooleanClass: ClassSymbol
-
- // some special classes
- def StringClass : ClassSymbol
- def ClassClass : ClassSymbol
- def ArrayClass : ClassSymbol
- def ListClass : ClassSymbol
-
- // the Predef object
- def PredefModule: ModuleSymbol
- }
-
- trait StandardTypes {
- // the scala value classes
- val UnitTpe: Type
- val ByteTpe: Type
- val ShortTpe: Type
- val CharTpe: Type
- val IntTpe: Type
- val LongTpe: Type
- val FloatTpe: Type
- val DoubleTpe: Type
- val BooleanTpe: Type
-
- // top types
- val AnyTpe: Type
- val AnyValTpe: Type
- val AnyRefTpe: Type
- val ObjectTpe: Type
-
- // bottom types
- val NothingTpe: Type
- val NullTpe: Type
- }
-}
diff --git a/src/library/scala/reflect/base/StandardNames.scala b/src/library/scala/reflect/base/StandardNames.scala
deleted file mode 100644
index 3e569cd523..0000000000
--- a/src/library/scala/reflect/base/StandardNames.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-// Q: I have a pretty name. Where do I put it - into base.StandardNames or into api.StandardNames?
-// A: Is it necessary to construct trees (like EMPTY or WILDCARD_STAR)? If yes, then it goes to base.StandardNames.
-// Is it necessary to perform reflection (like ERROR or LOCAL_SUFFIX_STRING)? If yes, then it goes to api.StandardNames.
-// Otherwise it goes nowhere - reflection API should stay minimalistic.
-
-trait StandardNames {
- self: Universe =>
-
- val nme: TermNamesBase
- val tpnme: TypeNamesBase
-
- trait NamesBase {
- type NameType >: Null <: Name
- val WILDCARD: NameType
- }
-
- trait TermNamesBase extends NamesBase {
- val CONSTRUCTOR: TermName
- val ROOTPKG: TermName
- }
-
- trait TypeNamesBase extends NamesBase {
- val EMPTY: NameType
- val WILDCARD_STAR: NameType
- }
-}
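A sketch of the intended use of these standard names, building a `new C()` call tree by hand via the runtime universe (the class name C is made up):

    import scala.reflect.runtime.{universe => u}
    import u._

    // nme.CONSTRUCTOR names the <init> method, so a `new C()` call is built as:
    val newC = Apply(Select(New(Ident(newTypeName("C"))), nme.CONSTRUCTOR), Nil)
    println(show(newC))   // roughly: new C()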
diff --git a/src/library/scala/reflect/base/Symbols.scala b/src/library/scala/reflect/base/Symbols.scala
deleted file mode 100644
index 3830264425..0000000000
--- a/src/library/scala/reflect/base/Symbols.scala
+++ /dev/null
@@ -1,289 +0,0 @@
-package scala.reflect
-package base
-
-trait Symbols { self: Universe =>
-
- /** The abstract type of symbols representing declarations */
- type Symbol >: Null <: SymbolBase
-
- /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SymbolTag: ClassTag[Symbol]
-
- /** The abstract type of type symbols representing type, class, and trait declarations,
- * as well as type parameters
- */
- type TypeSymbol >: Null <: Symbol with TypeSymbolBase
-
- /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeSymbolTag: ClassTag[TypeSymbol]
-
- /** The abstract type of term symbols representing val, var, def, and object declarations as
- * well as packages and value parameters.
- */
- type TermSymbol >: Null <: Symbol with TermSymbolBase
-
- /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TermSymbolTag: ClassTag[TermSymbol]
-
- /** The abstract type of method symbols representing def declarations */
- type MethodSymbol >: Null <: TermSymbol with MethodSymbolBase
-
- /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MethodSymbolTag: ClassTag[MethodSymbol]
-
- /** The abstract type of module symbols representing object declarations */
- type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolBase
-
- /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
-
- /** The abstract type of class symbols representing class and trait definitions */
- type ClassSymbol >: Null <: TypeSymbol with ClassSymbolBase
-
- /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ClassSymbolTag: ClassTag[ClassSymbol]
-
- /** The abstract type of free terms introduced by reification */
- type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolBase
-
- /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
-
- /** The abstract type of free types introduced by reification */
- type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolBase
-
- /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
-
- /** A special "missing" symbol */
- val NoSymbol: Symbol
-
- /** The base API that all symbols support */
- trait SymbolBase { this: Symbol =>
-
- /** The owner of this symbol. This is the symbol
- * that directly contains the current symbol's definition.
- * The `NoSymbol` symbol does not have an owner, and calling this method
- * on one causes an internal error.
- * The owner of the Scala root class [[scala.reflect.api.mirror.RootClass]]
- * and the Scala root object [[scala.reflect.api.mirror.RootPackage]] is `NoSymbol`.
- * Every other symbol has a chain of owners that ends in
- * [[scala.reflect.api.mirror.RootClass]].
- */
- def owner: Symbol
-
- /** The type of the symbol name.
- * Can be either `TermName` or `TypeName` depending on whether this is a `TermSymbol` or a `TypeSymbol`.
- *
- * Type name namespaces do not intersect with term name namespaces.
- * This fact is reflected in different types for names of `TermSymbol` and `TypeSymbol`.
- */
- type NameType >: Null <: Name
-
- /** The name of the symbol as a member of the `Name` type.
- */
- def name: Name
-
- /** The encoded full path name of this symbol, where outer names and inner names
- * are separated by periods.
- */
- def fullName: String
-
- /** Does this symbol represent the definition of a type?
- * Note that every symbol is either a term or a type.
- * So for every symbol `sym` (except for `NoSymbol`),
- * either `sym.isTerm` is true or `sym.isType` is true.
- */
- def isType: Boolean = false
-
- /** This symbol cast to a TypeSymbol.
- * @throws ScalaReflectionException if `isType` is false.
- */
- def asType: TypeSymbol = throw new ScalaReflectionException(s"$this is not a type")
-
- /** Does this symbol represent the definition of a term?
- * Note that every symbol is either a term or a type.
- * So for every symbol `sym` (except for `NoSymbol`),
- *  either `sym.isTerm` is true or `sym.isType` is true.
- */
- def isTerm: Boolean = false
-
- /** This symbol cast to a TermSymbol.
- * @throws ScalaReflectionException if `isTerm` is false.
- */
- def asTerm: TermSymbol = throw new ScalaReflectionException(s"$this is not a term")
-
- /** Does this symbol represent the definition of a method?
- * If yes, `isTerm` is also guaranteed to be true.
- */
- def isMethod: Boolean = false
-
- /** This symbol cast to a MethodSymbol.
- * @throws ScalaReflectionException if `isMethod` is false.
- */
- def asMethod: MethodSymbol = {
- def overloadedMsg =
- "encapsulates multiple overloaded alternatives and cannot be treated as a method. "+
- "Consider invoking `<offending symbol>.asTerm.alternatives` and manually picking the required method"
- def vanillaMsg = "is not a method"
- val msg = if (isOverloadedMethod) overloadedMsg else vanillaMsg
- throw new ScalaReflectionException(s"$this $msg")
- }
-
- /** Used to provide a better error message for `asMethod` */
- protected def isOverloadedMethod = false
-
- /** Does this symbol represent the definition of a module (i.e. does it
- *  result from an object definition)?
- * If yes, `isTerm` is also guaranteed to be true.
- */
- def isModule: Boolean = false
-
- /** This symbol cast to a ModuleSymbol defined by an object definition.
- * @throws ScalaReflectionException if `isModule` is false.
- */
- def asModule: ModuleSymbol = throw new ScalaReflectionException(s"$this is not a module")
-
- /** Does this symbol represent the definition of a class or trait?
- * If yes, `isType` is also guaranteed to be true.
- */
- def isClass: Boolean = false
-
- /** Does this symbol represent the definition of a class implicitly associated
- *  with an object definition (a module class in Scala compiler parlance)?
- * If yes, `isType` is also guaranteed to be true.
- */
- def isModuleClass: Boolean = false
-
- /** This symbol cast to a ClassSymbol representing a class or trait.
- * @throws ScalaReflectionException if `isClass` is false.
- */
- def asClass: ClassSymbol = throw new ScalaReflectionException(s"$this is not a class")
-
- /** Does this symbol represent a free term captured by reification?
- * If yes, `isTerm` is also guaranteed to be true.
- */
- def isFreeTerm: Boolean = false
-
- /** This symbol cast to a free term symbol.
- * @throws ScalaReflectionException if `isFreeTerm` is false.
- */
- def asFreeTerm: FreeTermSymbol = throw new ScalaReflectionException(s"$this is not a free term")
-
- /** Does this symbol represent a free type captured by reification?
- * If yes, `isType` is also guaranteed to be true.
- */
- def isFreeType: Boolean = false
-
- /** This symbol cast to a free type symbol.
- * @throws ScalaReflectionException if `isFreeType` is false.
- */
- def asFreeType: FreeTypeSymbol = throw new ScalaReflectionException(s"$this is not a free type")
-
- def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol
- def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol)
- def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
- def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol
- def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol
- }
-
- /** The base API that all type symbols support */
- trait TypeSymbolBase extends SymbolBase { this: TypeSymbol =>
- /** Type symbols have their names of type `TypeName`.
- */
- final type NameType = TypeName
-
- /** The type constructor corresponding to this type symbol.
- * This is different from `toType` in that type parameters
- * are part of results of `toType`, but not of `toTypeConstructor`.
- *
- * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
- * `C`. Then `C.toType` is the type `C[T]`, but `C.toTypeConstructor` is `C`.
- */
- def toTypeConstructor: Type
-
- /** A type reference that refers to this type symbol seen
- * as a member of given type `site`.
- */
- def toTypeIn(site: Type): Type
-
- /** A type reference that refers to this type symbol.
- *  Note that if the symbol is a member of a class, one is almost always
- *  interested in `toTypeIn` with a site type instead.
- *
- * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
- * `C`. Then `C.toType` is the type `C[T]`.
- *
- * By contrast, `C.typeSignature` would be a type signature of form
- * `PolyType(ClassInfoType(...))` that describes type parameters, value
- * parameters, parent types, and members of `C`.
- */
- def toType: Type
-
- final override def isType = true
- final override def asType = this
- }
-
- /** The base API that all term symbols support */
- trait TermSymbolBase extends SymbolBase { this: TermSymbol =>
- /** Term symbols have their names of type `TermName`.
- */
- final type NameType = TermName
-
- final override def isTerm = true
- final override def asTerm = this
- }
-
- /** The base API that all method symbols support */
- trait MethodSymbolBase extends TermSymbolBase { this: MethodSymbol =>
- final override def isMethod = true
- final override def asMethod = this
- }
-
- /** The base API that all module symbols support */
- trait ModuleSymbolBase extends TermSymbolBase { this: ModuleSymbol =>
- /** The class implicitly associated with the object definition.
- * One can go back from a module class to the associated module symbol
- * by inspecting its `selfType.termSymbol`.
- */
- def moduleClass: Symbol // needed for tree traversals
- // when this becomes `moduleClass: ClassSymbol`, it will be the happiest day in my life
-
- final override def isModule = true
- final override def asModule = this
- }
-
- /** The base API that all class symbols support */
- trait ClassSymbolBase extends TypeSymbolBase { this: ClassSymbol =>
- final override def isClass = true
- final override def asClass = this
- }
-
- /** The base API that all free type symbols support */
- trait FreeTypeSymbolBase extends TypeSymbolBase { this: FreeTypeSymbol =>
- final override def isFreeType = true
- final override def asFreeType = this
- }
-
- /** The base API that all free term symbols support */
- trait FreeTermSymbolBase extends TermSymbolBase { this: FreeTermSymbol =>
- final override def isFreeTerm = true
- final override def asFreeTerm = this
- }
-}
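A sketch of the isX/asX protocol documented above, via the runtime universe; Option#get is just a convenient well-known member:

    import scala.reflect.runtime.{universe => u}

    val sym = u.typeOf[Option[Int]].member(u.newTermName("get"))
    println(sym.isTerm)      // true
    println(sym.isMethod)    // true
    val m = sym.asMethod     // safe only because isMethod is true
    println(m.returnType)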
diff --git a/src/library/scala/reflect/base/TagInterop.scala b/src/library/scala/reflect/base/TagInterop.scala
deleted file mode 100644
index ec054106eb..0000000000
--- a/src/library/scala/reflect/base/TagInterop.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.reflect
-package base
-
-import scala.runtime.ScalaRunTime._
-
-trait TagInterop { self: Universe =>
- // todo. `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
- // if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
-
- def typeTagToManifest[T: ClassTag](mirror: Any, tag: base.Universe # TypeTag[T]): Manifest[T] =
- throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
-
- def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): base.Universe # TypeTag[T] =
- throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
-}
diff --git a/src/library/scala/reflect/base/TreeCreator.scala b/src/library/scala/reflect/base/TreeCreator.scala
deleted file mode 100644
index c9c8de2307..0000000000
--- a/src/library/scala/reflect/base/TreeCreator.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala.reflect
-package base
-
-abstract class TreeCreator {
- def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree
-}
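A hypothetical sketch of a TreeCreator implementation of the kind the reifier generates, assuming the scala.reflect.base package deleted in this commit is still on the classpath; it merely returns the empty tree of the mirror's universe:

    import scala.reflect.base.{MirrorOf, TreeCreator, Universe}

    object EmptyTreeCreator extends TreeCreator {
      // The mirror fixes the universe U, so the resulting tree lives in m.universe.
      def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree =
        m.universe.EmptyTree
    }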
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
deleted file mode 100644
index 224965a2b7..0000000000
--- a/src/library/scala/reflect/base/Trees.scala
+++ /dev/null
@@ -1,1455 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.reflect
-package base
-
-trait Trees { self: Universe =>
-
- /** The base API that all trees support */
- abstract class TreeBase extends Product { this: Tree =>
- /** ... */
- def isDef: Boolean
-
- /** ... */
- def isEmpty: Boolean
-
- /** The canonical way to test if a Tree represents a term.
- */
- def isTerm: Boolean
-
- /** The canonical way to test if a Tree represents a type.
- */
- def isType: Boolean
-
- /** Obtains string representation of a tree */
- override def toString: String = treeToString(this)
- }
-
- /** Obtains string representation of a tree */
- protected def treeToString(tree: Tree): String
-
- /** Obtains the type of the tree (we intentionally don't expose `tree.tpe` in base) */
- protected def treeType(tree: Tree): Type
-
- /** Tree is the basis for scala's abstract syntax. The nodes are
- * implemented as case classes, and the parameters which initialize
- * a given tree are immutable: however Trees have several mutable
- * fields which are manipulated in the course of typechecking,
- * including pos, symbol, and tpe.
- *
- * Newly instantiated trees have tpe set to null (though it
- * may be set immediately thereafter depending on how it is
- * constructed.) When a tree is passed to the typer, typically via
- * `typer.typed(tree)`, under normal circumstances the tpe must be
- * null or the typer will ignore it. Furthermore, the typer is not
- * required to return the same tree it was passed.
- *
- * Trees can be easily traversed with e.g. foreach on the root node;
- * for a more nuanced traversal, subclass Traverser. Transformations
- * can be considerably trickier: see the numerous subclasses of
- * Transformer found around the compiler.
- *
- * Copying Trees should be done with care depending on whether
- * it need be done lazily or strictly (see LazyTreeCopier and
- * StrictTreeCopier) and on whether the contents of the mutable
- * fields should be copied. The tree copiers will copy the mutable
- * attributes to the new tree; calling Tree#duplicate will copy
- * symbol and tpe, but all the positions will be focused.
- *
- * Trees can be coarsely divided into four mutually exclusive categories:
- *
- * - TermTrees, representing terms
- *  - TypTrees, representing types.  Note that this is `TypTree`, not `TypeTree`.
- * - SymTrees, which may represent types or terms.
- * - Other Trees, which have none of those as parents.
- *
- * SymTrees include important nodes Ident and Select, which are
- * used as both terms and types; they are distinguishable based on
- *  whether the Name is a TermName or TypeName.  The correct way
- *  to test for a type or a term (on any Tree) is to use the isTerm/isType
- *  methods on Tree.
- *
- * "Others" are mostly syntactic or short-lived constructs. Examples
- * include CaseDef, which wraps individual match cases: they are
- * neither terms nor types, nor do they carry a symbol. Another
- * example is Parens, which is eliminated during parsing.
- */
- type Tree >: Null <: TreeBase
-
- /** A tag that preserves the identity of the `Tree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TreeTag: ClassTag[Tree]
-
- /** The empty tree */
- val EmptyTree: Tree
-
- /** A tree for a term. Not all terms are TermTrees; use isTerm
- * to reliably identify terms.
- */
- type TermTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TermTreeTag: ClassTag[TermTree]
-
- /** A tree for a type. Not all types are TypTrees; use isType
- * to reliably identify types.
- */
- type TypTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypTreeTag: ClassTag[TypTree]
-
- /** A tree with a mutable symbol field, initialized to NoSymbol.
- */
- type SymTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SymTreeTag: ClassTag[SymTree]
-
- /** A tree with a name - effectively, a DefTree or RefTree.
- */
- type NameTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val NameTreeTag: ClassTag[NameTree]
-
- /** A tree which references a symbol-carrying entity.
- * References one, as opposed to defining one; definitions
- * are in DefTrees.
- */
- type RefTree >: Null <: SymTree with NameTree
-
- /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val RefTreeTag: ClassTag[RefTree]
-
- /** A tree which defines a symbol-carrying entity.
- */
- type DefTree >: Null <: SymTree with NameTree
-
- /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val DefTreeTag: ClassTag[DefTree]
-
- /** Common base class for all member definitions: types, classes,
- * objects, packages, vals and vars, defs.
- */
- type MemberDef >: Null <: DefTree
-
- /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MemberDefTag: ClassTag[MemberDef]
-
- /** A packaging, such as `package pid { stats }`
- */
- type PackageDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val PackageDefTag: ClassTag[PackageDef]
-
- /** The constructor/deconstructor for `PackageDef` instances. */
- val PackageDef: PackageDefExtractor
-
- /** An extractor class to create and pattern match with syntax `PackageDef(pid, stats)`.
- * This AST node corresponds to the following Scala code:
- *
- * `package` pid { stats }
- */
- abstract class PackageDefExtractor {
- def apply(pid: RefTree, stats: List[Tree]): PackageDef
- def unapply(packageDef: PackageDef): Option[(RefTree, List[Tree])]
- }
-
- /** A common base class for class and object definitions.
- */
- type ImplDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ImplDefTag: ClassTag[ImplDef]
-
- /** A class definition.
- */
- type ClassDef >: Null <: ImplDef
-
- /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ClassDefTag: ClassTag[ClassDef]
-
- /** The constructor/deconstructor for `ClassDef` instances. */
- val ClassDef: ClassDefExtractor
-
- /** An extractor class to create and pattern match with syntax `ClassDef(mods, name, tparams, impl)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `class` name [tparams] impl
- *
- * Where impl stands for:
- *
- * `extends` parents { defs }
- */
- abstract class ClassDefExtractor {
- def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template): ClassDef
- def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)]
- }
-
- /** An object definition, e.g. `object Foo`. Internally, objects are
- * quite frequently called modules to reduce ambiguity.
- * Eliminated by refcheck.
- */
- type ModuleDef >: Null <: ImplDef
-
- /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ModuleDefTag: ClassTag[ModuleDef]
-
- /** The constructor/deconstructor for `ModuleDef` instances. */
- val ModuleDef: ModuleDefExtractor
-
- /** An extractor class to create and pattern match with syntax `ModuleDef(mods, name, impl)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `object` name impl
- *
- * Where impl stands for:
- *
- * `extends` parents { defs }
- */
- abstract class ModuleDefExtractor {
- def apply(mods: Modifiers, name: TermName, impl: Template): ModuleDef
- def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)]
- }
-
- /** A common base class for ValDefs and DefDefs.
- */
- type ValOrDefDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
-
- /** Broadly speaking, a value definition. All these are encoded as ValDefs:
- *
- * - immutable values, e.g. "val x"
- * - mutable values, e.g. "var x" - the MUTABLE flag set in mods
- * - lazy values, e.g. "lazy val x" - the LAZY flag set in mods
- * - method parameters, see vparamss in DefDef - the PARAM flag is set in mods
- * - explicit self-types, e.g. class A { self: Bar => } - !!! not sure what is set.
- */
- type ValDef >: Null <: ValOrDefDef
-
- /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ValDefTag: ClassTag[ValDef]
-
- /** The constructor/deconstructor for `ValDef` instances. */
- val ValDef: ValDefExtractor
-
- /** An extractor class to create and pattern match with syntax `ValDef(mods, name, tpt, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `val` name: tpt = rhs
- *
- * mods `var` name: tpt = rhs
- *
- * mods name: tpt = rhs // in signatures of function and method definitions
- *
- * self: Bar => // self-types (!!! not sure what is set)
- *
- * If the type of a value is not specified explicitly (i.e. is meant to be inferred),
- * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
- */
- abstract class ValDefExtractor {
- def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
- def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)]
- }
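// For illustration: destructuring a ValDef via the extractor above, here against the
// concrete scala.reflect.runtime.universe (a sketch; `val x = 42` is arbitrary).
import scala.reflect.runtime.universe._

val Block(List(valDef), _) = reify { val x = 42; x }.tree
valDef match {
  case ValDef(mods, name, tpt, rhs) => println(s"$name = ${showRaw(rhs)}")
}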
-
- /** A method or macro definition.
- * @param name The name of the method or macro. Can be a type name in case this is a type macro
- */
- type DefDef >: Null <: ValOrDefDef
-
- /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val DefDefTag: ClassTag[DefDef]
-
- /** The constructor/deconstructor for `DefDef` instances. */
- val DefDef: DefDefExtractor
-
- /** An extractor class to create and pattern match with syntax `DefDef(mods, name, tparams, vparamss, tpt, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `def` name[tparams](vparams_1)...(vparams_n): tpt = rhs
- *
- * If the return type is not specified explicitly (i.e. is meant to be inferred),
- * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
- */
- abstract class DefDefExtractor {
- def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
- def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
- }
-
- /** An abstract type, a type parameter, or a type alias.
- * Eliminated by erasure.
- */
- type TypeDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeDefTag: ClassTag[TypeDef]
-
- /** The constructor/deconstructor for `TypeDef` instances. */
- val TypeDef: TypeDefExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeDef(mods, name, tparams, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `type` name[tparams] = rhs
- *
- * mods `type` name[tparams] >: lo <: hi
- *
- * First usage illustrates `TypeDefs` representing type aliases and type parameters.
- * Second usage illustrates `TypeDefs` representing abstract types,
- * where lo and hi are both `TypeBoundsTrees` and `Modifier.deferred` is set in mods.
- */
- abstract class TypeDefExtractor {
- def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef
- def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)]
- }
-
- /** A labelled expression. Not expressible in language syntax, but
- * generated by the compiler to simulate while/do-while loops, and
- * also by the pattern matcher.
- *
- * The label acts much like a nested function, where `params` represents
- * the incoming parameters. The symbol given to the LabelDef should have
- * a MethodType, as if it were a nested function.
- *
- * Jumps are apply nodes attributed with a label's symbol. The
- * arguments from the apply node will be passed to the label and
- * assigned to the Idents.
- *
- * Forward jumps within a block are allowed.
- */
- type LabelDef >: Null <: DefTree with TermTree
-
- /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val LabelDefTag: ClassTag[LabelDef]
-
- /** The constructor/deconstructor for `LabelDef` instances. */
- val LabelDef: LabelDefExtractor
-
- /** An extractor class to create and pattern match with syntax `LabelDef(name, params, rhs)`.
- *
- * This AST node does not have direct correspondence to Scala code.
- *  It is used for tailcalls and the like.
- * For example, while/do are desugared to label defs as follows:
- *
- *  while (cond) body ==> LabelDef(L$, List(), if (cond) { body; L$() } else ())
- *  do body while (cond) ==> LabelDef(L$, List(), body; if (cond) L$() else ())
- */
- abstract class LabelDefExtractor {
- def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
- def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)]
- }
-
- /** Import selector
- *
- *  Representation of an imported name, its optional rename, and their optional positions.
- *
- * Eliminated by typecheck.
- *
- * @param name the imported name
- * @param namePos its position or -1 if undefined
- * @param rename the name the import is renamed to (== name if no renaming)
- * @param renamePos the position of the rename or -1 if undefined
- */
- type ImportSelector >: Null <: AnyRef
-
- /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ImportSelectorTag: ClassTag[ImportSelector]
-
- /** The constructor/deconstructor for `ImportSelector` instances. */
- val ImportSelector: ImportSelectorExtractor
-
- /** An extractor class to create and pattern match with syntax `ImportSelector(name, namePos, rename, renamePos)`.
- *  This is not an AST node; it is used as a part of the `Import` node.
- */
- abstract class ImportSelectorExtractor {
- def apply(name: Name, namePos: Int, rename: Name, renamePos: Int): ImportSelector
- def unapply(importSelector: ImportSelector): Option[(Name, Int, Name, Int)]
- }
-
- /** Import clause
- *
- *  @param expr the qualifier expression of the import
- *  @param selectors the selectors of the import
- */
- type Import >: Null <: SymTree
-
- /** A tag that preserves the identity of the `Import` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ImportTag: ClassTag[Import]
-
- /** The constructor/deconstructor for `Import` instances. */
- val Import: ImportExtractor
-
- /** An extractor class to create and pattern match with syntax `Import(expr, selectors)`.
- * This AST node corresponds to the following Scala code:
- *
- * import expr.{selectors}
- *
- * Selectors are a list of pairs of names (from, to). // [Eugene++] obviously, they no longer are. please, document!
- *  The last (and maybe only) name may be nme.WILDCARD. For instance:
- *
- * import qual.{x, y => z, _}
- *
- * Would be represented as:
- *
- * Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
- *
- * The symbol of an `Import` is an import symbol @see Symbol.newImport.
- * It's used primarily as a marker to check that the import has been typechecked.
- */
- abstract class ImportExtractor {
- def apply(expr: Tree, selectors: List[ImportSelector]): Import
- def unapply(import_ : Import): Option[(Tree, List[ImportSelector])]
- }
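// For illustration: building a wildcard import with the extractor above (a sketch
// against scala.reflect.runtime.universe; the null rename for the wildcard selector
// follows the Import extractor documentation above).
import scala.reflect.runtime.universe._

val imp = Import(
  Select(Ident(newTermName("scala")), newTermName("collection")),
  List(ImportSelector(nme.WILDCARD, -1, null, -1)))
println(show(imp))   // roughly: import scala.collection._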
-
- /** Instantiation template of a class or trait
- *
- *  @param parents the parents of the template
- *  @param body the statements and member definitions in the template body
- */
- type Template >: Null <: SymTree
-
- /** A tag that preserves the identity of the `Template` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TemplateTag: ClassTag[Template]
-
- /** The constructor/deconstructor for `Template` instances. */
- val Template: TemplateExtractor
-
- /** An extractor class to create and pattern match with syntax `Template(parents, self, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * `extends` parents { self => body }
- *
- * In case when the self-type annotation is missing, it is represented as
- * an empty value definition with nme.WILDCARD as name and NoType as type.
- *
- * The symbol of a template is a local dummy. @see Symbol.newLocalDummy
- * The owner of the local dummy is the enclosing trait or class.
- * The local dummy is itself the owner of any local blocks. For example:
- *
- * class C {
- * def foo { // owner is C
- * def bar // owner is local dummy
- * }
- * }
- */
- abstract class TemplateExtractor {
- def apply(parents: List[Tree], self: ValDef, body: List[Tree]): Template
- def unapply(template: Template): Option[(List[Tree], ValDef, List[Tree])]
- }
-
- /** Block of expressions (semicolon separated expressions) */
- type Block >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Block` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val BlockTag: ClassTag[Block]
-
- /** The constructor/deconstructor for `Block` instances. */
- val Block: BlockExtractor
-
- /** An extractor class to create and pattern match with syntax `Block(stats, expr)`.
- * This AST node corresponds to the following Scala code:
- *
- * { stats; expr }
- *
- * If the block is empty, the `expr` is set to `Literal(Constant(()))`. // [Eugene++] check this
- */
- abstract class BlockExtractor {
- def apply(stats: List[Tree], expr: Tree): Block
- def unapply(block: Block): Option[(List[Tree], Tree)]
- }
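// For illustration: constructing the `{ stats; expr }` shape with the extractor above
// (a sketch against scala.reflect.runtime.universe; println/"hi"/42 are arbitrary).
import scala.reflect.runtime.universe._

val block = Block(
  List(Apply(Ident(newTermName("println")), List(Literal(Constant("hi"))))),
  Literal(Constant(42)))
println(show(block))   // roughly: { println("hi"); 42 }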
-
- /** Case clause in a pattern match.
- *  Eliminated by patmat/explicitouter,
- *  except for occurrences in switch statements.
- */
- type CaseDef >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val CaseDefTag: ClassTag[CaseDef]
-
- /** The constructor/deconstructor for `CaseDef` instances. */
- val CaseDef: CaseDefExtractor
-
- /** An extractor class to create and pattern match with syntax `CaseDef(pat, guard, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * `case` pat `if` guard => body
- *
- * If the guard is not present, the `guard` is set to `EmptyTree`. // [Eugene++] check this
- * If the body is not specified, the `body` is set to `EmptyTree`. // [Eugene++] check this
- */
- abstract class CaseDefExtractor {
- def apply(pat: Tree, guard: Tree, body: Tree): CaseDef
- def unapply(caseDef: CaseDef): Option[(Tree, Tree, Tree)]
- }
-
- /** Alternatives of patterns.
- *  Eliminated by patmat/explicitouter, except for
- *  occurrences in encoded Switch statements (i.e. remaining Match(CaseDef(...)) nodes).
- */
- type Alternative >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AlternativeTag: ClassTag[Alternative]
-
- /** The constructor/deconstructor for `Alternative` instances. */
- val Alternative: AlternativeExtractor
-
- /** An extractor class to create and pattern match with syntax `Alternative(trees)`.
- * This AST node corresponds to the following Scala code:
- *
- * pat1 | ... | patn
- */
- abstract class AlternativeExtractor {
- def apply(trees: List[Tree]): Alternative
- def unapply(alternative: Alternative): Option[List[Tree]]
- }
-
- /** Repetition of pattern.
- * Eliminated by patmat/explicitouter.
- */
- type Star >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Star` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val StarTag: ClassTag[Star]
-
- /** The constructor/deconstructor for `Star` instances. */
- val Star: StarExtractor
-
- /** An extractor class to create and pattern match with syntax `Star(elem)`.
- * This AST node corresponds to the following Scala code:
- *
- * pat*
- */
- abstract class StarExtractor {
- def apply(elem: Tree): Star
- def unapply(star: Star): Option[Tree]
- }
-
- /** Bind of a variable to a rhs pattern.
- *  Eliminated by patmat/explicitouter.
- *
- *  @param name the name of the bound variable
- *  @param body the pattern the variable is bound to
- */
- type Bind >: Null <: DefTree
-
- /** A tag that preserves the identity of the `Bind` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val BindTag: ClassTag[Bind]
-
- /** The constructor/deconstructor for `Bind` instances. */
- val Bind: BindExtractor
-
- /** An extractor class to create and pattern match with syntax `Bind(name, body)`.
- * This AST node corresponds to the following Scala code:
- *
- *    name @ body
- */
- abstract class BindExtractor {
- def apply(name: Name, body: Tree): Bind
- def unapply(bind: Bind): Option[(Name, Tree)]
- }
-
- /** No one knows what this is.
- *  It is not idempotent w.r.t. typechecking.
- * Can we, please, remove it?
- * Introduced by typer, eliminated by patmat/explicitouter.
- */
- type UnApply >: Null <: TermTree
-
- /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val UnApplyTag: ClassTag[UnApply]
-
- /** The constructor/deconstructor for `UnApply` instances. */
- val UnApply: UnApplyExtractor
-
- /** An extractor class to create and pattern match with syntax `UnApply(fun, args)`.
- * This AST node does not have direct correspondence to Scala code,
- * and is introduced when typechecking pattern matches and `try` blocks.
- */
- abstract class UnApplyExtractor {
- def apply(fun: Tree, args: List[Tree]): UnApply
- def unapply(unApply: UnApply): Option[(Tree, List[Tree])]
- }
-
- /** Array of expressions, needs to be translated in backend.
- *  This AST node is used to pass arguments to vararg parameters.
- * Introduced by uncurry.
- */
- type ArrayValue >: Null <: TermTree
-
- /** A tag that preserves the identity of the `ArrayValue` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ArrayValueTag: ClassTag[ArrayValue]
-
- /** The constructor/deconstructor for `ArrayValue` instances. */
- val ArrayValue: ArrayValueExtractor
-
- /** An extractor class to create and pattern match with syntax `ArrayValue(elemtpt, elems)`.
- * This AST node does not have direct correspondence to Scala code,
- *  and is used to pass arguments to vararg parameters. For instance:
- *
- * printf("%s%d", foo, 42)
- *
- *  Is translated after uncurry to:
- *
- * Apply(
- * Ident("printf"),
- * Literal("%s%d"),
- * ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
- */
- abstract class ArrayValueExtractor {
- def apply(elemtpt: Tree, elems: List[Tree]): ArrayValue
- def unapply(arrayValue: ArrayValue): Option[(Tree, List[Tree])]
- }
-
- /** Anonymous function, eliminated by lambdalift */
- type Function >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `Function` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FunctionTag: ClassTag[Function]
-
- /** The constructor/deconstructor for `Function` instances. */
- val Function: FunctionExtractor
-
- /** An extractor class to create and pattern match with syntax `Function(vparams, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * vparams => body
- *
- * The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
- * It is the owner of the function's parameters.
- */
- abstract class FunctionExtractor {
- def apply(vparams: List[ValDef], body: Tree): Function
- def unapply(function: Function): Option[(List[ValDef], Tree)]
- }
-
- /** Assignment */
- type Assign >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Assign` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AssignTag: ClassTag[Assign]
-
- /** The constructor/deconstructor for `Assign` instances. */
- val Assign: AssignExtractor
-
- /** An extractor class to create and pattern match with syntax `Assign(lhs, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * lhs = rhs
- */
- abstract class AssignExtractor {
- def apply(lhs: Tree, rhs: Tree): Assign
- def unapply(assign: Assign): Option[(Tree, Tree)]
- }
-
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply), resurrected by reifier.
- */
- type AssignOrNamedArg >: Null <: TermTree
-
- /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
-
- /** The constructor/deconstructor for `AssignOrNamedArg` instances. */
- val AssignOrNamedArg: AssignOrNamedArgExtractor
-
- /** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * @annotation(lhs = rhs)
- *
- * m.f(lhs = rhs)
- */
- abstract class AssignOrNamedArgExtractor {
- def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg
- def unapply(assignOrNamedArg: AssignOrNamedArg): Option[(Tree, Tree)]
- }
-
- /** Conditional expression */
- type If >: Null <: TermTree
-
- /** A tag that preserves the identity of the `If` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val IfTag: ClassTag[If]
-
- /** The constructor/deconstructor for `If` instances. */
- val If: IfExtractor
-
- /** An extractor class to create and pattern match with syntax `If(cond, thenp, elsep)`.
- * This AST node corresponds to the following Scala code:
- *
- * `if` (cond) thenp `else` elsep
- *
- * If the alternative is not present, the `elsep` is set to `EmptyTree`. // [Eugene++] check this
- */
- abstract class IfExtractor {
- def apply(cond: Tree, thenp: Tree, elsep: Tree): If
- def unapply(if_ : If): Option[(Tree, Tree, Tree)]
- }
-
- /** - Pattern matching expression (before explicitouter)
- * - Switch statements (after explicitouter)
- *
- * After explicitouter, cases will satisfy the following constraints:
- *
- * - all guards are `EmptyTree`,
- * - all patterns will be either `Literal(Constant(x:Int))`
- * or `Alternative(lit|...|lit)`
- * - except for an "otherwise" branch, which has pattern
- * `Ident(nme.WILDCARD)`
- */
- type Match >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Match` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MatchTag: ClassTag[Match]
-
- /** The constructor/deconstructor for `Match` instances. */
- val Match: MatchExtractor
-
- /** An extractor class to create and pattern match with syntax `Match(selector, cases)`.
- * This AST node corresponds to the following Scala code:
- *
- * selector `match` { cases }
- *
- * // [Eugene++] say something about `val (foo, bar) = baz` and likes.
- */
- abstract class MatchExtractor {
- def apply(selector: Tree, cases: List[CaseDef]): Match
- def unapply(match_ : Match): Option[(Tree, List[CaseDef])]
- }
-
- /** Return expression */
- type Return >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `Return` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ReturnTag: ClassTag[Return]
-
- /** The constructor/deconstructor for `Return` instances. */
- val Return: ReturnExtractor
-
- /** An extractor class to create and pattern match with syntax `Return(expr)`.
- * This AST node corresponds to the following Scala code:
- *
- * `return` expr
- *
- * The symbol of a Return node is the enclosing method
- */
- abstract class ReturnExtractor {
- def apply(expr: Tree): Return
- def unapply(return_ : Return): Option[Tree]
- }
-
- /** Try/catch/finally expression */
- type Try >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Try` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TryTag: ClassTag[Try]
-
- /** The constructor/deconstructor for `Try` instances. */
- val Try: TryExtractor
-
- /** An extractor class to create and pattern match with syntax `Try(block, catches, finalizer)`.
- * This AST node corresponds to the following Scala code:
- *
- * `try` block `catch` { catches } `finally` finalizer
- *
- * If the finalizer is not present, the `finalizer` is set to `EmptyTree`. // [Eugene++] check this
- */
- abstract class TryExtractor {
- def apply(block: Tree, catches: List[CaseDef], finalizer: Tree): Try
- def unapply(try_ : Try): Option[(Tree, List[CaseDef], Tree)]
- }
-
- /** Throw expression */
- type Throw >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Throw` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ThrowTag: ClassTag[Throw]
-
- /** The constructor/deconstructor for `Throw` instances. */
- val Throw: ThrowExtractor
-
- /** An extractor class to create and pattern match with syntax `Throw(expr)`.
- * This AST node corresponds to the following Scala code:
- *
- * `throw` expr
- */
- abstract class ThrowExtractor {
- def apply(expr: Tree): Throw
- def unapply(throw_ : Throw): Option[Tree]
- }
-
- /** Object instantiation
- *  One should always use the factory method below to build a user-level `new`.
- *
- * @param tpt a class type
- */
- type New >: Null <: TermTree
-
- /** A tag that preserves the identity of the `New` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val NewTag: ClassTag[New]
-
- /** The constructor/deconstructor for `New` instances. */
- val New: NewExtractor
-
- /** An extractor class to create and pattern match with syntax `New(tpt)`.
- * This AST node corresponds to the following Scala code:
- *
- * `new` T
- *
- * This node always occurs in the following context:
- *
- * (`new` tpt).<init>[targs](args)
- */
- abstract class NewExtractor {
- def apply(tpt: Tree): New
- def unapply(new_ : New): Option[Tree]
- }
-
- /** Type annotation, eliminated by cleanup */
- type Typed >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Typed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypedTag: ClassTag[Typed]
-
- /** The constructor/deconstructor for `Typed` instances. */
- val Typed: TypedExtractor
-
- /** An extractor class to create and pattern match with syntax `Typed(expr, tpt)`.
- * This AST node corresponds to the following Scala code:
- *
- * expr: tpt
- */
- abstract class TypedExtractor {
- def apply(expr: Tree, tpt: Tree): Typed
- def unapply(typed: Typed): Option[(Tree, Tree)]
- }
-
- /** Common base class for Apply and TypeApply. This could in principle
- * be a SymTree, but whether or not a Tree is a SymTree isn't used
- * to settle any interesting questions, and it would add a useless
- * field to all the instances (useless, since GenericApply forwards to
- * the underlying fun.)
- */
- type GenericApply >: Null <: TermTree
-
- /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val GenericApplyTag: ClassTag[GenericApply]
-
- /** Explicit type application.
- * @PP: All signs point toward it being a requirement that args.nonEmpty,
- * but I can't find that explicitly stated anywhere. Unless your last name
- * is odersky, you should probably treat it as true.
- */
- type TypeApply >: Null <: GenericApply
-
- /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeApplyTag: ClassTag[TypeApply]
-
- /** The constructor/deconstructor for `TypeApply` instances. */
- val TypeApply: TypeApplyExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeApply(fun, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun[args]
- */
- abstract class TypeApplyExtractor {
- def apply(fun: Tree, args: List[Tree]): TypeApply
- def unapply(typeApply: TypeApply): Option[(Tree, List[Tree])]
- }
-
- /** Value application */
- type Apply >: Null <: GenericApply
-
- /** A tag that preserves the identity of the `Apply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ApplyTag: ClassTag[Apply]
-
- /** The constructor/deconstructor for `Apply` instances. */
- val Apply: ApplyExtractor
-
- /** An extractor class to create and pattern match with syntax `Apply(fun, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun(args)
- *
- * For instance:
- *
- * fun[targs](args)
- *
- * Is expressed as:
- *
- * Apply(TypeApply(fun, targs), args)
- */
- abstract class ApplyExtractor {
- def apply(fun: Tree, args: List[Tree]): Apply
- def unapply(apply: Apply): Option[(Tree, List[Tree])]
- }
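// For illustration: the Apply(TypeApply(fun, targs), args) nesting described above
// (a sketch against scala.reflect.runtime.universe; List/Int/1/2 are arbitrary).
import scala.reflect.runtime.universe._

val fun  = Select(Ident(newTermName("List")), newTermName("apply"))
val tree = Apply(
  TypeApply(fun, List(Ident(newTypeName("Int")))),
  List(Literal(Constant(1)), Literal(Constant(2))))
println(show(tree))   // roughly: List.apply[Int](1, 2)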
-
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- * [Eugene++] what is it used for?
- * Introduced by erasure, eliminated by cleanup.
- */
- type ApplyDynamic >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `ApplyDynamic` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ApplyDynamicTag: ClassTag[ApplyDynamic]
-
- /** The constructor/deconstructor for `ApplyDynamic` instances. */
- val ApplyDynamic: ApplyDynamicExtractor
-
- /** An extractor class to create and pattern match with syntax `ApplyDynamic(qual, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun(args)
- *
- * The symbol of an ApplyDynamic is the function symbol of `qual`, or NoSymbol, if there is none.
- */
- abstract class ApplyDynamicExtractor {
- def apply(qual: Tree, args: List[Tree]): ApplyDynamic
- def unapply(applyDynamic: ApplyDynamic): Option[(Tree, List[Tree])]
- }
-
- /** Super reference, qual = corresponding this reference
- * A super reference C.super[M] is represented as Super(This(C), M).
- */
- type Super >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Super` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SuperTag: ClassTag[Super]
-
- /** The constructor/deconstructor for `Super` instances. */
- val Super: SuperExtractor
-
- /** An extractor class to create and pattern match with syntax `Super(qual, mix)`.
- * This AST node corresponds to the following Scala code:
- *
- * C.super[M]
- *
- * Which is represented as:
- *
- * Super(This(C), M)
- *
- * If `mix` is empty, it is tpnme.EMPTY.
- *
- * The symbol of a Super is the class _from_ which the super reference is made.
- * For instance in C.super(...), it would be C.
- */
- abstract class SuperExtractor {
- def apply(qual: Tree, mix: TypeName): Super
- def unapply(super_ : Super): Option[(Tree, TypeName)]
- }
-
- /** Self reference */
- type This >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `This` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ThisTag: ClassTag[This]
-
- /** The constructor/deconstructor for `This` instances. */
- val This: ThisExtractor
-
- /** An extractor class to create and pattern match with syntax `This(qual)`.
- * This AST node corresponds to the following Scala code:
- *
- * qual.this
- *
- * The symbol of a This is the class to which the this refers.
- * For instance in C.this, it would be C.
- *
- * If `mix` is empty, then ???
- */
- abstract class ThisExtractor {
- def apply(qual: TypeName): This
- def unapply(this_ : This): Option[TypeName]
- }
-
- /** Designator <qualifier> . <name> */
- type Select >: Null <: RefTree
-
- /** A tag that preserves the identity of the `Select` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SelectTag: ClassTag[Select]
-
- /** The constructor/deconstructor for `Select` instances. */
- val Select: SelectExtractor
-
- /** An extractor class to create and pattern match with syntax `Select(qual, name)`.
- * This AST node corresponds to the following Scala code:
- *
- * qualifier.selector
- */
- abstract class SelectExtractor {
- def apply(qualifier: Tree, name: Name): Select
- def unapply(select: Select): Option[(Tree, Name)]
- }
-
- /** Identifier <name> */
- type Ident >: Null <: RefTree
-
- /** A tag that preserves the identity of the `Ident` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val IdentTag: ClassTag[Ident]
-
- /** The constructor/deconstructor for `Ident` instances. */
- val Ident: IdentExtractor
-
- /** An extractor class to create and pattern match with syntax `Ident(name)`.
- * This AST node corresponds to the following Scala code:
- *
- * name
- *
- *  The type checker converts idents that refer to enclosing fields or methods to selects.
- * For example, name ==> this.name
- */
- abstract class IdentExtractor {
- def apply(name: Name): Ident
- def unapply(ident: Ident): Option[Name]
- }
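// For illustration: a bare Ident and the Select the typer would rewrite it to
// (a sketch against scala.reflect.runtime.universe; the names `x` and `C` are arbitrary).
import scala.reflect.runtime.universe._

val bare      = Ident(newTermName("x"))
val rewritten = Select(This(newTypeName("C")), newTermName("x"))
println(show(bare))        // x
println(show(rewritten))   // C.this.x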
-
- /** Marks underlying reference to id as boxed.
- * @pre id must refer to a captured variable
- *  A reference so marked will refer to the boxed entity; no dereferencing
- *  with `.elem` is done on it.
- * This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
- * It is eliminated in LambdaLift, where the boxing conversion takes place.
- */
- type ReferenceToBoxed >: Null <: TermTree
-
- /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
-
- /** The constructor/deconstructor for `ReferenceToBoxed` instances. */
- val ReferenceToBoxed: ReferenceToBoxedExtractor
-
- /** An extractor class to create and pattern match with syntax `ReferenceToBoxed(ident)`.
- * This AST node does not have direct correspondence to Scala code,
- *  and is emitted by macros to reference captured vars directly without going through `elem`.
- *
- * For example:
- *
- * var x = ...
- * fun { x }
- *
- * Will emit:
- *
- * Ident(x)
- *
- * Which gets transformed to:
- *
- * Select(Ident(x), "elem")
- *
- * If `ReferenceToBoxed` were used instead of Ident, no transformation would be performed.
- */
- abstract class ReferenceToBoxedExtractor {
- def apply(ident: Ident): ReferenceToBoxed
- def unapply(referenceToBoxed: ReferenceToBoxed): Option[Ident]
- }
-
- /** Literal */
- type Literal >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Literal` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val LiteralTag: ClassTag[Literal]
-
- /** The constructor/deconstructor for `Literal` instances. */
- val Literal: LiteralExtractor
-
- /** An extractor class to create and pattern match with syntax `Literal(value)`.
- * This AST node corresponds to the following Scala code:
- *
- * value
- */
- abstract class LiteralExtractor {
- def apply(value: Constant): Literal
- def unapply(literal: Literal): Option[Constant]
- }
-
- /** A tree that has an annotation attached to it. Only used for annotated types and
- *  annotation ascriptions; annotations on definitions are stored in the Modifiers.
- * Eliminated by typechecker (typedAnnotated), the annotations are then stored in
- * an AnnotatedType.
- */
- type Annotated >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AnnotatedTag: ClassTag[Annotated]
-
- /** The constructor/deconstructor for `Annotated` instances. */
- val Annotated: AnnotatedExtractor
-
- /** An extractor class to create and pattern match with syntax `Annotated(annot, arg)`.
- * This AST node corresponds to the following Scala code:
- *
- * arg @annot // for types
- * arg: @annot // for exprs
- */
- abstract class AnnotatedExtractor {
- def apply(annot: Tree, arg: Tree): Annotated
- def unapply(annotated: Annotated): Option[(Tree, Tree)]
- }
-
- /** Singleton type, eliminated by RefCheck */
- type SingletonTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
-
- /** The constructor/deconstructor for `SingletonTypeTree` instances. */
- val SingletonTypeTree: SingletonTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `SingletonTypeTree(ref)`.
- * This AST node corresponds to the following Scala code:
- *
- * ref.type
- */
- abstract class SingletonTypeTreeExtractor {
- def apply(ref: Tree): SingletonTypeTree
- def unapply(singletonTypeTree: SingletonTypeTree): Option[Tree]
- }
-
- /** Type selection <qualifier> # <name>, eliminated by RefCheck */
- // [Eugene++] don't see why we need it, when we have Select
- type SelectFromTypeTree >: Null <: TypTree with RefTree
-
- /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
-
- /** The constructor/deconstructor for `SelectFromTypeTree` instances. */
- val SelectFromTypeTree: SelectFromTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `SelectFromTypeTree(qualifier, name)`.
- * This AST node corresponds to the following Scala code:
- *
- * qualifier # selector
- *
- * Note: a path-dependent type p.T is expressed as p.type # T
- */
- abstract class SelectFromTypeTreeExtractor {
- def apply(qualifier: Tree, name: TypeName): SelectFromTypeTree
- def unapply(selectFromTypeTree: SelectFromTypeTree): Option[(Tree, TypeName)]
- }
-
- /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
- type CompoundTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
-
- /** The constructor/deconstructor for `CompoundTypeTree` instances. */
- val CompoundTypeTree: CompoundTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `CompoundTypeTree(templ)`.
- * This AST node corresponds to the following Scala code:
- *
- * parent1 with ... with parentN { refinement }
- */
- abstract class CompoundTypeTreeExtractor {
- def apply(templ: Template): CompoundTypeTree
- def unapply(compoundTypeTree: CompoundTypeTree): Option[Template]
- }
-
- /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
- type AppliedTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
-
- /** The constructor/deconstructor for `AppliedTypeTree` instances. */
- val AppliedTypeTree: AppliedTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `AppliedTypeTree(tpt, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * tpt[args]
- */
- abstract class AppliedTypeTreeExtractor {
- def apply(tpt: Tree, args: List[Tree]): AppliedTypeTree
- def unapply(appliedTypeTree: AppliedTypeTree): Option[(Tree, List[Tree])]
- }
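// Editor's sketch (not part of the original sources): assembling the type tree for `List[Int]`
// with the AppliedTypeTree extractor above (runtime universe assumed).
import scala.reflect.runtime.{universe => ru}
val listOfInt = ru.AppliedTypeTree(
  ru.Ident(ru.newTypeName("List")),
  List(ru.Ident(ru.newTypeName("Int"))))
// ru.showRaw(listOfInt) prints roughly:
//   AppliedTypeTree(Ident(newTypeName("List")), List(Ident(newTypeName("Int"))))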
-
- /** A type tree representing type bounds `>: lo <: hi`, as used in type parameter and abstract type declarations. */
- type TypeBoundsTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
-
- /** The constructor/deconstructor for `TypeBoundsTree` instances. */
- val TypeBoundsTree: TypeBoundsTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeBoundsTree(lo, hi)`.
- * This AST node corresponds to the following Scala code:
- *
- * >: lo <: hi
- */
- abstract class TypeBoundsTreeExtractor {
- def apply(lo: Tree, hi: Tree): TypeBoundsTree
- def unapply(typeBoundsTree: TypeBoundsTree): Option[(Tree, Tree)]
- }
-
- /** A type tree representing an existential type `tpt forSome { whereClauses }`. */
- type ExistentialTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
-
- /** The constructor/deconstructor for `ExistentialTypeTree` instances. */
- val ExistentialTypeTree: ExistentialTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `ExistentialTypeTree(tpt, whereClauses)`.
- * This AST node corresponds to the following Scala code:
- *
- * tpt forSome { whereClauses }
- */
- abstract class ExistentialTypeTreeExtractor {
- def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
- def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])]
- }
-
- /** A synthetic tree holding an arbitrary type. Not to be confused
- * with TypTree, the trait for trees that are only used for type trees.
- * TypeTree's are inserted in several places, but most notably in
- * `RefCheck`, where the arbitrary type trees are all replaced by
- * TypeTree's. */
- type TypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeTreeTag: ClassTag[TypeTree]
-
- /** The constructor/deconstructor for `TypeTree` instances. */
- val TypeTree: TypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeTree()`.
- * This AST node does not have direct correspondence to Scala code,
- * and is emitted everywhere we want to wrap a `Type` in a `Tree`.
- */
- abstract class TypeTreeExtractor {
- def apply(): TypeTree
- def unapply(typeTree: TypeTree): Boolean
- }
-
- /** The set of modifiers (flags, visibility qualifier and annotations) attached to a definition. */
- type Modifiers >: Null <: ModifiersBase
-
- /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ModifiersTag: ClassTag[Modifiers]
-
- /** The base API that all modifiers support. */
- abstract class ModifiersBase {
- def flags: FlagSet // default: NoFlags
- def hasFlag(flag: FlagSet): Boolean
- def privateWithin: Name // default: EmptyTypeName
- def annotations: List[Tree] // default: List()
- def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
- Modifiers(flags, privateWithin, f(annotations))
- }
-
- val Modifiers: ModifiersCreator
-
- abstract class ModifiersCreator {
- def apply(): Modifiers = Modifiers(NoFlags, EmptyTypeName, List())
- def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
- }
-
- def Modifiers(flags: FlagSet, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List())
- def Modifiers(flags: FlagSet): Modifiers = Modifiers(flags, EmptyTypeName)
-
- /** An empty `Modifiers` object: no flags, no privateWithin qualifier, no annotations. */
- lazy val NoMods = Modifiers()
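// Editor's sketch (not part of the original sources): constructing Modifiers with the helpers above
// (runtime universe assumed; Flag comes from its FlagSets API).
import scala.reflect.runtime.{universe => ru}
import ru.Flag._
val mods = ru.Modifiers(PRIVATE | LAZY)   // privateWithin and annotations take their defaults
mods.hasFlag(LAZY)                        // true
ru.NoMods.flags == ru.NoFlags             // true: NoMods carries no flags at all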
-
-// ---------------------- factories ----------------------------------------------
-
- /** @param sym the class symbol
- * @param impl the implementation template
- */
- def ClassDef(sym: Symbol, impl: Template): ClassDef
-
- /**
- * @param sym the module symbol
- * @param impl the implementation template
- */
- def ModuleDef(sym: Symbol, impl: Template): ModuleDef
-
- def ValDef(sym: Symbol, rhs: Tree): ValDef
-
- def ValDef(sym: Symbol): ValDef
-
- def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef
-
- /** A TypeDef node which defines the given `sym` with the given right-hand side `rhs`. */
- def TypeDef(sym: Symbol, rhs: Tree): TypeDef
-
- /** A TypeDef node which defines an abstract type or type parameter for the given `sym`. */
- def TypeDef(sym: Symbol): TypeDef
-
- def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef
-
- /** Block factory that flattens directly nested blocks.
- */
- def Block(stats: Tree*): Block
-
- /** casedef shorthand */
- def CaseDef(pat: Tree, body: Tree): CaseDef
-
- def Bind(sym: Symbol, body: Tree): Bind
-
- def Try(body: Tree, cases: (Tree, Tree)*): Try
-
- def Throw(tpe: Type, args: Tree*): Throw
-
- /** Factory method for object creation `new tpt(args_1)...(args_n)`
- * A `New(t, as)` is expanded to: `(new t).<init>(as)`
- */
- def New(tpt: Tree, argss: List[List[Tree]]): Tree
-
- /** 0-1 argument list new, based on a type.
- */
- def New(tpe: Type, args: Tree*): Tree
-
- def New(sym: Symbol, args: Tree*): Tree
-
- def Apply(sym: Symbol, args: Tree*): Tree
-
- def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
-
- def Super(sym: Symbol, mix: TypeName): Tree
-
- def This(sym: Symbol): Tree
-
- def Select(qualifier: Tree, name: String): Select
-
- def Select(qualifier: Tree, sym: Symbol): Select
-
- def Ident(name: String): Ident
-
- def Ident(sym: Symbol): Ident
-
- def TypeTree(tp: Type): TypeTree
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/TypeCreator.scala b/src/library/scala/reflect/base/TypeCreator.scala
deleted file mode 100644
index 8a14e53dd3..0000000000
--- a/src/library/scala/reflect/base/TypeCreator.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala.reflect
-package base
-
-abstract class TypeCreator {
- def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type
-}
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala
deleted file mode 100644
index ec86bbc9be..0000000000
--- a/src/library/scala/reflect/base/TypeTags.scala
+++ /dev/null
@@ -1,277 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-import java.lang.{ Class => jClass }
-import scala.language.implicitConversions
-
-/**
- * Type tags encapsulate a representation of type T.
- * They are supposed to replace the pre-2.10 concept of a [[scala.reflect.Manifest]].
- * TypeTags are much better integrated with reflection than manifests are, and are consequently much simpler.
- *
- * === Overview ===
- *
- * Type tags are organized in a hierarchy of three classes:
- * [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#WeakTypeTag]].
- *
- * A [[scala.reflect.ClassTag]] carries a runtime class that corresponds to the source type T.
- * As such, it possesses the knowledge about how to build single- and multi-dimensional arrays of elements of that type.
- * It guarantees that the source type T does not contain any references to type parameters or abstract types.
- * [[scala.reflect.ClassTag]] corresponds to a previous notion of [[scala.reflect.ClassManifest]].
- *
- * A [[scala.reflect.base.Universe#WeakTypeTag]] value wraps a full Scala type in its tpe field.
- * A [[scala.reflect.base.Universe#TypeTag]] value is an [[scala.reflect.base.Universe#WeakTypeTag]]
- * that is guaranteed not to contain any references to type parameters or abstract types.
- *
- * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
- * [Eugene++] migratability between mirrors and universes is also worth mentioning
- *
- * === Splicing ===
- *
- * Tags can be spliced, i.e. if the compiler generates a tag for a type that contains references to tagged
- * type parameters or abstract type members, it will retrieve the corresponding tag and embed it into the result.
- * As an example that illustrates TypeTag embedding, consider the following function:
- *
- * import reflect.mirror._
- * def f[T: TypeTag, U] = {
- * type L = T => U
- * implicitly[WeakTypeTag[L]]
- * }
- *
- * Then a call of f[String, Int] will yield a result of the form
- *
- * WeakTypeTag(<[ String => U ]>).
- *
- * Note that T has been replaced by String, because it comes with a TypeTag in f, whereas U was left as a type parameter.
- *
- * === WeakTypeTag vs TypeTag ===
- *
- * Be careful with WeakTypeTag, because it will reify types even if these types are abstract.
- * This makes it easy to forget to tag one of the methods in the call chain and to discover it much later at runtime,
- * in the form of cryptic errors far away from their source. For example, consider the following snippet:
- *
- * def bind[T: WeakTypeTag](name: String, value: T): IR.Result = bind((name, value))
- * def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
- * object NamedParam {
- * implicit def namedValue[T: WeakTypeTag](name: String, x: T): NamedParam = apply(name, x)
- * def apply[T: WeakTypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
- * }
- *
- * This fragment of the Scala REPL implementation defines a `bind` function that carries a named value along with its type
- * into the heart of the REPL. Using a [[scala.reflect.base.Universe#WeakTypeTag]] here is reasonable, because it is desirable
- * to work with all types, even if they are type parameters or abstract type members.
- *
- * However if any of the three `WeakTypeTag` context bounds is omitted, the resulting code will be incorrect,
- * because the missing `WeakTypeTag` will be transparently generated by the compiler, carrying meaningless information.
- * Most likely, this problem will manifest itself elsewhere, making debugging complicated.
- * If `WeakTypeTag` context bounds were replaced with `TypeTag`, then such errors would be reported statically.
- * But in that case we wouldn't be able to use `bind` in arbitrary contexts.
- *
- * === Backward compatibility ===
- *
- * Type tags correspond loosely to manifests.
- *
- * More precisely:
- * the previous notion of a [[scala.reflect.ClassManifest]] corresponds to a scala.reflect.ClassTag,
- * and the previous notion of a [[scala.reflect.Manifest]] corresponds to a scala.reflect.runtime.universe.TypeTag.
- *
- * In Scala 2.10, manifests are deprecated, so it's advisable to migrate them to tags,
- * because manifests might be removed in the next major release.
- *
- * In most cases it will be enough to replace ClassManifests with ClassTags and Manifests with TypeTags;
- * however, there are a few caveats:
- *
- * 1) The notion of OptManifest is no longer supported. Tags can reify arbitrary types, so they are always available.
- * // [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
- *
- * 2) There's no equivalent for AnyValManifest. Consider comparing your tag with one of the base tags
- * (defined in the corresponding companion objects) to find out whether it represents a primitive value class.
- * You can also use `<tag>.tpe.typeSymbol.isPrimitiveValueClass` for that purpose (requires scala-reflect.jar).
- *
- * 3) There's no replacement for factory methods defined in `ClassManifest` and `Manifest` companion objects.
- * Consider assembling corresponding types using reflection API provided by Java (for classes) and Scala (for types).
- *
- * 4) Certain manifest functions (such as `<:<`, `>:>` and `typeArguments`) weren't included in the tag API.
- * Consider using reflection API provided by Java (for classes) and Scala (for types) instead.
- */
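// Editor's sketch (not part of the original sources): the practical difference between TypeTag
// and WeakTypeTag described above, assuming scala.reflect.runtime.universe.
import scala.reflect.runtime.universe._

def concreteType[T: TypeTag]  = typeOf[T]       // demands a fully known type
def anyType[T: WeakTypeTag]   = weakTypeOf[T]   // also accepts abstract types

concreteType[List[Int]]                         // List[Int]

def inside[T] = {
  // concreteType[T]    // would not compile: "No TypeTag available for T"
  anyType[T]            // compiles, but the tag only describes the unresolved T
}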
-trait TypeTags { self: Universe =>
-
- import definitions._
-
- /**
- * If an implicit value of type u.WeakTypeTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its tpe field a value of type u.Type that is a reflective representation of T.
- * In that value, any occurrences of type parameters or abstract types U
- * which come themselves with a TypeTag are represented by the type referenced by that TypeTag.
- *
- * @see [[scala.reflect.base.TypeTags]]
- */
- @annotation.implicitNotFound(msg = "No WeakTypeTag available for ${T}")
- trait WeakTypeTag[T] extends Equals with Serializable {
- val mirror: Mirror
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # WeakTypeTag[T]
- def tpe: Type
-
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[WeakTypeTag[_]]
- override def equals(x: Any) = x.isInstanceOf[WeakTypeTag[_]] && this.mirror == x.asInstanceOf[WeakTypeTag[_]].mirror && this.tpe == x.asInstanceOf[WeakTypeTag[_]].tpe
- override def hashCode = mirror.hashCode * 31 + tpe.hashCode
- override def toString = "WeakTypeTag[" + tpe + "]"
- }
-
- object WeakTypeTag {
- val Byte : WeakTypeTag[scala.Byte] = TypeTag.Byte
- val Short : WeakTypeTag[scala.Short] = TypeTag.Short
- val Char : WeakTypeTag[scala.Char] = TypeTag.Char
- val Int : WeakTypeTag[scala.Int] = TypeTag.Int
- val Long : WeakTypeTag[scala.Long] = TypeTag.Long
- val Float : WeakTypeTag[scala.Float] = TypeTag.Float
- val Double : WeakTypeTag[scala.Double] = TypeTag.Double
- val Boolean : WeakTypeTag[scala.Boolean] = TypeTag.Boolean
- val Unit : WeakTypeTag[scala.Unit] = TypeTag.Unit
- val Any : WeakTypeTag[scala.Any] = TypeTag.Any
- val AnyVal : WeakTypeTag[scala.AnyVal] = TypeTag.AnyVal
- val AnyRef : WeakTypeTag[scala.AnyRef] = TypeTag.AnyRef
- val Object : WeakTypeTag[java.lang.Object] = TypeTag.Object
- val Nothing : WeakTypeTag[scala.Nothing] = TypeTag.Nothing
- val Null : WeakTypeTag[scala.Null] = TypeTag.Null
-
- def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
- case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]]
- case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]]
- case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]]
- case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]]
- case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]]
- case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]]
- case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]]
- case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]]
- case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]]
- case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]]
- case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]]
- case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]]
- case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]]
- case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]]
- case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
-
- def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
- }
-
- private class WeakTypeTagImpl[T](val mirror: Mirror, val tpec: TypeCreator) extends WeakTypeTag[T] {
- lazy val tpe: Type = tpec(mirror)
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # WeakTypeTag[T] = {
- val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- otherMirror.universe.WeakTypeTag[T](otherMirror1, tpec)
- }
- private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false)
- }
-
- /**
- * If an implicit value of type u.TypeTag[T] is required, the compiler will make one up on demand following the same procedure as for WeakTypeTags.
- * However, if the resulting type still contains references to type parameters or abstract types, a static error results.
- *
- * @see [[scala.reflect.base.TypeTags]]
- */
- @annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
- trait TypeTag[T] extends WeakTypeTag[T] with Equals with Serializable {
- override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T]
-
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
- override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.mirror == x.asInstanceOf[TypeTag[_]].mirror && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
- override def hashCode = mirror.hashCode * 31 + tpe.hashCode
- override def toString = "TypeTag[" + tpe + "]"
- }
-
- object TypeTag {
- val Byte: TypeTag[scala.Byte] = new PredefTypeTag[scala.Byte] (ByteTpe, _.TypeTag.Byte)
- val Short: TypeTag[scala.Short] = new PredefTypeTag[scala.Short] (ShortTpe, _.TypeTag.Short)
- val Char: TypeTag[scala.Char] = new PredefTypeTag[scala.Char] (CharTpe, _.TypeTag.Char)
- val Int: TypeTag[scala.Int] = new PredefTypeTag[scala.Int] (IntTpe, _.TypeTag.Int)
- val Long: TypeTag[scala.Long] = new PredefTypeTag[scala.Long] (LongTpe, _.TypeTag.Long)
- val Float: TypeTag[scala.Float] = new PredefTypeTag[scala.Float] (FloatTpe, _.TypeTag.Float)
- val Double: TypeTag[scala.Double] = new PredefTypeTag[scala.Double] (DoubleTpe, _.TypeTag.Double)
- val Boolean: TypeTag[scala.Boolean] = new PredefTypeTag[scala.Boolean] (BooleanTpe, _.TypeTag.Boolean)
- val Unit: TypeTag[scala.Unit] = new PredefTypeTag[scala.Unit] (UnitTpe, _.TypeTag.Unit)
- val Any: TypeTag[scala.Any] = new PredefTypeTag[scala.Any] (AnyTpe, _.TypeTag.Any)
- val AnyVal: TypeTag[scala.AnyVal] = new PredefTypeTag[scala.AnyVal] (AnyValTpe, _.TypeTag.AnyVal)
- val AnyRef: TypeTag[scala.AnyRef] = new PredefTypeTag[scala.AnyRef] (AnyRefTpe, _.TypeTag.AnyRef)
- val Object: TypeTag[java.lang.Object] = new PredefTypeTag[java.lang.Object] (ObjectTpe, _.TypeTag.Object)
- val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing)
- val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
-
- def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): TypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
- case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
- case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
- case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]]
- case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]]
- case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]]
- case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]]
- case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
- case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
- case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
- case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
- case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
- case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
- case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
- case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
-
- def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
- }
-
- private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends WeakTypeTagImpl[T](mirror, tpec) with TypeTag[T] {
- override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T] = {
- val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- otherMirror.universe.TypeTag[T](otherMirror1, tpec)
- }
- private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
- }
-
- private class PredefTypeCreator[T](copyIn: Universe => Universe # TypeTag[T]) extends TypeCreator {
- def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type = {
- copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
- }
- }
-
- private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe # TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
- override lazy val tpe: Type = _tpe
- private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
- }
-
- // incantations
- def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
- def typeTag[T](implicit ttag: TypeTag[T]) = ttag
-
- // big thanks to Viktor Klang for this brilliant idea!
- def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
- def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
-}
-
-private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable {
- private def writeObject(out: java.io.ObjectOutputStream): Unit = {
- out.writeObject(tpec)
- out.writeBoolean(concrete)
- }
-
- private def readObject(in: java.io.ObjectInputStream): Unit = {
- tpec = in.readObject().asInstanceOf[TypeCreator]
- concrete = in.readBoolean()
- }
-
- private def readResolve(): AnyRef = {
- import scala.reflect.basis._
- if (concrete) TypeTag(rootMirror, tpec)
- else WeakTypeTag(rootMirror, tpec)
- }
-}
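// Editor's sketch (not part of the original sources): how the writeReplace/readResolve pair above
// lets a tag survive Java serialization in the pre-change tree, where `scala.reflect.basis` still existed.
import java.io._

def roundTrip(x: AnyRef): AnyRef = {
  val buf = new ByteArrayOutputStream()
  val out = new ObjectOutputStream(buf)
  out.writeObject(x)                    // writeReplace swaps the tag for a SerializedTypeTag
  out.close()
  new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray)).readObject()
}                                       // readResolve rebuilds a tag bound to basis.rootMirror
// e.g. roundTrip(scala.reflect.basis.typeTag[List[Int]]) yields another TypeTag[List[Int]]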
diff --git a/src/library/scala/reflect/base/Types.scala b/src/library/scala/reflect/base/Types.scala
deleted file mode 100644
index b016b77f36..0000000000
--- a/src/library/scala/reflect/base/Types.scala
+++ /dev/null
@@ -1,426 +0,0 @@
-package scala.reflect
-package base
-
-trait Types { self: Universe =>
-
- /** The type of Scala types, and also Scala type signatures.
- * (No difference is internally made between the two).
- */
- type Type >: Null <: TypeBase
-
- /** The base API that all types support */
- abstract class TypeBase
-
- /** A tag that preserves the identity of the `Type` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeTagg: ClassTag[Type]
-
- /** This constant is used as a special value that indicates that no meaningful type exists.
- */
- val NoType: Type
-
- /** This constant is used as a special value denoting the empty prefix in a path dependent type.
- * For instance `x.type` is represented as `SingleType(NoPrefix, <x>)`, where `<x>` stands for
- * the symbol for `x`.
- */
- val NoPrefix: Type
-
- /** The type of Scala singleton types, i.e. types that are inhabited
- * by only one non-null value. These include types of the forms
- * {{{
- * C.this.type
- * C.super.type
- * x.type
- * }}}
- * as well as constant types.
- */
- type SingletonType >: Null <: Type
-
- /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SingletonTypeTag: ClassTag[SingletonType]
-
- /** The `ThisType` type describes types of the form on the left with the
- * corresponding ThisType representations to the right.
- * {{{
- * C.this.type ThisType(C)
- * }}}
- */
- type ThisType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ThisTypeTag: ClassTag[ThisType]
-
- /** The constructor/deconstructor for `ThisType` instances. */
- val ThisType: ThisTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `ThisType(sym)`
- * where `sym` is the class prefix of the this type.
- */
- abstract class ThisTypeExtractor {
- def apply(sym: Symbol): Type // not ThisType because of implementation details
- def unapply(tpe: ThisType): Option[Symbol]
- }
-
- /** The `SingleType` type describes types of any of the forms on the left,
- * with their TypeRef representations to the right.
- * {{{
- * (T # x).type SingleType(T, x)
- * p.x.type SingleType(p.type, x)
- * x.type SingleType(NoPrefix, x)
- * }}}
- */
- type SingleType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SingleTypeTag: ClassTag[SingleType]
-
- /** The constructor/deconstructor for `SingleType` instances. */
- val SingleType: SingleTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `SingleType(pre, sym)`
- * Here, `pre` is the prefix of the single-type, and `sym` is the stable value symbol
- * referred to by the single-type.
- */
- abstract class SingleTypeExtractor {
- def apply(pre: Type, sym: Symbol): Type // not SingleType because of implementation details
- def unapply(tpe: SingleType): Option[(Type, Symbol)]
- }
-
- /** The `SuperType` type is not directly written, but arises when `C.super` is used
- * as a prefix in a `TypeRef` or `SingleType`. Its internal representation is
- * {{{
- * SuperType(thistpe, supertpe)
- * }}}
- * Here, `thistpe` is the type of the corresponding this-type. For instance,
- * in the type arising from C.super, the `thistpe` part would be `ThisType(C)`.
- * `supertpe` is the type of the super class referred to by the `super`.
- */
- type SuperType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SuperTypeTag: ClassTag[SuperType]
-
- /** The constructor/deconstructor for `SuperType` instances. */
- val SuperType: SuperTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `SuperType(thistpe, supertpe)`
- */
- abstract class SuperTypeExtractor {
- def apply(thistpe: Type, supertpe: Type): Type // not SuperType because of implementation details
- def unapply(tpe: SuperType): Option[(Type, Type)]
- }
-
- /** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant.
- * The REPL expresses constant types like Int(11). Here are some constants with their types.
- * {{{
- * 1 ConstantType(Constant(1))
- * "abc" ConstantType(Constant("abc"))
- * }}}
- */
- type ConstantType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ConstantTypeTag: ClassTag[ConstantType]
-
- /** The constructor/deconstructor for `ConstantType` instances. */
- val ConstantType: ConstantTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `ConstantType(constant)`
- * Here, `constant` is the constant value represented by the type.
- */
- abstract class ConstantTypeExtractor {
- def apply(value: Constant): ConstantType
- def unapply(tpe: ConstantType): Option[Constant]
- }
-
- /** The `TypeRef` type describes types of any of the forms on the left,
- * with their TypeRef representations to the right.
- * {{{
- * T # C[T_1, ..., T_n] TypeRef(T, C, List(T_1, ..., T_n))
- * p.C[T_1, ..., T_n] TypeRef(p.type, C, List(T_1, ..., T_n))
- * C[T_1, ..., T_n] TypeRef(NoPrefix, C, List(T_1, ..., T_n))
- * T # C TypeRef(T, C, Nil)
- * p.C TypeRef(p.type, C, Nil)
- * C TypeRef(NoPrefix, C, Nil)
- * }}}
- */
- type TypeRef >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeRefTag: ClassTag[TypeRef]
-
- /** The constructor/deconstructor for `TypeRef` instances. */
- val TypeRef: TypeRefExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeRef(pre, sym, args)`
- * Here, `pre` is the prefix of the type reference, `sym` is the symbol
- * referred to by the type reference, and `args` is a possibly empty list of
- * type arguments.
- */
- abstract class TypeRefExtractor {
- def apply(pre: Type, sym: Symbol, args: List[Type]): Type // not TypeRef because of implementation details
- def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
- }
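// Editor's sketch (not part of the original sources): taking a TypeRef apart with the extractor above
// (runtime universe assumed).
import scala.reflect.runtime.{universe => ru}
ru.typeOf[Option[Int]] match {
  case ru.TypeRef(pre, sym, args) =>
    // pre is the prefix (scala.type), sym is the Option class symbol, args == List(Int)
    println(s"$sym applied to $args")
}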
-
- /** A subtype of Type representing refined types as well as `ClassInfo` signatures.
- */
- type CompoundType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val CompoundTypeTag: ClassTag[CompoundType]
-
- /** The `RefinedType` type defines types of any of the forms on the left,
- * with their RefinedType representations to the right.
- * {{{
- * P_1 with ... with P_m { D_1; ...; D_n} RefinedType(List(P_1, ..., P_m), Scope(D_1, ..., D_n))
- * P_1 with ... with P_m RefinedType(List(P_1, ..., P_m), Scope())
- * { D_1; ...; D_n} RefinedType(List(AnyRef), Scope(D_1, ..., D_n))
- * }}}
- */
- type RefinedType >: Null <: AnyRef with CompoundType
-
- /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val RefinedTypeTag: ClassTag[RefinedType]
-
- /** The constructor/deconstructor for `RefinedType` instances. */
- val RefinedType: RefinedTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `RefinedType(parents, decls)`
- * Here, `parents` is the list of parent types of the class, and `decls` is the scope
- * containing all declarations in the class.
- */
- abstract class RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope): RefinedType
-
- /** An alternative constructor that passes in the synthetic class symbol
- * that backs the refined type. (Normally, a fresh class symbol is created automatically).
- */
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
- def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
- }
-
- /** The `ClassInfo` type signature is used to define parents and declarations
- * of classes, traits, and objects. If a class, trait, or object C is declared like this
- * {{{
- * C extends P_1 with ... with P_m { D_1; ...; D_n}
- * }}}
- * its `ClassInfo` type has the following form:
- * {{{
- * ClassInfo(List(P_1, ..., P_m), Scope(D_1, ..., D_n), C)
- * }}}
- */
- type ClassInfoType >: Null <: AnyRef with CompoundType
-
- /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
-
- /** The constructor/deconstructor for `ClassInfoType` instances. */
- val ClassInfoType: ClassInfoTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `ClassInfo(parents, decls, clazz)`
- * Here, `parents` is the list of parent types of the class, `decls` is the scope
- * containing all declarations in the class, and `clazz` is the symbol of the class
- * itself.
- */
- abstract class ClassInfoTypeExtractor {
- def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
- def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
- }
-
- /** The `MethodType` type signature is used to indicate parameters and result type of a method
- */
- type MethodType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MethodTypeTag: ClassTag[MethodType]
-
- /** The constructor/deconstructor for `MethodType` instances. */
- val MethodType: MethodTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `MethodType(params, restpe)`
- * Here, `params` is a potentially empty list of parameter symbols of the method,
- * and `restpe` is the result type of the method. If the method is curried, `restpe` would
- * be another `MethodType`.
- * Note: `MethodType(Nil, Int)` would be the type of a method defined with an empty parameter list.
- * {{{
- * def f(): Int
- * }}}
- * If the method is completely parameterless, as in
- * {{{
- * def f: Int
- * }}}
- * its type is a `NullaryMethodType`.
- */
- abstract class MethodTypeExtractor {
- def apply(params: List[Symbol], resultType: Type): MethodType
- def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
- }
-
- /** The `NullaryMethodType` type signature is used for parameterless methods
- * with declarations of the form `def foo: T`
- */
- type NullaryMethodType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
-
- /** The constructor/deconstructor for `NullaryMethodType` instances. */
- val NullaryMethodType: NullaryMethodTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `NullaryMethodType(resultType)`.
- * Here, `resultType` is the result type of the parameterless method.
- */
- abstract class NullaryMethodTypeExtractor {
- def apply(resultType: Type): NullaryMethodType
- def unapply(tpe: NullaryMethodType): Option[(Type)]
- }
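// Editor's sketch (not part of the original sources): MethodType vs NullaryMethodType as seen
// through member signatures (runtime universe assumed).
import scala.reflect.runtime.{universe => ru}

class C { def f(x: Int): String = x.toString; def g: Int = 1 }

ru.typeOf[C].member(ru.newTermName("f")).typeSignature
// matches ru.MethodType(List(x), restpe) with restpe =:= typeOf[String]
ru.typeOf[C].member(ru.newTermName("g")).typeSignature
// matches ru.NullaryMethodType(restpe) with restpe =:= typeOf[Int]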
-
- /** The `PolyType` type signature is used for polymorphic methods
- * that have at least one type parameter.
- */
- type PolyType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val PolyTypeTag: ClassTag[PolyType]
-
- /** The constructor/deconstructor for `PolyType` instances. */
- val PolyType: PolyTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `PolyType(typeParams, resultType)`.
- * Here, `typeParams` are the type parameters of the method and `resultType`
- * is the type signature following the type parameters.
- */
- abstract class PolyTypeExtractor {
- def apply(typeParams: List[Symbol], resultType: Type): PolyType
- def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
- }
-
- /** The `ExistentialType` type signature is used for existential types and
- * wildcard types.
- */
- type ExistentialType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ExistentialTypeTag: ClassTag[ExistentialType]
-
- /** The constructor/deconstructor for `ExistentialType` instances. */
- val ExistentialType: ExistentialTypeExtractor
-
- /** An extractor class to create and pattern match with syntax
- * `ExistentialType(quantified, underlying)`.
- * Here, `quantified` are the type variables bound by the existential type and `underlying`
- * is the type that's existentially quantified.
- */
- abstract class ExistentialTypeExtractor {
- def apply(quantified: List[Symbol], underlying: Type): ExistentialType
- def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
- }
-
- /** The `AnnotatedType` type signature is used for annotated types of the
- * form `<type> @<annotation>`.
- */
- type AnnotatedType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
-
- /** The constructor/deconstructor for `AnnotatedType` instances. */
- val AnnotatedType: AnnotatedTypeExtractor
-
- /** An extractor class to create and pattern match with syntax
- * `AnnotatedType(annotations, underlying, selfsym)`.
- * Here, `annotations` are the annotations decorating the underlying type `underlying`.
- * `selfsym` is a symbol representing the annotated type itself.
- */
- abstract class AnnotatedTypeExtractor {
- def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
- def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
- }
-
- /** The `TypeBounds` type signature is used to indicate lower and upper type bounds
- * of type parameters and abstract types. It is not a first-class type.
- * If an abstract type or type parameter is declared with any of the forms
- * on the left, its type signature is the TypeBounds type on the right.
- * {{{
- * T >: L <: U TypeBounds(L, U)
- * T >: L TypeBounds(L, Any)
- * T <: U TypeBounds(Nothing, U)
- * }}}
- */
- type TypeBounds >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeBoundsTag: ClassTag[TypeBounds]
-
- /** The constructor/deconstructor for `TypeBounds` instances. */
- val TypeBounds: TypeBoundsExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeBounds(lo, hi)`.
- * Here, `lo` is the lower bound of the `TypeBounds` pair, and `hi` is
- * the upper bound.
- */
- abstract class TypeBoundsExtractor {
- def apply(lo: Type, hi: Type): TypeBounds
- def unapply(tpe: TypeBounds): Option[(Type, Type)]
- }
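// Editor's sketch (not part of the original sources): reading the declared bounds of a method's
// type parameter; the signature of a polymorphic method is a PolyType wrapping its parameter symbols.
import scala.reflect.runtime.{universe => ru}

class C { def f[T <: AnyVal](x: T) = x }

ru.typeOf[C].member(ru.newTermName("f")).typeSignature match {
  case ru.PolyType(List(t), _) =>
    val ru.TypeBounds(lo, hi) = t.typeSignature   // lo =:= Nothing, hi =:= AnyVal
    s">: $lo <: $hi"
}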
-
- /** An object representing an unknown type, used during type inference.
- * If you see WildcardType outside of inference it is almost certainly a bug.
- */
- val WildcardType: Type
-
- /** BoundedWildcardTypes, used only during type inference, are created in
- * two places that I can find:
- *
- * 1. If the expected type of an expression is an existential type,
- * its hidden symbols are replaced with bounded wildcards.
- * 2. When an implicit conversion is being sought based in part on
- * the name of a method in the converted type, a HasMethodMatching
- * type is created: a MethodType with parameters typed as
- * BoundedWildcardTypes.
- */
- type BoundedWildcardType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
-
- val BoundedWildcardType: BoundedWildcardTypeExtractor
-
- abstract class BoundedWildcardTypeExtractor {
- def apply(bounds: TypeBounds): BoundedWildcardType
- def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
- }
-}
diff --git a/src/library/scala/reflect/base/Universe.scala b/src/library/scala/reflect/base/Universe.scala
deleted file mode 100644
index 18599ad092..0000000000
--- a/src/library/scala/reflect/base/Universe.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.reflect
-package base
-
-abstract class Universe extends Symbols
- with Types
- with FlagSets
- with Scopes
- with Names
- with Trees
- with Constants
- with AnnotationInfos
- with Positions
- with Exprs
- with TypeTags
- with TagInterop
- with StandardDefinitions
- with StandardNames
- with BuildUtils
- with Mirrors
-{
- /** Given an expression, generate a tree that when compiled and executed produces the original tree.
- * The produced tree will be bound to the Universe it was called from.
- *
- * For instance, given the abstract syntax tree representation of the <[ x + 1 ]> expression:
- *
- * {{{
- * Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
- * }}}
- *
- * The reifier transforms it to the following expression:
- *
- * {{{
- * <[
- * val $u: u.type = u // where u is a reference to the Universe that calls the reify
- * $u.Expr[Int]($u.Apply($u.Select($u.Ident($u.newFreeVar("x", <Int>, x), "+"), List($u.Literal($u.Constant(1))))))
- * ]>
- * }}}
- *
- * Reification performs expression splicing (when processing Expr.splice)
- * and type splicing (for every type T that has a TypeTag[T] implicit in scope):
- *
- * {{{
- * val two = mirror.reify(2) // Literal(Constant(2))
- * val four = mirror.reify(two.splice + two.splice) // Apply(Select(two.tree, newTermName("$plus")), List(two.tree))
- *
- * def macroImpl[T](c: Context) = {
- * ...
- * // T here is just a type parameter, so the tree produced by reify won't be of much use in a macro expansion
- * // however, if T were annotated with c.WeakTypeTag (which would declare an implicit parameter for macroImpl)
- * // then reification would substitute T with the TypeTree that was used in a TypeApply of this particular macro invocation
- * val factory = c.reify{ new Queryable[T] }
- * ...
- * }
- * }}}
- *
- * The transformation looks mostly straightforward, but it has its tricky parts:
- * * Reifier retains symbols and types defined outside the reified tree, however
- * locally defined entities get erased and replaced with their original trees
- * * Free variables are detected and wrapped in symbols of the type FreeVar
- * * Mutable variables that are accessed from a local function are wrapped in refs
- * * Since reified trees can be compiled outside of the scope they've been created in,
- * special measures are taken to ensure that all members accessed in the reifee remain visible
- */
- // implementation is magically hardwired to `scala.reflect.reify.Taggers`
- def reify[T](expr: T): Expr[T] = ??? // macro
-} \ No newline at end of file
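// Editor's sketch (not part of the original sources): reify and splicing as documented above
// (runtime universe assumed).
import scala.reflect.runtime.universe._

val two  = reify(2)                          // Expr[Int] wrapping Literal(Constant(2))
val four = reify(two.splice + two.splice)    // splicing reuses two.tree on both sides
println(showRaw(four.tree))                  // roughly: Apply(Select(Literal(Constant(2)), newTermName("$plus")), List(Literal(Constant(2))))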
diff --git a/src/library/scala/reflect/macros/internal/package.scala b/src/library/scala/reflect/macros/internal/package.scala
deleted file mode 100644
index aca2b765f1..0000000000
--- a/src/library/scala/reflect/macros/internal/package.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package scala.reflect.macros
-
-import scala.reflect.base.{Universe => BaseUniverse}
-import scala.reflect.ClassTag
-
-// anchors for materialization macros emitted during tag materialization in Implicits.scala
-// implementation is magically hardwired into `scala.reflect.reify.Taggers`
-// todo. once we have implicit macros for tag generation, we can remove these anchors
-package object internal {
- private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = ??? // macro
- private[scala] def materializeWeakTypeTag[T](u: BaseUniverse): u.WeakTypeTag[T] = ??? // macro
- private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = ??? // macro
-}
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index d97f2ec633..4f1cc03dc8 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -2,8 +2,6 @@ package scala
package object reflect {
- lazy val basis: base.Universe = new base.Base
-
// in the new scheme of things ClassManifests are aliased to ClassTags
// this is done because we want `toArray` in collections work with ClassTags
// but changing it to use the ClassTag context bound without aliasing ClassManifest
@@ -42,13 +40,12 @@ package object reflect {
val Manifest = ManifestFactory
def classTag[T](implicit ctag: ClassTag[T]) = ctag
- // typeTag incantation is defined inside scala.reflect.basis and scala.reflect.runtime.universe
-
- // ClassTag class is defined in ClassTag.scala
- type TypeTag[T] = scala.reflect.basis.TypeTag[T]
- // ClassTag object is defined in ClassTag.scala
- lazy val TypeTag = scala.reflect.basis.TypeTag
+ // anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala
+ // implementation is hardwired into `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ // todo. once we have implicit macros for tag generation, we can remove this anchor
+ private[scala] def materializeClassTag[T](): ClassTag[T] = ??? // macro
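// Editor's sketch (not part of the patch): what the anchor above ultimately powers — a ClassTag
// context bound is materialized by the compiler at each call site.
import scala.reflect.{ClassTag, classTag}

def mkArray[T: ClassTag](xs: T*): Array[T] = Array(xs: _*)   // needs the materialized ClassTag[T]
mkArray(1, 2, 3)                     // Array[Int], backed by a primitive int[]
classTag[String].runtimeClass        // classOf[String]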
@deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
type BeanDescription = scala.beans.BeanDescription
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index 19396a3d48..d7d2603ef7 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] {
protected def num = scala.math.Numeric.DoubleIsFractional
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index 9fbb3c19bb..9c3a14d3be 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] {
protected def num = scala.math.Numeric.FloatIsFractional
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index ad36006646..1e3fa78af3 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
@@ -15,6 +16,7 @@ import scala.collection.generic.{ Sorted }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import scala.xml.{ Node, MetaData }
+import java.lang.{ Class => jClass }
import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
@@ -27,10 +29,10 @@ object ScalaRunTime {
def isArray(x: Any, atLevel: Int = 1): Boolean =
x != null && isArrayClass(x.getClass, atLevel)
- private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
+ private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
+ def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
def isTuple(x: Any) = x != null && tupleNames(x.getClass.getName)
def isAnyVal(x: Any) = x match {
case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
@@ -49,7 +51,7 @@ object ScalaRunTime {
/** Return the class object representing an array with element class `clazz`.
*/
- def arrayClass(clazz: Class[_]): Class[_] = {
+ def arrayClass(clazz: jClass[_]): jClass[_] = {
// newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
else java.lang.reflect.Array.newInstance(clazz, 0).getClass
@@ -57,18 +59,19 @@ object ScalaRunTime {
/** Return the class object representing elements in arrays described by a given schematic.
*/
- def arrayElementClass(schematic: Any): Class[_] = schematic match {
- case cls: Class[_] => cls.getComponentType
+ def arrayElementClass(schematic: Any): jClass[_] = schematic match {
+ case cls: jClass[_] => cls.getComponentType
case tag: ClassTag[_] => tag.runtimeClass
- case _ => throw new UnsupportedOperationException("unsupported schematic %s (%s)".format(schematic, if (schematic == null) "null" else schematic.getClass))
+ case _ =>
+ throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
}
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
- def anyValClass[T <: AnyVal : ClassTag](value: T): Class[T] =
- classTag[T].runtimeClass.asInstanceOf[Class[T]]
+ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
+ classTag[T].runtimeClass.asInstanceOf[jClass[T]]
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = xs match {
@@ -204,12 +207,12 @@ object ScalaRunTime {
// Note that these are the implementations called by ##, so they
// must not call ## themselves.
- @inline def hash(x: Any): Int =
+ def hash(x: Any): Int =
if (x == null) 0
else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
else x.hashCode
- @inline def hash(dv: Double): Int = {
+ def hash(dv: Double): Int = {
val iv = dv.toInt
if (iv == dv) return iv
@@ -219,7 +222,7 @@ object ScalaRunTime {
val fv = dv.toFloat
if (fv == dv) fv.hashCode else dv.hashCode
}
- @inline def hash(fv: Float): Int = {
+ def hash(fv: Float): Int = {
val iv = fv.toInt
if (iv == fv) return iv
@@ -227,22 +230,22 @@ object ScalaRunTime {
if (lv == fv) return hash(lv)
else fv.hashCode
}
- @inline def hash(lv: Long): Int = {
+ def hash(lv: Long): Int = {
val low = lv.toInt
val lowSign = low >>> 31
val high = (lv >>> 32).toInt
low ^ (high + lowSign)
}
- @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+ def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
// The remaining overloads are here for completeness, but the compiler
// inlines these definitions directly so they're not generally used.
- @inline def hash(x: Int): Int = x
- @inline def hash(x: Short): Int = x.toInt
- @inline def hash(x: Byte): Int = x.toInt
- @inline def hash(x: Char): Int = x.toInt
- @inline def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
- @inline def hash(x: Unit): Int = 0
+ def hash(x: Int): Int = x
+ def hash(x: Short): Int = x.toInt
+ def hash(x: Byte): Int = x.toInt
+ def hash(x: Char): Int = x.toInt
+ def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
+ def hash(x: Unit): Int = 0
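// Editor's sketch (not part of the patch): `##` routes through the hash overloads above, so equal
// numeric values hash consistently across Int, Long and Double, unlike plain hashCode.
assert(1.## == 1L.## && 1L.## == 1.0.##)   // all three are 1
assert(1.0.hashCode != (1: Int).hashCode)  // Double#hashCode uses the raw bit pattern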
/** A helper method for constructing case class equality methods,
* because existential types get in the way of a clean outcome and
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index dc9594d960..8cb958c05f 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import java.util.Arrays.copyOfRange
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 4693b0bf44..f074b5407e 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -10,10 +10,5 @@ package scala.runtime
/** A wrapper class that adds string concatenation `+` to any value */
final class StringAdd(val self: Any) extends AnyVal {
- // Note: The implicit conversion from Any to StringAdd is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
def +(other: String) = String.valueOf(self) + other
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 1f5feec9e1..7d34e82812 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -11,12 +11,6 @@ package scala.runtime
/** A wrapper class that adds a `formatted` operation to any value
*/
final class StringFormat(val self: Any) extends AnyVal {
- // Note: The implicit conversion from Any to StringFormat is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
-
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
* (@see java.lang.String.format).
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index bd52c678af..6030c9ea90 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -21,7 +21,7 @@ trait ZippedTraversable2[+El1, +El2] extends Any {
}
object ZippedTraversable2 {
implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = {
- new collection.AbstractTraversable[(El1, El2)] {
+ new scala.collection.AbstractTraversable[(El1, El2)] {
def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
}
}
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 3b78b6261a..3970c9973d 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -18,7 +18,7 @@ trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
}
object ZippedTraversable3 {
implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = {
- new collection.AbstractTraversable[(El1, El2, El3)] {
+ new scala.collection.AbstractTraversable[(El1, El2, El3)] {
def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
}
}
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 687a32cf7d..123a729748 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A lightweight interface wrapping a property contained in some
* unspecified map. Generally it'll be the system properties but this
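
A small usage sketch (not part of the diff): the system-properties map mentioned in the comment is reachable via `scala.sys.props`.

    object PropDemo extends App {
      // Reads a JVM system property through the sys.props wrapper.
      println(scala.sys.props("java.version"))
    }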
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 77e36f6196..94a2125393 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -45,7 +45,7 @@ object BasicIO {
val q = new LinkedBlockingQueue[Either[Int, T]]
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
- case Left(code) => if (nonzeroException) sys.error("Nonzero exit code: " + code) else Stream.empty
+ case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
case Right(s) => Stream.cons(s, next)
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
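
A self-contained sketch of the pattern used in the hunk above: elements pushed onto a blocking queue are drained lazily into a `Stream` that terminates on `Left(exitCode)`. Names here are illustrative, not part of the library.

    import java.util.concurrent.LinkedBlockingQueue

    object StreamedSketch extends App {
      val q = new LinkedBlockingQueue[Either[Int, String]]
      def next(): Stream[String] = q.take() match {
        case Left(0)    => Stream.empty
        case Left(code) => scala.sys.error("Nonzero exit code: " + code)
        case Right(s)   => Stream.cons(s, next())
      }
      q.put(Right("line 1")); q.put(Right("line 2")); q.put(Left(0))
      next() foreach println   // prints both lines, then stops at Left(0)
    }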
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 58f06e1039..2c83a59e4f 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -128,7 +128,7 @@ private[process] trait ProcessBuilderImpl {
val code = this ! BasicIO(withIn, buffer, log)
if (code == 0) buffer.toString
- else sys.error("Nonzero exit value: " + code)
+ else scala.sys.error("Nonzero exit value: " + code)
}
private[this] def lines(
@@ -213,4 +213,4 @@ private[process] trait ProcessBuilderImpl {
) extends SequentialBuilder(first, second, "###") {
override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io)
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index b7549eeb06..cdf7d72caa 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -84,7 +84,7 @@ private[process] trait ProcessImpl {
private[process] abstract class CompoundProcess extends BasicProcess {
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse sys.error("No exit code: process destroyed.")
+ def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
def start() = getExitValue
protected lazy val (getExitValue, destroyer) = {
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index da1868c7f6..c6c58f5da2 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -37,7 +37,7 @@ trait Show {
}
}
- @deprecated("use SymApply instead", "2.10")
+ @deprecated("use SymApply instead", "2.10.0")
def symApply(sym: Symbol): SymApply = new SymApply(sym)
/** Apply method with name of given symbol `f` to given arguments and return
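
The "2.10" → "2.10.0" edits here and in several later hunks normalize the `since` argument of `@deprecated` to a full point release; a minimal illustration with a hypothetical method:

    object DeprecationDemo {
      @deprecated("use newApi instead", "2.10.0")
      def oldApi(): Int = 1

      def newApi(): Int = 1
    }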
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 0aa0d31c9f..02dffb00b0 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -14,7 +14,7 @@ package scala
* {{{
* class Reader(fname: String) {
* private val in = new BufferedReader(new FileReader(fname))
- * @throws(classOf[IOException])
+ * @throws[IOException]("if the file doesn't exist")
* def read() = in.read()
* }
* }}}
@@ -23,4 +23,6 @@ package scala
* @version 1.0, 19/05/2006
* @since 2.1
*/
-class throws(clazz: Class[_]) extends scala.annotation.StaticAnnotation
+class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation {
+ def this(clazz: Class[T]) = this()
+}
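
A hedged sketch of the updated annotation in use, mirroring the doc example above: a type argument plus an optional cause string, while the `Class[_]` constructor keeps old call sites compiling.

    import java.io.{ BufferedReader, FileReader, IOException }

    class Reader(fname: String) {
      private val in = new BufferedReader(new FileReader(fname))

      @throws[IOException]("if the file doesn't exist")
      def read(): Int = in.read()
    }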
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index f0253eee07..51342eda78 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -221,7 +221,7 @@ object Either {
case Right(a) => a
}
}
- @deprecated("use MergeableEither instead", "2.10")
+ @deprecated("use MergeableEither instead", "2.10.0")
def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
/**
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 5f0edf964f..276e157f55 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.reflect.{ ClassTag, classTag }
-import scala.math.Ordering
+import scala.math.{ Ordering, max, min }
/** The Sorting object provides functions that can sort various kinds of
* objects. You can provide a comparison function, or you can request a sort
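
A minimal usage sketch (not part of the diff): quickSort sorts an array in place.

    object SortingDemo extends App {
      val xs = Array(3, 1, 2)
      scala.util.Sorting.quickSort(xs)
      println(xs.mkString(", "))   // 1, 2, 3
    }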
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 1cdcd734cd..25ac86183c 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -19,8 +19,8 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
- var indexMap = collection.Map[immutable.BitSet, Int]()
- var invIndexMap = collection.Map[Int, immutable.BitSet]()
+ var indexMap = scala.collection.Map[immutable.BitSet, Int]()
+ var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
var ix = 0
// we compute the dfa with states = bitsets
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index c2b5dbca22..4409358785 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A trait for exceptions which, for efficiency reasons, do not
* fill in the stack trace. Stack trace suppression can be disabled
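
A minimal sketch of the trait in use (not part of the diff): mixing it in keeps construction of control-flow exceptions cheap by suppressing the stack trace.

    import scala.util.control.NoStackTrace

    case class QuietFailure(msg: String) extends RuntimeException(msg) with NoStackTrace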
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index 84b549f35e..97d32af2b0 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -8,6 +8,8 @@
package scala.util.hashing
+import scala.annotation.implicitNotFound
+
/** `Hashing` is a trait whose instances each represent a strategy for hashing
* instances of a type.
*
@@ -16,27 +18,22 @@ package scala.util.hashing
*
* Note: when using a custom `Hashing`, make sure to use it with the `Equiv`
* such that if any two objects are equal, then their hash codes must be equal.
- *
+ *
* @since 2.10
*/
-@annotation.implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
+@implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
trait Hashing[T] extends Serializable {
-
def hash(x: T): Int
-
}
-
object Hashing {
-
final class Default[T] extends Hashing[T] {
def hash(x: T) = x.##
}
-
+
implicit def default[T] = new Default[T]
-
+
def fromFunction[T](f: T => Int) = new Hashing[T] {
def hash(x: T) = f(x)
}
-
}
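
A hedged usage sketch for the API above: a custom strategy built with `fromFunction`, alongside the default `##`-based one.

    import scala.util.hashing.Hashing

    object HashingDemo extends App {
      val byLength: Hashing[String] = Hashing.fromFunction(_.length)
      println(byLength.hash("scala"))          // 5
      println(Hashing.default[Int].hash(42))   // same as 42.##
    }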
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index e6c9573756..5d990eee78 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -178,7 +178,7 @@ trait Parsers {
def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
- def get: Nothing = sys.error("No result when parsing failed")
+ def get: Nothing = scala.sys.error("No result when parsing failed")
}
/** An extractor so `NoSuccess(msg, next)` can be used in matches. */
object NoSuccess {
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index 2ca1dbfcd0..a92d7859c3 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -59,7 +59,7 @@ class Elem(
val child: Node*)
extends Node with Serializable
{
- @deprecated("This constructor is retained for backward compatibility. Please use the primary constructor, which lets you specify your own preference for `minimizeEmpty`.", "2.10")
+ @deprecated("This constructor is retained for backward compatibility. Please use the primary constructor, which lets you specify your own preference for `minimizeEmpty`.", "2.10.0")
def this(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) = {
this(prefix, label, attributes, scope, child.isEmpty, child: _*)
}
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 50a284d7cd..b390235d59 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -175,7 +175,7 @@ object Utility extends AnyRef with parsing.TokenTests {
* Note that calling this source-compatible method will result in the same old, arguably almost universally unwanted,
* behaviour.
*/
- @deprecated("Please use `serialize` instead and specify a `minimizeTags` parameter", "2.10")
+ @deprecated("Please use `serialize` instead and specify a `minimizeTags` parameter", "2.10.0")
def toXML(
x: Node,
pscope: NamespaceBinding = TopScope,
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 2d87bc0764..5d183df04b 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.xml
package dtd
@@ -21,10 +20,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def accept(tok: Int) = {
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
- sys.error("in DTDs, \n"+
+ scala.sys.error("in DTDs, \n"+
"mixed content models must be like (#PCDATA|Name|Name|...)*");
else
- sys.error("expected "+token2string(tok)+
+ scala.sys.error("expected "+token2string(tok)+
", got unexpected token:"+token2string(token));
}
nextToken
@@ -45,7 +44,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
}
case LPAREN =>
@@ -65,12 +64,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
accept( STAR );
res
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
}
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
// sopt ::= S?
def sOpt() = if( token == S ) nextToken;
@@ -118,12 +117,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def particle = token match {
case LPAREN => nextToken; sOpt; regexp;
case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => sys.error("expected '(' or Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
}
// atom ::= name
def atom = token match {
case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => sys.error("expected Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected Name, got:"+token2string(token));
}
}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 82a8d1af2f..2e753a7590 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -44,7 +44,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
}
final def accS(ds: Seq[Char]) { ds foreach acc }
@@ -65,7 +65,7 @@ class Scanner extends Tokens with parsing.TokenTests {
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
- else sys.error("unexpected character:" + c)
+ else scala.sys.error("unexpected character:" + c)
}
final def name = {
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 61d4855b2e..c543b8751b 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -18,7 +18,7 @@ trait NodeFactory[A <: Node] {
val ignoreProcInstr = false
/* default behaviour is to use hash-consing */
- val cache = new collection.mutable.HashMap[Int, List[A]]
+ val cache = new scala.collection.mutable.HashMap[Int, List[A]]
protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
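
The cache above implements hash-consing: nodes are bucketed by hash code and an equal, previously built node is reused instead of allocating a new one. A simplified, hypothetical sketch of the idea, using strings in place of XML nodes:

    object HashConsSketch {
      private val cache = new scala.collection.mutable.HashMap[Int, List[String]]

      def intern(s: String): String = {
        val bucket = cache.getOrElse(s.##, Nil)
        bucket.find(_ == s) getOrElse {
          cache(s.##) = s :: bucket
          s
        }
      }
    }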
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index f4d69ffe44..2af66f4f16 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -62,7 +62,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
val value = atts.getValue(i);
// @todo Need to use character references if the encoding
// can't support the character
- out.write(xml.Utility.escape(value))
+ out.write(scala.xml.Utility.escape(value))
out.write("'");
i += 1
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index af9b5f47cf..d4dc6da14d 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -56,7 +56,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
// See ticket #3720 for motivations.
private class WithLookAhead(underlying: Source) extends Source {
- private val queue = collection.mutable.Queue[Char]()
+ private val queue = scala.collection.mutable.Queue[Char]()
def lookahead(): BufferedIterator[Char] = {
val iter = queue.iterator ++ new Iterator[Char] {
def hasNext = underlying.hasNext
@@ -897,7 +897,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
new PublicID(pubID, sysID)
} else {
reportSyntaxError("PUBLIC or SYSTEM expected");
- sys.error("died parsing notationdecl")
+ scala.sys.error("died parsing notationdecl")
}
xSpaceOpt
xToken('>')
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index 096f8a8f38..219c3d6679 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -21,7 +21,7 @@ import Utility.SU
* All members should be accessed through those.
*/
private[scala] trait MarkupParserCommon extends TokenTests {
- protected def unreachable = sys.error("Cannot be reached.")
+ protected def unreachable = scala.sys.error("Cannot be reached.")
// type HandleType // MarkupHandler, SymbolicXMLBuilder
type InputType // Source, CharArrayReader
@@ -82,7 +82,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
case `end` => return buf.toString
case ch => buf append ch
}
- sys.error("Expected '%s'".format(end))
+ scala.sys.error("Expected '%s'".format(end))
}
/** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'