From ddb3b889b5fa55760167d9995d2f1549e2542b71 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Tue, 11 Sep 2012 23:40:11 +0200
Subject: A little cleanup along the Any to AnyRef trail.

Followup to 35316be and d3f879a.

- Remove obsolete comments and replace them with a test.
- Don't emit error addendum unless we know we're dealing with a value class.
---
 src/library/scala/runtime/StringAdd.scala    | 5 -----
 src/library/scala/runtime/StringFormat.scala | 6 ------
 2 files changed, 11 deletions(-)
(limited to 'src/library/scala/runtime')

diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 4693b0bf44..f074b5407e 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -10,10 +10,5 @@ package scala.runtime
 
 /** A wrapper class that adds string concatenation `+` to any value */
 final class StringAdd(val self: Any) extends AnyVal {
-  // Note: The implicit conversion from Any to StringAdd is one of two
-  // implicit conversions from Any to AnyRef in Predef. It is important to have at least
-  // two such conversions, so that silent conversions from value types to AnyRef
-  // are avoided. If StringFormat should become a value class, another
-  // implicit conversion from Any to AnyRef has to be introduced in Predef
   def +(other: String) = String.valueOf(self) + other
 }
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 1f5feec9e1..7d34e82812 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -11,12 +11,6 @@ package scala.runtime
 
 /** A wrapper class that adds a `formatted` operation to any value */
 final class StringFormat(val self: Any) extends AnyVal {
-  // Note: The implicit conversion from Any to StringFormat is one of two
-  // implicit conversions from Any to AnyRef in Predef. It is important to have at least
-  // two such conversions, so that silent conversions from value types to AnyRef
-  // are avoided. If StringFormat should become a value class, another
-  // implicit conversion from Any to AnyRef has to be introduced in Predef
-
   /** Returns string formatted according to given `format` string.
    * Format strings are as for `String.format`
    * (@see java.lang.String.format).
--
cgit v1.2.3


From ea0d891f238082089be037a0752215d9d21893cf Mon Sep 17 00:00:00 2001
From: Paul Phillips
Date: Sat, 15 Sep 2012 14:09:46 -0700
Subject: More relative path elimination.

Some names I missed in 55b609458fd .

How one might know when one is done:

  mkdir scratch && cd scratch
  mkdir annotation beans collection compat concurrent io \
        math parallel ref reflect runtime scala sys testing \
        text tools util xml
  scalac $(find ../src/library -name '*.scala')

Until recently that would fail with about a billion errors.
When it compiles, that's when you're done.  And that's where
this commit takes us, for src/library at least.
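The kind of breakage these qualifications guard against can be reproduced with a
small sketch (the package names `example`, `util`, and `app` below are invented
for illustration and are not part of this commit):

  // A sibling package named `util` shadows scala.util for everything nested
  // inside `example`, because members of enclosing packages take precedence
  // over the implicit `import scala._`.
  package example {
    package util {
      object Config
    }
    package app {
      object Demo {
        // Relative reference: `util` binds to example.util here, so this
        // line would not compile -- example.util.Random does not exist.
        //   val rng = new util.Random
        // Fully qualified reference: immune to whatever `util`, `io`, `math`
        // or `collection` happens to mean in the enclosing scope.
        val rng = new scala.util.Random
      }
    }
  }

The scratch-directory recipe above appears to stress the same weakness: each
empty directory becomes a candidate top-level package with a popular name, so
any relative reference still left in src/library gets flagged by the compiler
instead of silently resolving through package scala.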
--- src/actors/scala/actors/Future.scala | 2 +- .../scala/reflect/reify/phases/Metalevels.scala | 4 +-- .../scala/reflect/reify/phases/Reshape.scala | 4 +-- .../scala/reflect/reify/utils/NodePrinters.scala | 6 ++-- .../scala/tools/ant/sabbus/Compilers.scala | 2 +- src/compiler/scala/tools/cmd/package.scala | 4 +-- src/compiler/scala/tools/nsc/ast/Trees.scala | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 +- .../scala/tools/nsc/ast/parser/Scanners.scala | 4 +-- .../PresentationCompilerRequestsWorkingMode.scala | 6 ++-- .../scala/tools/nsc/interpreter/package.scala | 2 +- src/compiler/scala/tools/nsc/io/package.scala | 2 +- .../scala/tools/nsc/transform/TailCalls.scala | 4 +-- .../tools/nsc/typechecker/DestructureTypes.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 4 +-- .../scala/tools/nsc/typechecker/Macros.scala | 6 ++-- .../tools/nsc/typechecker/PatternMatching.scala | 34 ++++++++++---------- .../scala/tools/nsc/typechecker/Typers.scala | 6 ++-- src/compiler/scala/tools/nsc/util/package.scala | 2 +- .../scala/tools/reflect/ToolBoxFactory.scala | 4 +-- src/library/scala/Product.scala | 2 +- src/library/scala/collection/GenMapLike.scala | 2 +- src/library/scala/collection/GenSeqLike.scala | 4 +-- src/library/scala/collection/GenSetLike.scala | 2 +- src/library/scala/collection/IndexedSeqLike.scala | 2 +- .../scala/collection/IndexedSeqOptimized.scala | 5 ++- src/library/scala/collection/IterableLike.scala | 4 +-- src/library/scala/collection/Iterator.scala | 3 +- src/library/scala/collection/LinearSeqLike.scala | 2 +- src/library/scala/collection/SeqLike.scala | 5 +-- src/library/scala/collection/TraversableLike.scala | 2 +- .../scala/collection/concurrent/TrieMap.scala | 2 +- .../scala/collection/generic/FilterMonadic.scala | 2 +- .../scala/collection/generic/ParFactory.scala | 2 +- .../scala/collection/generic/SliceInterval.scala | 2 +- .../scala/collection/immutable/HashMap.scala | 3 +- .../scala/collection/immutable/HashSet.scala | 3 +- .../scala/collection/immutable/IntMap.scala | 3 +- src/library/scala/collection/immutable/List.scala | 2 +- .../scala/collection/immutable/LongMap.scala | 5 +-- .../scala/collection/immutable/PagedSeq.scala | 2 +- .../scala/collection/immutable/RedBlack.scala | 3 +- .../scala/collection/immutable/RedBlackTree.scala | 3 +- .../collection/immutable/StreamViewLike.scala | 4 +-- .../scala/collection/immutable/Vector.scala | 3 +- .../scala/collection/immutable/package.scala | 4 +-- src/library/scala/collection/mutable/AVLTree.scala | 3 +- .../scala/collection/mutable/ArrayOps.scala | 5 +-- .../scala/collection/mutable/ArrayStack.scala | 3 +- src/library/scala/collection/mutable/Builder.scala | 3 +- .../scala/collection/mutable/FlatHashTable.scala | 8 ++--- .../scala/collection/mutable/HashTable.scala | 2 +- .../scala/collection/mutable/OpenHashMap.scala | 3 +- .../scala/collection/mutable/PriorityQueue.scala | 10 +++--- .../scala/collection/mutable/ResizableArray.scala | 3 +- .../collection/parallel/ParIterableLike.scala | 2 +- src/library/scala/collection/parallel/ParMap.scala | 37 ---------------------- .../collection/parallel/RemainsIterator.scala | 2 +- src/library/scala/collection/parallel/Tasks.scala | 2 +- .../collection/parallel/immutable/ParHashMap.scala | 4 +-- .../collection/parallel/immutable/ParHashSet.scala | 2 +- .../parallel/immutable/ParIterable.scala | 4 +-- .../collection/parallel/immutable/ParMap.scala | 4 +-- .../collection/parallel/immutable/ParSeq.scala | 4 +-- .../collection/parallel/immutable/ParSet.scala | 4 +-- 
.../collection/parallel/mutable/ParArray.scala | 5 +-- .../collection/parallel/mutable/ParHashMap.scala | 8 ++--- .../collection/parallel/mutable/ParHashSet.scala | 6 ++-- .../collection/parallel/mutable/ParHashTable.scala | 2 +- .../collection/parallel/mutable/ParIterable.scala | 2 +- .../scala/collection/parallel/mutable/ParMap.scala | 4 +-- .../scala/collection/parallel/mutable/ParSeq.scala | 4 +-- .../scala/collection/parallel/mutable/ParSet.scala | 4 +-- .../mutable/ResizableParArrayCombiner.scala | 2 +- .../mutable/UnrolledParArrayCombiner.scala | 14 ++------ .../scala/collection/parallel/package.scala | 7 ++-- src/library/scala/compat/Platform.scala | 2 +- src/library/scala/io/Codec.scala | 2 +- src/library/scala/io/Position.scala | 2 +- src/library/scala/language.scala | 8 ++--- src/library/scala/math/Ordering.scala | 3 +- src/library/scala/reflect/ClassTag.scala | 5 +-- src/library/scala/reflect/Manifest.scala | 4 +-- src/library/scala/reflect/NameTransformer.scala | 3 +- src/library/scala/reflect/base/TypeTags.scala | 3 +- src/library/scala/runtime/RichDouble.scala | 3 +- src/library/scala/runtime/RichFloat.scala | 3 +- src/library/scala/runtime/ScalaRunTime.scala | 3 +- src/library/scala/runtime/SeqCharSequence.scala | 3 +- src/library/scala/runtime/Tuple2Zipped.scala | 2 +- src/library/scala/runtime/Tuple3Zipped.scala | 2 +- src/library/scala/sys/Prop.scala | 3 +- src/library/scala/sys/process/BasicIO.scala | 2 +- .../scala/sys/process/ProcessBuilderImpl.scala | 4 +-- src/library/scala/sys/process/ProcessImpl.scala | 2 +- src/library/scala/util/Sorting.scala | 5 +-- .../scala/util/automata/SubsetConstruction.scala | 4 +-- src/library/scala/util/control/NoStackTrace.scala | 3 +- src/library/scala/util/hashing/Hashing.scala | 15 ++++----- .../scala/util/parsing/combinator/Parsers.scala | 2 +- src/library/scala/xml/dtd/ContentModelParser.scala | 15 ++++----- src/library/scala/xml/dtd/Scanner.scala | 4 +-- src/library/scala/xml/factory/NodeFactory.scala | 2 +- src/library/scala/xml/include/sax/XIncluder.scala | 2 +- src/library/scala/xml/parsing/MarkupParser.scala | 4 +-- .../scala/xml/parsing/MarkupParserCommon.scala | 4 +-- src/partest/scala/tools/partest/package.scala | 6 ++-- src/reflect/scala/reflect/api/FrontEnds.scala | 4 +-- src/reflect/scala/reflect/internal/Importers.scala | 2 +- src/reflect/scala/reflect/internal/Printers.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 2 +- src/scalacheck/org/scalacheck/Commands.scala | 4 +-- src/scalacheck/org/scalacheck/Pretty.scala | 2 +- .../org/scalacheck/util/CmdLineParser.scala | 2 +- .../scala/tools/scalap/scalax/rules/SeqRule.scala | 2 +- .../scala/tools/scalap/scalax/rules/package.scala | 8 ++--- src/swing/scala/swing/package.scala | 4 +-- 118 files changed, 238 insertions(+), 262 deletions(-) (limited to 'src/library/scala/runtime') diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala index 0198e95ae1..fb7bb488a2 100644 --- a/src/actors/scala/actors/Future.scala +++ b/src/actors/scala/actors/Future.scala @@ -174,7 +174,7 @@ object Futures { * or timeout + `System.currentTimeMillis()` is negative. 
*/ def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = { - var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new collection.mutable.HashMap[Int, Option[Any]] + var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]] var cnt = 0 val mappedFts = fts.map(ft => diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index 1624bbe951..fbbd12a42f 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -102,7 +102,7 @@ trait Metalevels { */ val metalevels = new Transformer { var insideSplice = false - var inlineableBindings = collection.mutable.Map[TermName, Tree]() + var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]() def withinSplice[T](op: => T) = { val old = insideSplice @@ -147,4 +147,4 @@ trait Metalevels { super.transform(tree) } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 0b07c47c0f..baeea8cd9d 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -250,7 +250,7 @@ trait Reshape { private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = { val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap - val accessors = collection.mutable.Map[ValDef, List[DefDef]]() + val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]() stats collect { case ddef: DefDef => ddef } foreach (defdef => { val valdef = symdefs get defdef.symbol.accessedOrSelf collect { case vdef: ValDef => vdef } getOrElse null if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef @@ -323,4 +323,4 @@ trait Reshape { isSynthetic && isCaseCompanion })) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index c023be1a50..b2999c3c1c 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -40,7 +40,7 @@ trait NodePrinters { }) s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()") s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => { - val buf = new collection.mutable.ListBuffer[String] + val buf = new scala.collection.mutable.ListBuffer[String] val annotations = m.group(3) if (buf.nonEmpty || annotations != "") @@ -73,10 +73,10 @@ trait NodePrinters { s.trim }) - val printout = collection.mutable.ListBuffer[String](); + val printout = scala.collection.mutable.ListBuffer[String](); printout += universe.trim if (mirrorIsUsed) printout += mirror.replace("MirrorOf[", "scala.reflect.base.MirrorOf[").trim - val imports = collection.mutable.ListBuffer[String](); + val imports = scala.collection.mutable.ListBuffer[String](); imports += nme.UNIVERSE_SHORT // if (buildIsUsed) imports += nme.build if (mirrorIsUsed) imports += nme.MIRROR_SHORT diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala index bb32149a75..7165474345 100644 --- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala +++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala @@ -15,7 +15,7 @@ 
object Compilers extends scala.collection.DefaultMap[String, Compiler] { val debug = false - private val container = new collection.mutable.HashMap[String, Compiler] + private val container = new scala.collection.mutable.HashMap[String, Compiler] def iterator = container.iterator diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala index 5be98a460a..8c6716be78 100644 --- a/src/compiler/scala/tools/cmd/package.scala +++ b/src/compiler/scala/tools/cmd/package.scala @@ -9,8 +9,8 @@ package object cmd { def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } // make some language features in this package compile without warning - implicit def implicitConversions = language.implicitConversions - implicit def postfixOps = language.postfixOps + implicit def implicitConversions = scala.language.implicitConversions + implicit def postfixOps = scala.language.postfixOps private[cmd] def debug(msg: String) = println(msg) diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index ea1b26469b..dec7b648ee 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -281,7 +281,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => val trace = scala.tools.nsc.util.trace when debug val locals = util.HashSet[Symbol](8) - val orderedLocals = collection.mutable.ListBuffer[Symbol]() + val orderedLocals = scala.collection.mutable.ListBuffer[Symbol]() def registerLocal(sym: Symbol) { if (sym != null && sym != NoSymbol) { if (debug && !(locals contains sym)) orderedLocals append sym diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index e3755b82d5..eaee39d7e6 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -468,7 +468,7 @@ self => /* ------------- ERROR HANDLING ------------------------------------------- */ - var assumedClosingParens = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) private var inFunReturnType = false @inline private def fromWithinReturnType[T](body: => T): T = { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index dd0f8fdbe0..ba8da3b0ec 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1302,7 +1302,7 @@ trait Scanners extends ScannersCommon { } class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { - var balance = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) init() @@ -1317,7 +1317,7 @@ trait Scanners extends ScannersCommon { var lineCount = 1 var lastOffset = 0 var indent = 0 - val oldBalance = collection.mutable.Map[Int, Int]() + val oldBalance = scala.collection.mutable.Map[Int, Int]() def markBalance() = for ((k, v) <- balance) oldBalance(k) = v markBalance() diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala index 18a8eb5fc3..b5ae5f2d75 100644 --- 
a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala @@ -36,7 +36,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources { /** Return all positions of the given str in the given source file. */ private def positionsOf(source: SourceFile, str: String): Seq[Position] = { - val buf = new collection.mutable.ListBuffer[Position] + val buf = new scala.collection.mutable.ListBuffer[Position] var pos = source.content.indexOfSlice(str) while (pos >= 0) { buf += source.position(pos - 1) // we need the position before the first character of this marker @@ -44,7 +44,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources { } buf.toList } - + private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) { /** Return the filename:line:col version of this position. */ def showPos(pos: Position): String = @@ -59,4 +59,4 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources { println("ERROR: " + r) } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala index 89006be8e2..6a3a2a38ae 100644 --- a/src/compiler/scala/tools/nsc/interpreter/package.scala +++ b/src/compiler/scala/tools/nsc/interpreter/package.scala @@ -35,7 +35,7 @@ package object interpreter extends ReplConfig with ReplStrings { val IR = Results - implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning + implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = { import scala.collection.JavaConverters._ diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index 6385706830..775ad6bde0 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -11,7 +11,7 @@ import java.util.jar.{ Attributes } import scala.language.implicitConversions package object io { - implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning + implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning type JManifest = java.util.jar.Manifest type JFile = java.io.File diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index b1b930ca2d..0ad6d6c677 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -90,7 +90,7 @@ abstract class TailCalls extends Transform { private val defaultReason = "it contains a recursive call not in tail position" /** Has the label been accessed? Then its symbol is in this set. 
*/ - private val accessed = new collection.mutable.HashSet[Symbol]() + private val accessed = new scala.collection.mutable.HashSet[Symbol]() // `accessed` was stored as boolean in the current context -- this is no longer tenable // with jumps to labels in tailpositions now considered in tailposition, // a downstream context may access the label, and the upstream one will be none the wiser @@ -373,7 +373,7 @@ abstract class TailCalls extends Transform { // the labels all look like: matchEnd(x) {x} // then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr) class TailPosLabelsTraverser extends Traverser { - val tailLabels = new collection.mutable.HashSet[Symbol]() + val tailLabels = new scala.collection.mutable.HashSet[Symbol]() private var maybeTail: Boolean = true // since we start in the rhs of a DefDef diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 6a21639391..e8865964b0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -39,7 +39,7 @@ trait DestructureTypes { private implicit def liftToTerm(name: String): TermName = newTermName(name) - private val openSymbols = collection.mutable.Set[Symbol]() + private val openSymbols = scala.collection.mutable.Set[Symbol]() private def nodeList[T](elems: List[T], mkNode: T => Node): Node = if (elems.isEmpty) wrapEmpty else list(elems map mkNode) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 803fb2857e..294470d40e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -526,8 +526,8 @@ trait Infer { * and the code is not exactly readable. 
*/ object AdjustedTypeArgs { - val Result = collection.mutable.LinkedHashMap - type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]] + val Result = scala.collection.mutable.LinkedHashMap + type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]] def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists( (m collect {case (p, Some(a)) => (p, a)}).unzip )) diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index d3580c19ef..9adf86e44b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -336,7 +336,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr) typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe))) } - val paramCache = collection.mutable.Map[Symbol, Symbol]() + val paramCache = scala.collection.mutable.Map[Symbol, Symbol]() def param(tree: Tree): Symbol = paramCache.getOrElseUpdate(tree.symbol, { val sym = tree.symbol @@ -827,11 +827,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { * 2) undetparams (sym.isTypeParameter && !sym.isSkolem) */ var hasPendingMacroExpansions = false - private val delayed = perRunCaches.newWeakMap[Tree, collection.mutable.Set[Int]] + private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]] private def isDelayed(expandee: Tree) = delayed contains expandee private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] = delayed.get(expandee).getOrElse { - val calculated = collection.mutable.Set[Symbol]() + val calculated = scala.collection.mutable.Set[Symbol]() expandee foreach (sub => { def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym if (sub.symbol != null) traverse(sub.symbol) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 237d17887d..2dc3dc3dbd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -1558,7 +1558,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL var currId = 0 } case class Test(cond: Cond, treeMaker: TreeMaker) { - // private val reusedBy = new collection.mutable.HashSet[Test] + // private val reusedBy = new scala.collection.mutable.HashSet[Test] var reuses: Option[Test] = None def registerReuseBy(later: Test): Unit = { assert(later.reuses.isEmpty, later.reuses) @@ -1587,7 +1587,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"} object EqualityCond { - private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond] + private val uniques = new scala.collection.mutable.HashMap[(Tree, Tree), EqualityCond] def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs)) def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs) } @@ -1596,7 +1596,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL } object NonNullCond { - private val uniques = new collection.mutable.HashMap[Tree, NonNullCond] + private val uniques = new 
scala.collection.mutable.HashMap[Tree, NonNullCond] def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath)) def unapply(c: NonNullCond) = Some(c.testedPath) } @@ -1605,7 +1605,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL } object TypeCond { - private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond] + private val uniques = new scala.collection.mutable.HashMap[(Tree, Type), TypeCond] def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt)) def unapply(c: TypeCond) = Some(c.testedPath, c.pt) } @@ -1654,8 +1654,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // returns (tree, tests), where `tree` will be used to refer to `root` in `tests` class TreeMakersToConds(val root: Symbol) { // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively) - private val pointsToBound = collection.mutable.HashSet(root) - private val trees = collection.mutable.HashSet.empty[Tree] + private val pointsToBound = scala.collection.mutable.HashSet(root) + private val trees = scala.collection.mutable.HashSet.empty[Tree] // the substitution that renames variables to variables in pointsToBound private var normalize: Substitution = EmptySubstitution @@ -1956,7 +1956,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null - val vars = new collection.mutable.HashSet[Var] + val vars = new scala.collection.mutable.HashSet[Var] object gatherEqualities extends PropTraverser { override def apply(p: Prop) = p match { @@ -2261,7 +2261,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def nextId = {_nextId += 1; _nextId} def resetUniques() = {_nextId = 0; uniques.clear()} - private val uniques = new collection.mutable.HashMap[Tree, Var] + private val uniques = new scala.collection.mutable.HashMap[Tree, Var] def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe)) } class Var(val path: Tree, staticTp: Type) extends AbsVar { @@ -2273,7 +2273,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL @inline private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace) // don't access until all potential equalities have been registered using registerEquality - private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym] + private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym] // when looking at the domain, we only care about types we can check at run time val staticTpCheckable: Type = checkableType(staticTp) @@ -2386,7 +2386,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing} */ - val excludedPair = new collection.mutable.HashSet[ExcludedPair] + val excludedPair = new scala.collection.mutable.HashSet[ExcludedPair] case class ExcludedPair(a: Const, b: Const) { override def equals(o: Any) = o match { @@ -2440,7 +2440,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL private var _nextValueId = 0 def 
nextValueId = {_nextValueId += 1; _nextValueId} - private val uniques = new collection.mutable.HashMap[Type, Const] + private val uniques = new scala.collection.mutable.HashMap[Type, Const] private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const = uniques.get(tp).getOrElse( uniques.find {case (oldTp, oldC) => oldTp =:= tp} match { @@ -2454,7 +2454,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL fresh }) - private val trees = collection.mutable.HashSet.empty[Tree] + private val trees = scala.collection.mutable.HashSet.empty[Tree] // hashconsing trees (modulo value-equality) private[SymbolicMatchAnalysis] def uniqueTpForTree(t: Tree): Type = @@ -2915,7 +2915,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1) } - private val uniques = new collection.mutable.HashMap[Var, VariableAssignment] + private val uniques = new scala.collection.mutable.HashMap[Var, VariableAssignment] private def unique(variable: Var): VariableAssignment = uniques.getOrElseUpdate(variable, { val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO @@ -3034,8 +3034,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL val testss = approximateMatchConservative(prevBinder, cases) // interpret: - val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]] - val tested = new collection.mutable.HashSet[Cond] + val dependencies = new scala.collection.mutable.LinkedHashMap[Test, Set[Cond]] + val tested = new scala.collection.mutable.HashSet[Cond] def storeDependencies(test: Test) = { val cond = test.cond @@ -3083,7 +3083,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // then, collapse these contiguous sequences of reusing tests // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used) // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable - val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker] + val reused = new scala.collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker] var okToCall = false val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)} @@ -3317,7 +3317,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless) var remainingCases = cases - val collapsed = collection.mutable.ListBuffer.empty[CaseDef] + val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef] // when some of collapsed cases (except for the default case itself) did not include an un-guarded case // we'll need to emit a labeldef for the default case diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 5200aae8d1..2926678281 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3413,7 +3413,7 @@ trait Typers extends Modes with Adaptations with Tags { else argss.head val annScope = annType.decls .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) - val names = new collection.mutable.HashSet[Symbol] + val names = new scala.collection.mutable.HashSet[Symbol] names ++= (if (isJava) 
annScope.iterator else typedFun.tpe.params.iterator) val nvPairs = args map { @@ -3627,8 +3627,8 @@ trait Typers extends Modes with Adaptations with Tags { while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner o == owner && !isVisibleParameter(sym) } - var localSyms = collection.immutable.Set[Symbol]() - var boundSyms = collection.immutable.Set[Symbol]() + var localSyms = scala.collection.immutable.Set[Symbol]() + var boundSyms = scala.collection.immutable.Set[Symbol]() def isLocal(sym: Symbol): Boolean = if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false else if (owner == NoSymbol) tree exists (defines(_, sym)) diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index 876fb18578..780e3eab88 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -9,7 +9,7 @@ import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, package object util { - implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning + implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning // forwarder for old code that builds against 2.9 and 2.10 val Chars = scala.reflect.internal.Chars diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index b671a2eb48..d941519958 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -70,9 +70,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => } } - def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = { + def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = { val freeTerms = expr0.freeTerms - val freeTermNames = collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]() + val freeTermNames = scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]() freeTerms foreach (ft => { var name = ft.name.toString val namesakes = freeTerms takeWhile (_ != ft) filter (ft2 => ft != ft2 && ft.name == ft2.name) diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index 8c42c60d98..2c6838f6b3 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -35,7 +35,7 @@ trait Product extends Any with Equals { /** An iterator over all the elements of this product. * @return in the default implementation, an `Iterator[Any]` */ - def productIterator: Iterator[Any] = new collection.AbstractIterator[Any] { + def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] { private var c: Int = 0 private val cmax = productArity def hasNext = c < cmax diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala index b6c90d4d2a..3ea45e3810 100644 --- a/src/library/scala/collection/GenMapLike.scala +++ b/src/library/scala/collection/GenMapLike.scala @@ -31,7 +31,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals // This hash code must be symmetric in the contents but ought not // collide trivially. 
- override def hashCode() = util.hashing.MurmurHash3.mapHash(seq) + override def hashCode()= scala.util.hashing.MurmurHash3.mapHash(seq) /** Returns the value associated with a key, or a default value if the key is not contained in the map. * @param key the key. diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala index a77cb05960..5d812c1494 100644 --- a/src/library/scala/collection/GenSeqLike.scala +++ b/src/library/scala/collection/GenSeqLike.scala @@ -137,7 +137,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * * @usecase def indexOf(elem: A, from: Int): Int * @inheritdoc - * + * * $mayNotTerminateInf * */ @@ -465,7 +465,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal /** Hashcodes for $Coll produce a value from the hashcodes of all the * elements of the $coll. */ - override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) + override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) /** The equals method for arbitrary sequences. Compares this sequence to * some other object. diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala index 18eb31da03..ef5f14ed55 100644 --- a/src/library/scala/collection/GenSetLike.scala +++ b/src/library/scala/collection/GenSetLike.scala @@ -127,5 +127,5 @@ extends GenIterableLike[A, Repr] // Calling map on a set drops duplicates: any hashcode collisions would // then be dropped before they can be added. // Hash should be symmetric in set entries, but without trivial collisions. - override def hashCode() = util.hashing.MurmurHash3.setHash(seq) + override def hashCode()= scala.util.hashing.MurmurHash3.setHash(seq) } diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index f79a9d2c66..3858d60563 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -41,7 +41,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { self => def seq: IndexedSeq[A] - override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ? + override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ? 
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]] override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]] diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index 9d03a11db9..b471c304ab 100755 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -6,9 +6,8 @@ ** |/ ** \* */ - - -package scala.collection +package scala +package collection import generic._ import mutable.ArrayBuffer diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index 7ef3b39576..ead5633e00 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -6,8 +6,8 @@ ** |/ ** \* */ -package scala.collection - +package scala +package collection import generic._ import immutable.{ List, Stream } diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 9989a8d9e8..9a0ca699fc 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection import mutable.ArrayBuffer import scala.annotation.migration diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index 8e72c5618c..b873ae964d 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -50,7 +50,7 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr def seq: LinearSeq[A] - override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ? + override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ? override /*IterableLike*/ def iterator: Iterator[A] = new AbstractIterator[A] { diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index 2fc3df0bd2..a3ff812024 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -6,13 +6,14 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection import mutable.{ ListBuffer, ArraySeq } import immutable.{ List, Range } import generic._ import parallel.ParSeq -import scala.math.Ordering +import scala.math.{ min, max, Ordering } /** A template trait for sequences of type `Seq[A]` * $seqInfo diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 411d17e935..f5e479a514 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -501,7 +501,7 @@ trait TraversableLike[+A, +Repr] extends Any else sliceWithKnownDelta(n, Int.MaxValue, -n) def slice(from: Int, until: Int): Repr = - sliceWithKnownBound(math.max(from, 0), until) + sliceWithKnownBound(scala.math.max(from, 0), until) // Precondition: from >= 0, until > 0, builder already configured for building. 
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = { diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 7609910b65..070497c19e 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -916,7 +916,7 @@ object TrieMap extends MutableMapFactory[TrieMap] { def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] class MangledHashing[K] extends Hashing[K] { - def hash(k: K) = util.hashing.byteswap32(k.##) + def hash(k: K)= scala.util.hashing.byteswap32(k.##) } } diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala index d79112d616..cebb4e69d3 100755 --- a/src/library/scala/collection/generic/FilterMonadic.scala +++ b/src/library/scala/collection/generic/FilterMonadic.scala @@ -14,7 +14,7 @@ package scala.collection.generic */ trait FilterMonadic[+A, +Repr] extends Any { def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That + def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That def foreach[U](f: A => U): Unit def withFilter(p: A => Boolean): FilterMonadic[A, Repr] } diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala index 9624aafd06..6b59b6671c 100644 --- a/src/library/scala/collection/generic/ParFactory.scala +++ b/src/library/scala/collection/generic/ParFactory.scala @@ -24,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC] extends GenTraversableFactory[CC] with GenericParCompanion[CC] { - //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C] + //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C] /** A generic implementation of the `CanCombineFrom` trait, which forwards * all calls to `apply(from)` to the `genericParBuilder` method of the $coll diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala index 56033ca8d8..af56d06d60 100644 --- a/src/library/scala/collection/generic/SliceInterval.scala +++ b/src/library/scala/collection/generic/SliceInterval.scala @@ -32,7 +32,7 @@ private[collection] class SliceInterval private (val from: Int, val until: Int) */ def recalculate(_from: Int, _until: Int): SliceInterval = { val lo = _from max 0 - val elems = math.min(_until - lo, width) + val elems = scala.math.min(_until - lo, width) val start = from + lo if (elems <= 0) new SliceInterval(from, from) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 01372aa618..a6e750e7ee 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package immutable import generic._ diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 03af4deaae..2ebeb044fc 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -8,7 +8,8 @@ -package scala.collection +package scala +package 
collection package immutable import scala.annotation.unchecked.{ uncheckedVariance => uV } diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index e895c94599..d0f6b4b3ac 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package immutable import scala.collection.generic.{ CanBuildFrom, BitOperations } diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index b77e8b4c7c..7a489bb100 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -229,7 +229,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] * }}} */ override def slice(from: Int, until: Int): List[A] = { - val lo = math.max(from, 0) + val lo = scala.math.max(from, 0) if (until <= lo || isEmpty) Nil else this drop lo take (until - lo) } diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 002027b162..4899b45d5f 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package immutable import scala.collection.generic.{ CanBuildFrom, BitOperations } @@ -298,7 +299,7 @@ extends AbstractMap[Long, T] if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) - case LongMap.Tip(key2, value2) => + case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, f(value2, value)) else join(key, LongMap.Tip(key, value), key2, this) case LongMap.Nil => LongMap.Tip(key, value) diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index 8975b440d2..3b4bfdc593 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -99,7 +99,7 @@ object PagedSeq { /** Constructs a paged character sequence from a scala.io.Source value */ - def fromSource(source: io.Source) = + def fromSource(source: scala.io.Source) = fromLines(source.getLines()) } diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 83eeaa45ee..a3ab27f814 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -8,7 +8,8 @@ -package scala.collection +package scala +package collection package immutable /** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`. 
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 9eeebb641e..5bdba26d02 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -8,7 +8,8 @@ -package scala.collection +package scala +package collection package immutable import scala.annotation.tailrec diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala index bb378bc337..236308da2e 100644 --- a/src/library/scala/collection/immutable/StreamViewLike.scala +++ b/src/library/scala/collection/immutable/StreamViewLike.scala @@ -50,10 +50,10 @@ extends SeqView[A, Coll] trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] /** boilerplate */ - protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] + protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B] protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B] protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B] - protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] + protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B] protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 82acf4ebbb..98b5aa6d9f 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package immutable import scala.annotation.unchecked.uncheckedVariance diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala index eec5f04fff..647fc04310 100644 --- a/src/library/scala/collection/immutable/package.scala +++ b/src/library/scala/collection/immutable/package.scala @@ -69,9 +69,9 @@ package immutable { private def locationAfterN(n: Int) = ( if (n > 0) { if (step > 0) - math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt + scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt else - math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt + scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt } else start ) diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index 9aea25f330..ad52daaad4 100644 --- 
a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package mutable diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index c87feaddc5..397f5bbefa 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package mutable import scala.compat.Platform.arraycopy @@ -60,7 +61,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza * @param asTrav A function that converts elements of this array to rows - arrays of type `U`. * @return An array obtained by concatenating rows of this array. */ - def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = { + def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = { val b = Array.newBuilder[U] b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum) for (xs <- this) diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala index e408d74353..277d48c545 100644 --- a/src/library/scala/collection/mutable/ArrayStack.scala +++ b/src/library/scala/collection/mutable/ArrayStack.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package mutable import generic._ diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index b6887df61e..6dec6b221e 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -7,7 +7,8 @@ \* */ -package scala.collection +package scala +package collection package mutable import generic._ diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index f6d4cc31b6..12066055e9 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -356,8 +356,8 @@ private[collection] object FlatHashTable { * * See SI-5293. 
*/ - final def seedGenerator = new ThreadLocal[util.Random] { - override def initialValue = new util.Random + final def seedGenerator = new ThreadLocal[scala.util.Random] { + override def initialValue = new scala.util.Random } /** The load factor for the hash table; must be < 500 (0.5) @@ -365,7 +365,7 @@ private[collection] object FlatHashTable { def defaultLoadFactor: Int = 450 final def loadFactorDenum = 1000 - def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt) + def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt) def newThreshold(_loadFactor: Int, size: Int) = { val lf = _loadFactor @@ -397,7 +397,7 @@ private[collection] object FlatHashTable { //h = h + (h << 4) //h ^ (h >>> 10) - val improved = util.hashing.byteswap32(hcode) + val improved= scala.util.hashing.byteswap32(hcode) // for the remainder, see SI-5293 // to ensure that different bits are used for different hash tables, we have to rotate based on the seed diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index 67e7348672..968d99d042 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -401,7 +401,7 @@ private[collection] object HashTable { * * For performance reasons, we avoid this improvement. * */ - val i = util.hashing.byteswap32(hcode) + val i= scala.util.hashing.byteswap32(hcode) /* Jenkins hash * for range 0-10000, output has the msb set to zero */ diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index c33a7a906e..11055f8986 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package mutable /** diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index e37cbdc712..abd8c1cdff 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -166,7 +166,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) * Note: The order of elements returned is undefined. * If you want to traverse the elements in priority queue * order, use `clone().dequeueAll.iterator`. - * + * * @return an iterator over all the elements. */ override def iterator: Iterator[A] = new AbstractIterator[A] { @@ -193,7 +193,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) * @return A reversed priority queue. */ def reverse = { - val revq = new PriorityQueue[A]()(new math.Ordering[A] { + val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] { def compare(x: A, y: A) = ord.compare(y, x) }) for (i <- 1 until resarr.length) revq += resarr(i) @@ -204,7 +204,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) * than that returned by the method `iterator`. * * Note: The order of elements returned is undefined. - * + * * @return an iterator over all elements sorted in descending order. */ def reverseIterator: Iterator[A] = new AbstractIterator[A] { @@ -236,11 +236,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) * @return the string representation of this queue. 
*/ override def toString() = toList.mkString("PriorityQueue(", ", ", ")") - + /** Converts this $coll to a list. * * Note: the order of elements is undefined. - * + * * @return a list containing all elements of this $coll. */ override def toList = this.iterator.toList diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala index 54bf93252f..d29ee67580 100644 --- a/src/library/scala/collection/mutable/ResizableArray.scala +++ b/src/library/scala/collection/mutable/ResizableArray.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection package mutable import generic._ diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 1959aab056..f6fb32e152 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -1368,7 +1368,7 @@ self: ParIterableLike[T, Repr, Sequential] => val until = from + len val blocksize = scanBlockSize while (i < until) { - trees += scanBlock(i, math.min(blocksize, pit.remaining)) + trees += scanBlock(i, scala.math.min(blocksize, pit.remaining)) i += blocksize } diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala index 58197ab2c6..2bc5e783e6 100644 --- a/src/library/scala/collection/parallel/ParMap.scala +++ b/src/library/scala/collection/parallel/ParMap.scala @@ -6,13 +6,8 @@ ** |/ ** \* */ - package scala.collection.parallel - - - - import scala.collection.Map import scala.collection.GenMap import scala.collection.mutable.Builder @@ -21,10 +16,6 @@ import scala.collection.generic.GenericParMapTemplate import scala.collection.generic.GenericParMapCompanion import scala.collection.generic.CanCombineFrom - - - - /** A template trait for parallel maps. 
* * $sideeffects @@ -75,31 +66,3 @@ object ParMap extends ParMapFactory[ParMap] { override def default(key: A): B = d(key) } } - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index a67a4d8eb7..9bf287cc39 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -190,7 +190,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = { drop(from) - var left = math.max(until - from, 0) + var left = scala.math.max(until - from, 0) cb.sizeHint(left) while (left > 0) { cb += next diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 67552e1c89..2556cd3f68 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -98,7 +98,7 @@ trait Task[R, +Tp] { */ trait Tasks { - private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]() + private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]() private[parallel] def debuglog(s: String) = synchronized { debugMessages += s diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index 7f5255f5a3..187e4aaf92 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -264,7 +264,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V val fp = howmany / 2 List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) } class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int) @@ -328,7 +328,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V val fp = howmany / 2 List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp)) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) } } diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index 42d00623ab..85e2138c56 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -209,7 +209,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC val fp = howmany / 2 List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp)) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) + def 
shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel) } } diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala index 4b0773ce7b..5854844a8f 100644 --- a/src/library/scala/collection/parallel/immutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala @@ -30,10 +30,10 @@ import scala.collection.GenIterable * @since 2.9 */ trait ParIterable[+T] -extends collection/*.immutable*/.GenIterable[T] +extends scala.collection/*.immutable*/.GenIterable[T] with scala.collection.parallel.ParIterable[T] with GenericParTemplate[T, ParIterable] - with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]] + with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]] with Immutable { override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala index 74b2672e67..585e6bf541 100644 --- a/src/library/scala/collection/parallel/immutable/ParMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParMap.scala @@ -28,11 +28,11 @@ import scala.collection.GenMapLike * @since 2.9 */ trait ParMap[K, +V] -extends collection/*.immutable*/.GenMap[K, V] +extends scala.collection/*.immutable*/.GenMap[K, V] with GenericParMapTemplate[K, V, ParMap] with parallel.ParMap[K, V] with ParIterable[(K, V)] - with ParMapLike[K, V, ParMap[K, V], collection.immutable.Map[K, V]] + with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]] { self => diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala index 300efe9a58..265121286d 100644 --- a/src/library/scala/collection/parallel/immutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala @@ -28,11 +28,11 @@ import scala.collection.GenSeq * @define coll mutable parallel sequence */ trait ParSeq[+T] -extends collection/*.immutable*/.GenSeq[T] +extends scala.collection/*.immutable*/.GenSeq[T] with scala.collection.parallel.ParSeq[T] with ParIterable[T] with GenericParTemplate[T, ParSeq] - with ParSeqLike[T, ParSeq[T], collection.immutable.Seq[T]] + with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]] { override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq override def toSeq: ParSeq[T] = this diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala index 40429280ac..c8da509ef5 100644 --- a/src/library/scala/collection/parallel/immutable/ParSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParSet.scala @@ -20,11 +20,11 @@ import scala.collection.parallel.Combiner * @define coll mutable parallel set */ trait ParSet[T] -extends collection/*.immutable*/.GenSet[T] +extends scala.collection/*.immutable*/.GenSet[T] with GenericParTemplate[T, ParSet] with parallel.ParSet[T] with ParIterable[T] - with ParSetLike[T, ParSet[T], collection.immutable.Set[T]] + with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]] { self => override def empty: ParSet[T] = ParHashSet[T]() diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala 
b/src/library/scala/collection/parallel/mutable/ParArray.scala index 6889d8b472..56cc06f99e 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -7,7 +7,8 @@ \* */ -package scala.collection.parallel.mutable +package scala +package collection.parallel.mutable @@ -665,7 +666,7 @@ self => val fp = howmany / 2 List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp)) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel) } /* serialization */ diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 33a39e6038..1921727ce3 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -40,14 +40,14 @@ import scala.collection.parallel.Task class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]]) extends ParMap[K, V] with GenericParMapTemplate[K, V, ParHashMap] - with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]] + with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]] with ParHashTable[K, DefaultEntry[K, V]] with Serializable { self => initWithContents(contents) - type Entry = collection.mutable.DefaultEntry[K, V] + type Entry = scala.collection.mutable.DefaultEntry[K, V] def this() = this(null) @@ -57,7 +57,7 @@ self => protected[this] override def newCombiner = ParHashMapCombiner[K, V] - override def seq = new collection.mutable.HashMap[K, V](hashTableContents) + override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents) def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) @@ -302,7 +302,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau override def merge(that: FillBlocks) { this.result += that.result } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) } } diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 870cae75de..7b5b8e3ceb 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -36,7 +36,7 @@ import scala.collection.parallel.Task class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T]) extends ParSet[T] with GenericParTemplate[T, ParHashSet] - with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]] + with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]] with ParFlatHashTable[T] with Serializable { @@ -57,7 +57,7 @@ extends ParSet[T] def clear() = clearTable() - override def seq = new collection.mutable.HashSet(hashTableContents) + override def seq = new scala.collection.mutable.HashSet(hashTableContents) def +=(elem: T) = { addEntry(elem) @@ -310,7 +310,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { // the total number of successfully inserted 
elements is adjusted accordingly result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel) } } diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala index 7cf464c287..bb9a7b7823 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala @@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec // otherwise, this is the last entry in the table - all what remains is the chain // so split the rest of the chain val arr = convertToArrayBuffer(es) - val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) + val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) arrpit.split } } else Seq(this.asInstanceOf[IterRepr]) diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala index a2847c3beb..9281e84c03 100644 --- a/src/library/scala/collection/parallel/mutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala @@ -26,7 +26,7 @@ import scala.collection.GenIterable * @author Aleksandar Prokopec * @since 2.9 */ -trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T] +trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T] with scala.collection.parallel.ParIterable[T] with GenericParTemplate[T, ParIterable] with ParIterableLike[T, ParIterable[T], Iterable[T]] diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala index 9ad14f15f8..34b3d465d2 100644 --- a/src/library/scala/collection/parallel/mutable/ParMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParMap.scala @@ -28,11 +28,11 @@ import scala.collection.parallel.Combiner * @since 2.9 */ trait ParMap[K, V] -extends collection/*.mutable*/.GenMap[K, V] +extends scala.collection/*.mutable*/.GenMap[K, V] with scala.collection.parallel.ParMap[K, V] with /* mutable */ ParIterable[(K, V)] with GenericParMapTemplate[K, V, ParMap] - with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]] + with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]] { protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala index 15f8d1d0b5..7322d5236f 100644 --- a/src/library/scala/collection/parallel/mutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala @@ -29,11 +29,11 @@ import scala.collection.GenSeq * @define Coll `mutable.ParSeq` * @define coll mutable parallel sequence */ -trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] +trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T] with ParIterable[T] with scala.collection.parallel.ParSeq[T] with GenericParTemplate[T, 
ParSeq] - with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] { + with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] { self => override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq //protected[this] override def newBuilder = ParSeq.newBuilder[T] diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala index 689ce3436f..540ecb8022 100644 --- a/src/library/scala/collection/parallel/mutable/ParSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParSet.scala @@ -27,11 +27,11 @@ import scala.collection.GenSet * @author Aleksandar Prokopec */ trait ParSet[T] -extends collection/*.mutable*/.GenSet[T] +extends scala.collection/*.mutable*/.GenSet[T] with ParIterable[T] with scala.collection.parallel.ParSet[T] with GenericParTemplate[T, ParSet] - with ParSetLike[T, ParSet[T], collection.mutable.Set[T]] + with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]] { self => override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala index 01eb17024e..68f37137f8 100644 --- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala @@ -81,7 +81,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA val fp = howmany / 2 List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp)) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) } } diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index 9648791502..5600d0f68c 100644 --- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -8,10 +8,6 @@ package scala.collection.parallel.mutable - - - - import scala.collection.generic.Sizing import scala.collection.mutable.ArraySeq import scala.collection.mutable.ArrayBuffer @@ -23,16 +19,12 @@ import scala.collection.parallel.Combiner import scala.collection.parallel.Task import scala.reflect.ClassTag - - - private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this) } - /** An array combiner that uses doubling unrolled buffers to store elements. */ trait UnrolledParArrayCombiner[T] extends Combiner[T, ParArray[T]] { @@ -85,7 +77,7 @@ extends Combiner[T, ParArray[T]] { var pos = startpos var arroffset = offset while (totalleft > 0) { - val lefthere = math.min(totalleft, curr.size - pos) + val lefthere = scala.math.min(totalleft, curr.size - pos) Array.copy(curr.array, pos, array, arroffset, lefthere) // println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! 
" + buff.headPtr) totalleft -= lefthere @@ -107,13 +99,11 @@ extends Combiner[T, ParArray[T]] { val fp = howmany / 2 List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp)) } - def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) + def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")" } } - - object UnrolledParArrayCombiner { def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]] } diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index 30b4c0c914..a95090c15b 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.collection +package scala +package collection import scala.collection.generic.CanBuildFrom import scala.collection.generic.CanCombineFrom @@ -41,8 +42,8 @@ package object parallel { private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString) private[parallel] def getTaskSupport: TaskSupport = - if (util.Properties.isJavaAtLeast("1.6")) { - val vendor = util.Properties.javaVmVendor + if (scala.util.Properties.isJavaAtLeast("1.6")) { + val vendor = scala.util.Properties.javaVmVendor if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport else new ThreadPoolTaskSupport } else new ThreadPoolTaskSupport diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala index f18ce12e6c..77c12a8e58 100644 --- a/src/library/scala/compat/Platform.scala +++ b/src/library/scala/compat/Platform.scala @@ -109,7 +109,7 @@ object Platform { * `System.getProperty("line.separator")` * with a default value of "\n". */ - val EOL = util.Properties.lineSeparator + val EOL = scala.util.Properties.lineSeparator /** The current time in milliseconds. The time is counted since 1 January 1970 * UTC. diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala index fa74be0f98..6522cd0cd8 100644 --- a/src/library/scala/io/Codec.scala +++ b/src/library/scala/io/Codec.scala @@ -91,7 +91,7 @@ object Codec extends LowPriorityCodecImplicits { * as an accident, with any anomalies considered "not a bug". */ def defaultCharsetCodec = apply(Charset.defaultCharset) - def fileEncodingCodec = apply(util.Properties.encodingString) + def fileEncodingCodec = apply(scala.util.Properties.encodingString) def default = defaultCharsetCodec def apply(encoding: String): Codec = new Codec(Charset forName encoding) diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala index 0d0d0d7648..dae478f31a 100644 --- a/src/library/scala/io/Position.scala +++ b/src/library/scala/io/Position.scala @@ -54,7 +54,7 @@ abstract class Position { if (line >= LINE_MASK) LINE_MASK << COLUMN_BITS else - (line << COLUMN_BITS) | math.min(COLUMN_MASK, column) + (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column) } /** Returns the line number of the encoded position. 
*/ diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala index dfe27f8857..297f344f65 100644 --- a/src/library/scala/language.scala +++ b/src/library/scala/language.scala @@ -29,7 +29,7 @@ object language { implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps /** Only where enabled, accesses to members of structural types that need - * reflection are supported. Reminder: A structural type is a type of the form + * reflection are supported. Reminder: A structural type is a type of the form * `Parents { Decls }` where `Decls` contains declarations of new members that do * not override any member in `Parents`. To access one of these members, a * reflective call is needed. @@ -52,7 +52,7 @@ object language { * implicit def stringToInt(s: String): Int = s.length * implicit val conv = (s: String) => s.length * implicit def listToX(xs: List[T])(implicit f: T => X): X = … - * + * * implicit values of other types are not affected, and neither are implicit * classes. * @@ -95,7 +95,7 @@ object language { * * _Why keep the feature?_ Existential types are needed to make sense of Java’s wildcard * types and raw types and the erased types of run-time values. - * + * * Why control it? Having complex existential types in a code base usually makes * application code very brittle, with a tendency to produce type errors with * obscure error messages. Therefore, going overboard with existential types @@ -110,7 +110,7 @@ object language { /** Where enabled, macro definitions are allowed. Macro implementations and * macro applications are unaffected; they can be used anywhere. - * + * * _Why introduce the feature?_ Macros promise to make the language more regular, * replacing ad-hoc language constructs with a general powerful abstraction * capability that can express them. 
Macros are also a more disciplined and diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index 3ac255b57f..719f2e12a7 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.math +package scala +package math import java.util.Comparator import scala.language.{implicitConversions, higherKinds} diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 6757a72053..8a595473cc 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -1,4 +1,5 @@ -package scala.reflect +package scala +package reflect import java.lang.{ Class => jClass } import scala.language.{implicitConversions, existentials} @@ -18,7 +19,7 @@ import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass } * * @see [[scala.reflect.base.TypeTags]] */ -@annotation.implicitNotFound(msg = "No ClassTag available for ${T}") +@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}") trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable { // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder` // class tags, and all tags in general, should be as minimalistic as possible diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index f2a23f4372..8b021e0444 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -38,7 +38,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray } }}} * */ -@annotation.implicitNotFound(msg = "No Manifest available for ${T}.") +@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") trait Manifest[T] extends ClassManifest[T] with Equals { override def typeArguments: List[Manifest[_]] = Nil @@ -264,4 +264,4 @@ object ManifestFactory { def runtimeClass = parents.head.erasure override def toString = parents.mkString(" with ") } -} \ No newline at end of file +} diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala index ff56e20d52..77cbd20321 100755 --- a/src/library/scala/reflect/NameTransformer.scala +++ b/src/library/scala/reflect/NameTransformer.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.reflect +package scala +package reflect /** Provides functions to encode and decode Scala symbolic names. * Also provides some constants. 
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala index ec86bbc9be..55708c5274 100644 --- a/src/library/scala/reflect/base/TypeTags.scala +++ b/src/library/scala/reflect/base/TypeTags.scala @@ -3,7 +3,8 @@ * @author Martin Odersky */ -package scala.reflect +package scala +package reflect package base import java.lang.{ Class => jClass } diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala index 19396a3d48..d7d2603ef7 100644 --- a/src/library/scala/runtime/RichDouble.scala +++ b/src/library/scala/runtime/RichDouble.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.runtime +package scala +package runtime final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] { protected def num = scala.math.Numeric.DoubleIsFractional diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala index 9fbb3c19bb..9c3a14d3be 100644 --- a/src/library/scala/runtime/RichFloat.scala +++ b/src/library/scala/runtime/RichFloat.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.runtime +package scala +package runtime final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] { protected def num = scala.math.Numeric.FloatIsFractional diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 045fb5b252..baf200bd30 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.runtime +package scala +package runtime import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator } import scala.collection.mutable.WrappedArray diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala index dc9594d960..8cb958c05f 100644 --- a/src/library/scala/runtime/SeqCharSequence.scala +++ b/src/library/scala/runtime/SeqCharSequence.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.runtime +package scala +package runtime import java.util.Arrays.copyOfRange diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index bd52c678af..6030c9ea90 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -21,7 +21,7 @@ trait ZippedTraversable2[+El1, +El2] extends Any { } object ZippedTraversable2 { implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = { - new collection.AbstractTraversable[(El1, El2)] { + new scala.collection.AbstractTraversable[(El1, El2)] { def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f) } } diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 3b78b6261a..3970c9973d 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -18,7 +18,7 @@ trait ZippedTraversable3[+El1, +El2, +El3] extends Any { } object ZippedTraversable3 { implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = { - new collection.AbstractTraversable[(El1, El2, El3)] { + new scala.collection.AbstractTraversable[(El1, El2, El3)] { def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f) } } diff --git a/src/library/scala/sys/Prop.scala 
b/src/library/scala/sys/Prop.scala index 687a32cf7d..123a729748 100644 --- a/src/library/scala/sys/Prop.scala +++ b/src/library/scala/sys/Prop.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.sys +package scala +package sys /** A lightweight interface wrapping a property contained in some * unspecified map. Generally it'll be the system properties but this diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index 77e36f6196..94a2125393 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -45,7 +45,7 @@ object BasicIO { val q = new LinkedBlockingQueue[Either[Int, T]] def next(): Stream[T] = q.take match { case Left(0) => Stream.empty - case Left(code) => if (nonzeroException) sys.error("Nonzero exit code: " + code) else Stream.empty + case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty case Right(s) => Stream.cons(s, next) } new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next()) diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala index 58f06e1039..2c83a59e4f 100644 --- a/src/library/scala/sys/process/ProcessBuilderImpl.scala +++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala @@ -128,7 +128,7 @@ private[process] trait ProcessBuilderImpl { val code = this ! BasicIO(withIn, buffer, log) if (code == 0) buffer.toString - else sys.error("Nonzero exit value: " + code) + else scala.sys.error("Nonzero exit value: " + code) } private[this] def lines( @@ -213,4 +213,4 @@ private[process] trait ProcessBuilderImpl { ) extends SequentialBuilder(first, second, "###") { override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io) } -} \ No newline at end of file +} diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index b7549eeb06..cdf7d72caa 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -84,7 +84,7 @@ private[process] trait ProcessImpl { private[process] abstract class CompoundProcess extends BasicProcess { def destroy() = destroyer() - def exitValue() = getExitValue() getOrElse sys.error("No exit code: process destroyed.") + def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.") def start() = getExitValue protected lazy val (getExitValue, destroyer) = { diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index 5f0edf964f..276e157f55 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -6,10 +6,11 @@ ** |/ ** \* */ -package scala.util +package scala +package util import scala.reflect.{ ClassTag, classTag } -import scala.math.Ordering +import scala.math.{ Ordering, max, min } /** The Sorting object provides functions that can sort various kinds of * objects. 
You can provide a comparison function, or you can request a sort diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala index 1cdcd734cd..25ac86183c 100644 --- a/src/library/scala/util/automata/SubsetConstruction.scala +++ b/src/library/scala/util/automata/SubsetConstruction.scala @@ -19,8 +19,8 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) { def determinize: DetWordAutom[T] = { // for assigning numbers to bitsets - var indexMap = collection.Map[immutable.BitSet, Int]() - var invIndexMap = collection.Map[Int, immutable.BitSet]() + var indexMap = scala.collection.Map[immutable.BitSet, Int]() + var invIndexMap = scala.collection.Map[Int, immutable.BitSet]() var ix = 0 // we compute the dfa with states = bitsets diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala index c2b5dbca22..4409358785 100644 --- a/src/library/scala/util/control/NoStackTrace.scala +++ b/src/library/scala/util/control/NoStackTrace.scala @@ -6,7 +6,8 @@ ** |/ ** \* */ -package scala.util.control +package scala +package util.control /** A trait for exceptions which, for efficiency reasons, do not * fill in the stack trace. Stack trace suppression can be disabled diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala index 84b549f35e..97d32af2b0 100644 --- a/src/library/scala/util/hashing/Hashing.scala +++ b/src/library/scala/util/hashing/Hashing.scala @@ -8,6 +8,8 @@ package scala.util.hashing +import scala.annotation.implicitNotFound + /** `Hashing` is a trait whose instances each represent a strategy for hashing * instances of a type. * @@ -16,27 +18,22 @@ package scala.util.hashing * * Note: when using a custom `Hashing`, make sure to use it with the `Equiv` * such that if any two objects are equal, then their hash codes must be equal. - * + * * @since 2.10 */ -@annotation.implicitNotFound(msg = "No implicit Hashing defined for ${T}.") +@implicitNotFound(msg = "No implicit Hashing defined for ${T}.") trait Hashing[T] extends Serializable { - def hash(x: T): Int - } - object Hashing { - final class Default[T] extends Hashing[T] { def hash(x: T) = x.## } - + implicit def default[T] = new Default[T] - + def fromFunction[T](f: T => Int) = new Hashing[T] { def hash(x: T) = f(x) } - } diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala index e6c9573756..5d990eee78 100644 --- a/src/library/scala/util/parsing/combinator/Parsers.scala +++ b/src/library/scala/util/parsing/combinator/Parsers.scala @@ -178,7 +178,7 @@ trait Parsers { def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this - def get: Nothing = sys.error("No result when parsing failed") + def get: Nothing = scala.sys.error("No result when parsing failed") } /** An extractor so `NoSuccess(msg, next)` can be used in matches. 
*/ object NoSuccess { diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala index 2d87bc0764..5d183df04b 100644 --- a/src/library/scala/xml/dtd/ContentModelParser.scala +++ b/src/library/scala/xml/dtd/ContentModelParser.scala @@ -6,7 +6,6 @@ ** |/ ** \* */ - package scala.xml package dtd @@ -21,10 +20,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # def accept(tok: Int) = { if (token != tok) { if ((tok == STAR) && (token == END)) // common mistake - sys.error("in DTDs, \n"+ + scala.sys.error("in DTDs, \n"+ "mixed content models must be like (#PCDATA|Name|Name|...)*"); else - sys.error("expected "+token2string(tok)+ + scala.sys.error("expected "+token2string(tok)+ ", got unexpected token:"+token2string(token)); } nextToken @@ -45,7 +44,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # case NAME => value match { case "ANY" => ANY case "EMPTY" => EMPTY - case _ => sys.error("expected ANY, EMPTY or '(' instead of " + value ); + case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value ); } case LPAREN => @@ -65,12 +64,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # accept( STAR ); res case _ => - sys.error("unexpected token:" + token2string(token) ); + scala.sys.error("unexpected token:" + token2string(token) ); } } case _ => - sys.error("unexpected token:" + token2string(token) ); + scala.sys.error("unexpected token:" + token2string(token) ); } // sopt ::= S? def sOpt() = if( token == S ) nextToken; @@ -118,12 +117,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # def particle = token match { case LPAREN => nextToken; sOpt; regexp; case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a) - case _ => sys.error("expected '(' or Name, got:"+token2string(token)); + case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token)); } // atom ::= name def atom = token match { case NAME => val a = Letter(ElemName(value)); nextToken; a - case _ => sys.error("expected Name, got:"+token2string(token)); + case _ => scala.sys.error("expected Name, got:"+token2string(token)); } } diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala index 82a8d1af2f..2e753a7590 100644 --- a/src/library/scala/xml/dtd/Scanner.scala +++ b/src/library/scala/xml/dtd/Scanner.scala @@ -44,7 +44,7 @@ class Scanner extends Tokens with parsing.TokenTests { final def next() = if (it.hasNext) c = it.next else c = ENDCH final def acc(d: Char) { - if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !"); + if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !"); } final def accS(ds: Seq[Char]) { ds foreach acc } @@ -65,7 +65,7 @@ class Scanner extends Tokens with parsing.TokenTests { case ENDCH => END case _ => if (isNameStart(c)) name; // NAME - else sys.error("unexpected character:" + c) + else scala.sys.error("unexpected character:" + c) } final def name = { diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala index 61d4855b2e..c543b8751b 100644 --- a/src/library/scala/xml/factory/NodeFactory.scala +++ b/src/library/scala/xml/factory/NodeFactory.scala @@ -18,7 +18,7 @@ trait NodeFactory[A <: Node] { val ignoreProcInstr = false /* default behaviour is to use hash-consing */ - val cache = new collection.mutable.HashMap[Int, List[A]] + val cache = new 
scala.collection.mutable.HashMap[Int, List[A]] protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala index f4d69ffe44..2af66f4f16 100644 --- a/src/library/scala/xml/include/sax/XIncluder.scala +++ b/src/library/scala/xml/include/sax/XIncluder.scala @@ -62,7 +62,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit val value = atts.getValue(i); // @todo Need to use character references if the encoding // can't support the character - out.write(xml.Utility.escape(value)) + out.write(scala.xml.Utility.escape(value)) out.write("'"); i += 1 } diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index af9b5f47cf..d4dc6da14d 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -56,7 +56,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests // See ticket #3720 for motivations. private class WithLookAhead(underlying: Source) extends Source { - private val queue = collection.mutable.Queue[Char]() + private val queue = scala.collection.mutable.Queue[Char]() def lookahead(): BufferedIterator[Char] = { val iter = queue.iterator ++ new Iterator[Char] { def hasNext = underlying.hasNext @@ -897,7 +897,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests new PublicID(pubID, sysID) } else { reportSyntaxError("PUBLIC or SYSTEM expected"); - sys.error("died parsing notationdecl") + scala.sys.error("died parsing notationdecl") } xSpaceOpt xToken('>') diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala index 096f8a8f38..219c3d6679 100644 --- a/src/library/scala/xml/parsing/MarkupParserCommon.scala +++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala @@ -21,7 +21,7 @@ import Utility.SU * All members should be accessed through those. */ private[scala] trait MarkupParserCommon extends TokenTests { - protected def unreachable = sys.error("Cannot be reached.") + protected def unreachable = scala.sys.error("Cannot be reached.") // type HandleType // MarkupHandler, SymbolicXMLBuilder type InputType // Source, CharArrayReader @@ -82,7 +82,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { case `end` => return buf.toString case ch => buf append ch } - sys.error("Expected '%s'".format(end)) + scala.sys.error("Expected '%s'".format(end)) } /** [42] '<' xmlEndTag ::= '<' '/' Name S? 
'>' diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index ec1778940f..ebd3e46b7c 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -30,8 +30,8 @@ package object partest { implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x) - implicit lazy val postfixOps = language.postfixOps - implicit lazy val implicitConversions = language.implicitConversions + implicit lazy val postfixOps = scala.language.postfixOps + implicit lazy val implicitConversions = scala.language.implicitConversions def timed[T](body: => T): (T, Long) = { val t1 = System.currentTimeMillis @@ -75,7 +75,7 @@ package object partest { propOrEmpty("partest.debug") == "true" - import language.experimental.macros + import scala.language.experimental.macros /** * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out. diff --git a/src/reflect/scala/reflect/api/FrontEnds.scala b/src/reflect/scala/reflect/api/FrontEnds.scala index a27450d49d..61ea227c47 100644 --- a/src/reflect/scala/reflect/api/FrontEnds.scala +++ b/src/reflect/scala/reflect/api/FrontEnds.scala @@ -24,7 +24,7 @@ trait FrontEnds { def hasWarnings = WARNING.count > 0 case class Info(val pos: Position, val msg: String, val severity: Severity) - val infos = new collection.mutable.LinkedHashSet[Info] + val infos = new scala.collection.mutable.LinkedHashSet[Info] /** Handles incoming info */ def log(pos: Position, msg: String, severity: Severity) { @@ -67,4 +67,4 @@ trait FrontEnds { */ // todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here! def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 87058df732..c116928d37 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -32,7 +32,7 @@ trait Importers extends api.Importers { self: SymbolTable => // fixups and maps prevent stackoverflows in importer var pendingSyms = 0 var pendingTpes = 0 - lazy val fixups = collection.mutable.MutableList[Function0[Unit]]() + lazy val fixups = scala.collection.mutable.MutableList[Function0[Unit]]() def addFixup(fixup: => Unit): Unit = fixups += (() => fixup) def tryFixup(): Unit = { if (pendingSyms == 0 && pendingTpes == 0) { diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index c23d22efab..4c423e0bc2 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -664,7 +664,7 @@ trait Printers extends api.Printers { self: SymbolTable => def show(flags: FlagSet): String = { if (flags == NoFlags) nme.NoFlags.toString else { - val s_flags = new collection.mutable.ListBuffer[String] + val s_flags = new scala.collection.mutable.ListBuffer[String] def hasFlag(left: Long, right: Long): Boolean = (left & right) != 0 for (i <- 0 to 63 if hasFlag(flags, 1L << i)) s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index be55b72623..b04cf4ff9f 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ 
b/src/reflect/scala/reflect/internal/Symbols.scala @@ -28,7 +28,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => //protected var activeLocks = 0 /** Used for debugging only */ - //protected var lockedSyms = collection.immutable.Set[Symbol]() + //protected var lockedSyms = scala.collection.immutable.Set[Symbol]() /** Used to keep track of the recursion depth on locked symbols */ private var recursionTable = immutable.Map.empty[Symbol, Int] diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index b20c315a2a..5a6d6ce7c7 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -137,7 +137,7 @@ trait Trees extends api.Trees { self: SymbolTable => override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol) private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = { - val s = collection.mutable.LinkedHashSet[S]() + val s = scala.collection.mutable.LinkedHashSet[S]() def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S] for (t <- this) { addIfFree(t.symbol) diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala index 5ad82c513d..88ef8ae2a1 100644 --- a/src/scalacheck/org/scalacheck/Commands.scala +++ b/src/scalacheck/org/scalacheck/Commands.scala @@ -53,7 +53,7 @@ trait Commands extends Prop { * takes the current abstract state as parameter and returns a boolean * that says if the precondition is fulfilled or not. You can add several * conditions to the precondition list */ - val preConditions = new collection.mutable.ListBuffer[State => Boolean] + val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean] /** Returns all postconditions merged into a single function */ def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*) @@ -65,7 +65,7 @@ trait Commands extends Prop { * method. The postcondition function should return a Boolean (or * a Prop instance) that says if the condition holds or not. You can add several * conditions to the postConditions list. 
*/ - val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop] + val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop] } /** A command that binds its result for later use */ diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala index c40e4aa718..eeb5936086 100644 --- a/src/scalacheck/org/scalacheck/Pretty.scala +++ b/src/scalacheck/org/scalacheck/Pretty.scala @@ -96,7 +96,7 @@ object Pretty { } implicit def prettyTestRes(res: Test.Result) = Pretty { prms => - def labels(ls: collection.immutable.Set[String]) = + def labels(ls: scala.collection.immutable.Set[String]) = if(ls.isEmpty) "" else "> Labels of failing property: " / ls.mkString("\n") val s = res.status match { diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala index 16ac1940b2..4683c34a65 100644 --- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala +++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala @@ -30,7 +30,7 @@ trait CmdLineParser extends Parsers { trait StrOpt extends Opt[String] class OptMap { - private val opts = new collection.mutable.HashMap[Opt[_], Any] + private val opts = new scala.collection.mutable.HashMap[Opt[_], Any] def apply(flag: Flag): Boolean = opts.contains(flag) def apply[T](opt: Opt[T]): T = opts.get(opt) match { case None => opt.default diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala index 34f52a1e19..51a789e041 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala @@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) { /** Repeats this rule num times */ def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] { - val result = new collection.mutable.ArraySeq[A](num) + val result = new scala.collection.mutable.ArraySeq[A](num) // more compact using HoF but written this way so it's tail-recursive def rep(i : Int, in : S) : Result[S, Seq[A], X] = { if (i == num) Success(in, result) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala index 324e87435e..b1cc18f90b 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/package.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/package.scala @@ -2,8 +2,8 @@ package scala.tools.scalap package scalax package object rules { - implicit lazy val higherKinds = language.higherKinds - implicit lazy val postfixOps = language.postfixOps - implicit lazy val implicitConversions = language.implicitConversions - implicit lazy val reflectiveCalls = language.reflectiveCalls + implicit lazy val higherKinds = scala.language.higherKinds + implicit lazy val postfixOps = scala.language.postfixOps + implicit lazy val implicitConversions = scala.language.implicitConversions + implicit lazy val reflectiveCalls = scala.language.reflectiveCalls } diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala index 96530e2e94..45497665d7 100644 --- a/src/swing/scala/swing/package.scala +++ b/src/swing/scala/swing/package.scala @@ -14,8 +14,8 @@ package object swing { type Image = java.awt.Image type Font = java.awt.Font - implicit lazy val reflectiveCalls = language.reflectiveCalls - implicit lazy val implicitConversions = language.implicitConversions + implicit lazy val reflectiveCalls = scala.language.reflectiveCalls + 
implicit lazy val implicitConversions = scala.language.implicitConversions private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A] private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A]) -- cgit v1.2.3 From 5933b9f00a02783793456f2c3963d94552c83b43 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 16 Sep 2012 10:03:54 -0700 Subject: Reworking of annotation arg parser. Paradoxically it has less duplication but is longer. That's the way it goes sometimes. --- src/library/scala/runtime/ScalaRunTime.scala | 18 ++--- .../scala/reflect/runtime/JavaMirrors.scala | 76 +++++++++++++--------- 2 files changed, 57 insertions(+), 37 deletions(-) (limited to 'src/library/scala/runtime') diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 045fb5b252..bb7a26a8c5 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -15,6 +15,7 @@ import scala.collection.generic.{ Sorted } import scala.reflect.{ ClassTag, classTag } import scala.util.control.ControlThrowable import scala.xml.{ Node, MetaData } +import java.lang.{ Class => jClass } import java.lang.Double.doubleToLongBits import java.lang.reflect.{ Modifier, Method => JMethod } @@ -28,10 +29,10 @@ object ScalaRunTime { def isArray(x: Any, atLevel: Int): Boolean = x != null && isArrayClass(x.getClass, atLevel) - private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean = + private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) - def isValueClass(clazz: Class[_]) = clazz.isPrimitive() + def isValueClass(clazz: jClass[_]) = clazz.isPrimitive() def isTuple(x: Any) = x != null && tupleNames(x.getClass.getName) def isAnyVal(x: Any) = x match { case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true @@ -50,7 +51,7 @@ object ScalaRunTime { /** Return the class object representing an array with element class `clazz`. */ - def arrayClass(clazz: Class[_]): Class[_] = { + def arrayClass(clazz: jClass[_]): jClass[_] = { // newInstance throws an exception if the erasure is Void.TYPE. see SI-5680 if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] else java.lang.reflect.Array.newInstance(clazz, 0).getClass @@ -58,18 +59,19 @@ object ScalaRunTime { /** Return the class object representing elements in arrays described by a given schematic. */ - def arrayElementClass(schematic: Any): Class[_] = schematic match { - case cls: Class[_] => cls.getComponentType + def arrayElementClass(schematic: Any): jClass[_] = schematic match { + case cls: jClass[_] => cls.getComponentType case tag: ClassTag[_] => tag.runtimeClass - case _ => throw new UnsupportedOperationException("unsupported schematic %s (%s)".format(schematic, if (schematic == null) "null" else schematic.getClass)) + case _ => + throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})") } /** Return the class object representing an unboxed value type, * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler * rewrites expressions like 5.getClass to come here. 
*/ - def anyValClass[T <: AnyVal : ClassTag](value: T): Class[T] = - classTag[T].runtimeClass.asInstanceOf[Class[T]] + def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = + classTag[T].runtimeClass.asInstanceOf[jClass[T]] /** Retrieve generic array element */ def array_apply(xs: AnyRef, idx: Int): Any = xs match { diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 6977b43d73..9b46e2db61 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -136,6 +136,53 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym") private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}") + /** Helper functions for extracting typed values from a (Class[_], Any) + * representing an annotation argument. + */ + private object toAnnotArg { + val StringClass = classOf[String] + val ClassClass = classOf[jClass[_]] + object PrimitiveClass { def unapply(x: jClass[_]) = x.isPrimitive } + object EnumClass { def unapply(x: jClass[_]) = x.isEnum } + object ArrayClass { def unapply(x: jClass[_]) = x.isArray } + object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation } + + object ConstantArg { + def enumToSymbol(enum: Enum[_]): Symbol = + classToScala(enum.getClass).typeSignature.declaration(enum.name: TermName) + + def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match { + case (StringClass | PrimitiveClass(), value) => Some(value) + case (ClassClass, value: jClass[_]) => Some(classToScala(value).toType) + case (EnumClass(), value: Enum[_]) => Some(enumToSymbol(value)) + case _ => None + } + } + def apply(schemaAndValue: (jClass[_], Any)): ClassfileAnnotArg = schemaAndValue match { + case ConstantArg(value) => LiteralAnnotArg(Constant(value)) + case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(ScalaRunTime.arrayElementClass(clazz) -> x))) + case (AnnotationClass(), value: jAnnotation) => NestedAnnotArg(JavaAnnotationProxy(value)) + case _ => UnmappableAnnotArg + } + } + private case class JavaAnnotationProxy(jann: jAnnotation) extends AnnotationInfo { + override val atp: Type = classToScala(jann.annotationType).toType + override val args: List[Tree] = Nil + override def original: Tree = EmptyTree + override def setOriginal(t: Tree): this.type = throw new Exception("setOriginal inapplicable for " + this) + override def pos: Position = NoPosition + override def setPos(pos: Position): this.type = throw new Exception("setPos inapplicable for " + this) + override def toString = completeAnnotationToString(this) + + // todo. 
find out the exact order of assocs as they are written in the class file + // currently I'm simply sorting the methods to guarantee stability of the output + override lazy val assocs: List[(Name, ClassfileAnnotArg)] = ( + jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m => + (m.getName: TermName) -> toAnnotArg(m.getReturnType -> m.invoke(jann)) + ) + ) + } + def reflect[T: ClassTag](obj: T): InstanceMirror = new JavaInstanceMirror(obj) def reflectClass(cls: ClassSymbol): ClassMirror = { @@ -574,35 +621,6 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym * Note: If `sym` is a method or constructor, its parameter annotations are copied as well. */ private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) { - case class JavaAnnotationProxy(jann: jAnnotation) extends AnnotationInfo { - override val atp: Type = classToScala(jann.annotationType).toType - override val args: List[Tree] = Nil - override def original: Tree = EmptyTree - override def setOriginal(t: Tree): this.type = throw new Exception("setOriginal inapplicable for " + this) - override def pos: Position = NoPosition - override def setPos(pos: Position): this.type = throw new Exception("setPos inapplicable for " + this) - override def toString = completeAnnotationToString(this) - override def assocs: List[(Name, ClassfileAnnotArg)] = - // todo. find out the exact order of assocs as they are written in the class file - // currently I'm simply sorting the methods to guarantee stability of the output - jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m => { - def enumToSymbol(enum: Enum[_]): Symbol = - classToScala(enum.getClass).typeSignature.declaration(newTermName(enum.name)) - - def toAnnotArg(value: Any, schema: jClass[_]): ClassfileAnnotArg = schema match { - case primitive if primitive.isPrimitive => LiteralAnnotArg(Constant(value)) - case string if string == classOf[String] => LiteralAnnotArg(Constant(value)) - case clazz if clazz == classOf[jClass[_]] => LiteralAnnotArg(Constant(classToScala(value.asInstanceOf[jClass[_]]).toType)) - case enum if enum.isEnum => LiteralAnnotArg(Constant(enumToSymbol(value.asInstanceOf[Enum[_]]))) - case array if array.isArray => ArrayAnnotArg(value.asInstanceOf[Array[_]] map (x => toAnnotArg(x, ScalaRunTime.arrayElementClass(array)))) - case ann if ann.isAnnotation => NestedAnnotArg(JavaAnnotationProxy(value.asInstanceOf[jAnnotation])) - case _ => UnmappableAnnotArg - } - - newTermName(m.getName) -> toAnnotArg(m.invoke(jann), m.getReturnType) - }) - } - sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList } -- cgit v1.2.3
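A note for readers of the second patch: the new toAnnotArg helper in JavaMirrors.scala is built on boolean extractors over java.lang.Class values (PrimitiveClass, EnumClass, ArrayClass, AnnotationClass). The standalone sketch below is not part of either patch; ExtractorSketch, describe and the sample values are illustrative only, while the two extractor objects mirror the ones the patch defines. It shows the idiom in isolation: an object whose unapply returns Boolean can be used as a pattern such as ArrayClass(), which is what makes the case (clazz @ ArrayClass(), value: Array[_]) arm above read the way it does.

import java.lang.{ Class => jClass }

object ExtractorSketch {
  // Boolean extractors: an object whose `unapply(x): Boolean` lets `Foo()` be used
  // as a pattern that matches exactly when the predicate holds for `x`.
  object PrimitiveClass { def unapply(x: jClass[_]): Boolean = x.isPrimitive }
  object ArrayClass     { def unapply(x: jClass[_]): Boolean = x.isArray }

  // A toy analogue (hypothetical, simplified) of classifying a (schema, value) pair
  // by matching on properties of the schema class rather than chained if/else tests.
  def describe(schemaAndValue: (jClass[_], Any)): String = schemaAndValue match {
    case (PrimitiveClass(), value)               => "constant: " + value
    case (clazz @ ArrayClass(), value: Array[_]) => "array of " + clazz.getComponentType + " with " + value.length + " elements"
    case (clazz, value)                          => "unmappable: " + value + " (schema " + clazz + ")"
  }

  def main(args: Array[String]): Unit = {
    println(describe(classOf[Int] -> 42))                     // prints "constant: 42"
    println(describe(classOf[Array[Int]] -> Array(1, 2, 3)))  // prints "array of int with 3 elements"
  }
}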