author     Paul Phillips <paulp@improving.org>   2012-09-19 10:46:45 -0700
committer  Paul Phillips <paulp@improving.org>   2012-09-19 10:58:12 -0700
commit     3cec321f0541c1c2ccb44413676ba213cbfbfca4 (patch)
tree       6581af59f3e4c41bcb03ea143790f95f1a429583 /src
parent     d521ba28725fb8def570b0ebec4d9172e274c9c9 (diff)
parent     2f9f8d5d67441f6f1999d68b2b2e57f5451f8da7 (diff)
download   scala-3cec321f0541c1c2ccb44413676ba213cbfbfca4.tar.gz
           scala-3cec321f0541c1c2ccb44413676ba213cbfbfca4.tar.bz2
           scala-3cec321f0541c1c2ccb44413676ba213cbfbfca4.zip
Merge branch '2.10.x' into master
* 2.10.x: (51 commits)
  improved/fixed reflection docs based on comments
  Fixes SI-6354: improved error messages for Dynamic signature mismatches.
  Add RedBlackTree tests for take/drop/slice.
  Retain Ordering in drop/take/slice signatures.
  test case closes SI-5770
  clarify caveats of App trait
  Remove `@static` annotation from the library.
  New starr that does not depend on `@static`.
  improved reflection documentation
  pull request feedback
  SI-5692 better error message
  SI-5942 toolboxes now reset front ends
  SI-6287 fixes synthetic symbol clashes in toolbox
  Revert "Implement @static annotation on singleton object fields."
  Revert "WIP add private/lazy checks and a few tests."
  Revert "Fixes SI-6189."
  Revert "Fixes SI-6236."
  Revert "Fix SI-4581."
  Revert "Fix SI-6294."
  refactors java reflection tests
  ...

Conflicts:
  src/compiler/scala/tools/nsc/typechecker/Typers.scala
  src/partest/scala/tools/partest/package.scala
  src/reflect/scala/reflect/internal/Trees.scala
Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/Future.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/Evals.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/Infrastructure.scala | 4
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenSymbols.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 3
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Metalevels.scala | 4
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 4
-rw-r--r--  src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 8
-rw-r--r--  src/compiler/scala/reflect/reify/utils/SymbolTables.scala | 8
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/Compilers.scala | 2
-rw-r--r--  src/compiler/scala/tools/cmd/gen/AnyVals.scala | 3
-rw-r--r--  src/compiler/scala/tools/cmd/package.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 101
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/io/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 153
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 34
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 58
-rw-r--r--  src/compiler/scala/tools/nsc/util/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/FrontEnds.scala | 10
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBox.scala | 13
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 55
-rw-r--r--  src/compiler/scala/tools/reflect/package.scala | 2
-rw-r--r--  src/library/scala/App.scala | 10
-rw-r--r--  src/library/scala/Array.scala | 10
-rw-r--r--  src/library/scala/Product.scala | 2
-rw-r--r--  src/library/scala/StringContext.scala | 3
-rw-r--r--  src/library/scala/annotation/static.scala | 20
-rw-r--r--  src/library/scala/collection/GenMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 12
-rw-r--r--  src/library/scala/collection/GenSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/IndexedSeqLike.scala | 2
-rwxr-xr-x  src/library/scala/collection/IndexedSeqOptimized.scala | 5
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 4
-rw-r--r--  src/library/scala/collection/Iterator.scala | 5
-rw-r--r--  src/library/scala/collection/LinearSeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 5
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 2
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 2
-rwxr-xr-x  src/library/scala/collection/generic/FilterMonadic.scala | 2
-rw-r--r--  src/library/scala/collection/generic/ParFactory.scala | 2
-rw-r--r--  src/library/scala/collection/generic/SliceInterval.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/RedBlack.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 28
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/package.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/Builder.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 20
-rw-r--r--  src/library/scala/collection/mutable/HashMap.scala | 29
-rw-r--r--  src/library/scala/collection/mutable/HashSet.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 60
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 43
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashSet.scala | 85
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParMap.scala | 37
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashMap.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParIterable.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParMap.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParSeq.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParSet.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala | 35
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 16
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashTable.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParIterable.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMap.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSeq.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSet.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 14
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 7
-rw-r--r--  src/library/scala/compat/Platform.scala | 2
-rw-r--r--  src/library/scala/io/Codec.scala | 2
-rw-r--r--  src/library/scala/io/Position.scala | 2
-rw-r--r--  src/library/scala/language.scala | 8
-rw-r--r--  src/library/scala/math/Ordering.scala | 3
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 28
-rw-r--r--  src/library/scala/reflect/Manifest.scala | 4
-rwxr-xr-x  src/library/scala/reflect/NameTransformer.scala | 3
-rw-r--r--  src/library/scala/reflect/base/AnnotationInfos.scala | 44
-rw-r--r--  src/library/scala/reflect/base/Annotations.scala | 106
-rw-r--r--  src/library/scala/reflect/base/Attachments.scala | 32
-rw-r--r--  src/library/scala/reflect/base/Base.scala | 41
-rw-r--r--  src/library/scala/reflect/base/BuildUtils.scala | 5
-rw-r--r--  src/library/scala/reflect/base/Constants.scala | 16
-rw-r--r--  src/library/scala/reflect/base/Exprs.scala | 63
-rw-r--r--  src/library/scala/reflect/base/FlagSets.scala | 2
-rw-r--r--  src/library/scala/reflect/base/MirrorOf.scala | 23
-rw-r--r--  src/library/scala/reflect/base/Mirrors.scala | 14
-rw-r--r--  src/library/scala/reflect/base/Names.scala | 36
-rw-r--r--  src/library/scala/reflect/base/Positions.scala | 11
-rw-r--r--  src/library/scala/reflect/base/Scopes.scala | 17
-rw-r--r--  src/library/scala/reflect/base/StandardDefinitions.scala | 50
-rw-r--r--  src/library/scala/reflect/base/StandardNames.scala | 4
-rw-r--r--  src/library/scala/reflect/base/Symbols.scala | 27
-rw-r--r--  src/library/scala/reflect/base/TagInterop.scala | 16
-rw-r--r--  src/library/scala/reflect/base/TreeCreator.scala | 20
-rw-r--r--  src/library/scala/reflect/base/Trees.scala | 178
-rw-r--r--  src/library/scala/reflect/base/TypeCreator.scala | 20
-rw-r--r--  src/library/scala/reflect/base/TypeTags.scala | 180
-rw-r--r--  src/library/scala/reflect/base/Types.scala | 33
-rw-r--r--  src/library/scala/reflect/base/Universe.scala | 42
-rw-r--r--  src/library/scala/reflect/macros/internal/package.scala | 3
-rw-r--r--  src/library/scala/reflect/package.scala | 69
-rw-r--r--  src/library/scala/runtime/RichDouble.scala | 3
-rw-r--r--  src/library/scala/runtime/RichFloat.scala | 3
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 21
-rw-r--r--  src/library/scala/runtime/SeqCharSequence.scala | 3
-rw-r--r--  src/library/scala/runtime/StringAdd.scala | 5
-rw-r--r--  src/library/scala/runtime/StringFormat.scala | 6
-rw-r--r--  src/library/scala/runtime/Tuple2Zipped.scala | 2
-rw-r--r--  src/library/scala/runtime/Tuple3Zipped.scala | 2
-rw-r--r--  src/library/scala/sys/Prop.scala | 3
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/sys/process/ProcessBuilderImpl.scala | 4
-rw-r--r--  src/library/scala/sys/process/ProcessImpl.scala | 2
-rw-r--r--  src/library/scala/util/Sorting.scala | 5
-rw-r--r--  src/library/scala/util/automata/SubsetConstruction.scala | 4
-rw-r--r--  src/library/scala/util/control/NoStackTrace.scala | 3
-rw-r--r--  src/library/scala/util/hashing/Hashing.scala | 15
-rw-r--r--  src/library/scala/util/parsing/combinator/Parsers.scala | 2
-rw-r--r--  src/library/scala/xml/dtd/ContentModelParser.scala | 15
-rw-r--r--  src/library/scala/xml/dtd/Scanner.scala | 4
-rw-r--r--  src/library/scala/xml/factory/NodeFactory.scala | 2
-rw-r--r--  src/library/scala/xml/include/sax/XIncluder.scala | 2
-rwxr-xr-x  src/library/scala/xml/parsing/MarkupParser.scala | 4
-rw-r--r--  src/library/scala/xml/parsing/MarkupParserCommon.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/RunnerManager.scala | 1
-rw-r--r--  src/partest/scala/tools/partest/package.scala | 6
-rw-r--r--  src/reflect/scala/reflect/api/AnnotationInfos.scala | 27
-rw-r--r--  src/reflect/scala/reflect/api/Annotations.scala | 29
-rw-r--r--  src/reflect/scala/reflect/api/FrontEnds.scala | 4
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 17
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 9
-rw-r--r--  src/reflect/scala/reflect/api/Types.scala | 11
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala | 87
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 23
-rw-r--r--  src/reflect/scala/reflect/internal/Importers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/StdAttachments.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 2
-rw-r--r--  src/reflect/scala/reflect/macros/Infrastructure.scala | 21
-rw-r--r--  src/reflect/scala/reflect/macros/Parsers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/macros/Universe.scala | 4
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 153
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/package.scala | 3
-rw-r--r--  src/reflect/scala/tools/nsc/io/VirtualFile.scala | 2
-rw-r--r--  src/reflect/scala/tools/nsc/io/ZipArchive.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Commands.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/Pretty.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/util/CmdLineParser.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/package.scala | 8
-rw-r--r--  src/swing/scala/swing/package.scala | 4
197 files changed, 1655 insertions, 1226 deletions
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 0198e95ae1..fb7bb488a2 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -174,7 +174,7 @@ object Futures {
* or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new collection.mutable.HashMap[Int, Option[Any]]
+ var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
val mappedFts = fts.map(ft =>
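Note: this hunk, like many that follow, replaces a relative reference such as `collection.mutable.HashMap` with the fully qualified `scala.collection.mutable.HashMap`. A minimal sketch of why the relative form is fragile inside a `scala.*` package; the package and object names below are hypothetical and not part of this commit:

    // Inside a scala.* package, a bare `collection` is resolved against the
    // enclosing package chain, so any sibling member named `collection`
    // shadows the standard library; the fully qualified name never does.
    package scala.tools.demo {                    // hypothetical package
      object collection                           // shadows scala.collection here
      object Example {
        // val bad = collection.mutable.Map.empty[Int, Int]    // does not compile: `collection` is the object above
        val ok = scala.collection.mutable.Map.empty[Int, Int]  // always unambiguous
      }
    }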
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/runtime/Evals.scala
index 348e29cdd7..acafeb5b02 100644
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Evals.scala
@@ -13,6 +13,6 @@ trait Evals {
def eval[T](expr: Expr[T]): T = {
val imported = evalImporter.importTree(expr.tree)
- evalToolBox.runExpr(imported).asInstanceOf[T]
+ evalToolBox.eval(imported).asInstanceOf[T]
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
index 77ac8c6581..a8cc61e0f9 100644
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
@@ -33,8 +33,4 @@ trait Infrastructure {
}
val currentMacro: Symbol = expandee.symbol
-
- val globalCache: scala.collection.mutable.Map[Any, Any] = universe.analyzer.globalMacroCache
-
- val cache: scala.collection.mutable.Map[Any, Any] = universe.analyzer.perRunMacroCache.getOrElseUpdate(currentMacro, collection.mutable.Map[Any, Any]())
}
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
index e4acf104e3..5096526fdb 100644
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
@@ -12,7 +12,7 @@ trait Parsers {
// todo. provide decent implementation
try {
import scala.reflect.runtime.{universe => ru}
- val parsed = ru.rootMirror.mkToolBox().parseExpr(code)
+ val parsed = ru.rootMirror.mkToolBox().parse(code)
val importer = universe.mkImporter(ru)
importer.importTree(parsed)
} catch {
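The two hunks above move the macro runtime to the renamed toolbox entry points: `runExpr` becomes `eval` and `parseExpr` becomes `parse`. A minimal usage sketch of the renamed 2.10 toolbox API; the snippet is illustrative rather than part of the commit, and toolboxes need scala-compiler.jar on the classpath:

    import scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox           // brings mkToolBox onto mirrors

    val tb   = currentMirror.mkToolBox()
    val tree = tb.parse("1 + 41")                // formerly parseExpr
    println(tb.eval(tree))                       // formerly runExpr; prints 42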
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index 5f4296f54f..dec491aabe 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -50,6 +50,6 @@ trait GenAnnotationInfos {
// if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
- mirrorFactoryCall(nme.AnnotationInfo, reify(ann.atp), mkList(reifiedArgs), mkList(reifiedAssocs))
+ mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index c4b674955a..22a834d2e4 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -143,7 +143,7 @@ trait GenSymbols {
// produces valid Scala code (with vals in a block depending only on lexically preceding vals)
val reification = reificode(sym)
import reification.{name, binding}
- val tree = reification.tree addAttachment ReifyBindingAttachment(binding)
+ val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
state.symtab += (sym, name, tree)
}
fromSymtab
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 8aef8d772f..49877b4286 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -70,6 +70,9 @@ trait GenUtils {
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
+ def mkListMap(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.ListMap", args: _*)
+
/**
* An (unreified) path that refers to definition with given fully qualified name
* @param mkName Creator for last portion of name (either TermName or TypeName)
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index 1624bbe951..fbbd12a42f 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -102,7 +102,7 @@ trait Metalevels {
*/
val metalevels = new Transformer {
var insideSplice = false
- var inlineableBindings = collection.mutable.Map[TermName, Tree]()
+ var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
def withinSplice[T](op: => T) = {
val old = insideSplice
@@ -147,4 +147,4 @@ trait Metalevels {
super.transform(tree)
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 0b07c47c0f..baeea8cd9d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -250,7 +250,7 @@ trait Reshape {
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap
- val accessors = collection.mutable.Map[ValDef, List[DefDef]]()
+ val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]()
stats collect { case ddef: DefDef => ddef } foreach (defdef => {
val valdef = symdefs get defdef.symbol.accessedOrSelf collect { case vdef: ValDef => vdef } getOrElse null
if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
@@ -323,4 +323,4 @@ trait Reshape {
isSynthetic && isCaseCompanion
}))
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index ec1f132c1b..b2999c3c1c 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -40,7 +40,7 @@ trait NodePrinters {
})
s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
- val buf = new collection.mutable.ListBuffer[String]
+ val buf = new scala.collection.mutable.ListBuffer[String]
val annotations = m.group(3)
if (buf.nonEmpty || annotations != "")
@@ -73,10 +73,10 @@ trait NodePrinters {
s.trim
})
- val printout = collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]();
printout += universe.trim
if (mirrorIsUsed) printout += mirror.replace("MirrorOf[", "scala.reflect.base.MirrorOf[").trim
- val imports = collection.mutable.ListBuffer[String]();
+ val imports = scala.collection.mutable.ListBuffer[String]();
imports += nme.UNIVERSE_SHORT
// if (buildIsUsed) imports += nme.build
if (mirrorIsUsed) imports += nme.MIRROR_SHORT
@@ -94,7 +94,7 @@ trait NodePrinters {
if (isExpr) {
if (mirror contains ".getClassLoader") {
printout += "import scala.tools.reflect.ToolBox"
- printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().runExpr(tree))"
+ printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().eval(tree))"
} else {
printout += "println(tree)"
}
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index 2e17558f54..3ec43c863d 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -46,8 +46,8 @@ trait SymbolTables {
def symRef(sym: Symbol): Tree =
symtab.get(sym) match {
- case Some(FreeDef(_, name, binding, _, _)) => Ident(name) addAttachment binding
- case Some(SymDef(_, name, _, _)) => Ident(name) addAttachment ReifyBindingAttachment(Ident(sym))
+ case Some(FreeDef(_, name, binding, _, _)) => Ident(name) updateAttachment binding
+ case Some(SymDef(_, name, _, _)) => Ident(name) updateAttachment ReifyBindingAttachment(Ident(sym))
case None => EmptyTree
}
@@ -86,7 +86,7 @@ trait SymbolTables {
newTermName(fresh.newName(name))
}
val bindingAttachment = reification.attachments.get[ReifyBindingAttachment].get
- add(ValDef(NoMods, freshName(name0), TypeTree(), reification) addAttachment bindingAttachment)
+ add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
}
private def add(sym: Symbol, name: TermName): SymbolTable = {
@@ -203,7 +203,7 @@ trait SymbolTables {
result ++= cumulativeAliases.distinct filter (alias => alias._1 == sym && alias._2 != currtab.symName(sym)) map (alias => {
val canonicalName = currtab.symName(sym)
val aliasName = alias._2
- ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) addAttachment ReifyAliasAttachment(sym, aliasName)
+ ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) updateAttachment ReifyAliasAttachment(sym, aliasName)
})
result.toList
})
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index bb32149a75..7165474345 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -15,7 +15,7 @@ object Compilers extends scala.collection.DefaultMap[String, Compiler] {
val debug = false
- private val container = new collection.mutable.HashMap[String, Compiler]
+ private val container = new scala.collection.mutable.HashMap[String, Compiler]
def iterator = container.iterator
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 910804245b..6d652ffdfe 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -341,9 +341,6 @@ final val NaN = @boxed@.NaN
final val PositiveInfinity = @boxed@.POSITIVE_INFINITY
final val NegativeInfinity = @boxed@.NEGATIVE_INFINITY
-@deprecated("use @name@.MinPositiveValue instead", "2.9.0")
-final val Epsilon = MinPositiveValue
-
/** The negative number with the greatest (finite) absolute value which is representable
* by a @name@. Note that it differs from [[java.lang.@name@.MIN_VALUE]], which
* is the smallest positive value representable by a @name@. In Scala that number
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index 5be98a460a..8c6716be78 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -9,8 +9,8 @@ package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
// make some language features in this package compile without warning
- implicit def implicitConversions = language.implicitConversions
- implicit def postfixOps = language.postfixOps
+ implicit def implicitConversions = scala.language.implicitConversions
+ implicit def postfixOps = scala.language.postfixOps
private[cmd] def debug(msg: String) = println(msg)
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index acf0a8ee6f..3ccc595fb2 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -178,7 +178,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
case _ => super.xtraverse(traverser, tree)
}
- trait TreeCopier extends super.TreeCopierOps {
+ trait TreeCopier extends super.InternalTreeCopierOps {
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue
@@ -281,7 +281,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val trace = scala.tools.nsc.util.trace when debug
val locals = util.HashSet[Symbol](8)
- val orderedLocals = collection.mutable.ListBuffer[Symbol]()
+ val orderedLocals = scala.collection.mutable.ListBuffer[Symbol]()
def registerLocal(sym: Symbol) {
if (sym != null && sym != NoSymbol) {
if (debug && !(locals contains sym)) orderedLocals append sym
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 8c131850e9..d8dae0482b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -468,7 +468,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -1029,7 +1029,7 @@ self =>
val tok = in.token
val name = ident()
t = atPos(start) {
- if (tok == BACKQUOTED_IDENT) Ident(name) addAttachment BackquotedIdentifierAttachment
+ if (tok == BACKQUOTED_IDENT) Ident(name) updateAttachment BackquotedIdentifierAttachment
else Ident(name)
}
if (in.token == DOT) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index fb27033097..7961abc974 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -1302,7 +1302,7 @@ trait Scanners extends ScannersCommon {
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
init()
@@ -1317,7 +1317,7 @@ trait Scanners extends ScannersCommon {
var lineCount = 1
var lastOffset = 0
var indent = 0
- val oldBalance = collection.mutable.Map[Int, Int]()
+ val oldBalance = scala.collection.mutable.Map[Int, Int]()
def markBalance() = for ((k, v) <- balance) oldBalance(k) = v
markBalance()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 147e0eb23c..d4126f2786 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -121,44 +121,26 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- val staticfield = if (m.symbol.isAccessor && m.symbol.accessed.hasStaticAnnotation) {
- val compClass = m.symbol.owner.companionClass
- compClass.info.findMember(m.symbol.accessed.name, NoFlags, NoFlags, false)
- } else NoSymbol
- if (staticfield != NoSymbol) {
- // in companion object accessors to @static fields, we access the static field directly
- val hostClass = m.symbol.owner.companionClass
- if (m.symbol.isGetter) {
- ctx1.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos)
- ctx1.bb.closeWith(RETURN(m.returnType))
- } else if (m.symbol.isSetter) {
- ctx1.bb.emit(LOAD_LOCAL(m.locals.head), tree.pos)
- ctx1.bb.emit(STORE_FIELD(staticfield, true), tree.pos)
+ ctx1 = genLoad(rhs, ctx1, m.returnType);
+
+ // reverse the order of the local variables, to match the source-order
+ m.locals = m.locals.reverse
+
+ rhs match {
+ case Block(_, Return(_)) => ()
+ case Return(_) => ()
+ case EmptyTree =>
+ globalError("Concrete method has no definition: " + tree + (
+ if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+ else "")
+ )
+ case _ => if (ctx1.bb.isEmpty)
+ ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
+ else
ctx1.bb.closeWith(RETURN(m.returnType))
- } else assert(false, "unreachable")
- } else {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
-
- // reverse the order of the local variables, to match the source-order
- m.locals = m.locals.reverse
-
- rhs match {
- case Block(_, Return(_)) => ()
- case Return(_) => ()
- case EmptyTree =>
- globalError("Concrete method has no definition: " + tree + (
- if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
- else "")
- )
- case _ =>
- if (ctx1.bb.isEmpty)
- ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
- else
- ctx1.bb.closeWith(RETURN(m.returnType))
- }
- if (!ctx1.bb.closed) ctx1.bb.close
- prune(ctx1.method)
}
+ if (!ctx1.bb.closed) ctx1.bb.close
+ prune(ctx1.method)
} else
ctx1.method.setCode(NoCode)
ctx1
@@ -900,47 +882,6 @@ abstract class GenICode extends SubComponent {
generatedType = toTypeKind(fun.symbol.tpe.resultType)
ctx1
- case app @ Apply(fun @ Select(qual, _), args)
- if !ctx.method.symbol.isStaticConstructor
- && fun.symbol.isAccessor && fun.symbol.accessed.hasStaticAnnotation
- && qual.tpe.typeSymbol.orElse(fun.symbol.owner).companionClass != NoSymbol =>
- // bypass the accessor to the companion object and load the static field directly
- // this bypass is not done:
- // - if the static intializer for the static field itself
- // - if there is no companion class of the object owner - this happens in the REPL
- def genLoadApply5 = {
- val sym = fun.symbol
- generatedType = toTypeKind(sym.accessed.info)
- val hostOwner = qual.tpe.typeSymbol.orElse(sym.owner)
- val hostClass = hostOwner.companionClass
- val staticfield = hostClass.info.findMember(sym.accessed.name, NoFlags, NoFlags, false) orElse {
- if (!currentRun.compiles(hostOwner)) {
- // hostOwner was separately compiled -- the static field symbol needs to be recreated in hostClass
- import Flags._
- debuglog("recreating sym.accessed.name: " + sym.accessed.name)
- val objectfield = hostOwner.info.findMember(sym.accessed.name, NoFlags, NoFlags, false)
- val staticfield = hostClass.newVariable(newTermName(sym.accessed.name.toString), tree.pos, STATIC | SYNTHETIC | FINAL) setInfo objectfield.tpe
- staticfield.addAnnotation(definitions.StaticClass)
- hostClass.info.decls enter staticfield
- staticfield
- } else NoSymbol
- }
-
- if (sym.isGetter) {
- ctx.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos)
- ctx
- } else if (sym.isSetter) {
- val ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx)
- ctx1.bb.emit(STORE_FIELD(staticfield, true), tree.pos)
- ctx1.bb.emit(CONSTANT(Constant(false)), tree.pos)
- ctx1
- } else {
- assert(false, "supposedly unreachable")
- ctx
- }
- }
- genLoadApply5
-
case app @ Apply(fun, args) =>
def genLoadApply6 = {
val sym = fun.symbol
@@ -1728,12 +1669,8 @@ abstract class GenICode extends SubComponent {
* backend emits them as static).
* No code is needed for this module symbol.
*/
- for (
- f <- cls.info.decls;
- if !f.isMethod && f.isTerm && !f.isModule && !(f.owner.isModuleClass && f.hasStaticAnnotation)
- ) {
+ for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
ctx.clazz addField new IField(f)
- }
}
/**
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 99d019ecd2..7d93f41540 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1190,9 +1190,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
log(s"No forwarder for non-public member $m")
else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
- if (m.isAccessor && m.accessed.hasStaticAnnotation) {
- log("@static: accessor " + m + ", accessed: " + m.accessed)
- } else addForwarder(isRemoteClass, jclass, moduleClass, m)
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
}
}
}
@@ -1697,7 +1695,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
- computeLocalVarsIndex(m)
genCode(m, false, true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 98e55bf435..660e88b173 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1023,8 +1023,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
method = m
jmethod = clinitMethod
-
- computeLocalVarsIndex(m)
genCode(m)
case None =>
legacyStaticInitializer(cls, clinit)
@@ -1126,9 +1124,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- if (m.isAccessor && m.accessed.hasStaticAnnotation) {
- log("@static: accessor " + m + ", accessed: " + m.accessed)
- } else addForwarder(jclass, moduleClass, m)
+ addForwarder(jclass, moduleClass, m)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 8a39125014..9f3e7db7d0 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -879,9 +879,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeAnnotation(annot: AnnotationInfo): Annotation = {
+ def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
val aSym = annot.symbol
- new EntityImpl(aSym, makeTemplate(aSym.owner)) with Annotation {
+ new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
lazy val annotationClass =
makeTemplate(annot.symbol)
val arguments = { // lazy
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index 18a8eb5fc3..b5ae5f2d75 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
@@ -36,7 +36,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
/** Return all positions of the given str in the given source file. */
private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
- val buf = new collection.mutable.ListBuffer[Position]
+ val buf = new scala.collection.mutable.ListBuffer[Position]
var pos = source.content.indexOfSlice(str)
while (pos >= 0) {
buf += source.position(pos - 1) // we need the position before the first character of this marker
@@ -44,7 +44,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
}
buf.toList
}
-
+
private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
/** Return the filename:line:col version of this position. */
def showPos(pos: Position): String =
@@ -59,4 +59,4 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
println("ERROR: " + r)
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
index 89006be8e2..6a3a2a38ae 100644
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -35,7 +35,7 @@ package object interpreter extends ReplConfig with ReplStrings {
val IR = Results
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
import scala.collection.JavaConverters._
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 6385706830..775ad6bde0 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -11,7 +11,7 @@ import java.util.jar.{ Attributes }
import scala.language.implicitConversions
package object io {
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index b30969d451..29b238c4cb 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -69,7 +69,11 @@ abstract class Pickler extends SubComponent {
}
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
- unit.error(t.pos, "macro has not been expanded")
+ unit.error(t.pos, t.symbol.typeParams.length match {
+ case 0 => "macro has not been expanded"
+ case 1 => "type parameter not specified"
+ case _ => "type parameters not specified"
+ })
return
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index d69c8eb831..1f353bb31c 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -23,12 +23,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
- private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
- private val staticBodies = mutable.Map.empty[(Symbol, Symbol), Tree]
- private val syntheticClasses = mutable.Map.empty[Symbol, mutable.Set[Tree]] // package and trees
- private val classNames = mutable.Map.empty[Symbol, Set[Name]]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
private def clearStatics() {
newStaticMembers.clear()
newStaticInits.clear()
@@ -48,9 +45,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
result
}
private def transformTemplate(tree: Tree) = {
- val t @ Template(parents, self, body) = tree
+ val Template(parents, self, body) = tree
clearStatics()
-
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
try addStaticInits(templ) // postprocess to include static ctors
@@ -562,80 +558,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else tree
}
- case DefDef(mods, name, tps, vps, tp, rhs) if tree.symbol.hasStaticAnnotation =>
- reporter.error(tree.pos, "The @static annotation is not allowed on method definitions.")
- super.transform(tree)
-
- case ValDef(mods, name, tpt, rhs) if tree.symbol.hasStaticAnnotation =>
- def transformStaticValDef = {
- log("moving @static valdef field: " + name + ", in: " + tree.symbol.owner)
- val sym = tree.symbol
- val owner = sym.owner
-
- val staticBeforeLifting = atPhase(currentRun.erasurePhase) { owner.isStatic }
- val isPrivate = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(PRIVATE) }
- val isProtected = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(PROTECTED) }
- val isLazy = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(LAZY) }
- if (!owner.isModuleClass || !staticBeforeLifting) {
- if (!sym.isSynthetic) {
- reporter.error(tree.pos, "Only members of top-level objects and their nested objects can be annotated with @static.")
- tree.symbol.removeAnnotation(StaticClass)
- }
- super.transform(tree)
- } else if (isPrivate || isProtected) {
- reporter.error(tree.pos, "The @static annotation is only allowed on public members.")
- tree.symbol.removeAnnotation(StaticClass)
- super.transform(tree)
- } else if (isLazy) {
- reporter.error(tree.pos, "The @static annotation is not allowed on lazy members.")
- tree.symbol.removeAnnotation(StaticClass)
- super.transform(tree)
- } else if (owner.isModuleClass) {
- val linkedClass = owner.companionClass match {
- case NoSymbol =>
- // create the companion class if it does not exist
- val enclosing = owner.owner
- val compclass = enclosing.newClass(newTypeName(owner.name.toString))
- compclass setInfo ClassInfoType(List(ObjectClass.tpe), newScope, compclass)
- enclosing.info.decls enter compclass
-
- val compclstree = ClassDef(compclass, NoMods, ListOfNil, ListOfNil, List(), tree.pos)
-
- syntheticClasses.getOrElseUpdate(enclosing, mutable.Set()) += compclstree
-
- compclass
- case comp => comp
- }
-
- // create a static field in the companion class for this @static field
- val stfieldSym = linkedClass.newValue(newTermName(name), tree.pos, STATIC | SYNTHETIC | FINAL) setInfo sym.tpe
- if (sym.isMutable) stfieldSym.setFlag(MUTABLE)
- stfieldSym.addAnnotation(StaticClass)
-
- val names = classNames.getOrElseUpdate(linkedClass, linkedClass.info.decls.collect {
- case sym if sym.name.isTermName => sym.name
- } toSet)
- if (names(stfieldSym.name)) {
- reporter.error(
- tree.pos,
- "@static annotated field " + tree.symbol.name + " has the same name as a member of class " + linkedClass.name
- )
- } else {
- linkedClass.info.decls enter stfieldSym
-
- val initializerBody = rhs
-
- // static field was previously initialized in the companion object itself, like this:
- // staticBodies((linkedClass, stfieldSym)) = Select(This(owner), sym.getter(owner))
- // instead, we move the initializer to the static ctor of the companion class
- // we save the entire ValDef/DefDef to extract the rhs later
- staticBodies((linkedClass, stfieldSym)) = tree
- }
- }
- super.transform(tree)
- }
- transformStaticValDef
-
/* MSIL requires that the stack is empty at the end of a try-block.
* Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
* store their result in a local variable. The catch blocks are adjusted as well.
@@ -750,11 +672,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (newStaticInits.isEmpty)
template
else {
- val ctorBody = newStaticInits.toList flatMap {
- case Block(stats, expr) => stats :+ expr
- case t => List(t)
- }
-
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
@@ -763,15 +680,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
deriveDefDef(ctor) {
case block @ Block(stats, expr) =>
// need to add inits to existing block
- treeCopy.Block(block, ctorBody ::: stats, expr)
+ treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
case term: TermTree =>
// need to create a new block with inits and the old term
- treeCopy.Block(term, ctorBody, term)
+ treeCopy.Block(term, newStaticInits.toList, term)
}
case _ =>
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
- val rhs = Block(ctorBody, Literal(Constant(())))
+ val rhs = Block(newStaticInits.toList, Literal(Constant(())))
localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
@@ -779,62 +696,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- private def addStaticDeclarations(tree: Template, clazz: Symbol) {
- // add static field initializer statements for each static field in clazz
- if (!clazz.isModuleClass) for {
- staticSym <- clazz.info.decls
- if staticSym.hasStaticAnnotation
- } staticSym match {
- case stfieldSym if (stfieldSym.isValue && !stfieldSym.isMethod) || stfieldSym.isVariable =>
- log(stfieldSym + " is value: " + stfieldSym.isValue)
- val valdef = staticBodies((clazz, stfieldSym))
- val ValDef(_, _, _, rhs) = valdef
- val fixedrhs = rhs.changeOwner((valdef.symbol, clazz.info.decl(nme.CONSTRUCTOR)))
-
- val stfieldDef = localTyper.typedPos(tree.pos)(VAL(stfieldSym) === EmptyTree)
- val flattenedInit = fixedrhs match {
- case Block(stats, expr) => Block(stats, REF(stfieldSym) === expr)
- case rhs => REF(stfieldSym) === rhs
- }
- val stfieldInit = localTyper.typedPos(tree.pos)(flattenedInit)
-
- // add field definition to new defs
- newStaticMembers append stfieldDef
- newStaticInits append stfieldInit
- case _ => // ignore @static on other members
- }
- }
-
-
-
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- super.transformStats(stats, exprOwner) ++ {
- // flush pending synthetic classes created in this owner
- val synthclassdefs = syntheticClasses.get(exprOwner).toList.flatten
- syntheticClasses -= exprOwner
- synthclassdefs map {
- cdef => localTyper.typedPos(cdef.pos)(cdef)
- }
- } map {
- case clsdef @ ClassDef(mods, name, tparams, t @ Template(parent, self, body)) =>
- // process all classes in the package again to add static initializers
- clearStatics()
-
- addStaticDeclarations(t, clsdef.symbol)
-
- val templ = deriveTemplate(t)(_ => transformTrees(newStaticMembers.toList) ::: body)
- val ntempl =
- try addStaticInits(templ)
- finally clearStatics()
-
- val derived = deriveClassDef(clsdef)(_ => ntempl)
- classNames.remove(clsdef.symbol)
- derived
-
- case stat => stat
- }
- }
-
} // CleanUpTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index ff1225d291..1c03147c20 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -186,15 +186,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// before the superclass constructor call, otherwise it goes after.
// Lazy vals don't get the assignment in the constructor.
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (stat.symbol.hasStaticAnnotation) {
- debuglog("@static annotated field initialization skipped.")
- defBuf += deriveValDef(stat)(tree => tree)
- } else if (rhs != EmptyTree && !stat.symbol.isLazy) {
+ if (rhs != EmptyTree && !stat.symbol.isLazy) {
val rhs1 = intoConstructor(stat.symbol, rhs);
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
- defBuf += deriveValDef(stat)(_ => EmptyTree)
}
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
case ClassDef(_, _, _, _) =>
// classes are treated recursively, and left in the template
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 2c989af003..71d684ccd7 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -1095,7 +1095,7 @@ abstract class Erasure extends AddInterfaces
// println("inject derived: "+arg+" "+tpt.tpe)
val List(arg) = args
val attachment = new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
- InjectDerivedValue(arg) addAttachment attachment
+ InjectDerivedValue(arg) updateAttachment attachment
case _ =>
preEraseNormalApply(tree)
}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index b6d54f114e..c41ff20229 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -154,7 +154,7 @@ abstract class LambdaLift extends InfoTransform {
private def markCalled(sym: Symbol, owner: Symbol) {
debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
symSet(called, owner) addEntry sym
- if (sym.enclClass != owner.enclClass) calledFromInner addEntry sym
+ if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
/** The traverse function */
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index b1b930ca2d..0ad6d6c677 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -90,7 +90,7 @@ abstract class TailCalls extends Transform {
private val defaultReason = "it contains a recursive call not in tail position"
/** Has the label been accessed? Then its symbol is in this set. */
- private val accessed = new collection.mutable.HashSet[Symbol]()
+ private val accessed = new scala.collection.mutable.HashSet[Symbol]()
// `accessed` was stored as boolean in the current context -- this is no longer tenable
// with jumps to labels in tailpositions now considered in tailposition,
// a downstream context may access the label, and the upstream one will be none the wiser
@@ -373,7 +373,7 @@ abstract class TailCalls extends Transform {
// the labels all look like: matchEnd(x) {x}
// then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr)
class TailPosLabelsTraverser extends Traverser {
- val tailLabels = new collection.mutable.HashSet[Symbol]()
+ val tailLabels = new scala.collection.mutable.HashSet[Symbol]()
private var maybeTail: Boolean = true // since we start in the rhs of a DefDef
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index f9f61b10a4..6c3f436a0f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -106,7 +106,7 @@ trait ContextErrors {
else
s"$name extends Any, not AnyRef"
)
- if (isPrimitiveValueType(found)) "" else "\n" +
+ if (isPrimitiveValueType(found) || isTrivialTopType(tp)) "" else "\n" +
s"""|Note that $what.
|Such types can participate in value classes, but instances
|cannot appear in singleton types or in reference comparisons.""".stripMargin
@@ -512,9 +512,16 @@ trait ContextErrors {
def ApplyWithoutArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, fun.tpe+" does not take parameters")
+ // Dynamic
def DynamicVarArgUnsupported(tree: Tree, name: String) =
issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
+ def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
+ issueTypeError(PosAndMsgTypeError(err.errPos, err.errMsg +
+ s"\nerror after rewriting to $tree\npossible cause: maybe a wrong Dynamic method signature?"))
+ setError(tree)
+ }
+
//checkClassType
def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = {
issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix")
@@ -714,7 +721,8 @@ trait ContextErrors {
Some(EOL + stackTraceString(realex))
}
} catch {
- // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
+ // the code above tries various tricks to detect the relevant portion of the stack trace
+ // if these tricks fail, just fall back to uninformative, but better than nothing, getMessage
case NonFatal(ex) =>
macroLogVerbose("got an exception when processing a macro generated exception\n" +
"offender = " + stackTraceString(realex) + "\n" +
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 6a21639391..e8865964b0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -39,7 +39,7 @@ trait DestructureTypes {
private implicit def liftToTerm(name: String): TermName = newTermName(name)
- private val openSymbols = collection.mutable.Set[Symbol]()
+ private val openSymbols = scala.collection.mutable.Set[Symbol]()
private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
if (elems.isEmpty) wrapEmpty else list(elems map mkNode)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index e3f0756b6c..dd7f26861f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1334,7 +1334,7 @@ trait Implicits {
def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
- /** Materializes implicits of magic types (currently, manifests and tags).
+ /** Materializes implicits of predefined types (currently, manifests and tags).
* Will be replaced by implicit macros once we fix them.
*/
private def materializeImplicit(pt: Type): SearchResult =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 12766116d1..1d74501d86 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -525,8 +525,8 @@ trait Infer {
* and the code is not exactly readable.
*/
object AdjustedTypeArgs {
- val Result = collection.mutable.LinkedHashMap
- type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = scala.collection.mutable.LinkedHashMap
+ type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
(m collect {case (p, Some(a)) => (p, a)}).unzip ))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 7e9b288853..9adf86e44b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -49,9 +49,6 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import MacrosStats._
def globalSettings = global.settings
- val globalMacroCache = collection.mutable.Map[Any, Any]()
- val perRunMacroCache = perRunCaches.newMap[Symbol, collection.mutable.Map[Any, Any]]
-
/** `MacroImplBinding` and its companion module are responsible for
* serialization/deserialization of macro def -> impl bindings.
*
@@ -339,7 +336,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
}
- val paramCache = collection.mutable.Map[Symbol, Symbol]()
+ val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
def param(tree: Tree): Symbol =
paramCache.getOrElseUpdate(tree.symbol, {
val sym = tree.symbol
@@ -713,7 +710,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
var typechecked = typecheck("macro def return type", expanded, expectedTpe)
typechecked = typecheck("expected type", typechecked, pt)
- typechecked addAttachment MacroExpansionAttachment(expandee)
+ typechecked updateAttachment MacroExpansionAttachment(expandee)
} finally {
popMacroContext()
}
@@ -762,7 +759,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case (false, true) =>
macroLogLite("macro expansion is delayed: %s".format(expandee))
delayed += expandee -> undetparams
- expandee addAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(macroArgs(typer, expandee).c))
+ expandee updateAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(macroArgs(typer, expandee).c))
Delay(expandee)
case (false, false) =>
import typer.TyperErrorGen._
@@ -830,11 +827,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, collection.mutable.Set[Int]]
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
delayed.get(expandee).getOrElse {
- val calculated = collection.mutable.Set[Symbol]()
+ val calculated = scala.collection.mutable.Set[Symbol]()
expandee foreach (sub => {
def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
if (sub.symbol != null) traverse(sub.symbol)
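
The `addAttachment` call sites in this file become `updateAttachment`, which replaces any existing attachment of the same type rather than appending another one. A minimal sketch against the compiler-internal tree API, assuming scala-compiler on the classpath (`Note` and the object name are hypothetical):

    import scala.tools.nsc.{ Global, Settings }

    object AttachmentDemo {
      def main(args: Array[String]): Unit = {
        val settings = new Settings
        settings.usejavacp.value = true
        val global = new Global(settings)
        import global._

        case class Note(msg: String)           // hypothetical attachment payload
        val tree: Tree = Literal(Constant(42))
        tree.updateAttachment(Note("first"))
        tree.updateAttachment(Note("second"))  // replaces Note("first") rather than adding a duplicate
        println(tree.attachments.get[Note])    // Some(Note(second))
      }
    }
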
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 7f14216e76..df8eb9c6b9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -637,7 +637,7 @@ trait Namers extends MethodSynthesis {
MaxParametersCaseClassError(tree)
val m = ensureCompanionObject(tree, caseModuleDef)
- m.moduleClass.addAttachment(new ClassForCaseCompanionAttachment(tree))
+ m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
}
val hasDefault = impl.body exists {
case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
@@ -645,7 +645,7 @@ trait Namers extends MethodSynthesis {
}
if (hasDefault) {
val m = ensureCompanionObject(tree)
- m.addAttachment(new ConstructorDefaultsAttachment(tree, null))
+ m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
@@ -1172,7 +1172,7 @@ trait Namers extends MethodSynthesis {
// symbol will be re-entered in the scope but the default parameter will not.
val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
case Some(att) => att.defaultGetters += default
- case None => meth.addAttachment(new DefaultsOfLocalMethodAttachment(default))
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
}
}
} else if (baseHasDefault) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 7a035bb33b..c15c4613e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -1558,7 +1558,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var currId = 0
}
case class Test(cond: Cond, treeMaker: TreeMaker) {
- // private val reusedBy = new collection.mutable.HashSet[Test]
+ // private val reusedBy = new scala.collection.mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
@@ -1587,7 +1587,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
object EqualityCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
+ private val uniques = new scala.collection.mutable.HashMap[(Tree, Tree), EqualityCond]
def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
}
@@ -1596,7 +1596,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
object NonNullCond {
- private val uniques = new collection.mutable.HashMap[Tree, NonNullCond]
+ private val uniques = new scala.collection.mutable.HashMap[Tree, NonNullCond]
def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
def unapply(c: NonNullCond) = Some(c.testedPath)
}
@@ -1605,7 +1605,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
object TypeCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
+ private val uniques = new scala.collection.mutable.HashMap[(Tree, Type), TypeCond]
def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
}
@@ -1654,8 +1654,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
class TreeMakersToConds(val root: Symbol) {
// a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- private val pointsToBound = collection.mutable.HashSet(root)
- private val trees = collection.mutable.HashSet.empty[Tree]
+ private val pointsToBound = scala.collection.mutable.HashSet(root)
+ private val trees = scala.collection.mutable.HashSet.empty[Tree]
// the substitution that renames variables to variables in pointsToBound
private var normalize: Substitution = EmptySubstitution
@@ -1956,7 +1956,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
- val vars = new collection.mutable.HashSet[Var]
+ val vars = new scala.collection.mutable.HashSet[Var]
object gatherEqualities extends PropTraverser {
override def apply(p: Prop) = p match {
@@ -2261,7 +2261,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nextId = {_nextId += 1; _nextId}
def resetUniques() = {_nextId = 0; uniques.clear()}
- private val uniques = new collection.mutable.HashMap[Tree, Var]
+ private val uniques = new scala.collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
class Var(val path: Tree, staticTp: Type) extends AbsVar {
@@ -2273,7 +2273,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
@inline private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
// don't access until all potential equalities have been registered using registerEquality
- private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym]
+ private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym]
// when looking at the domain, we only care about types we can check at run time
val staticTpCheckable: Type = checkableType(staticTp)
@@ -2386,7 +2386,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
*/
- val excludedPair = new collection.mutable.HashSet[ExcludedPair]
+ val excludedPair = new scala.collection.mutable.HashSet[ExcludedPair]
case class ExcludedPair(a: Const, b: Const) {
override def equals(o: Any) = o match {
@@ -2440,7 +2440,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private var _nextValueId = 0
def nextValueId = {_nextValueId += 1; _nextValueId}
- private val uniques = new collection.mutable.HashMap[Type, Const]
+ private val uniques = new scala.collection.mutable.HashMap[Type, Const]
private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
@@ -2454,7 +2454,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
fresh
})
- private val trees = collection.mutable.HashSet.empty[Tree]
+ private val trees = scala.collection.mutable.HashSet.empty[Tree]
// hashconsing trees (modulo value-equality)
private[SymbolicMatchAnalysis] def uniqueTpForTree(t: Tree): Type =
@@ -2915,7 +2915,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
}
- private val uniques = new collection.mutable.HashMap[Var, VariableAssignment]
+ private val uniques = new scala.collection.mutable.HashMap[Var, VariableAssignment]
private def unique(variable: Var): VariableAssignment =
uniques.getOrElseUpdate(variable, {
val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
@@ -3034,8 +3034,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val testss = approximateMatchConservative(prevBinder, cases)
// interpret:
- val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
- val tested = new collection.mutable.HashSet[Cond]
+ val dependencies = new scala.collection.mutable.LinkedHashMap[Test, Set[Cond]]
+ val tested = new scala.collection.mutable.HashSet[Cond]
def storeDependencies(test: Test) = {
val cond = test.cond
@@ -3083,7 +3083,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// then, collapse these contiguous sequences of reusing tests
// store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
// replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ val reused = new scala.collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
var okToCall = false
val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
@@ -3317,7 +3317,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
var remainingCases = cases
- val collapsed = collection.mutable.ListBuffer.empty[CaseDef]
+ val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
// when some of collapsed cases (except for the default case itself) did not include an un-guarded case
// we'll need to emit a labeldef for the default case
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 0d12cad401..2306575d74 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -70,7 +70,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
acc
}
-
+
atPos(sel.pos)(Select(gen.mkAttributedThis(clazz), superAcc) setType sel.tpe)
}
@@ -287,16 +287,19 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// FIXME - this should be unified with needsProtectedAccessor, but some
// subtlety which presently eludes me is foiling my attempts.
val shouldEnsureAccessor = (
- currentClass.isTrait
+ currentClass.isTrait
&& sym.isProtected
&& sym.enclClass != currentClass
&& !sym.owner.isTrait
&& (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol))
+ && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && !needsProtectedAccessor(sym, tree.pos)
+ )
if (shouldEnsureAccessor) {
log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
ensureAccessor(sel)
- } else
+ }
+ else
mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
}
@@ -525,7 +528,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
)
true
}
- isCandidate && !host.isPackageClass && !isSelfType
+ def isJavaProtected = host.isTrait && sym.isJavaDefined && {
+ restrictionError(pos, unit,
+ s"""|$clazz accesses protected $sym inside a concrete trait method.
+ |Add an accessor in a class extending ${sym.enclClass} as a workaround.""".stripMargin
+ )
+ true
+ }
+ isCandidate && !host.isPackageClass && !isSelfType && !isJavaProtected
}
/** Return the innermost enclosing class C of referencingClass for which either
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index a58d73bd69..7b5a5144b0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1107,10 +1107,12 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
def applyPossible = {
def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- if ((mode & TAPPmode) != 0)
- tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if ((mode & TAPPmode) != 0)
+ tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
+ else
+ applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ )
}
if (tree.isType)
adaptType()
@@ -1981,18 +1983,21 @@ trait Typers extends Modes with Adaptations with Tags {
case PolyType(_, restpe) => restpe
case _ => NoType
}
-
+ def failStruct(what: String) =
+ fail(s"Parameter type in structural refinement may not refer to $what")
for (paramType <- tp.paramTypes) {
val sym = paramType.typeSymbol
if (sym.isAbstractType) {
if (!sym.hasTransOwner(meth.owner))
- fail("Parameter type in structural refinement may not refer to an abstract type defined outside that refinement")
+ failStruct("an abstract type defined outside that refinement")
else if (!sym.hasTransOwner(meth))
- fail("Parameter type in structural refinement may not refer to a type member of that refinement")
+ failStruct("a type member of that refinement")
}
+ if (sym.isDerivedValueClass)
+ failStruct("a user-defined value class")
if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- fail("Parameter type in structural refinement may not refer to the type of that refinement (self type)")
+ failStruct("the type of that refinement (self type)")
}
}
def typedUseCase(useCase: UseCase) {
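
The new `failStruct` helper above centralizes the wording of these restrictions and adds the value-class case. A user-level sketch of what the check accepts and rejects (names hypothetical):

    import scala.language.reflectiveCalls

    class Meters(val value: Double) extends AnyVal   // a user-defined value class

    object StructuralDemo {
      // Accepted: the refinement member's parameter type is an ordinary type.
      def greet(obj: { def hello(name: String): String }): String = obj.hello("scala")

      // Rejected if uncommented, with "Parameter type in structural refinement
      // may not refer to a user-defined value class":
      // def scale(obj: { def times(m: Meters): Meters }) = obj.times(new Meters(2.0))
    }
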
@@ -2479,7 +2484,7 @@ trait Typers extends Modes with Adaptations with Tags {
match_ setType B1.tpe
// the default uses applyOrElse's first parameter since the scrut's type has been widened
- val body = methodBodyTyper.virtualizedMatch(match_ addAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
DefDef(methodSym, body)
}
@@ -2497,7 +2502,7 @@ trait Typers extends Modes with Adaptations with Tags {
methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe)
- val body = methodBodyTyper.virtualizedMatch(match_ addAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
DefDef(methodSym, body)
}
@@ -2619,7 +2624,7 @@ trait Typers extends Modes with Adaptations with Tags {
// todo. investigate whether something can be done about this
val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
- templ addAttachment att.copy(stats = stats1)
+ templ updateAttachment att.copy(stats = stats1)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -3409,7 +3414,7 @@ trait Typers extends Modes with Adaptations with Tags {
else argss.head
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
- val names = new collection.mutable.HashSet[Symbol]
+ val names = new scala.collection.mutable.HashSet[Symbol]
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
val nvPairs = args map {
@@ -3623,8 +3628,8 @@ trait Typers extends Modes with Adaptations with Tags {
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
- var localSyms = collection.immutable.Set[Symbol]()
- var boundSyms = collection.immutable.Set[Symbol]()
+ var localSyms = scala.collection.immutable.Set[Symbol]()
+ var boundSyms = scala.collection.immutable.Set[Symbol]()
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
@@ -3797,7 +3802,8 @@ trait Typers extends Modes with Adaptations with Tags {
case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs))
case _ => gen.mkTuple(List(CODE.LIT(""), arg))
}
- typed(treeCopy.Apply(orig, fun, args map argToBinding), mode, pt)
+ val t = treeCopy.Apply(orig, fun, args map argToBinding)
+ wrapErrors(t, _.typed(t, mode, pt))
}
/** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic.
@@ -3838,11 +3844,15 @@ trait Typers extends Modes with Adaptations with Tags {
}
@inline def hasNamedArg(as: List[Tree]) = as.collectFirst{case AssignOrNamedArg(lhs, rhs) =>}.nonEmpty
+ def desugaredApply = tree match {
+ case Select(`qual`, nme.apply) => true
+ case _ => false
+ }
// note: context.tree includes at most one Apply node
// thus, we can't use it to detect we're going to receive named args in expressions such as:
// qual.sel(a)(a2, arg2 = "a2")
val oper = outer match {
- case Apply(`tree`, as) =>
+ case Apply(q, as) if q == tree || desugaredApply =>
val oper =
if (hasNamedArg(as)) nme.applyDynamicNamed
else nme.applyDynamic
@@ -3861,6 +3871,13 @@ trait Typers extends Modes with Adaptations with Tags {
atPos(qual.pos)(Apply(tappSel, List(Literal(Constant(name.decode)))))
}
}
+
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
+ silent(typeTree) match {
+ case SilentResultValue(r) => r
+ case SilentTypeError(err) => DynamicRewriteError(tree, err)
+ }
+ }
}
@inline final def deindentTyping() = context.typingIndentLevel -= 2
@@ -4054,7 +4071,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
- typed1(Apply(lhs1, List(rhs1)), mode, pt)
+ val t = Apply(lhs1, List(rhs1))
+ dyna.wrapErrors(t, _.typed1(t, mode, pt))
}
else fail()
}
@@ -4540,7 +4558,9 @@ trait Typers extends Modes with Adaptations with Tags {
t
}
def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = {
- def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map (typed1(_, mode, pt))
+ def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
+ dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
+ }
val sym = tree.symbol orElse member(qual, name) orElse {
// symbol not found? --> try to convert implicitly to a type that does have the required
@@ -4961,7 +4981,7 @@ trait Typers extends Modes with Adaptations with Tags {
//Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id)
val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos)
newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ)
- templ addAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
+ templ updateAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
tree setType self
}
}
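
The `dyna.wrapErrors` calls introduced in this file typecheck the rewritten `selectDynamic`/`applyDynamic`/`updateDynamic` calls in a silent context and report failures via `DynamicRewriteError`. For reference, a sketch of the rewrites being typechecked (the `Config` class is hypothetical):

    import scala.language.dynamics

    class Config extends Dynamic {
      private val values = Map("host" -> "localhost", "port" -> "8080")
      def selectDynamic(name: String): String =
        values.getOrElse(name, sys.error(s"no setting named $name"))
      def updateDynamic(name: String)(value: String): Unit = ()   // accept and discard writes
    }

    object DynamicDemo {
      def main(args: Array[String]): Unit = {
        val c = new Config
        println(c.host)    // typed as c.selectDynamic("host")
        c.timeout = "30s"  // typed as c.updateDynamic("timeout")("30s")
      }
    }
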
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 876fb18578..780e3eab88 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -9,7 +9,7 @@ import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter,
package object util {
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
// forwarder for old code that builds against 2.9 and 2.10
val Chars = scala.reflect.internal.Chars
diff --git a/src/compiler/scala/tools/reflect/FrontEnds.scala b/src/compiler/scala/tools/reflect/FrontEnds.scala
index d8f07fb2e5..d0c3c1c774 100644
--- a/src/compiler/scala/tools/reflect/FrontEnds.scala
+++ b/src/compiler/scala/tools/reflect/FrontEnds.scala
@@ -36,6 +36,16 @@ trait FrontEnds extends scala.reflect.api.FrontEnds {
def displayPrompt(): Unit =
frontEnd.interactive()
+
+ override def flush(): Unit = {
+ super.flush()
+ frontEnd.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ frontEnd.reset()
+ }
}
def wrapFrontEnd(frontEnd: FrontEnd): Reporter = new FrontEndToReporterProxy(frontEnd) {
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index 85e2b6543f..9e7d230a6a 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -80,18 +80,23 @@ trait ToolBox[U <: Universe] {
def resetLocalAttrs(tree: u.Tree): u.Tree
/** .. */
- def parseExpr(code: String): u.Tree
+ def parse(code: String): u.Tree
- /** Compiles and runs a tree using this ToolBox.
+ /** Compiles a tree using this ToolBox.
*
* If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols),
* then they all have to be resolved first using `Tree.substituteTypes`, or an error occurs.
*
* This spawns the compiler at the Namer phase, and pipelines the tree through that compiler.
- * Currently `runExpr` does not accept trees that already typechecked, because typechecking isn't idempotent.
+   * Currently `compile` does not accept trees that are already typechecked, because typechecking isn't idempotent.
* For more info, take a look at https://issues.scala-lang.org/browse/SI-5464.
*/
- def runExpr(tree: u.Tree): Any
+ def compile(tree: u.Tree): () => Any
+
+ /** Compiles and runs a tree using this ToolBox.
+ * Is equivalent to `compile(tree)()`.
+ */
+ def eval(tree: u.Tree): Any
}
/** Represents an error during toolboxing
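
A usage sketch of the renamed interface, assuming the usual `scala.tools.reflect.ToolBox` enrichment for runtime mirrors and scala-compiler on the classpath (the object name is hypothetical):

    import scala.reflect.runtime.{ currentMirror => cm }
    import scala.tools.reflect.ToolBox

    object ToolBoxDemo {
      def main(args: Array[String]): Unit = {
        val tb    = cm.mkToolBox()
        val tree  = tb.parse("(1 to 3).map(_ * 2)")   // was parseExpr
        val thunk = tb.compile(tree)                  // compiles once, returns () => Any
        println(thunk())                              // Vector(2, 4, 6)
        println(tb.eval(tree))                        // equivalent to tb.compile(tree)()
      }
    }
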
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index b658491294..091224c88a 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -47,7 +47,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
newTermName("__wrapper$" + wrapCount + "$" + java.util.UUID.randomUUID.toString.replace("-", ""))
}
- def verifyExpr(expr: Tree): Unit = {
+ def verify(expr: Tree): Unit = {
// Previously toolboxes used to typecheck their inputs before compiling.
// Actually, the initial demo by Martin first typechecked the reified tree,
// then ran it, which typechecked it again, and only then launched the
@@ -70,9 +70,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
}
- def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
+ def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
val freeTerms = expr0.freeTerms
- val freeTermNames = collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
+ val freeTermNames = scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
freeTerms foreach (ft => {
var name = ft.name.toString
val namesakes = freeTerms takeWhile (_ != ft) filter (ft2 => ft != ft2 && ft.name == ft2.name)
@@ -97,7 +97,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def transformDuringTyper(expr0: Tree, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
- verifyExpr(expr0)
+ verify(expr0)
// need to wrap the expr, because otherwise you won't be able to typecheck macros against something that contains free vars
var (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = false)
@@ -140,7 +140,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
unwrapped
}
- def typeCheckExpr(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
+ def typeCheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
@@ -170,10 +170,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
})
- def compileExpr(expr: Tree): (Object, java.lang.reflect.Method) = {
- verifyExpr(expr)
+ def compile(expr: Tree): () => Any = {
+ val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
+ val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
+ verify(expr)
- def wrapExpr(expr0: Tree): Tree = {
+ def wrap(expr0: Tree): ModuleDef = {
val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
@@ -211,11 +213,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
var cleanedUp = resetLocalAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
- cleanedUp
+ cleanedUp.asInstanceOf[ModuleDef]
}
- val mdef = wrapExpr(expr)
- val pdef = PackageDef(Ident(nme.EMPTY_PACKAGE_NAME), List(mdef))
+ val mdef = wrap(expr)
+ val pdef = PackageDef(Ident(mdef.name), List(mdef))
val unit = new CompilationUnit(NoSourceFile)
unit.body = pdef
@@ -231,12 +233,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get
val singleton = jfield.get(null)
- (singleton, jmeth)
- }
-
- def runExpr(expr: Tree): Any = {
- val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
- val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
// @odersky writes: Not sure we will be able to drop this. I forgot the reason why we dereference () functions,
// but there must have been one. So I propose to leave old version in comments to be resurrected if the problem resurfaces.
@@ -250,13 +246,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// val applyMeth = result.getClass.getMethod("apply")
// applyMeth.invoke(result)
// }
- val (singleton, jmeth) = compileExpr(expr)
- val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
- if (jmeth.getReturnType == java.lang.Void.TYPE) ()
- else result
+ () => {
+ val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
+ if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else result
+ }
}
- def parseExpr(code: String): Tree = {
+ def parse(code: String): Tree = {
val run = new Run
reporter.reset()
val wrappedCode = "object wrapper {" + EOL + code + EOL + "}"
@@ -336,7 +333,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
var cexpectedType: compiler.Type = importer.importType(expectedType)
if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
- val ttree: compiler.Tree = compiler.typeCheckExpr(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
+ val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
val uttree = exporter.importTree(ttree)
uttree
}
@@ -379,20 +376,22 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
- def parseExpr(code: String): u.Tree = {
+ def parse(code: String): u.Tree = {
if (compiler.settings.verbose.value) println("parsing "+code)
- val ctree: compiler.Tree = compiler.parseExpr(code)
+ val ctree: compiler.Tree = compiler.parse(code)
val utree = exporter.importTree(ctree)
utree
}
- def runExpr(tree: u.Tree): Any = {
+ def compile(tree: u.Tree): () => Any = {
if (compiler.settings.verbose.value) println("importing "+tree)
var ctree: compiler.Tree = importer.importTree(tree)
- if (compiler.settings.verbose.value) println("running "+ctree)
- compiler.runExpr(ctree)
+ if (compiler.settings.verbose.value) println("compiling "+ctree)
+ compiler.compile(ctree)
}
+
+ def eval(tree: u.Tree): Any = compile(tree)()
}
}
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index d5569e448d..901071d91a 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -27,7 +27,7 @@ package reflect {
def eval: T = {
val factory = new ToolBoxFactory[JavaUniverse](expr.mirror.universe) { val mirror = expr.mirror.asInstanceOf[this.u.Mirror] }
val toolBox = factory.mkToolBox()
- toolBox.runExpr(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
+ toolBox.eval(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
}
}
}
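
A tiny sketch of the enrichment above, assuming it is importable as `scala.tools.reflect.Eval` and that scala-compiler is on the classpath:

    import scala.reflect.runtime.universe._
    import scala.tools.reflect.Eval

    object ExprEvalDemo {
      def main(args: Array[String]): Unit =
        println(reify(21 * 2).eval)   // 42, evaluated through a freshly created ToolBox
    }
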
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 85d2f9075e..a1e5e74e2f 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -22,6 +22,16 @@ import scala.collection.mutable.ListBuffer
*
* `args` returns the current command line arguments as an array.
*
+ * ==Caveats==
+ *
+ * '''''It should be noted that this trait is implemented using the [[DelayedInit]]
+ * functionality, which means that fields of the object will not have been initialized
+ * before the main method is executed.'''''
+ *
+ * It should also be noted that the `main` method will not normally need to be overridden:
+ * the purpose is to turn the whole class body into the “main method”. You should only
+ * choose to override it if you know what you are doing.
+ *
* @author Martin Odersky
* @version 2.1, 15/02/2011
*/
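
A small sketch of the caveat being documented (names hypothetical):

    object EchoArgs extends App {
      val banner = "argument:"              // assigned only when main runs, via delayedInit
      args.foreach(a => println(s"$banner $a"))
    }

    object Elsewhere {
      // Reading EchoArgs.banner before EchoArgs.main has run observes null,
      // because the fields are initialized by the delayed body, not the constructor.
      def peek(): String = EchoArgs.banner
    }
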
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index c61a255e3b..0b8550be37 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -48,6 +48,16 @@ class FallbackArrayBuilding {
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
+ val emptyBooleanArray = new Array[Boolean](0)
+ val emptyByteArray = new Array[Byte](0)
+ val emptyCharArray = new Array[Char](0)
+ val emptyDoubleArray = new Array[Double](0)
+ val emptyFloatArray = new Array[Float](0)
+ val emptyIntArray = new Array[Int](0)
+ val emptyLongArray = new Array[Long](0)
+ val emptyShortArray = new Array[Short](0)
+ val emptyObjectArray = new Array[Object](0)
+
implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
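
The new vals are shared zero-length instances, so callers can avoid allocating a fresh empty array each time; a quick sketch:

    object EmptyArrayDemo {
      def main(args: Array[String]): Unit = {
        val a = Array.emptyIntArray
        val b = Array.emptyIntArray
        println(a.length)  // 0
        println(a eq b)    // true: the same preallocated instance every time
      }
    }
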
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 8c42c60d98..2c6838f6b3 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -35,7 +35,7 @@ trait Product extends Any with Equals {
/** An iterator over all the elements of this product.
* @return in the default implementation, an `Iterator[Any]`
*/
- def productIterator: Iterator[Any] = new collection.AbstractIterator[Any] {
+ def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] {
private var c: Int = 0
private val cmax = productArity
def hasNext = c < cmax
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 678ab30168..8b478f6845 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -96,7 +96,8 @@ case class StringContext(parts: String*) {
* string literally. This is achieved by replacing each such occurrence by the
* format specifier `%%`.
*/
- // The implementation is magically hardwired into `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+  // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`,
+  // using the mechanism implemented in `scala.tools.reflect.FastTrack`.
def f(args: Any*): String = ??? // macro
}
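
For reference, a usage sketch of the `f` interpolator whose implementation the comment points to:

    object FInterpolatorDemo {
      def main(args: Array[String]): Unit = {
        val name   = "James"
        val height = 1.9d
        // A format specifier follows each interpolated value; %% yields a literal percent sign.
        println(f"$name%s is $height%2.2f meters tall")   // James is 1.90 meters tall
      }
    }
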
diff --git a/src/library/scala/annotation/static.scala b/src/library/scala/annotation/static.scala
deleted file mode 100644
index f2955c756c..0000000000
--- a/src/library/scala/annotation/static.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that marks a member in the companion object as static
- * and ensures that the compiler generates static fields/methods for it.
- * This is important for Java interoperability and performance reasons.
- *
- * @since 2.10
- */
-final class static extends StaticAnnotation {
- // TODO document exact semantics above!
-}
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index b6c90d4d2a..3ea45e3810 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -31,7 +31,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
// This hash code must be symmetric in the contents but ought not
// collide trivially.
- override def hashCode() = util.hashing.MurmurHash3.mapHash(seq)
+  override def hashCode() = scala.util.hashing.MurmurHash3.mapHash(seq)
/** Returns the value associated with a key, or a default value if the key is not contained in the map.
* @param key the key.
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index a77cb05960..6380e9380a 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -116,7 +116,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * @return the index of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
@@ -132,12 +132,12 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
- * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
* @inheritdoc
- *
+ *
* $mayNotTerminateInf
*
*/
@@ -147,7 +147,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * @return the index of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
@@ -163,7 +163,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @param end the end index.
* @tparam B the type of the element `elem`.
- * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
@@ -465,7 +465,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Hashcodes for $Coll produce a value from the hashcodes of all the
* elements of the $coll.
*/
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq)
+  override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq)
/** The equals method for arbitrary sequences. Compares this sequence to
* some other object.
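
A quick sketch of the three methods whose documentation is reworded above:

    object IndexOfDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3, 2)
        println(xs.indexOf(2))      // 1 - first element equal (as determined by ==) to 2
        println(xs.indexOf(2, 2))   // 3 - first index >= 2 holding such an element
        println(xs.lastIndexOf(2))  // 3 - last element equal to 2
      }
    }
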
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index 18eb31da03..ef5f14ed55 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -127,5 +127,5 @@ extends GenIterableLike[A, Repr]
// Calling map on a set drops duplicates: any hashcode collisions would
// then be dropped before they can be added.
// Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.hashing.MurmurHash3.setHash(seq)
+  override def hashCode() = scala.util.hashing.MurmurHash3.setHash(seq)
}
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index f79a9d2c66..3858d60563 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -41,7 +41,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
self =>
def seq: IndexedSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
+  override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 9d03a11db9..b471c304ab 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.ArrayBuffer
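
Several files in this patch switch from a single `package scala.collection` clause to the chained form, and bare `collection.*` / `util.*` references elsewhere are spelled out as `scala.collection.*` / `scala.util.*`. The relevant language rule is that chained (or nested) package clauses bring the members of every enclosing package into scope, while a single dotted clause does not; a standalone sketch with hypothetical packages:

    package outer {
      package util {
        object Helper { val name = "outer.util.Helper" }
      }
      package inner {
        object Chained {
          // Members of `outer` are in scope here, so the short reference resolves:
          val short = util.Helper.name
          // The fully qualified form does not depend on what happens to be in scope:
          val full  = outer.util.Helper.name
        }
      }
    }
    // In a separate file declared as `package outer.inner` (one dotted clause),
    // `outer`'s members are not in scope, and a bare `util` would instead pick up
    // scala.util from the default imports.
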
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 7ef3b39576..ead5633e00 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-package scala.collection
-
+package scala
+package collection
import generic._
import immutable.{ List, Stream }
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 9989a8d9e8..e12b8d231c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ArrayBuffer
import scala.annotation.migration
@@ -758,7 +759,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*
* @param elem the element to test.
* @return `true` if this iterator produces some value that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * is equal (as determined by `==`) to `elem`, `false` otherwise.
* @note Reuse: $consumesIterator
*/
def contains(elem: Any): Boolean = exists(_ == elem)
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 8e72c5618c..b873ae964d 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -50,7 +50,7 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
def seq: LinearSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
+  override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
override /*IterableLike*/
def iterator: Iterator[A] = new AbstractIterator[A] {
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 3388d584db..9363d4ec94 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -6,13 +6,14 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.Ordering
+import scala.math.{ min, max, Ordering }
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 411d17e935..f5e479a514 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -501,7 +501,7 @@ trait TraversableLike[+A, +Repr] extends Any
else sliceWithKnownDelta(n, Int.MaxValue, -n)
def slice(from: Int, until: Int): Repr =
- sliceWithKnownBound(math.max(from, 0), until)
+ sliceWithKnownBound(scala.math.max(from, 0), until)
// Precondition: from >= 0, until > 0, builder already configured for building.
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 7609910b65..070497c19e 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -916,7 +916,7 @@ object TrieMap extends MutableMapFactory[TrieMap] {
def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V]
class MangledHashing[K] extends Hashing[K] {
- def hash(k: K) = util.hashing.byteswap32(k.##)
+    def hash(k: K) = scala.util.hashing.byteswap32(k.##)
}
}
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index d79112d616..cebb4e69d3 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -14,7 +14,7 @@ package scala.collection.generic
*/
trait FilterMonadic[+A, +Repr] extends Any {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 9624aafd06..6b59b6671c 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -24,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]
extends GenTraversableFactory[CC]
with GenericParCompanion[CC] {
- //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C]
/** A generic implementation of the `CanCombineFrom` trait, which forwards
* all calls to `apply(from)` to the `genericParBuilder` method of the $coll
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 56033ca8d8..af56d06d60 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -32,7 +32,7 @@ private[collection] class SliceInterval private (val from: Int, val until: Int)
*/
def recalculate(_from: Int, _until: Int): SliceInterval = {
val lo = _from max 0
- val elems = math.min(_until - lo, width)
+ val elems = scala.math.min(_until - lo, width)
val start = from + lo
if (elems <= 0) new SliceInterval(from, from)
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 01372aa618..a6e750e7ee 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 03af4deaae..2ebeb044fc 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.{ uncheckedVariance => uV }
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index e895c94599..d0f6b4b3ac 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index d3402e16a2..83da68eb68 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -229,7 +229,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* }}}
*/
override def slice(from: Int, until: Int): List[A] = {
- val lo = math.max(from, 0)
+ val lo = scala.math.max(from, 0)
if (until <= lo || isEmpty) Nil
else this drop lo take (until - lo)
}
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 002027b162..4899b45d5f 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
@@ -298,7 +299,7 @@ extends AbstractMap[Long, T]
if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
- case LongMap.Tip(key2, value2) =>
+ case LongMap.Tip(key2, value2) =>
if (key == key2) LongMap.Tip(key, f(value2, value))
else join(key, LongMap.Tip(key, value), key2, this)
case LongMap.Nil => LongMap.Tip(key, value)
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 8975b440d2..3b4bfdc593 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -99,7 +99,7 @@ object PagedSeq {
/** Constructs a paged character sequence from a scala.io.Source value
*/
- def fromSource(source: io.Source) =
+ def fromSource(source: scala.io.Source) =
fromLines(source.getLines())
}
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 83eeaa45ee..a3ab27f814 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 9eeebb641e..bb489dd80a 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.tailrec
@@ -131,6 +132,15 @@ object RedBlackTree {
else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
else tree
}
+ private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1, overwrite: Boolean): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val rank = count(tree.left) + 1
+ if (idx < rank) balanceLeft(isBlackTree(tree), tree.key, tree.value, updNth(tree.left, idx, k, v, overwrite), tree.right)
+ else if (idx > rank) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, updNth(tree.right, idx - rank, k, v, overwrite))
+ else if (overwrite) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
+ }
/* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
* http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
@@ -248,27 +258,27 @@ object RedBlackTree {
else rebalance(tree, newLeft, newRight)
}
- private[this] def doDrop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
if (n <= 0) return tree
if (n >= this.count(tree)) return null
val count = this.count(tree.left)
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
- private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
if (n <= 0) return null
if (n >= this.count(tree)) return tree
val count = this.count(tree.left)
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
- private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
+ private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
if (tree eq null) return null
val count = this.count(tree.left)
if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1)
@@ -276,8 +286,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
- else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
@@ -379,7 +389,7 @@ object RedBlackTree {
@(inline @getter) final val left: Tree[A, B],
@(inline @getter) final val right: Tree[A, B])
extends Serializable {
- final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
+ @(inline @getter) final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
def black: Tree[A, B]
def red: Tree[A, B]
}
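
The `doDrop`/`doTake`/`doSlice` helpers above now rebuild the boundary node with `updNth` and no longer require an `Ordering`; at the collection level these back `take`/`drop`/`slice` on the sorted immutable collections. A usage sketch:

    import scala.collection.immutable.TreeSet

    object RedBlackSliceDemo {
      def main(args: Array[String]): Unit = {
        val ts = TreeSet(1 to 10: _*)
        println(ts.take(3))      // TreeSet(1, 2, 3)
        println(ts.drop(7))      // TreeSet(8, 9, 10)
        println(ts.slice(2, 5))  // TreeSet(3, 4, 5) - still sorted, still balanced underneath
      }
    }
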
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index bb378bc337..236308da2e 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -50,10 +50,10 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 49537df50d..a784230f66 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
index eec5f04fff..647fc04310 100644
--- a/src/library/scala/collection/immutable/package.scala
+++ b/src/library/scala/collection/immutable/package.scala
@@ -69,9 +69,9 @@ package immutable {
private def locationAfterN(n: Int) = (
if (n > 0) {
if (step > 0)
- math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
else
- math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
}
else start
)
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 9aea25f330..ad52daaad4 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index c87feaddc5..397f5bbefa 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import scala.compat.Platform.arraycopy
@@ -60,7 +61,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index e408d74353..277d48c545 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index b6887df61e..6dec6b221e 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f6d4cc31b6..74f576b0f7 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -44,7 +44,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
import HashTable.powerOfTwo
@@ -109,7 +109,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- def findEntry(elem: A): Option[A] = {
+ protected def findEntry(elem: A): Option[A] = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -120,7 +120,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Checks whether an element is contained in the hash table. */
- def containsEntry(elem: A): Boolean = {
+ protected def containsEntry(elem: A): Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -133,7 +133,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** Add entry if not yet in table.
* @return Returns `true` if a new entry was added, `false` otherwise.
*/
- def addEntry(elem: A) : Boolean = {
+ protected def addEntry(elem: A) : Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry) {
@@ -150,7 +150,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- def removeEntry(elem: A) : Option[A] = {
+ protected def removeEntry(elem: A) : Option[A] = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
@@ -185,7 +185,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
None
}
- def iterator: Iterator[A] = new AbstractIterator[A] {
+ protected def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = {
while (i < table.length && (null == table(i))) i += 1
@@ -356,8 +356,8 @@ private[collection] object FlatHashTable {
*
* See SI-5293.
*/
- final def seedGenerator = new ThreadLocal[util.Random] {
- override def initialValue = new util.Random
+ final def seedGenerator = new ThreadLocal[scala.util.Random] {
+ override def initialValue = new scala.util.Random
}
/** The load factor for the hash table; must be < 500 (0.5)
@@ -365,7 +365,7 @@ private[collection] object FlatHashTable {
def defaultLoadFactor: Int = 450
final def loadFactorDenum = 1000
- def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
+ def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
@@ -397,7 +397,7 @@ private[collection] object FlatHashTable {
//h = h + (h << 4)
//h ^ (h >>> 10)
- val improved = util.hashing.byteswap32(hcode)
+ val improved = scala.util.hashing.byteswap32(hcode)
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
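
A minimal sketch of the scheme the comment above describes, using a hypothetical standalone `improve` helper rather than the table's actual method: the incoming hash code is scrambled with `byteswap32` and then rotated by the per-table seed, so two tables holding the same elements probe different bit patterns.

import scala.util.hashing.byteswap32

object SeededHashSketch {
  // Hypothetical helper, for illustration only.
  def improve(hcode: Int, seedvalue: Int): Int = {
    val improved = byteswap32(hcode)            // scramble the incoming hash code
    Integer.rotateRight(improved, seedvalue)    // rotate by the per-table seed
  }

  def main(args: Array[String]): Unit = {
    // The same element lands on different bit patterns under different seeds.
    println(improve("scala".##, seedvalue = 3))
    println(improve("scala".##, seedvalue = 11))
  }
}
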
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index da486f4042..be85df3c28 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -49,7 +49,7 @@ extends AbstractMap[A, B]
type Entry = DefaultEntry[A, B]
override def empty: HashMap[A, B] = HashMap.empty[A, B]
- override def clear() = clearTable()
+ override def clear() { clearTable() }
override def size: Int = tableSize
def this() = this(null)
@@ -57,22 +57,23 @@ extends AbstractMap[A, B]
override def par = new ParHashMap[A, B](hashTableContents)
// contains and apply overridden to avoid option allocations.
- override def contains(key: A) = findEntry(key) != null
+ override def contains(key: A): Boolean = findEntry(key) != null
+
override def apply(key: A): B = {
val result = findEntry(key)
- if (result == null) default(key)
+ if (result eq null) default(key)
else result.value
}
def get(key: A): Option[B] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -85,9 +86,8 @@ extends AbstractMap[A, B]
}
def += (kv: (A, B)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -127,12 +127,19 @@ extends AbstractMap[A, B]
if (!isSizeMapDefined) sizeMapInitAndRebuild
} else sizeMapDisable
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ new Entry(key, value.asInstanceOf[B])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[B](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
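
The serialization change above replaces the old per-entry value callback with a `writeEntry` procedure and a by-name `readEntry`, letting each map decide how an entry is written and rebuilt. A self-contained sketch of that contract, assuming a made-up `MiniTable` class (not the library's `HashTable`, and with `K`/`V` bounded by `AnyRef` only to keep the sketch simple):

import java.io._

// Made-up container, for illustration only. The real HashTable also persists
// the load factor, the seed value and the size map.
class MiniTable[K <: AnyRef, V <: AnyRef] extends Serializable {
  @transient private var entries: List[(K, V)] = Nil
  def put(k: K, v: V): Unit = { entries = entries :+ ((k, v)) }
  def toList: List[(K, V)] = entries

  // Analogous to serializeTo(out, writeEntry): write metadata, then one callback per entry.
  private def writeObject(out: ObjectOutputStream): Unit = {
    out.defaultWriteObject()
    out.writeInt(entries.size)
    entries.foreach { e => out.writeObject(e._1); out.writeObject(e._2) }
  }

  // Analogous to init(in, readEntry): evaluate the by-name "readEntry" once per entry.
  private def readObject(in: ObjectInputStream): Unit = {
    in.defaultReadObject()
    def readEntry: (K, V) =
      (in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V])
    entries = List.fill(in.readInt())(readEntry)
  }
}

object MiniTableDemo {
  def main(args: Array[String]): Unit = {
    val t = new MiniTable[String, String]
    t.put("key", "value")
    val buf = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(buf)
    oos.writeObject(t); oos.close()
    val back = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
      .readObject().asInstanceOf[MiniTable[String, String]]
    println(back.toList)   // List((key,value))
  }
}

Evaluating the by-name `readEntry` once per entry is what later lets `LinkedHashMap` route deserialization through `createNewEntry` and rebuild its insertion-order links as a side effect.
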
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index b263b46d36..a5b636c83d 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -53,7 +53,7 @@ extends AbstractSet[A]
override def companion: GenericCompanion[HashSet] = HashSet
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = containsEntry(elem)
@@ -67,7 +67,9 @@ extends AbstractSet[A]
override def remove(elem: A): Boolean = removeEntry(elem).isDefined
- override def clear() = clearTable()
+ override def clear() { clearTable() }
+
+ override def iterator: Iterator[A] = super[FlatHashTable].iterator
override def foreach[U](f: A => U) {
var i = 0
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 67e7348672..eb6717393b 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -32,6 +32,9 @@ package mutable
* @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
+ // Replacing the Entry type parameter with an abstract type member here would allow us not to expose
+ // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry` to the public.
+ // However, I'm afraid it's too late now for such a breaking change.
import HashTable._
@transient protected var _loadFactor = defaultLoadFactor
@@ -52,7 +55,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
@@ -75,11 +78,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/**
- * Initializes the collection from the input stream. `f` will be called for each key/value pair
- * read from the input stream in the order determined by the stream. This is useful for
- * structures where iteration order is important (e.g. LinkedHashMap).
+ * Initializes the collection from the input stream. `readEntry` will be called for each
+ * entry to be read from the input stream.
*/
- private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
+ private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) {
in.defaultReadObject
_loadFactor = in.readInt()
@@ -100,35 +102,34 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B]))
+ addEntry(readEntry)
index += 1
}
}
/**
* Serializes the collection to the output stream by saving the load factor, collection
- * size, collection keys and collection values. `value` is responsible for providing a value
- * from an entry.
+ * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
*
- * `foreach` determines the order in which the key/value pairs are saved to the stream. To
+ * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
* deserialize, `init` should be used.
*/
- private[collection] def serializeTo[B](out: java.io.ObjectOutputStream, value: Entry => B) {
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
- foreachEntry { entry =>
- out.writeObject(entry.key)
- out.writeObject(value(entry))
- }
+
+ foreachEntry(writeEntry)
}
/** Find entry with given key in table, null if not found.
*/
- protected def findEntry(key: A): Entry = {
- val h = index(elemHashCode(key))
+ protected def findEntry(key: A): Entry =
+ findEntry0(key, index(elemHashCode(key)))
+
+ private[this] def findEntry0(key: A, h: Int): Entry = {
var e = table(h).asInstanceOf[Entry]
while (e != null && !elemEquals(e.key, key)) e = e.next
e
@@ -138,7 +139,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* pre: no entry with same key exists
*/
protected def addEntry(e: Entry) {
- val h = index(elemHashCode(e.key))
+ addEntry0(e, index(elemHashCode(e.key)))
+ }
+
+ private[this] def addEntry0(e: Entry, h: Int) {
e.next = table(h).asInstanceOf[Entry]
table(h) = e
tableSize = tableSize + 1
@@ -147,6 +151,24 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
resize(2 * table.length)
}
+ /** Find the entry with the given key in the table, or add a new one if not found.
+ * May be somewhat faster than a `findEntry`/`addEntry` pair, as it
+ * computes the entry's hash index only once.
+ * Returns the entry found in the table, or null if a new entry was added.
+ * New entries are created by calling the `createNewEntry` method.
+ */
+ protected def findOrAddEntry[B](key: A, value: B): Entry = {
+ val h = index(elemHashCode(key))
+ val e = findEntry0(key, h)
+ if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+ }
+
+ /** Creates a new entry to be immediately inserted into the hashtable.
+ * This method is guaranteed to be called at most once, and only if the entry
+ * is actually added, so an implementation may have side effects.
+ */
+ protected def createNewEntry[B](key: A, value: B): Entry
+
/** Remove entry from table if present.
*/
protected def removeEntry(key: A) : Entry = {
@@ -195,7 +217,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/** Avoid iterator for a 2x faster traversal. */
- protected def foreachEntry[C](f: Entry => C) {
+ protected def foreachEntry[U](f: Entry => U) {
val iterTable = table
var idx = lastPopulatedIndex
var es = iterTable(idx)
@@ -401,7 +423,7 @@ private[collection] object HashTable {
*
* For performance reasons, we avoid this improvement.
* */
- val i = util.hashing.byteswap32(hcode)
+ val i = scala.util.hashing.byteswap32(hcode)
/* Jenkins hash
* for range 0-10000, output has the msb set to zero */
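
A compact sketch of the `findOrAddEntry`/`createNewEntry` pattern documented above, on a made-up `TinyChainedTable` (illustrative only, not the library code): the bucket index is computed once and reused for both the lookup and the insertion, and entry construction is delegated to a hook so subclasses can run extra bookkeeping exactly when an entry is really added.

// Illustrative chained hash table with a fixed number of buckets.
class TinyChainedTable[K, V] {
  final class Entry(val key: K, var value: V, var next: Entry)

  private val table = new Array[AnyRef](16)
  private def index(k: K): Int = (k.## & Int.MaxValue) % table.length

  // Subclass hook, in the spirit of HashTable.createNewEntry.
  protected def createNewEntry(key: K, value: V): Entry = new Entry(key, value, null)

  /** Returns the existing entry, or null if a fresh entry was created and inserted. */
  def findOrAddEntry(key: K, value: V): Entry = {
    val h = index(key)                        // hash index computed only once
    var e = table(h).asInstanceOf[Entry]
    while (e != null && e.key != key) e = e.next
    if (e != null) e
    else {
      val fresh = createNewEntry(key, value)  // called only when a real insertion happens
      fresh.next = table(h).asInstanceOf[Entry]
      table(h) = fresh
      null
    }
  }

  // put in the style of the new HashMap.put: one hash computation per call.
  def put(key: K, value: V): Option[V] = {
    val e = findOrAddEntry(key, value)
    if (e eq null) None
    else { val old = e.value; e.value = value; Some(old) }
  }
}

object TinyChainedTableDemo {
  def main(args: Array[String]): Unit = {
    val t = new TinyChainedTable[String, Int]
    println(t.put("a", 1))   // None     (new entry created via createNewEntry)
    println(t.put("a", 2))   // Some(1)  (existing entry found, value replaced)
  }
}

Returning null for "freshly added" instead of allocating an Option keeps the fast path allocation-free, which is the same trade-off the new `put` and `+=` above rely on.
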
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 5643e070f8..5028884a8e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -67,23 +67,9 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) {
- val e = new Entry(key, value)
- addEntry(e)
- updateLinkedEntries(e)
- None
- } else {
- val v = e.value
- e.value = value
- Some(v)
- }
- }
-
- private def updateLinkedEntries(e: Entry) {
- if (firstEntry == null) firstEntry = e
- else { lastEntry.later = e; e.earlier = lastEntry }
- lastEntry = e
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
+ else { val v = e.value; e.value = value; Some(v) }
}
override def remove(key: A): Option[B] = {
@@ -143,7 +129,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
else Iterator.empty.next
}
- override def foreach[U](f: ((A, B)) => U) = {
+ override def foreach[U](f: ((A, B)) => U) {
var cur = firstEntry
while (cur ne null) {
f((cur.key, cur.value))
@@ -151,7 +137,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
- protected override def foreachEntry[C](f: Entry => C) {
+ protected override def foreachEntry[U](f: Entry => U) {
var cur = firstEntry
while (cur ne null) {
f(cur)
@@ -159,22 +145,29 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ val e = new Entry(key, value.asInstanceOf[B])
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
+
override def clear() {
clearTable()
firstEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
firstEntry = null
lastEntry = null
- init[B](in, { (key, value) =>
- val entry = new Entry(key, value)
- updateLinkedEntries(entry)
- entry
- })
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
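
A quick usage check (not part of the patch) of the behaviour the `createNewEntry` hook preserves: entries keep their original insertion position, and updating an existing key via `put` does not move it, because `createNewEntry` only runs for genuinely new keys.

import scala.collection.mutable.LinkedHashMap

object LinkedHashMapOrderDemo {
  def main(args: Array[String]): Unit = {
    val m = LinkedHashMap("a" -> 1, "b" -> 2, "c" -> 3)
    m.put("a", 42)      // update of an existing key: its position is kept
    println(m.toList)   // List((a,42), (b,2), (c,3))
  }
}
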
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 3f789f9fa2..88bad5ff9b 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -19,6 +19,7 @@ import generic._
*
* @author Matthias Zenger
* @author Martin Odersky
+ * @author Pavel Pavlov
* @version 2.0, 31/12/2006
* @since 1
*
@@ -43,46 +44,82 @@ class LinkedHashSet[A] extends AbstractSet[A]
with Set[A]
with GenericSetTemplate[A, LinkedHashSet]
with SetLike[A, LinkedHashSet[A]]
- with FlatHashTable[A]
+ with HashTable[A, LinkedHashSet.Entry[A]]
with Serializable
{
override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet
- @transient private[this] var ordered = new ListBuffer[A]
+ type Entry = LinkedHashSet.Entry[A]
- override def size = tableSize
+ @transient protected var firstEntry: Entry = null
+ @transient protected var lastEntry: Entry = null
- def contains(elem: A): Boolean = containsEntry(elem)
+ override def size: Int = tableSize
+
+ def contains(elem: A): Boolean = findEntry(elem) ne null
def += (elem: A): this.type = { add(elem); this }
def -= (elem: A): this.type = { remove(elem); this }
- override def add(elem: A): Boolean =
- if (addEntry(elem)) { ordered += elem; true }
- else false
+ override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null
+
+ override def remove(elem: A): Boolean = {
+ val e = removeEntry(elem)
+ if (e eq null) false
+ else {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ true
+ }
+ }
- override def remove(elem: A): Boolean =
- removeEntry(elem) match {
- case None => false
- case _ => ordered -= elem; true
+ def iterator: Iterator[A] = new AbstractIterator[A] {
+ private var cur = firstEntry
+ def hasNext = cur ne null
+ def next =
+ if (hasNext) { val res = cur.key; cur = cur.later; res }
+ else Iterator.empty.next
+ }
+
+ override def foreach[U](f: A => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key)
+ cur = cur.later
}
+ }
- override def clear() {
- ordered.clear()
- clearTable()
+ protected override def foreachEntry[U](f: Entry => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
}
- override def iterator: Iterator[A] = ordered.iterator
+ protected def createNewEntry[B](key: A, dummy: B): Entry = {
+ val e = new Entry(key)
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
- override def foreach[U](f: A => U) = ordered foreach f
+ override def clear() {
+ clearTable()
+ firstEntry = null
+ }
- private def writeObject(s: java.io.ObjectOutputStream) {
- serializeTo(s)
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, { e => out.writeObject(e.key) })
}
private def readObject(in: java.io.ObjectInputStream) {
- ordered = new ListBuffer[A]
- init(in, ordered += _)
+ firstEntry = null
+ lastEntry = null
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], null))
}
}
@@ -93,5 +130,13 @@ class LinkedHashSet[A] extends AbstractSet[A]
object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A]
override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A]
+
+ /** Class for the linked hash set entry, used internally.
+ * @since 2.10
+ */
+ private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable {
+ var earlier: Entry[A] = null
+ var later: Entry[A] = null
+ }
}
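
A minimal sketch of the doubly-linked bookkeeping the new `LinkedHashSet` shares with `LinkedHashMap`, using made-up names (`OrderBook`, `append`, `unlink`): appending in `createNewEntry` style preserves insertion order, and removal is the O(1) earlier/later pointer surgery seen in `remove` above.

// Illustrative only: insertion-order links kept separately from any hashing.
object InsertionOrderSketch {
  final class Node[A](val key: A) {
    var earlier: Node[A] = null
    var later: Node[A] = null
  }

  final class OrderBook[A] {
    private var first: Node[A] = null
    private var last: Node[A] = null

    // The equivalent of createNewEntry: build the node and splice it at the end.
    def append(key: A): Node[A] = {
      val n = new Node(key)
      if (first eq null) first = n
      else { last.later = n; n.earlier = last }
      last = n
      n
    }

    // The equivalent of the unlinking done in LinkedHashSet.remove.
    def unlink(n: Node[A]): Unit = {
      if (n.earlier eq null) first = n.later else n.earlier.later = n.later
      if (n.later eq null) last = n.earlier else n.later.earlier = n.earlier
    }

    def keys: List[A] = {
      val b = List.newBuilder[A]
      var cur = first
      while (cur ne null) { b += cur.key; cur = cur.later }
      b.result()
    }
  }

  def main(args: Array[String]): Unit = {
    val ob = new OrderBook[String]
    val a = ob.append("a"); ob.append("b"); ob.append("c")
    ob.unlink(a)
    println(ob.keys)   // List(b, c)
  }
}
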
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index c33a7a906e..11055f8986 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index e37cbdc712..abd8c1cdff 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -166,7 +166,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* Note: The order of elements returned is undefined.
* If you want to traverse the elements in priority queue
* order, use `clone().dequeueAll.iterator`.
- *
+ *
* @return an iterator over all the elements.
*/
override def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -193,7 +193,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return A reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
def compare(x: A, y: A) = ord.compare(y, x)
})
for (i <- 1 until resarr.length) revq += resarr(i)
@@ -204,7 +204,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* than that returned by the method `iterator`.
*
* Note: The order of elements returned is undefined.
- *
+ *
* @return an iterator over all elements sorted in descending order.
*/
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
@@ -236,11 +236,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
-
+
/** Converts this $coll to a list.
*
* Note: the order of elements is undefined.
- *
+ *
* @return a list containing all elements of this $coll.
*/
override def toList = this.iterator.toList
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index 54bf93252f..d29ee67580 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 4e0818ff45..b9a9e35574 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -1368,7 +1368,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val until = from + len
val blocksize = scanBlockSize
while (i < until) {
- trees += scanBlock(i, math.min(blocksize, pit.remaining))
+ trees += scanBlock(i, scala.math.min(blocksize, pit.remaining))
i += blocksize
}
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 58197ab2c6..2bc5e783e6 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,13 +6,8 @@
** |/ **
\* */
-
package scala.collection.parallel
-
-
-
-
import scala.collection.Map
import scala.collection.GenMap
import scala.collection.mutable.Builder
@@ -21,10 +16,6 @@ import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
-
-
-
-
/** A template trait for parallel maps.
*
* $sideeffects
@@ -75,31 +66,3 @@ object ParMap extends ParMapFactory[ParMap] {
override def default(key: A): B = d(key)
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index a67a4d8eb7..9bf287cc39 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -190,7 +190,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(from)
- var left = math.max(until - from, 0)
+ var left = scala.math.max(until - from, 0)
cb.sizeHint(left)
while (left > 0) {
cb += next
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 67552e1c89..2556cd3f68 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -98,7 +98,7 @@ trait Task[R, +Tp] {
*/
trait Tasks {
- private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
+ private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]()
private[parallel] def debuglog(s: String) = synchronized {
debugMessages += s
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 7f5255f5a3..187e4aaf92 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -264,7 +264,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
@@ -328,7 +328,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
val fp = howmany / 2
List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 42d00623ab..85e2138c56 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -209,7 +209,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 4b0773ce7b..5854844a8f 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -30,10 +30,10 @@ import scala.collection.GenIterable
* @since 2.9
*/
trait ParIterable[+T]
-extends collection/*.immutable*/.GenIterable[T]
+extends scala.collection/*.immutable*/.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 74b2672e67..585e6bf541 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -28,11 +28,11 @@ import scala.collection.GenMapLike
* @since 2.9
*/
trait ParMap[K, +V]
-extends collection/*.immutable*/.GenMap[K, V]
+extends scala.collection/*.immutable*/.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
- with ParMapLike[K, V, ParMap[K, V], collection.immutable.Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]]
{
self =>
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index 300efe9a58..265121286d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -28,11 +28,11 @@ import scala.collection.GenSeq
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection/*.immutable*/.GenSeq[T]
+extends scala.collection/*.immutable*/.GenSeq[T]
with scala.collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.immutable.Seq[T]]
+ with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]]
{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
override def toSeq: ParSeq[T] = this
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 40429280ac..c8da509ef5 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -20,11 +20,11 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends collection/*.immutable*/.GenSet[T]
+extends scala.collection/*.immutable*/.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
- with ParSetLike[T, ParSet[T], collection.immutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]]
{
self =>
override def empty: ParSet[T] = ParHashSet[T]()
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 6889d8b472..56cc06f99e 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -665,7 +666,7 @@ self =>
val fp = howmany / 2
List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
}
/* serialization */
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 33a39e6038..fad7ddad59 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -40,14 +40,14 @@ import scala.collection.parallel.Task
class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
- with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]]
+ with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
with ParHashTable[K, DefaultEntry[K, V]]
with Serializable
{
self =>
initWithContents(contents)
- type Entry = collection.mutable.DefaultEntry[K, V]
+ type Entry = scala.collection.mutable.DefaultEntry[K, V]
def this() = this(null)
@@ -57,7 +57,7 @@ self =>
protected[this] override def newCombiner = ParHashMapCombiner[K, V]
- override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
+ override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents)
def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
@@ -67,13 +67,13 @@ self =>
def get(key: K): Option[V] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
def put(key: K, value: V): Option[V] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -86,9 +86,8 @@ self =>
}
def += (kv: (K, V)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -103,12 +102,19 @@ self =>
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
+ protected def createNewEntry[V1](key: K, value: V1): Entry = {
+ new Entry(key, value.asInstanceOf[V])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[V](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
}
private[parallel] override def brokenInvariants = {
@@ -190,7 +196,9 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
object table extends HashTable[K, DefaultEntry[K, V]] {
- def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+ type Entry = DefaultEntry[K, V]
+ def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+ def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
sizeMapInit(table.length)
}
var i = 0
@@ -251,6 +259,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
assert(h >= block * blocksize && h < (block + 1) * blocksize)
}
}
+ protected def createNewEntry[X](key: K, x: X) = ???
}
/* tasks */
@@ -302,7 +311,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
override def merge(that: FillBlocks) {
this.result += that.result
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 870cae75de..aef9f6856b 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -36,7 +36,7 @@ import scala.collection.parallel.Task
class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
- with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]]
+ with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]]
with ParFlatHashTable[T]
with Serializable
{
@@ -57,7 +57,7 @@ extends ParSet[T]
def clear() = clearTable()
- override def seq = new collection.mutable.HashSet(hashTableContents)
+ override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
addEntry(elem)
@@ -158,12 +158,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
val tbl = new FlatHashTable[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
+ for {
+ buffer <- buckets;
+ if buffer ne null;
+ elem <- buffer
+ } addEntry(elem.asInstanceOf[T])
}
- for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } tbl.addEntry(elem.asInstanceOf[T])
tbl.hashTableContents
}
@@ -310,7 +310,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
// the total number of successfully inserted elements is adjusted accordingly
result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2)
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 7cf464c287..bb9a7b7823 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index a2847c3beb..9281e84c03 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -26,7 +26,7 @@ import scala.collection.GenIterable
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
+trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]]
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 9ad14f15f8..34b3d465d2 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -28,11 +28,11 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection/*.mutable*/.GenMap[K, V]
+extends scala.collection/*.mutable*/.GenMap[K, V]
with scala.collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
+ with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 15f8d1d0b5..7322d5236f 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -29,11 +29,11 @@ import scala.collection.GenSeq
* @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
+trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
with ParIterable[T]
with scala.collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] {
self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
//protected[this] override def newBuilder = ParSeq.newBuilder[T]
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 689ce3436f..540ecb8022 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -27,11 +27,11 @@ import scala.collection.GenSet
* @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection/*.mutable*/.GenSet[T]
+extends scala.collection/*.mutable*/.GenSet[T]
with ParIterable[T]
with scala.collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
- with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]]
{
self =>
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 01eb17024e..68f37137f8 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -81,7 +81,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
val fp = howmany / 2
List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 9648791502..5600d0f68c 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -8,10 +8,6 @@
package scala.collection.parallel.mutable
-
-
-
-
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
@@ -23,16 +19,12 @@ import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
import scala.reflect.ClassTag
-
-
-
private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
}
-
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
@@ -85,7 +77,7 @@ extends Combiner[T, ParArray[T]] {
var pos = startpos
var arroffset = offset
while (totalleft > 0) {
- val lefthere = math.min(totalleft, curr.size - pos)
+ val lefthere = scala.math.min(totalleft, curr.size - pos)
Array.copy(curr.array, pos, array, arroffset, lefthere)
// println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr)
totalleft -= lefthere
@@ -107,13 +99,11 @@ extends Combiner[T, ParArray[T]] {
val fp = howmany / 2
List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")"
}
}
-
-
object UnrolledParArrayCombiner {
def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 30b4c0c914..a95090c15b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.CanCombineFrom
@@ -41,8 +42,8 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
+ if (scala.util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = scala.util.Properties.javaVmVendor
if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
else new ThreadPoolTaskSupport
} else new ThreadPoolTaskSupport
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index f18ce12e6c..77c12a8e58 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -109,7 +109,7 @@ object Platform {
* `System.getProperty("line.separator")`
* with a default value of "\n".
*/
- val EOL = util.Properties.lineSeparator
+ val EOL = scala.util.Properties.lineSeparator
/** The current time in milliseconds. The time is counted since 1 January 1970
* UTC.
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index fa74be0f98..6522cd0cd8 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -91,7 +91,7 @@ object Codec extends LowPriorityCodecImplicits {
* as an accident, with any anomalies considered "not a bug".
*/
def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(util.Properties.encodingString)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 0d0d0d7648..dae478f31a 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -54,7 +54,7 @@ abstract class Position {
if (line >= LINE_MASK)
LINE_MASK << COLUMN_BITS
else
- (line << COLUMN_BITS) | math.min(COLUMN_MASK, column)
+ (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column)
}
/** Returns the line number of the encoded position. */
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index dfe27f8857..297f344f65 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -29,7 +29,7 @@ object language {
implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
/** Only where enabled, accesses to members of structural types that need
- * reflection are supported. Reminder: A structural type is a type of the form
+ * reflection are supported. Reminder: A structural type is a type of the form
* `Parents { Decls }` where `Decls` contains declarations of new members that do
* not override any member in `Parents`. To access one of these members, a
* reflective call is needed.
@@ -52,7 +52,7 @@ object language {
* implicit def stringToInt(s: String): Int = s.length
* implicit val conv = (s: String) => s.length
* implicit def listToX(xs: List[T])(implicit f: T => X): X = …
- *
+ *
* implicit values of other types are not affected, and neither are implicit
* classes.
*
@@ -95,7 +95,7 @@ object language {
*
* _Why keep the feature?_ Existential types are needed to make sense of Java’s wildcard
* types and raw types and the erased types of run-time values.
- *
+ *
* Why control it? Having complex existential types in a code base usually makes
* application code very brittle, with a tendency to produce type errors with
* obscure error messages. Therefore, going overboard with existential types
@@ -110,7 +110,7 @@ object language {
/** Where enabled, macro definitions are allowed. Macro implementations and
* macro applications are unaffected; they can be used anywhere.
- *
+ *
* _Why introduce the feature?_ Macros promise to make the language more regular,
* replacing ad-hoc language constructs with a general powerful abstraction
* capability that can express them. Macros are also a more disciplined and
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 3ac255b57f..719f2e12a7 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
import scala.language.{implicitConversions, higherKinds}
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 6757a72053..1a574836c0 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -1,24 +1,25 @@
-package scala.reflect
+package scala
+package reflect
import java.lang.{ Class => jClass }
import scala.language.{implicitConversions, existentials}
import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
-/** A `ClassTag[T]` wraps a runtime class, which can be accessed via the `runtimeClass` method.
+/** A `ClassTag[T]` wraps a runtime class (the erasure) and can create array instances.
*
- * This is useful in itself, but also enables very important use case.
- * Having this knowledge ClassTag can instantiate `Arrays`
- * in those cases where the element type is unknown at compile time.
+ * If an implicit value of type ClassTag[T] is requested, the compiler will create one.
+ * The runtime class (i.e. the erasure, a java.lang.Class on the JVM) of T can be accessed
+ * via the `runtimeClass` field. References to type parameters or abstract type members are
+ * replaced by the concrete types if ClassTags are available for them.
+ *
+ * Besides accessing the erasure, a ClassTag knows how to instantiate single- and multi-
+ * dimensional `Arrays` where the element type is unknown at compile time.
*
- * If an implicit value of type u.ClassTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its `runtimeClass` field the runtime class that is the result of erasing type T.
- * In that value, any occurrences of type parameters or abstract types U which come themselves with a ClassTag
- * are represented by the type referenced by that tag.
- * If the type T contains unresolved references to type parameters or abstract types, a static error results.
+ * [[scala.reflect.ClassTag]] corresponds to a previous concept of [[scala.reflect.ClassManifest]].
*
* @see [[scala.reflect.base.TypeTags]]
*/
-@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
+@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
// class tags, and all tags in general, should be as minimalistic as possible
@@ -75,7 +76,7 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
if (conforms) Some(x.asInstanceOf[T]) else None
}
- /** case class accessories */
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass
override def hashCode = scala.runtime.ScalaRunTime.hash(runtimeClass)
@@ -87,6 +88,9 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
}
}
+/**
+ * Class tags corresponding to primitive types and constructor/extractor for ClassTags.
+ */
object ClassTag {
private val ObjectTYPE = classOf[java.lang.Object]
private val NothingTYPE = classOf[scala.runtime.Nothing$]
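
A minimal illustration of the array-instantiation capability the rewritten scaladoc describes, with a hypothetical `pair` helper: without the implicit `ClassTag[T]`, `new Array[T]` could not be compiled for an abstract `T`, because the element type is erased.

import scala.reflect.ClassTag

object ClassTagDemo {
  // The context bound `T: ClassTag` asks the compiler to materialize a ClassTag[T].
  def pair[T: ClassTag](a: T, b: T): Array[T] = {
    val arr = new Array[T](2)   // uses the ClassTag to create a properly typed array
    arr(0) = a; arr(1) = b
    arr
  }

  def main(args: Array[String]): Unit = {
    println(pair(1, 2).getClass)        // class [I  (a primitive int array)
    println(pair("a", "b").getClass)    // class [Ljava.lang.String;
  }
}
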
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index f2a23f4372..8b021e0444 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -38,7 +38,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
}}}
*
*/
-@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
@deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -264,4 +264,4 @@ object ManifestFactory {
def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
-} \ No newline at end of file
+}
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index ff56e20d52..77cbd20321 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** Provides functions to encode and decode Scala symbolic names.
* Also provides some constants.
diff --git a/src/library/scala/reflect/base/AnnotationInfos.scala b/src/library/scala/reflect/base/AnnotationInfos.scala
deleted file mode 100644
index f03644deef..0000000000
--- a/src/library/scala/reflect/base/AnnotationInfos.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.reflect
-package base
-
-trait AnnotationInfos { self: Universe =>
-
- type AnnotationInfo >: Null <: AnyRef
- implicit val AnnotationInfoTag: ClassTag[AnnotationInfo]
- val AnnotationInfo: AnnotationInfoExtractor
-
- abstract class AnnotationInfoExtractor {
- def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
- def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
- }
-
- type ClassfileAnnotArg >: Null <: AnyRef
- implicit val ClassfileAnnotArgTag: ClassTag[ClassfileAnnotArg]
-
- type LiteralAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val LiteralAnnotArgTag: ClassTag[LiteralAnnotArg]
- val LiteralAnnotArg: LiteralAnnotArgExtractor
-
- abstract class LiteralAnnotArgExtractor {
- def apply(const: Constant): LiteralAnnotArg
- def unapply(arg: LiteralAnnotArg): Option[Constant]
- }
-
- type ArrayAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val ArrayAnnotArgTag: ClassTag[ArrayAnnotArg]
- val ArrayAnnotArg: ArrayAnnotArgExtractor
-
- abstract class ArrayAnnotArgExtractor {
- def apply(args: Array[ClassfileAnnotArg]): ArrayAnnotArg
- def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
- }
-
- type NestedAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val NestedAnnotArgTag: ClassTag[NestedAnnotArg]
- val NestedAnnotArg: NestedAnnotArgExtractor
-
- abstract class NestedAnnotArgExtractor {
- def apply(annInfo: AnnotationInfo): NestedAnnotArg
- def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Annotations.scala b/src/library/scala/reflect/base/Annotations.scala
new file mode 100644
index 0000000000..107443f09b
--- /dev/null
+++ b/src/library/scala/reflect/base/Annotations.scala
@@ -0,0 +1,106 @@
+package scala.reflect
+package base
+
+import scala.collection.immutable.ListMap
+
+/**
+ * Defines the type hierarchy for annotations.
+ */
+trait Annotations { self: Universe =>
+
+ /** Typed information about an annotation. It can be attached to either a symbol or an annotated type.
+ *
+ * Annotations are either ''Scala annotations'', which conform to [[scala.annotation.StaticAnnotation]]
+ * or ''Java annotations'', which conform to [[scala.annotation.ClassfileAnnotation]].
+ * Trait `ClassfileAnnotation` is automatically added to every Java annotation by the scalac classfile parser.
+ */
+ type Annotation >: Null <: AnyRef
+
+ /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AnnotationTag: ClassTag[Annotation]
+
+ /** The constructor/deconstructor for `Annotation` instances. */
+ val Annotation: AnnotationExtractor
+
+ /** An extractor class to create and pattern match with syntax `Annotation(atp, scalaArgs, javaArgs)`.
+ * Here, `atp` is the annotation type, `scalaArgs` the arguments, and `javaArgs` the annotation's key-value
+ * pairs.
+ *
+ * Annotations are pickled, i.e. written to scala symtab attribute in the classfile.
+ * Annotations are written to the classfile as Java annotations if `atp` conforms to `ClassfileAnnotation`.
+ *
+ * For Scala annotations, arguments are stored in `scalaArgs` and `javaArgs` is empty. Arguments in
+ * `scalaArgs` are represented as typed trees. Note that these trees are not transformed by any phases
+ * following the type-checker. For Java annotations, `scalaArgs` is empty and arguments are stored in
+ * `javaArgs`.
+ */
+ abstract class AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation
+ def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
+ }
+
+ /** A Java annotation argument */
+ type JavaArgument >: Null <: AnyRef
+ implicit val JavaArgumentTag: ClassTag[JavaArgument]
+
+ /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`*/
+ type LiteralArgument >: Null <: AnyRef with JavaArgument
+
+ /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+
+ /** The constructor/deconstructor for `LiteralArgument` instances. */
+ val LiteralArgument: LiteralArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
+ * where `value` is the constant argument.
+ */
+ abstract class LiteralArgumentExtractor {
+ def apply(value: Constant): LiteralArgument
+ def unapply(arg: LiteralArgument): Option[Constant]
+ }
+
+ /** An array argument to a Java annotation as in `@Target(value={TYPE,FIELD,METHOD,PARAMETER})`
+ */
+ type ArrayArgument >: Null <: AnyRef with JavaArgument
+
+ /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+
+ /** The constructor/deconstructor for `ArrayArgument` instances. */
+ val ArrayArgument: ArrayArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
+ * where `args` is the argument array.
+ */
+ abstract class ArrayArgumentExtractor {
+ def apply(args: Array[JavaArgument]): ArrayArgument
+ def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
+ }
+
+ /** A nested annotation argument to a Java annotation as `@Nested` in `@Outer(@Nested)`.
+ */
+ type NestedArgument >: Null <: AnyRef with JavaArgument
+
+ /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+ /** The constructor/deconstructor for `NestedArgument` instances. */
+ val NestedArgument: NestedArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
+ * where `annotation` is the nested annotation.
+ */
+ abstract class NestedArgumentExtractor {
+ def apply(annotation: Annotation): NestedArgument
+ def unapply(arg: NestedArgument): Option[Annotation]
+ }
+} \ No newline at end of file
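
A rough sketch of reading annotations off a symbol through a concrete universe, assuming the 2.10 runtime universe `scala.reflect.runtime.universe` and its `Symbol.annotations` accessor (hypothetical class `Foo` for illustration only):

{{{
import scala.reflect.runtime.universe._

class Foo {
  @deprecated("use bar instead", "1.0") def old() = ()
}

val oldSym = typeOf[Foo].member(newTermName("old"))
oldSym.typeSignature  // forcing the signature may be needed before annotations are populated
// @deprecated is a Scala annotation, so its arguments appear in scalaArgs
oldSym.annotations foreach {
  case Annotation(tpe, scalaArgs, javaArgs) =>
    println(tpe + " / " + scalaArgs + " / " + javaArgs)
}
}}}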
diff --git a/src/library/scala/reflect/base/Attachments.scala b/src/library/scala/reflect/base/Attachments.scala
index 43e870fc4f..479ab9a857 100644
--- a/src/library/scala/reflect/base/Attachments.scala
+++ b/src/library/scala/reflect/base/Attachments.scala
@@ -1,34 +1,44 @@
package scala.reflect
package base
-/** Attachments is a generalisation of Position.
- * Typically it stores a Position of a tree, but this can be extended to encompass arbitrary payloads.
+/** Attachments is a generalization of Position. Typically it stores a Position of a tree, but this can be extended to
+ * encompass arbitrary payloads. Payloads are stored in type-indexed slots, which can be read with `get[T]`, written
+ * with `update[T]`, and cleared with `remove[T]`.
*
- * Attachments have to carry positions, because we don't want to introduce even a single additional field in Tree
+ * Attachments always carry positions because we don't want to introduce an additional field for attachments in `Tree`
* imposing an unnecessary memory tax because of something that will not be used in most cases.
*/
abstract class Attachments { self =>
+ /** The position type of this attachment */
type Pos >: Null
- /** Gets the underlying position */
+ /** The underlying position */
def pos: Pos
- /** Creates a copy of this attachment with its position updated */
+ /** Creates a copy of this attachment with the position replaced by `newPos` */
def withPos(newPos: Pos): Attachments { type Pos = self.Pos }
- /** Gets the underlying payload */
+ /** The underlying payload with the guarantee that no two elements have the same type. */
def all: Set[Any] = Set.empty
+ private def matchesTag[T: ClassTag](datum: Any) =
+ classTag[T].runtimeClass == datum.getClass
+
+ /** An underlying payload of the given class type `T`. */
def get[T: ClassTag]: Option[T] =
- (all find (_.getClass == classTag[T].runtimeClass)).asInstanceOf[Option[T]]
+ (all filter matchesTag[T]).headOption.asInstanceOf[Option[T]]
- /** Creates a copy of this attachment with its payload updated */
- def add(attachment: Any): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, all + attachment)
+ /** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
+ *
+ * Replaces an existing payload of the same type, if exists.
+ */
+ def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
+ new NonemptyAttachments(this.pos, remove[T].all + attachment)
+ /** Creates a copy of this attachment with the payload of the given class type `T` removed. */
def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
- val newAll = all filterNot (_.getClass == classTag[T].runtimeClass)
+ val newAll = all filterNot matchesTag[T]
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
else new NonemptyAttachments(this.pos, newAll)
}
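
A rough sketch of the slot semantics above, assuming some pre-existing `Attachments` value whose `Pos` is itself an `Attachments` subtype (as compiler positions are); `Payload` is a hypothetical marker class used only for illustration:

{{{
import scala.reflect.base.Attachments

case class Payload(n: Int) // hypothetical payload type

def demo(atts: Attachments): Unit = {
  val a1 = atts.update(Payload(1))          // fills the Payload slot
  println(a1.get[Payload])                  // Some(Payload(1))
  val a2 = a1.update(Payload(2))            // replaces the existing Payload
  println(a2.get[Payload])                  // Some(Payload(2))
  println(a2.remove[Payload].get[Payload])  // None
}
}}}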
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 33582675bd..6ecfd384ab 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -5,7 +5,11 @@ import java.io.PrintWriter
import scala.annotation.switch
import scala.ref.WeakReference
import scala.collection.mutable
+import scala.collection.immutable.ListMap
+/**
+ * This is an internal implementation class.
+ */
class Base extends Universe { self =>
private var nextId = 0
@@ -157,7 +161,7 @@ class Base extends Universe { self =>
object ExistentialType extends ExistentialTypeExtractor
implicit val ExistentialTypeTag = ClassTag[ExistentialType](classOf[ExistentialType])
- case class AnnotatedType(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol) extends Type { override def typeSymbol = underlying.typeSymbol }
+ case class AnnotatedType(annotations: List[Annotation], underlying: Type, selfsym: Symbol) extends Type { override def typeSymbol = underlying.typeSymbol }
object AnnotatedType extends AnnotatedTypeExtractor
implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
@@ -249,24 +253,24 @@ class Base extends Universe { self =>
object Constant extends ConstantExtractor
implicit val ConstantTag = ClassTag[Constant](classOf[Constant])
- case class AnnotationInfo(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)])
- object AnnotationInfo extends AnnotationInfoExtractor
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
+ case class Annotation(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument])
+ object Annotation extends AnnotationExtractor
+ implicit val AnnotationTag = ClassTag[Annotation](classOf[Annotation])
- abstract class ClassfileAnnotArg
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ abstract class JavaArgument
+ implicit val JavaArgumentTag = ClassTag[JavaArgument](classOf[JavaArgument])
- case class LiteralAnnotArg(const: Constant) extends ClassfileAnnotArg
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
+ case class LiteralArgument(value: Constant) extends JavaArgument
+ object LiteralArgument extends LiteralArgumentExtractor
+ implicit val LiteralArgumentTag = ClassTag[LiteralArgument](classOf[LiteralArgument])
- case class ArrayAnnotArg(args: Array[ClassfileAnnotArg]) extends ClassfileAnnotArg
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ case class ArrayArgument(args: Array[JavaArgument]) extends JavaArgument
+ object ArrayArgument extends ArrayArgumentExtractor
+ implicit val ArrayArgumentTag = ClassTag[ArrayArgument](classOf[ArrayArgument])
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg
- object NestedAnnotArg extends NestedAnnotArgExtractor
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
+ case class NestedArgument(annotation: Annotation) extends JavaArgument
+ object NestedArgument extends NestedArgumentExtractor
+ implicit val NestedArgumentTag = ClassTag[NestedArgument](classOf[NestedArgument])
class Position extends Attachments {
override type Pos = Position
@@ -319,7 +323,7 @@ class Base extends Universe { self =>
def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = sym
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S = sym
+ def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S = sym
def flagsFromBits(bits: Long): FlagSet = bits
@@ -623,10 +627,6 @@ class Base extends Universe { self =>
extends GenericApply
object Apply extends ApplyExtractor
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- object ApplyDynamic extends ApplyDynamicExtractor
-
case class Super(qual: Tree, mix: TypeName) extends TermTree
object Super extends SuperExtractor
@@ -725,7 +725,6 @@ class Base extends Universe { self =>
implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
implicit val SuperTag = ClassTag[Super](classOf[Super])
implicit val ThisTag = ClassTag[This](classOf[This])
implicit val SelectTag = ClassTag[Select](classOf[Select])
diff --git a/src/library/scala/reflect/base/BuildUtils.scala b/src/library/scala/reflect/base/BuildUtils.scala
index c4231dd515..5982329aef 100644
--- a/src/library/scala/reflect/base/BuildUtils.scala
+++ b/src/library/scala/reflect/base/BuildUtils.scala
@@ -1,6 +1,9 @@
package scala.reflect
package base
+/**
+ * This is an internal implementation class.
+ */
trait BuildUtils { self: Universe =>
val build: BuildBase
@@ -49,7 +52,7 @@ trait BuildUtils { self: Universe =>
/** Set symbol's annotations to given annotations `annots`.
*/
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S
+ def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
def flagsFromBits(bits: Long): FlagSet
diff --git a/src/library/scala/reflect/base/Constants.scala b/src/library/scala/reflect/base/Constants.scala
index ba12b02e92..240434362d 100644
--- a/src/library/scala/reflect/base/Constants.scala
+++ b/src/library/scala/reflect/base/Constants.scala
@@ -6,13 +6,29 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for compile-time constants.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Constants {
self: Universe =>
+ /** The type of compile-time constants.
+ */
type Constant >: Null <: AnyRef
+
+ /** A tag that preserves the identity of the `Constant` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
implicit val ConstantTag: ClassTag[Constant]
+
+ /** The constructor/deconstructor for `Constant` instances. */
val Constant: ConstantExtractor
+ /** An extractor class to create and pattern match with syntax `Constant(value)`
+ * where `value` is the Scala value of the constant.
+ */
abstract class ConstantExtractor {
def apply(value: Any): Constant
def unapply(arg: Constant): Option[Any]
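
For illustration, a short sketch of building and deconstructing constants, assuming a concrete universe such as `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._

val c = Constant(42)     // wrap the value 42 as a compile-time constant
val Constant(value) = c  // take it apart again with the extractor
println(value)           // 42
println(Literal(c))      // the literal tree for 42
}}}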
diff --git a/src/library/scala/reflect/base/Exprs.scala b/src/library/scala/reflect/base/Exprs.scala
index 45598c03e2..bd15c65711 100644
--- a/src/library/scala/reflect/base/Exprs.scala
+++ b/src/library/scala/reflect/base/Exprs.scala
@@ -8,16 +8,71 @@ package base
trait Exprs { self: Universe =>
- /** An expression tree tagged with its type */
+ /** Expr wraps an expression tree and tags it with its type. */
trait Expr[+T] extends Equals with Serializable {
val mirror: Mirror
+ /**
+ * Migrates the expression into another mirror, jumping into a different universe if necessary.
+ *
+ * This means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
+ */
def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T]
+ /**
+ * The Scala syntax tree representing the wrapped expression.
+ */
def tree: Tree
+
+ /**
+ * Representation of the type of the wrapped expression tree as found via type tags.
+ */
def staticType: Type
+ /**
+ * Representation of the type of the wrapped expression tree as found in the tree.
+ */
def actualType: Type
+ /**
+ * A dummy method to mark expression splicing in reification.
+ *
+ * It should only be used within a `reify` call, which eliminates the `splice` call and embeds
+ * the wrapped tree into the reified surrounding expression.
+ * If used alone `splice` throws an exception when called at runtime.
+ *
+ * If you want to use an Expr in reification of some Scala code, you need to splice it in.
+ * For an expr of type `Expr[T]`, where `T` has a method `foo`, the following code
+ * {{{
+ * reify{ expr.splice.foo }
+ * }}}
+ * uses splice to turn an expr of type Expr[T] into a value of type T in the context of `reify`.
+ *
+ * It is equivalent to
+ * {{{
+ * Select( expr.tree, newTermName("foo") )
+ * }}}
+ *
+ * The following example code however does not compile
+ * {{{
+ * reify{ expr.foo }
+ * }}}
+ * because expr of type Expr[T] itself does not have a method foo.
+ */
def splice: T
+ /**
+ * A dummy value to denote cross-stage path-dependent type dependencies.
+ *
+ * For example for the following macro definition:
+ * {{{
+ * class X { type T }
+ * object Macros { def foo(x: X): x.T = macro Impls.foo_impl }
+ * }}}
+ *
+ * The corresponding macro implementation should have the following signature (note how the return type denotes path-dependency on x):
+ * {{{
+ * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
+ * }}}
+ */
val value: T
/** case class accessories */
@@ -27,6 +82,12 @@ trait Exprs { self: Universe =>
override def toString = "Expr["+staticType+"]("+tree+")"
}
+ /**
+ * Constructor/Extractor for Expr.
+ *
+   * Can be useful when you have a tree and want to splice it into a reify call,
+   * in which case the tree first needs to be wrapped in an expr.
+ */
object Expr {
def apply[T: WeakTypeTag](mirror: MirrorOf[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
def unapply[T](expr: Expr[T]): Option[Tree] = Some(expr.tree)
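
A small sketch of `reify`/`splice` at work, assuming the full 2.10 runtime universe (where `reify` is available):

{{{
import scala.reflect.runtime.universe._

val two  = reify { 1 + 1 }           // Expr[Int]
val four = reify { two.splice * 2 }  // the tree of `two` is embedded into the new expression
println(show(four.tree))             // roughly: 1.+(1).*(2)
println(four.staticType)             // Int
}}}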
diff --git a/src/library/scala/reflect/base/FlagSets.scala b/src/library/scala/reflect/base/FlagSets.scala
index 96cdbe894c..0ce7613eb3 100644
--- a/src/library/scala/reflect/base/FlagSets.scala
+++ b/src/library/scala/reflect/base/FlagSets.scala
@@ -3,7 +3,7 @@ package base
trait FlagSets { self: Universe =>
- /** An abstract type representing sets of flags that apply to definition trees and symbols */
+ /** An abstract type representing sets of flags (like private, final, etc.) that apply to definition trees and symbols */
type FlagSet
/** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
diff --git a/src/library/scala/reflect/base/MirrorOf.scala b/src/library/scala/reflect/base/MirrorOf.scala
index 1e9619d062..4e54a2fae7 100644
--- a/src/library/scala/reflect/base/MirrorOf.scala
+++ b/src/library/scala/reflect/base/MirrorOf.scala
@@ -1,14 +1,33 @@
package scala.reflect
package base
+/**
+ * The base interface for all mirrors.
+ *
+ * @tparam U the type of the universe this mirror belongs to.
+ *
+ * This is defined outside the reflection universe cake pattern implementation
+ * so that it can be referenced from outside. For example TypeCreator and TreeCreator
+ * reference MirrorOf and also need to be defined outside the cake as they are
+ * used by type tags, which can be migrated between different universes and consequently
+ * cannot be bound to a fixed one.
+ *
+ * @see [[Mirrors]]
+ */
abstract class MirrorOf[U <: base.Universe with Singleton] {
- /** .. */
+ /** The universe this mirror belongs to. */
val universe: U
- /** .. */
+ /** The class symbol of the `_root_` package */
def RootClass: U#ClassSymbol
+
+ /** The module symbol of the `_root_` package */
def RootPackage: U#ModuleSymbol
+
+ /** The module class symbol of the default (unnamed) package */
def EmptyPackageClass: U#ClassSymbol
+
+ /** The module symbol of the default (unnamed) package */
def EmptyPackage: U#ModuleSymbol
/** The symbol corresponding to the globally accessible class with the
diff --git a/src/library/scala/reflect/base/Mirrors.scala b/src/library/scala/reflect/base/Mirrors.scala
index 50866ef000..e38a3d1cdd 100644
--- a/src/library/scala/reflect/base/Mirrors.scala
+++ b/src/library/scala/reflect/base/Mirrors.scala
@@ -1,12 +1,22 @@
package scala.reflect
package base
+/**
+ * Defines a type hierarchy for mirrors.
+ *
+ * Every universe has one or more mirrors. A mirror defines a hierarchy of symbols starting with the root package `_root_`
+ * and provides methods to locate and define classes and singleton objects in that hierarchy.
+ *
+ * On the JVM, there is a one-to-one correspondence between class loaders and mirrors.
+ */
trait Mirrors {
self: Universe =>
- /** .. */
+ /** The base type of all mirrors of this universe */
type Mirror >: Null <: MirrorOf[self.type]
- /** .. */
+ /** The root mirror of this universe. This mirror contains standard Scala classes and types such as `Any`, `AnyRef`, `AnyVal`,
+ * `Nothing`, `Null`, and all classes loaded from scala-library, which are shared across all mirrors within the enclosing universe.
+ */
val rootMirror: Mirror
}
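
A quick sketch of obtaining and using the root mirror of the JVM runtime universe; note that `staticClass` comes from the richer API mirror, not from the minimal `MirrorOf` interface:

{{{
import scala.reflect.runtime.{universe => ru}

val m = ru.rootMirror                   // backed by the classloader that loaded scala-library
println(m.RootPackage)                  // the `_root_` package symbol
println(m.staticClass("scala.Option"))  // resolve a class by its fully qualified name
}}}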
diff --git a/src/library/scala/reflect/base/Names.scala b/src/library/scala/reflect/base/Names.scala
index ad99f54fb3..b02038a920 100644
--- a/src/library/scala/reflect/base/Names.scala
+++ b/src/library/scala/reflect/base/Names.scala
@@ -4,20 +4,26 @@ package base
import scala.language.implicitConversions
/** A trait that manages names.
- * A name is a string in one of two name universes: terms and types.
- * The same string can be a name in both universes.
- * Two names are equal if they represent the same string and they are
- * members of the same universe.
- *
- * Names are interned. That is, for two names `name11 and `name2`,
- * `name1 == name2` implies `name1 eq name2`.
+ *
+ * @see TermName
+ * @see TypeName
*/
trait Names {
- /** Intentionally no implicit from String => Name. */
+ // Intentionally no implicit from String => Name.
implicit def stringToTermName(s: String): TermName = newTermName(s)
implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
- /** The abstract type of names */
+ /**
+ * The abstract type of names
+ *
+   * A Name wraps a string as the name for either a type ([[TypeName]]) or a term ([[TermName]]).
+   * Two names are equal if the wrapped strings are equal and they are both `TypeName`s or both `TermName`s.
+   * The same string can co-exist as a `TypeName` and a `TermName`, but they would not be equal.
+   * Names are interned. That is, for two names `name1` and `name2`,
+   * `name1 == name2` implies `name1 eq name2`.
+   *
+   * One of the reasons for having names rather than plain strings is to be more explicit about what is a name and whether it represents a type or a term.
+ */
type Name >: Null <: NameBase
implicit val NameTag: ClassTag[Name]
@@ -31,16 +37,16 @@ trait Names {
/** The base API that all names support */
abstract class NameBase {
- /** Is this name a term name? */
+    /** Checks whether the name is a term name */
def isTermName: Boolean
- /** Is this name a type name? */
+    /** Checks whether the name is a type name */
def isTypeName: Boolean
- /** Returns a term name that represents the same string as this name */
+ /** Returns a term name that wraps the same string as `this` */
def toTermName: TermName
- /** Returns a type name that represents the same string as this name */
+ /** Returns a type name that wraps the same string as `this` */
def toTypeName: TypeName
}
@@ -52,7 +58,11 @@ trait Names {
*/
def newTypeName(s: String): TypeName
+  /** Wraps the empty string. Can be used as a null object for a term name.
+ */
def EmptyTermName: TermName = newTermName("")
+  /** Wraps the empty string. Can be used as a null object for a type name.
+ */
def EmptyTypeName: TypeName = EmptyTermName.toTypeName
}
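
A short sketch of the equality and interning rules described above, assuming `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._

val n1 = newTermName("foo")
val n2 = newTermName("foo")
println(n1 == n2)                  // true: same string, both term names
println(n1 == newTypeName("foo"))  // false: a TermName is never equal to a TypeName
println(n1.toTypeName.isTypeName)  // true
}}}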
diff --git a/src/library/scala/reflect/base/Positions.scala b/src/library/scala/reflect/base/Positions.scala
index 76a7382e9e..70412a2f4b 100644
--- a/src/library/scala/reflect/base/Positions.scala
+++ b/src/library/scala/reflect/base/Positions.scala
@@ -1,17 +1,22 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for positions.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Positions {
self: Universe =>
- /** .. */
+ /** The base type for all positions of tree nodes in source files. */
type Position >: Null <: Attachments { type Pos = Position }
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
+ /** A tag that preserves the identity of the `Position` abstract type from erasure.
* Can be used for pattern matching, instance tests, serialization and likes.
*/
implicit val PositionTag: ClassTag[Position]
- /** .. */
+ /** A special "missing" position. */
val NoPosition: Position
}
diff --git a/src/library/scala/reflect/base/Scopes.scala b/src/library/scala/reflect/base/Scopes.scala
index a388fdc392..a8c498b814 100644
--- a/src/library/scala/reflect/base/Scopes.scala
+++ b/src/library/scala/reflect/base/Scopes.scala
@@ -1,8 +1,16 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for scopes.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Scopes { self: Universe =>
+ /** The base type of all scopes. A scope object generally maps names to symbols available in the current lexical scope.
+ * Scopes can be nested. This base type, however, only exposes a minimal interface, representing a scope as an iterable of symbols.
+ */
type Scope >: Null <: ScopeBase
/** The base API that all scopes support */
@@ -13,12 +21,13 @@ trait Scopes { self: Universe =>
*/
implicit val ScopeTag: ClassTag[Scope]
+ /** The base type of member scopes, as in class definitions, for example. */
type MemberScope >: Null <: Scope with MemberScopeBase
/** The base API that all member scopes support */
trait MemberScopeBase extends ScopeBase {
/** Sorts the symbols included in this scope so that:
- * 1) Symbols appear the linearization order of their owners.
+ * 1) Symbols appear in the linearization order of their owners.
* 2) Symbols with the same owner appear in reverse order of their declarations.
* 3) Synthetic members (e.g. getters/setters for vals/vars) might appear in arbitrary order.
*/
@@ -30,12 +39,12 @@ trait Scopes { self: Universe =>
*/
implicit val MemberScopeTag: ClassTag[MemberScope]
- /** Create a new scope */
+ /** Create a new scope. */
def newScope: Scope
- /** Create a new scope nested in another one with which it shares its elements */
+ /** Create a new scope nested in another one with which it shares its elements. */
def newNestedScope(outer: Scope): Scope
- /** Create a new scope with given initial elements */
+ /** Create a new scope with the given initial elements. */
def newScopeWith(elems: Symbol*): Scope
} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/StandardDefinitions.scala b/src/library/scala/reflect/base/StandardDefinitions.scala
index 8f1c96ea3f..4df8501b3d 100644
--- a/src/library/scala/reflect/base/StandardDefinitions.scala
+++ b/src/library/scala/reflect/base/StandardDefinitions.scala
@@ -6,14 +6,21 @@
package scala.reflect
package base
+/**
+ * Defines standard symbols and types.
+ */
trait StandardDefinitions {
self: Universe =>
+  /** A value containing all standard definitions. */
val definitions: DefinitionsBase
+ /** Defines standard symbols (and types via its base class). */
trait DefinitionsBase extends StandardTypes {
- // packages
+ /** The class symbol of package `scala`. */
def ScalaPackageClass: ClassSymbol
+
+    /** The module symbol of package `scala`. */
def ScalaPackage: ModuleSymbol
// top types
@@ -37,36 +44,67 @@ trait StandardDefinitions {
def DoubleClass : ClassSymbol
def BooleanClass: ClassSymbol
- // some special classes
+ /** The class symbol of class `String`. */
def StringClass : ClassSymbol
+
+ /** The class symbol of class `Class`. */
def ClassClass : ClassSymbol
+
+ /** The class symbol of class `Array`. */
def ArrayClass : ClassSymbol
+
+ /** The class symbol of class `List`. */
def ListClass : ClassSymbol
- // the Predef object
+ /** The module symbol of `scala.Predef`. */
def PredefModule: ModuleSymbol
}
+ /** Defines standard types. */
trait StandardTypes {
- // the scala value classes
+ /** The `Type` of type `Unit`. */
val UnitTpe: Type
+
+ /** The `Type` of primitive type `Byte`. */
val ByteTpe: Type
+
+ /** The `Type` of primitive type `Short`. */
val ShortTpe: Type
+
+ /** The `Type` of primitive type `Char`. */
val CharTpe: Type
+
+ /** The `Type` of primitive type `Int`. */
val IntTpe: Type
+
+ /** The `Type` of primitive type `Long`. */
val LongTpe: Type
+
+ /** The `Type` of primitive type `Float`. */
val FloatTpe: Type
+
+ /** The `Type` of primitive type `Double`. */
val DoubleTpe: Type
+
+ /** The `Type` of primitive type `Boolean`. */
val BooleanTpe: Type
- // top types
+ /** The `Type` of type `Any`. */
val AnyTpe: Type
+
+ /** The `Type` of type `AnyVal`. */
val AnyValTpe: Type
+
+ /** The `Type` of type `AnyRef`. */
val AnyRefTpe: Type
+
+ /** The `Type` of type `Object`. */
val ObjectTpe: Type
- // bottom types
+ /** The `Type` of type `Nothing`. */
val NothingTpe: Type
+
+ /** The `Type` of type `Null`. */
val NullTpe: Type
}
}
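
For illustration, the standard definitions can be reached through any concrete universe; a sketch using `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._
import definitions._

println(ListClass)               // the class symbol of scala.List
println(IntTpe =:= typeOf[Int])  // true: the cached primitive type is the same as typeOf[Int]
println(NothingTpe <:< AnyTpe)   // true: Nothing conforms to Any
}}}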
diff --git a/src/library/scala/reflect/base/StandardNames.scala b/src/library/scala/reflect/base/StandardNames.scala
index 3e569cd523..0b4ec3728a 100644
--- a/src/library/scala/reflect/base/StandardNames.scala
+++ b/src/library/scala/reflect/base/StandardNames.scala
@@ -11,6 +11,10 @@ package base
// Is it necessary to perform reflection (like ERROR or LOCAL_SUFFIX_STRING)? If yes, then it goes to api.StandardNames.
// Otherwise it goes nowhere - reflection API should stay minimalistic.
+// TODO: document better
+/**
+ * Names necessary to create Scala trees.
+ */
trait StandardNames {
self: Universe =>
diff --git a/src/library/scala/reflect/base/Symbols.scala b/src/library/scala/reflect/base/Symbols.scala
index 3830264425..4a1eef014c 100644
--- a/src/library/scala/reflect/base/Symbols.scala
+++ b/src/library/scala/reflect/base/Symbols.scala
@@ -1,9 +1,14 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for symbols.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Symbols { self: Universe =>
- /** The abstract type of symbols representing declarations */
+ /** The type of symbols representing declarations */
type Symbol >: Null <: SymbolBase
/** A tag that preserves the identity of the `Symbol` abstract type from erasure.
@@ -11,7 +16,7 @@ trait Symbols { self: Universe =>
*/
implicit val SymbolTag: ClassTag[Symbol]
- /** The abstract type of type symbols representing type, class, and trait declarations,
+ /** The type of type symbols representing type, class, and trait declarations,
* as well as type parameters
*/
type TypeSymbol >: Null <: Symbol with TypeSymbolBase
@@ -21,7 +26,7 @@ trait Symbols { self: Universe =>
*/
implicit val TypeSymbolTag: ClassTag[TypeSymbol]
- /** The abstract type of term symbols representing val, var, def, and object declarations as
+ /** The type of term symbols representing val, var, def, and object declarations as
* well as packages and value parameters.
*/
type TermSymbol >: Null <: Symbol with TermSymbolBase
@@ -31,7 +36,7 @@ trait Symbols { self: Universe =>
*/
implicit val TermSymbolTag: ClassTag[TermSymbol]
- /** The abstract type of method symbols representing def declarations */
+ /** The type of method symbols representing def declarations */
type MethodSymbol >: Null <: TermSymbol with MethodSymbolBase
/** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
@@ -39,7 +44,7 @@ trait Symbols { self: Universe =>
*/
implicit val MethodSymbolTag: ClassTag[MethodSymbol]
- /** The abstract type of module symbols representing object declarations */
+ /** The type of module symbols representing object declarations */
type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolBase
/** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
@@ -47,7 +52,7 @@ trait Symbols { self: Universe =>
*/
implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
- /** The abstract type of class symbols representing class and trait definitions */
+ /** The type of class symbols representing class and trait definitions */
type ClassSymbol >: Null <: TypeSymbol with ClassSymbolBase
/** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
@@ -55,7 +60,7 @@ trait Symbols { self: Universe =>
*/
implicit val ClassSymbolTag: ClassTag[ClassSymbol]
- /** The abstract type of free terms introduced by reification */
+ /** The type of free terms introduced by reification */
type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolBase
/** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
@@ -63,7 +68,7 @@ trait Symbols { self: Universe =>
*/
implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
- /** The abstract type of free types introduced by reification */
+ /** The type of free types introduced by reification */
type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolBase
/** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
@@ -81,10 +86,10 @@ trait Symbols { self: Universe =>
* that directly contains the current symbol's definition.
* The `NoSymbol` symbol does not have an owner, and calling this method
* on one causes an internal error.
- * The owner of the Scala root class [[scala.reflect.api.mirror.RootClass]]
- * and the Scala root object [[scala.reflect.api.mirror.RootPackage]] is `NoSymbol`.
+ * The owner of the Scala root class [[scala.reflect.base.MirrorOf.RootClass]]
+ * and the Scala root object [[scala.reflect.base.MirrorOf.RootPackage]] is `NoSymbol`.
* Every other symbol has a chain of owners that ends in
- * [[scala.reflect.api.mirror.RootClass]].
+ * [[scala.reflect.base.MirrorOf.RootClass]].
*/
def owner: Symbol
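
A brief sketch of walking the owner chain, assuming `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._

val listClass = typeOf[List[Int]].typeSymbol
println(listClass.owner)                         // the enclosing package class
println(listClass.owner.owner)                   // one step further towards the root
println(rootMirror.RootClass.owner == NoSymbol)  // true: the owner chain ends at the root class
}}}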
diff --git a/src/library/scala/reflect/base/TagInterop.scala b/src/library/scala/reflect/base/TagInterop.scala
index ec054106eb..e989631abf 100644
--- a/src/library/scala/reflect/base/TagInterop.scala
+++ b/src/library/scala/reflect/base/TagInterop.scala
@@ -4,12 +4,26 @@ package base
import scala.runtime.ScalaRunTime._
trait TagInterop { self: Universe =>
- // todo. `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
+ // TODO `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
// if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
+ /**
+ * Convert a typetag to a pre `Scala-2.10` manifest.
+ * For example
+ * {{{
+ * typeTagToManifest( scala.reflect.runtime.currentMirror, implicitly[TypeTag[String]] )
+ * }}}
+ */
def typeTagToManifest[T: ClassTag](mirror: Any, tag: base.Universe # TypeTag[T]): Manifest[T] =
throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
+ /**
+ * Convert a pre `Scala-2.10` manifest to a typetag.
+ * For example
+ * {{{
+ * manifestToTypeTag( scala.reflect.runtime.currentMirror, implicitly[Manifest[String]] )
+ * }}}
+ */
def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): base.Universe # TypeTag[T] =
throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
}
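
A sketch of round-tripping between tags and manifests, assuming the full runtime universe from scala-reflect.jar (which overrides the stubs above):

{{{
import scala.reflect.runtime.{currentMirror, universe => ru}
import ru._

val tag = typeTag[List[Int]]
val man: Manifest[List[Int]] = typeTagToManifest(currentMirror, tag)
val back = manifestToTypeTag(currentMirror, man)  // a base.Universe#TypeTag[List[Int]]
println(man)
}}}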
diff --git a/src/library/scala/reflect/base/TreeCreator.scala b/src/library/scala/reflect/base/TreeCreator.scala
index c9c8de2307..5de0094f1f 100644
--- a/src/library/scala/reflect/base/TreeCreator.scala
+++ b/src/library/scala/reflect/base/TreeCreator.scala
@@ -1,6 +1,26 @@
package scala.reflect
package base
+/** A mirror-aware factory for trees.
+ *
+ * In the reflection API, artifacts are specific to universes and
+ * symbolic references used in artifacts (e.g. `scala.Int`) are resolved by mirrors.
+ *
+ * Therefore to build a tree one needs to know a universe that the tree is going to be bound to
+ * and a mirror that is going to resolve symbolic references (e.g. to determine that `scala.Int`
+ * points to a core class `Int` from scala-library.jar).
+ *
+ * `TreeCreator` implements this notion by providing a standalone tree factory.
+ *
+ * This is immediately useful for reification. When the compiler reifies an expression,
+ * the end result needs to make sense in any mirror. That's because the compiler knows
+ * the universe it's reifying an expression into (specified by the target of the `reify` call),
+ * but it cannot know in advance the mirror to instantiate the result in (e.g. on JVM
+ * it doesn't know what classloader to use to resolve symbolic names in the reifee).
+ *
+ * Due to a typechecker restriction (no eta-expansion for dependent method types),
+ * `TreeCreator` can't have a functional type, so it's implemented as a class with an apply method.
+ */
abstract class TreeCreator {
def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree
}
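
A minimal hand-written `TreeCreator` sketch (normally these are generated by the compiler during reification): given any mirror, it builds the literal tree `42` in that mirror's universe; it assumes the `Literal` and `Constant` extractors provided by the base universe:

{{{
import scala.reflect.base.{MirrorOf, TreeCreator, Universe}

val fortyTwo = new TreeCreator {
  def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree = {
    val u: U = m.universe
    u.Literal(u.Constant(42))  // build the tree with the universe the mirror belongs to
  }
}
}}}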
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
index 224965a2b7..428b493478 100644
--- a/src/library/scala/reflect/base/Trees.scala
+++ b/src/library/scala/reflect/base/Trees.scala
@@ -9,9 +9,11 @@ trait Trees { self: Universe =>
/** The base API that all trees support */
abstract class TreeBase extends Product { this: Tree =>
+ // TODO
/** ... */
def isDef: Boolean
+ // TODO
/** ... */
def isEmpty: Boolean
@@ -52,7 +54,7 @@ trait Trees { self: Universe =>
* Transformer found around the compiler.
*
* Copying Trees should be done with care depending on whether
- * it need be done lazily or strictly (see LazyTreeCopier and
+   *  it needs to be done lazily or strictly (see LazyTreeCopier and
* StrictTreeCopier) and on whether the contents of the mutable
* fields should be copied. The tree copiers will copy the mutable
* attributes to the new tree; calling Tree#duplicate will copy
@@ -67,8 +69,8 @@ trait Trees { self: Universe =>
*
* SymTrees include important nodes Ident and Select, which are
* used as both terms and types; they are distinguishable based on
- * whether the Name is a TermName or TypeName. The correct way for
- * to test for a type or a term (on any Tree) are the isTerm/isType
+ * whether the Name is a TermName or TypeName. The correct way
+   *  to test whether any Tree is a term or a type is to use the `isTerm`/`isType`
* methods on Tree.
*
* "Others" are mostly syntactic or short-lived constructs. Examples
@@ -86,7 +88,7 @@ trait Trees { self: Universe =>
/** The empty tree */
val EmptyTree: Tree
- /** A tree for a term. Not all terms are TermTrees; use isTerm
+ /** A tree for a term. Not all trees representing terms are TermTrees; use isTerm
* to reliably identify terms.
*/
type TermTree >: Null <: AnyRef with Tree
@@ -96,7 +98,7 @@ trait Trees { self: Universe =>
*/
implicit val TermTreeTag: ClassTag[TermTree]
- /** A tree for a type. Not all types are TypTrees; use isType
+ /** A tree for a type. Not all trees representing types are TypTrees; use isType
* to reliably identify types.
*/
type TypTree >: Null <: AnyRef with Tree
@@ -213,7 +215,7 @@ trait Trees { self: Universe =>
/** An object definition, e.g. `object Foo`. Internally, objects are
* quite frequently called modules to reduce ambiguity.
- * Eliminated by refcheck.
+ * Eliminated by compiler phase refcheck.
*/
type ModuleDef >: Null <: ImplDef
@@ -253,8 +255,8 @@ trait Trees { self: Universe =>
* - immutable values, e.g. "val x"
* - mutable values, e.g. "var x" - the MUTABLE flag set in mods
* - lazy values, e.g. "lazy val x" - the LAZY flag set in mods
- * - method parameters, see vparamss in DefDef - the PARAM flag is set in mods
- * - explicit self-types, e.g. class A { self: Bar => } - !!! not sure what is set.
+ * - method parameters, see vparamss in [[scala.reflect.base.Trees#DefDef]] - the PARAM flag is set in mods
+ * - explicit self-types, e.g. class A { self: Bar => }
*/
type ValDef >: Null <: ValOrDefDef
@@ -267,7 +269,7 @@ trait Trees { self: Universe =>
val ValDef: ValDefExtractor
/** An extractor class to create and pattern match with syntax `ValDef(mods, name, tpt, rhs)`.
- * This AST node corresponds to the following Scala code:
+ * This AST node corresponds to any of the following Scala code:
*
* mods `val` name: tpt = rhs
*
@@ -275,7 +277,7 @@ trait Trees { self: Universe =>
*
* mods name: tpt = rhs // in signatures of function and method definitions
*
- * self: Bar => // self-types (!!! not sure what is set)
+ * self: Bar => // self-types
*
* If the type of a value is not specified explicitly (i.e. is meant to be inferred),
* this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
@@ -369,9 +371,12 @@ trait Trees { self: Universe =>
* This AST node does not have direct correspondence to Scala code.
* It is used for tailcalls and like.
* For example, while/do are desugared to label defs as follows:
- *
+ * {{{
* while (cond) body ==> LabelDef($L, List(), if (cond) { body; L$() } else ())
+ * }}}
+ * {{{
* do body while (cond) ==> LabelDef($L, List(), body; if (cond) L$() else ())
+ * }}}
*/
abstract class LabelDefExtractor {
def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
@@ -427,7 +432,7 @@ trait Trees { self: Universe =>
*
* import expr.{selectors}
*
- * Selectors are a list of pairs of names (from, to). // [Eugene++] obviously, they no longer are. please, document!
+ * Selectors are a list of ImportSelectors, which conceptually are pairs of names (from, to).
* The last (and maybe only name) may be a nme.WILDCARD. For instance:
*
* import qual.{x, y => z, _}
@@ -498,16 +503,16 @@ trait Trees { self: Universe =>
*
* { stats; expr }
*
- * If the block is empty, the `expr` is set to `Literal(Constant(()))`. // [Eugene++] check this
+ * If the block is empty, the `expr` is set to `Literal(Constant(()))`.
*/
abstract class BlockExtractor {
def apply(stats: List[Tree], expr: Tree): Block
def unapply(block: Block): Option[(List[Tree], Tree)]
}
- /** Case clause in a pattern match, eliminated during explicitouter
+ /** Case clause in a pattern match.
* (except for occurrences in switch statements).
- * Eliminated by patmat/explicitouter.
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher)
*/
type CaseDef >: Null <: AnyRef with Tree
@@ -524,17 +529,19 @@ trait Trees { self: Universe =>
*
* `case` pat `if` guard => body
*
- * If the guard is not present, the `guard` is set to `EmptyTree`. // [Eugene++] check this
- * If the body is not specified, the `body` is set to `EmptyTree`. // [Eugene++] check this
+ * If the guard is not present, the `guard` is set to `EmptyTree`.
+   *  If the body is not specified, the `body` is set to `Literal(Constant(()))`.
*/
abstract class CaseDefExtractor {
def apply(pat: Tree, guard: Tree, body: Tree): CaseDef
def unapply(caseDef: CaseDef): Option[(Tree, Tree, Tree)]
}
- /** Alternatives of patterns, eliminated by explicitouter, except for
- * occurrences in encoded Switch stmt (=remaining Match(CaseDef(...)))
- * Eliminated by patmat/explicitouter.
+ /** Alternatives of patterns.
+ *
+   *  Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher),
+   *  except for occurrences in encoded Switch statements (i.e. remaining Match(CaseDef(...))).
*/
type Alternative >: Null <: TermTree
@@ -557,7 +564,8 @@ trait Trees { self: Universe =>
}
/** Repetition of pattern.
- * Eliminated by patmat/explicitouter.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
*/
type Star >: Null <: TermTree
@@ -579,8 +587,9 @@ trait Trees { self: Universe =>
def unapply(star: Star): Option[Tree]
}
- /** Bind of a variable to a rhs pattern, eliminated by explicitouter
- * Eliminated by patmat/explicitouter.
+ /** Bind a variable to a rhs pattern.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
*
* @param name
* @param body
@@ -605,10 +614,32 @@ trait Trees { self: Universe =>
def unapply(bind: Bind): Option[(Name, Tree)]
}
- /** Noone knows what this is.
- * It is not idempotent w.r.t typechecking.
- * Can we, please, remove it?
- * Introduced by typer, eliminated by patmat/explicitouter.
+ /**
+ * Used to represent `unapply` methods in pattern matching.
+ *
+ * For example:
+ * {{{
+ * 2 match { case Foo(x) => x }
+ * }}}
+ *
+ * Is represented as:
+ * {{{
+ * Match(
+ * Literal(Constant(2)),
+ * List(
+ * CaseDef(
+ * UnApply(
+ * // a dummy node that carries the type of unapplication to patmat
+ * // the <unapply-selector> here doesn't have an underlying symbol
+ * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+ * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * // arguments of the unapply => nothing synthetic here
+ * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * EmptyTree,
+ * Ident(newTermName("x")))))
+ * }}}
+ *
+ * Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
*/
type UnApply >: Null <: TermTree
@@ -629,9 +660,9 @@ trait Trees { self: Universe =>
def unapply(unApply: UnApply): Option[(Tree, List[Tree])]
}
- /** Array of expressions, needs to be translated in backend.
- * This AST node is used to pass arguments to vararg arguments.
- * Introduced by uncurry.
+ /** An array of expressions. This AST node needs to be translated in backend.
+ * It is used to pass arguments to vararg arguments.
+ * Introduced by compiler phase uncurry.
*/
type ArrayValue >: Null <: TermTree
@@ -649,7 +680,7 @@ trait Trees { self: Universe =>
*
* printf("%s%d", foo, 42)
*
- * Is translated to after uncurry to:
+   *  Is translated after compiler phase uncurry to:
*
* Apply(
* Ident("printf"),
@@ -661,7 +692,7 @@ trait Trees { self: Universe =>
def unapply(arrayValue: ArrayValue): Option[(Tree, List[Tree])]
}
- /** Anonymous function, eliminated by lambdalift */
+ /** Anonymous function, eliminated by compiler phase lambdalift */
type Function >: Null <: TermTree with SymTree
/** A tag that preserves the identity of the `Function` abstract type from erasure.
@@ -674,10 +705,10 @@ trait Trees { self: Universe =>
/** An extractor class to create and pattern match with syntax `Function(vparams, body)`.
* This AST node corresponds to the following Scala code:
- *
+ *
* vparams => body
*
- * The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
+ * The symbol of a Function is a synthetic TermSymbol.
* It is the owner of the function's parameters.
*/
abstract class FunctionExtractor {
@@ -707,7 +738,7 @@ trait Trees { self: Universe =>
}
/** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply), resurrected by reifier.
+ * eliminated by compiler phase typecheck (doTypedApply), resurrected by reifier.
*/
type AssignOrNamedArg >: Null <: TermTree
@@ -722,9 +753,13 @@ trait Trees { self: Universe =>
/** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`.
* This AST node corresponds to the following Scala code:
*
+ * {{{
+ * m.f(lhs = rhs)
+ * }}}
+ * {{{
* @annotation(lhs = rhs)
+ * }}}
*
- * m.f(lhs = rhs)
*/
abstract class AssignOrNamedArgExtractor {
def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg
@@ -747,17 +782,17 @@ trait Trees { self: Universe =>
*
* `if` (cond) thenp `else` elsep
*
- * If the alternative is not present, the `elsep` is set to `EmptyTree`. // [Eugene++] check this
+ * If the alternative is not present, the `elsep` is set to `Literal(Constant(()))`.
*/
abstract class IfExtractor {
def apply(cond: Tree, thenp: Tree, elsep: Tree): If
def unapply(if_ : If): Option[(Tree, Tree, Tree)]
}
- /** - Pattern matching expression (before explicitouter)
- * - Switch statements (after explicitouter)
+  /** - Pattern matching expression (before the pattern matching phase: explicitouter before 2.10, patmat from 2.10)
+   *  - Switch statements (after the pattern matching phase: explicitouter before 2.10, patmat from 2.10)
*
- * After explicitouter, cases will satisfy the following constraints:
+   *  After the pattern matching phase (explicitouter before 2.10, patmat from 2.10), cases will satisfy the following constraints:
*
* - all guards are `EmptyTree`,
* - all patterns will be either `Literal(Constant(x:Int))`
@@ -780,7 +815,7 @@ trait Trees { self: Universe =>
*
* selector `match` { cases }
*
- * // [Eugene++] say something about `val (foo, bar) = baz` and likes.
+ * `Match` is also used in pattern matching assignments like `val (foo, bar) = baz`.
*/
abstract class MatchExtractor {
def apply(selector: Tree, cases: List[CaseDef]): Match
@@ -803,7 +838,7 @@ trait Trees { self: Universe =>
*
* `return` expr
*
- * The symbol of a Return node is the enclosing method
+ * The symbol of a Return node is the enclosing method.
*/
abstract class ReturnExtractor {
def apply(expr: Tree): Return
@@ -826,7 +861,7 @@ trait Trees { self: Universe =>
*
* `try` block `catch` { catches } `finally` finalizer
*
- * If the finalizer is not present, the `finalizer` is set to `EmptyTree`. // [Eugene++] check this
+ * If the finalizer is not present, the `finalizer` is set to `EmptyTree`.
*/
abstract class TryExtractor {
def apply(block: Tree, catches: List[CaseDef], finalizer: Tree): Try
@@ -855,9 +890,6 @@ trait Trees { self: Universe =>
}
/** Object instantiation
- * One should always use factory method below to build a user level new.
- *
- * @param tpt a class type
*/
type New >: Null <: TermTree
@@ -866,7 +898,8 @@ trait Trees { self: Universe =>
*/
implicit val NewTag: ClassTag[New]
- /** The constructor/deconstructor for `New` instances. */
+ /** The constructor/deconstructor for `New` instances.
+ */
val New: NewExtractor
/** An extractor class to create and pattern match with syntax `New(tpt)`.
@@ -879,11 +912,16 @@ trait Trees { self: Universe =>
* (`new` tpt).<init>[targs](args)
*/
abstract class NewExtractor {
+ /** A user level `new`.
+ * One should always use this factory method to build a user level `new`.
+ *
+ * @param tpt a class type
+ */
def apply(tpt: Tree): New
def unapply(new_ : New): Option[Tree]
}
- /** Type annotation, eliminated by cleanup */
+ /** Type annotation, eliminated by compiler phase cleanup */
type Typed >: Null <: TermTree
/** A tag that preserves the identity of the `Typed` abstract type from erasure.
@@ -904,11 +942,7 @@ trait Trees { self: Universe =>
def unapply(typed: Typed): Option[(Tree, Tree)]
}
- /** Common base class for Apply and TypeApply. This could in principle
- * be a SymTree, but whether or not a Tree is a SymTree isn't used
- * to settle any interesting questions, and it would add a useless
- * field to all the instances (useless, since GenericApply forwards to
- * the underlying fun.)
+ /** Common base class for Apply and TypeApply.
*/
type GenericApply >: Null <: TermTree
@@ -917,11 +951,11 @@ trait Trees { self: Universe =>
*/
implicit val GenericApplyTag: ClassTag[GenericApply]
- /** Explicit type application.
- * @PP: All signs point toward it being a requirement that args.nonEmpty,
+ /* @PP: All signs point toward it being a requirement that args.nonEmpty,
* but I can't find that explicitly stated anywhere. Unless your last name
* is odersky, you should probably treat it as true.
*/
+ /** Explicit type application. */
type TypeApply >: Null <: GenericApply
/** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
@@ -971,38 +1005,8 @@ trait Trees { self: Universe =>
def unapply(apply: Apply): Option[(Tree, List[Tree])]
}
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- * [Eugene++] what is it used for?
- * Introduced by erasure, eliminated by cleanup.
- */
- type ApplyDynamic >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `ApplyDynamic` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ApplyDynamicTag: ClassTag[ApplyDynamic]
-
- /** The constructor/deconstructor for `ApplyDynamic` instances. */
- val ApplyDynamic: ApplyDynamicExtractor
-
- /** An extractor class to create and pattern match with syntax `ApplyDynamic(qual, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun(args)
- *
- * The symbol of an ApplyDynamic is the function symbol of `qual`, or NoSymbol, if there is none.
- */
- abstract class ApplyDynamicExtractor {
- def apply(qual: Tree, args: List[Tree]): ApplyDynamic
- def unapply(applyDynamic: ApplyDynamic): Option[(Tree, List[Tree])]
- }
-
- /** Super reference, qual = corresponding this reference
- * A super reference C.super[M] is represented as Super(This(C), M).
+ /** Super reference, where `qual` is the corresponding `this` reference.
+ * A super reference `C.super[M]` is represented as `Super(This(C), M)`.
*/
type Super >: Null <: TermTree
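
To see how these tree nodes compose in practice, a small sketch using the 2.10 runtime universe and its printers:

{{{
import scala.reflect.runtime.universe._

val tree = reify { List(1, 2).map(_ + 1) }.tree
println(show(tree))     // roughly the original source
println(showRaw(tree))  // the raw Apply/Select/Ident/Literal structure

// trees can also be assembled directly from the extractors above
val sum = Apply(Select(Literal(Constant(2)), newTermName("$plus")), List(Literal(Constant(3))))
}}}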
diff --git a/src/library/scala/reflect/base/TypeCreator.scala b/src/library/scala/reflect/base/TypeCreator.scala
index 8a14e53dd3..0260fe1410 100644
--- a/src/library/scala/reflect/base/TypeCreator.scala
+++ b/src/library/scala/reflect/base/TypeCreator.scala
@@ -1,6 +1,26 @@
package scala.reflect
package base
+/** A mirror-aware factory for types.
+ *
+ * In the reflection API, artifacts are specific to universes and
+ * symbolic references used in artifacts (e.g. `scala.Int`) are resolved by mirrors.
+ *
+ * Therefore to build a type one needs to know a universe that the type is going to be bound to
+ * and a mirror that is going to resolve symbolic references (e.g. to determine that `scala.Int`
+ * points to a core class `Int` from scala-library.jar).
+ *
+ * `TypeCreator` implements this notion by providing a standalone type factory.
+ *
+ * This is immediately useful for type tags. When the compiler creates a type tag,
+ * the end result needs to make sense in any mirror. That's because the compiler knows
+ * the universe it's creating a type tag for (since `TypeTag` is path-dependent on a universe),
+ * but it cannot know in advance the mirror to instantiate the result in (e.g. on JVM
+ * it doesn't know what classloader to use to resolve symbolic names in the type tag).
+ *
+ * Due to a typechecker restriction (no eta-expansion for dependent method types),
+ * `TypeCreator` can't have a functional type, so it's implemented as a class with an apply method.
+ */
abstract class TypeCreator {
def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type
}
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala
index ec86bbc9be..db9fa95553 100644
--- a/src/library/scala/reflect/base/TypeTags.scala
+++ b/src/library/scala/reflect/base/TypeTags.scala
@@ -3,51 +3,86 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package base
import java.lang.{ Class => jClass }
import scala.language.implicitConversions
+/*
+ * TODO
+ * add @see to docs about universes
+ * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
+ * [Chris++] tag.in(some mirror) or expr.in(some mirror) (does not work for tag and exprs in macros)
+ * Backwards compat item1: [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
+ */
/**
- * Type tags encapsulate a representation of type T.
- * They are supposed to replace the pre-2.10 concept of a [[scala.reflect.Manifest]].
- * TypeTags are much better integrated with reflection than manifests are, and are consequently much simpler.
+ * A type tag encapsulates a representation of type T.
+ *
+ * Type tags replace the pre-2.10 concept of a [[scala.reflect.Manifest]] and are integrated with reflection.
*
- * === Overview ===
+ * === Overview and examples ===
*
* Type tags are organized in a hierarchy of three classes:
* [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#WeakTypeTag]].
+ *
+ * @see [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]], [[scala.reflect.base.Universe#WeakTypeTag]]
*
- * A [[scala.reflect.ClassTag]] carries a runtime class that corresponds to the source type T.
- * As of such, it possesses the knowledge about how to build single- and multi-dimensional arrays of elements of that type.
- * It guarantees that the source type T did not to contain any references to type parameters or abstract types.
- * [[scala.reflect.ClassTag]] corresponds to a previous notion of [[scala.reflect.ClassManifest]].
- *
- * A [[scala.reflect.base.Universe#WeakTypeTag]] value wraps a full Scala type in its tpe field.
- * A [[scala.reflect.base.Universe#TypeTag]] value is an [[scala.reflect.base.Universe#WeakTypeTag]]
- * that is guaranteed not to contain any references to type parameters or abstract types.
- *
- * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
- * [Eugene++] migratability between mirrors and universes is also worth mentioning
- *
- * === Splicing ===
+ * Examples:
+ * {{{
+ * scala> class Person
+ * scala> class Container[T]
+ * scala> import scala.reflect.ClassTag
+ * scala> import scala.reflect.runtime.universe._   // brings TypeTag, WeakTypeTag and TypeRef into scope
+ * scala> def firstTypeArg( tag: WeakTypeTag[_] ) = (tag.tpe match {case TypeRef(_,_,typeArgs) => typeArgs})(0)
+ * }}}
+ * TypeTag contains concrete type arguments:
+ * {{{
+ * scala> firstTypeArg( implicitly[TypeTag[Container[Person]]] )
+ * res0: reflect.runtime.universe.Type = Person
+ * }}}
+ * TypeTag guarantees concrete type arguments (fails for references to unbound type arguments):
+ * {{{
+ * scala> def foo1[T] = implicitly[TypeTag[Container[T]]]
+ * <console>:11: error: No TypeTag available for Container[T]
+ * def foo1[T] = implicitly[TypeTag[Container[T]]]
+ * }}}
+ * WeakTypeTag allows references to unbound type arguments:
+ * {{{
+ * scala> def foo2[T] = firstTypeArg( implicitly[WeakTypeTag[Container[T]]] )
+ * foo2: [T]=> reflect.runtime.universe.Type
+ * scala> foo2[Person]
+ * res1: reflect.runtime.universe.Type = T
+ * }}}
+ * TypeTag allows unbound type arguments for which type tags are available:
+ * {{{
+ * scala> def foo3[T:TypeTag] = firstTypeArg( implicitly[TypeTag[Container[T]]] )
+ * foo3: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Type
+ * scala> foo3[Person]
+ * res1: reflect.runtime.universe.Type = Person
+ * }}}
+ * WeakTypeTag contains concrete type arguments if available via existing tags:
+ * {{{
+ * scala> def foo4[T:WeakTypeTag] = firstTypeArg( implicitly[WeakTypeTag[Container[T]]] )
+ * foo4: [T](implicit evidence$1: reflect.runtime.universe.WeakTypeTag[T])reflect.runtime.universe.Type
+ * scala> foo4[Person]
+ * res1: reflect.runtime.universe.Type = Person
+ * }}}
*
- * Tags can be spliced, i.e. if compiler generates a tag for a type that contains references to tagged
- * type parameters or abstract type members, it will retrieve the corresponding tag and embed it into the result.
- * An example that illustrates the TypeTag embedding, consider the following function:
*
- * import reflect.mirror._
- * def f[T: TypeTag, U] = {
- * type L = T => U
- * implicitly[WeakTypeTag[L]]
- * }
+ * [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#WeakTypeTag]] are path dependent on their universe.
*
- * Then a call of f[String, Int] will yield a result of the form
+ * The default universe is [[scala.reflect.runtime.universe]].
+ *
+ * Type tags can be migrated to another universe given the corresponding mirror using
*
- * WeakTypeTag(<[ String => U ]>).
- *
- * Note that T has been replaced by String, because it comes with a TypeTag in f, whereas U was left as a type parameter.
+ * {{{
+ * tag.in( other_mirror )
+ * }}}
+ *
+ * See [[scala.reflect.base.TypeTags#WeakTypeTag.in]]
*
* === WeakTypeTag vs TypeTag ===
*
@@ -55,14 +90,16 @@ import scala.language.implicitConversions
* This makes it easy to forget to tag one of the methods in the call chain and discover it much later in the runtime
* by getting cryptic errors far away from their source. For example, consider the following snippet:
*
+ * {{{
* def bind[T: WeakTypeTag](name: String, value: T): IR.Result = bind((name, value))
* def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
* object NamedParam {
* implicit def namedValue[T: WeakTypeTag](name: String, x: T): NamedParam = apply(name, x)
* def apply[T: WeakTypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
* }
- *
- * This fragment of Scala REPL implementation defines a `bind` function that carries a named value along with its type
+ * }}}
+ *
+ * This fragment of the Scala REPL implementation defines a `bind` function that carries a named value along with its type
* into the heart of the REPL. Using a [[scala.reflect.base.Universe#WeakTypeTag]] here is reasonable, because it is desirable
* to work with all types, even if they are type parameters or abstract type members.
*
@@ -72,7 +109,7 @@ import scala.language.implicitConversions
* If `WeakTypeTag` context bounds were replaced with `TypeTag`, then such errors would be reported statically.
* But in that case we wouldn't be able to use `bind` in arbitrary contexts.
*
- * === Backward compatibility ===
+ * === Backward compatibility with Manifests ===
*
* Type tags correspond loosely to manifests.
*
@@ -80,50 +117,72 @@ import scala.language.implicitConversions
* The previous notion of a [[scala.reflect.ClassManifest]] corresponds to a scala.reflect.ClassTag,
* The previous notion of a [[scala.reflect.Manifest]] corresponds to scala.reflect.runtime.universe.TypeTag,
*
- * In Scala 2.10, manifests are deprecated, so it's adviseable to migrate them to tags,
- * because manifests might be removed in the next major release.
+ * In Scala 2.10, manifests are deprecated, so it's advisable to migrate them to tags,
+ * because manifests will probably be removed in the next major release.
*
- * In most cases it will be enough to replace ClassManifests with ClassTags and Manifests with TypeTags,
- * however there are a few caveats:
+ * In most cases it will be enough to replace ClassManifest with ClassTag and Manifest with TypeTag.
+ * There are however a few caveats:
*
* 1) The notion of OptManifest is no longer supported. Tags can reify arbitrary types, so they are always available.
- * // [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
*
* 2) There's no equivalent for AnyValManifest. Consider comparing your tag with one of the base tags
* (defined in the corresponding companion objects) to find out whether it represents a primitive value class.
* You can also use `<tag>.tpe.typeSymbol.isPrimitiveValueClass` for that purpose (requires scala-reflect.jar).
*
* 3) There's no replacement for factory methods defined in `ClassManifest` and `Manifest` companion objects.
- * Consider assembling corresponding types using reflection API provided by Java (for classes) and Scala (for types).
+ * Consider assembling corresponding types using the reflection APIs provided by Java (for classes) and Scala (for types).
*
* 4) Certain manifest functions (such as `<:<`, `>:>` and `typeArguments`) weren't included in the tag API.
- * Consider using reflection API provided by Java (for classes) and Scala (for types) instead.
+ * Consider using the reflection APIs provided by Java (for classes) and Scala (for types) instead.
*/
trait TypeTags { self: Universe =>
import definitions._
/**
- * If an implicit value of type u.WeakTypeTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its tpe field a value of type u.Type that is a reflective representation of T.
- * In that value, any occurrences of type parameters or abstract types U
- * which come themselves with a TypeTag are represented by the type referenced by that TypeTag.
+ * If an implicit value of type WeakTypeTag[T] is required, the compiler will create one.
+ * A reflective representation of T can be accessed via the tpe field.
+ * Components of T can be references to type parameters or abstract types. WeakTypeTag makes an effort to
+ * be as concrete as possible, i.e. if type tags are available for the referenced type arguments or abstract types,
+ * they are used to embed the concrete types into the WeakTypeTag. Otherwise the WeakTypeTag will contain a reference
+ * to an abstract type. This behavior can be useful when one expects T to be possibly partially abstract but
+ * requires special care to handle this case. If, however, T is expected to be fully known, use
+ * [[scala.reflect.base.Universe#TypeTag]] instead, which statically guarantees this property.
*
* @see [[scala.reflect.base.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No WeakTypeTag available for ${T}")
trait WeakTypeTag[T] extends Equals with Serializable {
+ /**
+ * Mirror corresponding to the universe of this WeakTypeTag.
+ */
val mirror: Mirror
+ /**
+ * Migrates type tag to another universe.
+ *
+ * Type tags are path dependent on their universe. This method allows migration
+ * given the mirror corresponding to the target universe.
+ *
+ * Migration means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
+ */
def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # WeakTypeTag[T]
+
+ /**
+ * Reflective representation of type T.
+ */
def tpe: Type
- /** case class accessories */
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[WeakTypeTag[_]]
override def equals(x: Any) = x.isInstanceOf[WeakTypeTag[_]] && this.mirror == x.asInstanceOf[WeakTypeTag[_]].mirror && this.tpe == x.asInstanceOf[WeakTypeTag[_]].tpe
override def hashCode = mirror.hashCode * 31 + tpe.hashCode
override def toString = "WeakTypeTag[" + tpe + "]"
}
+ /**
+ * Type tags corresponding to primitive types, plus a constructor/extractor for WeakTypeTags.
+ */
object WeakTypeTag {
val Byte : WeakTypeTag[scala.Byte] = TypeTag.Byte
val Short : WeakTypeTag[scala.Short] = TypeTag.Short
@@ -141,6 +200,7 @@ trait TypeTags { self: Universe =>
val Nothing : WeakTypeTag[scala.Nothing] = TypeTag.Nothing
val Null : WeakTypeTag[scala.Null] = TypeTag.Null
+
def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
tpec1(mirror1) match {
case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
@@ -174,16 +234,20 @@ trait TypeTags { self: Universe =>
}
/**
- * If an implicit value of type u.TypeTag[T] is required, the compiler will make one up on demand following the same procedure as for TypeTags.
- * However, if the resulting type still contains references to type parameters or abstract types, a static error results.
+ * A `TypeTag` is a [[scala.reflect.base.Universe#WeakTypeTag]] with the additional
+ * static guarantee that all type references are concrete, i.e. it does <b>not</b> contain any references to
+ * unresolved type parameters or abstract types.
*
* @see [[scala.reflect.base.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
trait TypeTag[T] extends WeakTypeTag[T] with Equals with Serializable {
+ /**
+ * @inheritdoc
+ */
override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T]
- /** case class accessories */
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.mirror == x.asInstanceOf[TypeTag[_]].mirror && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
override def hashCode = mirror.hashCode * 31 + tpe.hashCode
@@ -238,23 +302,36 @@ trait TypeTags { self: Universe =>
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
}
- private class PredefTypeCreator[T](copyIn: Universe => Universe # TypeTag[T]) extends TypeCreator {
+ private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator {
def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type = {
copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
}
}
- private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe # TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
+ private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
override lazy val tpe: Type = _tpe
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
}
- // incantations
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]]`
+ */
def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]]`
+ */
def typeTag[T](implicit ttag: TypeTag[T]) = ttag
// big thanks to Viktor Klang for this brilliant idea!
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]].tpe`
+ */
def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]].tpe`
+ */
def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
}
@@ -275,3 +352,4 @@ private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Bool
else WeakTypeTag(rootMirror, tpec)
}
}
+ \ No newline at end of file
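A brief usage sketch of the `weakTypeOf`/`typeOf` shortcuts documented above (assumes a Scala 2.10 setup with scala-reflect.jar on the classpath; the object name is illustrative):
{{{
import scala.reflect.runtime.universe._

object TagShortcuts extends App {
  // typeOf[T] is shorthand for implicitly[TypeTag[T]].tpe
  println(typeOf[List[Int]])                  // List[Int]

  // weakTypeOf[T] also works when T is only partially known,
  // as long as a WeakTypeTag[T] is in scope
  def describe[T: WeakTypeTag]: String = weakTypeOf[T].toString
  println(describe[Map[String, Int]])         // Map[String,Int]
}
}}}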
diff --git a/src/library/scala/reflect/base/Types.scala b/src/library/scala/reflect/base/Types.scala
index b016b77f36..b2ee3bc4d3 100644
--- a/src/library/scala/reflect/base/Types.scala
+++ b/src/library/scala/reflect/base/Types.scala
@@ -1,6 +1,14 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for types.
+ *
+ * Note: Because of implementation details, some type factories have return type `Type`
+ * instead of a more precise type.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Types { self: Universe =>
/** The type of Scala types, and also Scala type signatures.
@@ -26,14 +34,14 @@ trait Types { self: Universe =>
*/
val NoPrefix: Type
- /** The type of Scala singleton types, i.e. types that are inhabited
+ /** The type of Scala singleton types, i.e., types that are inhabited
* by only one non-null value. These include types of the forms
* {{{
* C.this.type
* C.super.type
* x.type
* }}}
- * as well as constant types.
+ * as well as [[ConstantType constant types]].
*/
type SingletonType >: Null <: Type
@@ -42,8 +50,8 @@ trait Types { self: Universe =>
*/
implicit val SingletonTypeTag: ClassTag[SingletonType]
- /** The `ThisType` type describes types of the form on the left with the
- * correspnding ThisType representations to the right.
+ /** A singleton type that describes types of the form on the left with the
+ * corresponding `ThisType` representation to the right:
* {{{
* C.this.type ThisType(C)
* }}}
@@ -62,7 +70,10 @@ trait Types { self: Universe =>
* where `sym` is the class prefix of the this type.
*/
abstract class ThisTypeExtractor {
- def apply(sym: Symbol): Type // not ThisTypebecause of implementation details
+ /**
+ * Creates a ThisType from the given class symbol.
+ */
+ def apply(sym: Symbol): Type
def unapply(tpe: ThisType): Option[Symbol]
}
@@ -120,7 +131,7 @@ trait Types { self: Universe =>
}
/** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant.
- * The REPL expresses constant types like Int(11). Here are some constants with their types.
+ * The REPL expresses constant types like `Int(11)`. Here are some constants with their types:
* {{{
* 1 ConstantType(Constant(1))
* "abc" ConstantType(Constant("abc"))
@@ -362,8 +373,8 @@ trait Types { self: Universe =>
* `selfSym` is a symbol representing the annotated type itself.
*/
abstract class AnnotatedTypeExtractor {
- def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
- def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
+ def apply(annotations: List[Annotation], underlying: Type, selfsym: Symbol): AnnotatedType
+ def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type, Symbol)]
}
/** The `TypeBounds` type signature is used to indicate lower and upper type bounds
@@ -401,7 +412,7 @@ trait Types { self: Universe =>
val WildcardType: Type
/** BoundedWildcardTypes, used only during type inference, are created in
- * two places that I can find:
+ * two places:
*
* 1. If the expected type of an expression is an existential type,
* its hidden symbols are replaced with bounded wildcards.
@@ -417,8 +428,12 @@ trait Types { self: Universe =>
*/
implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+ /** The constructor/deconstructor for `BoundedWildcardType` instances. */
val BoundedWildcardType: BoundedWildcardTypeExtractor
+ /** An extractor class to create and pattern match with syntax `BoundedWildcardTypeExtractor(bounds)`
+ * with `bounds` denoting the type bounds.
+ */
abstract class BoundedWildcardTypeExtractor {
def apply(bounds: TypeBounds): BoundedWildcardType
def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
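A hedged sketch of pattern matching with the type extractors described above (assumes the runtime universe from scala-reflect.jar):
{{{
import scala.reflect.runtime.universe._

// TypeRef is one of the extractors provided by the Types slice of the cake;
// matching on it recovers the prefix, the symbol and the type arguments.
typeOf[List[Int]] match {
  case TypeRef(pre, sym, args) => println(s"sym=$sym, args=$args")
  case _                       => println("not a TypeRef")
}
}}}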
diff --git a/src/library/scala/reflect/base/Universe.scala b/src/library/scala/reflect/base/Universe.scala
index 18599ad092..0b5d5ed685 100644
--- a/src/library/scala/reflect/base/Universe.scala
+++ b/src/library/scala/reflect/base/Universe.scala
@@ -8,7 +8,7 @@ abstract class Universe extends Symbols
with Names
with Trees
with Constants
- with AnnotationInfos
+ with Annotations
with Positions
with Exprs
with TypeTags
@@ -18,24 +18,39 @@ abstract class Universe extends Symbols
with BuildUtils
with Mirrors
{
- /** Given an expression, generate a tree that when compiled and executed produces the original tree.
- * The produced tree will be bound to the Universe it was called from.
+ /** Produce the abstract syntax tree representing the given Scala expression.
+ *
+ * For example
+ *
+ * {{{
+ * val five = reify{ 5 } // Literal(Constant(5))
+ * reify{ 2 + 4 } // Apply( Select( Literal(Constant(2)), newTermName("$plus")), List( Literal(Constant(4)) ) )
+ * reify{ five.splice + 4 } // Apply( Select( Literal(Constant(5)), newTermName("$plus")), List( Literal(Constant(4)) ) )
+ * }}}
+ *
+ * The produced tree is path dependent on the Universe `reify` was called from.
+ *
+ * Use [[scala.reflect.base.Exprs#Expr.splice]] to embed an existing expression into a reify call. Use [[Expr]] to turn a [[Tree]] into an expression that can be spliced.
+ *
+ * == Further info and implementation details ==
+ *
+ * `reify` is implemented as a macro, which given an expression, generates a tree that when compiled and executed produces the original tree.
*
- * For instance, given the abstract syntax tree representation of the <[ x + 1 ]> expression:
+ * For instance in `reify{ x + 1 }` the macro `reify` receives the abstract syntax tree of `x + 1` as its argument, which is
*
* {{{
* Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
* }}}
*
- * The reifier transforms it to the following expression:
+ * and returns a tree which, when compiled and executed, produces the tree above. In other words, the reify call expands to something like
*
* {{{
- * <[
* val $u: u.type = u // where u is a reference to the Universe that calls the reify
* $u.Expr[Int]($u.Apply($u.Select($u.Ident($u.newFreeVar("x", <Int>, x), "+"), List($u.Literal($u.Constant(1))))))
- * ]>
* }}}
- *
+ *
+ * ------
+ *
* Reification performs expression splicing (when processing Expr.splice)
* and type splicing (for every type T that has a TypeTag[T] implicit in scope):
*
@@ -54,13 +69,12 @@ abstract class Universe extends Symbols
* }}}
*
* The transformation looks mostly straightforward, but it has its tricky parts:
- * * Reifier retains symbols and types defined outside the reified tree, however
+ * - Reifier retains symbols and types defined outside the reified tree, however
* locally defined entities get erased and replaced with their original trees
- * * Free variables are detected and wrapped in symbols of the type FreeVar
- * * Mutable variables that are accessed from a local function are wrapped in refs
- * * Since reified trees can be compiled outside of the scope they've been created in,
- * special measures are taken to ensure that all members accessed in the reifee remain visible
+ * - Free variables are detected and wrapped in symbols of the type `FreeTermSymbol` or `FreeTypeSymbol`
+ * - Mutable variables that are accessed from a local function are wrapped in refs
*/
- // implementation is magically hardwired to `scala.reflect.reify.Taggers`
+ // implementation is hardwired to `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
def reify[T](expr: T): Expr[T] = ??? // macro
} \ No newline at end of file
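For illustration, a minimal sketch of `reify` and `splice` as described above (assumes the runtime universe; the printed tree is indicative, not an exact rendering):
{{{
import scala.reflect.runtime.universe._

val five: Expr[Int] = reify { 5 }                // wraps Literal(Constant(5))
val sum:  Expr[Int] = reify { five.splice + 4 }  // splices the existing Expr into the new tree
println(sum.tree)                                // roughly: 5.$plus(4)
}}}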
diff --git a/src/library/scala/reflect/macros/internal/package.scala b/src/library/scala/reflect/macros/internal/package.scala
index aca2b765f1..8457285752 100644
--- a/src/library/scala/reflect/macros/internal/package.scala
+++ b/src/library/scala/reflect/macros/internal/package.scala
@@ -4,7 +4,8 @@ import scala.reflect.base.{Universe => BaseUniverse}
import scala.reflect.ClassTag
// anchors for materialization macros emitted during tag materialization in Implicits.scala
-// implementation is magically hardwired into `scala.reflect.reify.Taggers`
+// implementation is hardwired into `scala.reflect.reify.Taggers`
+// using the mechanism implemented in `scala.tools.reflect.FastTrack`
// todo. once we have implicit macros for tag generation, we can remove these anchors
package object internal {
private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = ??? // macro
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index d97f2ec633..046491ae10 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,5 +1,74 @@
package scala
+/**
+ * The base package of Scala's reflection library.
+ *
+ * The reflection library is structured according to the 'cake pattern'. The base layer
+ * resides in package [[scala.reflect.base]] and defines an interface to the following main types:
+ *
+ * - [[scala.reflect.base.Types#Type Types]] represent types
+ * - [[scala.reflect.base.Symbols#Symbol Symbols]] represent definitions
+ * - [[scala.reflect.base.Trees#Tree Trees]] represent abstract syntax trees
+ * - [[scala.reflect.base.Names#Name Names]] represent term and type names
+ * - [[scala.reflect.base.Annotations#Annotation Annotations]] represent annotations
+ * - [[scala.reflect.base.Positions#Position Positions]] represent source positions of tree nodes
+ * - [[scala.reflect.base.FlagSets#FlagSet FlagSets]] represent sets of flags that apply to symbols and
+ * definition trees
+ * - [[scala.reflect.base.Constants#Constant Constants]] represent compile-time constants.
+ *
+ * Each of these types is defined in its own enclosing trait, all of which are ultimately inherited by class
+ * [[scala.reflect.base.Universe Universe]]. The base universe defines a minimal interface to the above types.
+ * Universes that provide additional functionality, such as deeper introspection or runtime code generation,
+ * are defined in packages [[scala.reflect.api]] and `scala.tools.reflect`.
+ *
+ * The cake pattern employed here requires writing certain Scala idioms with more indirections than usual.
+ * What follows is a description of these indirections, which will help in navigating the Scaladocs.
+ *
+ * For instance, consider the base type of all abstract syntax trees: [[scala.reflect.base.Trees#Tree]].
+ * This type is not a class but is abstract and has an upper bound of [[scala.reflect.base.Trees#TreeBase]],
+ * which is a class defining the minimal base interface for all trees.
+ *
+ * For a more interesting tree type, consider [[scala.reflect.base.Trees#If]] representing if-expressions.
+ * It does not come with a class `IfBase`, since it does not add anything to the interface of its upper
+ * bound `TermTree`. However, it is defined next to a value `If` of type [[scala.reflect.base.Trees#IfExtractor]].
+ * This value serves as the companion object defining a factory method `apply` and a corresponding `unapply`
+ * for pattern matching.
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val cond = reify{ condition }.tree // <- just some tree representing a condition
+ * val body = Literal(Constant(1))
+ * val other = Literal(Constant(2))
+ * val iftree = If(cond,body,other)
+ * }}}
+ *
+ * is equivalent to
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val iftree = reify{ if( condition ) 1 else 2 }.tree
+ * }}}
+ *
+ * and can be pattern matched as
+ *
+ * {{{
+ * iftree match { case If(cond,body,other) => ... }
+ * }}}
+ *
+ * Moreover, there is an implicit value [[scala.reflect.base.Trees#IfTag]] of type
+ * `ClassTag[If]` that is used by the Scala compiler so that we can indeed pattern match on `If`:
+ * {{{
+ * iftree match { case _:If => ... }
+ * }}}
+ * Without the given implicit value, this pattern match would raise an "unchecked" warning at compile time
+ * since `If` is an abstract type that gets erased at runtime. See [[scala.reflect.ClassTag]] for details.
+ *
+ * To summarize: each tree type `X` (and similarly for other types such as `Type` or `Symbol`) is represented
+ * by an abstract type `X`, optionally together with a class `XBase` that defines `X`'s interface.
+ * `X`'s companion object, if it exists, is represented by a value `X` that is of type `XExtractor`.
+ * Moreover, for each type `X`, there is a value `XTag` of type `ClassTag[X]` that makes it possible to pattern match
+ * on `X`.
+ */
package object reflect {
lazy val basis: base.Universe = new base.Base
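As a complement to the extractor discussion above, a small sketch that builds and deconstructs an `If` tree by hand (assumes the runtime universe):
{{{
import scala.reflect.runtime.universe._

// Construct the tree for `if (true) 1 else 2` via the If extractor's apply method...
val iftree = If(Literal(Constant(true)), Literal(Constant(1)), Literal(Constant(2)))

// ...and take it apart again with the corresponding unapply.
iftree match {
  case If(cond, thenp, elsep) => println(s"cond=$cond, then=$thenp, else=$elsep")
}
}}}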
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index 19396a3d48..d7d2603ef7 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] {
protected def num = scala.math.Numeric.DoubleIsFractional
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index 9fbb3c19bb..9c3a14d3be 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] {
protected def num = scala.math.Numeric.FloatIsFractional
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index ad36006646..1e0500ce29 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
@@ -15,6 +16,7 @@ import scala.collection.generic.{ Sorted }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import scala.xml.{ Node, MetaData }
+import java.lang.{ Class => jClass }
import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
@@ -27,10 +29,10 @@ object ScalaRunTime {
def isArray(x: Any, atLevel: Int = 1): Boolean =
x != null && isArrayClass(x.getClass, atLevel)
- private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
+ private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
+ def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
def isTuple(x: Any) = x != null && tupleNames(x.getClass.getName)
def isAnyVal(x: Any) = x match {
case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
@@ -49,7 +51,7 @@ object ScalaRunTime {
/** Return the class object representing an array with element class `clazz`.
*/
- def arrayClass(clazz: Class[_]): Class[_] = {
+ def arrayClass(clazz: jClass[_]): jClass[_] = {
// newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
else java.lang.reflect.Array.newInstance(clazz, 0).getClass
@@ -57,18 +59,19 @@ object ScalaRunTime {
/** Return the class object representing elements in arrays described by a given schematic.
*/
- def arrayElementClass(schematic: Any): Class[_] = schematic match {
- case cls: Class[_] => cls.getComponentType
+ def arrayElementClass(schematic: Any): jClass[_] = schematic match {
+ case cls: jClass[_] => cls.getComponentType
case tag: ClassTag[_] => tag.runtimeClass
- case _ => throw new UnsupportedOperationException("unsupported schematic %s (%s)".format(schematic, if (schematic == null) "null" else schematic.getClass))
+ case _ =>
+ throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
}
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
- def anyValClass[T <: AnyVal : ClassTag](value: T): Class[T] =
- classTag[T].runtimeClass.asInstanceOf[Class[T]]
+ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
+ classTag[T].runtimeClass.asInstanceOf[jClass[T]]
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = xs match {
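The `arrayClass` helper above relies on plain JVM reflection; a standalone sketch of the same trick (no Scala reflection involved):
{{{
// Creating a zero-length array and asking for its class yields the array class,
// which is what arrayClass does for every element class other than Void.TYPE.
val intArrayClass = java.lang.reflect.Array.newInstance(classOf[Int], 0).getClass
println(intArrayClass)   // class [I
}}}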
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index dc9594d960..8cb958c05f 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import java.util.Arrays.copyOfRange
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 4693b0bf44..f074b5407e 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -10,10 +10,5 @@ package scala.runtime
/** A wrapper class that adds string concatenation `+` to any value */
final class StringAdd(val self: Any) extends AnyVal {
- // Note: The implicit conversion from Any to StringAdd is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
def +(other: String) = String.valueOf(self) + other
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 1f5feec9e1..7d34e82812 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -11,12 +11,6 @@ package scala.runtime
/** A wrapper class that adds a `formatted` operation to any value
*/
final class StringFormat(val self: Any) extends AnyVal {
- // Note: The implicit conversion from Any to StringFormat is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
-
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
* (@see java.lang.String.format).
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index bd52c678af..6030c9ea90 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -21,7 +21,7 @@ trait ZippedTraversable2[+El1, +El2] extends Any {
}
object ZippedTraversable2 {
implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = {
- new collection.AbstractTraversable[(El1, El2)] {
+ new scala.collection.AbstractTraversable[(El1, El2)] {
def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
}
}
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 3b78b6261a..3970c9973d 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -18,7 +18,7 @@ trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
}
object ZippedTraversable3 {
implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = {
- new collection.AbstractTraversable[(El1, El2, El3)] {
+ new scala.collection.AbstractTraversable[(El1, El2, El3)] {
def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
}
}
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 687a32cf7d..123a729748 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A lightweight interface wrapping a property contained in some
* unspecified map. Generally it'll be the system properties but this
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 77e36f6196..94a2125393 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -45,7 +45,7 @@ object BasicIO {
val q = new LinkedBlockingQueue[Either[Int, T]]
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
- case Left(code) => if (nonzeroException) sys.error("Nonzero exit code: " + code) else Stream.empty
+ case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
case Right(s) => Stream.cons(s, next)
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 58f06e1039..2c83a59e4f 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -128,7 +128,7 @@ private[process] trait ProcessBuilderImpl {
val code = this ! BasicIO(withIn, buffer, log)
if (code == 0) buffer.toString
- else sys.error("Nonzero exit value: " + code)
+ else scala.sys.error("Nonzero exit value: " + code)
}
private[this] def lines(
@@ -213,4 +213,4 @@ private[process] trait ProcessBuilderImpl {
) extends SequentialBuilder(first, second, "###") {
override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io)
}
-} \ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index b7549eeb06..cdf7d72caa 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -84,7 +84,7 @@ private[process] trait ProcessImpl {
private[process] abstract class CompoundProcess extends BasicProcess {
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse sys.error("No exit code: process destroyed.")
+ def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
def start() = getExitValue
protected lazy val (getExitValue, destroyer) = {
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 5f0edf964f..276e157f55 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.reflect.{ ClassTag, classTag }
-import scala.math.Ordering
+import scala.math.{ Ordering, max, min }
/** The Sorting object provides functions that can sort various kinds of
* objects. You can provide a comparison function, or you can request a sort
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 1cdcd734cd..25ac86183c 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -19,8 +19,8 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
- var indexMap = collection.Map[immutable.BitSet, Int]()
- var invIndexMap = collection.Map[Int, immutable.BitSet]()
+ var indexMap = scala.collection.Map[immutable.BitSet, Int]()
+ var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
var ix = 0
// we compute the dfa with states = bitsets
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index c2b5dbca22..4409358785 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A trait for exceptions which, for efficiency reasons, do not
* fill in the stack trace. Stack trace suppression can be disabled
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index 84b549f35e..97d32af2b0 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -8,6 +8,8 @@
package scala.util.hashing
+import scala.annotation.implicitNotFound
+
/** `Hashing` is a trait whose instances each represent a strategy for hashing
* instances of a type.
*
@@ -16,27 +18,22 @@ package scala.util.hashing
*
* Note: when using a custom `Hashing`, make sure to use it with the `Equiv`
* such that if any two objects are equal, then their hash codes must be equal.
- *
+ *
* @since 2.10
*/
-@annotation.implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
+@implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
trait Hashing[T] extends Serializable {
-
def hash(x: T): Int
-
}
-
object Hashing {
-
final class Default[T] extends Hashing[T] {
def hash(x: T) = x.##
}
-
+
implicit def default[T] = new Default[T]
-
+
def fromFunction[T](f: T => Int) = new Hashing[T] {
def hash(x: T) = f(x)
}
-
}
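A short usage sketch for the `Hashing` strategies shown in this hunk (the value names are illustrative):
{{{
import scala.util.hashing.Hashing

// fromFunction builds a custom strategy; the implicit default delegates to ##
val byLength: Hashing[String] = Hashing.fromFunction((s: String) => s.length)
println(byLength.hash("abc"))                      // 3
println(implicitly[Hashing[String]].hash("abc"))   // same as "abc".##
}}}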
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index e6c9573756..5d990eee78 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -178,7 +178,7 @@ trait Parsers {
def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
- def get: Nothing = sys.error("No result when parsing failed")
+ def get: Nothing = scala.sys.error("No result when parsing failed")
}
/** An extractor so `NoSuccess(msg, next)` can be used in matches. */
object NoSuccess {
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 2d87bc0764..5d183df04b 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.xml
package dtd
@@ -21,10 +20,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def accept(tok: Int) = {
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
- sys.error("in DTDs, \n"+
+ scala.sys.error("in DTDs, \n"+
"mixed content models must be like (#PCDATA|Name|Name|...)*");
else
- sys.error("expected "+token2string(tok)+
+ scala.sys.error("expected "+token2string(tok)+
", got unexpected token:"+token2string(token));
}
nextToken
@@ -45,7 +44,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
}
case LPAREN =>
@@ -65,12 +64,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
accept( STAR );
res
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
}
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
// sopt ::= S?
def sOpt() = if( token == S ) nextToken;
@@ -118,12 +117,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def particle = token match {
case LPAREN => nextToken; sOpt; regexp;
case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => sys.error("expected '(' or Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
}
// atom ::= name
def atom = token match {
case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => sys.error("expected Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected Name, got:"+token2string(token));
}
}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 82a8d1af2f..2e753a7590 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -44,7 +44,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
}
final def accS(ds: Seq[Char]) { ds foreach acc }
@@ -65,7 +65,7 @@ class Scanner extends Tokens with parsing.TokenTests {
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
- else sys.error("unexpected character:" + c)
+ else scala.sys.error("unexpected character:" + c)
}
final def name = {
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 61d4855b2e..c543b8751b 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -18,7 +18,7 @@ trait NodeFactory[A <: Node] {
val ignoreProcInstr = false
/* default behaviour is to use hash-consing */
- val cache = new collection.mutable.HashMap[Int, List[A]]
+ val cache = new scala.collection.mutable.HashMap[Int, List[A]]
protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index f4d69ffe44..2af66f4f16 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -62,7 +62,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
val value = atts.getValue(i);
// @todo Need to use character references if the encoding
// can't support the character
- out.write(xml.Utility.escape(value))
+ out.write(scala.xml.Utility.escape(value))
out.write("'");
i += 1
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index af9b5f47cf..d4dc6da14d 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -56,7 +56,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
// See ticket #3720 for motivations.
private class WithLookAhead(underlying: Source) extends Source {
- private val queue = collection.mutable.Queue[Char]()
+ private val queue = scala.collection.mutable.Queue[Char]()
def lookahead(): BufferedIterator[Char] = {
val iter = queue.iterator ++ new Iterator[Char] {
def hasNext = underlying.hasNext
@@ -897,7 +897,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
new PublicID(pubID, sysID)
} else {
reportSyntaxError("PUBLIC or SYSTEM expected");
- sys.error("died parsing notationdecl")
+ scala.sys.error("died parsing notationdecl")
}
xSpaceOpt
xToken('>')
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index 096f8a8f38..219c3d6679 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -21,7 +21,7 @@ import Utility.SU
* All members should be accessed through those.
*/
private[scala] trait MarkupParserCommon extends TokenTests {
- protected def unreachable = sys.error("Cannot be reached.")
+ protected def unreachable = scala.sys.error("Cannot be reached.")
// type HandleType // MarkupHandler, SymbolicXMLBuilder
type InputType // Source, CharArrayReader
@@ -82,7 +82,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
case `end` => return buf.toString
case ch => buf append ch
}
- sys.error("Expected '%s'".format(end))
+ scala.sys.error("Expected '%s'".format(end))
}
/** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
index d283a6b476..f817b1c1af 100644
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -217,6 +217,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
"-Dpartest.output="+outDir.getAbsolutePath,
"-Dpartest.lib="+LATEST_LIB,
"-Dpartest.reflect="+LATEST_REFLECT,
+ "-Dpartest.comp="+LATEST_COMP,
"-Dpartest.cwd="+outDir.getParent,
"-Dpartest.test-path="+testFullPath,
"-Dpartest.testname="+fileBase,
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index 69d0ce9e4c..c288c15c19 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -30,8 +30,8 @@ package object partest {
implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
- implicit lazy val postfixOps = language.postfixOps
- implicit lazy val implicitConversions = language.implicitConversions
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
def timed[T](body: => T): (T, Long) = {
val t1 = System.currentTimeMillis
@@ -74,7 +74,7 @@ package object partest {
def isPartestDebug: Boolean =
propOrEmpty("partest.debug") == "true"
- import language.experimental.macros
+ import scala.language.experimental.macros
/**
* `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
diff --git a/src/reflect/scala/reflect/api/AnnotationInfos.scala b/src/reflect/scala/reflect/api/AnnotationInfos.scala
deleted file mode 100644
index d9f35024d9..0000000000
--- a/src/reflect/scala/reflect/api/AnnotationInfos.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala.reflect
-package api
-
-trait AnnotationInfos extends base.AnnotationInfos { self: Universe =>
-
- override type AnnotationInfo >: Null <: AnyRef with AnnotationInfoApi
- trait AnnotationInfoApi {
- def atp: Type
- def args: List[Tree]
- def assocs: List[(Name, ClassfileAnnotArg)]
- }
-
- override type LiteralAnnotArg >: Null <: ClassfileAnnotArg with LiteralAnnotArgApi
- trait LiteralAnnotArgApi {
- def const: Constant
- }
-
- override type ArrayAnnotArg >: Null <: ClassfileAnnotArg with ArrayAnnotArgApi
- trait ArrayAnnotArgApi {
- def args: Array[ClassfileAnnotArg]
- }
-
- override type NestedAnnotArg >: Null <: ClassfileAnnotArg with NestedAnnotArgApi
- trait NestedAnnotArgApi {
- def annInfo: AnnotationInfo
- }
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
new file mode 100644
index 0000000000..43e95f9902
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -0,0 +1,29 @@
+package scala.reflect
+package api
+
+import scala.collection.immutable.ListMap
+
+trait Annotations extends base.Annotations { self: Universe =>
+
+ override type Annotation >: Null <: AnyRef with AnnotationApi
+ trait AnnotationApi {
+ def tpe: Type
+ def scalaArgs: List[Tree]
+ def javaArgs: ListMap[Name, JavaArgument]
+ }
+
+ override type LiteralArgument >: Null <: JavaArgument with LiteralArgumentApi
+ trait LiteralArgumentApi {
+ def value: Constant
+ }
+
+ override type ArrayArgument >: Null <: JavaArgument with ArrayArgumentApi
+ trait ArrayArgumentApi {
+ def args: Array[JavaArgument]
+ }
+
+ override type NestedArgument >: Null <: JavaArgument with NestedArgumentApi
+ trait NestedArgumentApi {
+ def annotation: Annotation
+ }
+} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/FrontEnds.scala b/src/reflect/scala/reflect/api/FrontEnds.scala
index a27450d49d..61ea227c47 100644
--- a/src/reflect/scala/reflect/api/FrontEnds.scala
+++ b/src/reflect/scala/reflect/api/FrontEnds.scala
@@ -24,7 +24,7 @@ trait FrontEnds {
def hasWarnings = WARNING.count > 0
case class Info(val pos: Position, val msg: String, val severity: Severity)
- val infos = new collection.mutable.LinkedHashSet[Info]
+ val infos = new scala.collection.mutable.LinkedHashSet[Info]
/** Handles incoming info */
def log(pos: Position, msg: String, severity: Severity) {
@@ -67,4 +67,4 @@ trait FrontEnds {
*/
// todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 1fbbc0c0a4..0c4be4f7e1 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -36,13 +36,8 @@ trait Symbols extends base.Symbols { self: Universe =>
// at scala.reflect.internal.pickling.UnPickler$Scan.run(UnPickler.scala:88)
// at scala.reflect.internal.pickling.UnPickler.unpickle(UnPickler.scala:37)
// at scala.reflect.runtime.JavaMirrors$JavaMirror.unpickleClass(JavaMirrors.scala:253) // unpickle from within a reflexive mirror
- // def annotations: List[AnnotationInfo]
- def getAnnotations: List[AnnotationInfo]
-
- /** Whether this symbol carries an annotation for which the given
- * symbol is its typeSymbol.
- */
- def hasAnnotation(sym: Symbol): Boolean
+ // def annotations: List[Annotation]
+ def getAnnotations: List[Annotation]
/** For a class: the module or case class factory with the same name in the same package.
* For a module: the class with the same name in the same package.
@@ -175,6 +170,10 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isJava: Boolean
+ /** Does this symbol represent an implicit value, definition, class or parameter?
+ */
+ def isImplicit: Boolean
+
/******************* helpers *******************/
/** ...
@@ -227,10 +226,6 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isOverloaded : Boolean
- /** Does this symbol represent an implicit value, definition or parameter?
- */
- def isImplicit: Boolean
-
/** Does this symbol represent a lazy value?
*/
def isLazy: Boolean
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 5522693b29..e46a977be8 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -424,14 +424,6 @@ trait Trees extends base.Trees { self: Universe =>
trait ApplyApi extends GenericApplyApi { this: Apply =>
}
- override type ApplyDynamic >: Null <: TermTree with SymTree with ApplyDynamicApi
-
- /** The API that all apply dynamics support */
- trait ApplyDynamicApi extends TermTreeApi with SymTreeApi { this: ApplyDynamic =>
- val qual: Tree
- val args: List[Tree]
- }
-
override type Super >: Null <: TermTree with SuperApi
/** The API that all supers support */
@@ -586,7 +578,6 @@ trait Trees extends base.Trees { self: Universe =>
def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply
def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply
- def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
def Super(tree: Tree, qual: Tree, mix: TypeName): Super
def This(tree: Tree, qual: Name): This
def Select(tree: Tree, qualifier: Tree, selector: Name): Select
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index f22f8d3e75..1c79de02c3 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -121,15 +121,6 @@ trait Types extends base.Types { self: Universe =>
*/
def widen: Type
- /** Map to a singleton type which is a subtype of this type.
- * The fallback implemented here gives:
- * {{{
- * T.narrow = (T {}).this.type
- * }}}
- * Overridden where we know more about where types come from.
- */
- def narrow: Type
-
/******************* helpers *******************/
/** Substitute symbols in `to` for corresponding occurrences of references to
@@ -264,7 +255,7 @@ trait Types extends base.Types { self: Universe =>
/** The API that all annotated types support */
trait AnnotatedTypeApi extends TypeApi { this: AnnotatedType =>
- val annotations: List[AnnotationInfo]
+ val annotations: List[Annotation]
val underlying: Type
val selfsym: Symbol
}
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 3dce0f218e..3165f9abcd 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -14,4 +14,4 @@ abstract class Universe extends base.Universe
with StandardDefinitions
with StandardNames
with Importers
- with AnnotationInfos
+ with Annotations
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index a444c786f7..3bd7f4f4fa 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -9,9 +9,10 @@ package internal
import util._
import pickling.ByteCodecs
import scala.annotation.tailrec
+import scala.collection.immutable.ListMap
/** AnnotationInfo and its helpers */
-trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
+trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
// Common annotation code between Symbol and Type.
@@ -32,7 +33,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
case AnnotationInfo(tp, Literal(Constant(tpe: Type)) :: Nil, _) if tp.typeSymbol == ThrowsClass => tpe.typeSymbol
}
- /** Tests for, get, or remove an annotation */
+ /** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
dropOtherAnnotations(annotations, cls).nonEmpty
@@ -43,12 +44,12 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
case ann :: _ => Some(ann)
case _ => None
}
-
+
def removeAnnotation(cls: Symbol): Self = filterAnnotations(ann => !(ann matches cls))
-
+
final def withAnnotation(annot: AnnotationInfo): Self = withAnnotations(List(annot))
- @tailrec private
+ @tailrec private
def dropOtherAnnotations(anns: List[AnnotationInfo], cls: Symbol): List[AnnotationInfo] = anns match {
case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
case Nil => Nil
@@ -63,28 +64,47 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
* - or nested classfile annotations
*/
abstract class ClassfileAnnotArg extends Product
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ implicit val JavaArgumentTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ case object UnmappableAnnotArg extends ClassfileAnnotArg
/** Represents a compile-time Constant (`Boolean`, `Byte`, `Short`,
* `Char`, `Int`, `Long`, `Float`, `Double`, `String`, `java.lang.Class` or
* an instance of a Java enumeration value).
*/
case class LiteralAnnotArg(const: Constant)
- extends ClassfileAnnotArg with LiteralAnnotArgApi {
+ extends ClassfileAnnotArg with LiteralArgumentApi {
+ def value = const
override def toString = const.escapedStringValue
}
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
-
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
+ object LiteralAnnotArg extends LiteralArgumentExtractor
/** Represents an array of classfile annotation arguments */
case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
- extends ClassfileAnnotArg with ArrayAnnotArgApi {
+ extends ClassfileAnnotArg with ArrayArgumentApi {
override def toString = args.mkString("[", ", ", "]")
}
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ object ArrayAnnotArg extends ArrayArgumentExtractor
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
+ /** Represents a nested classfile annotation */
+ case class NestedAnnotArg(annInfo: AnnotationInfo)
+ extends ClassfileAnnotArg with NestedArgumentApi {
+ // The nested annotation should not have any Scala annotation arguments
+ assert(annInfo.args.isEmpty, annInfo.args)
+ def annotation = annInfo
+ override def toString = annInfo.toString
+ }
+ object NestedAnnotArg extends NestedArgumentExtractor
+
+ type JavaArgument = ClassfileAnnotArg
+ type LiteralArgument = LiteralAnnotArg
+ val LiteralArgument = LiteralAnnotArg
+ implicit val LiteralArgumentTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
+ type ArrayArgument = ArrayAnnotArg
+ val ArrayArgument = ArrayAnnotArg
+ implicit val ArrayArgumentTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ type NestedArgument = NestedAnnotArg
+ val NestedArgument = NestedAnnotArg
+ implicit val NestedArgumentTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
/** A specific annotation argument that encodes an array of bytes as an
* array of `Long`. The type of the argument declared in the annotation
@@ -121,20 +141,9 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
src
}
-
- }
-
- /** Represents a nested classfile annotation */
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg with NestedAnnotArgApi {
- // The nested annotation should not have any Scala annotation arguments
- assert(annInfo.args.isEmpty, annInfo.args)
- override def toString = annInfo.toString
}
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
- object NestedAnnotArg extends NestedAnnotArgExtractor
-
- object AnnotationInfo extends AnnotationInfoExtractor {
+ object AnnotationInfo {
def marker(atp: Type): AnnotationInfo =
apply(atp, Nil, Nil)
@@ -165,11 +174,14 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
this
}
- override def toString = (
- atp +
- (if (!args.isEmpty) args.mkString("(", ", ", ")") else "") +
- (if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else "")
- )
+ override def toString = completeAnnotationToString(this)
+ }
+
+ private[scala] def completeAnnotationToString(annInfo: AnnotationInfo) = {
+ import annInfo._
+ val s_args = if (!args.isEmpty) args.mkString("(", ", ", ")") else ""
+ val s_assocs = if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else ""
+ s"${atp}${s_args}${s_assocs}"
}
/** Symbol annotations parsed in `Namer` (typeCompleter of
@@ -207,11 +219,15 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
*
* `assocs` stores arguments to classfile annotations as name-value pairs.
*/
- sealed abstract class AnnotationInfo extends AnnotationInfoApi {
+ abstract class AnnotationInfo extends AnnotationApi {
def atp: Type
def args: List[Tree]
def assocs: List[(Name, ClassfileAnnotArg)]
+ def tpe = atp
+ def scalaArgs = args
+ def javaArgs = ListMap(assocs: _*)
+
// necessary for reification, see Reifiers.scala for more info
def original: Tree
def setOriginal(t: Tree): this.type
@@ -299,7 +315,14 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
}
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
+ type Annotation = AnnotationInfo
+ object Annotation extends AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, ClassfileAnnotArg]): Annotation =
+ AnnotationInfo(tpe, scalaArgs, javaArgs.toList)
+ def unapply(annotation: Annotation): Option[(Type, List[Tree], ListMap[Name, ClassfileAnnotArg])] =
+ Some((annotation.tpe, annotation.scalaArgs, annotation.javaArgs))
+ }
+ implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
}
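The hunks above give annotations an api-facing shape: `Annotation(tpe, scalaArgs, javaArgs)`, with Java (classfile) arguments carried as a `ListMap`. A minimal usage sketch against the 2.10 runtime universe; the `@deprecated`-annotated `Sample` class is purely illustrative:

    import scala.reflect.runtime.universe._

    @deprecated("use something newer", "1.0") class Sample

    // decompose each annotation into the three parts the new extractor exposes
    typeOf[Sample].typeSymbol.annotations foreach {
      case Annotation(tpe, scalaArgs, javaArgs) =>
        println(s"$tpe  scala args: $scalaArgs  java args: $javaArgs")
    }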
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index c6b55c8511..15b2a4dfe8 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -935,7 +935,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val SwitchClass = requiredClass[scala.annotation.switch]
lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
lazy val VarargsClass = requiredClass[scala.annotation.varargs]
- lazy val StaticClass = requiredClass[scala.annotation.static]
lazy val uncheckedStableClass = requiredClass[scala.annotation.unchecked.uncheckedStable]
lazy val uncheckedVarianceClass = requiredClass[scala.annotation.unchecked.uncheckedVariance]
@@ -1119,7 +1118,8 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a phantom class for which no runtime representation exists? */
lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
- lazy val magicSymbols = List(
+ /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreClasses = List(
AnnotationDefaultAttr, // #2264
RepeatedParamClass,
JavaRepeatedParamClass,
@@ -1130,7 +1130,10 @@ trait Definitions extends api.StandardDefinitions {
NullClass,
NothingClass,
SingletonClass,
- EqualsPatternClass,
+ EqualsPatternClass
+ )
+ /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreMethods = List(
Any_==,
Any_!=,
Any_equals,
@@ -1148,10 +1151,19 @@ trait Definitions extends api.StandardDefinitions {
Object_synchronized,
Object_isInstanceOf,
Object_asInstanceOf,
- String_+,
+ String_+
+ )
+ /** Lists core classes that do have underlying bytecode, but are adjusted on-the-fly in every reflection universe */
+ lazy val hijackedCoreClasses = List(
ComparableClass,
JavaSerializableClass
)
+ /** Lists symbols that are synthesized or hijacked by the compiler.
+ *
+ * Such symbols either don't have any underlying bytecode at all ("synthesized")
+ * or get loaded from bytecode but have their metadata adjusted ("hijacked").
+ */
+ lazy val symbolsNotPresentInBytecode = syntheticCoreClasses ++ syntheticCoreMethods ++ hijackedCoreClasses
/** Is the symbol that of a parent which is added during parsing? */
lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
@@ -1215,7 +1227,8 @@ trait Definitions extends api.StandardDefinitions {
def init() {
if (isInitialized) return
- val forced = magicSymbols // force initialization of every symbol that is entered as a side effect
+ // force initialization of every symbol that is synthesized or hijacked by the compiler
+ val forced = symbolsNotPresentInBytecode
isInitialized = true
} //init
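The former `magicSymbols` list is now split into classes, methods and hijacked classes, with `symbolsNotPresentInBytecode` as their union. A hedged way to peek at the grouping from outside the compiler is the usual unsupported downcast of the runtime universe to the internal symbol table:

    import scala.reflect.internal.SymbolTable

    // unsupported: the runtime universe happens to be an internal.SymbolTable
    val internal = scala.reflect.runtime.universe.asInstanceOf[SymbolTable]
    import internal.definitions._

    println(syntheticCoreClasses.size)         // no bytecode at all
    println(syntheticCoreMethods.size)         // no bytecode at all
    println(hijackedCoreClasses)               // bytecode exists, metadata adjusted on load
    println(symbolsNotPresentInBytecode.size)  // union of the three lists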
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 87058df732..c116928d37 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -32,7 +32,7 @@ trait Importers extends api.Importers { self: SymbolTable =>
// fixups and maps prevent stackoverflows in importer
var pendingSyms = 0
var pendingTpes = 0
- lazy val fixups = collection.mutable.MutableList[Function0[Unit]]()
+ lazy val fixups = scala.collection.mutable.MutableList[Function0[Unit]]()
def addFixup(fixup: => Unit): Unit = fixups += (() => fixup)
def tryFixup(): Unit = {
if (pendingSyms == 0 && pendingTpes == 0) {
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index c23d22efab..4c423e0bc2 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -664,7 +664,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
def show(flags: FlagSet): String = {
if (flags == NoFlags) nme.NoFlags.toString
else {
- val s_flags = new collection.mutable.ListBuffer[String]
+ val s_flags = new scala.collection.mutable.ListBuffer[String]
def hasFlag(left: Long, right: Long): Boolean = (left & right) != 0
for (i <- 0 to 63 if hasFlag(flags, 1L << i))
s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 60b3a6f436..5f6a3bf777 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -10,7 +10,7 @@ trait StdAttachments {
trait Attachable {
protected var rawatt: base.Attachments { type Pos = Position } = NoPosition
def attachments = rawatt
- def addAttachment(attachment: Any): this.type = { rawatt = rawatt.add(attachment); this }
+ def updateAttachment[T: ClassTag](attachment: T): this.type = { rawatt = rawatt.update(attachment); this }
def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
// cannot be final due to SynchronizedSymbols
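`updateAttachment` replaces `addAttachment` because attachments are keyed by their runtime class: a second call with the same payload type overwrites the first instead of accumulating. A hedged sketch from a macro implementation's point of view; `ExpansionNote` and `annotate` are made up for illustration:

    import scala.reflect.macros.Context

    case class ExpansionNote(msg: String)

    def annotate(c: Context)(tree: c.universe.Tree): c.universe.Tree = {
      tree.updateAttachment(ExpansionNote("first"))
      tree.updateAttachment(ExpansionNote("second"))   // replaces "first"
      assert(tree.attachments.get[ExpansionNote] == Some(ExpansionNote("second")))
      tree
    }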
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index e70531df6e..2f305296f5 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -246,7 +246,6 @@ trait StdNames {
final val BeanPropertyAnnot: NameType = "BeanProperty"
final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
final val bridgeAnnot: NameType = "bridge"
- final val staticAnnot: NameType = "static"
// Classfile Attributes
final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
@@ -571,7 +570,7 @@ trait StdNames {
// Compiler utilized names
val AnnotatedType: NameType = "AnnotatedType"
- val AnnotationInfo: NameType = "AnnotationInfo"
+ val Annotation: NameType = "Annotation"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
val AppliedTypeTree: NameType = "AppliedTypeTree"
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index b7f3ef0bf8..14589a6058 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -28,7 +28,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//protected var activeLocks = 0
/** Used for debugging only */
- //protected var lockedSyms = collection.immutable.Set[Symbol]()
+ //protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
private var recursionTable = immutable.Map.empty[Symbol, Int]
@@ -697,7 +697,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
|| hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
)
def hasBridgeAnnotation = hasAnnotation(BridgeClass)
- def hasStaticAnnotation = hasAnnotation(StaticClass)
def isDeprecated = hasAnnotation(DeprecatedAttr)
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
@@ -1613,7 +1612,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setInfo (this.info cloneInfo clone)
setAnnotations this.annotations
)
- this.attachments.all.foreach(clone.addAttachment)
+ this.attachments.all.foreach(clone.updateAttachment)
if (clone.thisSym != clone)
clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 0c40925858..011d31491a 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -137,7 +137,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = {
- val s = collection.mutable.LinkedHashSet[S]()
+ val s = scala.collection.mutable.LinkedHashSet[S]()
def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S]
for (t <- this) {
addIfFree(t.symbol)
@@ -410,9 +410,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
Apply(init, args.toList)
}
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends SymTree with TermTree with ApplyDynamicApi
- object ApplyDynamic extends ApplyDynamicExtractor
+ case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
override def symbol: Symbol = qual.symbol
@@ -506,7 +504,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
- class StrictTreeCopier extends TreeCopierOps {
+ override type TreeCopier <: InternalTreeCopierOps
+ abstract class InternalTreeCopierOps extends TreeCopierOps {
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
+ }
+
+ class StrictTreeCopier extends InternalTreeCopierOps {
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) =
new ClassDef(mods, name.toTypeName, tparams, impl).copyAttrs(tree)
def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) =
@@ -600,7 +603,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
}
- class LazyTreeCopier extends TreeCopierOps {
+ class LazyTreeCopier extends InternalTreeCopierOps {
val treeCopy: TreeCopier = newStrictTreeCopier
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) = tree match {
case t @ ClassDef(mods0, name0, tparams0, impl0)
@@ -1595,7 +1598,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
implicit val SuperTag = ClassTag[Super](classOf[Super])
implicit val ThisTag = ClassTag[This](classOf[This])
implicit val SelectTag = ClassTag[Select](classOf[Select])
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index ea2dc50e38..e5e71381e5 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3734,12 +3734,12 @@ trait Types extends api.Types { self: SymbolTable =>
*
* tpe1 where { tparams }
*
- * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`.
+ * where `tpe1` is the result of extrapolating `tpe` with respect to `tparams`.
* Extrapolating means that type variables in `tparams` occurring
* in covariant positions are replaced by upper bounds, (minus any
* SingletonClass markers), type variables in `tparams` occurring in
* contravariant positions are replaced by upper bounds, provided the
- * resulting type is legal wrt to stability, and does not contain any type
+ * resulting type is legal with regard to stability, and does not contain any type
* variable in `tparams`.
*
* The abstraction drops all type parameters that are not directly or
@@ -5735,8 +5735,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
- * type selections with the same name of equal (wrt) =:= prefixes are
- * considered equal wrt =:=
+ * type selections with the same name on prefixes that are equal (as determined by `=:=`)
+ * are themselves considered equal with respect to `=:=`.
*/
def beginsWithTypeVarOrIsRefined(tp: Type): Boolean = tp match {
case SingleType(pre, sym) =>
@@ -6385,7 +6385,7 @@ trait Types extends api.Types { self: SymbolTable =>
// Produce a single type for this frontier by merging the prefixes and arguments of those
// typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information wrt subtyping. Before
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
// merging, strip targs that refer to bound tparams (when we're computing the lub of type
// constructors.) Also filter out all types that are a subtype of some other type.
if (isUniformFrontier) {
@@ -6434,7 +6434,7 @@ trait Types extends api.Types { self: SymbolTable =>
loop(Nil, initialBTSes)
}
- /** The minimal symbol (wrt Symbol.isLess) of a list of types */
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
private def minSym(tps: List[Type]): Symbol =
(tps.head.typeSymbol /: tps.tail) {
(sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
@@ -6560,7 +6560,7 @@ trait Types extends api.Types { self: SymbolTable =>
* test/continuations-neg/function3.scala goes into an infinite loop.
* (Even if the calls are to typeSymbolDirect.)
*/
- def isNumericSubType(tp1: Type, tp2: Type) = (
+ def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
isNumericValueType(tp1)
&& isNumericValueType(tp2)
&& isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
@@ -6744,7 +6744,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var globalGlbDepth = 0
private final val globalGlbLimit = 2
- /** The greatest lower bound wrt <:< of a list of types */
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
def glb(ts: List[Type]): Type = elimSuper(ts) match {
case List() => AnyClass.tpe
case List(t) => t
@@ -6766,8 +6766,8 @@ trait Types extends api.Types { self: SymbolTable =>
case ts0 => glbNorm(ts0, depth)
}
- /** The greatest lower bound wrt <:< of a list of types, which have been normalized
- * wrt elimSuper */
+ /** The greatest lower bound (with respect to `<:<`) of a list of types
+ * that have already been normalized by `elimSuper`. */
protected def glbNorm(ts: List[Type], depth: Int): Type = {
def glb0(ts0: List[Type]): Type = ts0 match {
case List() => AnyClass.tpe
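The doc-comment rewording above does not change behaviour; for readers coming from the public API, `glb` and `lub` are also exposed on the universe. A hedged reminder (the results in the comments are the expected ones for the 2.10 runtime universe):

    import scala.reflect.runtime.universe._

    glb(List(typeOf[List[Int]], typeOf[Seq[Int]]))   // List[Int]: most specific type conforming to both
    lub(List(typeOf[Int], typeOf[String]))           // Any: most specific type both conform to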
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 20ab3fca68..ce1d5a819f 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -446,7 +446,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ {
private def readArrayAnnot() = {
readByte() // skip the `annotargarray` tag
val end = readNat() + readIndex
- until(end, () => readClassfileAnnotArg(readNat())).toArray(ClassfileAnnotArgTag)
+ until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
}
protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation))
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
index f01725cd1d..80153ff257 100644
--- a/src/reflect/scala/reflect/macros/Infrastructure.scala
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -43,7 +43,7 @@ trait Infrastructure {
* val importer = ru.mkImporter(c.universe).asInstanceOf[ru.Importer { val from: c.universe.type }]
* val tree = c.resetAllAttrs(x.tree.duplicate)
* val imported = importer.importTree(tree)
- * val valueOfX = toolBox.runExpr(imported).asInstanceOf[T]
+ * val valueOfX = toolBox.eval(imported).asInstanceOf[T]
* ...
* }
*/
@@ -77,23 +77,4 @@ trait Infrastructure {
/** Returns a macro definition which triggered this macro expansion.
*/
val currentMacro: Symbol
-
- // todo. redo caches as discussed on Reflecting Meeting 2012/03/29
- // https://docs.google.com/document/d/1oUZGQpdt2qwioTlJcSt8ZFQwVLTvpxn8xa67P8OGVpU/edit
-
- /** A cache shared by all invocations of all macros across all compilation runs.
- *
- * Needs to be used with extreme care, since memory leaks here will swiftly crash the presentation compiler.
- * For example, Scala IDE typically launches a compiler run on every edit action so there might be hundreds of runs per minute.
- */
- val globalCache: scala.collection.mutable.Map[Any, Any]
-
- /** A cache shared by all invocations of the same macro within a single compilation run.
- *
- * This cache is cleared automatically after a compilation run is completed or abandoned.
- * It is also specific to a particular macro definition.
- *
- * To share data between different macros and/or different compilation runs, use ``globalCache''.
- */
- val cache: scala.collection.mutable.Map[Any, Any]
}
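The doc comment higher up in this file now calls `toolBox.eval` instead of `runExpr`. Outside of macros the same entry point looks roughly like this, assuming the 2.10 toolbox API with `parse` and `eval` and scala-compiler on the classpath:

    import scala.reflect.runtime.{universe => ru}
    import scala.tools.reflect.ToolBox

    val tb     = ru.runtimeMirror(getClass.getClassLoader).mkToolBox()
    val tree   = tb.parse("List(1, 2, 3).map(_ * 2)")
    val result = tb.eval(tree)   // List(2, 4, 6)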
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index ea87c5842e..1742d07b60 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -5,7 +5,7 @@ trait Parsers {
self: Context =>
/** .. */
- // todo. distinguish between `parseExpr` and `parse`
+ // todo. distinguish between parsing expressions and parsing arbitrary code (both currently go through `parse`)
def parse(code: String): Tree
/** Represents an error during parsing
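For orientation, a hedged sketch of how a macro implementation might use `parse`; `greetingImpl` and the commented-out macro definition are illustrative only:

    import scala.reflect.macros.Context

    def greetingImpl(c: Context)(): c.Expr[String] = {
      // `parse` yields an untyped tree; it is typechecked later as part of the expansion
      val tree = c.parse(""" "hello" + ", " + "world" """)
      c.Expr[String](tree)
    }

    // in another compilation unit:
    // def greeting(): String = macro greetingImpl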
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 06428ee3fc..7fa2e7cbae 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -10,7 +10,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def attachments: base.Attachments { type Pos = Position }
/** ... */
- def addAttachment(attachment: Any): AttachableApi.this.type
+ def updateAttachment[T: ClassTag](attachment: T): AttachableApi.this.type
/** ... */
def removeAttachment[T: ClassTag]: AttachableApi.this.type
@@ -32,7 +32,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def setTypeSignature(tpe: Type): Symbol
- def setAnnotations(annots: AnnotationInfo*): Symbol
+ def setAnnotations(annots: Annotation*): Symbol
def setName(name: Name): Symbol
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index be2661149a..0d9e90d3a6 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -9,6 +9,7 @@ import java.lang.reflect.{
Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
+import java.lang.annotation.{Annotation => jAnnotation}
import java.io.IOException
import internal.MissingRequirementError
import internal.pickling.ByteCodecs
@@ -23,7 +24,7 @@ import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
import scala.reflect.internal.util.Collections._
-trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: SymbolTable =>
+trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
@@ -62,9 +63,9 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
class JavaMirror(owner: Symbol,
/** Class loader that is the mastermind behind the reflective mirror */
val classLoader: ClassLoader
- ) extends Roots(owner) with super.JavaMirror { wholemirror =>
+ ) extends Roots(owner) with super.JavaMirror { thisMirror =>
- val universe: self.type = self
+ val universe: thisUniverse.type = thisUniverse
import definitions._
@@ -135,6 +136,55 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+ /** Helper functions for extracting typed values from a (Class[_], Any)
+ * representing an annotation argument.
+ */
+ private object toAnnotArg {
+ val StringClass = classOf[String]
+ val ClassClass = classOf[jClass[_]]
+ object PrimitiveClass { def unapply(x: jClass[_]) = x.isPrimitive }
+ object EnumClass { def unapply(x: jClass[_]) = x.isEnum }
+ object ArrayClass { def unapply(x: jClass[_]) = x.isArray }
+ object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation }
+
+ object ConstantArg {
+ def enumToSymbol(enum: Enum[_]): Symbol = {
+ val staticPartOfEnum = classToScala(enum.getClass).companionSymbol
+ staticPartOfEnum.typeSignature.declaration(enum.name: TermName)
+ }
+
+ def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match {
+ case (StringClass | PrimitiveClass(), value) => Some(value)
+ case (ClassClass, value: jClass[_]) => Some(classToScala(value).toType)
+ case (EnumClass(), value: Enum[_]) => Some(enumToSymbol(value))
+ case _ => None
+ }
+ }
+ def apply(schemaAndValue: (jClass[_], Any)): ClassfileAnnotArg = schemaAndValue match {
+ case ConstantArg(value) => LiteralAnnotArg(Constant(value))
+ case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(ScalaRunTime.arrayElementClass(clazz) -> x)))
+ case (AnnotationClass(), value: jAnnotation) => NestedAnnotArg(JavaAnnotationProxy(value))
+ case _ => UnmappableAnnotArg
+ }
+ }
+ private case class JavaAnnotationProxy(jann: jAnnotation) extends AnnotationInfo {
+ override val atp: Type = classToScala(jann.annotationType).toType
+ override val args: List[Tree] = Nil
+ override def original: Tree = EmptyTree
+ override def setOriginal(t: Tree): this.type = throw new Exception("setOriginal inapplicable for " + this)
+ override def pos: Position = NoPosition
+ override def setPos(pos: Position): this.type = throw new Exception("setPos inapplicable for " + this)
+ override def toString = completeAnnotationToString(this)
+
+ // todo. find out the exact order of assocs as they are written in the class file
+ // currently I'm simply sorting the methods to guarantee stability of the output
+ override lazy val assocs: List[(Name, ClassfileAnnotArg)] = (
+ jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m =>
+ (m.getName: TermName) -> toAnnotArg(m.getReturnType -> m.invoke(jann))
+ )
+ )
+ }
+
def reflect[T: ClassTag](obj: T): InstanceMirror = new JavaInstanceMirror(obj)
def reflectClass(cls: ClassSymbol): ClassMirror = {
@@ -190,7 +240,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
private class JavaInstanceMirror[T: ClassTag](val instance: T)
extends InstanceMirror {
- def symbol = wholemirror.classSymbol(preciseClass(instance))
+ def symbol = thisMirror.classSymbol(preciseClass(instance))
def reflectField(field: TermSymbol): FieldMirror = {
checkMemberOf(field, symbol)
if ((field.isMethod && !field.isAccessor) || field.isModule) ErrorNotField(field)
@@ -265,14 +315,13 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// the "symbol == Any_getClass || symbol == Object_getClass" test doesn't cut it
// because both AnyVal and its primitive descendants define their own getClass methods
private def isGetClass(meth: MethodSymbol) = meth.name.toString == "getClass" && meth.params.flatten.isEmpty
- private def isMagicPrimitiveMethod(meth: MethodSymbol) = meth.owner.isPrimitiveValueClass
- private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (isMagicPrimitiveMethod(meth) && meth.returnType =:= StringClass.toType)
- lazy val magicMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
- lazy val nonMagicObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
- Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
- private def isMagicMethod(meth: MethodSymbol): Boolean = {
- if (isGetClass(meth) || isStringConcat(meth) || isMagicPrimitiveMethod(meth) || meth == Predef_classOf || meth.isTermMacro) return true
- magicMethodOwners(meth.owner) && !nonMagicObjectMethods(meth)
+ private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType)
+ lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
+ lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
+ Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
+ private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
+ if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+ bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
}
// unlike other mirrors, method mirrors are created by a factory
@@ -280,7 +329,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// therefore we move special cases into separate subclasses
// rather than have them on a hot path in a unified implementation of the `apply` method
private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
- if (isMagicMethod(symbol)) new JavaMagicMethodMirror(receiver, symbol)
+ if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
else if (symbol.params.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
else new JavaVanillaMethodMirror(receiver, symbol)
}
@@ -315,11 +364,11 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
}
- private class JavaMagicMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
+ private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
def apply(args: Any*): Any = {
// checking type conformance is too much of a hassle, so we don't do it here
- // actually it's not even necessary, because we manually dispatch arguments to magic methods below
+ // actually it's not even necessary, because we manually dispatch arguments below
val params = symbol.paramss.flatten
val perfectMatch = args.length == params.length
// todo. this doesn't account for multiple vararg parameter lists
@@ -337,36 +386,36 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def objArgs = args.asInstanceOf[Seq[AnyRef]]
def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
- def invokeMagicPrimitiveMethod = {
+ def invokePrimitiveMethod = {
val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
assert(jmeths.length == 1, jmeths.toList)
jinvoke(jmeths.head, null, objReceiver +: objArgs)
}
symbol match {
- case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
- case Any_equals => receiver.equals(objArg0)
- case Any_hashCode => receiver.hashCode
- case Any_toString => receiver.toString
- case Object_eq => objReceiver eq objArg0
- case Object_ne => objReceiver ne objArg0
- case Object_synchronized => objReceiver.synchronized(objArg0)
- case sym if isGetClass(sym) => preciseClass(receiver)
- case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
- case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
- case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
- case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
- case Array_length => ScalaRunTime.array_length(objReceiver)
- case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
- case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
- case Array_clone => ScalaRunTime.array_clone(objReceiver)
- case sym if isStringConcat(sym) => receiver.toString + objArg0
- case sym if isMagicPrimitiveMethod(sym) => invokeMagicPrimitiveMethod
- case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
- case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
- case _ => assert(false, this)
+ case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
+ case Any_equals => receiver.equals(objArg0)
+ case Any_hashCode => receiver.hashCode
+ case Any_toString => receiver.toString
+ case Object_eq => objReceiver eq objArg0
+ case Object_ne => objReceiver ne objArg0
+ case Object_synchronized => objReceiver.synchronized(objArg0)
+ case sym if isGetClass(sym) => preciseClass(receiver)
+ case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
+ case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
+ case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
+ case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
+ case Array_length => ScalaRunTime.array_length(objReceiver)
+ case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
+ case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
+ case Array_clone => ScalaRunTime.array_clone(objReceiver)
+ case sym if isStringConcat(sym) => receiver.toString + objArg0
+ case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
+ case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
+ case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+ case _ => assert(false, this)
}
}
}
@@ -469,7 +518,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
private object unpickler extends UnPickler {
- val global: self.type = self
+ val global: thisUniverse.type = thisUniverse
}
/** how connected????
@@ -573,7 +622,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
* Note: If `sym` is a method or constructor, its parameter annotations are copied as well.
*/
private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) {
- // to do: implement
+ sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
}
/**
@@ -616,7 +665,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
completeRest()
}
- def completeRest(): Unit = self.synchronized {
+ def completeRest(): Unit = thisUniverse.synchronized {
val tparams = clazz.rawInfo.typeParams
val parents = try {
@@ -976,13 +1025,12 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
rawToExistential(typeRef(clazz.owner.thisType, clazz, List()))
}
case japplied: ParameterizedType =>
- val (pre, sym) = typeToScala(japplied.getRawType) match {
- case ExistentialType(tparams, TypeRef(pre, sym, _)) => (pre, sym)
- case TypeRef(pre, sym, _) => (pre, sym)
- }
+ // http://stackoverflow.com/questions/5767122/parameterizedtype-getrawtype-returns-j-l-r-type-not-class
+ val sym = classToScala(japplied.getRawType.asInstanceOf[jClass[_]])
+ val pre = sym.owner.thisType
val args0 = japplied.getActualTypeArguments
val (args, bounds) = targsToScala(pre.typeSymbol, args0.toList)
- ExistentialType(bounds, typeRef(pre, sym, args))
+ newExistentialType(bounds, typeRef(pre, sym, args))
case jarr: GenericArrayType =>
arrayType(typeToScala(jarr.getGenericComponentType))
case jtvar: jTypeVariable[_] =>
@@ -1200,9 +1248,9 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
mirrors(rootToLoader getOrElseUpdate(root, findLoader)).get.get
}
- private lazy val magicClasses: Map[(String, Name), Symbol] = {
+ private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
- Map() ++ (definitions.magicSymbols filter (_.isType) map mapEntry)
+ Map() ++ (definitions.syntheticCoreClasses map mapEntry)
}
/** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
@@ -1221,9 +1269,12 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- magicClasses get (owner.fullName, name) match {
+ syntheticCoreClasses get (owner.fullName, name) match {
case Some(tsym) =>
- owner.info.decls enter tsym
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
return tsym
case None =>
}
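With `copyAnnotations` now materialising `JavaAnnotationProxy` instances, Java annotations become visible through Scala runtime reflection. A hedged sketch; the exact `assocs` depend on the annotation's elements and retention policy:

    import scala.reflect.runtime.{universe => ru}

    // java.lang.Deprecated (runtime retention) on java.util.Date#getYear
    // should now surface as an Annotation on the reflected symbol
    val getYear = ru.typeOf[java.util.Date].member(ru.newTermName("getYear"))
    getYear.annotations foreach (ann => println(ann.tpe))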
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 77d65a7db2..1d875b10f1 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -16,7 +16,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def log(msg: => AnyRef): Unit = println(" [] "+msg)
- type TreeCopier = TreeCopierOps
+ type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index d325cf6a16..7b9f69e657 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -5,7 +5,8 @@ package object runtime {
// type is api.JavaUniverse because we only want to expose the `scala.reflect.api.*` subset of reflection
lazy val universe: api.JavaUniverse = new runtime.JavaUniverse
- // implementation magically hardwired to the `currentMirror` method below
+ // implementation hardwired to the `currentMirror` method below
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
def currentMirror: universe.Mirror = ??? // macro
}
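The fast-tracked `currentMirror` macro is what lets callers obtain a mirror without naming a classloader explicitly. A hedged usage sketch:

    import scala.reflect.runtime.{currentMirror => cm}

    val listModule = cm.staticModule("scala.collection.immutable.List")
    val im         = cm.reflect(List(1, 2, 3))   // an InstanceMirror over a live object
    println(listModule.fullName + " / " + im.symbol)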
diff --git a/src/reflect/scala/tools/nsc/io/VirtualFile.scala b/src/reflect/scala/tools/nsc/io/VirtualFile.scala
index be888e92e6..8a5114bfe7 100644
--- a/src/reflect/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/reflect/scala/tools/nsc/io/VirtualFile.scala
@@ -33,7 +33,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
//########################################################################
// Private data
- private var content = new Array[Byte](0)
+ private var content = Array.emptyByteArray
//########################################################################
// Public Methods
diff --git a/src/reflect/scala/tools/nsc/io/ZipArchive.scala b/src/reflect/scala/tools/nsc/io/ZipArchive.scala
index d7ec209525..49d2200895 100644
--- a/src/reflect/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/reflect/scala/tools/nsc/io/ZipArchive.scala
@@ -177,7 +177,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
class FileEntry() extends Entry(zipEntry.getName) {
override val toByteArray: Array[Byte] = {
val len = zipEntry.getSize().toInt
- val arr = new Array[Byte](len)
+ val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len)
var offset = 0
def loop() {
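Both byte-array tweaks above rely on `Array.emptyByteArray` being a single cached instance, so the "empty result" paths stop allocating. A quick illustration:

    val a = Array.emptyByteArray
    val b = Array.emptyByteArray
    assert(a eq b)                                        // one shared instance
    assert((new Array[Byte](0)) ne (new Array[Byte](0)))  // fresh arrays allocate every time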
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 5ad82c513d..88ef8ae2a1 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -53,7 +53,7 @@ trait Commands extends Prop {
* takes the current abstract state as parameter and returns a boolean
* that says if the precondition is fulfilled or not. You can add several
* conditions to the precondition list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
+ val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
@@ -65,7 +65,7 @@ trait Commands extends Prop {
* method. The postcondition function should return a Boolean (or
* a Prop instance) that says if the condition holds or not. You can add several
* conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
+ val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
}
/** A command that binds its result for later use */
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index c40e4aa718..eeb5936086 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -96,7 +96,7 @@ object Pretty {
}
implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
+ def labels(ls: scala.collection.immutable.Set[String]) =
if(ls.isEmpty) ""
else "> Labels of failing property: " / ls.mkString("\n")
val s = res.status match {
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index 16ac1940b2..4683c34a65 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -30,7 +30,7 @@ trait CmdLineParser extends Parsers {
trait StrOpt extends Opt[String]
class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
+ private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
def apply(flag: Flag): Boolean = opts.contains(flag)
def apply[T](opt: Opt[T]): T = opts.get(opt) match {
case None => opt.default
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 34f52a1e19..51a789e041 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
/** Repeats this rule num times */
def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
- val result = new collection.mutable.ArraySeq[A](num)
+ val result = new scala.collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
index 324e87435e..b1cc18f90b 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/package.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
@@ -2,8 +2,8 @@ package scala.tools.scalap
package scalax
package object rules {
- implicit lazy val higherKinds = language.higherKinds
- implicit lazy val postfixOps = language.postfixOps
- implicit lazy val implicitConversions = language.implicitConversions
- implicit lazy val reflectiveCalls = language.reflectiveCalls
+ implicit lazy val higherKinds = scala.language.higherKinds
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
}
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 96530e2e94..45497665d7 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -14,8 +14,8 @@ package object swing {
type Image = java.awt.Image
type Font = java.awt.Font
- implicit lazy val reflectiveCalls = language.reflectiveCalls
- implicit lazy val implicitConversions = language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
+ implicit lazy val implicitConversions = scala.language.implicitConversions
private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])