Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/AbstractActor.scala | 1
-rw-r--r--  src/actors/scala/actors/KillActorControl.scala | 2
-rw-r--r--  src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala | 1
-rw-r--r--  src/build/genprod.scala | 4
-rw-r--r--  src/build/pack.xml | 2
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Parsers.scala | 28
-rw-r--r--  src/compiler/scala/reflect/macros/util/Helpers.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala | 2
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 117
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 526
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/package.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/StoreReporter.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 44
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LazyVals.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Solving.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 81
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 7
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 17
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala | 10
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala | 2
-rwxr-xr-x  src/intellij/setup.sh | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 13
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Response.scala | 4
-rw-r--r--  src/library/scala/DelayedInit.scala | 4
-rw-r--r--  src/library/scala/Function0.scala | 2
-rw-r--r--  src/library/scala/Function1.scala | 2
-rw-r--r--  src/library/scala/Proxy.scala | 2
-rw-r--r--  src/library/scala/annotation/migration.scala | 2
-rw-r--r--  src/library/scala/collection/BitSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenMap.scala | 4
-rw-r--r--  src/library/scala/collection/GenTraversable.scala | 7
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 4
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 2
-rwxr-xr-x  src/library/scala/collection/LinearSeqOptimized.scala | 4
-rw-r--r--  src/library/scala/collection/Map.scala | 2
-rw-r--r--  src/library/scala/collection/SortedMap.scala | 8
-rw-r--r--  src/library/scala/collection/SortedMapLike.scala | 20
-rw-r--r--  src/library/scala/collection/SortedSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 8
-rw-r--r--  src/library/scala/collection/generic/GenericSeqCompanion.scala | 1
-rw-r--r--  src/library/scala/collection/generic/IsSeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/ParFactory.scala | 5
-rw-r--r--  src/library/scala/collection/generic/Shrinkable.scala | 5
-rw-r--r--  src/library/scala/collection/generic/Signalling.scala | 23
-rw-r--r--  src/library/scala/collection/generic/Sorted.scala | 8
-rwxr-xr-x  src/library/scala/collection/immutable/DefaultMap.scala | 10
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/MapLike.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 10
-rw-r--r--  src/library/scala/collection/immutable/SortedMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 8
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 22
-rw-r--r--  src/library/scala/collection/mutable/ImmutableSetAdaptor.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 12
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashSet.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/StackProxy.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/Combiner.scala | 15
-rw-r--r--  src/library/scala/collection/parallel/ParIterableViewLike.scala | 14
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 20
-rw-r--r--  src/library/scala/collection/parallel/ParSeq.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParSetLike.scala | 25
-rw-r--r--  src/library/scala/collection/parallel/PreciseSplitter.scala | 8
-rw-r--r--  src/library/scala/collection/parallel/TaskSupport.scala | 26
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 25
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashMap.scala | 26
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParSeq.scala | 7
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala | 18
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala | 24
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashTable.scala | 13
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMapLike.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSetLike.scala | 41
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParTrieMap.scala | 20
-rw-r--r--  src/library/scala/concurrent/Awaitable.scala | 12
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 12
-rw-r--r--  src/library/scala/concurrent/Future.scala | 9
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 32
-rw-r--r--  src/library/scala/concurrent/TaskRunner.scala | 1
-rw-r--r--  src/library/scala/concurrent/duration/Duration.scala | 4
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 2
-rw-r--r--  src/library/scala/concurrent/package.scala | 22
-rw-r--r--  src/library/scala/io/BufferedSource.scala | 6
-rw-r--r--  src/library/scala/ref/WeakReference.scala | 2
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 2
-rw-r--r--  src/library/scala/runtime/AbstractFunction1.scala | 2
-rw-r--r--  src/library/scala/runtime/AbstractPartialFunction.scala | 14
-rw-r--r--  src/library/scala/runtime/Boxed.scala | 9
-rw-r--r--  src/library/scala/runtime/NonLocalReturnControl.scala | 1
-rw-r--r--  src/library/scala/runtime/WorksheetSupport.scala | 1
-rw-r--r--  src/library/scala/transient.scala | 2
-rw-r--r--  src/library/scala/util/Properties.scala | 2
-rw-r--r--  src/library/scala/volatile.scala | 2
-rw-r--r--  src/partest-extras/scala/tools/partest/IcodeComparison.scala | 3
-rw-r--r--  src/reflect/scala/reflect/api/FlagSets.scala | 3
-rw-r--r--  src/reflect/scala/reflect/api/StandardDefinitions.scala | 18
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 27
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 63
-rw-r--r--  src/reflect/scala/reflect/internal/FlagSets.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/Flags.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Scopes.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 39
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 75
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 114
-rw-r--r--  src/reflect/scala/reflect/internal/TypeDebugging.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 22
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/util/Collections.scala | 9
-rw-r--r--  src/reflect/scala/reflect/internal/util/Position.scala | 452
-rw-r--r--  src/reflect/scala/reflect/internal/util/RangePosition.scala | 50
-rw-r--r--  src/reflect/scala/reflect/internal/util/Set.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/util/StringOps.scala | 32
-rw-r--r--  src/reflect/scala/reflect/internal/util/package.scala | 3
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ExprTyper.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 7
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 6
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Index.scala | 3
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala | 3
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala | 3
-rw-r--r--  src/scalap/scala/tools/scalap/Arguments.scala | 1
158 files changed, 1182 insertions, 1745 deletions
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index 3c6299aab4..28fe689e91 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -27,5 +27,4 @@ trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
private[actors] def unlinkFrom(from: AbstractActor): Unit
private[actors] def exit(from: AbstractActor, reason: AnyRef): Unit
-
}
diff --git a/src/actors/scala/actors/KillActorControl.scala b/src/actors/scala/actors/KillActorControl.scala
index 2f1f08e949..0f94bbc8dc 100644
--- a/src/actors/scala/actors/KillActorControl.scala
+++ b/src/actors/scala/actors/KillActorControl.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.actors
import scala.util.control.ControlThrowable
diff --git a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
index 15ce60566a..37710ec037 100644
--- a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
+++ b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
@@ -8,5 +8,4 @@ private class DrainableForkJoinPool(parallelism: Int, maxPoolSize: Int) extends
override def drainTasksTo(c: Collection[ _ >: ForkJoinTask[_]]): Int =
super.drainTasksTo(c)
-
}
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index cd01363cb6..ed436fe2e4 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -113,8 +113,8 @@ object FunctionZero extends Function(0) {
object FunctionOne extends Function(1) {
override def classAnnotation = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n"
- override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) "
- override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) "
+ override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) "
+ override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
override def descriptiveComment = " " + functionNTemplate.format("succ", "anonfun1",
"""
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 8e2d2f19fa..ed628726fb 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -12,7 +12,7 @@ PROPERTIES
<!-- the maven stuff requires version.major, version.minor and version.patch properties.
the "get-scala-revision" script only returns "version.number" -->
- <property file="${basedir}/build.number.maven"/>
+ <property file="${basedir}/build.number"/>
<!-- also need to know scala binary version and versions for xml and parsers -->
<property file="${basedir}/versions.properties"/>
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index 3dab02beba..ae6488b5a8 100644
--- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -1,24 +1,20 @@
package scala.reflect.macros
package contexts
-import scala.language.existentials
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.ToolBoxError
+import scala.tools.nsc.reporters.StoreReporter
trait Parsers {
self: Context =>
+ import global._
- def parse(code: String): Tree =
- // todo. provide decent implementation
- // see `Typers.typedUseCase` for details
- try {
- import scala.reflect.runtime.{universe => ru}
- val parsed = ru.rootMirror.mkToolBox().parse(code)
- val importer = universe.mkImporter(ru)
- importer.importTree(parsed)
- } catch {
- case ToolBoxError(msg, cause) =>
- // todo. provide a position
- throw new ParseException(universe.NoPosition, msg)
+ def parse(code: String) = {
+ val sreporter = new StoreReporter()
+ val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
+ val parser = newUnitParser(unit)
+ val tree = gen.mkTreeOrBlock(parser.parseStats())
+ sreporter.infos.foreach {
+ case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
}
-}
+ tree
+ }
+}
\ No newline at end of file
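The rewritten macro `parse` above no longer round-trips through a runtime-reflection ToolBox: it builds a throwaway CompilationUnit whose `reporter` is a StoreReporter (the overridable per-unit `reporter` is added in CompilationUnits.scala below), runs the ordinary compiler parser over it, and rethrows any buffered error as a ParseException. A minimal stand-alone sketch of the same StoreReporter pattern, assuming a plain Global built for the purpose rather than a macro context:

import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.StoreReporter

object StoreReporterSketch {
  def main(args: Array[String]): Unit = {
    val settings  = new Settings
    settings.usejavacp.value = true        // assume scala-library is on the JVM classpath
    val sreporter = new StoreReporter      // buffers diagnostics instead of printing them
    val global    = new Global(settings, sreporter)

    global.newUnitParser("class C { def f(x Int) = x }").parse()
    // The syntax error now sits in the reporter, ready to be turned into an exception.
    sreporter.infos
      .collect { case sreporter.Info(pos, msg, sreporter.ERROR) => s"$pos: $msg" }
      .foreach(println)
  }
}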
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index f12582a3a1..f40c6bb7e6 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -23,25 +23,27 @@ trait Helpers {
* or to streamline creation of the list of macro arguments.
*/
def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+ val MacroContextUniverse = definitions.MacroContextUniverse
val treeInfo.MacroImplReference(isBundle, _, macroImpl, _) = macroImplRef
val paramss = macroImpl.paramss
- if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
- val rc =
- if (isBundle) macroImpl.owner.tpe.member(nme.c)
- else {
- def cparam = paramss.head.head
- if (paramss.head.isEmpty || !(cparam.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
- cparam
- }
+ val ContextParam = paramss match {
+ case Nil | _ :+ Nil => NoSymbol // no implicit parameters in the signature => nothing to do
+ case _ if isBundle => macroImpl.owner.tpe member nme.c
+ case (cparam :: _) :: _ if cparam.tpe <:< MacroContextClass.tpe => cparam
+ case _ => NoSymbol // no context parameter in the signature => nothing to do
+ }
def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
- case TypeRef(SingleType(SingleType(_, ac), universe), WeakTypeTagClass, targ :: Nil)
- if ac == rc && universe == MacroContextUniverse =>
- transform(param, targ.typeSymbol)
- case _ =>
- param
+ case TypeRef(SingleType(SingleType(_, ContextParam), MacroContextUniverse), WeakTypeTagClass, targ :: Nil) => transform(param, targ.typeSymbol)
+ case _ => param
+ }
+ ContextParam match {
+ case NoSymbol => paramss
+ case _ =>
+ paramss.last map transformTag filter (_.exists) match {
+ case Nil => paramss.init
+ case transformed => paramss.init :+ transformed
+ }
}
- val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
- if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
}
/** Increases metalevel of the type, i.e. transforms:
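Note the capitalization in the rewrite above: `ContextParam` and `MacroContextUniverse` are bound as capitalized vals precisely so they can appear as stable identifiers inside the `TypeRef(SingleType(SingleType(_, ContextParam), MacroContextUniverse), ...)` pattern, replacing the old explicit equality checks. A tiny stand-alone reminder of that language rule (illustrative, not compiler code):

object StableIdPatterns extends App {
  val Expected = 42                 // capitalized: used in a pattern, it means "equals this value"

  def describe(x: Int): String = x match {
    case Expected => "the expected value"
    case other    => s"something else: $other"   // lower-case name: a fresh binder
  }

  println(describe(42))  // the expected value
  println(describe(7))   // something else: 7
}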
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index f7437e4e6c..1de5c1f626 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -9,8 +9,9 @@ import util.FreshNameCreator
import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
+import scala.tools.nsc.reporters.Reporter
-trait CompilationUnits { self: Global =>
+trait CompilationUnits { global: Global =>
/** An object representing a missing compilation unit.
*/
@@ -119,6 +120,8 @@ trait CompilationUnits { self: Global =>
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
+ def reporter = global.reporter
+
def echo(pos: Position, msg: String) =
reporter.echo(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index f6d4b26cda..f3a2d49697 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -110,7 +110,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** A spare instance of TreeBuilder left for backwards compatibility. */
- lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new syntaxAnalyzer.ParserTreeBuilder
+ lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new UnitTreeBuilder {
+ val global: Global.this.type = Global.this;
+ val unit = currentUnit
+ }
/** Fold constants */
object constfold extends {
@@ -629,7 +632,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
object terminal extends {
val global: Global.this.type = Global.this
} with SubComponent {
- final val phaseName = "terminal"
+ val phaseName = "terminal"
val runsAfter = List("jvm")
val runsRightAfter = None
override val terminal = true
@@ -1159,11 +1162,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
}
- def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
- def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
- def newUnitScanner(unit: CompilationUnit): UnitScanner = new UnitScanner(unit)
- def newUnitParser(unit: CompilationUnit): UnitParser = new UnitParser(unit)
- def newUnitParser(code: String): UnitParser = newUnitParser(newCompilationUnit(code))
+ def newSourceFile(code: String, filename: String = "<console>") =
+ new BatchSourceFile(filename, code)
+
+ def newCompilationUnit(code: String, filename: String = "<console>") =
+ new CompilationUnit(newSourceFile(code, filename))
+
+ def newUnitScanner(unit: CompilationUnit): UnitScanner =
+ new UnitScanner(unit)
+
+ def newUnitParser(unit: CompilationUnit): UnitParser =
+ new UnitParser(unit)
+
+ def newUnitParser(code: String, filename: String = "<console>"): UnitParser =
+ newUnitParser(newCompilationUnit(code, filename))
/** A Run is a single execution of the compiler on a set of units.
*/
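The factory methods above now thread an optional filename (defaulting to the previously hard-coded "<console>") from `newSourceFile` through `newCompilationUnit` and `newUnitParser`; the macro `parse` shown earlier relies on this to label its synthetic source "<macro>". A small sketch of a client using the new parameter, assuming an already-initialized Global:

import scala.tools.nsc.Global

object ParseLabelled {
  // Sketch only: `global` is assumed to be a live compiler instance.
  def apply(global: Global)(code: String, label: String): global.Tree = {
    val unit = global.newCompilationUnit(code, label)  // positions will report `label` as the file name
    global.newUnitParser(unit).parse()
  }
}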
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 2ce2fb3eaa..899aa93a3b 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -27,7 +27,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
val baseDirectory = {
val pwd = System.getenv("PWD")
if (pwd == null || isWin) Directory.Current getOrElse Directory("/")
- else Directory(pwd)
+ else Directory(pwd)
}
currentDir.value = baseDirectory.path
}
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index f2e5c9b1eb..7cf2f8559b 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -18,8 +18,14 @@ trait DocComments { self: Global =>
val cookedDocComments = mutable.HashMap[Symbol, String]()
- /** The raw doc comment map */
- val docComments = mutable.HashMap[Symbol, DocComment]()
+ /** The raw doc comment map
+ *
+ * In IDE, background compilation runs get interrupted by
+ * reloading new sourcefiles. This is weak to avoid
+ * memleaks due to the doc of their cached symbols
+ * (e.g. in baseTypeSeq) between periodic doc reloads.
+ */
+ val docComments = mutable.WeakHashMap[Symbol, DocComment]()
def clearDocComments() {
cookedDocComments.clear()
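With `docComments` now a WeakHashMap, an entry survives only while its Symbol key is strongly reachable elsewhere, so symbols discarded when the IDE interrupts a background run and reloads sources no longer pin their doc comments in memory. A small stand-alone sketch of that weak-key behaviour (plain library code, not compiler code):

import scala.collection.mutable

object WeakKeySketch extends App {
  val cache = mutable.WeakHashMap.empty[Object, String]
  var key: Object = new Object
  cache(key) = "cached doc comment"
  println(cache.size)   // 1
  key = null            // drop the only strong reference to the key
  System.gc()           // collection is not guaranteed to be immediate...
  Thread.sleep(100)
  println(cache.size)   // ...but once the key is collected the entry is gone (typically 0)
}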
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index caab299635..9c8e13a1a9 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -282,7 +282,7 @@ abstract class NodePrinters {
traverseList("[]", "type parameter")(tparams)
vparamss match {
case Nil => println("Nil")
- case Nil :: Nil => println("List(Nil)")
+ case ListOfNil => println("List(Nil)")
case ps :: Nil =>
printLine("", "1 parameter list")
ps foreach traverse
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index d7a32c3be0..5922b4bbbf 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -133,90 +133,6 @@ trait TreeDSL {
def ==>(body: Tree): CaseDef = CaseDef(pat, guard, body)
}
- /** VODD, if it's not obvious, means ValOrDefDef. This is the
- * common code between a tree based on a pre-existing symbol and
- * one being built from scratch.
- */
- trait VODDStart {
- def name: Name
- def defaultMods: Modifiers
- def defaultTpt: Tree
- def defaultPos: Position
-
- type ResultTreeType <: ValOrDefDef
- def mkTree(rhs: Tree): ResultTreeType
- def ===(rhs: Tree): ResultTreeType
-
- private var _tpt: Tree = null
- private var _pos: Position = null
-
- def withType(tp: Type): this.type = {
- _tpt = TypeTree(tp)
- this
- }
- def withPos(pos: Position): this.type = {
- _pos = pos
- this
- }
-
- final def mods = defaultMods
- final def tpt = if (_tpt == null) defaultTpt else _tpt
- final def pos = if (_pos == null) defaultPos else _pos
- }
- trait SymVODDStart extends VODDStart {
- def sym: Symbol
- def symType: Type
-
- def name = sym.name
- def defaultMods = Modifiers(sym.flags)
- def defaultTpt = TypeTree(symType) setPos sym.pos.focus
- def defaultPos = sym.pos
-
- final def ===(rhs: Tree): ResultTreeType =
- atPos(pos)(mkTree(rhs) setSymbol sym)
- }
- trait ValCreator {
- self: VODDStart =>
-
- type ResultTreeType = ValDef
- def mkTree(rhs: Tree): ValDef = ValDef(mods, name.toTermName, tpt, rhs)
- }
- trait DefCreator {
- self: VODDStart =>
-
- def tparams: List[TypeDef]
- def vparamss: List[List[ValDef]]
-
- type ResultTreeType = DefDef
- def mkTree(rhs: Tree): DefDef = DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
- }
-
- class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
- def symType = sym.tpe.finalResultType
- def tparams = sym.typeParams map TypeDef
- def vparamss = mapParamss(sym)(ValDef)
- }
- class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
- def symType = sym.tpe
- }
-
- trait TreeVODDStart extends VODDStart {
- def defaultMods = NoMods
- def defaultTpt = TypeTree()
- def defaultPos = NoPosition
-
- final def ===(rhs: Tree): ResultTreeType =
- if (pos == NoPosition) mkTree(rhs)
- else atPos(pos)(mkTree(rhs))
- }
-
- class ValTreeStart(val name: Name) extends TreeVODDStart with ValCreator {
- }
- class DefTreeStart(val name: Name) extends TreeVODDStart with DefCreator {
- def tparams: List[TypeDef] = Nil
- def vparamss: List[List[ValDef]] = ListOfNil
- }
-
class IfStart(cond: Tree, thenp: Tree) {
def THEN(x: Tree) = new IfStart(cond, x)
def ELSE(elsep: Tree) = If(cond, thenp, elsep)
@@ -230,46 +146,23 @@ trait TreeDSL {
def CASE(pat: Tree): CaseStart = new CaseStart(pat, EmptyTree)
def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree)
- class SymbolMethods(target: Symbol) {
- def IS_NULL() = REF(target) OBJ_EQ NULL
- def GET() = fn(REF(target), nme.get)
- def ARGS = target.paramss.head
- }
-
- /** Top level accessible. */
- def MATCHERROR(arg: Tree) = Throw(MatchErrorClass.tpe, arg)
- def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
-
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
- def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym)
- def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym)
- def AND(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkAnd
+ def NOT(tree: Tree) = Select(tree, Boolean_not)
+ def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
- def NOT(tree: Tree) = Select(tree, Boolean_not)
- def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, flattenUnary = true))
+ def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, treeBuilder.makeTupleTerm(xs.toList, flattenUnary = true))
/** Typed trees from symbols. */
- def THIS(sym: Symbol) = gen.mkAttributedThis(sym)
- def ID(sym: Symbol) = gen.mkAttributedIdent(sym)
- def REF(sym: Symbol) = gen.mkAttributedRef(sym)
- def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
-
- def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => UNIT
- case List(tree) if flattenUnary => tree
- case _ => Apply(TupleClass(trees.length).companionModule, trees: _*)
- }
+ def REF(sym: Symbol) = gen.mkAttributedRef(sym)
+ def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
/** Implicits - some of these should probably disappear **/
implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target)
implicit def mkTreeMethodsFromSymbol(target: Symbol): TreeMethods = new TreeMethods(Ident(target))
- implicit def mkSymbolMethodsFromSymbol(target: Symbol): SymbolMethods = new SymbolMethods(target)
/** (foo DOT bar) might be simply a Select, but more likely it is to be immediately
* followed by an Apply. We don't want to add an actual apply method to arbitrary
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index bd31c548c7..05ad2dbc57 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -28,20 +28,14 @@ import util.FreshNameCreator
*/
trait ParsersCommon extends ScannersCommon { self =>
val global : Global
- import global._
+ // the use of currentUnit in the parser should be avoided as it might
+ // cause unexpected behaviour when you work with two units at the
+ // same time; use Parser.unit instead
+ import global.{currentUnit => _, _}
def newLiteral(const: Any) = Literal(Constant(const))
def literalUnit = newLiteral(())
- class ParserTreeBuilder extends TreeBuilder {
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
- def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
- }
-
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
@@ -172,6 +166,7 @@ self =>
private val globalFresh = new FreshNameCreator.Default
+ def unit = global.currentUnit
def freshName(prefix: String): Name = freshTermName(prefix)
def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
@@ -196,7 +191,7 @@ self =>
* that we don't have the xml library on the compilation classpath.
*/
private[this] lazy val xmlp = {
- currentUnit.encounteredXml(o2p(in.offset))
+ unit.encounteredXml(o2p(in.offset))
new MarkupParser(this, preserveWS = true)
}
@@ -225,7 +220,7 @@ self =>
override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, EmptyTree.asList))
}
- class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+ class UnitParser(override val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { uself =>
def this(unit: global.CompilationUnit) = this(unit, Nil)
override def newScanner() = new UnitScanner(unit, patches)
@@ -298,20 +293,34 @@ self =>
import nme.raw
- abstract class Parser extends ParserCommon {
+ abstract class Parser extends ParserCommon { parser =>
val in: Scanner
+ def unit: CompilationUnit
def freshName(prefix: String): Name
def freshTermName(prefix: String): TermName
def freshTypeName(prefix: String): TypeName
def o2p(offset: Int): Position
def r2p(start: Int, mid: Int, end: Int): Position
+ /** Creates a range position from the given start offset to
+ * the value of in.lastOffset.
+ */
+ def rangeSince(start: Int): Position = r2p(start, start, in.lastOffset)
+
+ /** Like in.skipToken, but returns a range position surrounding the skipped token.
+ */
+ def skipTokenRange(): Position = rangeSince(in.skipToken())
+
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
+ class ParserTreeBuilder extends UnitTreeBuilder {
+ val global: self.global.type = self.global
+ def unit = parser.unit
+ }
val treeBuilder = new ParserTreeBuilder
- import treeBuilder.{global => _, _}
+ import treeBuilder.{global => _, unit => _, _}
/** The types of the context bounds of type parameters of the surrounding class
*/
@@ -568,8 +577,9 @@ self =>
and
}
- def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
- def expectedMsg(token: Int): String = expectedMsgTemplate(token2string(token), token2string(in.token))
+ def expectedMsgTemplate(expected: String, found: String): String = s"$expected expected but $found found."
+ def expectedMsg(expected: Int, found: Int): String = expectedMsgTemplate(token2string(expected), token2string(found))
+ def expectedMsg(token: Int): String = expectedMsg(token, in.token)
/** Consume one token of the specified type, or signal an error if it is not there. */
def accept(token: Int): Int = {
@@ -587,6 +597,10 @@ self =>
if (in.token == token) in.nextToken()
offset
}
+ /** If the given token is available for consumption, consume it and return true.
+ * Otherwise, do nothing and return false.
+ */
+ def acceptIfPresent(token: Int) = (in.token == token) && { accept(token) ; true }
/** {{{
* semi = nl {nl} | `;`
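`acceptIfPresent` is the small combinator behind most of the mechanical rewrites in the rest of this file: every `if (in.token == T) { in.nextToken(); ... }` dance collapses to `if (acceptIfPresent(T)) ...`. A toy illustration of the same idea on a made-up token cursor (the names here are invented for the example and are not compiler API):

final class TokenCursor(private var tokens: List[Int]) {
  def token: Int = tokens.headOption.getOrElse(-1)   // -1 stands in for EOF
  def nextToken(): Unit = tokens = tokens.drop(1)

  /** Consume `tok` if it is the current token; report whether anything was consumed. */
  def acceptIfPresent(tok: Int): Boolean = (token == tok) && { nextToken(); true }
}

// Before: if (cursor.token == SEMI) { cursor.nextToken(); readStatement() }
// After:  if (cursor.acceptIfPresent(SEMI)) readStatement()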
@@ -703,18 +717,24 @@ self =>
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
- def atPos[T <: Tree](offset: Int)(t: T): T =
- global.atPos(r2p(offset, offset, in.lastOffset max offset))(t)
- def atPos[T <: Tree](start: Int, point: Int)(t: T): T =
- global.atPos(r2p(start, point, in.lastOffset max start))(t)
- def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T =
- global.atPos(r2p(start, point, end))(t)
- def atPos[T <: Tree](pos: Position)(t: T): T =
- global.atPos(pos)(t)
+ def atPos[T <: Tree](start: Int)(t: T): T = atPos[T](start, start)(t)
+ def atPos[T <: Tree](start: Int, point: Int)(t: T): T = atPos[T](start, point, in.lastOffset max start)(t)
+ def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T = atPos(r2p(start, point, end))(t)
+ def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
+ /** Use with caution. */
+ def peekahead(): Unit = {
+ in.prev copyFrom in
+ in.nextToken()
+ }
+ def pushback(): Unit = {
+ in.next copyFrom in
+ in copyFrom in.prev
+ }
+
/** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
case Parens(ts) => ts map convertToParam
@@ -753,10 +773,9 @@ self =>
if (!sepFirst)
ts += part
- while (in.token == separator) {
- in.nextToken()
+ while (acceptIfPresent(separator))
ts += part
- }
+
ts.toList
}
@inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
@@ -833,10 +852,8 @@ self =>
private def tupleInfixType(start: Int) = {
in.nextToken()
- if (in.token == RPAREN) {
- in.nextToken()
+ if (acceptIfPresent(RPAREN))
atPos(start, accept(ARROW)) { makeFunctionTypeTree(Nil, typ()) }
- }
else {
val ts = functionTypes()
accept(RPAREN)
@@ -940,27 +957,24 @@ self =>
)
def compoundTypeRest(t: Tree): Tree = {
- val ts = new ListBuffer[Tree] += t
- while (in.token == WITH) {
- in.nextToken()
- ts += annotType()
- }
+ val types = t :: tokenSeparated(WITH, sepFirst = true, annotType())
newLineOptWhenFollowedBy(LBRACE)
- atPos(t.pos.startOrPoint) {
- if (in.token == LBRACE) {
- // Warn if they are attempting to refine Unit; we can't be certain it's
- // scala.Unit they're refining because at this point all we have is an
- // identifier, but at a later stage we lose the ability to tell an empty
- // refinement from no refinement at all. See bug #284.
- for (Ident(name) <- ts) name.toString match {
- case "Unit" | "scala.Unit" =>
- warning("Detected apparent refinement of Unit; are you missing an '=' sign?")
- case _ =>
- }
- CompoundTypeTree(Template(ts.toList, emptyValDef, refinement()))
- }
- else
- makeIntersectionTypeTree(ts.toList)
+ val braceOffset = in.offset
+ val hasRefinement = in.token == LBRACE
+ val refinements = if (hasRefinement) refinement() else Nil
+ // Warn if they are attempting to refine Unit; we can't be certain it's
+ // scala.Unit they're refining because at this point all we have is an
+ // identifier, but at a later stage we lose the ability to tell an empty
+ // refinement from no refinement at all. See bug #284.
+ if (hasRefinement) types match {
+ case Ident(name) :: Nil if name endsWith "Unit" => warning(braceOffset, "Detected apparent refinement of Unit; are you missing an '=' sign?")
+ case _ =>
+ }
+ // The second case includes an empty refinement - refinements is empty, but
+ // it still gets a CompoundTypeTree.
+ types match {
+ case tp :: Nil if !hasRefinement => tp // single type, no refinement, already positioned
+ case tps => atPos(t.pos.startOrPoint)(CompoundTypeTree(Template(tps, emptyValDef, refinements)))
}
}
@@ -1026,19 +1040,19 @@ self =>
def path(thisOK: Boolean, typeOK: Boolean): Tree = {
val start = in.offset
var t: Tree = null
- if (in.token == THIS) {
- in.nextToken()
+ if (acceptIfPresent(THIS)) {
t = atPos(start) { This(tpnme.EMPTY) }
if (!thisOK || in.token == DOT) {
t = selectors(t, typeOK, accept(DOT))
}
- } else if (in.token == SUPER) {
- in.nextToken()
+ }
+ else if (acceptIfPresent(SUPER)) {
t = atPos(start) { Super(This(tpnme.EMPTY), mixinQualifierOpt()) }
accept(DOT)
t = selector(t)
if (in.token == DOT) t = selectors(t, typeOK, in.skipToken())
- } else {
+ }
+ else {
val tok = in.token
val name = ident()
t = atPos(start) {
@@ -1047,18 +1061,18 @@ self =>
}
if (in.token == DOT) {
val dotOffset = in.skipToken()
- if (in.token == THIS) {
- in.nextToken()
+ if (acceptIfPresent(THIS)) {
t = atPos(start) { This(name.toTypeName) }
if (!thisOK || in.token == DOT)
t = selectors(t, typeOK, accept(DOT))
- } else if (in.token == SUPER) {
- in.nextToken()
+ }
+ else if (acceptIfPresent(SUPER)) {
t = atPos(start) { Super(This(name.toTypeName), mixinQualifierOpt()) }
accept(DOT)
t = selector(t)
if (in.token == DOT) t = selectors(t, typeOK, in.skipToken())
- } else {
+ }
+ else {
t = selectors(t, typeOK, dotOffset)
}
}
@@ -1067,10 +1081,8 @@ self =>
}
def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
- if (typeOK && in.token == TYPE) {
- in.nextToken()
+ if (typeOK && acceptIfPresent(TYPE))
atPos(t.pos.startOrPoint, dotOffset) { SingletonTypeTree(t) }
- }
else {
val t1 = selector(t)
if (in.token == DOT) { selectors(t1, typeOK, in.skipToken()) }
@@ -1167,10 +1179,10 @@ self =>
private def freshPlaceholder(): Tree = {
val start = in.offset
val pname = freshName("x$")
- in.nextToken()
+ accept(USCORE)
val id = atPos(start)(Ident(pname))
val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
- placeholderParams = param :: placeholderParams
+ placeholderParams ::= param
id
}
@@ -1213,7 +1225,7 @@ self =>
/* ------------- NEW LINES ------------------------------------------------- */
def newLineOpt() {
- if (in.token == NEWLINE) in.nextToken()
+ acceptIfPresent(NEWLINE)
}
def newLinesOpt() {
@@ -1237,9 +1249,7 @@ self =>
* TypedOpt ::= [`:' Type]
* }}}
*/
- def typedOpt(): Tree =
- if (in.token == COLON) { in.nextToken(); typ() }
- else TypeTree()
+ def typedOpt(): Tree = if (acceptIfPresent(COLON)) typ() else TypeTree()
def typeOrInfixType(location: Int): Tree =
if (location == Local) typ()
@@ -1263,16 +1273,9 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- def condExpr(): Tree = {
- if (in.token == LPAREN) {
- in.nextToken()
- val r = expr()
- accept(RPAREN)
- r
- } else {
- accept(LPAREN)
- newLiteral(true)
- }
+ def condExpr(): Tree = in.token match {
+ case LPAREN => inParens(expr())
+ case _ => syntaxErrorOrIncompleteAnd("parenthesized conditional expression expected", skipIt = false)(newLiteral(true))
}
/* hook for IDE, unlike expression can be stubbed
@@ -1313,8 +1316,7 @@ self =>
val cond = condExpr()
newLinesOpt()
val thenp = expr()
- val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else literalUnit
+ val elsep = if (acceptIfPresent(ELSE)) expr() else literalUnit
If(cond, thenp, elsep)
}
parseIf
@@ -1323,23 +1325,19 @@ self =>
val body = in.token match {
case LBRACE => inBracesOrUnit(block())
case LPAREN => inParensOrUnit(expr())
- case _ => expr()
+ case _ => expr()
}
def catchFromExpr() = List(makeCatchFromExpr(expr()))
- val catches: List[CaseDef] =
- if (in.token != CATCH) Nil
- else {
- in.nextToken()
+ val catches: List[CaseDef] = (
+ if (!acceptIfPresent(CATCH)) Nil else {
if (in.token != LBRACE) catchFromExpr()
else inBracesOrNil {
if (isCaseDefStart) caseClauses()
else catchFromExpr()
}
}
- val finalizer = in.token match {
- case FINALLY => in.nextToken(); expr()
- case _ => EmptyTree
- }
+ )
+ val finalizer = if (acceptIfPresent(FINALLY)) expr() else EmptyTree
Try(body, catches, finalizer)
}
parseTry
@@ -1350,7 +1348,7 @@ self =>
val cond = condExpr()
newLinesOpt()
val body = expr()
- makeWhile(start, cond, body)
+ makeWhile(cond, body)
}
}
parseWhile
@@ -1369,16 +1367,15 @@ self =>
case FOR =>
val start = in.skipToken()
def parseFor = atPos(start) {
- val enums =
+ val enums = (
if (in.token == LBRACE) inBracesOrNil(enumerators())
else inParensOrNil(enumerators())
+ )
newLinesOpt()
- if (in.token == YIELD) {
- in.nextToken()
+ if (acceptIfPresent(YIELD))
makeForYield(enums, expr())
- } else {
+ else
makeFor(enums, expr())
- }
}
def adjustStart(tree: Tree) =
if (tree.pos.isRange && start < tree.pos.start)
@@ -1469,9 +1466,8 @@ self =>
val param0 = convertToParam {
atPos(in.offset) {
Ident(ident()) match {
- case expr if in.token == COLON =>
- in.nextToken() ; Typed(expr, typeOrInfixType(location))
- case expr => expr
+ case expr if acceptIfPresent(COLON) => Typed(expr, typeOrInfixType(location))
+ case expr => expr
}
}
}
@@ -1547,32 +1543,28 @@ self =>
* }}}
*/
def simpleExpr(): Tree = {
- var canApply = true
- val t =
- if (isLiteral) literal()
- else in.token match {
- case XMLSTART =>
- xmlLiteral()
- case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
- path(thisOK = true, typeOK = false)
- case USCORE =>
- freshPlaceholder()
- case LPAREN =>
- atPos(in.offset)(makeParens(commaSeparated(expr())))
- case LBRACE =>
- canApply = false
- blockExpr()
- case NEW =>
- canApply = false
- val nstart = in.skipToken()
- val npos = r2p(nstart, nstart, in.lastOffset)
- val tstart = in.offset
- val (parents, self, stats) = template()
- val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- gen.mkNew(parents, self, stats, npos, cpos)
- case _ =>
- syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
- }
+ val canApply = in.token match {
+ case LBRACE | NEW => false
+ case _ => true
+ }
+ def mkNew(): Tree = {
+ val npos = skipTokenRange()
+ val tstart = in.offset
+ val (parents, self, stats) = template()
+ val cpos = rangeSince(tstart)
+
+ gen.mkNew(parents, self, stats, npos, cpos)
+ }
+ val t = in.token match {
+ case _ if isLiteral => literal()
+ case XMLSTART => xmlLiteral()
+ case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER => path(thisOK = true, typeOK = false)
+ case USCORE => freshPlaceholder()
+ case LPAREN => atPos(in.offset)(makeParens(commaSeparated(expr())))
+ case LBRACE => blockExpr()
+ case NEW => mkNew()
+ case _ => syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
+ }
simpleExprRest(t, canApply = canApply)
}
@@ -1679,9 +1671,9 @@ self =>
* Guard ::= if PostfixExpr
* }}}
*/
- def guard(): Tree =
- if (in.token == IF) { in.nextToken(); stripParens(postfixExpr()) }
- else EmptyTree
+ def guard(): Tree = if (acceptIfPresent(IF)) guardExpr() else EmptyTree
+
+ def guardExpr(): Tree = stripParens(postfixExpr())
/** {{{
* Enumerators ::= Generator {semi Enumerator}
@@ -1706,27 +1698,26 @@ self =>
* }}}
*/
def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
- val start = in.offset
- val hasVal = in.token == VAL
- if (hasVal)
- in.nextToken()
-
- val pat = noSeq.pattern1()
- val point = in.offset
- val hasEq = in.token == EQUALS
+ val start = in.offset
+ val hasVal = acceptIfPresent(VAL)
+ val pat = noSeq.pattern1()
+ val point = in.offset
+ val equalsBody = equalsExprOpt()
+ val hasEq = !equalsBody.isEmpty
+
+ if (hasVal && !hasEq)
+ syntaxError(in.offset, "val in for comprehension must be followed by assignment")
+ else if (hasEq && !eqOK)
+ syntaxError(point, "for comprehension must start with generator: " + expectedMsg(expected = LARROW, found = EQUALS))
+ else if (hasVal)
+ deprecationWarning(start, "val keyword in for comprehension is deprecated")
+
+ val rhs = equalsBody orElse { accept(LARROW) ; expr() }
- if (hasVal) {
- if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated")
- else syntaxError(in.offset, "val in for comprehension must be followed by assignment")
- }
-
- if (hasEq && eqOK) in.nextToken()
- else accept(LARROW)
- val rhs = expr()
enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, hasEq, rhs)
// why max above? IDE stress tests have shown that lastOffset could be less than start,
// I guess this happens if instead if a for-expression we sit on a closing paren.
- while (in.token == IF) enums += makeFilter(in.offset, guard())
+ enums ++= tokenSeparated(IF, sepFirst = true, makeFilter(in.offset, guardExpr()))
}
def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.endOrPoint), tree)
@@ -1834,14 +1825,6 @@ self =>
var top = simplePattern(badPattern3)
// after peekahead
def acceptWildStar() = atPos(top.pos.startOrPoint, in.prev.offset)(Star(stripParens(top)))
- def peekahead() = {
- in.prev copyFrom in
- in.nextToken()
- }
- def pushback() = {
- in.next copyFrom in
- in copyFrom in.prev
- }
// See SI-3189, SI-4832 for motivation. Cf SI-3480 for counter-motivation.
// TODO: dredge out the remnants of regexp patterns.
// /{/ peek for _*) or _*} (for xml escape)
@@ -2015,16 +1998,15 @@ self =>
* }}}
*/
def accessQualifierOpt(mods: Modifiers): Modifiers = {
- var result = mods
- if (in.token == LBRACKET) {
- in.nextToken()
- if (mods.hasAccessBoundary)
- syntaxError("duplicate private/protected qualifier", skipIt = false)
- result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL }
- else Modifiers(mods.flags, identForType())
- accept(RBRACKET)
+ def newModifiers(): Modifiers = (
+ if (acceptIfPresent(THIS)) mods | Flags.LOCAL // private/protected[this]
+ else Modifiers(mods.flags, identForType()) // private/protected[foo]
+ )
+ in.token match {
+ case LBRACKET if mods.hasAccessBoundary => syntaxError("duplicate private/protected qualifier", skipIt = false) ; mods
+ case LBRACKET => inBrackets(newModifiers())
+ case _ => mods
}
- result
}
private val flagTokens: Map[Int, Long] = Map(
@@ -2162,25 +2144,24 @@ self =>
}
paramType()
}
- val default =
- if (in.token == EQUALS) {
- in.nextToken()
+ val default = (
+ if (acceptIfPresent(EQUALS)) {
mods |= Flags.DEFAULTPARAM
expr()
- } else EmptyTree
+ }
+ else EmptyTree
+ )
atPos(start, if (name == nme.ERROR) start else nameOffset) {
ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
}
}
- def paramClause(): List[ValDef] = {
- if (in.token == RPAREN)
- return Nil
+ def paramClause(): List[ValDef] = in.token match {
+ case RPAREN => Nil
+ case _ =>
+ if (acceptIfPresent(IMPLICIT))
+ implicitmod = Flags.IMPLICIT
- if (in.token == IMPLICIT) {
- in.nextToken()
- implicitmod = Flags.IMPLICIT
- }
- commaSeparated(param())
+ commaSeparated(param())
}
val vds = new ListBuffer[List[ValDef]]
val start = in.offset
@@ -2188,8 +2169,7 @@ self =>
if (ofCaseClass && in.token != LPAREN)
syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
"use either case objects or case classes with an explicit `()' as a parameter list.")
- while (implicitmod == 0 && in.token == LPAREN) {
- in.nextToken()
+ while (implicitmod == 0 && acceptIfPresent(LPAREN)) {
vds += paramClause()
accept(RPAREN)
caseParam = false
@@ -2213,18 +2193,16 @@ self =>
def paramType(): Tree = paramType(useStartAsPosition = false)
def paramType(useStartAsPosition: Boolean): Tree = {
val start = in.offset
- in.token match {
- case ARROW =>
+ if (acceptIfPresent(ARROW))
+ atPos(start)(byNameApplication(typ()))
+ else {
+ val t = typ()
+ if (isRawStar) {
in.nextToken()
- atPos(start)(byNameApplication(typ()))
- case _ =>
- val t = typ()
- if (isRawStar) {
- in.nextToken()
- if (useStartAsPosition) atPos(start)(repeatedApplication(t))
- else atPos(t.pos.startOrPoint, t.pos.point)(repeatedApplication(t))
- }
- else t
+ if (useStartAsPosition) atPos(start)(repeatedApplication(t))
+ else atPos(t.pos.startOrPoint, t.pos.point)(repeatedApplication(t))
+ }
+ else t
}
}
@@ -2259,14 +2237,13 @@ self =>
}
if (contextBoundBuf ne null) {
while (in.token == VIEWBOUND) {
- contextBoundBuf += atPos(in.skipToken()) {
- makeFunctionTypeTree(List(Ident(pname)), typ())
- }
+ contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ()))
}
while (in.token == COLON) {
- contextBoundBuf += atPos(in.skipToken()) {
- AppliedTypeTree(typ(), List(Ident(pname)))
- }
+ val start = in.skipToken()
+ val tycon = typ()
+ val applied = atPos(tycon.pos withStart start)(AppliedTypeTree(tycon, Ident(pname) :: Nil))
+ contextBoundBuf += applied
}
}
param
@@ -2292,7 +2269,7 @@ self =>
t setPos o2p(in.offset)
}
- def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
+ def bound(tok: Int): Tree = if (acceptIfPresent(tok)) typ() else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2304,11 +2281,10 @@ self =>
def importClause(): List[Tree] = {
val offset = accept(IMPORT)
commaSeparated(importExpr()) match {
- case Nil => Nil
+ case Nil => Nil
case t :: rest =>
// The first import should start at the position of the keyword.
- t.setPos(t.pos.withStart(offset))
- t :: rest
+ (t setPos (t.pos withStart offset)) :: rest
}
}
@@ -2337,12 +2313,9 @@ self =>
case _ =>
val nameOffset = in.offset
val name = ident()
- if (in.token == DOT) {
+ if (acceptIfPresent(DOT))
// import foo.bar.ident.<unknown> and so create a select node and recurse.
- val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name))
- in.nextToken()
- return loop(t)
- }
+ return loop(atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)))
// import foo.bar.Baz;
else List(makeImportSelector(name, nameOffset))
}
@@ -2373,30 +2346,27 @@ self =>
selectors
}
- def wildcardOrIdent() = {
- if (in.token == USCORE) { in.nextToken() ; nme.WILDCARD }
- else ident()
- }
+ def wildcardOrIdent() = if (acceptIfPresent(USCORE)) nme.WILDCARD else ident()
/** {{{
* ImportSelector ::= Id [`=>' Id | `=>' `_']
* }}}
*/
def importSelector(): ImportSelector = {
- val start = in.offset
- val name = wildcardOrIdent()
- var renameOffset = -1
- val rename = in.token match {
- case ARROW =>
- in.nextToken()
- renameOffset = in.offset
- wildcardOrIdent()
- case _ if name == nme.WILDCARD => null
- case _ =>
- renameOffset = start
- name
- }
- ImportSelector(name, start, rename, renameOffset)
+ val start = in.offset
+ val name = wildcardOrIdent()
+
+ // The first case is overly cleverly using named arguments to reverse the
+ // positions of the last two parameters, because reading the rename will
+ // move the value of in.offset. Hey, I didn't invent side effects, I too am
+ // a victim here. I can't find a single place where the rename position
+ // is used anyway.
+ if (acceptIfPresent(ARROW))
+ ImportSelector(name, start, renamePos = in.offset, rename = wildcardOrIdent())
+ else if (name == nme.WILDCARD)
+ ImportSelector(name, start, null, -1)
+ else
+ ImportSelector(name, start, name, start)
}
/** {{{
@@ -2435,6 +2405,8 @@ self =>
defOrDcl(caseAwareTokenOffset, modifiers() withAnnotations annots)
}
+ def equalsExprOpt(): Tree = if (acceptIfPresent(EQUALS)) expr() else EmptyTree
+
/** {{{
* PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
* ValDcl ::= Id {`,' Id} `:' Type
@@ -2442,45 +2414,55 @@ self =>
* }}}
*/
def patDefOrDcl(pos : Int, mods: Modifiers): List[Tree] = {
- var newmods = mods
in.nextToken()
- val lhs = commaSeparated(stripParens(noSeq.pattern2()))
- val tp = typedOpt()
- val rhs =
- if (tp.isEmpty || in.token == EQUALS) {
- accept(EQUALS)
- if (!tp.isEmpty && newmods.isMutable &&
- (lhs.toList forall (_.isInstanceOf[Ident])) && in.token == USCORE) {
- in.nextToken()
- newmods = newmods | Flags.DEFAULTINIT
- EmptyTree
- } else {
- expr()
- }
- } else {
- newmods = newmods | Flags.DEFERRED
- EmptyTree
- }
- def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
- //Console.println("DEBUG: p = "+p.toString()); // DEBUG
- val trees =
- makePatDef(newmods,
- if (tp.isEmpty) p
- else Typed(p, tp) setPos (p.pos union tp.pos),
- rhs)
- if (newmods.isDeferred) {
- trees match {
- case List(ValDef(_, _, _, EmptyTree)) =>
- if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false)
- case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false)
- }
+
+ val lhses = commaSeparated(stripParens(noSeq.pattern2()))
+ val lhs = lhses.last
+ val tpt = typedOpt()
+ val ascriptedLhs = if (tpt.isEmpty) lhs else atPos(lhs.pos union tpt.pos)(Typed(lhs, tpt))
+ val hasEq = acceptIfPresent(EQUALS)
+ // SI-7854 an underscore following the equals doesn't necessarily mean default initialization.
+ val isDefaultInit = hasEq && in.token == USCORE && {
+ peekahead()
+ isStatSep || isStatSeqEnd || { pushback() ; false }
+ }
+ val rhs = if (hasEq && !isDefaultInit) expr() else EmptyTree
+ def allIdents = lhses forall (_.isInstanceOf[Ident])
+
+ def defaultInitFlag(): Long = {
+ if (!allIdents)
+ syntaxError(lhs.pos, "pattern definition is ineligible for default initialization", skipIt = false)
+ else if (!mods.isMutable)
+ syntaxError(lhs.pos, "only vars are eligible for default initialization", skipIt = false)
+ else if (tpt.isEmpty)
+ syntaxError(lhs.pos, "an explicit type is required for default initialization", skipIt = false)
+
+ Flags.DEFAULTINIT
+ }
+ def deferredFlag(): Long = {
+ if (mods.isLazy) // e.g. lazy val foo: Int
+ syntaxError(lhs.pos, "lazy values may not be abstract", skipIt = false)
+ else if (!allIdents) // e.g. val Some(x)
+ syntaxError(lhs.pos, "pattern definition may not be abstract", skipIt = false)
+
+ Flags.DEFERRED
+ }
+ val newmods = mods | (
+ if (isDefaultInit) defaultInitFlag()
+ else if (rhs.isEmpty) deferredFlag()
+ else 0L
+ )
+
+ def makeValDefs(decl: Tree): List[Tree] = {
+ val newTpt = if (tpt.isEmpty) decl else Typed(decl, tpt.duplicate setPos tpt.pos.focus) setPos decl.pos.focus
+ makePatDef(newmods, newTpt, rhs.duplicate setPos rhs.pos.focus) match {
+ case tree :: Nil => (tree setPos decl.pos) :: Nil
+ case trees => trees map (_ setPos decl.pos.focus)
}
- trees
}
- val trees = (lhs.toList.init flatMap (mkDefs(_, tp.duplicate, rhs.duplicate))) ::: mkDefs(lhs.last, tp, rhs)
- val hd = trees.head
- hd setPos hd.pos.withStart(pos)
- ensureNonOverlapping(hd, trees.tail)
+
+ val trees = (lhses.init flatMap makeValDefs) ::: makePatDef(newmods, ascriptedLhs, rhs)
+ ensureNonOverlapping(trees.last, trees.init)
trees
}
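The rewritten `patDefOrDcl` separates three outcomes: a real right-hand side, a deferred (abstract) member, and default initialization, and the SI-7854 peekahead only treats `_` after `=` as default initialization when it is immediately followed by a statement separator. Some source-level examples of how those rules fall out (illustrative; the quoted messages are the ones introduced above):

class PatDefExamples {
  var a: Int = _              // default initialization: a var, an explicit type, and `_` ends the statement
  var f: Int => Int = _ + 1   // SI-7854: this `_` begins a placeholder function, not default initialization
  // val b: Int = _           // rejected: "only vars are eligible for default initialization"
  // var c = _                // rejected: "an explicit type is required for default initialization"
  // lazy val d: Int          // rejected: "lazy values may not be abstract"
}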
@@ -2525,7 +2507,8 @@ self =>
in.nextToken()
if (in.token == THIS) {
atPos(start, in.skipToken()) {
- val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
+ val cbounds = classContextBounds map (_.duplicate)
+ val vparamss = paramClauses(nme.CONSTRUCTOR, cbounds, ofCaseClass = false)
newLineOptWhenFollowedBy(LBRACE)
val rhs = in.token match {
case LBRACE => atPos(in.offset) { constrBlock(vparamss) }
@@ -2549,7 +2532,8 @@ self =>
// i.e. (B[T] or T => B)
val contextBoundBuf = new ListBuffer[Tree]
val tparams = typeParamClauseOpt(name, contextBoundBuf)
- val vparamss = paramClauses(name, contextBoundBuf.toList, ofCaseClass = false)
+ val cbounds = contextBoundBuf.toList
+ val vparamss = paramClauses(name, cbounds, ofCaseClass = false)
newLineOptWhenFollowedBy(LBRACE)
var restype = fromWithinReturnType(typedOpt())
val rhs =
@@ -2709,8 +2693,9 @@ self =>
val result = gen.mkClassDef(mods1, name, tparams, template)
// Context bounds generate implicit parameters (part of the template) with types
// from tparams: we need to ensure these don't overlap
- if (!classContextBounds.isEmpty)
+ if (classContextBounds.nonEmpty)
ensureNonOverlapping(template, tparams)
+
result
}
}
@@ -2772,16 +2757,15 @@ self =>
* }}}
*/
def templateParents(): List[Tree] = {
- val parents = new ListBuffer[Tree]
- def readAppliedParent() = {
+ def readAppliedParent(): Tree = {
val start = in.offset
val parent = startAnnotType()
- val argss = if (in.token == LPAREN) multipleArgumentExprs() else Nil
- parents += atPos(start)((parent /: argss)(Apply.apply))
+ in.token match {
+ case LPAREN => atPos(start)((parent /: multipleArgumentExprs())(Apply.apply))
+ case _ => parent
+ }
}
- readAppliedParent()
- while (in.token == WITH) { in.nextToken(); readAppliedParent() }
- parents.toList
+ tokenSeparated(WITH, sepFirst = false, readAppliedParent())
}
/** {{{
@@ -2815,9 +2799,9 @@ self =>
def ensureEarlyDef(tree: Tree): Tree = tree match {
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
copyValDef(vdef)(mods = mods | Flags.PRESUPER)
- case tdef @ TypeDef(mods, name, tparams, rhs) =>
+ case tdef @ TypeDef(mods, _, _, _) =>
deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
- treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)
+ copyTypeDef(tdef)(mods = mods | Flags.PRESUPER)
case docdef @ DocDef(comm, rhs) =>
treeCopy.DocDef(docdef, comm, rhs)
case stat if !stat.isEmpty =>
@@ -2852,7 +2836,6 @@ self =>
)
val parentPos = o2p(in.offset)
val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
-
atPos(tstart1) {
// Exclude only the 9 primitives plus AnyVal.
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
@@ -3103,10 +3086,9 @@ self =>
def compilationUnit(): PackageDef = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
val ts = new ListBuffer[Tree]
- while (in.token == SEMI) in.nextToken()
+ while (acceptIfPresent(SEMI)) ()
val start = in.offset
- if (in.token == PACKAGE) {
- in.nextToken()
+ if (acceptIfPresent(PACKAGE)) {
if (in.token == OBJECT) {
// TODO - this next line is supposed to be
// ts += packageObjectDef(start)
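The templateParents change above folds the manual WITH loop into the parser's generic separator helper. A minimal sketch of what that helper does, reconstructed from the loop it replaces (the exact parameter names and the Int token type are assumptions; the real tokenSeparated in Parsers.scala may differ in detail):

    def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
      val ts = new ListBuffer[T]
      if (!sepFirst)
        ts += part
      while (in.token == separator) {
        in.nextToken()
        ts += part
      }
      ts.toList
    }

With separator = WITH and sepFirst = false this reproduces the removed readAppliedParent(); while (in.token == WITH) ... loop.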
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 6957f85689..5ef40923b4 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -52,6 +52,15 @@ trait Scanners extends ScannersCommon {
type Offset = Int
trait TokenData extends CommonTokenData {
+ override def toString = s"""
+ |TokenData(
+ | token $token ${token2string(token)}
+ | offset $offset
+ | last $lastOffset
+ | name $name
+ | strVal $strVal
+ | base $base
+ |)""".stripMargin.trim
/** the next token */
var token: Int = EMPTY
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 28e3217449..976e578afd 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,6 +8,7 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.OffsetPosition
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
@@ -204,28 +205,22 @@ abstract class TreeBuilder {
atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) }
}
- /** A type tree corresponding to (possibly unary) intersection type */
- def makeIntersectionTypeTree(tps: List[Tree]): Tree =
- if (tps.tail.isEmpty) tps.head
- else CompoundTypeTree(Template(tps, emptyValDef, Nil))
-
/** Create tree representing a while loop */
- def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
- val lname = freshTermName(nme.WHILE_PREFIX)
- def default = wrappingPos(List(cond, body)) match {
- case p if p.isDefined => p.endOrPoint
- case _ => startPos
- }
- val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
- val rhs = If(cond, Block(List(body), continu), Literal(Constant(())))
- LabelDef(lname, Nil, rhs)
+ def makeWhile(cond: Tree, body: Tree): Tree = {
+ val lname = freshTermName(nme.WHILE_PREFIX)
+ val continu = atPos(cond.pos.focus)(Apply(Ident(lname), Nil))
+ val rhs = atPos(cond.pos union body.pos)(If(cond, atPos(body.pos)(Block(body :: Nil, continu)), Literal(Constant(()))))
+
+ atPos(rhs.pos)(LabelDef(lname, Nil, rhs))
}
/** Create tree representing a do-while loop */
def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
- val continu = Apply(Ident(lname), Nil)
- val rhs = Block(List(body), If(cond, continu, Literal(Constant(()))))
- LabelDef(lname, Nil, rhs)
+ val continu = atPos(cond.pos.focus)(Apply(Ident(lname), Nil))
+ val condition = atPos(cond.pos)(If(cond, continu, Literal(Constant(()))))
+ val rhs = atPos(cond.pos union body.pos)(Block(body :: Nil, condition))
+
+ atPos(rhs.pos)(LabelDef(lname, Nil, rhs))
}
/** Create block of statements `stats` */
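For orientation, the LabelDef that makeWhile and makeDoWhile build corresponds to the usual source-level picture of the two loops; a rough equivalent using hypothetical label names (the real fresh names come from freshTermName with the WHILE/DO_WHILE prefixes):

    // while (cond) body
    def while$1(): Unit = if (cond) { body; while$1() } else ()
    while$1()

    // do body while (cond)
    def doWhile$1(): Unit = { body; if (cond) doWhile$1() else () }
    doWhile$1()

The patch does not change this shape, only the positions: cond.pos and body.pos are unioned onto the synthetic If/Block/LabelDef nodes instead of being derived from a start offset.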
@@ -317,19 +312,13 @@ abstract class TreeBuilder {
* The closure is assigned a transparent position with the point at pos.point and
* the limits given by pat and body.
*/
- def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = {
- def splitpos = wrappingPos(List(pat, body)).withPoint(pos.point).makeTransparent
- matchVarPattern(pat) match {
- case Some((name, tpt)) =>
- Function(
- List(atPos(pat.pos) { ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) }),
- body) setPos splitpos
- case None =>
- atPos(splitpos) {
- makeVisitor(List(CaseDef(pat, EmptyTree, body)), checkExhaustive = false)
- }
+ def makeClosure(pos: Position, pat: Tree, body: Tree): Tree =
+ atPos((pos union pat.pos union body.pos).makeTransparent) {
+ matchVarPattern(pat) match {
+ case Some((name, tpt)) => Function(atPos(pat.pos)(ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree)) :: Nil, body)
+ case None => makeVisitor(CaseDef(pat, body) :: Nil, checkExhaustive = false)
+ }
}
- }
/* Make an application qual.meth(pat => body) positioned at `pos`.
*/
@@ -507,7 +496,7 @@ abstract class TreeBuilder {
tmp, TypeTree(), matchExpr)
}
var cnt = 0
- val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos) {
+ val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos.focus) {
cnt += 1
ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))
}
@@ -534,3 +523,13 @@ abstract class TreeBuilder {
}
}
}
+
+abstract class UnitTreeBuilder extends TreeBuilder {
+ import global._
+ def unit: CompilationUnit
+ def freshName(prefix: String): Name = freshTermName(prefix)
+ def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
+ def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
+ def o2p(offset: Int): Position = new OffsetPosition(unit.source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(unit.source, start, mid, end)
+}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
index 0856f2f09d..8bcdb6dbd2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
@@ -3,10 +3,8 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
class CheckerException(s: String) extends Exception(s)
-
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 843299398b..a80fee876e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1319,6 +1319,8 @@ abstract class GenICode extends SubComponent {
/** Some useful equality helpers.
*/
def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true }
+ def isLiteral(t: Tree) = cond(t) { case Literal(_) => true }
+ def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
/* If l or r is constant null, returns the other ; otherwise null */
def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
@@ -1514,6 +1516,23 @@ abstract class GenICode extends SubComponent {
val branchesReachable = !ctx1.bb.ignore
ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
branchesReachable
+ } else if (isNonNullExpr(l)) {
+ // Avoid null check if L is statically non-null.
+ //
+ // "" == expr -> "".equals(expr)
+ // Nil == expr -> Nil.equals(expr)
+ //
+ // Common enough (through pattern matching) to treat this specially here rather than
+ // hoping that -Yconst-opt is enabled. The impossible branches for null checks lead
+ // to spurious "branch not covered" warnings in Jacoco code coverage.
+ var ctx1 = genLoad(l, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1 = genLoad(r, ctx1, ObjectReference)
+ ctx1.bb emitOnly(
+ CALL_METHOD(Object_equals, Dynamic),
+ CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+ )
+ branchesReachable
} else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ObjectReference)
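The new branch elides the null check when the left operand is a literal or a module reference, both statically known to be non-null. In source terms (illustrative only):

    "" == expr    // now compiled as "".equals(expr), no null check on the receiver
    Nil == expr   // likewise Nil.equals(expr)
    l == r        // for an arbitrary l, roughly: if (l eq null) r eq null else l.equals(r)

The last line is the generic scheme the remaining else branch (the one using eqEqTempLocal) still emits; the point of the new case is only to skip the impossible null branch, which also silences the Jacoco "branch not covered" noise mentioned in the comment.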
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 2c8fda85f4..633e71a756 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -93,7 +93,7 @@ trait TypeKinds { self: ICodes =>
/**
* this is directly assignable to other if no coercion or
* casting is needed to convert this to other. It's a distinct
- * relationship from <:< because on the JVM, BOOL, BYTE, CHAR,
+ * relationship from <:< because on the JVM, BOOL, BYTE, CHAR,
* SHORT need no coercion to INT even though JVM arrays
* are covariant, ARRAY[SHORT] is not a subtype of ARRAY[INT]
*/
@@ -101,7 +101,7 @@ trait TypeKinds { self: ICodes =>
case INT => this.isIntSizedType
case _ => this <:< other
}
-
+
/** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
def isWideType: Boolean = false
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
index 182209dfe6..eff7d3211e 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -866,11 +866,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
// a plain class lacking companion module, for details see `isCandidateForForwarders`).
// -----------------------------------------------------------------------------------------
- val ExcludedForwarderFlags = {
- import symtab.Flags._
- // Should include DEFERRED but this breaks findMember.
- ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
- }
+ val ExcludedForwarderFlags = genASM.ExcludedForwarderFlags
/* Adds a @remote annotation, actual use unknown.
*
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index edb1c55224..8d025b5451 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1479,7 +1479,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
sym.owner.isSynthetic &&
sym.owner.tpe.parents.exists { t =>
val TypeRef(_, sym, _) = t
- FunctionClass contains sym
+ FunctionClass.seq contains sym
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 2c3bf26958..01c4ff5a52 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -18,10 +18,10 @@ trait GenJVMASM {
import icodes._
import definitions._
- protected val ExcludedForwarderFlags = {
+ val ExcludedForwarderFlags = {
import Flags._
// Should include DEFERRED but this breaks findMember.
- ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
+ ( SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
}
protected def isJavaEntryPoint(icls: IClass) = {
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 761fd79358..817a4a5c88 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -23,5 +23,6 @@ package object nsc {
type MissingRequirementError = scala.reflect.internal.MissingRequirementError
val MissingRequirementError = scala.reflect.internal.MissingRequirementError
- val ListOfNil = List(Nil)
+ @deprecated("Use scala.reflect.internal.util.ListOfNil", "2.11.0")
+ lazy val ListOfNil = scala.reflect.internal.util.ListOfNil
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index fdb5c72c3d..52c6ddc6ee 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -9,6 +9,7 @@ package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
import scala.reflect.internal.util._
+import StringOps._
/**
* This class implements a Reporter that displays messages on a text
@@ -40,7 +41,10 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
StringOps.countElementsAsString((severity).count, label(severity))
/** Prints the message. */
- def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
+ def printMessage(msg: String) {
+ writer print trimAllTrailingSpace(msg) + "\n"
+ writer.flush()
+ }
/** Prints the message with the given position indication. */
def printMessage(posIn: Position, msg: String) {
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 34e2a8a96a..04c5bdf824 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -14,7 +14,7 @@ import scala.reflect.internal.util.Position
* console.
*/
class StoreReporter extends Reporter {
- class Info(val pos: Position, val msg: String, val severity: Severity) {
+ case class Info(pos: Position, msg: String, severity: Severity) {
override def toString() = "pos: " + pos + " " + msg + " " + severity
}
val infos = new mutable.LinkedHashSet[Info]
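Making Info a case class lets tools that inspect a StoreReporter destructure the stored diagnostics directly; a hypothetical usage sketch:

    val reporter = new StoreReporter
    // ... run a compilation with a Global wired to this reporter ...
    reporter.infos foreach {
      case reporter.Info(pos, msg, severity) => println(s"[$severity] $pos: $msg")
    }

It also gives Info structural equality, so identical (pos, msg, severity) triples now collapse in the LinkedHashSet instead of accumulating as distinct entries.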
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 34c8e8df9a..8c2b510bfd 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -44,7 +44,7 @@ class FscSettings(error: String => Unit) extends Settings(error) {
// we need to ensure the files specified with relative locations are absolutized based on the currentDir
(r, args map {a => absolutizePath(a)})
}
-
+
/**
* Take an individual path and if it's not absolute turns it into an absolute path based on currentDir.
* If it's already absolute then it's left alone.
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index b16ba91916..0135190256 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -94,7 +94,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (!isFinal)
varSym.addAnnotation(VolatileAttr)
- val varDef = typedPos( VAL(varSym) === forInit )
+ val varDef = typedPos(ValDef(varSym, forInit))
newStaticMembers append transform(varDef)
val varInit = typedPos( REF(varSym) === forInit )
@@ -155,13 +155,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
BLOCK(
- VAL(methodCache) === getPolyCache,
+ ValDef(methodCache, getPolyCache),
IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
) ENDIF,
- VAL(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ ValDef(methodSym, (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym))),
IF (REF(methodSym) OBJ_NE NULL) .
THEN (Return(REF(methodSym)))
ELSE {
@@ -372,7 +372,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
qual = REF(sym)
BLOCK(
- VAL(sym) === qual0,
+ ValDef(sym, qual0),
callAsReflective(mparams map (_.tpe), resType)
)
}
@@ -543,7 +543,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
)
// create field definition and initialization
- val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
+ val stfieldDef = theTyper.typedPos(pos)(ValDef(stfieldSym, rhs))
val stfieldInit = theTyper.typedPos(pos)(REF(stfieldSym) === rhs)
// add field definition to new defs
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index c74fc620ca..31855bc1ad 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -268,7 +268,7 @@ abstract class Erasure extends AddInterfaces
else abbrvTag(sym).toString
}
else if (sym.isDerivedValueClass) {
- val unboxed = sym.derivedValueClassUnbox.info.finalResultType
+ val unboxed = sym.derivedValueClassUnbox.tpe_*.finalResultType
val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType
def unboxedMsg = if (unboxed == unboxedSeen) "" else s", seen within ${sym.simpleName} as $unboxedSeen"
logResult(s"Erasure of value class $sym (underlying type $unboxed$unboxedMsg) is") {
@@ -513,7 +513,7 @@ abstract class Erasure extends AddInterfaces
maybeWrap(bridgingCall)
}
- atPos(bridge.pos)(DefDef(bridge, rhs))
+ DefDef(bridge, rhs)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index d6a6e027cb..b2e071579e 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -216,7 +216,7 @@ abstract class ExplicitOuter extends InfoTransform
* values for outer parameters of constructors.
* The class provides methods for referencing via outer.
*/
- abstract class OuterPathTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ abstract class OuterPathTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with UnderConstructionTransformer {
/** The directly enclosing outer parameter, if we are in a constructor */
protected var outerParam: Symbol = NoSymbol
@@ -225,9 +225,10 @@ abstract class ExplicitOuter extends InfoTransform
*
* Will return `EmptyTree` if there is no outer accessor because of a premature self reference.
*/
- protected def outerValue: Tree =
- if (outerParam != NoSymbol) ID(outerParam)
- else outerSelect(THIS(currentClass))
+ protected def outerValue: Tree = outerParam match {
+ case NoSymbol => outerSelect(gen.mkAttributedThis(currentClass))
+ case outerParam => gen.mkAttributedIdent(outerParam)
+ }
/** Select and apply outer accessor from 'base'
* The result is typed but not positioned.
@@ -275,34 +276,19 @@ abstract class ExplicitOuter extends InfoTransform
else outerPath(outerSelect(base), from.outerClass, to)
}
-
- /** The stack of class symbols in which a call to this() or to the super
- * constructor, or early definition is active
- */
- protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz
- protected val selfOrSuperCalls = mutable.Stack[Symbol]()
- @inline protected def inSelfOrSuperCall[A](sym: Symbol)(a: => A) = {
- selfOrSuperCalls push sym
- try a finally selfOrSuperCalls.pop()
- }
-
override def transform(tree: Tree): Tree = {
+ def sym = tree.symbol
val savedOuterParam = outerParam
try {
tree match {
case Template(_, _, _) =>
outerParam = NoSymbol
- case DefDef(_, _, _, vparamss, _, _) =>
- if (tree.symbol.isClassConstructor && isInner(tree.symbol.owner)) {
- outerParam = vparamss.head.head.symbol
- assert(outerParam.name startsWith nme.OUTER, outerParam.name)
- }
+ case DefDef(_, _, _, (param :: _) :: _, _, _) if sym.isClassConstructor && isInner(sym.owner) =>
+ outerParam = param.symbol
+ assert(outerParam.name startsWith nme.OUTER, outerParam.name)
case _ =>
}
- if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree))
- inSelfOrSuperCall(currentOwner.owner)(super.transform(tree))
- else
- super.transform(tree)
+ super.transform(tree)
}
finally outerParam = savedOuterParam
}
@@ -368,16 +354,14 @@ abstract class ExplicitOuter extends InfoTransform
/** The definition tree of the outer accessor of current class
*/
- def outerFieldDef: Tree =
- VAL(outerField(currentClass)) === EmptyTree
+ def outerFieldDef: Tree = ValDef(outerField(currentClass))
/** The definition tree of the outer accessor of current class
*/
def outerAccessorDef: Tree = localTyper typed {
- outerAccessor(currentClass) match {
- case acc if acc.isDeferred => DefDef(acc, EmptyTree)
- case acc => DefDef(acc, Select(This(currentClass), outerField(currentClass)))
- }
+ val acc = outerAccessor(currentClass)
+ val rhs = if (acc.isDeferred) EmptyTree else Select(This(currentClass), outerField(currentClass))
+ DefDef(acc, rhs)
}
/** The definition tree of the outer accessor for class mixinClass.
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 1c32721444..6a405295cf 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -205,7 +205,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def makeExtensionMethodSymbol = {
val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
setAnnotations origMeth.annotations
)
origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
@@ -234,10 +234,10 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (extensionBody.tpe <:< extensionMono.finalResultType)
extensionBody
else
- gen.mkCastPreservingAnnotations(extensionBody, extensionMono.finalResultType) // SI-7818 e.g. mismatched existential skolems
+ gen.mkCastPreservingAnnotations(extensionBody, extensionMono.finalResultType) // SI-7818 e.g. mismatched existential skolems
// Record the extension method. Later, in `Extender#transformStats`, these will be added to the companion object.
- extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, castBody))
+ extensionDefs(companion) += DefDef(extensionMeth, castBody)
// These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
// which leaves the actual argument application for extensionCall.
@@ -294,7 +294,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val origThis = extensionMeth.owner.companionClass
val baseType = qual.tpe.baseType(origThis)
val allTargs = targs.map(_.tpe) ::: baseType.typeArgs
- val fun = gen.mkAttributedTypeApply(THIS(extensionMeth.owner), extensionMeth, allTargs)
+ val fun = gen.mkAttributedTypeApply(gen.mkAttributedThis(extensionMeth.owner), extensionMeth, allTargs)
allArgss.foldLeft(fun)(Apply(_, _))
}
case _ => super.transform(tree)
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 515fa66cfa..acef2a50d8 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -436,8 +436,15 @@ abstract class LambdaLift extends InfoTransform {
private def liftDef(tree: Tree): Tree = {
val sym = tree.symbol
val oldOwner = sym.owner
- if (sym.owner.isAuxiliaryConstructor && sym.isMethod) // # bug 1909
- sym setFlag STATIC
+ if (sym.isMethod && isUnderConstruction(sym.owner.owner)) { // # bug 1909
+ if (sym.isModule) { // Yes, it can be a module and a method, see comments on `isModuleNotMethod`!
+ // TODO promote to an implementation restriction if we can reason that this *always* leads to VerifyError.
+ // See neg/t1909-object.scala
+ def msg = s"SI-1909 Unable to STATICally lift $sym, which is defined in the self- or super-constructor call of ${sym.owner.owner}. A VerifyError is likely."
+ devWarning(tree.pos, msg)
+ } else sym setFlag STATIC
+ }
+
sym.owner = sym.owner.enclClass
if (sym.isClass) sym.owner = sym.owner.toInterface
if (sym.isMethod) sym setFlag LIFTED
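The SI-1909 guard fires for a module defined inside a self- or super-constructor call; a hypothetical reconstruction of the shape in neg/t1909-object.scala (the test referenced in the comment):

    class Kaboom(a: Any) {
      def this() = this({
        object O { def msg = "kaboom" }  // module defined while `this` is still under construction
        O.msg                            // lifting O as STATIC here risks a VerifyError
      })
    }

Ordinary local methods in the same position still receive the STATIC flag as before; only the module-and-method case is downgraded to a devWarning.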
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 15ca916ac1..b71d14a04f 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -199,14 +199,15 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bitmaps.contains(lzyVal))
bitmaps(lzyVal).map(_.owner = defSym)
val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
- DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
+
+ DefDef(defSym, addBitmapDefs(lzyVal, BLOCK(rhs, retVal)))
}
def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): (Tree, Tree) = {
val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
- (If(cond, Apply(ID(slowPathDef.symbol), List()), retVal), slowPathDef)
+ (If(cond, Apply(Ident(slowPathDef.symbol), Nil), retVal), slowPathDef)
}
/** return a 'lazified' version of rhs. Rhs should conform to the
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 3ec4d16bf5..7b545be07e 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -472,7 +472,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The typer */
private var localTyper: erasure.Typer = _
private def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
- private def localTyped(pos: Position, tree: Tree, pt: Type) = localTyper.typed(atPos(pos)(tree), pt)
/** Map lazy values to the fields they should null after initialization. */
private var lazyValNullables: Map[Symbol, Set[Symbol]] = _
@@ -695,10 +694,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
def completeSuperAccessor(stat: Tree) = stat match {
case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
- val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
- val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
+ val body = atPos(stat.pos)(Apply(Select(Super(clazz, tpnme.EMPTY), stat.symbol.alias), vparams map (v => Ident(v.symbol))))
+ val pt = stat.symbol.tpe.resultType
- deriveDefDef(stat)(_ => enteringMixin(transform(rhs1)))
+ copyDefDef(stat)(rhs = enteringMixin(transform(localTyper.typed(body, pt))))
case _ =>
stat
}
@@ -724,8 +723,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case _ =>
}
val init = bitmapKind match {
- case BooleanClass => VAL(sym) === FALSE
- case _ => VAL(sym) === ZERO
+ case BooleanClass => ValDef(sym, FALSE)
+ case _ => ValDef(sym, ZERO)
}
sym setFlag PrivateLocal
@@ -775,7 +774,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
- addDef(position(defSym), DEF(defSym).mkTree(strictSubst(BLOCK(rhs, retVal))) setSymbol defSym)
+ addDef(position(defSym), DefDef(defSym, strictSubst(BLOCK(rhs, retVal))))
defSym
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 4bc4e06fa7..5a440039d6 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1836,12 +1836,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
// ctor
- mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree))
+ mbrs += DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree)
} else {
- mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
+ mbrs += DefDef(m, { paramss => EmptyTree })
}
} else if (m.isValue) {
- mbrs += ValDef(m, EmptyTree).setType(NoType).setPos(m.pos)
+ mbrs += ValDef(m).setType(NoType)
} else if (m.isClass) {
// mbrs +=
// ClassDef(m, Template(m.info.parents map TypeTree, emptyValDef, List())
@@ -1853,9 +1853,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe)
- mbrs += atPos(sym.pos) {
- DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType)
- }
+ mbrs += DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType)
}
mbrs.toList
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 6f422fcc90..b471d16ddd 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -156,7 +156,7 @@ abstract class TailCalls extends Transform {
private def mkLabel() = {
val label = method.newLabel(newTermName("_" + method.name), method.pos)
val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
- label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
+ label setInfo MethodType(thisParam :: method.tpe.params, method.tpe_*.finalResultType)
if (isEligible)
label substInfo (method.tpe.typeParams, tparams)
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 16c803e2e8..e68f55a09e 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -158,11 +158,12 @@ abstract class UnCurry extends InfoTransform
*/
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
localTyper typed {
- val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
+ val restpe = meth.tpe_*.finalResultType
+ val extpe = nonLocalReturnExceptionType(restpe)
val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
- val argType = meth.tpe.finalResultType withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
+ val argType = restpe withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(argType))
- val rhs = (
+ val rhs = (
IF ((ex DOT nme.key)() OBJ_EQ Ident(key))
THEN ((ex DOT nme.value)())
ELSE (Throw(Ident(ex)))
@@ -739,7 +740,7 @@ abstract class UnCurry extends InfoTransform
case p if rpsymbols(p.symbol) => toArrayType(p.symbol.tpe)
case p => p.symbol.tpe
}
- val forwresult = dd.symbol.tpe.finalResultType
+ val forwresult = dd.symbol.tpe_*.finalResultType
val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp)
)
@@ -751,10 +752,11 @@ abstract class UnCurry extends InfoTransform
// create the symbol
val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+ def forwParams = forwsym.info.paramss.flatten
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
- val locals = map2(forwsym ARGS, flatparams) {
+ val locals = map2(forwParams, flatparams) {
case (_, fp) if !rpsymbols(fp.symbol) => null
case (argsym, fp) =>
Block(Nil,
@@ -764,15 +766,13 @@ abstract class UnCurry extends InfoTransform
)
)
}
- val seqargs = map2(locals, forwsym ARGS) {
+ val seqargs = map2(locals, forwParams) {
case (null, argsym) => Ident(argsym)
case (l, _) => l
}
val end = if (forwsym.isConstructor) List(UNIT) else Nil
- DEF(forwsym) === BLOCK(
- Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*
- )
+ DefDef(forwsym, BLOCK(Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*))
}
// check if the method with that name and those arguments already exists in the template
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 45aa1106f0..f7b194a6ca 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -292,6 +292,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis {
type Type = global.Type
type Tree = global.Tree
+ import global.definitions.ConstantNull
// resets hash consing -- only supposed to be called by TreeMakersToProps
def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() }
@@ -320,7 +321,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
val staticTpCheckable: Type = checkableType(staticTp)
private[this] var _mayBeNull = false
- def registerNull(): Unit = { ensureCanModify(); if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def registerNull(): Unit = { ensureCanModify(); if (ConstantNull <:< staticTpCheckable) _mayBeNull = true }
def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
@@ -568,7 +569,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
object TypeConst extends TypeConstExtractor {
def apply(tp: Type) = {
- if (tp =:= NullTp) NullConst
+ if (tp =:= ConstantNull) NullConst
else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
else Const.unique(tp, new TypeConst(tp))
}
@@ -577,7 +578,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
sealed class TypeConst(val tp: Type) extends Const {
- assert(!(tp =:= NullTp))
+ assert(!(tp =:= ConstantNull))
/*private[this] val id: Int = */ Const.nextTypeId
val wideTp = widenToClass(tp)
@@ -598,7 +599,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
def apply(p: Tree) = {
val tp = p.tpe.normalize
- if (tp =:= NullTp) NullConst
+ if (tp =:= ConstantNull) NullConst
else {
val wideTp = widenToClass(tp)
@@ -626,16 +627,14 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
// debug.patmat("VC"+(tp, wideTp, toString))
- assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
+ assert(!(tp =:= ConstantNull)) // TODO: assert(!tp.isStable)
/*private[this] val id: Int = */Const.nextValueId
def isValue = true
}
-
- lazy val NullTp = ConstantType(Constant(null))
case object NullConst extends Const {
- def tp = NullTp
- def wideTp = NullTp
+ def tp = ConstantNull
+ def wideTp = ConstantNull
def isValue = true
override def toString = "null"
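The local NullTp alias that used to live in this file is replaced by the shared constant types from Definitions. Assuming they mirror the removed alias (and the analogous booleans used in MatchAnalysis and MatchOptimization below), they amount to:

    val ConstantNull  = ConstantType(Constant(null))
    val ConstantTrue  = ConstantType(Constant(true))
    val ConstantFalse = ConstantType(Constant(false))

so every tp =:= ConstantNull check above is the same test as the old tp =:= NullTp.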
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index 8feb87210e..7eb899d9d7 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -50,7 +50,7 @@ trait TreeAndTypeAnalysis extends Debugging {
case UnitClass =>
Some(List(UnitTpe))
case BooleanClass =>
- Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
+ Some(ConstantTrue :: ConstantFalse :: Nil)
// TODO case _ if tp.isTupleType => // recurse into component types
case modSym: ModuleClassSymbol =>
Some(List(tp))
@@ -271,9 +271,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
case SubstOnlyTreeMaker(_, _) => True
case GuardTreeMaker(guard) =>
guard.tpe match {
- case ConstantType(Constant(true)) => True
- case ConstantType(Constant(false)) => False
- case _ => handleUnknown(tm)
+ case ConstantTrue => True
+ case ConstantFalse => False
+ case _ => handleUnknown(tm)
}
case ExtractorTreeMaker(_, _, _) |
ProductExtractorTreeMaker(_, _) |
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index cf74f0fb11..c8dbbb02bb 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -170,7 +170,7 @@ trait MatchCodeGen extends Interface {
} toList // at most 1 element
// scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = scrutSym.fold(List[Tree]())(sym => (VAL(sym) === scrut) :: Nil) // for alternatives
+ val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
// the assumption is once we encounter a case, the remainder of the block will consist of cases
@@ -199,7 +199,7 @@ trait MatchCodeGen extends Interface {
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
val prevSym = freshSym(prev.pos, prev.tpe, "o")
BLOCK(
- VAL(prevSym) === prev,
+ ValDef(prevSym, prev),
// must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
ifThenElseZero(
NOT(prevSym DOT vpmName.isEmpty),
@@ -214,14 +214,12 @@ trait MatchCodeGen extends Interface {
// next == MatchMonad[U]
// returns MatchMonad[U]
def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
- val rest =
+ val rest = (
// only emit a local val for `nextBinder` if it's actually referenced in `next`
if (next.exists(_.symbol eq nextBinder))
- BLOCK(
- VAL(nextBinder) === res,
- next
- )
+ BLOCK(ValDef(nextBinder, res), next)
else next
+ )
ifThenElseZero(cond, rest)
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index ec45789687..8a04c67582 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -146,7 +146,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
lazy val storedCond = freshSym(pos, BooleanTpe, "rc") setFlag MUTABLE
lazy val treesToHoist: List[Tree] = {
nextBinder setFlag MUTABLE
- List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ List(storedCond, nextBinder) map (b => ValDef(b, codegen.mkZero(b.info)))
}
// TODO: finer-grained duplication
@@ -402,23 +402,15 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
// must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
- private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
- var cases = cs
- var unreachable: Option[CaseDef] = None
-
- while (cases.nonEmpty && unreachable.isEmpty) {
- val currCase = cases.head
- if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
- unreachable = Some(cases.tail.head)
- else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
- unreachable = cases.tail.find(caseImplies(currCase))
- else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
- unreachable = Some(currCase)
-
- cases = cases.tail
+ private def unreachableCase(cases: List[CaseDef]): Option[CaseDef] = {
+ def loop(cases: List[CaseDef]): Option[CaseDef] = cases match {
+ case head :: next :: _ if isDefault(head) => Some(next) // subsumed by the next case, but faster
+ case head :: rest if !isGuardedCase(head) || head.guard.tpe =:= ConstantTrue => rest find caseImplies(head) orElse loop(rest)
+ case head :: _ if head.guard.tpe =:= ConstantFalse => Some(head)
+ case _ :: rest => loop(rest)
+ case _ => None
}
-
- unreachable
+ loop(cases)
}
// empty list ==> failure
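The rewritten unreachableCase detects the same shapes as before, now as a tail-recursive loop; illustrative matches it would flag (plain Scala, not part of the patch):

    x match {
      case _ => 0          // a default case ...
      case 1 => 1          // ... makes this later case unreachable
    }

    x match {
      case n if false => 0 // a guard of type ConstantFalse marks the case itself unreachable
      case _          => 1
    }

The remaining shape, an earlier case that logically covers a later one, is handled by the rest find caseImplies(head) branch.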
@@ -528,7 +520,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse Throw(MatchErrorClass.tpe, REF(scrutSym)) }
def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
(DEFAULT IF guard) ==> body
}}
@@ -546,7 +538,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
if (scrutSym.tpe =:= IntTpe) REF(scrutSym)
else (REF(scrutSym) DOT (nme.toInt))
Some(BLOCK(
- VAL(scrutSym) === scrut,
+ ValDef(scrutSym, scrut),
Match(scrutToInt, caseDefsWithDefault) // a switch
))
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 942aa80c34..317685682d 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -174,7 +174,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
else {
// only store binders actually used
val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(ValDef(_, _)), in)
}
}
}
@@ -288,8 +288,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
case TypeRef(_, SomeClass, _) => true
// probably not useful since this type won't be inferred nor can it be written down (yet)
- case ConstantType(Constant(true)) => true
- case _ => false
+ case ConstantTrue => true
+ case _ => false
}
def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
@@ -328,9 +328,9 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
val expectedOuter = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
- case _ => mkTRUE // fallback for SI-6183
+ case ThisType(clazz) => This(clazz)
+ case NoType => mkTRUE // fallback for SI-6183
+ case pre => REF(pre.prefix, pre.termSymbol)
}
// ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
@@ -527,8 +527,9 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
- fixerUpper(owner, scrut.pos){
- def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+ fixerUpper(owner, scrut.pos) {
+ def matchFailGen = matchFailGenOverride orElse Some(Throw(MatchErrorClass.tpe, _: Tree))
+
debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (suppression, requireSwitch): (Suppression, Boolean) =
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 114bcba5df..6267585ea8 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -9,6 +9,7 @@ package scala.tools.nsc.transform.patmat
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.language.postfixOps
+import scala.reflect.internal.util.Collections._
// naive CNF translation and simple DPLL solver
trait Solving extends Logic {
@@ -205,9 +206,8 @@ trait Solving extends Logic {
// SI-7020 Linked- for deterministic counter examples.
val pos = new mutable.LinkedHashSet[Sym]()
val neg = new mutable.LinkedHashSet[Sym]()
- f.foreach{_.foreach{ lit =>
- if (lit.pos) pos += lit.sym else neg += lit.sym
- }}
+ mforeach(f)(lit => if (lit.pos) pos += lit.sym else neg += lit.sym)
+
// appearing in both positive and negative
val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
// appearing only in either positive/negative positions
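mforeach comes from the newly imported scala.reflect.internal.util.Collections and is, roughly, a nested foreach over a nested collection, so the replaced code is equivalent to:

    f foreach (_ foreach { lit => if (lit.pos) pos += lit.sym else neg += lit.sym })

(assumed shape: def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit): Unit; only the behaviour matters here).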
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 263b5ad784..ec2b7d49f5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -21,32 +21,17 @@ trait MethodSynthesis {
import definitions._
import CODE._
- object synthesisUtil {
- type TT[T] = ru.TypeTag[T]
- type CT[T] = ClassTag[T]
-
- def newValOrDefDef(sym: Symbol, body: Tree) =
- if (sym.isLazy) ValDef(sym, body)
- else DefDef(sym, body)
-
- /** The annotations amongst those found on the original symbol which
- * should be propagated to this kind of accessor.
- */
- def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
- initial filter { ann =>
- // There are no meta-annotation arguments attached to `ann`
- if (ann.metaAnnotations.isEmpty) {
- // A meta-annotation matching `annotKind` exists on `ann`'s definition.
- (ann.defaultTargets contains category) ||
- // `ann`'s definition has no meta-annotations, and `keepClean` is true.
- (ann.defaultTargets.isEmpty && keepClean)
- }
- // There are meta-annotation arguments, and one of them matches `annotKind`
- else ann.metaAnnotations exists (_ matches category)
- }
+ /** The annotations amongst those found on the original symbol which
+ * should be propagated to this kind of accessor.
+ */
+ def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
+ def annotationFilter(ann: AnnotationInfo) = ann.metaAnnotations match {
+ case Nil if ann.defaultTargets.isEmpty => keepClean // no meta-annotations or default targets
+ case Nil => ann.defaultTargets contains category // default targets exist for ann
+ case metas => metas exists (_ matches category) // meta-annotations attached to ann
}
+ initial filter annotationFilter
}
- import synthesisUtil._
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
def mkThis = This(clazz) setPos clazz.pos.focus
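deriveAnnotations implements the standard scala.annotation.meta story for val/var accessors; an illustrative use (ordinary Scala, not part of this patch):

    import scala.annotation.meta.getter

    class Foo(@(deprecated @getter) val a: Int, // meta-annotation at the use site: kept only for the getter
              @deprecated val b: Int)           // no metas: placement falls back to the annotation's
                                                // declared default targets, or to keepClean if it has none

The three cases of annotationFilter correspond to exactly those situations: no metas and no default targets, no metas but declared default targets, and explicit meta-annotations.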
@@ -67,7 +52,10 @@ trait MethodSynthesis {
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
- localTyper typed newValOrDefDef(method, f(method))
+ localTyper typed (
+ if (method.isLazy) ValDef(method, f(method))
+ else DefDef(method, f(method))
+ )
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
val name1 = name.toTermName
@@ -105,7 +93,7 @@ trait MethodSynthesis {
def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
createMethod(name, List(IntTpe), returnType) { m =>
val arg0 = Ident(m.firstParam)
- val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
+ val default = DEFAULT ==> Throw(IndexOutOfBoundsExceptionClass.tpe_*, fn(arg0, nme.toString_))
val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
Match(arg0, cases)
@@ -393,18 +381,9 @@ trait MethodSynthesis {
}
}
case class Getter(tree: ValDef) extends BaseGetter(tree) {
- override def derivedSym = (
- if (mods.isDeferred) basisSym
- else basisSym.getter(enclClass)
- )
- // Range position errors ensue if we don't duplicate this in some
- // circumstances (at least: concrete vals with existential types.)
- private def tptOriginal = (
- if (mods.isDeferred) tree.tpt // keep type tree of original abstract field
- else tree.tpt.duplicate setPos tree.tpt.pos.focus // focused position of original tpt
- )
-
- override def derivedTree: DefDef = {
+ override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getter(enclClass)
+ private def derivedRhs = if (mods.isDeferred) EmptyTree else fieldSelection
+ private def derivedTpt = {
// For existentials, don't specify a type for the getter, even one derived
// from the symbol! This leads to incompatible existentials for the field and
// the getter. Let the typer do all the work. You might think "why only for
@@ -413,24 +392,16 @@ trait MethodSynthesis {
// starts compiling (instead of failing like it's supposed to) because the typer
// expects to be able to identify escaping locals in typedDefDef, and fails to
// spot that brand of them. In other words it's an artifact of the implementation.
- val tpt = atPos(derivedSym.pos.focus)(derivedSym.tpe.finalResultType match {
- case ExistentialType(_, _) => TypeTree()
- case _ if mods.isDeferred => TypeTree()
+ val tpt = derivedSym.tpe_*.finalResultType.widen match {
+ // Range position errors ensue if we don't duplicate this in some
+ // circumstances (at least: concrete vals with existential types.)
+ case ExistentialType(_, _) => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus)
+ case _ if mods.isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field
case tp => TypeTree(tp)
- })
- // TODO - reconcile this with the DefDef creator in Trees (which
- // at this writing presented no way to pass a tree in for tpt.)
- atPos(derivedSym.pos) {
- DefDef(
- Modifiers(derivedSym.flags),
- derivedSym.name.toTermName,
- Nil,
- Nil,
- tpt setOriginal tptOriginal,
- if (mods.isDeferred) EmptyTree else fieldSelection
- ) setSymbol derivedSym
}
+ tpt setPos tree.tpt.pos.focus
}
+ override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt)
}
/** Implements lazy value accessors:
* - for lazy values of type Unit and all lazy fields inside traits,
@@ -461,8 +432,8 @@ trait MethodSynthesis {
if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1
else gen.mkAssignAndReturn(basisSym, rhs1)
)
- derivedSym.setPos(tree.pos) // cannot set it at createAndEnterSymbol because basisSym can possible stil have NoPosition
- val ddefRes = atPos(tree.pos)(DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body)))
+ derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition
+ val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body))
// ValDef will have its position focused whereas DefDef will have original correct rangepos
// ideally positions would be correct at the creation time but lazy vals are really a special case
// here so for the sake of keeping api clean we fix positions manually in LazyValGetter
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 454f913412..347426d42a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -135,7 +135,8 @@ trait Namers extends MethodSynthesis {
setPrivateWithin(tree, sym, tree.mods)
def inConstructorFlag: Long = {
- val termOwnedContexts: List[Context] = context.enclosingContextChain.takeWhile(_.owner.isTerm)
+ val termOwnedContexts: List[Context] =
+ context.enclosingContextChain.takeWhile(c => c.owner.isTerm && !c.owner.isAnonymousFunction)
val constructorNonSuffix = termOwnedContexts exists (c => c.owner.isConstructor && !c.inConstructorSuffix)
val earlyInit = termOwnedContexts exists (_.owner.isEarlyInitialized)
if (constructorNonSuffix || earlyInit) INCONSTRUCTOR else 0L
@@ -1182,8 +1183,8 @@ trait Namers extends MethodSynthesis {
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
// match empty and missing parameter list
- case (Nil, List(Nil)) => Nil
- case (List(Nil), Nil) => ListOfNil
+ case (Nil, ListOfNil) => Nil
+ case (ListOfNil, Nil) => ListOfNil
case (_, paramss) => paramss
}
assert(
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 12d6bb2e6a..b706e1af6b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -296,8 +296,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
transformSelect
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) if tree.symbol.isMethodWithExtension =>
- treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
+ case DefDef(_, _, _, _, _, _) if tree.symbol.isMethodWithExtension =>
+ deriveDefDef(tree)(rhs => withInvalidOwner(transform(rhs)))
case TypeApply(sel @ Select(qual, name), args) =>
mayNeedProtectedAccessor(sel, args, goToSuper = true)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 3a188c0044..e44c83aa56 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -171,8 +171,8 @@ abstract class TreeCheckers extends Analyzer {
)
- def errorFn(msg: Any): Unit = Console.err println "[check: %s] %s".format(phase.prev, msg)
- def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
+ def errorFn(pos: Position, msg: Any): Unit = currentUnit.warning(pos, "[check: %s] %s".format(phase.prev, msg))
+ def errorFn(msg: Any): Unit = errorFn(NoPosition, msg)
def informFn(msg: Any) {
if (settings.verbose || settings.debug)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 629513ada3..e07c68de8a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -43,12 +43,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
final val shortenImports = false
+ // allows override of the behavior of the resetTyper method w.r.t comments
+ def resetDocComments() = {
+ clearDocComments()
+ }
+
def resetTyper() {
//println("resetTyper called")
resetContexts()
resetImplicits()
transformed.clear()
- clearDocComments()
+ resetDocComments()
}
object UnTyper extends Traverser {
@@ -363,7 +368,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
else if (owner == NoSymbol)
tree setType packSymbols(hiddenSymbols.reverse, tp1)
- else if (!phase.erasedTypes) { // privates
+ else if (!isPastTyper) { // privates
val badSymbol = hiddenSymbols.head
SymbolEscapesScopeError(tree, badSymbol)
} else tree
@@ -2103,7 +2108,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case PolyType(_, restpe) => paramssTypes(restpe)
case _ => Nil
}
- def resultType = meth.tpe.finalResultType
+ def resultType = meth.tpe_*.finalResultType
def nthParamPos(n1: Int, n2: Int) =
try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
@@ -2598,8 +2603,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
)
}
- val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
- val defdef = DefDef(methodSym, Modifiers(methodSym.flags), originals, rhs)
+ def newParam(param: Symbol): ValDef = {
+ val vd = ValDef(param, EmptyTree)
+ val tt @ TypeTree() = vd.tpt
+ tt setOriginal (originals(param) setPos param.pos.focus)
+ vd
+ }
+
+ val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
+ val defdef = newDefDef(methodSym, rhs)(vparamss = mapParamss(methodSym)(newParam), tpt = TypeTree(B1.tpe))
(defdef, matchResTp)
}
@@ -4866,7 +4878,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// Unfortunately implicit not found strings looks for all the world like
// missing interpolators.
def isArgToImplicitNotFound = context.enclosingApply.tree match {
- case Apply(fn, _) => fn.symbol.enclClass == ImplicitNotFoundClass
+ case Apply(fn, _) => fn.symbol != null && fn.symbol.enclClass == ImplicitNotFoundClass
case _ => false
}
tree.value match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 5049fec65b..af19e3cf80 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -46,11 +46,8 @@ trait Unapplies extends ast.TreeDSL {
def copyUntyped[T <: Tree](tree: T): T =
returning[T](tree.duplicate)(UnTyper traverse _)
- def copyUntypedInvariant(td: TypeDef): TypeDef = {
- val copy = treeCopy.TypeDef(td, td.mods &~ (COVARIANT | CONTRAVARIANT), td.name, td.tparams, td.rhs)
-
- returning[TypeDef](copy.duplicate)(UnTyper traverse _)
- }
+ def copyUntypedInvariant(td: TypeDef): TypeDef =
+ copyTypeDef(copyUntyped(td))(mods = td.mods &~ (COVARIANT | CONTRAVARIANT))
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 57ebe1b30d..2d905d5436 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -273,15 +273,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def parse(code: String): Tree = {
reporter.reset()
- val file = new BatchSourceFile("<toolbox>", code)
- val unit = new CompilationUnit(file)
- val parsed = newUnitParser(unit).parseStats()
+ val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStats())
throwIfErrors()
- parsed match {
- case Nil => EmptyTree
- case expr :: Nil => expr
- case stats :+ expr => Block(stats, expr)
- }
+ tree
}
def showAttributed(artifact: Any, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = {
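Note: the removed pattern match above is exactly the collapsing behaviour now delegated to gen.mkTreeOrBlock. A minimal standalone sketch of that logic, assuming simplified stand-in tree types (TreeOrBlockSketch, Stat, Block and EmptyTree here are illustrative, not the reflection API):

object TreeOrBlockSketch {
  sealed trait Tree
  case object EmptyTree extends Tree
  final case class Stat(label: String) extends Tree
  final case class Block(stats: List[Tree], expr: Tree) extends Tree

  // Collapse a list of parsed statements the way the removed match did.
  def mkTreeOrBlock(parsed: List[Tree]): Tree = parsed match {
    case Nil           => EmptyTree          // nothing parsed
    case expr :: Nil   => expr               // a single statement stays bare
    case stats :+ expr => Block(stats, expr) // several statements become a Block
  }
}

The quasiquote TermParser change further below relies on the same helper.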
@@ -303,7 +297,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// reporter doesn't accumulate errors, but the front-end does
def throwIfErrors() = {
if (frontEnd.hasErrors) throw ToolBoxError(
- "reflective compilation has failed: " + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL)
+ "reflective compilation has failed:" + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL)
)
}
}
@@ -325,7 +319,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings))
if (frontEnd.hasErrors) {
throw ToolBoxError(
- "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL +
+ "reflective compilation has failed: cannot initialize the compiler:" + EOL + EOL +
(frontEnd.infos map (_.msg) mkString EOL)
)
}
@@ -355,7 +349,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree = {
- val viewTpe = u.appliedType(u.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
+ val functionTypeCtor = u.definitions.FunctionClass(1).asClass.toTypeConstructor
+ val viewTpe = u.appliedType(functionTypeCtor, List(from, to))
inferImplicit(tree, viewTpe, isView = true, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 19888fa8d2..5a1a25cfa1 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -49,7 +49,7 @@ trait Parsers { self: Quasiquotes =>
def entryPoint: QuasiquoteParser => Tree
- class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) {
+ class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) { parser =>
def isHole: Boolean = isIdent && isHole(in.name)
def isHole(name: Name): Boolean = holeMap.contains(name)
@@ -73,7 +73,7 @@ trait Parsers { self: Quasiquotes =>
override def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), argtpes :+ restpe)
}
- import treeBuilder.{global => _, _}
+ import treeBuilder.{global => _, unit => _, _}
// q"def foo($x)"
override def allowTypelessParams = true
@@ -144,11 +144,7 @@ trait Parsers { self: Quasiquotes =>
}
object TermParser extends Parser {
- def entryPoint = _.templateStats() match {
- case Nil => EmptyTree
- case tree :: Nil => tree
- case stats :+ tree => Block(stats, tree)
- }
+ def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateStats()) }
}
object TypeParser extends Parser {
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index af4e34536c..c2d8bcdcd6 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -228,6 +228,8 @@ trait Reifiers { self: Quasiquotes =>
override def reifyTreeSyntactically(tree: Tree): Tree = tree match {
case RefTree(qual, SymbolPlaceholder(tree)) =>
mirrorBuildCall(nme.RefTree, reify(qual), tree)
+ case This(SymbolPlaceholder(tree)) =>
+ mirrorCall(nme.This, tree)
case _ =>
super.reifyTreeSyntactically(tree)
}
diff --git a/src/intellij/setup.sh b/src/intellij/setup.sh
index d0e1abeb96..bd324ba5bd 100755
--- a/src/intellij/setup.sh
+++ b/src/intellij/setup.sh
@@ -9,7 +9,7 @@ export BASE="$( cd "$( dirname "$0" )"/../.. && pwd )"
echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel."
read
-(rm *.ipr *.iml 2>/dev/null)
+(rm -f *.ipr *.iml 2>/dev/null)
for f in $(ls "$SCRIPT_DIR"/*.SAMPLE); do
NEW_FILE=`echo $f | perl -pe 's/.SAMPLE//'`;
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index bc6df9eb25..736a1e68c4 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -18,6 +18,19 @@ import scala.tools.nsc.typechecker.Analyzer
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
+import scala.tools.nsc.typechecker.Typers
+
+/**
+ * This trait allows the IDE to have an instance of the presentation
+ * compiler that does not clear the comments table at every new typer
+ * run (typer runs being frequent and closely spaced in this context).
+ */
+
+trait CommentPreservingTypers extends Typers {
+ self: Analyzer =>
+
+ override def resetDocComments() = {}
+}
trait InteractiveScaladocAnalyzer extends InteractiveAnalyzer with ScaladocAnalyzer {
val global : Global
diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala
index ea0a23fec9..3e84c83e55 100644
--- a/src/interactive/scala/tools/nsc/interactive/Response.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Response.scala
@@ -105,7 +105,3 @@ class Response[T] {
cancelled = false
}
}
-
-
-
-
diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala
index 12793e6aa1..cfbbf30793 100644
--- a/src/library/scala/DelayedInit.scala
+++ b/src/library/scala/DelayedInit.scala
@@ -8,7 +8,7 @@
package scala
-/** Classes and objects (but note, not traits) inheriting the `DelayedInit`
+/** Classes and objects (but note, not traits) inheriting the `DelayedInit`
* marker trait will have their initialization code rewritten as follows:
* `code` becomes `delayedInit(code)`.
*
@@ -32,7 +32,7 @@ package scala
* val c = new C
* }
* }}}
- *
+ *
* Should result in the following being printed:
* {{{
* dummy text, printed before initialization of C
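The hunk above only excerpts the Scaladoc example; reconstructed from the surrounding comment, the full example reads roughly as follows (the printed ordering is the one the doc describes):

trait Helper extends DelayedInit {
  def delayedInit(body: => Unit) = {
    println("dummy text, printed before initialization of C")
    body // run C's captured initialization code
  }
}

class C extends Helper {
  println("this is the initialization code of C")
}

object Test extends App {
  val c = new C
}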
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index 54cba021e0..e13aaad7bc 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Sun Mar 24 14:14:12 CET 2013
+// genprod generated these sources at: Sun Sep 15 20:42:00 CEST 2013
package scala
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 2e3de54c5a..620dcc19aa 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -32,7 +32,7 @@ package scala
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
-trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) +R] extends AnyRef { self =>
+trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the argument.
* @return the result of function application.
*/
diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala
index 07fa6e2e8d..7c28e6ea28 100644
--- a/src/library/scala/Proxy.scala
+++ b/src/library/scala/Proxy.scala
@@ -28,7 +28,7 @@ trait Proxy extends Any {
override def hashCode: Int = self.hashCode
override def equals(that: Any): Boolean = that match {
case null => false
- case _ =>
+ case _ =>
val x = that.asInstanceOf[AnyRef]
(x eq this.asInstanceOf[AnyRef]) || (x eq self.asInstanceOf[AnyRef]) || (x equals self)
}
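A minimal illustration of the delegation shown in this hunk, assuming a hypothetical Boxed wrapper: equality and hashing are forwarded to self.

class Boxed(val self: Int) extends Proxy

object ProxyDemo extends App {
  println(new Boxed(42) == new Boxed(42))        // true: compared through self
  println(new Boxed(42).hashCode == 42.hashCode) // true: hashCode of self
}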
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index adb6de6afd..65bee4c2cb 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -17,7 +17,7 @@ package scala.annotation
* order between Scala 2.7 and 2.8.
*
* @param message A message describing the change, which is emitted
- * by the compiler if the flag `-Xmigration` indicates a version
+ * by the compiler if the flag `-Xmigration` indicates a version
* prior to the changedIn version.
*
* @param changedIn The version, in which the behaviour change was
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 6592e49429..8a8af79151 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -102,7 +102,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
}
def iterator: Iterator[Int] = iteratorFrom(0)
-
+
override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] {
private var current = start
private val end = nwords * WordLength
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index 3d7427981d..d17a2de179 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -11,7 +11,6 @@ package collection
import generic._
-
/** A trait for all traversable collections which may possibly
* have their operations implemented in parallel.
*
@@ -28,12 +27,9 @@ extends GenMapLike[A, B, GenMap[A, B]]
def updated [B1 >: B](key: A, value: B1): GenMap[A, B1]
}
-
object GenMap extends GenMapFactory[GenMap] {
def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
/** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
}
-
-
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
index b700f49cf6..8705965992 100644
--- a/src/library/scala/collection/GenTraversable.scala
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -6,15 +6,11 @@
** |/ **
\* */
-
-
package scala
package collection
-
import generic._
-
/** A trait for all traversable collections which may possibly
* have their operations implemented in parallel.
*
@@ -31,10 +27,7 @@ extends GenTraversableLike[A, GenTraversable[A]]
def companion: GenericCompanion[GenTraversable] = GenTraversable
}
-
object GenTraversable extends GenTraversableFactory[GenTraversable] {
implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A] = Traversable.newBuilder
}
-
-
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index e4976d8f2c..a52f43bade 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -368,7 +368,7 @@ trait GenTraversableOnce[+A] extends Any {
* @param cmp An ordering to be used for comparing elements.
* @tparam B The result type of the function f.
* @param f The measuring function.
- * @return the first element of this $coll with the largest value measured by function f
+ * @return the first element of this $coll with the largest value measured by function f
* with respect to the ordering `cmp`.
*
* @usecase def maxBy[B](f: A => B): A
@@ -383,7 +383,7 @@ trait GenTraversableOnce[+A] extends Any {
* @param cmp An ordering to be used for comparing elements.
* @tparam B The result type of the function f.
* @param f The measuring function.
- * @return the first element of this $coll with the smallest value measured by function f
+ * @return the first element of this $coll with the smallest value measured by function f
* with respect to the ordering `cmp`.
*
* @usecase def minBy[B](f: A => B): A
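A quick concrete check of the behaviour the doc describes, namely that maxBy/minBy return the first element attaining the extreme value of f (the MaxByDemo object is illustrative only):

object MaxByDemo extends App {
  val words = List("kiwi", "fig", "banana", "cherry")
  println(words.maxBy(_.length)) // banana: first of the two length-6 words
  println(words.minBy(_.length)) // fig
}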
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index b043d1f2a6..f79b5afce9 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -171,7 +171,7 @@ self =>
* fewer elements than size.
*/
def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
-
+
/** Groups elements in fixed size blocks by passing a "sliding window"
* over them (as opposed to partitioning them, as is done in grouped.)
* @see [[scala.collection.Iterator]], method `sliding`
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 21bfedf5de..8635b090b9 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -91,7 +91,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
false
}
-
+
override /*IterableLike*/
def find(p: A => Boolean): Option[A] = {
var these = this
@@ -112,7 +112,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
acc
}
-
+
override /*IterableLike*/
def foldRight[B](z: B)(f: (A, B) => B): B =
if (this.isEmpty) z
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index f37c0993d4..761b65723c 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -52,7 +52,7 @@ object Map extends MapFactory[Map] {
def iterator = underlying.iterator
override def default(key: A): B = d(key)
}
-
+
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 0705a1e9e0..36e7eae79c 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -34,7 +34,7 @@ object SortedMap extends SortedMapFactory[SortedMap] {
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
-
+
private[collection] trait Default[A, +B] extends SortedMap[A, B] {
self =>
override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
@@ -43,15 +43,11 @@ object SortedMap extends SortedMapFactory[SortedMap] {
b += ((kv._1, kv._2))
b.result()
}
-
+
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
b.result()
}
}
-
}
-
-
-
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 3fc8b0dadc..cf5e9c36c7 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
package collection
@@ -74,7 +72,7 @@ self =>
for (e <- elems) m = m + e
m
}
-
+
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
@@ -82,7 +80,7 @@ self =>
override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p
override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
}
-
+
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
@@ -90,7 +88,7 @@ self =>
override def keysIteratorFrom(start: A) = self keysIteratorFrom start
override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f
}
-
+
/** Adds a number of elements provided by a traversable object
* and returns a new collection with the added elements.
*
@@ -98,14 +96,14 @@ self =>
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
-
+
/**
* Creates an iterator over all the key/value pairs
* contained in this map having a key greater than or
* equal to `start` according to the ordering of
* this map. x.iteratorFrom(y) is equivalent
* to but often more efficient than x.from(y).iterator.
- *
+ *
* @param start The lower bound (inclusive)
* on the keys to be returned
*/
@@ -114,15 +112,11 @@ self =>
* Creates an iterator over all the values contained in this
* map that are associated with a key greater than or equal to `start`
* according to the ordering of this map. x.valuesIteratorFrom(y) is
- * equivalent to but often more efficient than
+ * equivalent to but often more efficient than
* x.from(y).valuesIterator.
- *
+ *
* @param start The lower bound (inclusive)
* on the keys to be returned
*/
def valuesIteratorFrom(start: A): Iterator[B]
}
-
-
-
-
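The iteratorFrom/valuesIteratorFrom equivalences stated in the doc above, shown on a concrete immutable SortedMap (IteratorFromDemo is illustrative only):

import scala.collection.immutable.TreeMap

object IteratorFromDemo extends App {
  val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
  println(m.iteratorFrom(2).toList)       // List((2,b), (3,c))
  println(m.from(2).iterator.toList)      // same elements, via the ranged map
  println(m.valuesIteratorFrom(2).toList) // List(b, c)
}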
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index eb2ac38c59..c38ea1f3ce 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -47,7 +47,7 @@ self =>
* greater than or equal to `start` according to the ordering of
* this collection. x.iteratorFrom(y) is equivalent to but will usually
* be more efficient than x.from(y).iterator
- *
+ *
* @param start The lower-bound (inclusive) of the iterator
*/
def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start)
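The same equivalence for sorted sets, as stated in the doc above (SetIteratorFromDemo is illustrative only):

import scala.collection.immutable.TreeSet

object SetIteratorFromDemo extends App {
  val s = TreeSet(1, 3, 5, 7)
  println(s.iteratorFrom(4).toList)  // List(5, 7)
  println(s.from(4).iterator.toList) // List(5, 7)
}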
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 634807b29f..2fdad0f8f9 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -242,7 +242,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
var minF: B = null.asInstanceOf[B]
var minElem: A = null.asInstanceOf[A]
var first = true
-
+
for (elem <- self) {
val fx = f(elem)
if (first || cmp.lt(fx, minF)) {
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index c507e000ee..ca1c450e3f 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -211,5 +211,3 @@ trait TraversableViewLike[+A,
override def toString = viewToString
}
-
-
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index e75a0e2981..b6ebf2ff06 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -257,11 +257,3 @@ trait WrapAsJava {
}
object WrapAsJava extends WrapAsJava { }
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 34b20f23a2..fd1e18a029 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala
package collection
package generic
diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala
index 189aea4632..4c857ad1bb 100644
--- a/src/library/scala/collection/generic/IsSeqLike.scala
+++ b/src/library/scala/collection/generic/IsSeqLike.scala
@@ -50,7 +50,7 @@ object IsSeqLike {
val conversion = implicitly[String => SeqLike[Char, String]]
}
- implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } =
+ implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } =
new IsSeqLike[C[A0]] {
type A = A0
val conversion = conv
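A small use of the IsSeqLike witness defined above: one method that accepts any representation with such an instance (String, List[Int], ...). The firstTwo helper and IsSeqLikeDemo object are assumptions for illustration, not library code.

import scala.collection.generic.IsSeqLike

object IsSeqLikeDemo extends App {
  def firstTwo[Repr](xs: Repr)(implicit seq: IsSeqLike[Repr]): (seq.A, seq.A) = {
    val ops = seq.conversion(xs) // Repr => SeqLike[seq.A, Repr]
    (ops(0), ops(1))
  }

  println(firstTwo("hello"))       // (h,e)
  println(firstTwo(List(1, 2, 3))) // (1,2)
}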
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 486e2a115e..4486cea419 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -37,8 +37,3 @@ extends GenTraversableFactory[CC]
override def apply() = newBuilder[A]
}
}
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index b7412afde0..b5ec568667 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala
package collection
package generic
@@ -49,7 +48,3 @@ trait Shrinkable[-A] {
*/
def --=(xs: TraversableOnce[A]): this.type = { xs.seq foreach -= ; this }
}
-
-
-
-
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index e62eb6ff09..021d289c9d 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -10,13 +10,8 @@ package scala
package collection
package generic
-
import java.util.concurrent.atomic.AtomicInteger
-
-
-
-
/**
* A message interface serves as a unique interface to the
* part of the collection capable of receiving messages from
@@ -97,7 +92,6 @@ trait Signalling {
def tag: Int
}
-
/**
* This signalling implementation returns default values and ignores received signals.
*/
@@ -110,13 +104,11 @@ class DefaultSignalling extends Signalling with VolatileAbort {
def tag = -1
}
-
/**
* An object that returns default values and ignores received signals.
*/
object IdleSignalling extends DefaultSignalling
-
/**
* A mixin trait that implements abort flag behaviour using volatile variables.
*/
@@ -126,7 +118,6 @@ trait VolatileAbort extends Signalling {
override def abort() = abortflag = true
}
-
/**
* A mixin trait that implements index flag behaviour using atomic integers.
* The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater`
@@ -154,7 +145,6 @@ trait AtomicIndexFlag extends Signalling {
}
}
-
/**
* An implementation of the signalling interface using delegates.
*/
@@ -175,25 +165,12 @@ trait DelegatedSignalling extends Signalling {
def tag = signalDelegate.tag
}
-
/**
* Class implementing delegated signalling.
*/
class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling
-
/**
* Class implementing delegated signalling, but having its own distinct `tag`.
*/
class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg)
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index 3876da3275..ab0d443a03 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -79,14 +79,14 @@ trait Sorted[K, +This <: Sorted[K, This]] {
else
until(next)
}
-
+
/**
* Creates an iterator over all the keys(or elements) contained in this
* collection greater than or equal to `start`
- * according to the ordering of this collection. x.keysIteratorFrom(y)
- * is equivalent to but often more efficient than
+ * according to the ordering of this collection. x.keysIteratorFrom(y)
+ * is equivalent to but often more efficient than
* x.from(y).keysIterator.
- *
+ *
* @param start The lower bound (inclusive)
* on the keys to be returned
*/
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
index 42a03e90ee..ce34b84486 100755
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -50,13 +50,3 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
b.result()
}
}
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index b11368acdf..57618d64a5 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
package collection
package immutable
@@ -402,4 +400,3 @@ object List extends SeqFactory[List] {
/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
-
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 49295d92dd..59468a3186 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -157,12 +157,12 @@ extends AbstractMap[A, B]
* @return the value associated with the given key.
*/
override def apply(k: A): B1 = apply0(this, k)
-
-
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
+
+
+ @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
- else apply0(cur.tail, k)
+ else apply0(cur.tail, k)
/** Checks if this map maps `key` to a value and return the
* value if it exists.
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index f6041464e7..94a5b7929a 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -86,14 +86,14 @@ self =>
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
-
+
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B]
-
+
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 72c40e889f..c234d35756 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -65,7 +65,7 @@ extends scala.collection.AbstractSeq[Int]
|| (start < end && step < 0)
|| (start == end && !isInclusive)
)
- @deprecated("This method will be made private, use `length` instead.", "2.11")
+ @deprecated("This method will be made private, use `length` instead.", "2.11")
final val numRangeElements: Int = {
if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
else if (isEmpty) 0
@@ -77,7 +77,7 @@ extends scala.collection.AbstractSeq[Int]
}
@deprecated("This method will be made private, use `last` instead.", "2.11")
final val lastElement = start + (numRangeElements - 1) * step
- @deprecated("This method will be made private.", "2.11")
+ @deprecated("This method will be made private.", "2.11")
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 48bccde0e8..0dad106b29 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -367,7 +367,7 @@ object RedBlackTree {
private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = {
// This is like drop(n-1), but only counting black nodes
@tailrec
- def findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] =
+ def findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] =
if (zipper eq null) {
sys.error("Defect: unexpected empty zipper while computing range")
} else if (isBlackTree(zipper.head)) {
@@ -400,14 +400,14 @@ object RedBlackTree {
zippedTree
}
}
-
+
// Null optimized list implementation for tree rebalancing. null represents Nil.
private[this] final class NList[A](val head: A, val tail: NList[A])
private[this] final object NList {
-
+
def cons[B](x: B, xs: NList[B]): NList[B] = new NList(x, xs)
-
+
def foldLeft[A, B](xs: NList[A], z: B)(f: (B, A) => B): B = {
var acc = z
var these = xs
@@ -417,7 +417,7 @@ object RedBlackTree {
}
acc
}
-
+
}
/*
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 4b9fa81a8c..f1493551ab 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -83,7 +83,7 @@ self =>
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
- override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+ override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p
override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
}
@@ -91,7 +91,7 @@ self =>
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
- override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))}
+ override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))}
override def keysIteratorFrom(start : A) = self keysIteratorFrom start
override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f
}
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index ccab032cfd..c2eb85815d 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -71,9 +71,3 @@ extends SeqView[A, Coll]
override def stringPrefix = "StreamView"
}
-
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 4d2ec579db..8416b72ede 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
package collection
package immutable
@@ -194,7 +192,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
override def keysIterator: Iterator[A] = RB.keysIterator(tree)
override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
-
+
override def valuesIterator: Iterator[B] = RB.valuesIterator(tree)
override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start))
@@ -203,7 +201,3 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f)
}
-
-
-
-
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 1cdf150cb8..293faeca2d 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -107,7 +107,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- protected def findEntry(elem: A): Option[A] =
+ protected def findEntry(elem: A): Option[A] =
findElemImpl(elem) match {
case null => None
case entry => Some(entryToElem(entry))
@@ -136,10 +136,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
protected def addElem(elem: A) : Boolean = {
addEntry(elemToEntry(elem))
}
-
+
/**
* Add an entry (an elem converted to an entry via elemToEntry) if not yet in
- * table.
+ * table.
* @return Returns `true` if a new elem was added, `false` otherwise.
*/
protected def addEntry(newEntry : AnyRef) : Boolean = {
@@ -156,10 +156,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
nnSizeMapAdd(h)
if (tableSize >= threshold) growTable()
true
-
+
}
- /**
+ /**
* Removes an elem from the hash table returning true if the element was found (and thus removed)
* or false if it didn't exist.
*/
@@ -231,7 +231,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
if (table(i) != null && !containsElem(entryToElem(table(i))))
assert(assertion = false, i+" "+table(i)+" "+table.mkString)
}
-
+
/* Size map handling code */
@@ -374,7 +374,7 @@ private[collection] object FlatHashTable {
final def seedGenerator = new ThreadLocal[scala.util.Random] {
override def initialValue = new scala.util.Random
}
-
+
private object NullSentinel {
override def hashCode = 0
override def toString = "NullSentinel"
@@ -421,18 +421,18 @@ private[collection] object FlatHashTable {
val rotated = (improved >>> rotation) | (improved << (32 - rotation))
rotated
}
-
+
/**
* Elems have type A, but we store AnyRef in the table. Plus we need to deal with
* null elems, which need to be stored as NullSentinel
*/
- protected final def elemToEntry(elem : A) : AnyRef =
+ protected final def elemToEntry(elem : A) : AnyRef =
if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef]
-
+
/**
* Does the inverse translation of elemToEntry
*/
- protected final def entryToElem(entry : AnyRef) : A =
+ protected final def entryToElem(entry : AnyRef) : A =
(if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 3e64747832..dc6d319b45 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
-
package scala
package collection
package mutable
-
/** This class can be used as an adaptor to create mutable sets from
* immutable set implementations. Only method `empty` has
* to be redefined if the immutable set on which this mutable set is
@@ -49,6 +46,4 @@ extends AbstractSet[A]
def -=(elem: A): this.type = { set = set - elem; this }
override def clear(): Unit = { set = set.empty }
-
}
-
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 536f320402..b54f11be6e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -95,25 +95,25 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
else Iterator.empty.next()
}
-
+
protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
override def empty = LinkedHashMap.empty
}
-
+
override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
override def empty = LinkedHashMap.empty
}
-
+
override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
-
+
protected class DefaultKeySet extends super.DefaultKeySet {
override def empty = LinkedHashSet.empty
}
-
+
override def keySet: scala.collection.Set[A] = new DefaultKeySet
-
+
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
private var cur = firstEntry
def hasNext = cur ne null
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index d89566793f..cd51b79b65 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -81,7 +81,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
if (hasNext) { val res = cur.key; cur = cur.later; res }
else Iterator.empty.next()
}
-
+
override def foreach[U](f: A => U) {
var cur = firstEntry
while (cur ne null) {
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 5727b12975..a0d3ee0ef0 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
package collection
package mutable
@@ -151,10 +149,8 @@ extends AbstractSeq[A]
bf ++= seq
bf.result()
}
-
}
-
object MutableList extends SeqFactory[MutableList] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 7d776b99c3..15b3a6ceca 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -59,7 +59,7 @@ trait StackProxy[A] extends Stack[A] with Proxy {
self.push(elem)
this
}
-
+
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
* element on the stack.
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index 68df572517..abccf5d402 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -6,17 +6,13 @@
** |/ **
\* */
-
package scala
package collection.parallel
-
import scala.collection.Parallel
import scala.collection.mutable.Builder
import scala.collection.generic.Sizing
-
-
/** The base trait for all combiners.
* A combiner supports incremental collection construction just like
* a regular builder, but also implements an efficient merge operation of two builders
@@ -90,10 +86,8 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
val res = result()
setTaskSupport(res, combinerTaskSupport)
}
-
}
-
/*
private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
abstract override def result = {
@@ -102,12 +96,3 @@ private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combine
}
}
*/
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 1ec0ff9c32..5a7a5f5601 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -21,8 +21,6 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
import scala.language.implicitConversions
-
-
/** A template view of a non-strict view of a parallel iterable collection.
*
* '''Note:''' Regular view traits have type parameters used to carry information
@@ -190,16 +188,4 @@ self =>
protected[this] def newSubtask(p: IterableSplitter[T]) = new Force(cbf, p)
override def merge(that: Force[U, That]) = result = result combine that.result
}
-
}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index ee14324c41..d2b15c727a 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
package scala
package collection.parallel
-
-
-
import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
@@ -21,10 +17,6 @@ import scala.annotation.unchecked.uncheckedVariance
import scala.collection.generic.IdleSignalling
import scala.collection.generic.Signalling
-
-
-
-
/** A template trait for mutable parallel maps. This trait is to be mixed in
* with concrete parallel maps to override the representation type.
*
@@ -147,15 +139,3 @@ self =>
// note - should not override toMap (could be mutable)
}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index b4a30e5dc2..2c883ba8fe 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -42,12 +42,9 @@ trait ParSeq[+T] extends GenSeq[T]
override def stringPrefix = getClass.getSimpleName
}
-
object ParSeq extends ParFactory[ParSeq] {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
-
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
-
}
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index a50d2ae430..4e9a2e5751 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
package scala
package collection.parallel
-
-
import scala.collection.SetLike
import scala.collection.GenSetLike
import scala.collection.GenSet
@@ -45,26 +42,4 @@ extends GenSetLike[T, Repr]
def diff(that: GenSet[T]): Repr = sequentially {
_ diff that
}
-
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
index 2eb202ce05..4b22934a29 100644
--- a/src/library/scala/collection/parallel/PreciseSplitter.scala
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -9,10 +9,8 @@
package scala
package collection.parallel
-
import scala.collection.Seq
-
/** A precise splitter (or a precise split iterator) can be split into an arbitrary number of splitters
* that traverse disjoint subsets of arbitrary sizes.
*
@@ -56,10 +54,4 @@ trait PreciseSplitter[+T] extends Splitter[T] {
def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
def split: Seq[PreciseSplitter[T]]
-
}
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 84bb5e425b..9064018d46 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -6,18 +6,13 @@
** |/ **
\* */
-
package scala
package collection.parallel
-
-
import java.util.concurrent.ThreadPoolExecutor
import scala.concurrent.forkjoin.ForkJoinPool
import scala.concurrent.ExecutionContext
-
-
/** A trait implementing the scheduling of a parallel collection operation.
*
* Parallel collections are modular in the way operations are scheduled. Each
@@ -54,7 +49,6 @@ import scala.concurrent.ExecutionContext
*/
trait TaskSupport extends Tasks
-
/** A task support that uses a fork join pool to schedule tasks.
*
* @see [[scala.collection.parallel.TaskSupport]] for more information.
@@ -70,7 +64,6 @@ extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
-
/** A task support that uses an execution context to schedule tasks.
*
* It can be used with the default execution context implementation in the
@@ -86,22 +79,3 @@ extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
*/
class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global)
extends TaskSupport with ExecutionContextTasks
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 4aa11b25da..f8d0c6043a 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -9,18 +9,12 @@
package scala
package collection.parallel
-
-
import java.util.concurrent.ThreadPoolExecutor
-
import scala.concurrent.forkjoin._
import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
-
import scala.annotation.unchecked.uncheckedVariance
-
-
trait Task[R, +Tp] {
type Result = R
@@ -436,17 +430,14 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
}
def parallelismLevel = forkJoinPool.getParallelism
-
}
-
object ForkJoinTasks {
val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool() // scala.parallel.forkjoinpool
// defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors)
// defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors)
}
-
/* Some boilerplate due to no deep mixin composition. Not sure if it can be done differently without them.
*/
trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks {
@@ -457,7 +448,6 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS
}
def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
-
}
@deprecated("Use `AdaptiveWorkStealingForkJoinTasks` instead.", "2.11.0")
@@ -469,12 +459,9 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
}
def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
-
}
-
trait ExecutionContextTasks extends Tasks {
-
def executionContext = environment
val environment: ExecutionContext
@@ -494,16 +481,4 @@ trait ExecutionContextTasks extends Tasks {
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task
def parallelismLevel = driver.parallelismLevel
-
}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 854d0ba918..06455ba006 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -9,8 +9,6 @@
package scala
package collection.parallel.immutable
-
-
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.parallel.IterableSplitter
@@ -24,8 +22,6 @@ import scala.collection.immutable.{ HashMap, TrieIterator }
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.parallel.Task
-
-
/** Immutable parallel hash map, based on hash tries.
*
* $paralleliterableinfo
@@ -136,10 +132,8 @@ self =>
println("other kind of node")
}
}
-
}
-
/** $factoryInfo
* @define Coll `immutable.ParHashMap`
* @define coll immutable parallel hash map
@@ -158,7 +152,6 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0)
}
-
private[parallel] abstract class HashMapCombiner[K, V]
extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
@@ -331,30 +324,11 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
}
def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
-
}
-
private[parallel] object HashMapCombiner {
def apply[K, V] = new HashMapCombiner[K, V] {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
private[immutable] val rootbits = 5
private[immutable] val rootsize = 1 << 5
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index 6e98b3102d..f0502fbbcb 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
package scala
package collection
package parallel.immutable
-
import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParCompanion
@@ -36,7 +34,6 @@ extends scala.collection/*.immutable*/.GenSeq[T]
override def toSeq: ParSeq[T] = this
}
-
/** $factoryInfo
* @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
@@ -45,9 +42,5 @@ object ParSeq extends ParFactory[ParSeq] {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSeq[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T]
-
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T]
}
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 548e7112c7..c2c1d042e1 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -6,14 +6,10 @@
** |/ **
\* */
-
package scala
package collection
package parallel.immutable
-
-
-
import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory}
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
@@ -23,8 +19,6 @@ import immutable.Vector
import immutable.VectorBuilder
import immutable.VectorIterator
-
-
/** Immutable parallel vectors, based on vectors.
*
* $paralleliterableinfo
@@ -83,11 +77,8 @@ extends ParSeq[T]
splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
}
}
-
}
-
-
/** $factoryInfo
* @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
@@ -101,8 +92,6 @@ object ParVector extends ParFactory[ParVector] {
def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]]
}
-
-
private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] {
//self: EnvironmentPassingCombiner[T, ParVector[T]] =>
var sz = 0
@@ -136,11 +125,4 @@ private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[
vectors ++= that.vectors
this
}
-
}
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 42a3302c91..bb3737f18e 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
package scala
package collection.parallel
package mutable
-
-
import scala.collection.generic._
import scala.collection.mutable.DefaultEntry
import scala.collection.mutable.HashEntry
@@ -20,8 +17,6 @@ import scala.collection.mutable.HashTable
import scala.collection.mutable.UnrolledBuffer
import scala.collection.parallel.Task
-
-
/** A parallel hash map.
*
* `ParHashMap` is a parallel map which internally keeps elements within a hash table.
@@ -145,10 +140,8 @@ self =>
else ("Element " + e.key + " at " + i + " with " + elemHashCode(e.key) + " maps to " + index(elemHashCode(e.key))) :: check(e.next)
check(table(i))
}
-
}
-
/** $factoryInfo
* @define Coll `mutable.ParHashMap`
* @define coll parallel hash map
@@ -163,7 +156,6 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParHashMap[K, V]] = new CanCombineFromMap[K, V]
}
-
private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int)
extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with scala.collection.mutable.HashTable.HashUtils[K]
@@ -298,10 +290,8 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
}
def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
-
}
-
private[parallel] object ParHashMapCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
@@ -310,17 +300,3 @@ private[parallel] object ParHashMapCombiner {
def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index a6fada3d42..423b891d48 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -6,19 +6,13 @@
** |/ **
\* */
-
package scala
package collection
package parallel.mutable
-
-
-
import scala.collection.mutable.HashEntry
import scala.collection.parallel.IterableSplitter
-
-
/** Provides functionality for hash tables with linked list buckets,
* enriching the data structure by fulfilling certain requirements
* for their parallel construction and iteration.
@@ -146,11 +140,4 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
c
}
}
-
}
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index d96b5482fe..42027f5bac 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -10,8 +10,6 @@ package scala
package collection.parallel
package mutable
-
-
import scala.collection.generic._
import scala.collection.mutable.Cloneable
import scala.collection.generic.Growable
@@ -51,6 +49,4 @@ extends scala.collection.GenMapLike[K, V, Repr]
def -(key: K) = this.clone() -= key
def clear(): Unit
-
}
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 1cfc14b094..13af5ed649 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala
package collection
package parallel.mutable
@@ -49,43 +48,3 @@ self =>
// note: should not override toSet
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 82f2717132..a1dc37cec9 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -9,8 +9,6 @@
package scala
package collection.parallel.mutable
-
-
import scala.collection.generic._
import scala.collection.parallel.Combiner
import scala.collection.parallel.IterableSplitter
@@ -24,8 +22,6 @@ import scala.collection.concurrent.INode
import scala.collection.concurrent.TrieMap
import scala.collection.concurrent.TrieMapIterator
-
-
/** Parallel TrieMap collection.
*
* It has its bulk operations parallelized, but uses the snapshot operation
@@ -117,10 +113,8 @@ extends ParMap[K, V]
def shouldSplitFurther = howmany > 1
override def merge(that: Size) = result = result + that.result
}
-
}
-
private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean)
extends TrieMapIterator[K, V](lev, ct, mustInit)
with IterableSplitter[(K, V)]
@@ -155,7 +149,6 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
def remaining: Int = totalsize - iterated
}
-
/** Only used within the `ParTrieMap`. */
private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] {
@@ -173,24 +166,11 @@ private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrie
}
override def canBeShared = true
-
}
-
object ParTrieMap extends ParMapFactory[ParTrieMap] {
-
def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V]
-
def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V]
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V]
-
}
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index 652a23471f..dff83874ba 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -17,7 +17,7 @@ import scala.concurrent.duration.Duration
/**
* An object that may eventually be completed with a result value of type `T` which may be
* awaited using blocking methods.
- *
+ *
* The [[Await]] object provides methods that allow accessing the result of an `Awaitable`
* by blocking the current thread until the `Awaitable` has been completed or a timeout has
* occurred.
@@ -26,9 +26,9 @@ trait Awaitable[+T] {
/**
* Await the "completed" state of this `Awaitable`.
- *
+ *
* '''''This method should not be called directly; use [[Await.ready]] instead.'''''
- *
+ *
* @param atMost
* maximum wait time, which may be negative (no waiting is done),
* [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
@@ -41,12 +41,12 @@ trait Awaitable[+T] {
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type
-
+
/**
* Await and return the result (of type `T`) of this `Awaitable`.
- *
+ *
* '''''This method should not be called directly; use [[Await.result]] instead.'''''
- *
+ *
* @param atMost
* maximum wait time, which may be negative (no waiting is done),
* [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
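The Awaitable doc above insists on going through the Await object rather than calling ready/result directly; a minimal standalone sketch (not part of the patch, object and value names are illustrative) of that intended call pattern:

    import scala.concurrent.{Await, Future}
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object AwaitSketch {
      def main(args: Array[String]): Unit = {
        val f: Future[Int] = Future(21 * 2)
        // Await.result blocks the calling thread for at most the given duration
        // and rethrows the future's exception on failure.
        val n = Await.result(f, 2.seconds)
        println(n) // 42
      }
    }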
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 68513f9c80..fa264e5d7f 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -18,15 +18,15 @@ import scala.util.Try
*/
@implicitNotFound("Cannot find an implicit ExecutionContext, either require one yourself or import ExecutionContext.Implicits.global")
trait ExecutionContext {
-
+
/** Runs a block of code on this execution context.
*/
def execute(runnable: Runnable): Unit
-
+
/** Reports that an asynchronous computation failed.
*/
def reportFailure(@deprecatedName('t) cause: Throwable): Unit
-
+
/** Prepares for the execution of a task. Returns the prepared
* execution context. A valid implementation of `prepare` is one
* that simply returns `this`.
@@ -62,7 +62,7 @@ object ExecutionContext {
*/
implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
-
+
/** Creates an `ExecutionContext` from the given `ExecutorService`.
*/
def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService =
@@ -71,7 +71,7 @@ object ExecutionContext {
/** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter.
*/
def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
-
+
/** Creates an `ExecutionContext` from the given `Executor`.
*/
def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor =
@@ -80,7 +80,7 @@ object ExecutionContext {
/** Creates an `ExecutionContext` from the given `Executor` with the default Reporter.
*/
def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
-
+
/** The default reporter simply prints the stack trace of the `Throwable` to System.err.
*/
def defaultReporter: Throwable => Unit = _.printStackTrace()
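A minimal sketch (not part of the patch) of the factory methods documented in this hunk, wrapping a JDK pool and supplying an explicit failure reporter; the pool size and names are arbitrary:

    import java.util.concurrent.Executors
    import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService}

    object EcSketch {
      def main(args: Array[String]): Unit = {
        val pool = Executors.newFixedThreadPool(2)
        // The reporter plays the role of reportFailure for this context.
        val ec: ExecutionContextExecutorService =
          ExecutionContext.fromExecutorService(pool, t => t.printStackTrace())
        ec.execute(new Runnable { def run(): Unit = println("running on the pool") })
        ec.shutdown()
      }
    }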
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 411b89701b..4e3f3c6c81 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -254,7 +254,7 @@ trait Future[+T] extends Awaitable[T] {
case Success(v) => try f(v) match {
// If possible, link DefaultPromises to avoid space leaks
case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
- case fut => fut onComplete p.complete
+ case fut => fut onComplete p.complete
} catch { case NonFatal(t) => p failure t }
}
p.future
@@ -473,6 +473,13 @@ object Future {
*/
def successful[T](result: T): Future[T] = Promise.successful(result).future
+ /** Creates an already completed Future with the specified result or exception.
+ *
+   * @tparam T       the type of the value in the future
+ * @return the newly created `Future` object
+ */
+ def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future
+
/** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
*
* The result becomes available once the asynchronous computation is completed.
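Assuming the `Future.fromTry` added above, a short usage sketch (not part of the patch): it lifts an already-computed `Try` into an already-completed `Future` without needing an `ExecutionContext`:

    import scala.concurrent.Future
    import scala.util.Try

    object FromTrySketch {
      def parse(s: String): Future[Int] = Future.fromTry(Try(s.toInt))

      def main(args: Array[String]): Unit = {
        println(parse("42").value)   // Some(Success(42))
        println(parse("oops").value) // Some(Failure(java.lang.NumberFormatException: ...))
      }
    }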
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index cdde019cd0..eb8044ed3b 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -70,7 +70,7 @@ trait Promise[T] {
other onComplete { this complete _ }
this
}
-
+
/** Attempts to complete this promise with the specified future, once that future is completed.
*
* @return This promise
@@ -115,38 +115,32 @@ trait Promise[T] {
def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause))
}
-
-
object Promise {
-
/** Creates a promise object which can be completed with a value.
- *
+ *
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]()
/** Creates an already completed Promise with the specified exception.
- *
+ *
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
- def failed[T](exception: Throwable): Promise[T] = new impl.Promise.KeptPromise[T](Failure(exception))
+ def failed[T](exception: Throwable): Promise[T] = fromTry(Failure(exception))
/** Creates an already completed Promise with the specified result.
- *
+ *
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
- def successful[T](result: T): Promise[T] = new impl.Promise.KeptPromise[T](Success(result))
-
-}
-
-
-
-
-
-
-
-
+ def successful[T](result: T): Promise[T] = fromTry(Success(result))
+ /** Creates an already completed Promise with the specified result or exception.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
+ */
+ def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.KeptPromise[T](result)
+}
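With `failed` and `successful` now thin wrappers over `fromTry`, client code can complete promises uniformly from a `Try`; a small sketch (not part of the patch, names illustrative):

    import scala.concurrent.Promise
    import scala.util.{Failure, Success}

    object PromiseFromTrySketch {
      def main(args: Array[String]): Unit = {
        val ok  = Promise.fromTry(Success(1)).future
        val bad = Promise.fromTry[Int](Failure(new IllegalStateException("boom"))).future
        println(ok.value)  // Some(Success(1))
        println(bad.value) // Some(Failure(java.lang.IllegalStateException: boom))
      }
    }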
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 98c212d9fa..1ea23b35e8 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -24,5 +24,4 @@ private[scala] trait TaskRunner {
def execute[S](task: Task[S]): Unit
def shutdown(): Unit
-
}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 9a8844b489..1b50b7fa56 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -221,7 +221,7 @@ object Duration {
final def toMinutes: Long = fail("toMinutes")
final def toHours: Long = fail("toHours")
final def toDays: Long = fail("toDays")
-
+
final def toCoarsest: Duration = this
}
@@ -532,7 +532,7 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] {
* Duration(48, HOURS).toCoarsest // Duration(2, DAYS)
* Duration(5, SECONDS).toCoarsest // Duration(5, SECONDS)
* }}}
- */
+ */
def toCoarsest: Duration
}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 35511856ee..418d859d79 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -155,7 +155,7 @@ private[concurrent] object Promise {
/** Get the root promise for this promise, compressing the link chain to that
* promise if necessary.
- *
+ *
* For promises that are not linked, the result of calling
   * `compressedRoot()` will be the promise itself. However, for linked promises,
* this method will traverse each link until it locates the root promise at
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 50a66a622a..2fe14a9c1a 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -19,9 +19,9 @@ package object concurrent {
type TimeoutException = java.util.concurrent.TimeoutException
/** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
- *
+ *
* The result becomes available once the asynchronous computation is completed.
- *
+ *
* @tparam T the type of the result
* @param body the asynchronous computation
* @param executor the execution context on which the future is run
@@ -30,7 +30,7 @@ package object concurrent {
def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body)
/** Creates a promise object which can be completed with a value or an exception.
- *
+ *
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
@@ -38,7 +38,7 @@ package object concurrent {
/** Used to designate a piece of code which potentially blocks, allowing the current [[BlockContext]] to adjust
* the runtime's behavior.
- * Properly marking blocking code may improve performance or avoid deadlocks.
+ * Properly marking blocking code may improve performance or avoid deadlocks.
*
* Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`.
*
@@ -53,22 +53,22 @@ package object concurrent {
package concurrent {
@implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.")
sealed trait CanAwait
-
+
/**
* Internal usage only, implementation detail.
*/
private[concurrent] object AwaitPermission extends CanAwait
-
+
/**
* `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances.
*/
object Await {
/**
* Await the "completed" state of an `Awaitable`.
- *
+ *
* Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
* the underlying [[ExecutionContext]] is prepared to properly manage the blocking.
- *
+ *
* @param awaitable
* the `Awaitable` to be awaited
* @param atMost
@@ -84,13 +84,13 @@ package concurrent {
@throws(classOf[InterruptedException])
def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type =
blocking(awaitable.ready(atMost)(AwaitPermission))
-
+
/**
* Await and return the result (of type `T`) of an `Awaitable`.
- *
+ *
* Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
   * the underlying [[ExecutionContext]] is able to properly detect blocking and ensure that there are no deadlocks.
- *
+ *
* @param awaitable
* the `Awaitable` to be awaited
* @param atMost
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 832c7b23f9..1c87a1f421 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -41,7 +41,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
takeWhile (_ != -1)
map (_.toChar)
)
-
+
private def decachedReader: BufferedReader = {
// Don't want to lose a buffered char sitting in iter either. Yes,
// this is ridiculous, but if I can't get rid of Source, and all the
@@ -61,7 +61,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
}
else charReader
}
-
+
class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] {
private val lineReader = decachedReader
@@ -84,7 +84,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
}
override def getLines(): Iterator[String] = new BufferedLineIterator
-
+
/** Efficiently converts the entire remaining input into a string. */
override def mkString = {
// Speed up slurping of whole data set in the simplest cases.
diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala
index 6eb4899e3f..c8fb262a08 100644
--- a/src/library/scala/ref/WeakReference.scala
+++ b/src/library/scala/ref/WeakReference.scala
@@ -29,7 +29,7 @@ object WeakReference {
/** Optionally returns the referenced value, or `None` if that value no longer exists */
def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = {
- val x = wr.underlying.get
+ val x = wr.underlying.get
if (x != null) Some(x) else None
}
}
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index d699e34ffc..33c5cee783 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -28,7 +28,7 @@ import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
* scala> mkArray("Japan","Brazil","Germany")
* res1: Array[String] = Array(Japan, Brazil, Germany)
* }}}
- *
+ *
* See [[scala.reflect.api.TypeTags]] for more examples, or the
* [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]]
* for more details.
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index 8d68017a6f..178280cb46 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -9,6 +9,6 @@
package scala.runtime
-abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) +R] extends Function1[T1, R] {
+abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] {
}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index e3516bc4d9..7129f22f60 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -25,7 +25,7 @@ import scala.annotation.unspecialized
* @author Pavel Pavlov
* @since 2.10
*/
-abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self =>
+abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self =>
// this method must be overridden for better performance,
// for backwards compatibility, fall back to the one inherited from PartialFunction
// this assumes the old-school partial functions override the apply method, though
@@ -35,3 +35,15 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
// let's not make it final so as not to confuse anyone
/*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
}
+
+// Manual stand-ins for formerly specialized variations.
+// Not comprehensive, only sufficient to run scalacheck built against Scala 2.11.0-M5
+// TODO Scala 2.10.0.M6 Remove this once scalacheck is published against M6.
+private[runtime] abstract class AbstractPartialFunction$mcIL$sp extends scala.runtime.AbstractPartialFunction[Any, Int] {
+ override def apply(x: Any): Int = apply$mcIL$sp(x)
+ def apply$mcIL$sp(x: Any): Int = applyOrElse(x, PartialFunction.empty)
+}
+private[runtime] abstract class AbstractPartialFunction$mcFL$sp extends scala.runtime.AbstractPartialFunction[Any, Float] {
+  override def apply(x: Any): Float = apply$mcFL$sp(x)
+  def apply$mcFL$sp(x: Any): Float = applyOrElse(x, PartialFunction.empty)
+}
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
index 855f0ff41a..933444773d 100644
--- a/src/library/scala/runtime/Boxed.scala
+++ b/src/library/scala/runtime/Boxed.scala
@@ -6,14 +6,7 @@
** |/ **
\* */
-
-
package scala
package runtime
-
-trait Boxed {
-
-}
-
-
+trait Boxed { }
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index 16b2fec6d7..a926956acf 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -9,7 +9,6 @@
package scala
package runtime
-
import scala.util.control.ControlThrowable
class NonLocalReturnControl[@specialized T](val key: AnyRef, val value: T) extends ControlThrowable {
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
index 2a0064494b..d86f8873aa 100644
--- a/src/library/scala/runtime/WorksheetSupport.scala
+++ b/src/library/scala/runtime/WorksheetSupport.scala
@@ -91,4 +91,3 @@ object WorksheetSupport {
}
class StopException extends Exception
-
diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala
index 8ff7c582b4..ec87439093 100644
--- a/src/library/scala/transient.scala
+++ b/src/library/scala/transient.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
import scala.annotation.meta._
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 8b63a73638..13f2362d00 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -147,7 +147,7 @@ private[scala] trait PropertiesTrait {
// See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
// the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
/** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */
- def isMac = osName startsWith "Mac OS X"
+ def isMac = osName startsWith "Mac OS X"
/* Some runtime values. */
private[scala] def isAvian = javaVmName contains "Avian"
diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala
index bea216eb17..c612732329 100644
--- a/src/library/scala/volatile.scala
+++ b/src/library/scala/volatile.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
import scala.annotation.meta._
diff --git a/src/partest-extras/scala/tools/partest/IcodeComparison.scala b/src/partest-extras/scala/tools/partest/IcodeComparison.scala
index 5da51c9d58..7122703918 100644
--- a/src/partest-extras/scala/tools/partest/IcodeComparison.scala
+++ b/src/partest-extras/scala/tools/partest/IcodeComparison.scala
@@ -6,7 +6,6 @@
package scala.tools.partest
import scala.tools.partest.nest.FileManager.compareContents
-import scala.compat.Platform.EOL
/** A class for testing icode. All you need is this in a
* partest source file --
@@ -41,7 +40,7 @@ abstract class IcodeComparison extends DirectTest {
override def extraSettings: String = "-usejavacp"
/** Compile the test code and return the contents of all
- * (sorted) .icode files, which are immediately deleted.
+ * (sorted) .icode files, which are immediately deleted.
* @param arg0 at least one arg is required
* @param args must include -Xprint-icode:phase
*/
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 8a1d2f7f1d..3d5a213f2f 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -166,6 +166,9 @@ trait FlagSets { self: Universe =>
/** Flag indicating that tree represents a variable or a member initialized to the default value */
val DEFAULTINIT: FlagSet
+
+ /** Flag indicating that tree was generated by the compiler */
+ val SYNTHETIC: FlagSet
}
/** The empty set of flags
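Assuming the `SYNTHETIC` flag exposed above, reflection and macro clients can test modifiers for compiler-generated definitions through the public API; a minimal sketch (not part of the patch):

    import scala.reflect.runtime.universe._

    object SyntheticFlagSketch {
      def isSynthetic(mods: Modifiers): Boolean = mods hasFlag Flag.SYNTHETIC

      def main(args: Array[String]): Unit = {
        println(isSynthetic(Modifiers(Flag.SYNTHETIC))) // true
        println(isSynthetic(Modifiers(Flag.PRIVATE)))   // false
      }
    }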
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index bbfebcb434..e255d305f7 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -214,29 +214,35 @@ trait StandardDefinitions {
/** The module symbol of module `scala.Some`. */
def SomeModule: ModuleSymbol
- /** The array of class symbols for classes `scala.ProductX`.
+ /** Function-like object that maps arity to symbols for classes `scala.ProductX`.
* - 0th element is `Unit`
* - 1st element is `Product1`
* - ...
* - 22nd element is `Product22`
+   *  - 23rd element is `NoSymbol`
+ * - ...
*/
- def ProductClass : Array[ClassSymbol]
+ def ProductClass: Int => Symbol
- /** The array of class symbols for classes `scala.FunctionX`.
+ /** Function-like object that maps arity to symbols for classes `scala.FunctionX`.
* - 0th element is `Function0`
* - 1st element is `Function1`
* - ...
* - 22nd element is `Function22`
+   *  - 23rd element is `NoSymbol`
+ * - ...
*/
- def FunctionClass : Array[ClassSymbol]
+ def FunctionClass: Int => Symbol
- /** The array of class symbols for classes `scala.TupleX`.
+ /** Function-like object that maps arity to symbols for classes `scala.TupleX`.
* - 0th element is `NoSymbol`
   *  - 1st element is `Tuple1`
   *  - ...
   *  - 22nd element is `Tuple22`
+  *  - 23rd element is `NoSymbol`
+ * - ...
*/
- def TupleClass: Array[Symbol] // cannot make it Array[ClassSymbol], because TupleClass(0) is supposed to be NoSymbol. weird
+ def TupleClass: Int => Symbol
/** Contains Scala primitive value classes:
* - Byte
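Under the new `Int => Symbol` signatures above, out-of-range arities answer `NoSymbol` instead of failing; a small sketch (not part of the patch; the printed renderings are approximate):

    import scala.reflect.runtime.universe._

    object ArityLookupSketch {
      def main(args: Array[String]): Unit = {
        println(definitions.FunctionClass(2))  // roughly: class Function2
        println(definitions.FunctionClass(42)) // <none>, i.e. NoSymbol
        println(definitions.TupleClass(0))     // <none>, i.e. NoSymbol
      }
    }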
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 443f34ccae..7a627bc875 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -2210,13 +2210,13 @@ trait Trees { self: Universe =>
* Flattens directly nested blocks.
* @group Factories
*/
- @deprecated("Use the canonical Block constructor, explicitly specifying its expression if necessary. Flatten directly nested blocks manually if needed", "2.10.1")
+ @deprecated("Use q\"{..$stats}\" instead. Flatten directly nested blocks manually if needed", "2.10.1")
def Block(stats: Tree*): Block
/** A factory method for `CaseDef` nodes.
* @group Factories
*/
- @deprecated("Use the canonical CaseDef constructor passing EmptyTree for guard", "2.10.1")
+ @deprecated("Use cq\"$pat => $body\" instead", "2.10.1")
def CaseDef(pat: Tree, body: Tree): CaseDef
/** A factory method for `Bind` nodes.
@@ -2228,50 +2228,50 @@ trait Trees { self: Universe =>
/** A factory method for `Try` nodes.
* @group Factories
*/
- @deprecated("Use canonical CaseDef constructors to to create exception catching expressions and then wrap them in Try", "2.10.1")
+ @deprecated("Convert cases into casedefs and use q\"try $body catch { case ..$newcases }\" instead", "2.10.1")
def Try(body: Tree, cases: (Tree, Tree)*): Try
/** A factory method for `Throw` nodes.
* @group Factories
*/
- @deprecated("Use the canonical New constructor to create an object instantiation expression and then wrap it in Throw", "2.10.1")
+ @deprecated("Use q\"throw new $tpe(..$args)\" instead", "2.10.1")
def Throw(tpe: Type, args: Tree*): Throw
/** Factory method for object creation `new tpt(args_1)...(args_n)`
* A `New(t, as)` is expanded to: `(new t).<init>(as)`
* @group Factories
*/
- @deprecated("Use Apply(...Apply(Select(New(tpt), nme.CONSTRUCTOR), args1)...argsN) instead", "2.10.1")
+ @deprecated("Use q\"new $tpt(...$argss)\" instead", "2.10.1")
def New(tpt: Tree, argss: List[List[Tree]]): Tree
/** 0-1 argument list new, based on a type.
* @group Factories
*/
- @deprecated("Use New(TypeTree(tpe), args.toList) instead", "2.10.1")
+ @deprecated("Use q\"new $tpe(..$args)\" instead", "2.10.1")
def New(tpe: Type, args: Tree*): Tree
/** 0-1 argument list new, based on a symbol.
* @group Factories
*/
- @deprecated("Use New(sym.toType, args) instead", "2.10.1")
+ @deprecated("Use q\"new ${sym.toType}(..$args)\" instead", "2.10.1")
def New(sym: Symbol, args: Tree*): Tree
/** A factory method for `Apply` nodes.
* @group Factories
*/
- @deprecated("Use Apply(Ident(sym), args.toList) instead", "2.10.1")
+ @deprecated("Use q\"$sym(..$args)\" instead", "2.10.1")
def Apply(sym: Symbol, args: Tree*): Tree
/** 0-1 argument list new, based on a type tree.
* @group Factories
*/
- @deprecated("Use Apply(Select(New(tpt), nme.CONSTRUCTOR), args) instead", "2.10.1")
+ @deprecated("Use q\"new $tpt(..$args)\" instead", "2.10.1")
def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
/** A factory method for `Super` nodes.
* @group Factories
*/
- @deprecated("Use Super(This(sym), mix) instead", "2.10.1")
+ @deprecated("Use q\"$sym.super[$mix].x\".qualifier instead", "2.10.1")
def Super(sym: Symbol, mix: TypeName): Tree
/** A factory method for `This` nodes.
@@ -2283,7 +2283,7 @@ trait Trees { self: Universe =>
* The string `name` argument is assumed to represent a [[scala.reflect.api.Names#TermName `TermName`]].
* @group Factories
*/
- @deprecated("Use Select(tree, newTermName(name)) instead", "2.10.1")
+ @deprecated("Use Select(tree, TermName(name)) instead", "2.10.1")
def Select(qualifier: Tree, name: String): Select
/** A factory method for `Select` nodes.
@@ -2294,7 +2294,7 @@ trait Trees { self: Universe =>
/** A factory method for `Ident` nodes.
* @group Factories
*/
- @deprecated("Use Ident(newTermName(name)) instead", "2.10.1")
+ @deprecated("Use Ident(TermName(name)) instead", "2.10.1")
def Ident(name: String): Ident
/** A factory method for `Ident` nodes.
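The new deprecation messages above point to quasiquotes; a small sketch (not part of the patch; the spliced trees are arbitrary examples) of the suggested replacements for the old `New` and `CaseDef` factories:

    import scala.reflect.runtime.universe._

    object QuasiquoteSketch {
      def main(args: Array[String]): Unit = {
        // Old: New(tpe, args: _*)  ->  q"new $tpe(..$args)"
        val args    = List(q"16")
        val newTree = q"new java.lang.StringBuilder(..$args)"
        println(show(newTree))
        // Old: CaseDef(pat, body)  ->  cq"$pat => $body"
        val cd = cq"s: String => s.length"
        println(show(cd))
      }
    }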
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 06a6e10c30..de24b88397 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -3,9 +3,10 @@ package reflect
package internal
import Flags._
+import util._
trait BuildUtils { self: SymbolTable =>
- import definitions.{TupleClass, FunctionClass, MaxTupleArity, MaxFunctionArity, ScalaPackage, UnitClass}
+ import definitions.{TupleClass, FunctionClass, ScalaPackage, UnitClass}
class BuildImpl extends BuildApi {
@@ -271,32 +272,30 @@ trait BuildUtils { self: SymbolTable =>
}
}
private object TupleClassRef extends ScalaMemberRef {
- val symbols = TupleClass.filter { _ != null }.toSeq
+ val symbols = TupleClass.seq
}
private object TupleCompanionRef extends ScalaMemberRef {
- val symbols = TupleClassRef.symbols.map { _.companionModule }
+ val symbols = TupleClass.seq.map { _.companionModule }
}
private object UnitClassRef extends ScalaMemberRef {
val symbols = Seq(UnitClass)
}
private object FunctionClassRef extends ScalaMemberRef {
- val symbols = FunctionClass.toSeq
+ val symbols = FunctionClass.seq
}
object SyntacticTuple extends SyntacticTupleExtractor {
def apply(args: List[Tree]): Tree = args match {
case Nil => Literal(Constant(()))
case _ =>
- require(args.length <= MaxTupleArity, s"Tuples with arity bigger than $MaxTupleArity aren't supported")
+        require(TupleClass(args.length).exists, s"Tuples with arity ${args.length} aren't supported")
self.Apply(TupleClass(args.length).companionModule, args: _*)
}
def unapply(tree: Tree): Option[List[Tree]] = tree match {
case Literal(Constant(())) =>
Some(Nil)
- case Apply(TupleCompanionRef(sym), args)
- if args.length <= MaxTupleArity
- && sym == TupleClass(args.length).companionModule =>
+ case Apply(TupleCompanionRef(sym), args) if sym == TupleClass(args.length).companionModule =>
Some(args)
case _ =>
None
@@ -307,15 +306,14 @@ trait BuildUtils { self: SymbolTable =>
def apply(args: List[Tree]): Tree = args match {
case Nil => self.Select(self.Ident(nme.scala_), tpnme.Unit)
case _ =>
- require(args.length <= MaxTupleArity, s"Tuples with arity bigger than $MaxTupleArity aren't supported")
+        require(TupleClass(args.length).exists, s"Tuples with arity ${args.length} aren't supported")
AppliedTypeTree(Ident(TupleClass(args.length)), args)
}
def unapply(tree: Tree): Option[List[Tree]] = tree match {
case UnitClassRef(_) =>
Some(Nil)
- case AppliedTypeTree(TupleClassRef(sym), args)
- if args.length <= MaxTupleArity && sym == TupleClass(args.length) =>
+ case AppliedTypeTree(TupleClassRef(sym), args) if sym == TupleClass(args.length) =>
Some(args)
case _ =>
None
@@ -324,13 +322,12 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticFunctionType extends SyntacticFunctionTypeExtractor {
def apply(argtpes: List[Tree], restpe: Tree): Tree = {
- require(argtpes.length <= MaxFunctionArity + 1, s"Function types with arity bigger than $MaxFunctionArity aren't supported")
+      require(FunctionClass(argtpes.length).exists, s"Function types with arity ${argtpes.length} aren't supported")
gen.mkFunctionTypeTree(argtpes, restpe)
}
def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
- case AppliedTypeTree(FunctionClassRef(sym), args @ (argtpes :+ restpe))
- if args.length - 1 <= MaxFunctionArity && sym == FunctionClass(args.length - 1) =>
+ case AppliedTypeTree(FunctionClassRef(sym), args @ (argtpes :+ restpe)) if sym == FunctionClass(args.length - 1) =>
Some((argtpes, restpe))
case _ => None
}
@@ -368,7 +365,7 @@ trait BuildUtils { self: SymbolTable =>
def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
case SyntacticApplied(Select(New(SyntacticTypeApplied(ident, targs)), nme.CONSTRUCTOR), argss) =>
Some((Nil, SyntacticApplied(SyntacticTypeApplied(ident, targs), argss) :: Nil, emptyValDef, Nil))
- case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, List(Nil), earlyDefs, parents, selfdef, body) ::
+ case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfdef, body) ::
Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) =>
Some((earlyDefs, parents, selfdef, body))
case _ =>
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 90a1ab39d5..f8af4f155d 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -285,6 +285,10 @@ trait Definitions extends api.StandardDefinitions {
lazy val StringTpe = StringClass.tpe
lazy val ThrowableTpe = ThrowableClass.tpe
+ lazy val ConstantTrue = ConstantType(Constant(true))
+ lazy val ConstantFalse = ConstantType(Constant(false))
+ lazy val ConstantNull = ConstantType(Constant(null))
+
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
lazy val AnyRefModule = getMemberModule(ScalaPackageClass, nme.AnyRef)
@@ -585,30 +589,29 @@ trait Definitions extends api.StandardDefinitions {
def hasJavaMainMethod(sym: Symbol): Boolean =
(sym.tpe member nme.main).alternatives exists isJavaMainMethod
- // Product, Tuple, Function, AbstractFunction
- private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = {
- val list = countFrom to arity map (i => getRequiredClass("scala." + name + i))
- list.toArray
- }
- def prepend[S >: ClassSymbol : ClassTag](elem0: S, elems: Array[ClassSymbol]): Array[S] = elem0 +: elems
-
- private def aritySpecificType[S <: Symbol](symbolArray: Array[S], args: List[Type], others: Type*): Type = {
- val arity = args.length
- if (arity >= symbolArray.length) NoType
- else appliedType(symbolArray(arity), args ++ others: _*)
+ class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends (Int => Symbol) {
+ private val offset = countFrom - init.size
+ private def isDefinedAt(i: Int) = i < seq.length + offset && i >= offset
+ val seq: IndexedSeq[ClassSymbol] = (init ++: countFrom.to(maxArity).map { i => getRequiredClass("scala." + name + i) }).toVector
+ def apply(i: Int) = if (isDefinedAt(i)) seq(i - offset) else NoSymbol
+ def specificType(args: List[Type], others: Type*): Type = {
+ val arity = args.length
+ if (!isDefinedAt(arity)) NoType
+ else appliedType(apply(arity), args ++ others: _*)
+ }
}
val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22
- lazy val ProductClass: Array[ClassSymbol] = prepend(UnitClass, mkArityArray("Product", MaxProductArity, 1))
- lazy val TupleClass: Array[Symbol] = prepend(null, mkArityArray("Tuple", MaxTupleArity, 1))
- lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
- lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
+ lazy val ProductClass = new VarArityClass("Product", MaxProductArity, countFrom = 1, init = Some(UnitClass))
+ lazy val TupleClass = new VarArityClass("Tuple", MaxTupleArity, countFrom = 1)
+ lazy val FunctionClass = new VarArityClass("Function", MaxFunctionArity)
+ lazy val AbstractFunctionClass = new VarArityClass("runtime.AbstractFunction", MaxFunctionArity)
/** Creators for TupleN, ProductN, FunctionN. */
- def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems)
- def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe)
- def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe)
+ def tupleType(elems: List[Type]) = TupleClass.specificType(elems)
+ def functionType(formals: List[Type], restpe: Type) = FunctionClass.specificType(formals, restpe)
+ def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe)
def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match {
case ByteClass => nme.wrapByteArray
@@ -625,12 +628,11 @@ trait Definitions extends api.StandardDefinitions {
else nme.genericWrapArray
}
- // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional?
- def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
- def isProductNClass(sym: Symbol) = ProductClass contains sym
+ def isTupleSymbol(sym: Symbol) = TupleClass.seq contains unspecializedSymbol(sym)
+ def isProductNClass(sym: Symbol) = ProductClass.seq contains sym
def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j))
- def isFunctionSymbol(sym: Symbol) = FunctionClass contains unspecializedSymbol(sym)
- def isProductNSymbol(sym: Symbol) = ProductClass contains unspecializedSymbol(sym)
+ def isFunctionSymbol(sym: Symbol) = FunctionClass.seq contains unspecializedSymbol(sym)
+ def isProductNSymbol(sym: Symbol) = ProductClass.seq contains unspecializedSymbol(sym)
def unspecializedSymbol(sym: Symbol): Symbol = {
if (sym hasFlag SPECIALIZED) {
@@ -695,6 +697,19 @@ trait Definitions extends api.StandardDefinitions {
case NullaryMethodType(restpe) => restpe
case _ => tp
}
+
+ /** An implementation of finalResultType which does only what
+ * finalResultType is documented to do. Defining it externally to
+ * Type helps ensure people can't come to depend on accidental
+ * aspects of its behavior. This is all of it!
+ */
+ def finalResultType(tp: Type): Type = tp match {
+ case PolyType(_, restpe) => finalResultType(restpe)
+ case MethodType(_, restpe) => finalResultType(restpe)
+ case NullaryMethodType(restpe) => finalResultType(restpe)
+ case _ => tp
+ }
+
def abstractFunctionForFunctionType(tp: Type) = {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
@@ -1220,7 +1235,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val symbolsNotPresentInBytecode = syntheticCoreClasses ++ syntheticCoreMethods ++ hijackedCoreClasses
/** Is the symbol that of a parent which is added during parsing? */
- lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
+ lazy val isPossibleSyntheticParent = ProductClass.seq.toSet[Symbol] + ProductRootClass + SerializableClass
private lazy val boxedValueClassesSet = boxedClass.values.toSet[Symbol] + BoxedUnitClass
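A standalone sketch (not compiler code; `Sym`/`NoSym` are illustrative stand-ins) of the `VarArityClass` pattern introduced above: a total function from arity to symbol that falls back to a sentinel instead of indexing out of bounds. The real VarArityClass additionally accepts an `init` element (used to seat UnitClass at Product arity 0); the sketch omits that detail.

    object VarAritySketch {
      sealed trait Sym { def exists: Boolean = this ne NoSym }
      case object NoSym extends Sym
      final case class ClassSym(name: String) extends Sym

      class VarArity(name: String, maxArity: Int, countFrom: Int = 0) extends (Int => Sym) {
        private val seq: IndexedSeq[Sym] =
          (countFrom to maxArity).map(i => ClassSym(s"scala.$name$i")).toVector
        def apply(i: Int): Sym =
          if (i >= countFrom && i <= maxArity) seq(i - countFrom) else NoSym
      }

      def main(args: Array[String]): Unit = {
        val TupleC = new VarArity("Tuple", 22, countFrom = 1)
        println(TupleC(2))  // ClassSym(scala.Tuple2)
        println(TupleC(23)) // NoSym
        println(TupleC(0))  // NoSym
      }
    }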
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index 961adb2c57..84825ff2da 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -42,5 +42,6 @@ trait FlagSets extends api.FlagSets { self: SymbolTable =>
val DEFAULTPARAM : FlagSet = Flags.DEFAULTPARAM
val PRESUPER : FlagSet = Flags.PRESUPER
val DEFAULTINIT : FlagSet = Flags.DEFAULTINIT
+ val SYNTHETIC : FlagSet = Flags.SYNTHETIC
}
}
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index b8e3407824..c286ea53c6 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -118,7 +118,7 @@ class ModifierFlags {
final val PRESUPER = 1L << 37 // value is evaluated before super call
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
- final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
+ final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
/** Symbols which are marked ARTIFACT. (Expand this list?)
*
@@ -440,7 +440,7 @@ class Flags extends ModifierFlags {
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
case ARTIFACT => "<artifact>" // (1L << 46)
- case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
+ case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
case 0x4000000000000L => "" // (1L << 50)
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index ed248d6e1e..f998d95349 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -90,6 +90,8 @@ trait Names extends api.Names {
*/
final def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
def body = {
+ require(offset >= 0, "offset must be non-negative, got " + offset)
+ require(len >= 0, "length must be non-negative, got " + len)
val h = hashValue(cs, offset, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 8d20c8e546..485d4d5ddd 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -460,6 +460,4 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
class ErrorScope(owner: Symbol) extends Scope
private final val maxRecursions = 1000
-
}
-
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 686ebf5a1e..6407a3979c 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -413,32 +413,43 @@ trait StdNames {
@deprecated("Use Name#getterName", "2.11.0") def getterName(name: TermName): TermName = name.getterName
@deprecated("Use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName
+ /**
+ * Convert `Tuple2$mcII` to `Tuple2`, or `T1$sp` to `T1`.
+ */
def unspecializedName(name: Name): Name = (
- if (name endsWith SPECIALIZED_SUFFIX)
- name.subName(0, name.lastIndexOf('m') - 1)
+ // DUPLICATED LOGIC WITH `splitSpecializedName`
+ if (name endsWith SPECIALIZED_SUFFIX) {
+ val idxM = name.lastIndexOf('m')
+ val to = (if (idxM > 0) idxM - 1 else name.length - SPECIALIZED_SUFFIX.length)
+ name.subName(0, to)
+ }
else name
)
/** Return the original name and the types on which this name
* is specialized. For example,
* {{{
- * splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
+ * splitSpecializedName("foo$mIcD$sp") == ('foo', "D", "I")
* }}}
* `foo$mIcD$sp` is the name of a method specialized on two type
* parameters, the first one belonging to the method itself, on Int,
* and another one belonging to the enclosing class, on Double.
+ *
+ * @return (unspecializedName, class tparam specializations, method tparam specializations)
*/
- def splitSpecializedName(name: Name): (Name, String, String) =
- if (name endsWith SPECIALIZED_SUFFIX) {
- val name1 = name dropRight SPECIALIZED_SUFFIX.length
- val idxC = name1 lastIndexOf 'c'
- val idxM = name1 lastIndexOf 'm'
-
- (name1.subName(0, idxM - 1),
- name1.subName(idxC + 1, name1.length).toString,
- name1.subName(idxM + 1, idxC).toString)
- } else
- (name, "", "")
+ def splitSpecializedName(name: Name): (Name, String, String) = {
+ // DUPLICATED LOGIC WITH `unspecializedName`
+ if (name endsWith SPECIALIZED_SUFFIX) {
+ val name1 = name dropRight SPECIALIZED_SUFFIX.length
+ val idxC = name1 lastIndexOf 'c'
+ val idxM = name1 lastIndexOf 'm'
+ if (idxC > idxM && idxM > 0)
+ (name1.subName(0, idxM - 1), name1.subName(idxC + 1, name1.length).toString, name1.subName(idxM + 1, idxC).toString)
+ else
+ (name.subName(0, name.length - SPECIALIZED_SUFFIX.length), "", "")
+ }
+ else (name, "", "")
+ }
// Nominally, name$default$N, encoded for <init>
def defaultGetterName(name: Name, pos: Int): TermName = (
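A standalone sketch (plain Strings instead of compiler `Name`s; the `$sp` suffix is hard-coded here) of the splitting logic documented above, reproducing the `("foo", "D", "I")` example:

    object SplitSpecializedSketch {
      val Suffix = "$sp"

      def split(name: String): (String, String, String) =
        if (name endsWith Suffix) {
          val n1   = name dropRight Suffix.length
          val idxC = n1 lastIndexOf 'c'
          val idxM = n1 lastIndexOf 'm'
          if (idxC > idxM && idxM > 0)
            (n1.substring(0, idxM - 1), n1.substring(idxC + 1), n1.substring(idxM + 1, idxC))
          else (name dropRight Suffix.length, "", "")
        }
        else (name, "", "")

      def main(args: Array[String]): Unit = {
        println(split("foo$mIcD$sp")) // (foo,D,I): class part "D", method part "I"
        println(split("T1$sp"))       // (T1,,)
      }
    }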
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 07fa6fb317..a8af3e0f0e 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -3,6 +3,7 @@ package reflect
package internal
import Flags._
+import util._
abstract class TreeGen extends macros.TreeBuilder {
val global: SymbolTable
@@ -279,7 +280,7 @@ abstract class TreeGen extends macros.TreeBuilder {
/** Builds a tuple */
def mkTuple(elems: List[Tree]): Tree =
- if (elems.isEmpty) Literal(Constant(()))
+ if (elems.isEmpty) mkUnit()
else Apply(
Select(mkAttributedRef(TupleClass(elems.length).caseModule), nme.apply),
elems)
@@ -325,8 +326,7 @@ abstract class TreeGen extends macros.TreeBuilder {
* body
* }
*/
- def mkTemplate(parents: List[Tree], self: ValDef, constrMods: Modifiers,
- vparamss: List[List[ValDef]], body: List[Tree], superPos: Position = NoPosition): Template = {
+ def mkTemplate(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position = NoPosition): Template = {
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
@@ -355,8 +355,9 @@ abstract class TreeGen extends macros.TreeBuilder {
if (body forall treeInfo.isInterfaceMember) None
else Some(
atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), List(Nil), TypeTree(), Block(lvdefs, Literal(Constant())))))
- } else {
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
+ }
+ else {
      // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1
@@ -396,45 +397,41 @@ abstract class TreeGen extends macros.TreeBuilder {
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
*/
- def mkNew(parents: List[Tree], self: ValDef, stats: List[Tree],
- npos: Position, cpos: Position): Tree =
- if (parents.isEmpty)
- mkNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty) {
- // `Parsers.template` no longer differentiates tpts and their argss
- // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
- // instead of parents = Ident(C), argss = Nil as before
- // this change works great for things that are actually templates
- // but in this degenerate case we need to perform postprocessing
- val app = treeInfo.dissectApplied(parents.head)
- atPos(npos union cpos) { New(app.callee, app.argss) }
- } else {
- val x = tpnme.ANON_CLASS_NAME
- atPos(npos union cpos) {
- Block(
- List(
- atPos(cpos) {
- ClassDef(
- Modifiers(FINAL), x, Nil,
- mkTemplate(parents, self, NoMods, List(Nil), stats, cpos.focus))
- }),
- atPos(npos) {
- New(
- Ident(x) setPos npos.focus,
- Nil)
- }
- )
- }
+ def mkNew(parents: List[Tree], self: ValDef, stats: List[Tree], npos: Position, cpos: Position): Tree = {
+ def enclosingPos = wrappingPos(cpos, parents ::: List(self) ::: stats)
+ def upos = cpos union npos
+ def anonTemplate = atPos(cpos)(mkTemplate(parents, self, NoMods, ListOfNil, stats, cpos))
+ def anonClass = atPos(anonTemplate.pos.makeTransparent)(ClassDef(Modifiers(FINAL), tpnme.ANON_CLASS_NAME, Nil, anonTemplate))
+ def anonNew = atPos(npos)(New(Ident(tpnme.ANON_CLASS_NAME) setPos cpos.focus, Nil))
+
+ // `Parsers.template` no longer differentiates tpts and their argss
+ // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
+ // instead of parents = Ident(C), argss = Nil as before
+ // this change works great for things that are actually templates
+ // but in this degenerate case we need to perform postprocessing
+ parents match {
+ case Nil => mkNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
+ case treeInfo.AppliedArgs(callee, argss) :: Nil if stats.isEmpty => atPos(upos)(New(callee, argss))
+ case _ => atPos(upos)(mkBlock(anonClass :: anonNew :: Nil))
}
+ }
/** Create a tree representing the function type (argtpes) => restpe */
def mkFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+ def mkUnit() = Literal(Constant(()))
+
/** Create block of statements `stats` */
- def mkBlock(stats: List[Tree]): Tree =
- if (stats.isEmpty) Literal(Constant(()))
- else if (!stats.last.isTerm) Block(stats, Literal(Constant(())))
- else if (stats.length == 1) stats.head
- else Block(stats.init, stats.last)
+ def mkBlock(stats: List[Tree]): Tree = stats match {
+ case stats if stats.isEmpty || !stats.last.isTerm => mkBlock(stats :+ mkUnit())
+ case stat :: Nil => stat
+ case stats => Block(stats.init, stats.last)
+ }
+
+ def mkTreeOrBlock(stats: List[Tree]) = stats match {
+ case Nil => EmptyTree
+ case head :: Nil => head
+ case _ => gen.mkBlock(stats)
+ }
}
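A standalone sketch (toy tree type, not the compiler's `Tree`) of the `mkBlock` normalization above: pad non-expression endings with `()`, collapse single statements, otherwise build a block:

    object MkBlockSketch {
      sealed trait T { def isTerm: Boolean = true }
      case class Lit(v: Any) extends T
      case class Blk(stats: List[T], expr: T) extends T
      case class Def(name: String) extends T { override def isTerm = false }

      def mkUnit(): T = Lit(())

      def mkBlock(stats: List[T]): T = stats match {
        case s if s.isEmpty || !s.last.isTerm => mkBlock(s :+ mkUnit()) // pad with ()
        case stat :: Nil                      => stat                   // single statement: no block
        case s                                => Blk(s.init, s.last)
      }

      def main(args: Array[String]): Unit = {
        println(mkBlock(Nil))                    // Lit(())
        println(mkBlock(List(Lit(1))))           // Lit(1)
        println(mkBlock(List(Def("f"), Lit(2)))) // Blk(List(Def(f)),Lit(2))
        println(mkBlock(List(Def("f"))))         // Blk(List(Def(f)),Lit(()))
      }
    }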
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 34fe0afb1a..1b763b8632 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -773,6 +773,13 @@ abstract class TreeInfo {
unapply(dissectApplied(tree))
}
+ object AppliedArgs {
+ def unapply(tree: Tree): Some[(Tree, List[List[Tree]])] = tree match {
+ case Apply(AppliedArgs(fn, argss), args) => Some((fn, argss :+ args))
+ case _ => Some((tree, Nil))
+ }
+ }
+
/** Locates the synthetic Apply node corresponding to an extractor's call to
* unapply (unwrapping nested Applies) and returns the fun part of that Apply.
*/
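A standalone sketch (toy tree type) of the `AppliedArgs` extractor above: it peels nested applications `f(a)(b)` into the callee plus its argument lists, and always matches:

    object AppliedArgsSketch {
      sealed trait T
      case class Ident(name: String) extends T
      case class Apply(fn: T, args: List[T]) extends T

      object AppliedArgs {
        def unapply(t: T): Some[(T, List[List[T]])] = t match {
          case Apply(AppliedArgs(fn, argss), args) => Some((fn, argss :+ args))
          case _                                   => Some((t, Nil))
        }
      }

      def main(args: Array[String]): Unit = {
        val tree = Apply(Apply(Ident("f"), List(Ident("a"))), List(Ident("b")))
        val AppliedArgs(fn, argss) = tree
        println(fn)    // Ident(f)
        println(argss) // List(List(Ident(a)), List(Ident(b)))
      }
    }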
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 84818a6f42..2163a26b84 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -589,6 +589,14 @@ trait Trees extends api.Trees { self: SymbolTable =>
object TypeTree extends TypeTreeExtractor
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
+ private def TypeTreeMemberType(sym: Symbol): TypeTree = {
+ // Needed for pos/t4970*.scala. See SI-7853
+ val resType = (sym.owner.thisType memberType sym).finalResultType
+ atPos(sym.pos.focus)(TypeTree(resType))
+ }
+
+ def TypeBoundsTree(bounds: TypeBounds): TypeBoundsTree = TypeBoundsTree(TypeTree(bounds.lo), TypeTree(bounds.hi))
+ def TypeBoundsTree(sym: Symbol): TypeBoundsTree = atPos(sym.pos)(TypeBoundsTree(sym.info.bounds))
override type TreeCopier <: InternalTreeCopierOps
abstract class InternalTreeCopierOps extends TreeCopierOps {
@@ -1013,15 +1021,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
ModuleDef(Modifiers(sym.flags), sym.name.toTermName, impl) setSymbol sym
}
- def ValDef(sym: Symbol, rhs: Tree): ValDef =
- atPos(sym.pos) {
- ValDef(Modifiers(sym.flags), sym.name.toTermName,
- TypeTree(sym.tpe) setPos sym.pos.focus,
- rhs) setSymbol sym
- }
-
- def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
-
trait CannotHaveAttrs extends Tree {
override def canHaveAttrs = false
@@ -1041,50 +1040,44 @@ trait Trees extends api.Trees { self: SymbolTable =>
object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
- def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
- atPos(sym.pos) {
- assert(sym != NoSymbol)
- DefDef(mods,
- sym.name.toTermName,
- sym.typeParams map TypeDef,
- vparamss,
- TypeTree(sym.tpe.finalResultType) setPos sym.pos.focus,
- rhs) setSymbol sym
- }
-
- def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
- DefDef(sym, Modifiers(sym.flags), vparamss, rhs)
-
- def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
- DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
-
- /** A DefDef with original trees attached to the TypeTree of each parameter */
- def DefDef(sym: Symbol, mods: Modifiers, originalParamTpts: Symbol => Tree, rhs: Tree): DefDef = {
- val paramms = mapParamss(sym){ sym =>
- val vd = ValDef(sym, EmptyTree)
- (vd.tpt : @unchecked) match {
- case tt: TypeTree => tt setOriginal (originalParamTpts(sym) setPos sym.pos.focus)
- }
- vd
- }
- DefDef(sym, mods, paramms, rhs)
- }
-
- def DefDef(sym: Symbol, rhs: Tree): DefDef =
- DefDef(sym, Modifiers(sym.flags), rhs)
-
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef =
- DefDef(sym, rhs(sym.info.paramss))
-
- /** A TypeDef node which defines given `sym` with given tight hand side `rhs`. */
- def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
- atPos(sym.pos) {
- TypeDef(Modifiers(sym.flags), sym.name.toTypeName, sym.typeParams map TypeDef, rhs) setSymbol sym
- }
+ def newValDef(sym: Symbol, rhs: Tree)(
+ mods: Modifiers = Modifiers(sym.flags),
+ name: TermName = sym.name.toTermName,
+ tpt: Tree = TypeTreeMemberType(sym)
+ ): ValDef = (
+ atPos(sym.pos)(ValDef(mods, name, tpt, rhs)) setSymbol sym
+ )
+
+ def newDefDef(sym: Symbol, rhs: Tree)(
+ mods: Modifiers = Modifiers(sym.flags),
+ name: TermName = sym.name.toTermName,
+ tparams: List[TypeDef] = sym.typeParams map TypeDef,
+ vparamss: List[List[ValDef]] = mapParamss(sym)(ValDef),
+ tpt: Tree = TypeTreeMemberType(sym)
+ ): DefDef = (
+ atPos(sym.pos)(DefDef(mods, name, tparams, vparamss, tpt, rhs)) setSymbol sym
+ )
+
+ def newTypeDef(sym: Symbol, rhs: Tree)(
+ mods: Modifiers = Modifiers(sym.flags),
+ name: TypeName = sym.name.toTypeName,
+ tparams: List[TypeDef] = sym.typeParams map TypeDef
+ ): TypeDef = (
+ atPos(sym.pos)(TypeDef(mods, name, tparams, rhs)) setSymbol sym
+ )
+
+ def DefDef(sym: Symbol, rhs: Tree): DefDef = newDefDef(sym, rhs)()
+ def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef = newDefDef(sym, rhs)(vparamss = vparamss)
+ def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef = newDefDef(sym, rhs)(mods = mods)
+ def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef = newDefDef(sym, rhs)(mods = mods, vparamss = vparamss)
+ def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = newDefDef(sym, rhs(sym.info.paramss))()
+
+ def ValDef(sym: Symbol): ValDef = newValDef(sym, EmptyTree)()
+ def ValDef(sym: Symbol, rhs: Tree): ValDef = newValDef(sym, rhs)()
/** A TypeDef node which defines abstract type or type parameter for given `sym` */
- def TypeDef(sym: Symbol): TypeDef =
- TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
+ def TypeDef(sym: Symbol): TypeDef = newTypeDef(sym, TypeBoundsTree(sym))()
+ def TypeDef(sym: Symbol, rhs: Tree): TypeDef = newTypeDef(sym, rhs)()
def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
atPos(sym.pos) {
@@ -1617,6 +1610,25 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ /** Tracks the classes currently under construction during a transform */
+ trait UnderConstructionTransformer extends Transformer {
+ import collection.mutable
+
+ protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz
+
+ /** The stack of class symbols in which a call to this() or to the super
+ * constructor, or early definition is active */
+ private val selfOrSuperCalls = mutable.Stack[Symbol]()
+
+ abstract override def transform(tree: Tree) = {
+ if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree)) {
+ selfOrSuperCalls push currentOwner.owner
+ try super.transform(tree)
+ finally selfOrSuperCalls.pop()
+ } else super.transform(tree)
+ }
+ }
+
def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree
// ------ copiers -------------------------------------------
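The rewrite above replaces a family of overloads with one `new*` constructor per node kind, using named defaults derived from the symbol; a standalone sketch of that pattern (toy types, not compiler API):

    object FactorySketch {
      case class ValDef(mods: Long, name: String, tpt: String, rhs: String)

      def newValDef(sym: String, rhs: String)(
        mods: Long   = 0L,
        name: String = sym,
        tpt: String  = "<inferred>"
      ): ValDef = ValDef(mods, name, tpt, rhs)

      // The historical overloads survive as thin forwarders:
      def ValDefOf(sym: String): ValDef              = newValDef(sym, "<empty>")()
      def ValDefOf(sym: String, rhs: String): ValDef = newValDef(sym, rhs)()

      def main(args: Array[String]): Unit = {
        println(ValDefOf("x", "42"))            // defaults filled in from the "symbol"
        println(newValDef("y", "0")(mods = 1L)) // callers override just what they need
      }
    }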
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index fd64d98ca2..b70d3bd970 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -7,7 +7,7 @@ package scala
package reflect
package internal
-import util.shortClassOfInstance
+import util._
trait TypeDebugging {
self: SymbolTable =>
@@ -39,17 +39,17 @@ trait TypeDebugging {
def skipType(tpe: Type): Boolean = (tpe eq null) || skipSym(tpe.typeSymbolDirect)
def skip(t: Tree): Boolean = t match {
- case EmptyTree => true
- case PackageDef(_, _) => true
- case t: RefTree => skipRefTree(t)
- case TypeBoundsTree(lo, hi) => skip(lo) && skip(hi)
- case Block(Nil, expr) => skip(expr)
- case Apply(fn, Nil) => skip(fn)
- case Block(stmt :: Nil, expr) => skip(stmt) && skip(expr)
- case DefDef(_, nme.CONSTRUCTOR, Nil, Nil :: Nil, _, rhs) => skip(rhs)
- case Literal(Constant(())) => true
- case tt @ TypeTree() => skipType(tt.tpe)
- case _ => skipSym(t.symbol)
+ case EmptyTree => true
+ case PackageDef(_, _) => true
+ case t: RefTree => skipRefTree(t)
+ case TypeBoundsTree(lo, hi) => skip(lo) && skip(hi)
+ case Block(Nil, expr) => skip(expr)
+ case Apply(fn, Nil) => skip(fn)
+ case Block(stmt :: Nil, expr) => skip(stmt) && skip(expr)
+ case DefDef(_, nme.CONSTRUCTOR, Nil, ListOfNil, _, rhs) => skip(rhs)
+ case Literal(Constant(())) => true
+ case tt @ TypeTree() => skipType(tt.tpe)
+ case _ => skipSym(t.symbol)
}
def apply(t: Tree) = skip(t)
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index ad9001ca4e..dd6917814e 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -144,7 +144,6 @@ trait Types
override def isErroneous = underlying.isErroneous
override def isStable: Boolean = underlying.isStable
override def isVolatile = underlying.isVolatile
- override def finalResultType = underlying.finalResultType
override def paramSectionCount = underlying.paramSectionCount
override def paramss = underlying.paramss
override def params = underlying.params
@@ -189,7 +188,6 @@ trait Types
override def deconst = maybeRewrap(underlying.deconst)
override def resultType = maybeRewrap(underlying.resultType)
override def resultType(actuals: List[Type]) = maybeRewrap(underlying.resultType(actuals))
- override def finalResultType = maybeRewrap(underlying.finalResultType)
override def paramSectionCount = 0
override def paramss: List[List[Symbol]] = List()
override def params: List[Symbol] = List()
@@ -440,7 +438,7 @@ trait Types
/** For a curried/nullary method or poly type its non-method result type,
* the type itself for all other types */
- def finalResultType: Type = this
+ final def finalResultType: Type = definitions finalResultType this
/** For a method type, the number of its value parameter sections,
* 0 for all other types */
@@ -1240,7 +1238,6 @@ trait Types
if (pre.isOmittablePrefix) pre.fullName + ".type"
else prefixString + "type"
}
-
/*
override def typeOfThis: Type = typeSymbol.typeOfThis
override def bounds: TypeBounds = TypeBounds(this, this)
@@ -2564,8 +2561,6 @@ trait Types
//TODO this may be generalised so that the only constraint is dependencies are acyclic
def approximate: MethodType = MethodType(params, resultApprox)
- override def finalResultType: Type = resultType.finalResultType
-
override def safeToString = paramString(this) + resultType
override def cloneInfo(owner: Symbol) = {
@@ -2592,7 +2587,6 @@ trait Types
override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
override def prefix: Type = resultType.prefix
override def narrow: Type = resultType.narrow
- override def finalResultType: Type = resultType.finalResultType
override def termSymbol: Symbol = resultType.termSymbol
override def typeSymbol: Symbol = resultType.typeSymbol
override def parents: List[Type] = resultType.parents
@@ -2642,7 +2636,6 @@ trait Types
override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
override def narrow: Type = resultType.narrow
override def isVolatile = resultType.isVolatile
- override def finalResultType: Type = resultType.finalResultType
/** @M: typeDefSig wraps a TypeBounds in a PolyType
* to represent a higher-kinded type parameter
@@ -2842,9 +2835,6 @@ trait Types
// but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
- private val ConstantTrue = ConstantType(Constant(true))
- private val ConstantFalse = ConstantType(Constant(false))
-
@inline final def trace[T](action: String, msg: => String)(value: T): T = {
if (traceTypeVars) {
val s = msg match {
@@ -3066,13 +3056,13 @@ trait Types
// ignore subtyping&equality checks while true -- see findMember
// OPT: This could be Either[TypeVar, Boolean], but this encoding was chosen instead to save allocations.
- private var _suspended: Type = TypeVar.ConstantFalse
+ private var _suspended: Type = ConstantFalse
private[Types] def suspended: Boolean = (_suspended: @unchecked) match {
- case TypeVar.ConstantFalse => false
- case TypeVar.ConstantTrue => true
- case tv: TypeVar => tv.suspended
+ case ConstantFalse => false
+ case ConstantTrue => true
+ case tv: TypeVar => tv.suspended
}
- private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) TypeVar.ConstantTrue else TypeVar.ConstantFalse
+ private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) ConstantTrue else ConstantFalse
// SI-7785 Link the suspended attribute of a TypeVar created in, say, a TypeMap (e.g. AsSeenFrom) to its originator
private[Types] def linkSuspended(origin: TypeVar): Unit = _suspended = origin
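
The Types.scala hunks above remove the scattered per-subclass finalResultType overrides and make the base method final, delegating to a single definition in `definitions`, which presumably unwraps method and poly types the same way the removed overrides did. A minimal, self-contained sketch of that unwrapping, over a made-up toy Type model (MethodTpe/PolyTpe/SimpleTpe are illustrative names, not compiler classes):

    // Toy model: three "type" shapes are enough to show the recursion.
    sealed trait Tpe
    final case class MethodTpe(params: List[String], result: Tpe) extends Tpe
    final case class PolyTpe(tparams: List[String], result: Tpe) extends Tpe
    final case class SimpleTpe(name: String) extends Tpe

    // One shared, tail-recursive definition replaces per-subclass overrides.
    @annotation.tailrec
    def finalResultType(tp: Tpe): Tpe = tp match {
      case MethodTpe(_, res) => finalResultType(res)   // strip each value-parameter section
      case PolyTpe(_, res)   => finalResultType(res)   // strip the type-parameter section
      case other             => other                  // anything else is its own final result
    }

    // def f[A](x: A)(y: Int): String  ~  PolyTpe(List("A"), MethodTpe(List("x"), MethodTpe(List("y"), SimpleTpe("String"))))
    // finalResultType of that value is SimpleTpe("String").
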
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
index eb266e8125..8615e34fad 100644
--- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -212,11 +212,3 @@ object ByteCodecs {
decode7to8(xs, len)
}
}
-
-
-
-
-
-
-
-
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 59af819dad..738baddc08 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -34,14 +34,19 @@ trait Collections {
xss forall (_ forall p)
final def mmap[A, B](xss: List[List[A]])(f: A => B) =
xss map (_ map f)
- final def mforeach[A](xss: List[List[A]])(f: A => Unit) =
- xss foreach (_ foreach f)
final def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] = {
var res: Option[A] = null
mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x))
if (res eq null) None else res
}
+  /** These are all written in terms of List because we're trying to wring out
+   *  all the performance we can, and List is used almost exclusively in the compiler;
+   *  but people are branching out in their collections, so here's a Traversable overload.
+   */
+ final def mforeach[A](xss: List[List[A]])(f: A => Unit) = xss foreach (_ foreach f)
+ final def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit) = xss foreach (_ foreach f)
+
final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
val lb = new ListBuffer[C]
var ys1 = xs1
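
A small usage sketch of the mforeach pair added above, as standalone copies in a throwaway object rather than the compiler's Collections trait: the List overload keeps the common compiler case cheap, the Traversable overload accepts other nested shapes, and ordinary overload resolution picks the more specific List version when both apply.

    object NestedForeach {
      // Hot path: nested Lists, the shape used almost everywhere in the compiler.
      def mforeach[A](xss: List[List[A]])(f: A => Unit): Unit = xss foreach (_ foreach f)
      // General path: any nested Traversable, e.g. Vector[Set[A]].
      def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit): Unit = xss foreach (_ foreach f)
    }

    NestedForeach.mforeach(List(List(1, 2), List(3)))(println)   // resolves to the List overload
    NestedForeach.mforeach(Vector(Set(1), Set(2, 3)))(println)   // goes through the Traversable overload
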
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index ddd0a64675..d62ab40a9d 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -1,273 +1,214 @@
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
- *
*/
package scala
-package reflect.internal.util
-
-import scala.reflect.ClassTag
-import scala.reflect.internal.FatalError
-import scala.reflect.macros.Attachments
-
-object Position {
- val tabInc = 8
-
- /** Prints the message with the given position indication. */
- def formatMessage(posIn: Position, msg: String, shortenFile: Boolean): String = {
- val pos = (
- if (posIn eq null) NoPosition
- else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
- else posIn
- )
- val prefix = if (shortenFile) pos.sourceName else pos.sourcePath
-
- pos match {
- case FakePos(fmsg) => fmsg+" "+msg
- case NoPosition => msg
- case _ => "%s:%s: %s\n%s\n%s".format(prefix, pos.line, msg, pos.lineContent, pos.lineCarat)
- }
- }
-}
+package reflect
+package internal
+package util
/** The Position class and its subclasses represent positions of ASTs and symbols.
- * Except for NoPosition and FakePos, every position refers to a SourceFile
- * and to an offset in the sourcefile (its `point`). For batch compilation,
- * that's all. For interactive IDE's there are also RangePositions
- * and TransparentPositions. A RangePosition indicates a start and an end
- * in addition to its point. TransparentPositions are a subclass of RangePositions.
- * Range positions that are not transparent are called opaque.
- * Trees with RangePositions need to satisfy the following invariants.
+ * Every subclass of DefinedPosition refers to a SourceFile and three character
+ * offsets within it: start, end, and point. The point is where the ^ belongs
+ * when issuing an error message, usually at a Name. A range position can be designated
+ * as transparent, which excuses it from maintaining the invariants to follow. If
+ * a transparent position has opaque children, those are considered as if they were
+ * the direct children of the transparent position's parent.
+ *
+ * Note: some of these invariants actually apply to the trees which carry
+ * the positions, but they are phrased as if the positions themselves were
+ * the parent/children for conciseness.
*
- * INV1: A tree with an offset position never contains a child
- * with a range position
- * INV2: If the child of a tree with a range position also has a range position,
- * then the child's range is contained in the parent's range.
- * INV3: Opaque range positions of children of the same node are non-overlapping
- * (this means their overlap is at most a single point).
+ * Invariant 1: in a focused/offset position, start == point == end
+ * Invariant 2: in a range position, start <= point < end
+ * Invariant 3: an offset position never has a child with a range position
+ * Invariant 4: every range position child of a range position parent is contained within its parent
+ * Invariant 5: opaque range position siblings overlap at most at a single point
*
* The following tests are useful on positions:
*
- * pos.isDefined true if position is not a NoPosition nor a FakePosition
- * pos.isRange true if position is a range
+ * pos.isDefined true if position is not an UndefinedPosition (those being NoPosition and FakePos)
+ * pos.isRange true if position is a range (opaque or transparent) which implies start < end
* pos.isOpaqueRange true if position is an opaque range
*
- * The following accessor methods are provided:
+ * The following accessor methods are provided - an exception will be thrown if
+ * point/start/end are attempted on an UndefinedPosition.
*
- * pos.source The source file of the position, which must be defined
- * pos.point The offset of the position's point, which must be defined
- * pos.start The start of the position, which must be a range
- * pos.end The end of the position, which must be a range
- *
- * There are also convenience methods, such as
- *
- * pos.startOrPoint
- * pos.endOrPoint
- * pos.pointOrElse(default)
- *
- * These are less strict about the kind of position on which they can be applied.
+ * pos.source The source file of the position, or NoSourceFile if unavailable
+ * pos.point The offset of the point
+ * pos.start The (inclusive) start offset, or the point of an offset position
+ * pos.end The (exclusive) end offset, or the point of an offset position
*
* The following conversion methods are often used:
*
- * pos.focus converts a range position to an offset position, keeping its point;
- * returns all other positions unchanged.
- * pos.makeTransparent converts an opaque range position into a transparent one.
- * returns all other positions unchanged.
+ * pos.focus Converts a range position to an offset position focused on the point
+ * pos.makeTransparent Convert an opaque range into a transparent range
*/
-abstract class Position extends scala.reflect.api.Position { self =>
-
+class Position extends scala.reflect.api.Position with InternalPositionImpl with DeprecatedPosition {
type Pos = Position
-
def pos: Position = this
+ def withPos(newPos: Position): macros.Attachments { type Pos = Position.this.Pos } = newPos
+
+ protected def fail(what: String) = throw new UnsupportedOperationException(s"Position.$what on $this")
+
+ // If scala-refactoring extends Position directly it seems I have no
+ // choice but to offer all the concrete methods.
+ def isDefined = false
+ def isRange = false
+ def source: SourceFile = NoSourceFile
+ def start: Int = fail("start")
+ def point: Int = fail("point")
+ def end: Int = fail("end")
+}
- def withPos(newPos: Position): Attachments { type Pos = self.Pos } = newPos
-
- /** An optional value containing the source file referred to by this position, or
- * None if not defined.
- */
- def source: SourceFile = throw new UnsupportedOperationException(s"Position.source on ${this.getClass}")
-
- /** Is this position neither a NoPosition nor a FakePosition?
- * If isDefined is true, offset and source are both defined.
- */
- def isDefined: Boolean = false
-
- /** Is this position a transparent position? */
- def isTransparent: Boolean = false
-
- /** Is this position a range position? */
- def isRange: Boolean = false
-
- /** Is this position a non-transparent range position? */
- def isOpaqueRange: Boolean = false
-
- /** if opaque range, make this position transparent */
- def makeTransparent: Position = this
-
- /** The start of the position's range, error if not a range position */
- def start: Int = throw new UnsupportedOperationException(s"Position.start on ${this.getClass}")
-
- /** The start of the position's range, or point if not a range position */
- def startOrPoint: Int = point
-
- /** The point (where the ^ is) of the position */
- def point: Int = throw new UnsupportedOperationException(s"Position.point on ${this.getClass}")
-
- /** The point (where the ^ is) of the position, or else `default` if undefined */
- def pointOrElse(default: Int): Int = default
-
- /** The end of the position's range, error if not a range position */
- def end: Int = throw new UnsupportedOperationException(s"Position.end on ${this.getClass}")
-
- /** The end of the position's range, or point if not a range position */
- def endOrPoint: Int = point
-
- @deprecated("use point instead", "2.9.0")
- def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt
-
- /** The same position with a different start value (if a range) */
- def withStart(off: Int): Position = this
-
- /** The same position with a different end value (if a range) */
- def withEnd(off: Int): Position = this
-
- /** The same position with a different point value (if a range or offset) */
- def withPoint(off: Int): Position = this
-
- /** The same position with a different source value, and its values shifted by given offset */
- def withSource(source: SourceFile, shift: Int): Position = this
-
- /** If this is a range, the union with the other range, with the point of this position.
- * Otherwise, this position
- */
- def union(pos: Position): Position = this
-
- /** If this is a range position, the offset position of its start.
- * Otherwise the position itself
- */
- def focusStart: Position = this
-
- /** If this is a range position, the offset position of its point.
- * Otherwise the position itself
- */
- def focus: Position = this
-
- /** If this is a range position, the offset position of its end.
- * Otherwise the position itself
- */
- def focusEnd: Position = this
-
- /** Does this position include the given position `pos`.
- * This holds if `this` is a range position and its range [start..end]
- * is the same or covers the range of the given position, which may or may not be a range position.
- */
- def includes(pos: Position): Boolean = false
-
- /** Does this position properly include the given position `pos` ("properly" meaning their
- * ranges are not the same)?
- */
- def properlyIncludes(pos: Position): Boolean =
- includes(pos) && (start < pos.startOrPoint || pos.endOrPoint < end)
-
- /** Does this position precede that position?
- * This holds if both positions are defined and the end point of this position
- * is not larger than the start point of the given position.
- */
- def precedes(pos: Position): Boolean =
- isDefined && pos.isDefined && endOrPoint <= pos.startOrPoint
-
- /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
- * do not share a common point).
- */
- def properlyPrecedes(pos: Position): Boolean =
- isDefined && pos.isDefined && endOrPoint < pos.startOrPoint
-
- /** Does this position overlap with that position?
- * This holds if both positions are ranges and there is an interval of
- * non-zero length that is shared by both position ranges.
- */
- def overlaps(pos: Position): Boolean =
- isRange && pos.isRange &&
- ((pos.start < end && start < pos.end) || (start < pos.end && pos.start < end))
-
- /** Does this position cover the same range as that position?
- * Holds only if both position are ranges
- */
- def sameRange(pos: Position): Boolean =
- isRange && pos.isRange && start == pos.start && end == pos.end
-
- def line: Int = throw new UnsupportedOperationException("Position.line")
+object Position {
+ val tabInc = 8
- def column: Int = throw new UnsupportedOperationException("Position.column")
+ private def validate[T <: Position](pos: T): T = {
+ if (pos.isRange)
+ assert(pos.start <= pos.end, s"bad position: ${pos.show}")
- /** A line with a ^ padded with the right number of spaces.
- */
- def lineCarat: String = " " * (column - 1) + "^"
+ pos
+ }
- /** The line of code and the corresponding carat pointing line, trimmed
- * to the maximum specified width, with the trimmed versions oriented
- * around the point to give maximum context.
- */
- def lineWithCarat(maxWidth: Int): (String, String) = {
- val radius = maxWidth / 2
- var start = scala.math.max(column - radius, 0)
- var result = lineContent drop start take maxWidth
-
- if (result.length < maxWidth) {
- result = lineContent takeRight maxWidth
- start = lineContent.length - result.length
+ /** Prints the message with the given position indication. */
+ def formatMessage(posIn: Position, msg: String, shortenFile: Boolean): String = {
+ val pos = if (posIn eq null) NoPosition else posIn
+ val prefix = pos.source match {
+ case NoSourceFile => ""
+ case s if shortenFile => s.file.name + ":"
+ case s => s.file.path + ":"
}
-
- (result, lineCarat drop start take maxWidth)
+ prefix + (pos showError msg)
}
- /** Convert this to a position around `point` that spans a single source line */
- def toSingleLine: Position = this
-
- /** The source code corresponding to the range, if this is a range position.
- * Otherwise the empty string.
- */
- def sourceCode = ""
- def sourceName = "<none>"
- def sourcePath = "<none>"
- def lineContent = "<none>"
- def lengthInChars = 0
- def lengthInLines = 0
-
- /** Map this position to a position in an original source
- * file. If the SourceFile is a normal SourceFile, simply
- * return this.
- */
- def inUltimateSource(source : SourceFile): Position =
- if (source == null) this else source.positionInUltimateSource(this)
-
- def dbgString: String = toString
- def safeLine: Int = try line catch { case _: UnsupportedOperationException => -1 }
-
- def show: String = "["+toString+"]"
+ def offset(source: SourceFile, point: Int): Position = validate(new OffsetPosition(source, point))
+ def range(source: SourceFile, start: Int, point: Int, end: Int): Position = validate(new RangePosition(source, start, point, end))
+ def transparent(source: SourceFile, start: Int, point: Int, end: Int): Position = validate(new TransparentPosition(source, start, point, end))
}
-case object NoPosition extends Position {
- override def dbgString = toString
+class OffsetPosition(sourceIn: SourceFile, pointIn: Int) extends DefinedPosition {
+ override def isRange = false
+ override def source = sourceIn
+ override def point = pointIn
+ override def start = point
+ override def end = point
}
-
-case class FakePos(msg: String) extends Position {
+class RangePosition(sourceIn: SourceFile, startIn: Int, pointIn: Int, endIn: Int) extends OffsetPosition(sourceIn, pointIn) {
+ override def isRange = true
+ override def start = startIn
+ override def end = endIn
+}
+class TransparentPosition(sourceIn: SourceFile, startIn: Int, pointIn: Int, endIn: Int) extends RangePosition(sourceIn, startIn, pointIn, endIn) {
+ override def isTransparent = true
+}
+case object NoPosition extends UndefinedPosition
+case class FakePos(msg: String) extends UndefinedPosition {
override def toString = msg
}
-class OffsetPosition(override val source: SourceFile, override val point: Int) extends Position {
- override def isDefined = true
- override def pointOrElse(default: Int): Int = point
- override def withPoint(off: Int) = new OffsetPosition(source, off)
- override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
+sealed abstract class DefinedPosition extends Position {
+ final override def isDefined = true
+ override def equals(that: Any) = that match {
+ case that: DefinedPosition => source.file == that.source.file && start == that.start && point == that.point && end == that.end
+ case _ => false
+ }
+ override def hashCode = Seq[Any](source.file, start, point, end).##
+ override def toString = (
+ if (isRange) s"RangePosition($canonicalPath, $start, $point, $end)"
+ else s"source-$canonicalPath,line-$line,$pointMessage$point"
+ )
+ private def pointMessage = if (point > source.length) "out-of-bounds-" else "offset="
+ private def canonicalPath = source.file.canonicalPath
+}
- override def line = source.offsetToLine(point) + 1
- override def sourceName = source.file.name
- override def sourcePath = source.file.path
- override def lineContent = source.lineToString(line - 1)
+sealed abstract class UndefinedPosition extends Position {
+ final override def isDefined = false
+ override def isRange = false
+ override def source = NoSourceFile
+ override def start = fail("start")
+ override def point = fail("point")
+ override def end = fail("end")
+}
- override def column: Int = {
+private[util] trait InternalPositionImpl {
+ self: Position =>
+
+ // The methods which would be abstract in Position if it were
+ // possible to change Position.
+ def isDefined: Boolean
+ def isRange: Boolean
+ def source: SourceFile
+ def start: Int
+ def point: Int
+ def end: Int
+
+ /** Map this position to its position in the original source file
+   * (which may be this position unchanged).
+ */
+ def finalPosition: Pos = source positionInUltimateSource this
+
+ def isTransparent = false
+ def isOffset = isDefined && !isRange
+ def isOpaqueRange = isRange && !isTransparent
+ def pointOrElse(alt: Int): Int = if (isDefined) point else alt
+ def makeTransparent: Position = if (isOpaqueRange) Position.transparent(source, start, point, end) else this
+
+ /** Copy a range position with a changed value.
+ */
+ def withStart(start: Int): Position = copyRange(start = start)
+ def withPoint(point: Int): Position = if (isRange) copyRange(point = point) else Position.offset(source, point)
+ def withEnd(end: Int): Position = copyRange(end = end)
+ def withSource(source: SourceFile): Position = copyRange(source = source)
+ def withShift(shift: Int): Position = Position.range(source, start + shift, point + shift, end + shift)
+
+ /** Convert a range position to a simple offset.
+ */
+ def focusStart: Position = if (this.isRange) asOffset(start) else this
+ def focus: Position = if (this.isRange) asOffset(point) else this
+ def focusEnd: Position = if (this.isRange) asOffset(end) else this
+
+ def union(pos: Position): Position = (
+ if (!pos.isRange) this
+ else if (this.isRange) copyRange(start = start min pos.start, end = end max pos.end)
+ else pos
+ )
+
+ def includes(pos: Position): Boolean = isRange && pos.isDefined && start <= pos.start && pos.end <= end
+ def properlyIncludes(pos: Position): Boolean = includes(pos) && (start < pos.start || pos.end < end)
+ def precedes(pos: Position): Boolean = bothDefined(pos) && end <= pos.start
+ def properlyPrecedes(pos: Position): Boolean = bothDefined(pos) && end < pos.start
+ def sameRange(pos: Position): Boolean = bothRanges(pos) && start == pos.start && end == pos.end
+  // This works because it's a range position invariant that S1 < E1 and S2 < E2.
+  // So if additionally S1 < E2 and S2 < E1, then max(S1, S2) < min(E1, E2), i.e. the
+  // two ranges share a non-empty interval, which is exactly what overlap means.
+ def overlaps(pos: Position): Boolean = bothRanges(pos) && start < pos.end && pos.start < end
+
+ def line: Int = if (hasSource) source.offsetToLine(point) + 1 else 0
+ def column: Int = if (hasSource) calculateColumn() else 0
+ def lineContent: String = if (hasSource) source.lineToString(line - 1) else ""
+ def lineCarat: String = if (hasSource) " " * (column - 1) + "^" else ""
+
+ def showError(msg: String): String = finalPosition match {
+ case FakePos(fmsg) => s"$fmsg $msg"
+ case NoPosition => msg
+ case pos => s"${pos.line}: $msg\n${pos.lineContent}\n${pos.lineCarat}"
+ }
+ def showDebug: String = toString
+ def show = (
+ if (isOpaqueRange && start != point) s"[$point/$start:$end]"
+ else if (isOpaqueRange) s"[$start:$end]"
+ else if (isTransparent) s"<$start:$end>"
+ else if (isDefined) s"[$point]"
+ else "[X]"
+ )
+
+ private def asOffset(point: Int): Position = Position.offset(source, point)
+ private def copyRange(source: SourceFile = source, start: Int = start, point: Int = point, end: Int = end): Position =
+ Position.range(source, start, point, end)
+
+ private def calculateColumn(): Int = {
var idx = source.lineToOffset(source.offsetToLine(point))
var col = 0
while (idx != point) {
@@ -276,18 +217,39 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
}
col + 1
}
+ private def hasSource = source ne NoSourceFile
+ private def bothRanges(that: Position) = isRange && that.isRange
+ private def bothDefined(that: Position) = isDefined && that.isDefined
+}
- override def union(pos: Position) = if (pos.isRange) pos else this
+/** Holding cell for methods unused and/or unnecessary. */
+private[util] trait DeprecatedPosition {
+ self: Position =>
- override def equals(that : Any) = that match {
- case that : OffsetPosition => point == that.point && source.file == that.source.file
- case that => false
- }
- override def hashCode = point * 37 + source.file.hashCode
+ @deprecated("use `point`", "2.9.0")
+ def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt
- override def toString = {
- val pointmsg = if (point > source.length) "out-of-bounds-" else "offset="
- "source-%s,line-%s,%s%s".format(source.file.canonicalPath, line, pointmsg, point)
- }
- override def show = "["+point+"]"
+ @deprecated("use `focus`", "2.11.0")
+ def toSingleLine: Position = this
+
+ @deprecated("use `line`", "2.11.0")
+ def safeLine: Int = line
+
+ @deprecated("use `showDebug`", "2.11.0")
+ def dbgString: String = showDebug
+
+ @deprecated("use `finalPosition`", "2.11.0")
+ def inUltimateSource(source: SourceFile): Position = source positionInUltimateSource this
+
+ @deprecated("use `lineCarat`", "2.11.0")
+ def lineWithCarat(maxWidth: Int): (String, String) = ("", "")
+
+ @deprecated("Use `withSource(source)` and `withShift`", "2.11.0")
+ def withSource(source: SourceFile, shift: Int): Position = this withSource source withShift shift
+
+ @deprecated("Use `start`", "2.11.0")
+ def startOrPoint: Int = if (isRange) start else point
+
+ @deprecated("Use `end`", "2.11.0")
+ def endOrPoint: Int = if (isRange) end else point
}
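
The rewritten Position above consolidates offset, range and transparent positions and states the invariants up front. A toy, self-contained model of just those invariants and the interval operations (not the real scala.reflect.internal.util classes, whose factories also take a SourceFile and validate via assert):

    // start == point == end for an offset; start <= point <= end for a range.
    final case class Pos(start: Int, point: Int, end: Int) {
      require(start <= point && point <= end, s"bad position: [$start/$point/$end]")
      def isRange: Boolean = start < end
      // Inclusion, overlap and union work on the [start, end) interval, keeping this.point.
      def includes(that: Pos): Boolean = isRange && start <= that.start && that.end <= end
      def overlaps(that: Pos): Boolean = isRange && that.isRange && start < that.end && that.start < end
      def union(that: Pos): Pos =
        if (!that.isRange) this
        else if (isRange) Pos(start min that.start, point, end max that.end)
        else that
    }
    object Pos {
      def offset(point: Int): Pos            = Pos(point, point, point)
      def range(s: Int, p: Int, e: Int): Pos = Pos(s, p, e)
    }

    val whole = Pos.range(0, 4, 10)
    val inner = Pos.range(2, 2, 5)
    assert(whole includes inner)
    assert(inner overlaps Pos.range(4, 4, 8))
    assert((inner union Pos.range(6, 6, 9)) == Pos.range(2, 2, 9))
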
diff --git a/src/reflect/scala/reflect/internal/util/RangePosition.scala b/src/reflect/scala/reflect/internal/util/RangePosition.scala
deleted file mode 100644
index 0d09a53cd9..0000000000
--- a/src/reflect/scala/reflect/internal/util/RangePosition.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala
-package reflect.internal.util
-
-/** new for position ranges */
-class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
-extends OffsetPosition(source, point) {
- if (start > end) scala.sys.error("bad position: "+show)
- override def isRange: Boolean = true
- override def isOpaqueRange: Boolean = true
- override def startOrPoint: Int = start
- override def endOrPoint: Int = end
- override def withStart(off: Int) = new RangePosition(source, off, point, end)
- override def withEnd(off: Int) = new RangePosition(source, start, point, off)
- override def withPoint(off: Int) = new RangePosition(source, start, off, end)
- override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
- override def focusStart = new OffsetPosition(source, start)
- override def focus = {
- if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
- focusCache
- }
- override def focusEnd = new OffsetPosition(source, end)
- override def makeTransparent = new TransparentPosition(source, start, point, end)
- override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
- override def union(pos: Position): Position =
- if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
-
- override def toSingleLine: Position = source match {
- case bs: BatchSourceFile
- if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
- val pointLine = bs.offsetToLine(point)
- new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
- case _ => this
- }
-
- override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
- override def show = "["+start+":"+end+"]"
- private var focusCache: Position = NoPosition
-}
-
-class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
- override def isOpaqueRange: Boolean = false
- override def isTransparent = true
- override def makeTransparent = this
- override def show = "<"+start+":"+end+">"
-}
diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala
index 75dcfaa59b..635bfb05e4 100644
--- a/src/reflect/scala/reflect/internal/util/Set.scala
+++ b/src/reflect/scala/reflect/internal/util/Set.scala
@@ -23,5 +23,4 @@ abstract class Set[T <: AnyRef] {
findEntry(x) ne null
def toList = iterator.toList
-
}
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
index 4d98a344d8..14f349f502 100644
--- a/src/reflect/scala/reflect/internal/util/StringOps.scala
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -7,7 +7,11 @@
\* */
package scala
-package reflect.internal.util
+package reflect
+package internal
+package util
+
+import scala.compat.Platform.EOL
/** This object provides utility methods to extract elements
* from Strings.
@@ -18,14 +22,26 @@ package reflect.internal.util
trait StringOps {
def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
- def longestCommonPrefix(xs: List[String]): String = {
- if (xs.isEmpty || xs.contains("")) ""
- else xs.head.head match {
- case ch =>
- if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
- else ""
- }
+  def longestCommonPrefix(xs: List[String]): String = xs match {
+    case Nil                  => ""
+    case xs if xs contains "" => ""
+    case x :: rest            =>
+      val ch = x charAt 0
+      if (rest exists (_.head != ch)) ""
+      else "" + ch + longestCommonPrefix(xs map (_ substring 1))
+  }
+ /** Like String#trim, but trailing whitespace only.
+ */
+ def trimTrailingSpace(s: String): String = {
+ var end = s.length
+ while (end > 0 && s.charAt(end - 1).isWhitespace)
+ end -= 1
+
+ if (end == s.length) s
+ else s.substring(0, end)
}
+  /** Breaks the string into lines, trims trailing whitespace from each, and reassembles them with EOL. */
+  def trimAllTrailingSpace(s: String): String = s.lines map trimTrailingSpace mkString EOL
def decompose(str: String, sep: Char): List[String] = {
def ws(start: Int): List[String] =
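
For reference, a REPL-pasteable sketch of how the two StringOps helpers above behave on small inputs; these are standalone copies for illustration, not the StringOps trait itself:

    def trimTrailingSpace(s: String): String = {
      var end = s.length
      while (end > 0 && s.charAt(end - 1).isWhitespace) end -= 1
      if (end == s.length) s else s.substring(0, end)
    }

    def longestCommonPrefix(xs: List[String]): String = xs match {
      case Nil                  => ""
      case xs if xs contains "" => ""
      case x :: rest            =>
        val ch = x charAt 0
        if (rest exists (_.head != ch)) ""
        else "" + ch + longestCommonPrefix(xs map (_ substring 1))
    }

    assert(trimTrailingSpace("val x = 1   ") == "val x = 1")
    assert(longestCommonPrefix(List("scala.List", "scala.Long")) == "scala.L")
    assert(longestCommonPrefix(List("scala.Int", "util.Try")) == "")
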
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index df63a55090..3618c150ca 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -7,6 +7,9 @@ import scala.language.existentials // SI-6541
package object util {
import StringOps.longestCommonPrefix
+ // An allocation-avoiding reusable instance of the so-common List(Nil).
+ val ListOfNil: List[List[Nothing]] = Nil :: Nil
+
def andFalse(body: Unit): Boolean = false
// Shorten a name like Symbols$FooSymbol to FooSymbol.
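
ListOfNil exists because the shape List(Nil), one empty parameter section such as a default constructor's, is built constantly, and a single shared instance avoids those allocations; being a stable identifier, it can also sit directly in patterns, as the TypeDebugging change earlier in this diff does. A minimal standalone sketch of both uses, with a made-up Method case class standing in for compiler trees:

    val ListOfNil: List[List[Nothing]] = Nil :: Nil

    // A toy stand-in for a "method with parameter sections" shape.
    final case class Method(name: String, paramss: List[List[String]])

    // Constructing: reuse the shared instance instead of allocating List(Nil) each time.
    val defaultCtor = Method("<init>", ListOfNil)

    // Pattern matching: a stable identifier pattern compares with ==,
    // so this matches exactly "one empty parameter section".
    def hasSingleEmptyParamSection(m: Method): Boolean = m.paramss match {
      case ListOfNil => true
      case _         => false
    }

    assert(hasSingleEmptyParamSection(defaultCtor))
    assert(!hasSingleEmptyParamSection(Method("f", List(List("x")))))
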
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 6406dacc24..bb5fe07fc9 100644
--- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -28,7 +28,7 @@ trait ExprTyper {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
- sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType)
+ sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType)
case _ => NoSymbol
}
}
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index ed56016bce..984a752964 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -23,6 +23,7 @@ import scala.collection.generic.Clearable
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
import java.io.{ BufferedReader, FileReader }
+import scala.reflect.internal.util.StringOps._
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -548,7 +549,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
} else try {
val s = what
// line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)
- val (start, len) =
+ val (start, len) =
if ((s indexOf '+') > 0) {
val (a,b) = s splitAt (s indexOf '+')
(a.toInt, b.drop(1).toInt)
@@ -885,12 +886,10 @@ object ILoop {
override def write(str: String) = {
// completely skip continuation lines
if (str forall (ch => ch.isWhitespace || ch == '|')) ()
- // print a newline on empty scala prompts
- else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n")
else super.write(str)
}
}
- val input = new BufferedReader(new StringReader(code)) {
+ val input = new BufferedReader(new StringReader(code.trim + "\n")) {
override def readLine(): String = {
val s = super.readLine()
// helping out by printing the line being interpreted.
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index ee4ff59498..a60de01673 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -8,23 +8,17 @@ package tools.nsc
package interpreter
import PartialFunction.cond
-
import scala.language.implicitConversions
-
import scala.collection.mutable
-
import scala.concurrent.{ Future, ExecutionContext }
-
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ BeanProperty, ClassTag, classTag }
import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
-
import scala.tools.util.PathResolver
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, StackTraceOps }
import scala.tools.nsc.util.Exceptional.unwrap
-
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
/** An interpreter for Scala code.
diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index f9b9eecdb3..84545e9201 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
@@ -7,11 +7,8 @@ package scala.tools.nsc.doc
import scala.collection._
-
trait Index {
-
type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
def firstLetterIndex: Map[Char, SymbolMap]
-
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index cd1d604843..a933c35c99 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -925,11 +925,8 @@ trait CommentFactoryBase { this: MemberLookupBase =>
buffer.substring(start, offset)
}
-
/* CHARS CLASSES */
def isWhitespace(c: Char) = c == ' ' || c == '\t'
-
}
-
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 53410fd4ad..643a089aae 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -51,9 +51,6 @@ object IndexModelFactory {
gather(universe.rootPackage)
result.toMap
-
}
-
}
-
}
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index 123516bb2d..41346d13c0 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -163,5 +163,4 @@ class Arguments {
bindings get option flatMap (_ get key)
def getOthers: List[String] = others.toList
-
}