Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 6
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTypes.scala | 3
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 3
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reify.scala | 5
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Positions.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 73
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 182
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala | 46
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css | 6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js | 19
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/CompilerControl.scala | 40
-rwxr-xr-x  src/compiler/scala/tools/nsc/interactive/Doc.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Global.scala | 157
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Picklers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/REPL.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RangePositions.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/IMain.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineReader.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 194
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 193
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 92
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 145
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala | 225
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 112
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 519
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 264
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 97
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 679
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Variances.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 1
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 31
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala | 1
-rw-r--r--  src/library/scala/annotation/migration.scala | 3
-rw-r--r--  src/library/scala/collection/IndexedSeq.scala | 4
-rw-r--r--  src/library/scala/collection/generic/GenTraversableFactory.scala | 6
-rw-r--r--  src/library/scala/collection/generic/IndexedSeqFactory.scala | 21
-rw-r--r--  src/library/scala/collection/immutable/IndexedSeq.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 36
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 13
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 14
-rw-r--r--  src/library/scala/collection/mutable/ListMap.scala | 17
-rw-r--r--  src/library/scala/concurrent/BatchingExecutor.scala | 117
-rw-r--r--  src/library/scala/concurrent/Future.scala | 6
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 34
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 2
-rw-r--r--  src/library/scala/package.scala | 3
-rw-r--r--  src/partest/scala/tools/partest/ASMConverters.scala | 71
-rw-r--r--  src/partest/scala/tools/partest/BytecodeTest.scala | 102
-rw-r--r--  src/reflect/scala/reflect/api/BuildUtils.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Exprs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 27
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationCheckers.scala | 179
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala | 17
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala | 15
-rw-r--r--  src/reflect/scala/reflect/internal/Importers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Positions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 54
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 25
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 39
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 75
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 129
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala | 31
-rw-r--r--  src/reflect/scala/reflect/internal/settings/MutableSettings.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/util/Position.scala | 8
-rw-r--r--  src/reflect/scala/reflect/macros/Attachments.scala | 16
-rw-r--r--  src/reflect/scala/reflect/macros/compileTimeOnly.scala | 16
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 8
-rw-r--r--  src/reflect/scala/reflect/runtime/Settings.scala | 1
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 2
114 files changed, 3278 insertions(+), 1717 deletions(-)
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 31974b5b76..06e287f62f 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -45,9 +45,7 @@ trait GenTrees {
case global.EmptyTree =>
reifyMirrorObject(EmptyTree)
case global.emptyValDef =>
- mirrorSelect(nme.emptyValDef)
- case global.pendingSuperCall =>
- mirrorSelect(nme.pendingSuperCall)
+ mirrorBuildSelect(nme.emptyValDef)
case FreeDef(_, _, _, _, _) =>
reifyNestedFreeDef(tree)
case FreeRef(_, _) =>
@@ -177,7 +175,7 @@ trait GenTrees {
// then we can reify the scrutinee as a symless AST and that will definitely be hygienic
// why? because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote
// otherwise we need to reify the corresponding type
- if (sym.isLocalToReifee || tpe.isLocalToReifee)
+ if (sym.isLocalToReifee || tpe.isLocalToReifee || treeInfo.isWildcardStarType(tree))
reifyProduct(tree)
else {
if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe))
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index 7aa87dc2f8..bb7e1f9b56 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -69,8 +69,7 @@ trait GenTypes {
def reificationIsConcrete: Boolean = state.reificationIsConcrete
def spliceType(tpe: Type): Tree = {
- val quantified = currentQuantified
- if (tpe.isSpliceable && !(quantified contains tpe.typeSymbol)) {
+ if (tpe.isSpliceable && !(boundSymbolsInCallstack contains tpe.typeSymbol)) {
if (reifyDebug) println("splicing " + tpe)
val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 21db93d8f5..49877b4286 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -34,6 +34,9 @@ trait GenUtils {
def mirrorSelect(name: String): Tree =
termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorBuildSelect(name: String): Tree =
+ termPath(nme.UNIVERSE_BUILD_PREFIX + name)
+
def mirrorMirrorSelect(name: String): Tree =
termPath(nme.MIRROR_PREFIX + name)
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index dc0028be38..8e13a45cdb 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -28,7 +28,10 @@ trait Reify extends GenSymbols
finally currents = currents.tail
}
}
- def currentQuantified = flatCollect(reifyStack.currents)({ case ExistentialType(quantified, _) => quantified })
+ def boundSymbolsInCallstack = flatCollect(reifyStack.currents) {
+ case ExistentialType(quantified, _) => quantified
+ case PolyType(typeParams, _) => typeParams
+ }
def current = reifyStack.currents.head
def currents = reifyStack.currents
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 1b7509fdbe..7406f5d02d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -188,8 +188,12 @@ trait Reshape {
}
private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match {
- case ty @ Typed(expr1, tt @ TypeTree()) =>
+ case ty @ Typed(expr1, tpt) =>
if (reifyDebug) println("reify typed: " + tree)
+ val original = tpt match {
+ case tt @ TypeTree() => tt.original
+ case tpt => tpt
+ }
val annotatedArg = {
def loop(tree: Tree): Tree = tree match {
case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2)
@@ -197,15 +201,15 @@ trait Reshape {
case _ => EmptyTree
}
- loop(tt.original)
+ loop(original)
}
if (annotatedArg != EmptyTree) {
if (annotatedArg.isType) {
if (reifyDebug) println("verdict: was an annotated type, reify as usual")
ty
} else {
- if (reifyDebug) println("verdict: was an annotated value, equivalent is " + tt.original)
- toPreTyperTypedOrAnnotated(tt.original)
+ if (reifyDebug) println("verdict: was an annotated value, equivalent is " + original)
+ toPreTyperTypedOrAnnotated(original)
}
} else {
if (reifyDebug) println("verdict: wasn't annotated, reify as usual")
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 9a3e8d1530..4051bda914 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -72,7 +72,7 @@ class CompileSocket extends CompileOutputCommon {
/** A temporary directory to use */
val tmpDir = {
val udir = Option(Properties.userName) getOrElse "shared"
- val f = (Path(Properties.tmpDir) / "scala-devel" / udir).createDirectory()
+ val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory()
if (f.isDirectory && f.canWrite) {
info("[Temp directory: " + f + "]")
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 8a3c531ff0..caf6ad14cf 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -33,7 +33,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
}
else {
// Otherwise we're on the server and will use it to absolutize the paths.
- settings.absolutize(currentDir.value)
+ settings.absolutize()
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index 49569f5e05..d8fb632f73 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -20,7 +20,7 @@ trait Positions extends scala.reflect.internal.Positions {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
- if (!c.canHaveAttrs) ()
+ if ((c eq EmptyTree) || (c eq emptyValDef)) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 96146b7343..99b82d9746 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -58,7 +58,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
// are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), Nil), expr)
+ Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
}
// if it's a Match, mark the selector unchecked; otherwise nothing.
def mkUncheckedMatch(tree: Tree) = tree match {
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 54402f0903..2ad762fd55 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -65,13 +65,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// --- factory methods ----------------------------------------------------------
- /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
- */
- def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
- case Nil => Apply(gen.mkSuperSelect, Nil)
- case xs :: rest => rest.foldLeft(Apply(gen.mkSuperSelect, xs): Tree)(Apply.apply)
- }
-
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
@@ -89,7 +82,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* body
* }
*/
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
+ def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
@@ -111,7 +104,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
rhs = EmptyTree
)
}
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = Modifiers(PRESUPER)) }
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
val constrs = {
if (constrMods hasFlag TRAIT) {
@@ -124,16 +117,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1;
val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
- // this requires knowing which of the parents is a type macro and which is not
- // and that's something that cannot be found out before typer
- // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
- // this means that we don't know what will be the arguments of the super call
- // therefore here we emit a dummy which gets populated when the template is named and typechecked
+ val superCall = (superRef /: argss) (Apply.apply)
List(
- // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
- // is it going to be a problem that we can no longer include the `argss`?
- atPos(wrappingPos(superPos, lvdefs)) (
+ atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
@@ -151,10 +137,11 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
* should be owned by `sym`
+ * @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
assert(
mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
@@ -164,7 +151,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
ClassDef(sym,
Template(sym.info.parents map TypeTree,
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, body, superPos))
+ constrMods, vparamss, argss, body, superPos))
}
// --- subcomponents --------------------------------------------------
@@ -337,8 +324,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
else
super.transform {
tree match {
- case tree if !tree.canHaveAttrs =>
- tree
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
@@ -352,6 +337,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
transform(fn)
case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
tree
+ case EmptyTree =>
+ tree
case _ =>
val dupl = tree.duplicate
if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel))
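To make the restored `(superRef /: argss)(Apply.apply)` fold in the Template hunk above concrete, here is a tiny self-contained sketch with a toy case class standing in for the compiler's Apply tree (the names and strings are illustrative, not the nsc API):

    // toy stand-in, not a scala.reflect tree
    case class Apply(fun: Any, args: List[Any])

    object SuperCallSketch extends App {
      val superRef: Any = "super.<init>"
      val argss = List(List("a", "b"), List("c"))   // as in `extends D(a, b)(c)`
      val superCall = (superRef /: argss)(Apply.apply)
      println(superCall)   // Apply(Apply(super.<init>,List(a, b)),List(c))
    }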
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 33db4ee2d5..6f79f639b9 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -980,11 +980,8 @@ self =>
/** Assumed (provisionally) to be TermNames. */
def ident(skipIt: Boolean): Name =
- if (isIdent) {
- val name = in.name.encode
- in.nextToken()
- name
- } else {
+ if (isIdent) rawIdent().encode
+ else {
syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
nme.ERROR
}
@@ -1562,9 +1559,9 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, self, stats) = template()
+ val (parents, argss, self, stats) = template(isTrait = false)
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, npos, cpos)
+ makeNew(parents, self, stats, argss, npos, cpos)
case _ =>
syntaxErrorOrIncomplete("illegal start of simple expression", true)
errorTermTree
@@ -2106,7 +2103,7 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, Nil)
+ else New(t, ListOfNil)
}
/* -------- PARAMETERS ------------------------------------------- */
@@ -2742,17 +2739,20 @@ self =>
* TraitParents ::= AnnotType {with AnnotType}
* }}}
*/
- def templateParents(): List[Tree] = {
- val parents = new ListBuffer[Tree]
- def readAppliedParent() = {
- val start = in.offset
- val parent = startAnnotType()
- val argss = if (in.token == LPAREN) multipleArgumentExprs() else Nil
- parents += atPos(start)((parent /: argss)(Apply.apply))
+ def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
+ val parents = new ListBuffer[Tree] += startAnnotType()
+ val argss = (
+ // TODO: the insertion of ListOfNil here is where "new Foo" becomes
+ // indistinguishable from "new Foo()".
+ if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
+ else ListOfNil
+ )
+
+ while (in.token == WITH) {
+ in.nextToken()
+ parents += startAnnotType()
}
- readAppliedParent()
- while (in.token == WITH) { in.nextToken(); readAppliedParent() }
- parents.toList
+ (parents.toList, argss)
}
/** {{{
@@ -2762,12 +2762,12 @@ self =>
* EarlyDef ::= Annotations Modifiers PatDef
* }}}
*/
- def template(): (List[Tree], ValDef, List[Tree]) = {
+ def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
- if (in.token == WITH && (self eq emptyValDef)) {
+ if (in.token == WITH && self.isEmpty) {
val earlyDefs: List[Tree] = body flatMap {
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
@@ -2779,16 +2779,16 @@ self =>
case _ => List()
}
in.nextToken()
- val parents = templateParents()
- val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
- (parents, self1, earlyDefs ::: body1)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
+ (parents, argss, self1, earlyDefs ::: body1)
} else {
- (List(), self, body)
+ (List(), ListOfNil, self, body)
}
} else {
- val parents = templateParents()
- val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
- (parents, self, body)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
+ (parents, argss, self, body)
}
}
@@ -2802,15 +2802,15 @@ self =>
* }}}
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, self, body) = (
+ val (parents0, argss, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template()
+ template(isTrait = mods.isTrait)
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
- (List(), self, body)
+ val (self, body) = templateBodyOpt(traitParentSeen = false)
+ (List(), ListOfNil, self, body)
}
)
def anyrefParents() = {
@@ -2832,7 +2832,7 @@ self =>
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
Template(parents0, self, anyvalConstructor :: body)
else
- Template(anyrefParents, self, constrMods, vparamss, body, o2p(tstart))
+ Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
}
}
@@ -2847,15 +2847,14 @@ self =>
case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
- def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
+ def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
templateBody(isPre = false)
} else {
- if (in.token == LPAREN) {
- if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", true)
- else abort("unexpected opening parenthesis")
- }
+ if (in.token == LPAREN)
+ syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
+ " may not have parameters", true)
(emptyValDef, List())
}
}
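The TODO in templateParents above is easy to see from the user side. With this change both of the definitions below parse to the same shape (argss == List(Nil)), so later phases can no longer tell whether the empty argument list was written out; the snippet is mine, not from the patch:

    class Foo

    object NewFooSketch {
      val a = new Foo     // parser inserts ListOfNil
      val b = new Foo()   // explicit empty argument list, same resulting tree
    }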
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 4f564c5d0b..79f0bcf149 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -113,6 +113,11 @@ trait Scanners extends ScannersCommon {
cbuf.append(c)
}
+ /** Determines whether this scanner should emit identifier deprecation warnings,
+ * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala.
+ */
+ protected def emitIdentifierDeprecationWarnings = true
+
/** Clear buffer and set name and token */
private def finishNamed(idtoken: Int = IDENTIFIER) {
name = newTermName(cbuf.toString)
@@ -122,7 +127,7 @@ trait Scanners extends ScannersCommon {
val idx = name.start - kwOffset
if (idx >= 0 && idx < kwArray.length) {
token = kwArray(idx)
- if (token == IDENTIFIER && allowIdent != name)
+ if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
}
}
@@ -283,10 +288,16 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
+ def resetOffset() {
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
if (token == CLASS) {
token = CASECLASS
+ resetOffset()
} else if (token == OBJECT) {
token = CASEOBJECT
+ resetOffset()
} else {
lastOffset = nextLastOffset
next copyFrom this
@@ -607,7 +618,10 @@ trait Scanners extends ScannersCommon {
if (ch == '`') {
nextChar()
finishNamed(BACKQUOTED_IDENT)
- if (name.length == 0) syntaxError("empty quoted identifier")
+ if (name.length == 0)
+ syntaxError("empty quoted identifier")
+ else if (name == nme.WILDCARD)
+ syntaxError("wildcard invalid as backquoted identifier")
}
else syntaxError("unclosed quoted identifier")
}
@@ -1488,6 +1502,10 @@ trait Scanners extends ScannersCommon {
def improves(patches1: List[BracePatch]): Boolean =
imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
+ // don't emit deprecation warnings about identifiers like `macro` or `then`
+ // when skimming through the source file trying to heal braces
+ override def emitIdentifierDeprecationWarnings = false
+
override def error(offset: Int, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index f94055f666..7969bb9c20 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -205,26 +205,20 @@ abstract class TreeBuilder {
*/
def makeAnonymousNew(stats: List[Tree]): Tree = {
val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
+ makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
}
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
*/
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree],
+ def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
npos: Position, cpos: Position): Tree =
if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty) {
- // `Parsers.template` no longer differentiates tpts and their argss
- // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
- // instead of parents = Ident(C), argss = Nil as before
- // this change works great for things that are actually templates
- // but in this degenerate case we need to perform postprocessing
- val app = treeInfo.dissectApplied(parents.head)
- atPos(npos union cpos) { New(app.callee, app.argss) }
- } else {
+ makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
+ else if (parents.tail.isEmpty && stats.isEmpty)
+ atPos(npos union cpos) { New(parents.head, argss) }
+ else {
val x = tpnme.ANON_CLASS_NAME
atPos(npos union cpos) {
Block(
@@ -232,12 +226,12 @@ abstract class TreeBuilder {
atPos(cpos) {
ClassDef(
Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, stats, cpos.focus))
+ Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
}),
atPos(npos) {
New(
Ident(x) setPos npos.focus,
- Nil)
+ ListOfNil)
}
)
}
@@ -258,7 +252,7 @@ abstract class TreeBuilder {
/** Create tree representing a while loop */
def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index fd2b11898c..44d7a1929b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1164,34 +1164,28 @@ abstract class GenICode extends SubComponent {
resCtx
}
- private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position): Unit = {
- if (!(from <:< to) && !(from == NullReference && to == NothingReference)) {
- to match {
- case UNIT =>
- ctx.bb.emit(DROP(from), pos)
- debuglog("Dropped an " + from);
-
- case _ =>
- debugassert(from != UNIT, "Can't convert from UNIT to " + to + " at: " + pos)
- assert(!from.isReferenceType && !to.isReferenceType,
- "type error: can't convert from " + from + " to " + to +" in unit " + unit.source + " at " + pos)
-
- ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- }
- } else if (from == NothingReference) {
- ctx.bb.emit(THROW(ThrowableClass))
- ctx.bb.enterIgnoreMode
- } else if (from == NullReference) {
- ctx.bb.emit(DROP(from))
- ctx.bb.emit(CONSTANT(Constant(null)))
+ private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ // An awful lot of bugs explode here - let's leave ourselves more clues.
+ // A typical example is an overloaded type assigned after typer.
+ log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+
+ val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
+ def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
+ def checkAssertions() {
+ def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
+ debugassert(from != UNIT, msg)
+ assert(!from.isReferenceType && !to.isReferenceType, msg)
}
- else if (from == ThrowableReference && !(ThrowableClass.tpe <:< to.toType)) {
- log("Inserted check-cast on throwable to " + to + " at " + pos)
- ctx.bb.emit(CHECK_CAST(to))
+ if (conforms) from match {
+ case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
+ case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
+ case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
+ case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
+ case _ => ()
}
- else (from, to) match {
- case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG)))
- case _ => ()
+ else to match {
+ case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
+ case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
}
}
@@ -1907,18 +1901,8 @@ abstract class GenICode extends SubComponent {
var handlerCount = 0
- override def toString(): String = {
- val buf = new StringBuilder()
- buf.append("\tpackage: ").append(packg).append('\n')
- buf.append("\tclazz: ").append(clazz).append('\n')
- buf.append("\tmethod: ").append(method).append('\n')
- buf.append("\tbb: ").append(bb).append('\n')
- buf.append("\tlabels: ").append(labels).append('\n')
- buf.append("\texception handlers: ").append(handlers).append('\n')
- buf.append("\tcleanups: ").append(cleanups).append('\n')
- buf.append("\tscope: ").append(scope).append('\n')
- buf.toString()
- }
+ override def toString =
+ s"package $packg { class $clazz { def $method { bb=$bb } } }"
def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = {
debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index d185ed0c34..0abbe44b02 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1018,7 +1018,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index fe0020e074..598965b982 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -888,7 +888,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fee683ce3a..1beed3f420 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -18,6 +18,9 @@ abstract class DeadCodeElimination extends SubComponent {
import icodes.opcodes._
import definitions.RuntimePackage
+ /** The block and index where an instruction is located */
+ type InstrLoc = (BasicBlock, Int)
+
val phaseName = "dce"
/** Create a new phase */
@@ -55,10 +58,10 @@ abstract class DeadCodeElimination extends SubComponent {
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
- var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
+ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
+ val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
@@ -66,21 +69,29 @@ abstract class DeadCodeElimination extends SubComponent {
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
+ /** Map from a local and a basic block to the instructions that store to that local in that basic block */
+ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
+
+ /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
+ val clobbers = mutable.Set[InstrLoc]()
+
/** the current method. */
var method: IMethod = _
/** Map instructions who have a drop on some control path, to that DROP instruction. */
- val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
+ val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
debuglog("dead code elimination on " + m);
dropOf.clear()
+ localStores.clear()
+ clobbers.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
collectRDef(m)
- mark
+ mark()
sweep(m)
accessedLocals = accessedLocals.distinct
val diff = m.locals diff accessedLocals
@@ -102,12 +113,27 @@ abstract class DeadCodeElimination extends SubComponent {
useful(bb) = new mutable.BitSet(bb.size)
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
+
+ // utility for adding to worklist
+ def moveToWorkList() = moveToWorkListIf(true)
+
+ // utility for (conditionally) adding to worklist
+ def moveToWorkListIf(cond: Boolean) =
+ if (cond) {
+ debuglog("in worklist: " + i)
+ worklist += ((bb, idx))
+ } else {
+ debuglog("not in worklist: " + i)
+ }
+
+ // instruction-specific logic
i match {
- case LOAD_LOCAL(l) =>
+ case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
+ moveToWorkListIf(false)
- case STORE_LOCAL(_) =>
+ case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
* (otherwise any side-effects of the module's constructor go lost).
* (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
@@ -124,14 +150,25 @@ abstract class DeadCodeElimination extends SubComponent {
case _ => false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
+
+ // add it to the localStores map
+ val key = (l, bb)
+ val set = localStores(key)
+ set += idx
+ localStores(key) = set
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); debuglog("marking " + m1)
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+ moveToWorkList()
+
+ case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
+ moveToWorkList()
+
case CALL_METHOD(m1, SuperCall(_)) =>
- worklist += ((bb, idx)) // super calls to constructor
+ moveToWorkList() // super calls to constructor
+
case DROP(_) =>
val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
val (bb1, idx1) = p
@@ -140,12 +177,13 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
case _ =>
dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
-// println("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
+ debuglog("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
case _ => ()
+ moveToWorkListIf(false)
}
rd = rdef.interpret(bb, idx, rd)
}
@@ -162,17 +200,35 @@ abstract class DeadCodeElimination extends SubComponent {
def mark() {
// log("Starting with worklist: " + worklist)
while (!worklist.isEmpty) {
- val (bb, idx) = worklist.iterator.next
+ val (bb, idx) = worklist.head
worklist -= ((bb, idx))
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
+ // adds the instructions that define the stack values about to be consumed to the work list to
+ // be marked useful
+ def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
+ debuglog(s"\t${bb1(idx1)} is consumed by $instr")
+ worklist += ((bb1, idx1))
+ }
+
+ // DROP logic -- if an instruction is useful, its drops are also useful
+ // and we don't mark the DROPs as useful directly but add them to the
+ // worklist so we also mark their reaching defs as useful - see SI-7060
if (!useful(bb)(idx)) {
useful(bb) += idx
dropOf.get(bb, idx) foreach {
- for ((bb1, idx1) <- _)
- useful(bb1) += idx1
+ for ((bb1, idx1) <- _) {
+ /*
+ * SI-7060: A drop that we now mark as useful can be reached via several paths,
+ * so we should follow by marking all its reaching definition as useful too:
+ */
+ debuglog("\tAdding: " + bb1(idx1) + " to the worklist, as a useful DROP.")
+ worklist += ((bb1, idx1))
+ }
}
+
+ // per-instruction logic
instr match {
case LOAD_LOCAL(l1) =>
for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
@@ -180,6 +236,15 @@ abstract class DeadCodeElimination extends SubComponent {
worklist += ((bb1, idx1))
}
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ addDefs()
+ // see SI-5313
+ // search for clobbers of this store if we aren't doing l1 = null
+ // this doesn't catch the second store in x=null;l1=x; but in practice this catches
+ // a lot of null stores very cheaply
+ if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
+ findClobbers(l1, bb, idx + 1)
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -199,26 +264,86 @@ abstract class DeadCodeElimination extends SubComponent {
()
case _ =>
- for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- debuglog("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
+ addDefs()
}
}
}
}
+ /**
+ * Finds and marks all clobbers of the given local starting in the given
+ * basic block at the given index
+ *
+ * Storing to local variables of reference or array type may be indirectly
+ * observable because it may remove a reference to an object which may allow the object
+ * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a
+ * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked
+ * useful then its clobbers must go into the set of clobbers, which will be
+ * compensated for later
+ */
+ def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
+ // previously visited blocks tracked to prevent searching forever in a cycle
+ val inspected = mutable.Set[BasicBlock]()
+ // our worklist of blocks that still need to be checked
+ val blocksToBeInspected = mutable.Set[BasicBlock]()
+
+ // Tries to find the next clobber of l1 in bb1 starting at idx1.
+ // if it finds one it adds the clobber to clobbers set for later
+ // handling. If not it adds the direct successor blocks to
+ // the uninspectedBlocks to try to find clobbers there. Either way
+ // it adds the exception successor blocks for further search
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ val key = ((l, bb1))
+ val foundClobber = (localStores contains key) && {
+ def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
+
+ // find the smallest index greater than or equal to idx1
+ val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
+ if (clobberIdx == -1)
+ false
+ else {
+ debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
+ clobbers += ((bb1, clobberIdx))
+ true
+ }
+ }
+
+ // always need to look into the exception successors for additional clobbers
+ // because we don't know when flow might enter an exception handler
+ blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
+ // If we didn't find a clobber here then we need to look at successor blocks.
+ // if we found a clobber then we don't need to search in the direct successors
+ if (!foundClobber) {
+ blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
+ }
+ }
+
+ // first search starting at the current index
+ // note we don't put bb in the inspected list yet because a loop may later force
+ // us back around to search from the beginning of bb
+ findClobberInBlock(idx, bb)
+ // then loop until we've exhausted the set of uninspected blocks
+ while(!blocksToBeInspected.isEmpty) {
+ val bb1 = blocksToBeInspected.head
+ blocksToBeInspected -= bb1
+ inspected += bb1
+ findClobberInBlock(0, bb1)
+ }
+ }
+
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
+ debuglog("Sweeping: " + m)
+
m foreachBlock { bb =>
-// Console.println("** Sweeping block " + bb + " **")
+ debuglog(bb + ":")
val oldInstr = bb.toList
bb.open
bb.clear
for (Pair(i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
-// log(" " + i + " is useful")
+ debuglog(" * " + i + " is useful")
bb.emit(i, i.pos)
compensations.get(bb, idx) match {
case Some(is) => is foreach bb.emit
@@ -236,9 +361,15 @@ abstract class DeadCodeElimination extends SubComponent {
i match {
case NEW(REFERENCE(sym)) =>
log(s"Eliminated instantation of $sym inside $m")
+ case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
+ // if an unused instruction was a clobber of a used store to a reference or array type
+ // then we'll replace it with the store of a null to make sure the reference is
+ // eliminated. See SI-5313
+ bb emit CONSTANT(Constant(null))
+ bb emit STORE_LOCAL(l)
case _ => ()
}
- debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
+ debuglog(" " + i + " [swept]")
}
}
@@ -247,8 +378,8 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
- val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
+ private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
+ val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
@@ -259,6 +390,7 @@ abstract class DeadCodeElimination extends SubComponent {
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
val (bb, idx) = d
+ debuglog("rdef: "+ bb(idx))
bb(idx) match {
case DUP(_) if idx > 0 =>
bb(idx - 1) match {
@@ -287,7 +419,7 @@ abstract class DeadCodeElimination extends SubComponent {
res
}
- private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = {
+ private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
if (idx != -1)
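An illustrative source pattern for the SI-5313 clobber bookkeeping above (the snippet and its names are mine, not from the patch): the second store to buf is dead, but deleting it outright would leave the local pointing at the large array for the rest of the method, so the sweep re-emits it as a store of null instead.

    object ClobberSketch {
      def f(): Int = {
        var buf = new Array[Byte](1 << 20)   // useful store: read by `n` below
        val n   = buf.length
        buf = new Array[Byte](0)             // dead store, a "clobber" of the useful one
        // ... long-running work during which the 1 MB array should be collectable ...
        n
      }
    }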
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index f60d56d9bb..f509c63ba0 100755
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -100,26 +100,26 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
- protected val endOfText = '\u0003'
- protected val endOfLine = '\u000A'
+ private val endOfText = '\u0003'
+ private val endOfLine = '\u000A'
/** Something that should not have happened, happened, and Scaladoc should exit. */
- protected def oops(msg: String): Nothing =
+ private def oops(msg: String): Nothing =
throw FatalError("program logic: " + msg)
/** The body of a line, dropping the (optional) start star-marker,
* one leading whitespace and all trailing whitespace. */
- protected val CleanCommentLine =
+ private val CleanCommentLine =
new Regex("""(?:\s*\*\s?)?(.*)""")
/** Dangerous HTML tags that should be replaced by something safer,
* such as wiki syntax, or that should be dropped. */
- protected val DangerousTags =
+ private val DangerousTags =
new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
/** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
* if it cannot be salvaged. */
- protected def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
case "p" | "div" => "\n\n"
case "h1" => "\n= "
case "/h1" => " =\n"
@@ -135,11 +135,11 @@ trait CommentFactoryBase { this: MemberLookupBase =>
/** Javadoc tags that should be replaced by something useful, such as wiki
* syntax, or that should be dropped. */
- protected val JavadocTags =
+ private val JavadocTags =
new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
/** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
- protected def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
case "code" => "`" + mtch.group(2) + "`"
case "docRoot" => ""
case "inheritDoc" => ""
@@ -151,41 +151,41 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
/** Safe HTML tags that can be kept. */
- protected val SafeTags =
+ private val SafeTags =
new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
- protected val safeTagMarker = '\u000E'
+ private val safeTagMarker = '\u000E'
/** A Scaladoc tag not linked to a symbol and not followed by text */
- protected val SingleTag =
+ private val SingleTagRegex =
new Regex("""\s*@(\S+)\s*""")
/** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
- protected val SimpleTag =
+ private val SimpleTagRegex =
new Regex("""\s*@(\S+)\s+(.*)""")
/** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
* of the symbol, and the rest of the line. */
- protected val SymbolTag =
+ private val SymbolTagRegex =
new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
/** The start of a scaladoc code block */
- protected val CodeBlockStart =
+ private val CodeBlockStartRegex =
new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
/** The end of a scaladoc code block */
- protected val CodeBlockEnd =
+ private val CodeBlockEndRegex =
new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
/** A key used for a tag map. The key is built from the name of the tag and
* from the linked symbol if the tag has one.
* Equality on tag keys is structural. */
- protected sealed abstract class TagKey {
+ private sealed abstract class TagKey {
def name: String
}
- protected final case class SimpleTagKey(name: String) extends TagKey
- protected final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+ private final case class SimpleTagKey(name: String) extends TagKey
+ private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
/** Parses a raw comment string into a `Comment` object.
* @param comment The expanded comment string (including start and end markers) to be parsed.
@@ -231,7 +231,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
inCodeBlock: Boolean
): Comment = remaining match {
- case CodeBlockStart(before, marker, after) :: ls if (!inCodeBlock) =>
+ case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
else if (!before.trim.isEmpty)
@@ -250,7 +250,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
}
- case CodeBlockEnd(before, marker, after) :: ls =>
+ case CodeBlockEndRegex(before, marker, after) :: ls =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
if (!before.trim.isEmpty)
@@ -269,17 +269,17 @@ trait CommentFactoryBase { this: MemberLookupBase =>
parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
}
- case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) =>
+ case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
val key = SymbolTagKey(name, sym)
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
- case SimpleTag(name, body) :: ls if (!inCodeBlock) =>
+ case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
val key = SimpleTagKey(name)
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
- case SingleTag(name) :: ls if (!inCodeBlock) =>
+ case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
val key = SimpleTagKey(name)
val value = "" :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 86407fb9a3..c76bdc58d9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -48,10 +48,28 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
</div>
</body>
+ def letters: NodeSeq =
+ '_' +: ('a' to 'z') map {
+ char => {
+ val label = if (char == '_') '#' else char.toUpper
+
+ index.firstLetterIndex.get(char) match {
+ case Some(_) =>
+ <a target="template" href={ "index/index-" + char + ".html" }>{
+ label
+ }</a>
+ case None => <span>{ label }</span>
+ }
+ }
+ }
+
def browser =
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
- <div id="filter"></div>
+ <div id="filter">
+ <div id="textfilter"></div>
+ <div id="letters">{ letters }</div>
+ </div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
<xml:group>
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index 2a8f9b570a..55fb370a41 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -206,7 +206,7 @@ h1 {
border-right:0;
}
-#letters > a {
+#letters > a, #letters > span {
/* font-family: monospace;*/
color: #858484;
font-weight: bold;
@@ -214,6 +214,10 @@ h1 {
text-shadow: #ffffff 0 1px 0;
padding-right: 2px;
}
+
+#letters > span {
+ color: #bbb;
+}
#tpl {
display: block;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index 1323a06c01..70073b272a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -335,8 +335,7 @@ function keyboardScrolldownLeftPane() {
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/></div>");
- printAlphabet();
+ $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
var input = $("#textfilter input");
resizeFilterBlock();
input.bind('keyup', function(event) {
@@ -532,19 +531,3 @@ function kindFilterSync() {
function resizeFilterBlock() {
$("#tpl").css("top", $("#filter").outerHeight(true));
}
-
-function printAlphabet() {
- var html = '<a target="template" href="index/index-_.html">#</a>';
- var c;
- for (c = 'a'; c <= 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) {
- html += [
- '<a target="template" href="index/index-',
- c,
- '.html">',
- c.toUpperCase(),
- '</a>'
- ].join('');
- }
- $("#filter").append('<div id="letters">' + html + '</div>');
-}
-
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 10e2f23142..4ee6daf73e 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -20,7 +20,7 @@ object IndexModelFactory {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
- implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
+ implicit def orderingMap = math.Ordering.String
def addMember(d: MemberEntity) = {
val firstLetter = {
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index c6cfc317ea..0a469c9227 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -314,12 +314,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
inform("Creating doc template for " + sym)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- def inSource =
- if (sym.sourceFile != null && ! sym.isSynthetic)
- Some((sym.sourceFile, sym.pos.line))
+
+ protected def inSourceFromSymbol(symbol: Symbol) =
+ if (symbol.sourceFile != null && ! symbol.isSynthetic)
+ Some((symbol.sourceFile, symbol.pos.line))
else
None
+ def inSource = inSourceFromSymbol(sym)
+
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -508,11 +511,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val linearization = {
- val symbol = sym.info.members.find {
+ override lazy val (inSource, linearization) = {
+ val representive = sym.info.members.find {
s => s.isPackageObject
} getOrElse sym
- linearizationFromSymbol(symbol)
+ (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
}
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index b4af8f00d6..73738ebd21 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -139,7 +139,12 @@ trait CompilerControl { self: Global =>
/** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
* @pre `source` needs to be loaded.
+ *
+ * @note Deprecated because of race conditions in the typechecker when the background compiler
+ * is interrupted while typing the same `source`.
+ * @see SI-6578
*/
+ @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
postWorkItem(new AskTypeItem(source, forceReload, response))
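For client code, the replacement is to reload the source and then wait for the whole-unit typecheck. A minimal sketch, assuming `compiler` is an interactive.Global instance and `source` a SourceFile obtained elsewhere (both hypothetical here); askReload, askLoadedTyped and Response are the API pieces shown in this file:

    // Hedged sketch of the non-deprecated path (see SI-6578): reload, then wait for the typed tree.
    val reloaded = new compiler.Response[Unit]
    compiler.askReload(List(source), reloaded)
    reloaded.get                                    // block until the reload has been processed

    val typed = new compiler.Response[compiler.Tree]
    compiler.askLoadedTyped(source, typed)          // replaces the deprecated askType
    typed.get.left.toOption foreach (tree => println(tree))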
@@ -157,6 +162,20 @@ trait CompilerControl { self: Global =>
def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
postWorkItem(new AskLinkPosItem(sym, source, response))
+ /** Sets sync var `response` to doc comment information for a given symbol.
+ *
+ * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
+ * @param site The place where sym is observed.
+ * @param source The source file that's supposed to contain the definition
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition of a given symbol that has a doc comment,
+ * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
+ * Note: This operation does not automatically load `source`. If `source`
+ * is unloaded, it stays that way.
+ */
+ def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) =
+ postWorkItem(new AskDocCommentItem(sym, site, source, response))
+
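A hedged sketch of how an IDE client might drive this new entry point; `compiler`, `sym`, `site` and `source` are assumed to be in hand already, and only the askDocComment signature and the (expanded, raw, position) triplet come from this patch:

    val doc = new compiler.Response[(String, String, compiler.Position)]
    compiler.askDocComment(sym, site, source, doc)
    doc.get.left.toOption foreach {
      case ("", "", _)        => println("no doc comment found for " + sym)
      case (expanded, _, pos) => println(s"doc comment at $pos:\n$expanded")
    }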
/** Sets sync var `response` to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
* @pre source is loaded
@@ -240,15 +259,12 @@ trait CompilerControl { self: Global =>
}
/** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
- * Can be called asynchronously from presentation compiler.
+ *
+ * This method is thread-safe and as such can safely run outside of the presentation
+ * compiler thread.
*/
- def parseTree(source: SourceFile): Tree = ask { () =>
- getUnit(source) match {
- case Some(unit) if unit.status >= JustParsed =>
- unit.body
- case _ =>
- new UnitParser(new CompilationUnit(source)).parse()
- }
+ def parseTree(source: SourceFile): Tree = {
+ new UnitParser(new CompilationUnit(source)).parse()
}
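Because the method no longer round-trips through ask, a client may now invoke it from any thread. A small sketch, assuming `compiler` is an interactive.Global and that BatchSourceFile lives at its 2.10 location:

    import scala.reflect.internal.util.BatchSourceFile
    // Parse only: no symbols are entered; syntax errors go to the compiler's reporter.
    val src  = new BatchSourceFile("<sketch>", "class C { def f = 1 }")
    val tree = compiler.parseTree(src)   // safe to call outside the presentation compiler thread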
/** Asks for a computation to be done quickly on the presentation compiler thread */
@@ -374,6 +390,14 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
+ case class AskDocCommentItem(val sym: Symbol, val site: Symbol, val source: SourceFile, response: Response[(String, String, Position)]) extends WorkItem {
+ def apply() = self.getDocComment(sym, site, source, response)
+ override def toString = "doc comment "+sym+" in "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
override def toString = "wait loaded & typed "+source
diff --git a/src/compiler/scala/tools/nsc/interactive/Doc.scala b/src/compiler/scala/tools/nsc/interactive/Doc.scala
deleted file mode 100755
index ad28a28105..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/Doc.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2012 LAMP/EPFL
- * @author Eugene Vigdorchik
- */
-
-package scala.tools.nsc
-package interactive
-
-import doc.base._
-import comment._
-import scala.xml.NodeSeq
-
-sealed trait DocResult
-final case class UrlResult(url: String) extends DocResult
-final case class HtmlResult(comment: Comment) extends DocResult
-
-abstract class Doc(val settings: doc.Settings) extends MemberLookupBase with CommentFactoryBase {
-
- override val global: interactive.Global
- import global._
-
- def chooseLink(links: List[LinkTo]): LinkTo
-
- override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
- ask { () =>
- if (sym.isClass || sym.isModule)
- Some(LinkToTpl(sym))
- else
- if ((site.isClass || site.isModule) && site.info.members.toList.contains(sym))
- Some(LinkToMember(sym, site))
- else
- None
- }
-
- override def toString(link: LinkTo) = ask { () =>
- link match {
- case LinkToMember(mbr: Symbol, site: Symbol) =>
- mbr.signatureString + " in " + site.toString
- case LinkToTpl(sym: Symbol) => sym.toString
- case _ => link.toString
- }
- }
-
- def retrieve(sym: Symbol, site: Symbol): Option[DocResult] = {
- val sig = ask { () => externalSignature(sym) }
- findExternalLink(sym, sig) map { link => UrlResult(link.url) } orElse {
- val resp = new Response[Tree]
- // Ensure docComment tree is type-checked.
- val pos = ask { () => docCommentPos(sym) }
- askTypeAt(pos, resp)
- resp.get.left.toOption flatMap { _ =>
- ask { () =>
- val comment = parseAtSymbol(expandedDocComment(sym), rawDocComment(sym), pos, Some(site))
- Some(HtmlResult(comment))
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 4ab7b98b3d..105b0e4833 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -225,7 +225,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
/** Called from parser, which signals hereby that a method definition has been parsed.
*/
override def signalParseProgress(pos: Position) {
- checkForMoreWork(pos)
+ // We only want to be interruptible when running on the PC thread.
+ if(onCompilerThread) {
+ checkForMoreWork(pos)
+ }
}
/** Called from typechecker, which signals hereby that a node has been completely typechecked.
@@ -447,7 +450,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
*/
@elidable(elidable.WARNING)
override def assertCorrectThread() {
- assert(initializing || (Thread.currentThread() eq compileRunner),
+ assert(initializing || onCompilerThread,
"Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
" Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
}
@@ -462,6 +465,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
compileRunner
}
+ private def ensureUpToDate(unit: RichCompilationUnit) =
+ if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+
/** Compile all loaded source files in the order given by `allSources`.
*/
private[interactive] final def backgroundCompile() {
@@ -474,7 +480,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
// ensure all loaded units are parsed
for (s <- allSources; unit <- getUnit(s)) {
// checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state
- if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+ ensureUpToDate(unit)
parseAndEnter(unit)
serviceParsedEntered()
}
@@ -727,7 +733,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
try {
debugLog("starting targeted type check")
typeCheck(unit)
- println("tree not found at "+pos)
+// println("tree not found at "+pos)
EmptyTree
} catch {
case ex: TyperResult => new Locator(pos) locateIn ex.tree
@@ -758,64 +764,69 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
respond(response)(typedTree(source, forceReload))
}
- /** Implements CompilerControl.askLinkPos */
- private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+ private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
+ getUnit(source) match {
+ case None =>
+ reloadSources(List(source))
+ try f(getUnit(source).get)
+ finally afterRunRemoveUnitOf(source)
+ case Some(unit) =>
+ f(unit)
+ }
- /** Find position of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
- def findLinkPos(unit: RichCompilationUnit): Position = {
- val originalTypeParams = sym.owner.typeParams
- parseAndEnter(unit)
- val pre = adaptToNewRunMap(ThisType(sym.owner))
- val rawsym = pre.typeSymbol.info.decl(sym.name)
- val newsym = rawsym filter { alt =>
- sym.isType || {
- try {
- val tp1 = pre.memberType(alt) onTypeError NoType
- val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
- matchesType(tp1, tp2, false) || {
- debugLog(s"getLinkPos matchesType($tp1, $tp2) failed")
- val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
- matchesType(tp1, tp3, false) || {
- debugLog(s"getLinkPos fallback matchesType($tp1, $tp3) failed")
- false
- }
- }
- }
- catch {
- case ex: ControlThrowable => throw ex
- case ex: Throwable =>
- println("error in hyperlinking: " + ex)
- ex.printStackTrace()
+ /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
+ private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
+ val originalTypeParams = sym.owner.typeParams
+ ensureUpToDate(unit)
+ parseAndEnter(unit)
+ val pre = adaptToNewRunMap(ThisType(sym.owner))
+ val rawsym = pre.typeSymbol.info.decl(sym.name)
+ val newsym = rawsym filter { alt =>
+ sym.isType || {
+ try {
+ val tp1 = pre.memberType(alt) onTypeError NoType
+ val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
+ matchesType(tp1, tp2, false) || {
+ debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
+ val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
+ matchesType(tp1, tp3, false) || {
+ debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
false
+ }
}
}
- }
- if (newsym == NoSymbol) {
- if (rawsym.exists && !rawsym.isOverloaded) rawsym.pos
- else {
- debugLog("link not found " + sym + " " + source + " " + pre)
- NoPosition
+ catch {
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
+ debugLog("error in findMirrorSymbol: " + ex)
+ ex.printStackTrace()
+ false
}
- } else if (newsym.isOverloaded) {
- settings.uniqid.value = true
- debugLog("link ambiguous " + sym + " " + source + " " + pre + " " + newsym.alternatives)
- NoPosition
- } else {
- debugLog("link found for " + newsym + ": " + newsym.pos)
- newsym.pos
}
}
+ if (newsym == NoSymbol) {
+ if (rawsym.exists && !rawsym.isOverloaded) rawsym
+ else {
+ debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
+ NoSymbol
+ }
+ } else if (newsym.isOverloaded) {
+ settings.uniqid.value = true
+ debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
+ NoSymbol
+ } else {
+ debugLog("mirror found for " + newsym + ": " + newsym.pos)
+ newsym
+ }
+ }
+ /** Implements CompilerControl.askLinkPos */
+ private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
informIDE("getLinkPos "+sym+" "+source)
respond(response) {
if (sym.owner.isClass) {
- getUnit(source) match {
- case None =>
- reloadSources(List(source))
- try findLinkPos(getUnit(source).get)
- finally afterRunRemoveUnitOf(source)
- case Some(unit) =>
- findLinkPos(unit)
+ withTempUnit(source){ u =>
+ findMirrorSymbol(sym, u).pos
}
} else {
debugLog("link not in class "+sym+" "+source+" "+sym.owner)
@@ -824,6 +835,50 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ /** Implements CompilerControl.askDocComment */
+ private[interactive] def getDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) {
+ informIDE("getDocComment "+sym+" "+source)
+ respond(response) {
+ withTempUnit(source){ u =>
+ val mirror = findMirrorSymbol(sym, u)
+ if (mirror eq NoSymbol)
+ ("", "", NoPosition)
+ else {
+ forceDocComment(mirror, u)
+ (expandedDocComment(mirror), rawDocComment(mirror), docCommentPos(mirror))
+ }
+ }
+ }
+ }
+
+ private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+ // Either typer has been run and we don't find DocDef,
+ // or we force the targeted typecheck here.
+ // In both cases doc comment maps should be filled for the subject symbol.
+ val docTree =
+ unit.body find {
+ case DocDef(_, defn) if defn.symbol eq sym => true
+ case _ => false
+ }
+
+ for (t <- docTree) {
+ debugLog("Found DocDef tree for "+sym)
+ // Cannot get a typed tree at position since DocDef range is transparent.
+ val prevPos = unit.targetPos
+ val prevInterruptsEnabled = interruptsEnabled
+ try {
+ unit.targetPos = t.pos
+ interruptsEnabled = true
+ typeCheck(unit)
+ } catch {
+ case _: TyperResult => // ignore since we are after the side effect.
+ } finally {
+ unit.targetPos = prevPos
+ interruptsEnabled = prevInterruptsEnabled
+ }
+ }
+ }
+
def stabilizedType(tree: Tree): Type = tree match {
case Ident(_) if tree.symbol.isStable =>
singleType(NoPrefix, tree.symbol)
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index ffad19fbaa..84cb03c140 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -165,6 +165,11 @@ trait Picklers { self: Global =>
.wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
.asClass (classOf[AskLinkPosItem])
+ implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
+ (pkl[Symbol] ~ pkl[Symbol] ~ pkl[SourceFile])
+ .wrapped { case sym ~ site ~ source => new AskDocCommentItem(sym, site, source, new Response) } { item => item.sym ~ item.site ~ item.source }
+ .asClass (classOf[AskDocCommentItem])
+
implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
pkl[SourceFile]
.wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source }
@@ -182,5 +187,5 @@ trait Picklers { self: Global =>
implicit def action: Pickler[() => Unit] =
reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
- askToDoFirstItem | askLinkPosItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+ askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index dacfa679dd..7b89d5b0aa 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -110,11 +110,6 @@ object REPL {
show(completeResult)
}
- def doTypedTree(file: String) {
- comp.askType(toSourceFile(file), true, typedResult)
- show(typedResult)
- }
-
def doStructure(file: String) {
comp.askParsedEntered(toSourceFile(file), false, structureResult)
show(structureResult)
@@ -175,10 +170,8 @@ object REPL {
comp.askReload(List(toSourceFile(file)), reloadResult)
Thread.sleep(millis.toInt)
println("ask type now")
- comp.askType(toSourceFile(file), false, typedResult)
+ comp.askLoadedTyped(toSourceFile(file), typedResult)
typedResult.get
- case List("typed", file) =>
- doTypedTree(file)
case List("typeat", file, off1, off2) =>
doTypeAt(makePos(file, off1, off2))
case List("typeat", file, off1) =>
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 64117bd8ee..b95f1fa7ca 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -144,7 +144,7 @@ self: scala.tools.nsc.Global =>
*/
private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
for (tree <- trees) {
- if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ if (!tree.isEmpty && tree.pos == NoPosition) {
val children = tree.children
if (children.isEmpty) {
tree setPos pos.focus
@@ -165,7 +165,7 @@ self: scala.tools.nsc.Global =>
*/
override def atPos[T <: Tree](pos: Position)(tree: T): T = {
if (pos.isOpaqueRange) {
- if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ if (!tree.isEmpty && tree.pos == NoPosition) {
tree.setPos(pos)
val children = tree.children
if (children.nonEmpty) {
@@ -203,7 +203,7 @@ self: scala.tools.nsc.Global =>
def validate(tree: Tree, encltree: Tree): Unit = {
- if (!tree.isEmpty && tree.canHaveAttrs) {
+ if (!tree.isEmpty) {
if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index 62d274bc70..597b9012ce 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -55,7 +55,6 @@ abstract class InteractiveTest
with AskShutdown
with AskReload
with AskLoadedTyped
- with AskType
with PresentationCompilerInstance
with CoreTestDefs
with InteractiveTestSettings { self =>
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
index eb902e3e6c..8d446cbbf8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
@@ -97,23 +97,6 @@ trait AskTypeAt extends AskCommand {
}
}
-
-trait AskType extends AskCommand {
- import compiler.Tree
-
- protected def askType(source: SourceFile, forceReload: Boolean)(implicit reporter: Reporter): Response[Tree] = {
- ask {
- compiler.askType(source, forceReload, _)
- }
- }
-
- protected def askType(sources: Seq[SourceFile], forceReload: Boolean)(implicit reporter: Reporter): Seq[Response[Tree]] = {
- for(source <- sources) yield
- askType(source, forceReload)
- }
-}
-
-
trait AskLoadedTyped extends AskCommand {
import compiler.Tree
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index ea2333a65b..7bb2b7b7c8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -8,7 +8,8 @@ import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
private[tests] trait PresentationCompilerInstance extends TestSettings {
protected val settings = new Settings
- protected def docSettings: doc.Settings = new doc.Settings(_ => ())
+ protected val docSettings = new doc.Settings(_ => ())
+
protected val compilerReporter: CompilerReporter = new InteractiveReporter {
override def compiler = PresentationCompilerInstance.this.compiler
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index a55f0af116..bed8570bd0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -262,7 +262,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
settings.outputDirs setSingleOutput virtualDirectory
settings.exposeEmptyPackage.value = true
- new Global(settings, reporter) with ReplGlobal
+ new Global(settings, reporter) with ReplGlobal {
+ override def toString: String = "<global>"
+ }
}
/** Parent classloader. Overridable. */
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index 10f972452f..5fd5b41625 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -37,6 +37,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ if ((history: History) ne NoHistory)
+ this setHistory history
+
// working around protected/trait/java insufficiencies.
def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
@@ -51,8 +54,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
this setBellEnabled false
- if ((history: History) ne NoHistory)
- this setHistory history
if (completion ne NoCompletion) {
val argCompletor: ArgumentCompleter =
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 050f7a8f95..43a8402fc7 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -551,7 +551,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
}
mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 9d01e73063..dbb9b7a003 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -15,6 +15,7 @@ import symtab.Flags
import mutable.ListBuffer
import scala.annotation.elidable
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -821,7 +822,7 @@ trait ParallelMatching extends ast.TreeDSL
// match that's unimportant; so we add an instance check only if there
// is a binding.
def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
+ if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
cunit.warning(scrutTree.pos,
"A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 06ebc20d3e..5c852ae07c 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -38,14 +38,25 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
+
+ override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ val (r, args) = super.processArguments(arguments, processAll)
+ // we need to ensure the files specified with relative locations are absolutized based on the currentDir
+ (r, args map {a => absolutizePath(a)})
+ }
+
+ /**
+   * Takes an individual path and, if it is not absolute, turns it into an absolute path based on currentDir.
+ * If it's already absolute then it's left alone.
+ */
+ private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path
- /** All user set settings rewritten with absolute paths. */
- def absolutize(root: Path) {
- def rewrite(p: String) = (root resolve Path(p)).normalize.path
+ /** All user set settings rewritten with absolute paths based on currentDir */
+ def absolutize() {
userSetSettings foreach {
- case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(rewrite(p.value))
- case p: PathSetting => p.value = ClassPath.map(p.value, rewrite)
- case p: StringSetting => if (holdsPath(p)) p.value = rewrite(p.value)
+ case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(absolutizePath(p.value))
+ case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath)
+ case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value)
case _ => ()
}
}
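A small illustration of absolutizePath using the same Path calls as above (the directory values are hypothetical):

    import scala.tools.nsc.io.Path
    val currentDir = Path("/home/alice/project")   // stands in for the fsc server's currentDir setting
    def absolutizePath(p: String) = (currentDir resolve Path(p)).normalize.path
    absolutizePath("src/Main.scala")     // "/home/alice/project/src/Main.scala"
    absolutizePath("/tmp/Other.scala")   // already absolute, so returned unchanged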
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index f1f289ed4d..e4f99474e1 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -221,6 +221,7 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -486,6 +487,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
+ /** A setting represented by a Scala version, (`default` unless set) */
+ class ScalaVersionSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ default: ScalaVersion)
+ extends Setting(name, descr) {
+ import ScalaVersion._
+
+ type T = ScalaVersion
+ protected var v: T = NoScalaVersion
+
+ override def tryToSet(args: List[String]) = {
+ value = default
+ Some(args)
+ }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => value = default; Some(Nil)
+ case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ }
+
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
+
+ def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
+
+ withHelpSyntax(s"${name}:<${arg}>")
+ }
+
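A sketch of how the two syntaxes behave once -Xmigration is wired to this setting type (see the ScalaSettings change below); the Settings construction is ordinary, the rest is from this patch:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.settings.{ AnyScalaVersion, ScalaVersion }

    val s1 = new Settings()
    s1.processArguments(List("-Xmigration"), processAll = true)
    s1.Xmigration.value == AnyScalaVersion          // bare flag: falls back to the declared default

    val s2 = new Settings()
    s2.processArguments(List("-Xmigration:2.7"), processAll = true)
    s2.Xmigration.value < ScalaVersion.twoDotEight  // true, so the 2.8 migration warnings fire

If the flag is never given, the value stays NoScalaVersion, which sorts above every real version, so none of the `< twoDotEight` checks elsewhere in this patch trigger.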
class PathSetting private[nsc](
name: String,
descr: String,
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index b820d10ddc..3df6334ec1 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -85,8 +85,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.").
- withDeprecationMessage("This setting is no longer useful and will be removed. Please remove it from your build.")
+ val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -169,13 +168,13 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
 val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
- val companionsInPkgObjs = BooleanSetting("-Ycompanions-in-pkg-objs", "Allow companion objects and case classes in package objects. See issue SI-5954.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
new file mode 100644
index 0000000000..d6a0149411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -0,0 +1,194 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+// $Id$
+
+package scala.tools.nsc.settings
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A scala version that sorts higher than all actual versions
+ */
+case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. A SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to segregate builds.
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Factory methods for producing ScalaVersions
+ */
+object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
+ def errorAndValue() = {
+ errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
+ AnyScalaVersion
+ }
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = util.Try(toInt(s)).isSuccess
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "none" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
+ case _ =>
+ errorAndValue()
+ } catch {
+ case e: NumberFormatException => errorAndValue()
+ }
+ }
+
+ def apply(versionString: String): ScalaVersion =
+ apply(versionString, msg => throw new NumberFormatException(msg))
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = apply(util.Properties.versionNumberString)
+
+ /**
+ * The 2.8.0 version.
+ */
+ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final)
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+/**
+ * A development, test, nightly, snapshot or other "unofficial" build
+ */
+case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+}
+/**
+ * A final release
+ */
+case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+}
+
+/**
+ * A candidate for final release
+ */
+case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+}
+
+/**
+ * An intermediate release
+ */
+case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+}
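A short REPL-style exercise of the parsing and ordering defined above; everything referenced is from this file:

    import scala.tools.nsc.settings._

    ScalaVersion("2.10.1")       // SpecificScalaVersion(2, 10, 1, Final)
    ScalaVersion("2.10")         // SpecificScalaVersion(2, 10, 0, Final) -- missing parts default to 0
    ScalaVersion("2.10.1-RC2")   // SpecificScalaVersion(2, 10, 1, RC(2))
    ScalaVersion("2.11.0-M1")    // SpecificScalaVersion(2, 11, 0, Milestone(1))

    // Build ordering: milestones < release candidates < final < development builds.
    assert(ScalaVersion("2.10.1-RC2") < ScalaVersion("2.10.1"))
    // The two magic versions bracket all real ones.
    assert(AnyScalaVersion < ScalaVersion.twoDotEight)
    assert(ScalaVersion.twoDotEight < NoScalaVersion)
    assert(ScalaVersion("2.8.0") == ScalaVersion.twoDotEight)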
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 19b32dd5fb..a517a33279 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1042,8 +1042,9 @@ abstract class ClassfileParser {
def parseExceptions(len: Int) {
val nClasses = in.nextChar
for (n <- 0 until nClasses) {
+ // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
val cls = pool.getClassSymbol(in.nextChar.toInt)
- sym.addAnnotation(definitions.ThrowsClass, Literal(Constant(cls.tpe)))
+ sym.addThrowsAnnotation(cls)
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 25b7813646..e8b0cd2696 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -71,8 +71,8 @@ abstract class Pickler extends SubComponent {
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
unit.error(t.pos, t.symbol.typeParams.length match {
case 0 => "macro has not been expanded"
- case 1 => "type parameter not specified"
- case _ => "type parameters not specified"
+ case 1 => "this type parameter must be specified"
+ case _ => "these type parameters must be specified"
})
return
}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 44510ab0c2..7a0b034fd0 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -450,19 +450,31 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* is a value type (int et al.) in which case it must cast to the boxed version
* because invoke only returns object and erasure made sure the result is
* expected to be an AnyRef. */
- val t: Tree = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, mparams)
-
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
+ }
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
+ )
+ }
}
/* For testing purposes, the dynamic application's condition
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index ec0797acb5..4891ef2fd1 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -511,6 +511,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
+ argss = ListOfNil,
body = List(applyMethodDef),
superPos = impl.pos)
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 1003d417f6..78c120c1ad 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -10,6 +10,7 @@ import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable.ListBuffer
import matching.{ Patterns, ParallelMatching }
+import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -553,7 +554,7 @@ abstract class ExplicitOuter extends InfoTransform
}
case _ =>
- if (settings.Xmigration28.value) tree match {
+ if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
if (isArraySeqTest(qual.tpe, args.head.tpe))
unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 39e16c3f58..bc54054028 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -75,36 +75,58 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
assert(matching.nonEmpty,
- s"no extension method found for $imeth:${imeth.tpe} among ${candidates.map(c => c.name+":"+c.tpe).toList} / ${extensionNames(imeth).toList}")
+ sm"""|no extension method found for:
+ |
+ | $imeth:${imeth.tpe}
+ |
+ | Candidates:
+ |
+ | ${candidates.map(c => c.name+":"+c.tpe).mkString("\n")}
+ |
+ | Candidates (signatures normalized):
+ |
+ | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
+ |
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
matching.head
}
+ /** Recognize a MethodType which represents an extension method.
+ *
+ * It may have a curried parameter list with the `$this` alone in the first
+ * parameter list, in which case that parameter list is dropped. Or, since
+ * the curried lists disappear during uncurry, it may have a single parameter
+ * list with `$this` as the first parameter, in which case that parameter is
+ * removed from the list.
+ */
+ object ExtensionMethodType {
+ def unapply(tp: Type) = tp match {
+ case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF =>
+ Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) ))
+ case _ =>
+ None
+ }
+ }
+
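Concretely, the two method-type shapes the extractor accepts, shown schematically (the Foo/Int types are placeholders):

    // Curried, as created by extmethods itself:      ($this: Foo)(x: Int)Int
    //   MethodType(List($this), MethodType(List(x), Int))  ==> Some(($this, MethodType(List(x), Int)))
    // Flattened, as seen after uncurry/specialize:   ($this: Foo, x: Int)Int
    //   MethodType(List($this, x), Int)                    ==> Some(($this, MethodType(List(x), Int)))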
/** This method removes the `$this` argument from the parameter list a method.
*
* A method may be a `PolyType`, in which case we tear out the `$this` and the class
- * type params from its nested `MethodType`.
- * It may be a `MethodType`, either with a curried parameter list in which the first argument
- * is a `$this` - we just return the rest of the list.
- * This means that the corresponding symbol was generated during `extmethods`.
- *
- * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list.
- * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards,
- * for instance, during `specialize`.
- * In this case, the first argument is `$this` and we just get rid of it.
+ * type params from its nested `MethodType`. Or it may be a MethodType, as
+ * described at the ExtensionMethodType extractor.
*/
private def normalize(stpe: Type, clazz: Symbol): Type = stpe match {
case PolyType(tparams, restpe) =>
- GenPolyType(tparams dropRight clazz.typeParams.length, normalize(restpe.substSym(tparams takeRight clazz.typeParams.length, clazz.typeParams), clazz))
- case MethodType(List(thiz), restpe) if thiz.name == nme.SELF =>
- restpe
- case MethodType(tparams, restpe) =>
- MethodType(tparams.drop(1), restpe)
+ // method type parameters, class type parameters
+ val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length)
+ GenPolyType(mtparams,
+ normalize(restpe.substSym(ctparams, clazz.typeParams), clazz))
+ case ExtensionMethodType(thiz, etpe) =>
+ etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
case _ =>
stpe
}
class Extender(unit: CompilationUnit) extends TypingTransformer(unit) {
-
private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
@@ -115,27 +137,54 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
+ /** We will need to clone the info of the original method (which obtains clones
+ * of the method type parameters), clone the type parameters of the value class,
+ * and create a new polymethod with the union of all those type parameters, with
+ * their infos adjusted to be consistent with their new home. Example:
+ *
+ * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ * def baz[B >: A](x: B): List[B] = x :: xs
+ * // baz has to be transformed into this extension method, where
+ * // A is cloned from class Foo and B is cloned from method baz:
+ * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+ * }
+ *
+ * TODO: factor out the logic for consolidating type parameters from a class
+ * and a method for re-use elsewhere, because nobody will get this right without
+ * some higher level facilities.
+ */
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- // No variance for method type parameters
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
+ val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth
+ // Start with the class type parameters - clones will be method type parameters
+ // so must drop their variance.
+ val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
+
+ val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*)
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
- def transform(clonedType: Type): Type = clonedType match {
- case MethodType(params, restpe) =>
- // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried.
- MethodType(List(thisParam), clonedType)
- case NullaryMethodType(restpe) =>
- MethodType(List(thisParam), restpe)
- }
- val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth
- GenPolyType(tparams ::: newTypeParams, transform(restpe) substSym (clazz.typeParams, newTypeParams))
- }
+ val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult))
+ val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam)
- private def allParams(tpe: Type): List[Symbol] = tpe match {
- case MethodType(params, res) => params ::: allParams(res)
- case _ => List()
- }
+ def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass)
+ def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass)
+
+ // We can't substitute symbols on the entire polytype because we
+ // need to modify the bounds of the cloned type parameters, but we
+ // don't want to substitute for the cloned type parameters themselves.
+ val tparams = tparamsFromMethod ::: tparamsFromClass
+ GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType))
+ // For reference, calling fix on the GenPolyType plays out like this:
+ // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966]
+ // do not conform to method extension$baz#16148's type parameter bounds
+ //
+ // And the difference is visible here. See how B is bounded from below by A#16149
+ // in both cases, but in the failing case, the other type parameter has turned into
+ // a different A. (What is that A? It is a clone of the original A created in
+ // SubstMap during the call to substSym, but I am not clear on all the particulars.)
+ //
+ // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
+ // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
+ }
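To make the extensionMethInfo comment concrete, here is the user-level picture for the Foo/baz example it cites; the generated signature below is the shape the comment describes, not literal output of this patch:

    class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
      def baz[B >: A](x: B): List[B] = x :: xs
    }
    // After extmethods, baz forwards to a final method on Foo's companion, with the
    // method's B and a clone of the class's A merged into one type parameter list
    // and the receiver passed explicitly:
    //
    //   def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]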
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -144,42 +193,62 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
wrap over other value classes anyway.
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
} else tree
case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension =>
- val companion = currentOwner.companionModule
- val origMeth = tree.symbol
- val extensionName = extensionNames(origMeth).head
- val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
- .setAnnotations(origMeth.annotations)
- companion.info.decls.enter(extensionMeth)
- val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner)
+ val origMeth = tree.symbol
+ val origThis = currentOwner
+ val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params
+ val origParams = vparamss.flatten map (_.symbol)
+ val companion = origThis.companionModule
+
+ def makeExtensionMethodSymbol = {
+ val extensionName = extensionNames(origMeth).head
+ val extensionMeth = (
+ companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ setAnnotations origMeth.annotations
+ )
+ companion.info.decls.enter(extensionMeth)
+ }
+
+ val extensionMeth = makeExtensionMethodSymbol
+ val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis)
extensionMeth setInfo newInfo
- log("Value class %s spawns extension method.\n Old: %s\n New: %s".format(
- currentOwner,
- origMeth.defString,
- extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo
-
- def thisParamRef = gen.mkAttributedIdent(extensionMeth.info.params.head setPos extensionMeth.pos)
- val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info
- val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams
- val extensionBody = rhs
+
+ log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}")
+
+ val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo
+ val extensionParams = allParameters(extensionMono)
+ val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
+
+ val extensionBody = (
+ rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
- .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail)
- .substituteThis(currentOwner, thisParamRef)
- .changeOwner((origMeth, extensionMeth))
- extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
- val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
- List(This(currentOwner)))
- val extensionCall = atOwner(origMeth) {
- localTyper.typedPos(rhs.pos) {
- gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol))
- }
- }
- deriveDefDef(tree)(_ => extensionCall)
+ .substituteSymbols(origParams, extensionParams)
+ .substituteThis(origThis, extensionThis)
+ .changeOwner(origMeth -> extensionMeth)
+ )
+
+ // Record the extension method ( FIXME: because... ? )
+ extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody))
+
+ // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
+ // which leaves the actual argument application for extensionCall.
+ val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ val targs = origTpeParams map (_.tpeHK)
+ val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
+
+ // Apply all the argument lists.
+ deriveDefDef(tree)(_ =>
+ atOwner(origMeth)(
+ localTyper.typedPos(rhs.pos)(
+ gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol))
+ )
+ )
+ )
case _ =>
super.transform(tree)
}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 448079abed..845843e9d6 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -451,20 +451,45 @@ abstract class LambdaLift extends InfoTransform {
}
case arg => arg
}
- /** Wrap expr argument in new *Ref(..) constructor, but make
- * sure that Try expressions stay at toplevel.
+
+ /** Wrap expr argument in new *Ref(..) constructor. But try/catch
+ * is a problem because a throw will clear the stack and post catch
+ * we would expect the partially-constructed object to be on the stack
+ * for the call to init. So we recursively
+ * search for "leaf" result expressions where we know its safe
+ * to put the new *Ref(..) constructor or, if all else fails, transform
+ * an expr to { val temp=expr; new *Ref(temp) }.
+ * The reason we narrowly look for try/catch in captured var definitions
+ * is because other try/catch expression have already been lifted
+ * see SI-6863
*/
- def refConstr(expr: Tree): Tree = expr match {
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
+ // a try/catch in final expression position.
+ case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
+ New(sym.tpe, expr)
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
+ case Block(stats, expr) =>
+ Block(stats, refConstr(expr))
+ case If(cond, trueBranch, falseBranch) =>
+ If(cond, refConstr(trueBranch), refConstr(falseBranch))
+ case Match(selector, cases) =>
+ Match(selector, cases map refConstrCase)
+ // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
+      // any possibility of try/catch in the *Ref constructor. This should be a safe transformation as a default
+ // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
case _ =>
- New(sym.tpe, expr)
- }
+ debuglog("assigning expr to temp: " + (expr.pos))
+ val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
+ val tempDef = ValDef(tempSym, expr) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
+ Block(tempDef, New(sym.tpe, tempRef))
+ }}
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
- treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- refConstr(constructorArg)
- })
+
+ treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
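
A sketch of the user-level shape that exercises the new refConstr cases (names are made up; cf. SI-6863): a captured mutable local whose initializer is a try/catch.

    def counter(): () => Int = {
      var count = try "42".toInt catch { case _: NumberFormatException => 0 }
      () => { count += 1; count }
    }
    // `count` is captured, so LambdaLift boxes it in a scala.runtime.IntRef.
    // refConstr now pushes the IntRef construction into the result positions of
    // the try (or, as a last resort, emits { val temp = <expr>; new IntRef(temp) })
    // instead of wrapping the whole try/catch in the constructor call.
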
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 3cd943aa74..c9c68d080d 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -867,7 +867,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
rhs match {
case Block(List(assign), returnTree) =>
val Assign(moduleVarRef, _) = assign
- val cond = Apply(Select(moduleVarRef, nme.eq), List(NULL))
+ val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
abort("Invalid getter " + rhs + " for module in class " + clazz)
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 64051b56ec..232148676c 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -178,6 +178,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
+ def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
+ }
+ private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
+ assert(!specializedMethod.isOverloaded, specializedMethod.defString)
+ val om = Overload(specializedMethod, env)
+ overloads(method) ::= om
+ om
}
/** Just to mark uncheckable */
@@ -289,10 +297,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Return the specialized overload of sym in the given env, if any. */
- def overload(sym: Symbol, env: TypeEnv) =
- overloads(sym).find(ov => TypeEnv.includes(ov.env, env))
-
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
@@ -628,7 +632,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- overloads(specMember) ::= Overload(om, typeEnv(om))
+ newOverload(specMember, om, typeEnv(om))
enterMember(om)
}
@@ -835,7 +839,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
- overloads(sym) ::= Overload(specMember, env)
+ newOverload(sym, specMember, env)
owner.info.decls.enter(specMember)
specMember
}
@@ -877,9 +881,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (wasSpec.nonEmpty)
debuglog("specialized overload for %s in %s".format(specMember, pp(typeEnv(specMember))))
- overloads(sym) ::= Overload(specMember, spec)
+ newOverload(sym, specMember, spec)
info(specMember) = SpecialOverload(sym, typeEnv(specMember))
-
specMember
}
@@ -994,7 +997,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
SpecialOverride(impl)
}
)
- overloads(overriding) ::= Overload(om, env)
+ newOverload(overriding, om, env)
ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
@@ -1476,54 +1479,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
transformTypeApply
- case Select(qual, name) =>
- def transformSelect = {
- qual match {
- case _: Super if illegalSpecializedInheritance(currentClass) =>
- val pos = tree.pos
- debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- debuglog(pos.lineContent)
- tree
- case _ =>
+ case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) =>
+ val pos = tree.pos
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
+ tree
+ case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
-
- //log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
- if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
- // log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- // log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- if (!env.isEmpty) {
- // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
- val specMember = overload(symbol, env)
- if (specMember.isDefined) {
- localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- }
- else {
- val qual1 = transform(qual)
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ val qual1 = transform(qual)
+ def reselect(member: Symbol) = {
+ val newSelect = atPos(tree.pos)(Select(qual1, member))
+ if (member.isMethod) localTyper typedOperator newSelect
+ else localTyper typed newSelect
+ }
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => reselect(member)
+ case _ =>
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol) {
- val tree1 = atPos(tree.pos)(Select(qual1, specMember))
- if (specMember.isMethod)
- localTyper.typedOperator(tree1)
- else
- localTyper.typed(tree1)
- } else
+ if (specMember ne NoSymbol)
+ reselect(specMember)
+ else
treeCopy.Select(tree, qual1, name)
- }
- } else
- super.transform(tree)
- } else overloads(symbol).find(_.sym.info =:= symbol.info) match {
- case Some(specMember) =>
- val qual1 = transform(qual)
- debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
- localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym)))
- case None =>
- super.transform(tree)
- }
+ }
}
+ case Select(qual, _) =>
+ overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) =>
+ val newTree = Select(transform(qual), member)
+ debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
+ localTyper.typedOperator(atPos(tree.pos)(newTree))
+ case None =>
+ super.transform(tree)
}
- transformSelect
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 9908bd689e..e9f403aea0 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -231,7 +231,17 @@ abstract class UnCurry extends InfoTransform
* If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
*
*/
- def transformFunction(fun: Function): Tree =
+ def transformFunction(fun: Function): Tree = {
+ fun.tpe match {
+ // can happen when analyzer plugins assign refined types to functions, e.g.
+ // (() => Int) { def apply(): Int @typeConstraint }
+ case RefinedType(List(funTp), decls) =>
+ debuglog(s"eliminate refinement from function type ${fun.tpe}")
+ fun.tpe = funTp
+ case _ =>
+ ()
+ }
+
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
@@ -239,10 +249,7 @@ abstract class UnCurry extends InfoTransform
// only get here when running under -Xoldpatmat
synthPartialFunction(fun)
case _ =>
- val parents = (
- if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
- else addSerializable(ObjectClass.tpe, fun.tpe)
- )
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
@@ -270,11 +277,12 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
+ }
/** Transform a function node (x => body) of type PartialFunction[T, R] where
* body = expr match { case P_i if G_i => E_i }_i=1..n
@@ -395,7 +403,7 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
@@ -603,8 +611,6 @@ abstract class UnCurry extends InfoTransform
}
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
if (!sym.owner.isSourceMethod)
withNeedLift(true) { super.transform(tree) }
else
@@ -629,7 +635,7 @@ abstract class UnCurry extends InfoTransform
}
}
- case Assign(Select(_, _), _) =>
+ case Assign(_: RefTree, _) =>
withNeedLift(true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
@@ -748,15 +754,22 @@ abstract class UnCurry extends InfoTransform
}
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
- val vparamss1 = vparamss0 match {
- case _ :: Nil => vparamss0
- case _ => vparamss0.flatten :: Nil
- }
+ val (newParamss, newRhs): (List[List[ValDef]], Tree) =
+ if (dependentParamTypeErasure isDependent dd)
+ dependentParamTypeErasure erase dd
+ else {
+ val vparamss1 = vparamss0 match {
+ case _ :: Nil => vparamss0
+ case _ => vparamss0.flatten :: Nil
+ }
+ (vparamss1, rhs0)
+ }
+
val flatdd = copyDefDef(dd)(
- vparamss = vparamss1,
+ vparamss = newParamss,
rhs = nonLocalReturnKeys get dd.symbol match {
- case Some(k) => atPos(rhs0.pos)(nonLocalReturnTry(rhs0, k, dd.symbol))
- case None => rhs0
+ case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol))
+ case None => newRhs
}
)
addJavaVarargsForwarders(dd, flatdd)
@@ -782,6 +795,104 @@ abstract class UnCurry extends InfoTransform
}
}
+ /**
+ * When we concatenate parameter lists, formal parameter types that were dependent
+ * on prior parameter values will no longer be correctly scoped.
+ *
+ * For example:
+ *
+ * {{{
+ * def foo(a: A)(b: a.B): a.type = {b; b}
+ * // after uncurry
+ * def foo(a: A, b: a/* NOT IN SCOPE! */.B): a.B = {b; b}
+ * }}}
+ *
+ * This violates the principle that each compiler phase should produce trees that
+ * can be retyped (see [[scala.tools.nsc.typechecker.TreeCheckers]]), and causes
+ * a practical problem in `erasure`: it is not able to correctly determine if
+ * such a signature overrides a corresponding signature in a parent. (SI-6443).
+ *
+ * This transformation erases the dependent method types by:
+ * - Widening the formal parameter type to existentially abstract
+ * over the prior parameters (using `packSymbols`)
+ * - Inserting casts in the method body to cast to the original,
+ * precise type.
+ *
+ * For the example above, this results in:
+ *
+ * {{{
+ * def foo(a: A, b: a.B forSome { val a: A }): a.B = { val b$1 = b.asInstanceOf[a.B]; b$1; b$1 }
+ * }}}
+ */
+ private object dependentParamTypeErasure {
+ sealed abstract class ParamTransform {
+ def param: ValDef
+ }
+ final case class Identity(param: ValDef) extends ParamTransform
+ final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
+
+ def isDependent(dd: DefDef): Boolean =
+ beforeUncurry {
+ val methType = dd.symbol.info
+ methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
+ }
+
+ /**
+ * @return (newVparamss, newRhs)
+ */
+ def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
+ import dd.{ vparamss, rhs }
+ val vparamSyms = vparamss flatMap (_ map (_.symbol))
+
+ val paramTransforms: List[ParamTransform] =
+ vparamss.flatten.map { p =>
+ val declaredType = p.symbol.info
+ // existentially abstract over value parameters
+ val packedType = typer.packSymbols(vparamSyms, declaredType)
+ if (packedType =:= declaredType) Identity(p)
+ else {
+ // Change the type of the param symbol
+ p.symbol updateInfo packedType
+
+ // Create a new param tree
+ val newParam: ValDef = copyValDef(p)(tpt = TypeTree(packedType))
+
+ // Within the method body, we'll cast the parameter to the originally
+ // declared type and assign this to a synthetic val. Later, we'll patch
+ // the method body to refer to this, rather than the parameter.
+ val tempVal: ValDef = {
+ val tempValName = unit freshTermName (p.name + "$")
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ }
+ Packed(newParam, tempVal)
+ }
+ }
+
+ val allParams = paramTransforms map (_.param)
+ val (packedParams, tempVals) = paramTransforms.collect {
+ case Packed(param, tempVal) => (param, tempVal)
+ }.unzip
+
+ val rhs1 = localTyper.typedPos(rhs.pos) {
+ // Patch the method body to refer to the temp vals
+ val rhsSubstituted = rhs.substituteSymbols(packedParams map (_.symbol), tempVals map (_.symbol))
+ // The new method body: { val p$1 = p.asInstanceOf[<dependent type>]; ...; <rhsSubstituted> }
+ Block(tempVals, rhsSubstituted)
+ }
+
+ // update the type of the method after uncurry.
+ dd.symbol updateInfo {
+ val GenPolyType(tparams, tp) = dd.symbol.info
+ logResult("erased dependent param types for ${dd.symbol.info}") {
+ GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
+ }
+ }
+ (allParams :: Nil, rhs1)
+ }
+ }
+
+
/* Analyzes repeated params if method is annotated as `varargs`.
* If the repeated params exist, it saves them into the `repeatedParams` map,
* which is used later.
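
Returning to `dependentParamTypeErasure` above, a few hypothetical signatures make its scope concrete; only the first takes the new erasure path, because only there does a later value parameter's type mention an earlier parameter.

    class A { type B }
    def f(a: A)(b: a.B): a.B = b   // dependent parameter type: widened and cast as described above
    def g(a: A)(b: A): A = b       // merely curried: the parameter lists are simply flattened
    def h(a: A): a.B = ???         // only the result type is dependent: not this transformation's concern
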
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 78175f393a..b50486306d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -25,6 +25,7 @@ trait Analyzer extends AnyRef
with TypeDiagnostics
with ContextErrors
with StdAttachments
+ with AnalyzerPlugins
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
new file mode 100644
index 0000000000..28f620dbb5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -0,0 +1,225 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/**
+ * @author Lukas Rytz
+ * @version 1.0
+ */
+trait AnalyzerPlugins { self: Analyzer =>
+ import global._
+
+
+ trait AnalyzerPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Let analyzer plugins change the expected type before type checking a tree.
+ */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+
+ /**
+ * Let analyzer plugins modify the type that has been computed for a tree.
+ *
+ * @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
+ * @param typer The typer that type checked `tree`
+ * @param tree The type-checked tree
+ * @param mode Mode that was used for typing `tree`
+ * @param pt Expected type that was used for typing `tree`
+ */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+
+ /**
+ * Let analyzer plugins change the types assigned to definitions. For definitions that have
+ * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the
+ * type is inferred by typing the definition's righthand side.
+ *
+ * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt`
+ * TypeTree of the definition (for DefDef and ValDef).
+ *
+ * (*) If the type of a method or value is inferred, the type-checked tree is stored in the
+ * `analyzer.transformed` hash map, indexed by the definition's rhs tree.
+ *
+ * NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
+ * method is called from the type completer of a recursive method, type checking the method
+ * rhs will invoke the same completer again. It might be possible to avoid this situation by
+ * assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
+ * will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
+ *
+ * The hooks into `typeSig` allow analyzer plugins to add annotations to (or change the types
+ * of) definition symbols. This cannot be achieved by using `pluginsTyped`: this method
+ * is only called during type checking, so changing the type of a symbol at this point is too
+ * late: references to the symbol might already be typed and therefore obtain the original
+ * type assigned during naming.
+ *
+ * @param defTree is the definition for which the type was computed. The different cases are
+ * outlined below. Note that this type is untyped (for methods and values with inferred type,
+ * the typed rhs trees are available in analyzer.transformed).
+ *
+ * Case defTree: Template
+ * - tpe : A ClassInfoType for the template
+ * - typer: The typer for template members, i.e. expressions and definitions of defTree.body
+ * - pt : WildcardType
+ * - the class symbol is accessible through typer.context.owner
+ *
+ * Case defTree: ClassDef
+ * - tpe : A ClassInfoType, or a PolyType(params, ClassInfoType) for polymorphic classes.
+ * The class type is the one computed by templateSig, i.e. through the above case
+ * - typer: The typer for the class. Note that this typer has a different context than the
+ * typer for the template.
+ * - pt : WildcardType
+ *
+ * Case defTree: ModuleDef
+ * - tpe : A ClassInfoType computed by templateSig
+ * - typer: The typer for the module. context.owner of this typer is the module class symbol
+ * - pt : WildcardType
+ *
+ * Case defTree: DefDef
+ * - tpe : The type of the method (MethodType, PolyType or NullaryMethodType). (*)
+ * - typer: The typer for the rhs of this method
+ * - pt : If tpt.isEmpty, either the result type from the overridden method, or WildcardType.
+ * Otherwise the type obtained from typing tpt.
+ * - Note that for constructors, pt is the class type which the constructor creates. To type
+ * check the rhs of the constructor however, the expected type has to be WildcardType (see
+ * Typers.typedDefDef)
+ *
+ * Case defTree: ValDef
+ * - tpe : The type of this value. (*)
+ * - typer: The typer for the rhs of this value
+ * - pt : If tpt.isEmpty, WildcardType. Otherwise the type obtained from typing tpt.
+ * - Note that pluginsTypeSig might be called multiple times for the same ValDef since it is
+ * used to compute the types of the accessor methods (see `pluginsTypeSigAccessor`)
+ *
+ * Case defTree: TypeDef
+ * - tpe : The type obtained from typing rhs (PolyType if the TypeDef defines a polymorphic type)
+ * - typer: The typer for the rhs of this type
+ * - pt : WildcardType
+ */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = tpe
+
+ /**
+ * Modify the types of field accessors. The namer phase creates method types for getters and
+ * setters based on the type of the corresponding field.
+ *
+ * Note: in order to compute the method type of an accessor, the namer calls `typeSig` on the
+ * `ValDef` tree of the corresponding field. This implies that the `pluginsTypeSig` method
+ * is potentially called multiple times for the same ValDef tree.
+ *
+ * @param tpe The method type created by the namer for the accessor
+ * @param typer The typer for the ValDef (not for the rhs)
+ * @param tree The ValDef corresponding to the accessor
+ * @param sym The accessor method symbol (getter, setter, beanGetter or beanSetter)
+ */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Modify the type of a return expression. By default, return expressions have type
+ * NothingClass.tpe.
+ *
+ * @param tpe The type of the return expression
+ * @param typer The typer that was used for typing the return tree
+ * @param tree The typed return expression tree
+ * @param pt The return type of the enclosing method
+ */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
+ }
+
+
+
+ /** A list of registered analyzer plugins */
+ private var analyzerPlugins: List[AnalyzerPlugin] = Nil
+
+ /** Registers a new analyzer plugin */
+ def addAnalyzerPlugin(plugin: AnalyzerPlugin) {
+ if (!analyzerPlugins.contains(plugin))
+ analyzerPlugins = plugin :: analyzerPlugins
+ }
+
+
+ /** @see AnalyzerPlugin.pluginsPt */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ if (analyzerPlugins.isEmpty) pt
+ else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
+ if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+
+ /** @see AnalyzerPlugin.pluginsTyped */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTpe = addAnnotations(tree, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersTpe
+ else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypeSig */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+
+ /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+
+ /** @see AnalyzerPlugin.canAdaptAnnotations */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
+ annotCheckersExists || {
+ if (analyzerPlugins.isEmpty) false
+ else analyzerPlugins.exists(plugin =>
+ plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
+ }
+ }
+
+ /** @see AnalyzerPlugin.adaptAnnotations */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
+ if (analyzerPlugins.isEmpty) annotCheckersTree
+ else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
+ if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypedReturn */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersType
+ else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ }
+}
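
A minimal sketch of how a compiler plugin might hook into this API; the plugin class and all names in it are hypothetical, and only `analyzer.AnalyzerPlugin` and `analyzer.addAnalyzerPlugin` come from the code above.

    import scala.tools.nsc.Global
    import scala.tools.nsc.plugins.{Plugin, PluginComponent}

    class MyPlugin(val global: Global) extends Plugin {
      import global._

      val name = "my-analyzer-plugin"
      val description = "demonstrates the analyzer plugin hooks"
      val components: List[PluginComponent] = Nil

      object myAnalyzerPlugin extends analyzer.AnalyzerPlugin {
        // only run while the typer pipeline is active, as suggested in isActive's doc
        override def isActive(): Boolean =
          global.phase.id < global.currentRun.picklerPhase.id

        // pass the inferred type through unchanged; a real plugin could add or
        // remove annotations here
        override def pluginsTyped(tpe: Type, typer: analyzer.Typer, tree: Tree, mode: Int, pt: Type): Type =
          tpe
      }

      analyzer.addAnalyzerPlugin(myAnalyzerPlugin)
    }
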
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index dc367b11fd..4bf7f78167 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -184,18 +184,14 @@ trait ContextErrors {
}
def ParentTypesError(templ: Template, ex: TypeError) = {
- templ.tpe = null
- issueNormalTypeError(templ, ex.getMessage())
- setError(templ)
+ templ.tpe = null
+ issueNormalTypeError(templ, ex.getMessage())
}
// additional parentTypes errors
- def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) =
+ def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
- def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) =
- issueNormalTypeError(arg, "parents of traits may not have parameters")
-
def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
issueNormalTypeError(supertpt, "missing type arguments")
@@ -273,9 +269,6 @@ trait ContextErrors {
def VolatileValueError(vdef: Tree) =
issueNormalTypeError(vdef, "values cannot be volatile")
- def FinalVolatileVarError(vdef: Tree) =
- issueNormalTypeError(vdef, "final vars cannot be volatile")
-
def LocalVarUninitializedError(vdef: Tree) =
issueNormalTypeError(vdef, "local variables must be initialized")
@@ -763,10 +756,14 @@ trait ContextErrors {
else " of " + expanded.getClass
))
- def MacroImplementationNotFoundError(expandee: Tree) =
- macroExpansionError(expandee,
+ def MacroImplementationNotFoundError(expandee: Tree) = {
+ val message =
"macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
+ (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ else "")
+ macroExpansionError(expandee, message)
+ }
}
}
@@ -1047,6 +1044,9 @@ trait ContextErrors {
def MaxParametersCaseClassError(tree: Tree) =
issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
+ def InheritsItselfError(tree: Tree) =
+ issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
+
def MissingParameterOrValTypeError(vparam: Tree) =
issueNormalTypeError(vparam, "missing parameter type")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index af2aeefecd..620665126e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -35,7 +35,7 @@ trait Contexts { self: Analyzer =>
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
- private val startContext = {
+ private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
@@ -342,6 +342,16 @@ trait Contexts { self: Analyzer =>
c
}
+ /**
+ * A context for typing constructor parameter ValDefs, super or self invocation arguments and default getters
+ * of constructors. These expressions need to be type checked in a scope outside the class, cf. spec 5.3.1.
+ *
+ * This method is called by namer / typer where `this` is the context for the constructor DefDef. The
+ * owner of the resulting (new) context is the outer context for the Template, i.e. the context for the
+ * ClassDef. This means that class type parameters will be in scope. The value parameters of the current
+ * constructor are also entered into the new constructor scope. Members of the class however will not be
+ * accessible.
+ */
def makeConstructorContext = {
var baseContext = enclClass.outer
while (baseContext.tree.isInstanceOf[Template])
@@ -361,6 +371,8 @@ trait Contexts { self: Analyzer =>
enterLocalElems(c.scope.elems)
}
}
+ // Enter the scope elements of this (the scope for the constructor DefDef) into the new constructor scope.
+ // Concretely, this will enter the value parameters of the constructor.
enterElems(this)
argContext
}
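
A user-level illustration of the scoping rule described above (class `C` is hypothetical): constructor defaults and self-invocation arguments see class type parameters and earlier parameters, but not members of the class itself.

    class C[T](val x: Int, val xs: List[T] = Nil) {   // the default and `List[T]` are typed outside the class body
      val member = 1
      def this() = this(0)
      // def this(u: Unit) = this(member)   // does not compile: class members such as `member`
      //                                    // are not in scope for self-invocation arguments
    }
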
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index df753ba53c..f6142a81be 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -317,15 +317,33 @@ abstract class Duplicators extends Analyzer {
super.typed(tree, mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
- // log("selection on this, no type ascription required")
- // we use the symbol name instead of the tree name because the symbol may have been
- // name mangled, rendering the tree name obsolete
- // log(tree)
- val t = super.typedPos(tree.pos, mode, pt) {
- Select(This(newClassOwner), tree.symbol.name)
- }
- // log("typed to: " + t + "; tpe = " + t.tpe + "; " + inspectTpe(t.tpe))
- t
+ // We use the symbol name instead of the tree name because the symbol
+ // may have been name mangled, rendering the tree name obsolete.
+ // ...but you can't just do a Select on a name because if the symbol is
+ // overloaded, you will crash in the backend.
+ val memberByName = newClassOwner.thisType.member(tree.symbol.name)
+ def nameSelection = Select(This(newClassOwner), tree.symbol.name)
+ val newTree = (
+ if (memberByName.isOverloaded) {
+ // Find the types of the overload alternatives as seen in the new class,
+ // and filter the list down to those which match the old type (after
+ // fixing the old type so it is seen as if from the new class.)
+ val typeInNewClass = fixType(oldClassOwner.info memberType tree.symbol)
+ val alts = memberByName.alternatives
+ val memberTypes = alts map (newClassOwner.info memberType _)
+ val memberString = memberByName.defString
+ alts zip memberTypes filter (_._2 =:= typeInNewClass) match {
+ case ((alt, tpe)) :: Nil =>
+ log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
+ nameSelection
+ }
+ }
+ else nameSelection
+ )
+ super.typed(atPos(tree.pos)(newTree), mode, pt)
case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
// val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen)))
@@ -370,7 +388,7 @@ abstract class Duplicators extends Analyzer {
cases
}
- super.typedPos(tree.pos, mode, pt)(Match(scrut, cases1))
+ super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
case EmptyTree =>
// no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index ec3a0a0ef7..d1cf9b1904 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1319,12 +1319,17 @@ trait Implicits {
// `materializeImplicit` does some preprocessing for `pt`
// is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
- if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, false)
+ if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
if (result.isFailure) {
context.updateBuffer(previousErrs)
if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
+ if (wasAmbigious && settings.lint.value)
+ reporter.warning(tree.pos,
+ "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
+ previousErrs.map(_.errMsg).mkString("\n"))
+
if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
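
Roughly the SI-6667 situation this change (and the new -Xlint warning) is about, with hypothetical names: the in-scope candidates are ambiguous, and the search now still falls through to the implicit scope.

    class C
    object C { implicit val fromCompanion: C = new C }

    object Test {
      implicit val a: C = new C
      implicit val b: C = new C
      // `a` and `b` are ambiguous in the local scope; with this change the
      // companion-scope `fromCompanion` is still searched (and found), and
      // under -Xlint a warning reports that this will not compile in 2.11.0.
      val c = implicitly[C]
    }
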
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 2c2aa03d24..7161043dcf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -58,20 +58,31 @@ trait Infer extends Checkable {
* @throws TypeError when the unapply[Seq] definition is ill-typed
* @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
*
- * From the spec:
+ * This is the spec currently implemented -- TODO: update it.
+ *
* 8.1.8 ExtractorPatterns
*
* An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
* However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
- * An unapply method in an object x matches the pattern x(p1, ..., pn) if it takes exactly one argument and one of the following applies:
*
- * n = 0 and unapply’s result type is Boolean.
+ * An `unapply` method with result type `R` in an object `x` matches the
+ * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
+ * - `n = 0` and `R =:= Boolean`, or
+ * - `n = 1` and `R <:< Option[T]`, for some type `T`.
+ * The argument pattern `p1` is typed in turn with expected type `T`.
+ * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
+ * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
+ * typed with expected types `T_1, ..., T_n`.
+ *
+ * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
+ * if it takes exactly one argument and its result type is of the form `Option[S]`,
+ * where either:
+ * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
+ * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
*
- * n = 1 and unapply’s result type is Option[T], for some type T.
- * the (only) argument pattern p1 is typed in turn with expected type T
+ * The argument patterns `p_1, ..., p_n` are typed with expected types
+ * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
*
- * n > 1 and unapply’s result type is Option[(T1, ..., Tn)], for some types T1, ..., Tn.
- * the argument patterns p1, ..., pn are typed in turn with expected types T1, ..., Tn
*/
def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
val isUnapplySeq = unappSym.name == nme.unapplySeq
@@ -83,31 +94,34 @@ trait Infer extends Checkable {
else toRepeated
}
+ // empty list --> error, otherwise length == 1
+ lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
+ // empty list --> not a ProductN, otherwise product element types
+ def productArgs = getProductArgs(optionArgs.head)
+
val formals =
- if (nbSubPats == 0 && booleanExtractor && !isUnapplySeq) Nil
- else resTp.baseType(OptionClass).typeArgs match {
- case optionTArg :: Nil =>
- def productArgs = getProductArgs(optionTArg)
+ // convert Seq[T] to the special repeated argument type
+ // so below we can use formalTypes to expand formals to correspond to the number of actuals
+ if (isUnapplySeq) {
+ if (optionArgs.nonEmpty)
+ productArgs match {
+ case Nil => List(seqToRepeatedChecked(optionArgs.head))
+ case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
+ }
+ else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
+ } else {
+ if (booleanExtractor && nbSubPats == 0) Nil
+ else if (optionArgs.nonEmpty)
if (nbSubPats == 1) {
- if (isUnapplySeq) List(seqToRepeatedChecked(optionTArg))
- else {
- val productArity = productArgs.size
- if (productArity > 1 && settings.lint.value)
- global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionTArg}")
- List(optionTArg)
- }
+ val productArity = productArgs.size
+ if (productArity > 1 && settings.lint.value)
+ global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
+ optionArgs
}
// TODO: update spec to reflect we allow any ProductN, not just TupleN
- else productArgs match {
- case Nil if isUnapplySeq => List(seqToRepeatedChecked(optionTArg))
- case tps if isUnapplySeq => tps.init :+ seqToRepeatedChecked(tps.last)
- case tps => tps
- }
- case _ =>
- if (isUnapplySeq)
- throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.owner+unappSym.owner.locationString} not in {Option[_], Some[_]}")
- else
- throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.owner+unappSym.owner.locationString} not in {Boolean, Option[_], Some[_]}")
+ else productArgs
+ else
+ throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
}
// for unapplySeq, replace last vararg by as many instances as required by nbSubPats
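
The extractor shapes described in the comment above, as hypothetical extractors (all names made up):

    object ExtractorShapes {
      object Pair  { def unapply(s: String): Option[(String, String)] = Some((s.take(1), s.drop(1))) }
      object Words { def unapplySeq(s: String): Option[Seq[String]]   = Some(s.split(" ").toSeq) }

      val pair  = "ab" match { case Pair(x, y) => (x, y) }                 // n = 2: R <:< Option[Product2[_, _]]
      val words = "a b c" match { case Words(a, rest @ _*) => (a, rest) }  // unapplySeq: element type U = String
    }
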
@@ -257,8 +271,8 @@ trait Infer extends Checkable {
tp1 // @MAT aliases already handled by subtyping
}
- private val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
- private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
+ private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
+ private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
@@ -411,8 +425,19 @@ trait Infer extends Checkable {
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
+ *
* [Martin] I think Infer is also created by Erasure, with the default
* implementation of isCoercible
+ * [Paulp] (Assuming the above must refer to my comment on isCoercible)
+ * Nope, I examined every occurrence of Inferencer in trunk. It
+ * appears twice as a self-type, once at its definition, and once
+ * where it is instantiated in Typers. There are no others.
+ *
+ % ack -A0 -B0 --no-filename '\bInferencer\b' src
+ self: Inferencer =>
+ self: Inferencer =>
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
@@ -1578,10 +1603,10 @@ trait Infer extends Checkable {
}
// Drop those that use a default; keep those that use vararg/tupling conversion.
mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && {
- compareLengths(t.params, argtpes) < 0 || // tupling (*)
- hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- }
+ !t.typeSymbol.hasDefaultFlag && (
+ compareLengths(t.params, argtpes) < 0 // tupling (*)
+ || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
+ )
)
// (*) more arguments than parameters, but still applicable: tupling conversion works.
// todo: should not return "false" when paramTypes = (Unit) no argument is given
@@ -1608,15 +1633,18 @@ trait Infer extends Checkable {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
tryTwice { isSecondTry =>
- debuglog("infer method alt "+ tree.symbol +" with alternatives "+
- (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
- val applicable = resolveOverloadedMethod(argtpes, {
- alts filter { alt =>
- inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
- (!varArgsOnly || isVarArgsList(alt.tpe.params))
- }
- })
+ def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
+ isVarArgsList(alt.tpe.params)
+ && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
+ )
+ val applicable = resolveOverloadedMethod(argtpes,
+ alts filter (alt =>
+ varargsApplicableCheck(alt)
+ && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
+ )
+ )
def improves(sym1: Symbol, sym2: Symbol) = {
// util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index b20a9ea626..245656e2d7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -452,7 +452,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
case tpe => tpe
}
checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
@@ -684,6 +684,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* the expandee with an error marker set if there has been an error
*/
def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ if (settings.Ymacronoexpand.value) return expandee // SI-6812
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index acc4f7ff67..99557d1527 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -235,8 +235,8 @@ trait MethodSynthesis {
context.unit.synthetics get meth match {
case Some(mdef) =>
context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
@@ -329,6 +329,7 @@ trait MethodSynthesis {
*/
def category: Symbol
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
@@ -389,7 +390,7 @@ trait MethodSynthesis {
result
}
def derivedTree: DefDef =
- factoryMeth(mods & flagsMask | flagsExtra, name, tree, symbolic = false)
+ factoryMeth(mods & flagsMask | flagsExtra, name, tree)
def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
def flagsMask: Long = AccessFlags
def name: TermName = tree.name.toTermName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 98b6264051..ab338447c9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -65,7 +65,18 @@ trait Namers extends MethodSynthesis {
case ModuleDef(_, _, _) => tree.symbol.moduleClass
case _ => tree.symbol
}
- newNamer(context.makeNewScope(tree, sym))
+ def isConstrParam(vd: ValDef) = {
+ (sym hasFlag PARAM | PRESUPER) &&
+ !vd.mods.isJavaDefined &&
+ sym.owner.isConstructor
+ }
+ val ownerCtx = tree match {
+ case vd: ValDef if isConstrParam(vd) =>
+ context.makeConstructorContext
+ case _ =>
+ context
+ }
+ newNamer(ownerCtx.makeNewScope(tree, sym))
}
def createInnerNamer() = {
newNamer(context.make(context.tree, owner, newScope))
@@ -423,6 +434,7 @@ trait Namers extends MethodSynthesis {
def enterSyms(trees: List[Tree]): Namer = {
trees.foldLeft(this: Namer) { (namer, t) =>
val ctx = namer enterSym t
+ // for Import trees, enterSym returns a changed context, so we need a new namer
if (ctx eq namer.context) namer
else newNamer(ctx)
}
@@ -521,20 +533,19 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = copyDefDef.symbol
- val lazyType = completerOf(copyDefDef, tparams)
+ def enterCopyMethod(copyDef: DefDef): Symbol = {
+ val sym = copyDef.symbol
+ val lazyType = completerOf(copyDef)
/** Assign the types of the class parameters to the parameters of the
* copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
val classParamss = constructorType.paramss
- val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
- map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyDef.vparamss, classParamss)((copyParams, classParams) =>
map2(copyParams, classParams)((copyP, classP) =>
copyP.tpt setType subst(classP.tpe)
)
@@ -542,24 +553,28 @@ trait Namers extends MethodSynthesis {
}
sym setInfo {
- mkTypeCompleter(copyDefDef) { sym =>
+ mkTypeCompleter(copyDef) { sym =>
assignParamTypes()
lazyType complete sym
}
}
}
- def completerOf(tree: Tree): TypeCompleter = completerOf(tree, treeInfo.typeParameters(tree))
- def completerOf(tree: Tree, tparams: List[TypeDef]): TypeCompleter = {
+
+ def completerOf(tree: Tree): TypeCompleter = {
val mono = namerOf(tree.symbol) monoTypeCompleter tree
+ val tparams = treeInfo.typeParameters(tree)
if (tparams.isEmpty) mono
else {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
+ /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered
+ * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`.
+ * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds:
+ * !tree.symbol.isAbstractType || { tparams.forall(_.symbol == NoSymbol)
+ * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter.
+ */
if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ?
createNamer(tree) enterSyms tparams
- new PolyTypeCompleter(tparams, mono, tree, context) //@M
+ new PolyTypeCompleter(tparams, mono, context) //@M
}
}
@@ -621,9 +636,9 @@ trait Namers extends MethodSynthesis {
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
- enterCopyMethod(tree, tparams)
+ enterCopyMethod(tree)
else
- sym setInfo completerOf(tree, tparams)
+ sym setInfo completerOf(tree)
}
def enterClassDef(tree: ClassDef) {
@@ -736,13 +751,13 @@ trait Namers extends MethodSynthesis {
}
}
- def accessorTypeCompleter(tree: ValDef, isSetter: Boolean = false) = mkTypeCompleter(tree) { sym =>
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- if (isSetter)
- MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
- else
- NullaryMethodType(typeSig(tree))
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ else NullaryMethodType(typeSig(tree))
+ pluginsTypeSigAccessor(tp, typer, tree, sym)
}
}
}
@@ -805,17 +820,12 @@ trait Namers extends MethodSynthesis {
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- // compute result type from rhs
- val typedBody =
+ val rhsTpe =
if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
else defnTyper.computeType(tree.rhs, pt)
- val typedDefn = widenIfNecessary(tree.symbol, typedBody, pt)
- assignTypeToTree(tree, typedDefn)
- }
-
- private def assignTypeToTree(tree: ValOrDefDef, tpe: Type): Type = {
- tree.tpt defineType tpe setPos tree.pos.focus
+ val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
+ tree.tpt defineType defnTpe setPos tree.pos.focus
tree.tpt.tpe
}
@@ -847,8 +857,13 @@ trait Namers extends MethodSynthesis {
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
- if (tpt.tpe.isError) AnyRefClass.tpe
- else tpt.tpe
+ val tp = tpt.tpe
+ val inheritsSelf = tp.typeSymbol == owner
+ if (inheritsSelf)
+ InheritsItselfError(tpt)
+
+ if (inheritsSelf || tp.isError) AnyRefClass.tpe
+ else tp
}
val parents = typer.parentTypes(templ) map checkParent
@@ -892,163 +907,257 @@ trait Namers extends MethodSynthesis {
for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
cda.companionModuleClassNamer = templateNamer
}
- ClassInfoType(parents, decls, clazz)
+ val classTp = ClassInfoType(parents, decls, clazz)
+ pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType)
}
- private def classSig(tparams: List[TypeDef], impl: Template): Type = {
+ private def classSig(cdef: ClassDef): Type = {
+ val clazz = cdef.symbol
+ val ClassDef(_, _, tparams, impl) = cdef
val tparams0 = typer.reenterTypeParams(tparams)
val resultType = templateSig(impl)
- GenPolyType(tparams0, resultType)
+ val res = GenPolyType(tparams0, resultType)
+ val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType)
+
+ // Already assign the type to the class symbol (monoTypeCompleter will do it again).
+ // Allows isDerivedValueClass to look at the info.
+ clazz setInfo pluginsTp
+ if (clazz.isDerivedValueClass) {
+ log("Ensuring companion for derived value class " + cdef.name + " at " + cdef.pos.show)
+ clazz setFlag FINAL
+ // Don't force the owner's info lest we create cycles as in SI-6357.
+ enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
+ }
+ pluginsTp
}
- private def methodSig(ddef: DefDef, mods: Modifiers, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): Type = {
- val meth = owner
- val clazz = meth.owner
- // enters the skolemized version into scope, returns the deSkolemized symbols
- val tparamSyms = typer.reenterTypeParams(tparams)
- // since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
- var vparamSymss = enterValueParams(vparamss)
+ private def moduleSig(mdef: ModuleDef): Type = {
+ val moduleSym = mdef.symbol
+ // The info of both the module and the moduleClass symbols need to be assigned. monoTypeCompleter assigns
+ // the result of typeSig to the module symbol. The module class info is assigned here as a side-effect.
+ val result = templateSig(mdef.impl)
+ val pluginsTp = pluginsTypeSig(result, typer, mdef, WildcardType)
+ // Assign the moduleClass info (templateSig returns a ClassInfoType)
+ val clazz = moduleSym.moduleClass
+ clazz setInfo pluginsTp
+ // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // (clazz.info would the ClassInfoType, which is not what should be assigned to the module symbol)
+ clazz.tpe
+ }
+
+ /**
+ * The method type for `ddef`.
+ *
+ * If a PolyType(tparams, restp) is returned, `tparams` are the external symbols (not type skolems),
+ * i.e. instances of AbstractTypeSymbol. All references in `restp` to the type parameters are TypeRefs
+ * to these non-skolems.
+ *
+ * For type-checking the rhs (in case the result type is inferred), the type skolems of the type parameters
+ * are entered in scope. Equally, the parameter symbols entered into scope have types which refer to those
+ * skolems: when type-checking the rhs, references to parameters need to have types that refer to the skolems.
+ * In summary, typing an rhs happens with respect to the skolems.
+ *
+ * This means that the method's result type computed by the typer refers to skolems. In order to put it
+ * into the method type (the result of methodSig), typeRefs to skolems have to be replaced by references
+ * to the non-skolems.
+ */
+ private def methodSig(ddef: DefDef): Type = {
// DEPMETTODO: do we need to skolemize value parameter symbols?
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
- tpt setPos meth.pos.focus
- }
- var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val site = clazz.thisType
- /** Called for all value parameter lists, right to left
- * @param vparams the symbols of one parameter list
- * @param restpe the result type (possibly a MethodType)
+ val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
+
+ val meth = owner
+ val methOwner = meth.owner
+ val site = methOwner.thisType
+
+ /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
+ * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
+ * into scope and returns the non-skolems.
*/
- def makeMethodType(vparams: List[Symbol], restpe: Type) = {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- if (owner.isJavaDefined)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
- else
- MethodType(vparams, restpe)
- }
+ val tparamSyms = typer.reenterTypeParams(tparams)
+
+ val tparamSkolems = tparams.map(_.symbol)
+
+ /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
+ * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ var vparamSymss = enterValueParams(vparamss)
+
+ /**
+ * Creates a method type using tparamSyms and vparamSymss as argument symbols and `restpe` as result type.
+ * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
+ * so the resulting type is a valid external method type; it does not contain (references to) skolems.
+ */
def thisMethodType(restpe: Type) = {
val checkDependencies = new DependentTypeChecker(context)(this)
checkDependencies check vparamSymss
// DEPMETTODO: check not needed when they become on by default
checkDependencies(restpe)
- GenPolyType(
+ val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
+ // TODODEPMET: check that we actually don't need to do anything here
+ // new dependent method types: probably OK already, since 'enterValueParams' above
+ // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
+ // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
+ // so re-use / adapt that)
+ if (meth.isJavaDefined)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
+ else
+ MethodType(vparams, restpe)
+ }
+
+ val res = GenPolyType(
tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
if (vparamSymss.isEmpty) NullaryMethodType(restpe)
// vparamss refer (if they do) to skolemized tparams
else (vparamSymss :\ restpe) (makeMethodType)
)
+ res.substSym(tparamSkolems, tparamSyms)
}
- def transformedResult =
- thisMethodType(resultPt).substSym(tparams map (_.symbol), tparamSyms)
+ /**
+ * Creates a schematic method type which has WildcardTypes for unspecified
+ * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
+ * type schema is
+ *
+ * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
+ *
+ * where T is a non-skolem.
+ */
+ def methodTypeSchema(resTp: Type) = {
+ // for all params without an explicit type, set WildcardType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ thisMethodType(resTp)
+ }
- // luc: added .substSym from skolemized to deSkolemized
- // site.memberType(sym): PolyType(tparams, MethodType(..., ...))
- // ==> all references to tparams are deSkolemized
- // thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized.
- // ==> the two didn't match
- //
- // for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
- // class A { def foo[T](a: T)(b: T = a) = a }
- // class B extends A { override def foo[U](a: U)(b: U) = b }
- def overriddenSymbol =
- intersectionType(clazz.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches transformedResult)
+ def overriddenSymbol(resTp: Type) = {
+ intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
+ sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
}
- // TODO: see whether this or something similar would work instead.
- //
+ }
+ // TODO: see whether this or something similar would work instead:
// def overriddenSymbol = meth.nextOverriddenSymbol
- // fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
- // try to complete from matching definition in base type
- mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
- val overridden = overriddenSymbol
- if (overridden != NoSymbol && !overridden.isOverloaded) {
- overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- resultPt = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
- case mt => mt
- }
+ /**
+ * If `meth` doesn't have an explicit return type, extracts the return type from the method
+ * overridden by `meth` (if there's a unique one). This type is later on used as the expected
+ * type for computing the type of the rhs. The resulting type references type skolems for
+ * type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
+ *
+ * As a first side effect, this method assigns a MethodType constructed using this
+ * return type to `meth`. This allows omitting the result type for recursive methods.
+ *
+ * As another side effect, this method also assigns parameter types from the overridden
+ * method to parameters of `meth` that have missing types (the parser accepts missing
+ * parameter types under -Yinfer-argument-types).
+ */
+ def typesFromOverridden(methResTp: Type): Type = {
+ val overridden = overriddenSymbol(methResTp)
+ if (overridden == NoSymbol || overridden.isOverloaded) {
+ methResTp
+ } else {
+ overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
+ var overriddenTp = site.memberType(overridden) match {
+ case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
+ case mt => mt
+ }
for (vparams <- vparamss) {
- var pps = resultPt.params
+ var overriddenParams = overriddenTp.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- val paramtpe = pps.head.tpe
- vparam.symbol setInfo paramtpe
- vparam.tpt defineType paramtpe setPos vparam.pos.focus
+ val overriddenParamTp = overriddenParams.head.tpe
+ // references to type parameters in overriddenParamTp link to the type skolems, so the
+ // assigned type is consistent with the other / existing parameter types in vparamSymss.
+ vparam.symbol setInfo overriddenParamTp
+ vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- pps = pps.tail
+ overriddenParams = overriddenParams.tail
}
- resultPt = resultPt.resultType
+ overriddenTp = overriddenTp.resultType
}
- resultPt match {
- case NullaryMethodType(rtpe) => resultPt = rtpe
- case MethodType(List(), rtpe) => resultPt = rtpe
+
+ overriddenTp match {
+ case NullaryMethodType(rtpe) => overriddenTp = rtpe
+ case MethodType(List(), rtpe) => overriddenTp = rtpe
case _ =>
}
+
if (tpt.isEmpty) {
// provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(resultPt)
+ meth setInfo thisMethodType(overriddenTp)
+ overriddenTp
+ } else {
+ methResTp
}
}
}
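A small source-level example of what the provisional assignment described above buys (a sketch, not taken from the patch): because the overridden result type is provisionally assigned to `meth`, even a recursive override may omit its result type.

    trait T { def f(i: Int): Int }
    class C extends T {
      def f(i: Int) = if (i <= 0) 0 else f(i - 1)   // recursive, yet the result type can be left out: Int comes from T.f
    }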
- // Add a () parameter section if this overrides some method with () parameters.
- if (clazz.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
- _.info.isInstanceOf[MethodType])) {
+
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe
+ tpt setPos meth.pos.focus
+ }
+
+ val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
+ val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
+ typesFromOverridden(methResTp)
+ } else {
+ methResTp
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ if (methOwner.isClass && vparamss.isEmpty &&
+ overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
vparamSymss = ListOfNil
}
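The case handled here, sketched at the source level: a parameterless definition implementing an empty-paren member gets a () section added so the signatures line up.

    abstract class A { def x(): Int }
    class B extends A { def x = 1 }   // B.x receives an empty parameter list and thereby implements A.x()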
+
+ // issue an error for missing parameter types
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
MissingParameterOrValTypeError(vparam)
vparam.tpt defineType ErrorType
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
+
+ addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
// (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
- if (fastTrack contains ddef.symbol) ddef.symbol setFlag MACRO
+ if (fastTrack contains meth) meth setFlag MACRO
// macro defs need to be typechecked in advance
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (ddef.symbol.isTermMacro) {
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- typer.computeMacroDefType(ddef, pt)
+ if (meth.isTermMacro) {
+ typer.computeMacroDefType(ddef, resTpFromOverride)
}
- thisMethodType({
+ val res = thisMethodType({
val rt = (
if (!tpt.isEmpty) {
- typer.typedType(tpt).tpe
+ methResTp
} else {
- // replace deSkolemized symbols with skolemized ones
- // (for resultPt computed by looking at overridden symbol, right?)
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- assignTypeToTree(ddef, typer, pt)
- }
- )
+ // the return type is inferred; we don't just use resTpFromOverride. Here, C.f has type String:
+ // trait T { def f: Object }; class C extends T { def f = "" }
+ // using resTpFromOverride as expected type allows for the following (C.f has type A):
+ // trait T { def f: A }; class C extends T { implicit def b2a(t: B): A = ???; def f = new B }
+ assignTypeToTree(ddef, typer, resTpFromOverride)
+ })
// #2382: return type of default getters are always @uncheckedVariance
if (meth.hasDefault)
rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
else rt
})
+ pluginsTypeSig(res, typer, ddef, methResTp)
}
/**
@@ -1060,9 +1169,9 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val clazz = meth.owner
+ val methOwner = meth.owner
val isConstr = meth.isConstructor
- val overridden = if (isConstr || !clazz.isClass) NoSymbol else overriddenSymbol
+ val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
@@ -1112,7 +1221,7 @@ trait Namers extends MethodSynthesis {
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionSymbolOf(clazz, context)
+ val module = companionSymbolOf(methOwner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
module.attachments.get[ConstructorDefaultsAttachment] match {
@@ -1158,7 +1267,7 @@ trait Namers extends MethodSynthesis {
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
- clazz.resetFlag(INTERFACE) // there's a concrete member now
+ methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
// save the default getters as attachments in the method symbol. if compiling the
@@ -1183,15 +1292,31 @@ trait Namers extends MethodSynthesis {
}
}
+ private def valDefSig(vdef: ValDef) = {
+ val ValDef(_, _, tpt, rhs) = vdef
+ val result = if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ }
+ else assignTypeToTree(vdef, typer, WildcardType)
+ } else {
+ typer.typedType(tpt).tpe
+ }
+ pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+ }
+
//@M! an abstract type definition (abstract type member/type parameter)
// may take type parameters, which are in scope in its bounds
- private def typeDefSig(tpsym: Symbol, tparams: List[TypeDef], rhs: Tree) = {
+ private def typeDefSig(tdef: TypeDef) = {
+ val TypeDef(_, _, tparams, rhs) = tdef
// log("typeDefSig(" + tpsym + ", " + tparams + ")")
val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef)
val tp = typer.typedType(rhs).tpe match {
case TypeBounds(lt, rt) if (lt.isError || rt.isError) =>
TypeBounds.empty
- case tp @ TypeBounds(lt, rt) if (tpsym hasFlag JAVA) =>
+ case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) =>
TypeBounds(lt, objToAny(rt))
case tp =>
tp
@@ -1213,9 +1338,32 @@ trait Namers extends MethodSynthesis {
// However, separate compilation requires the symbol info to be
// loaded to do this check, but loading the info will probably
// lead to spurious cyclic errors. So omit the check.
- GenPolyType(tparamSyms, tp)
+ val res = GenPolyType(tparamSyms, tp)
+ pluginsTypeSig(res, typer, tdef, WildcardType)
}
+ private def importSig(imp: Import) = {
+ val Import(expr, selectors) = imp
+ val expr1 = typer.typedQualifier(expr)
+ typer checkStable expr1
+ if (expr1.symbol != null && expr1.symbol.isRootPackage)
+ RootImportError(imp)
+
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(imp) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
+ }
+
+
/** Given a case class
* case class C[Ts] (ps: Us)
* Add the following methods to toScope:
@@ -1239,6 +1387,11 @@ trait Namers extends MethodSynthesis {
caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym
}
+ /**
+ * typeSig is invoked by monoTypeCompleters. It returns the type of a definition, which
+ * is then assigned to the corresponding symbol (typeSig itself does not need to assign
+ * the type to the symbol, but it can if necessary).
+ */
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
/** For definitions, transform Annotation trees to AnnotationInfos, assign
@@ -1271,84 +1424,33 @@ trait Namers extends MethodSynthesis {
}
val sym: Symbol = tree.symbol
- // @Lukas: I am not sure this is the right way to do things.
- // We used to only decorate the module class with annotations, which is
- // clearly wrong. Now we decorate both the class and the object.
- // But maybe some annotations are only meant for one of these but not for the other?
- //
- // TODO: meta-annotations to indicate class vs. object.
+
+ // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
annotate(sym)
if (sym.isModule) annotate(sym.moduleClass)
def getSig = tree match {
- case cdef @ ClassDef(_, name, tparams, impl) =>
- val clazz = tree.symbol
- val result = createNamer(tree).classSig(tparams, impl)
- clazz setInfo result
- if (clazz.isDerivedValueClass) {
- log("Ensuring companion for derived value class " + name + " at " + cdef.pos.show)
- clazz setFlag FINAL
- // Don't force the owner's info lest we create cycles as in SI-6357.
- enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
- }
- result
-
- case ModuleDef(_, _, impl) =>
- val clazz = sym.moduleClass
- clazz setInfo createNamer(tree).templateSig(impl)
- clazz.tpe
-
- case ddef @ DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
- // TODO: cleanup parameter list
- createNamer(tree).methodSig(ddef, mods, tparams, vparamss, tpt, rhs)
-
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val isBeforeSupercall = (
- (sym hasFlag PARAM | PRESUPER)
- && !mods.isJavaDefined
- && sym.owner.isConstructor
- )
- val typer1 = typer.constrTyperIf(isBeforeSupercall)
- if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- MissingParameterOrValTypeError(tpt)
- ErrorType
- }
- else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType)
- }
- else typer1.typedType(tpt).tpe
-
- case TypeDef(_, _, tparams, rhs) =>
- createNamer(tree).typeDefSig(sym, tparams, rhs) //@M!
-
- case Import(expr, selectors) =>
- val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
- if (expr1.symbol != null && expr1.symbol.isRootPackage)
- RootImportError(tree)
-
- if (expr1.isErrorTyped)
- ErrorType
- else {
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
- }
- }
+ case cdef: ClassDef =>
+ createNamer(tree).classSig(cdef)
+
+ case mdef: ModuleDef =>
+ createNamer(tree).moduleSig(mdef)
- val result =
- try getSig
- catch typeErrorHandler(tree, ErrorType)
+ case ddef: DefDef =>
+ createNamer(tree).methodSig(ddef)
- result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
- case _ => result
+ case vdef: ValDef =>
+ createNamer(tree).valDefSig(vdef)
+
+ case tdef: TypeDef =>
+ createNamer(tree).typeDefSig(tdef) //@M!
+
+ case imp: Import =>
+ importSig(imp)
}
+
+ try getSig
+ catch typeErrorHandler(tree, ErrorType)
}
def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
@@ -1508,14 +1610,25 @@ trait Namers extends MethodSynthesis {
}
}
- /** A class representing a lazy type with known type parameters.
+ /**
+ * A class representing a lazy type with known type parameters. `ctx` is the namer context in which the
+ * `owner` is defined.
+ *
+ * Constructing a PolyTypeCompleter for a DefDef creates type skolems for the type parameters and
+ * assigns them to the `tparams` trees.
*/
- class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
- private val ownerSym = owner.symbol
- override val typeParams = tparams map (_.symbol) //@M
- override val tree = restp.tree
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
+ // @M. If `owner` is an abstract type member, `typeParams` are all NoSymbol (see comment in `completerOf`),
+ // otherwise, the non-skolemized (external) type parameter symbols
+ override val typeParams = tparams map (_.symbol)
+
+ /* The definition tree (poly ClassDef, poly DefDef or HK TypeDef) */
+ override val tree = restp.tree
+
+ private val defnSym = tree.symbol
- if (ownerSym.isTerm) {
+ if (defnSym.isTerm) {
+ // for polymorphic DefDefs, create type skolems and assign them to the tparam trees.
val skolems = deriveFreshSkolems(tparams map (_.symbol))
map2(tparams, skolems)(_ setSymbol _)
}
@@ -1523,8 +1636,8 @@ trait Namers extends MethodSynthesis {
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
- if (ownerSym.isAbstractType)
- newNamerFor(ctx, owner) enterSyms tparams //@M
+ if (defnSym.isAbstractType)
+ newNamerFor(ctx, tree) enterSyms tparams //@M
restp complete sym
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index be218fcb02..2340c78f8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -268,26 +268,32 @@ trait NamesDefaults { self: Analyzer =>
*
* For by-name parameters, create a value
* x$n: () => T = () => arg
+ *
+ * For Ident(<unapply-selector>) arguments, no ValDef is created (SI-3353).
*/
- def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
+ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
- val argTpe = (
- if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
- }
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
- (context.scope.enter(s), byName, repeated)
+ val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ case Ident(nme.SELECTOR_DUMMY) =>
+ None // don't create a local ValDef if the argument is <unapply-selector>
+ case _ =>
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
+ Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
- case ((sym, byName, repeated), arg) =>
+ case (None, _) => None
+ case (Some((sym, byName, repeated)), arg) =>
val body =
if (byName) {
val res = blockTyper.typed(Function(List(), arg))
@@ -303,7 +309,7 @@ trait NamesDefaults { self: Analyzer =>
blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
} else arg
}
- atPos(body.pos)(ValDef(sym, body).setType(NoType))
+ Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
}
}
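A rough sketch of the transformation these synthetic ValDefs feed into (the fresh names x$n are illustrative, not literal compiler output): a named application is lifted into a block that evaluates the arguments in call-site order.

    object NamedArgsExample {
      def f(a: Int, b: Int = a + 1) = a * b
      val r = f(b = 3, a = 2)
      // the application is expanded, roughly, to
      // {
      //   val x$1 = 3   // argument for b, evaluated first (call-site order)
      //   val x$2 = 2   // argument for a
      //   f(x$2, x$1)
      // }
    }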
@@ -329,27 +335,29 @@ trait NamesDefaults { self: Analyzer =>
// ValDef's in the block), change the arguments to these local values.
case Apply(expr, typedArgs) =>
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
- val ref = gen.mkAttributedRef(vDef.symbol)
- atPos(vDef.pos.focus) {
- // for by-name parameters, the local value is a nullary function returning the argument
- tpe.typeSymbol match {
- case ByNameParamClass => Apply(ref, Nil)
- case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
- case _ => ref
+ val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match {
+ case None => origArg
+ case Some(vDef) =>
+ val ref = gen.mkAttributedRef(vDef.symbol)
+ atPos(vDef.pos.focus) {
+ // for by-name parameters, the local value is a nullary function returning the argument
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
- }
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
+ val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 69bbab6e42..f7579ad249 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -109,9 +109,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
import definitions._
import analyzer._ //Typer
-
- case class DefaultOverrideMatchAttachment(default: Tree)
-
object vpmName {
val one = newTermName("one")
val drop = newTermName("drop")
@@ -222,11 +219,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// However this is a pain (at least the way I'm going about it)
// and I have to think these detailed errors are primarily useful
// for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(m: Match) {
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
// A string describing the first variable pattern
var vpat: String = null
// Using an iterator so we can recognize the last case
- val it = m.cases.iterator
+ val it = cases.iterator
def addendum(pat: Tree) = {
matchingSymbolInScope(pat) match {
@@ -269,7 +266,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
def translateMatch(match_ : Match): Tree = {
val Match(selector, cases) = match_
- checkMatchVariablePatterns(match_)
+
+ val (nonSyntheticCases, defaultOverride) = cases match {
+ case init :+ last if treeInfo isSyntheticDefaultCase last =>
+ (init, Some(((scrut: Tree) => last.body)))
+ case _ =>
+ (cases, None)
+ }
+
+ checkMatchVariablePatterns(nonSyntheticCases)
// we don't transform after uncurry
// (that would require more sophistication when generating trees,
@@ -296,14 +301,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
- // the alternative to attaching the default case override would be to simply
- // append the default to the list of cases and suppress the unreachable case error that may arise (once we detect that...)
- val matchFailGenOverride = match_.attachments.get[DefaultOverrideMatchAttachment].map{case DefaultOverrideMatchAttachment(default) => ((scrut: Tree) => default)}
-
val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
+ val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
combined
@@ -409,15 +410,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- val (typeTestTreeMaker, patBinderOrCasted) =
- if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
- (List(treeMaker), treeMaker.nextBinder)
- } else {
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
// no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
// SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
// TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
@@ -426,10 +421,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
*/
- (Nil, patBinder setInfo extractor.paramType)
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+ // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
}
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
}
@@ -622,8 +628,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// to which type should the previous binder be casted?
def paramType : Type
- // binder has been casted to paramType if necessary
- def treeMaker(binder: Symbol, pos: Position): TreeMaker
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
@@ -637,6 +648,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case bp => bp
}
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
def subPatTypes: List[Type] =
if(isSeq) {
val TypeRef(pre, SeqClass, args) = seqTp
@@ -731,41 +747,31 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
- // binder has type paramType
- def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
val paramAccessors = binder.constrParamAccessors
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
val mutableBinders =
- if (paramAccessors exists (_.isMutable))
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
// checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders)
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- // caseFieldAccessors is messed up after typers (reversed, names mangled for non-public fields)
- // TODO: figure out why...
val accessors = binder.caseFieldAccessors
- // luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
- // (need to undo name-mangling, including the sneaky trailing whitespace)
- val constrParamAccessors = binder.constrParamAccessors
-
- def indexInCPA(acc: Symbol) =
- constrParamAccessors indexWhere { orig =>
- // patmatDebug("compare: "+ (orig, acc, orig.name, acc.name, (acc.name == orig.name), (acc.name startsWith (orig.name append "$"))))
- val origName = orig.name.toString.trim
- val accName = acc.name.toString.trim
- (accName == origName) || (accName startsWith (origName + "$"))
- }
-
- // patmatDebug("caseFieldAccessors: "+ (accessors, binder.caseFieldAccessors map indexInCPA))
- // patmatDebug("constrParamAccessors: "+ constrParamAccessors)
-
- val accessorsSorted = accessors sortBy indexInCPA
- if (accessorsSorted isDefinedAt (i-1)) REF(binder) DOT accessorsSorted(i-1)
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
@@ -781,11 +787,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def resultType = tpe.finalResultType
def isSeq = extractorCall.symbol.name == nme.unapplySeq
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
override protected def seqTree(binder: Symbol): Tree =
@@ -818,7 +834,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
protected lazy val rawSubPatTypes =
if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(nbSubPats == 1) List(resultInMonad)
+ else if(!isSeq && nbSubPats == 1) List(resultInMonad)
else getProductArgs(resultInMonad) match {
case Nil => List(resultInMonad)
case x => x
@@ -842,6 +858,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
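For example (an illustrative snippet, not from the patch), the binder for a sub-pattern that is only a wildcard is recognized by the extractor above and is never stored:

    case class Foo(a: Int, b: String)
    object WildcardBinderExample {
      def g(x: Any) = x match {
        case Foo(_, b) => b   // only b is referenced; the wildcard's binder lands in ignoredSubPatBinders and is never stored
        case _         => ""
      }
    }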
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
@@ -1009,10 +1035,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
// unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
// mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
def emitVars = storedBinders.nonEmpty
@@ -1033,10 +1066,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
import CODE._
- def bindSubPats(in: Tree): Tree = if (!emitVars) in
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
else {
- val (subPatBindersStored, subPatRefsStored) = stored.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new collection.mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
}
}
@@ -1056,7 +1101,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val subPatRefs: List[Tree],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
- val prevBinder: Symbol) extends FunTreeMaker with PreserveSubPatBinders {
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
@@ -1099,27 +1148,35 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders {
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
import CODE._
val nextBinder = prevBinder // just passing through
// mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant)
- override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
}
override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
}
- // typetag-based tests are inserted by the type checker
- def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
-
object TypeTestTreeMaker {
// factored out so that we can consistently generate other representations of the tree that implements the test
// (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
@@ -1133,12 +1190,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result
def eqTest(pat: Tree, testedBinder: Symbol): Result
def and(a: Result, b: Result): Result
+ def tru: Result
}
object treeCondStrategy extends TypeTestCondStrategy { import CODE._
type Result = Tree
def and(a: Result, b: Result): Result = a AND b
+ def tru = TRUE_typed
def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
@@ -1169,6 +1228,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
def eqTest(pat: Tree, testedBinder: Symbol): Result = false
def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
}
}
@@ -1238,10 +1310,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// I think it's okay:
// - the isInstanceOf test includes a test for the element type
// - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
case _ => default
}
@@ -1253,6 +1331,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
def isPureTypeTest = renderCondition(pureTypeTestChecker)
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
}
@@ -1751,6 +1831,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = TrueCond
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
@@ -1897,17 +1978,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case object False extends Prop
// symbols are propositions
- case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = nextSymId
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
override def toString = variable +"="+ const +"#"+ id
}
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
-
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: util.HashSet[Sym] = new util.HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
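The companion interns Sym instances so that reference equality can stand in for structural equality (see the eq-based Lit.equals change below). A simplified sketch of the same interning pattern, using a plain HashMap and String stand-ins instead of the compiler's Var/Const and util.HashSet:

    import scala.collection.mutable

    final class Interned private (val variable: String, val const: String)
    object Interned {
      private val uniques = mutable.HashMap.empty[(String, String), Interned]
      def apply(variable: String, const: String): Interned =
        uniques.getOrElseUpdate((variable, const), new Interned(variable, const))
    }
    // Interned("x", "Foo") eq Interned("x", "Foo")   // true: equal keys always yield the same instance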
def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
-
trait PropTraverser {
def apply(x: Prop): Unit = x match {
case And(a, b) => apply(a); apply(b)
@@ -2063,6 +2151,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
import scala.collection.mutable.ArrayBuffer
type FormulaBuilder = ArrayBuffer[Clause]
def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
def toFormula(buff: FormulaBuilder): Formula = buff
@@ -2167,7 +2256,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
class Lit(val sym: Sym, val pos: Boolean) {
override def toString = if (!pos) "-"+ sym.toString else sym.toString
override def equals(o: Any) = o match {
- case o: Lit => (o.sym == sym) && (o.pos == pos)
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
case _ => false
}
override def hashCode = sym.hashCode + pos.hashCode
@@ -2216,13 +2305,18 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit) = {
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
val negated = -unitLit
// drop entire clauses that are trivially true
// (i.e., disjunctions that contain the literal we're making true in the returned model),
// and simplify clauses by dropping the negation of the literal we're making true
// (since False \/ X == X)
- f.filterNot(_.contains(unitLit)).map(_ - negated)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
}
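The simplification performed here is the clause-rewriting half of unit propagation. A self-contained sketch over plain Int literals (positive/negative integers standing in for the compiler's Lit; these are not the actual Formula types):

    object UnitPropSketch {
      type Clause = Set[Int]
      def dropUnit(f: Vector[Clause], unit: Int): Vector[Clause] =
        f.filterNot(_ contains unit)   // clauses containing the unit literal are already satisfied
         .map(_ - (-unit))             // the negated literal can never hold, so drop it from the remaining clauses

      // dropUnit(Vector(Set(1, 2), Set(-1, 3), Set(2, 3)), 1) == Vector(Set(3), Set(2, 3))
    }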
def findModelFor(f: Formula): Model = {
@@ -2725,7 +2819,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
// however, must approximate abstract types in
- val subTp = appliedType(pre.memberType(sym), sym.typeParams.map(_ => WildcardType))
+
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
// patmatDebug("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
@@ -3699,11 +3795,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// nextBinder: T
// next == MatchMonad[U]
// returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- VAL(nextBinder) === res,
- next
- ))
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
// guardTree: Boolean
// next: MatchMonad[T]
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 969bb8aceb..b9fdd7280e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -11,6 +11,9 @@ import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.nsc.settings.AnyScalaVersion
+import scala.tools.nsc.settings.NoScalaVersion
/** <p>
* Post-attribution checking and transformation.
@@ -60,23 +63,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
super.transformInfo(sym, tp)
}
- val toJavaRepeatedParam = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(pre, RepeatedParamClass, args) =>
- typeRef(pre, JavaRepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
-
- val toScalaRepeatedParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, JavaRepeatedParamClass, args) =>
- typeRef(pre, RepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
+ val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass)
+ val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass)
def accessFlagsToString(sym: Symbol) = flagsToString(
sym getFlag (PRIVATE | PROTECTED),
@@ -156,27 +144,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Override checking ------------------------------------------------------------
- def isJavaVarargsAncestor(clazz: Symbol) = (
- clazz.isClass
- && clazz.isJavaDefined
- && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
- )
-
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
// This is quite expensive, so attempt to skip it completely.
// Insist there at least be a java-defined ancestor which
// defines a varargs method. TODO: Find a cheaper way to exclude.
- if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) {
+ if (inheritsJavaVarArgsMethod(clazz)) {
log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
val self = clazz.thisType
val bridges = new ListBuffer[Tree]
def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
+ log(s"Generating varargs bridge for ${member.fullLocationString} of type $bridgetpe")
- val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
+ val newFlags = (member.flags | VBRIDGE | ARTIFACT) & ~PRIVATE
+ val bridge = member.cloneSymbolImpl(clazz, newFlags) setPos clazz.pos
bridge.setInfo(bridgetpe.cloneInfo(bridge))
clazz.info.decls enter bridge
@@ -189,26 +172,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
localTyper typed DefDef(bridge, body)
}
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
+ // For all concrete non-private members (but: see below) that have a (Scala) repeated
+ // parameter: compute the corresponding method type `jtpe` with a Java repeated parameter
// if a method with type `jtpe` exists and that method is not a varargs bridge
// then create a varargs bridge of type `jtpe` that forwards to the
// member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
+ //
+ // @PP: Can't call nonPrivateMembers because we will miss refinement members,
+ // which have been marked private. See SI-4729.
+ for (member <- nonTrivialMembers(clazz)) {
+ log(s"Considering $member for java varargs bridge in $clazz")
if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+
// Delaying calling memberType as long as possible
if (inherited ne NoSymbol) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
+ val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
// we need to create a bridge
- if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists)
+ val inherited1 = inherited filter (sym => !(sym hasFlag VBRIDGE) && (self memberType sym matches jtpe))
+ if (inherited1.exists)
bridges += varargBridge(member, jtpe)
}
}
}
+ if (bridges.size > 0)
+ log(s"Adding ${bridges.size} bridges for methods extending java varargs.")
+
bridges.toList
}
else Nil
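A sketch of the situation this code handles, assuming a Java base class that is not part of this patch:

    // public class J { public void m(String... xs) {} }   // assumed Java class
    class S extends J {
      override def m(xs: String*): Unit = ()   // Scala repeated parameter; erases to m(Seq[String])
    }
    // Since m(Seq) does not override Java's m(String[]), RefChecks adds a bridge with the Java
    // varargs signature, roughly  def m(xs: Array[String]): Unit = m(xs: _*),  forwarding to the Scala method.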
@@ -905,13 +897,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* the type occurs itself at variance position given by `variance`
*/
def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType => ;
- case WildcardType => ;
- case NoType => ;
- case NoPrefix => ;
- case ThisType(_) => ;
- case ConstantType(_) => ;
- // case DeBruijnIndex(_, _) => ;
+ case ErrorType =>
+ case WildcardType =>
+ case BoundedWildcardType(bounds) =>
+ validateVariance(bounds, variance)
+ case NoType =>
+ case NoPrefix =>
+ case ThisType(_) =>
+ case ConstantType(_) =>
+ // case DeBruijnIndex(_, _) =>
case SingleType(pre, sym) =>
validateVariance(pre, variance)
case TypeRef(pre, sym, args) =>
@@ -1378,10 +1372,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation)
- unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
- sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
- )
+ if (sym.hasMigrationAnnotation) {
+ val changed = try
+ settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
+ catch {
+ case e : NumberFormatException =>
+ unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
+ true
+ }
+ if (changed)
+ unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ }
}
private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
@@ -1473,8 +1475,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
+ ( args.nonEmpty
+ && (args.last eq tree)
+ && (fn.tpe.params.length == args.length)
+ && isRepeatedParamType(fn.tpe.params.last.tpe)
+ )
case _ =>
false
}
@@ -1587,7 +1592,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
- if (settings.Xmigration28.value)
+ if (settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
@@ -1686,8 +1691,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
- if (currentOwner.isDerivedValueClass)
- currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 20db479463..64c5b41638 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -4,31 +4,7 @@ package typechecker
trait StdAttachments {
self: Analyzer =>
- import global._
-
- /** Carries information necessary to expand the host tree.
- * At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
- * After a macro application has been successfully expanded, this attachment is destroyed.
- */
type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
-
- /** After being synthesized by the parser, primary constructors aren't fully baked yet.
- * A call to super in such constructors is just a fill-me-in-later dummy resolved later
- * by `parentTypes`. This attachment coordinates `parentTypes` and `typedTemplate` and
- * allows them to complete the synthesis.
- */
- case class SuperArgsAttachment(argss: List[List[Tree]])
-
- /** Convenience method for `SuperArgsAttachment`.
- * Compared with `MacroRuntimeAttachment` this attachment has different a usage pattern,
- * so it really benefits from a dedicated extractor.
- */
- def superArgs(tree: Tree): Option[List[List[Tree]]] =
- tree.attachments.get[SuperArgsAttachment] collect { case SuperArgsAttachment(argss) => argss }
-
- /** Determines whether the given tree has an associated SuperArgsAttachment.
- */
- def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index a907ab6c66..242eb9c9fe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -50,6 +50,10 @@ trait SyntheticMethods extends ast.TreeDSL {
else if (clazz.isDerivedValueClass) valueSymbols
else Nil
}
+ private lazy val renamedCaseAccessors = perRunCaches.newMap[Symbol, mutable.Map[TermName, TermName]]()
+ /** Does not force the info of `caseclazz` */
+ final def caseAccessorName(caseclazz: Symbol, paramName: TermName) =
+ (renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName))
/** Add the synthetic methods to case classes.
*/
@@ -78,14 +82,7 @@ trait SyntheticMethods extends ast.TreeDSL {
else templ
}
- val originalAccessors = clazz.caseFieldAccessors
- // private ones will have been renamed -- make sure they are entered
- // in the original order.
- def accessors = clazz.caseFieldAccessors sortBy { acc =>
- originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
- }
- }
+ def accessors = clazz.caseFieldAccessors
val arity = accessors.size
// If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
// !!! Hidden behind -Xexperimental due to bummer type inference bugs.
@@ -391,6 +388,8 @@ trait SyntheticMethods extends ast.TreeDSL {
// TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
ddef.symbol resetFlag CASEACCESSOR
lb += logResult("case accessor new")(newAcc)
+ val renamedInClassMap = renamedCaseAccessors.getOrElseUpdate(clazz, mutable.Map() withDefault(x => x))
+ renamedInClassMap(original.name.toTermName) = newAcc.symbol.name.toTermName
}
(lb ++= templ.body ++= synthesize()).toList
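
The renamedCaseAccessors bookkeeping above is a per-class rename table whose lookups fall back to the unrenamed name. A minimal standalone sketch of that Map-withDefault pattern, with plain strings standing in for symbols and term names:

    import scala.collection.mutable

    object RenamedAccessorsSketch {
      // One rename table per class; missing entries fall back to the original name.
      private val renamed = mutable.Map[String, mutable.Map[String, String]]()

      def recordRename(clazz: String, from: String, to: String): Unit =
        renamed.getOrElseUpdate(clazz, mutable.Map[String, String]() withDefault (x => x))(from) = to

      // Mirrors caseAccessorName: no table for the class means "use the parameter name as is".
      def accessorName(clazz: String, paramName: String): String =
        (renamed get clazz).fold(paramName)(_(paramName))

      def main(args: Array[String]): Unit = {
        recordRename("Foo", "x", "x$1")
        println(accessorName("Foo", "x")) // x$1  (renamed)
        println(accessorName("Foo", "y")) // y    (withDefault fallback)
        println(accessorName("Bar", "x")) // x    (no table for Bar at all)
      }
    }
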
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 48a5a36b00..c5c3c560ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -117,7 +117,8 @@ abstract class TreeCheckers extends Analyzer {
try p.source.path + ":" + p.line
catch { case _: UnsupportedOperationException => p.toString }
- def errorFn(msg: Any): Unit = println("[check: %s] %s".format(phase.prev, msg))
+ private var hasError: Boolean = false
+ def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
def informFn(msg: Any) {
if (settings.verbose.value || settings.debug.value)
@@ -151,6 +152,7 @@ abstract class TreeCheckers extends Analyzer {
result
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
+ hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
@@ -169,6 +171,7 @@ abstract class TreeCheckers extends Analyzer {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
+ if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
@@ -217,8 +220,11 @@ abstract class TreeCheckers extends Analyzer {
case _ => ()
}
- object precheck extends Traverser {
+ object precheck extends TreeStackTraverser {
override def traverse(tree: Tree) {
+ checkSymbolRefsRespectScope(tree)
+ checkReturnReferencesDirectlyEnclosingDef(tree)
+
val sym = tree.symbol
def accessed = sym.accessed
def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
@@ -289,6 +295,41 @@ abstract class TreeCheckers extends Analyzer {
}
super.traverse(tree)
}
+
+ private def checkSymbolRefsRespectScope(tree: Tree) {
+      def symbolOf(t: Tree): Symbol = Option(t.symbol).getOrElse(NoSymbol)
+ def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
+ val info = Option(symbolOf(tree).info).getOrElse(NoType)
+ val referencedSymbols: List[Symbol] = {
+ val directRef = tree match {
+ case _: RefTree => symbolOf(tree).toOption
+ case _ => None
+ }
+ def referencedSyms(tp: Type) = (tp collect {
+ case TypeRef(_, sym, _) => sym
+ }).toList
+ val indirectRefs = referencedSyms(info)
+ (indirectRefs ++ directRef).distinct
+ }
+ for {
+ sym <- referencedSymbols
+ if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
+      } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to an out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+ }
+
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
+ tree match {
+ case _: Return =>
+ path.collectFirst {
+ case dd: DefDef => dd
+ } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) =>
+          if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}) does not reference directly enclosing DefDef (${dd.symbol})")
+ }
+ case _ =>
+ }
+ }
}
object postcheck extends Traverser {
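
The new Return check relies on precheck now being a TreeStackTraverser: path is the stack of enclosing trees, and collectFirst picks the innermost DefDef. A small illustration of that lookup pattern over a toy node type (the node classes are made up; only the collectFirst-on-the-path idea carries over):

    object EnclosingDefSketch {
      sealed trait Node
      final case class DefNode(name: String) extends Node
      final case class BlockNode(label: String) extends Node
      final case class ReturnNode(target: String) extends Node

      // `path` is innermost-first, like the traverser's stack of enclosing trees.
      def checkReturn(ret: ReturnNode, path: List[Node]): Option[String] =
        path.collectFirst { case d: DefNode => d } match {
          case None                            => Some(s"$ret must be enclosed in a DefNode")
          case Some(d) if d.name != ret.target => Some(s"$ret does not reference directly enclosing $d")
          case Some(_)                         => None
        }

      def main(args: Array[String]): Unit = {
        val path = List(BlockNode("body"), DefNode("inner"), DefNode("outer"))
        println(checkReturn(ReturnNode("inner"), path)) // None: targets the directly enclosing def
        println(checkReturn(ReturnNode("outer"), path)) // Some(...): skips over the inner def
      }
    }
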
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 553583e6b7..dc5491a509 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -30,7 +30,6 @@ trait Typers extends Modes with Adaptations with Tags {
import global._
import definitions._
import TypersStats._
- import patmat.DefaultOverrideMatchAttachment
final def forArgMode(fun: Tree, mode: Int) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
@@ -53,10 +52,8 @@ trait Typers extends Modes with Adaptations with Tags {
object UnTyper extends Traverser {
override def traverse(tree: Tree) = {
- if (tree.canHaveAttrs) {
- tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
- }
+ if (tree != EmptyTree) tree.tpe = null
+ if (tree.hasSymbol) tree.symbol = NoSymbol
super.traverse(tree)
}
}
@@ -453,12 +450,12 @@ trait Typers extends Modes with Adaptations with Tags {
def reenterValueParams(vparamss: List[List[ValDef]]) {
for (vparams <- vparamss)
for (vparam <- vparams)
- vparam.symbol = context.scope enter vparam.symbol
+ context.scope enter vparam.symbol
}
def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
for (tparam <- tparams) yield {
- tparam.symbol = context.scope enter tparam.symbol
+ context.scope enter tparam.symbol
tparam.symbol.deSkolemize
}
@@ -872,7 +869,9 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
+ // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
}
else
@@ -1052,15 +1051,21 @@ trait Typers extends Modes with Adaptations with Tags {
def insertApply(): Tree = {
assert(!inHKMode(mode), modeString(mode)) //@M
- val qual = adaptToName(tree, nme.apply) match {
- case id @ Ident(_) =>
- val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
- else if (id.symbol.owner.isClass)
- context.enclosingSubClassContext(id.symbol.owner).prefix
- else NoPrefix
- stabilize(id, pre, EXPRmode | QUALmode, WildcardType)
- case sel @ Select(qualqual, _) =>
- stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType)
+ val adapted = adaptToName(tree, nme.apply)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+      // TODO reconcile the overlap between Typers#stabilize and TreeGen.stabilize
+ val qual = adapted match {
+ case This(_) =>
+ gen.stabilize(adapted)
+ case Ident(_) =>
+ val owner = adapted.symbol.owner
+ val pre =
+ if (owner.isPackageClass) owner.thisType
+ else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
+ else NoPrefix
+ stabilize0(pre)
+ case Select(qualqual, _) =>
+ stabilize0(qualqual.tpe)
case other =>
other
}
@@ -1071,8 +1076,8 @@ trait Typers extends Modes with Adaptations with Tags {
// begin adapt
tree.tpe match {
- case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
- adaptAnnotations(tree, mode, pt)
+ case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+ adaptAnnotations(tree, this, mode, pt)
case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
@@ -1176,8 +1181,8 @@ trait Typers extends Modes with Adaptations with Tags {
Select(tree, "to" + sym.name)
}
}
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
@@ -1393,6 +1398,13 @@ trait Typers extends Modes with Adaptations with Tags {
if (member(qual, name) != NoSymbol) qual
else adaptToMember(qual, HasMember(name))
+ private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
+ // XXX: see about using the class's symbol....
+ enclTparams foreach (sym => context.scope.enter(sym))
+ namer.enterValueParams(vparamss)
+ typed(cbody)
+ }
+
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
@@ -1452,7 +1464,7 @@ trait Typers extends Modes with Adaptations with Tags {
case DefDef(_, name, _, _, _, rhs) =>
if (stat.symbol.isAuxiliaryConstructor)
notAllowed("secondary constructor")
- else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_))
+ else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic)
notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.")
else if (stat.symbol != null && stat.symbol.isParamAccessor)
notAllowed("additional parameter")
@@ -1494,243 +1506,126 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(tparam.pos, "type parameter of value class may not be specialized")
}
- /** Typechecks a parent type reference.
- *
- * This typecheck is harder than it might look, because it should honor early
- * definitions and also perform type argument inference with the help of super call
- * arguments provided in `encodedtpt`.
- *
- * The method is called in batches (batch = 1 time per each parent type referenced),
- * two batches per definition: once from namer, when entering a ClassDef or a ModuleDef
- * and once from typer, when typechecking the definition.
- *
- * ***Arguments***
- *
- * `encodedtpt` represents the parent type reference wrapped in an `Apply` node
- * which indicates value arguments (i.e. type macro arguments or super constructor call arguments)
- * If no value arguments are provided by the user, the `Apply` node is still
- * there, but its `args` will be set to `Nil`.
- * This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
- *
- * `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
- * Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
- * (I wrote "maybe" because trait constructors don't call super constructors).
- * This argument is synthesized by `tools.nsc.ast.Trees.Template`.
- *
- * `inMixinPosition` indicates whether the reference is not the first in the
- * list of parents (and therefore cannot be a class) or the opposite.
- *
- * ***Return value and side effects***
- *
- * Returns a `TypeTree` representing a resolved parent type.
- * If the typechecked parent reference implies non-nullary and non-empty argument list,
- * this argument list is attached to the returned value in SuperArgsAttachment.
- * The attachment is necessary for the subsequent typecheck to fixup a super constructor call
- * in the body of the primary constructor (see `typedTemplate` for details).
- *
- * This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
- * described in the docs of that method. It might also attribute the Super(_, _) reference
- * (if present) inside the primary constructor of `templ`.
- *
- * ***Example***
- *
- * For the following definition:
- *
- * class D extends {
- * val x = 2
- * val y = 4
- * } with B(x)(3) with C(y) with T
- *
- * this method will be called six times:
- *
- * (3 times from the namer)
- * typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
- * typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
- * typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
- *
- * (3 times from the typer)
- * <the same three calls>
- */
- private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
- val app = treeInfo.dissectApplied(encodedtpt)
- val (treeInfo.Applied(core, targs, argss), decodedtpt) = (app, app.callee)
- val argssAreTrivial = argss == Nil || argss == ListOfNil
-
- // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
- // we'll have to check the probe for isTypeMacro anyways.
- // therefore I think it's reasonable to trade a more specific "inherits itself" error
- // for a generic, yet understandable "cyclic reference" error
- var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
- if (probe == null) probe = NoSymbol
- probe.initialize
-
- if (probe.isTrait || inMixinPosition) {
- if (!argssAreTrivial) {
- if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
- else () // a class in a mixin position - this warrants an error in `validateParentClasses`
- // therefore here we do nothing, e.g. don't check that the # of ctor arguments
- // matches the # of ctor parameters or stuff like that
- }
- typedType(decodedtpt)
- } else {
- var supertpt = typedTypeConstructor(decodedtpt)
- val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else Nil
- if (supertparams.nonEmpty) {
- typedPrimaryConstrBody(templ) {
- val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
- val supercall = New(supertpe, mmap(argss)(_.duplicate))
- val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
- ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
- atPos(supertpt.pos.focus)(supercall)
- } match {
- case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt)
- case tpt => supertpt = TypeTree(tpt.tpe) setPos supertpt.pos.focus
+ def parentTypes(templ: Template): List[Tree] =
+ if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
+ else try {
+ val clazz = context.owner
+ // Normalize supertype and mixins so that supertype is always a class, not a trait.
+ var supertpt = typedTypeConstructor(templ.parents.head)
+ val firstParent = supertpt.tpe.typeSymbol
+ var mixins = templ.parents.tail map typedType
+ // If first parent is a trait, make it first mixin and add its superclass as first parent
+ while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
+ val supertpt1 = typedType(supertpt)
+ if (!supertpt1.isErrorTyped) {
+ mixins = supertpt1 :: mixins
+ supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
}
}
- // this is the place where we tell the typer what argss should be used for the super call
- // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
- // the super call dummy is already good enough, so we don't need to do anything
- if (argssAreTrivial) supertpt else supertpt updateAttachment SuperArgsAttachment(argss)
- }
- }
-
- /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
- * Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
- * `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
- *
- * ***Return value and side effects***
- *
- * If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
- * Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
- *
- * As a side effect, this method attributes the underlying fields of early vals.
- * Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
- * at least once per definition. It'd be great to disentangle this logic at some point.
- *
- * ***Example***
- *
- * For the following definition:
- *
- * class D extends {
- * val x = 2
- * val y = 4
- * } with B(x)(3) with C(y) with T
- *
- * the primary constructor of `templ` will be:
- *
- * Block(List(
- * ValDef(NoMods, x, TypeTree(), 2)
- * ValDef(NoMods, y, TypeTree(), 4)
- * global.pendingSuperCall,
- * Literal(Constant(())))
- *
- * Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
- * which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
- * from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
- */
- private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
- treeInfo.firstConstructor(templ.body) match {
- case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
- val (preSuperStats, superCall) = {
- val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
- (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
- }
- val superCall1 = (superCall match {
- case global.pendingSuperCall => actualSuperCall
- case EmptyTree => EmptyTree
- }) orElse cunit
- val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
-
- val clazz = context.owner
- assert(clazz != NoSymbol, templ)
- val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
- val cbody2 = { // called both during completion AND typing.
- val typer1 = newTyper(cscope)
- // XXX: see about using the class's symbol....
- clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
- typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
- typer1.typed(cbody1)
- }
+ if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
+ supertpt.tpe = AnyRefClass.tpe
+
+ // Determine
+ // - supertparams: Missing type parameters from supertype
+ // - supertpe: Given supertype, polymorphic in supertparams
+ val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
+ var supertpe = supertpt.tpe
+ if (!supertparams.isEmpty)
+ supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
+
+ // A method to replace a super reference by a New in a supercall
+ def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
+ case Apply(fn, args) =>
+ treeCopy.Apply(scall, transformSuperCall(fn), args map (_.duplicate))
+ case Select(Super(_, _), nme.CONSTRUCTOR) =>
+ treeCopy.Select(
+ scall,
+ atPos(supertpt.pos.focus)(New(TypeTree(supertpe)) setType supertpe),
+ nme.CONSTRUCTOR)
+ }
- val preSuperVals = treeInfo.preSuperFields(templ.body)
- if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
- debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
- else
- map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+ treeInfo.firstConstructor(templ.body) match {
+ case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
+ // Convert constructor body to block in environment and typecheck it
+ val (preSuperStats, superCall) = {
+ val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
+ (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
+ }
+ val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
+ case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
+ case _ => cunit.duplicate
+ })
+ val outercontext = context.outer
+
+ assert(clazz != NoSymbol, templ)
+ val cscope = outercontext.makeNewScope(constr, outercontext.owner)
+ val cbody2 = newTyper(cscope) // called both during completion AND typing.
+ .typePrimaryConstrBody(clazz,
+ cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
+
+ superCall match {
+ case Apply(_, _) =>
+ val treeInfo.Applied(_, _, argss) = superCall
+ val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
+ if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
+ ConstrArgsInTraitParentTpeError(sarg, firstParent)
+ if (!supertparams.isEmpty)
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ case _ =>
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
+ }
- if (superCall1 == cunit) EmptyTree else cbody2
- case _ =>
- EmptyTree
- }
+ val preSuperVals = treeInfo.preSuperFields(templ.body)
+ if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
+ debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ else
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
- /** Makes sure that the first type tree in the list of parent types is always a class.
- * If the first parent is a trait, prepend its supertype to the list until it's a class.
- */
- private def normalizeFirstParent(parents: List[Tree]): List[Tree] = parents match {
- case first :: rest if treeInfo.isTraitRef(first) =>
- def explode(supertpt: Tree, acc: List[Tree]): List[Tree] = {
- if (treeInfo.isTraitRef(supertpt)) {
- val supertpt1 = typedType(supertpt)
- if (!supertpt1.isErrorTyped) {
- val supersupertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
- return explode(supersupertpt, supertpt1 :: acc)
- }
- }
- if (supertpt.tpe.typeSymbol == AnyClass) supertpt.tpe = AnyRefClass.tpe
- supertpt :: acc
+ case _ =>
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
}
- explode(first, Nil) ++ rest
- case _ => parents
- }
+/* experimental: early types as type arguments
+ val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
+ val earlyMap = new EarlyMap(clazz)
+ List.mapConserve(supertpt :: mixins){ tpt =>
+ val tpt1 = checkNoEscaping.privates(clazz, tpt)
+ if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
+ }
+*/
- /** Certain parents are added in the parser before it is known whether
- * that class also declared them as parents. For instance, this is an
- * error unless we take corrective action here:
- *
- * case class Foo() extends Serializable
- *
- * So we strip the duplicates before typer.
- */
- private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
- case Nil => Nil
- case x :: xs =>
- val sym = x.symbol
- x :: fixDuplicateSyntheticParents(
- if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
- else xs
- )
- }
+      //Console.println("parents(" + clazz + ") = " + (supertpt :: mixins)) //DEBUG
- def parentTypes(templ: Template): List[Tree] = templ.parents match {
- case Nil => List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
- case first :: rest =>
- try {
- val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
- typedParentType(first, templ, inMixinPosition = false) +:
- (rest map (typedParentType(_, templ, inMixinPosition = true)))))
-
- // if that is required to infer the targs of a super call
- // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck
- // as a side effect, that typecheck also assigns types to the fields underlying early vals
- // however if inference is not required, the typecheck doesn't happen
- // and therefore early fields have their type trees not assigned
- // here we detect this situation and take preventive measures
- if (treeInfo.hasUntypedPreSuperFields(templ.body))
- typedPrimaryConstrBody(templ)(EmptyTree)
-
- supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
- } catch {
- case ex: TypeError =>
- // fallback in case of cyclic errors
- // @H none of the tests enter here but I couldn't rule it out
- // upd. @E when a definitions inherits itself, we end up here
- // because `typedParentType` triggers `initialize` for parent types symbols
- log("Type error calculating parents in template " + templ)
- log("Error: " + ex)
- ParentTypesError(templ, ex)
- List(TypeTree(AnyRefClass.tpe))
+ // Certain parents are added in the parser before it is known whether
+ // that class also declared them as parents. For instance, this is an
+ // error unless we take corrective action here:
+ //
+ // case class Foo() extends Serializable
+ //
+ // So we strip the duplicates before typer.
+ def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
+ case Nil => Nil
+ case x :: xs =>
+ val sym = x.symbol
+ x :: fixDuplicates(
+ if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
+ else xs
+ )
}
- }
+
+ fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
+ }
+ catch {
+ case ex: TypeError =>
+ // fallback in case of cyclic errors
+ // @H none of the tests enter here but I couldn't rule it out
+ log("Type error calculating parents in template " + templ)
+ log("Error: " + ex)
+ ParentTypesError(templ, ex)
+ List(TypeTree(AnyRefClass.tpe))
+ }
/** <p>Check that</p>
* <ul>
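
The loop near the top of the restored parentTypes keeps rotating the first parent: while it is a trait, the trait is pushed onto the mixin list and its own first parent becomes the candidate superclass. A standalone sketch of that normalization over a toy parent table (the table and names are illustrative):

    object ParentNormalizationSketch {
      // Minimal stand-in for the info the typer consults: is the parent a trait,
      // and what is its own first parent?
      final case class ParentInfo(isTrait: Boolean, firstParent: String)

      def normalize(first: String, mixins: List[String], info: Map[String, ParentInfo]): (String, List[String]) = {
        var supertpt = first
        var ms       = mixins
        // If the first parent is a trait, make it the first mixin and retry with its superclass.
        while (info.get(supertpt).exists(_.isTrait)) {
          ms       = supertpt :: ms
          supertpt = info(supertpt).firstParent
        }
        (supertpt, ms)
      }

      def main(args: Array[String]): Unit = {
        val info = Map(
          "T" -> ParentInfo(isTrait = true,  firstParent = "B"),
          "B" -> ParentInfo(isTrait = false, firstParent = "AnyRef"),
          "U" -> ParentInfo(isTrait = true,  firstParent = "AnyRef")
        )
        // class D extends T with U  ==>  superclass B, mixins T and U
        println(normalize("T", List("U"), info)) // (B,List(T, U))
      }
    }
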
@@ -1903,33 +1798,33 @@ trait Typers extends Modes with Adaptations with Tags {
})
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
-
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
- // same owner. Until that can be straightened out we can't allow companion objects in package
+ // same owner. Until that can be straightened out we will warn on companion objects in package
// objects. But this code also tries to be friendly by distinguishing between case classes and
// user written companion pairs
- def restrictPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
+ def warnPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
// ignore synthetic objects, because the "companion" object to a case class is synthetic and
// we only want one error per case class
if (!m.isSynthetic) {
// can't handle case classes in package objects
- if (m.isCaseClass) pkgObjectRestriction(m, mdef, "case")
+ if (m.isCaseClass) pkgObjectWarning(m, mdef, "case")
// can't handle companion class/object pairs in package objects
- else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
- (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
- pkgObjectRestriction(m, mdef, "companion")
+ else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
+ (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
+ pkgObjectWarning(m, mdef, "companion")
}
- def pkgObjectRestriction(m : Symbol, mdef : ModuleDef, restricted : String) = {
+ def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
- context.error(if (m.pos.isDefined) m.pos else mdef.pos, s"implementation restriction: package object ${pkgName} cannot contain ${restricted} ${m}. Instead, ${m} should be placed directly in package ${pkgName}.")
- }
+ context.warning(if (m.pos.isDefined) m.pos else mdef.pos, s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
+ }
}
- if (!settings.companionsInPkgObjs.value && mdef.symbol.isPackageObject)
- restrictPackageObjectMembers(mdef)
-
+ if (mdef.symbol.isPackageObject)
+ warnPackageObjectMembers(mdef)
+
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
@@ -1995,12 +1890,9 @@ trait Typers extends Modes with Adaptations with Tags {
// the following is necessary for templates generated later
assert(clazz.info.decls != EmptyScope, clazz)
enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
- if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore
- validateParentClasses(parents1, selfType)
+ validateParentClasses(parents1, selfType)
if (clazz.isCase)
validateNoCaseAncestor(clazz)
- if (clazz.isTrait && hasSuperArgs(parents1.head))
- ConstrArgsInParentOfTraitError(parents1.head, clazz)
if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
unit.error(clazz.pos, "inner classes cannot be classfile annotations")
@@ -2008,21 +1900,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
- val body = {
- val body =
- if (isPastTyper || reporter.hasErrors) templ.body
- else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
- val primaryCtor = treeInfo.firstConstructor(body)
- val primaryCtor1 = primaryCtor match {
- case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
- val argss = superArgs(parents1.head) getOrElse Nil
- val pos = wrappingPos(parents1.head.pos, argss.flatten)
- val superCall = atPos(pos)(PrimarySuperCall(argss))
- deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
- case _ => primaryCtor
- }
- body mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
- }
+ val body =
+ if (isPastTyper || reporter.hasErrors) templ.body
+ else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
val body1 = typedStats(body, templ.symbol)
@@ -2058,21 +1938,28 @@ trait Typers extends Modes with Adaptations with Tags {
* @return ...
*/
def typedValDef(vdef: ValDef): ValDef = {
-// attributes(vdef)
+ val sym = vdef.symbol
+ val valDefTyper = {
+ val maybeConstrCtx =
+ if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
+ else context
+ newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
+ }
+ valDefTyper.typedValDefImpl(vdef)
+ }
+
+ // use typedValDef instead. this version is called after creating a new context for the ValDef
+ private def typedValDefImpl(vdef: ValDef) = {
val sym = vdef.symbol.initialize
- val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
val typedMods = typedModifiers(vdef.mods)
sym.annotations.map(_.completeInfo)
- var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
+ val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr)) {
- if (!sym.isMutable)
- VolatileValueError(vdef)
- else if (sym.isFinal)
- FinalVolatileVarError(vdef)
- }
+ if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
+ VolatileValueError(vdef)
+
val rhs1 =
if (vdef.rhs.isEmpty) {
if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper)
@@ -2095,7 +1982,7 @@ trait Typers extends Modes with Adaptations with Tags {
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
+ transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
@@ -2213,37 +2100,58 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
val meth = ddef.symbol
- def fail(pos: Position, msg: String) = unit.error(pos, msg)
- val tp: Type = meth.tpe match {
- case mt @ MethodType(_, _) => mt
- case NullaryMethodType(restpe) => restpe // TODO_NMT: drop NullaryMethodType from resultType?
- case PolyType(_, restpe) => restpe
- case _ => NoType
- }
- def nthParamPos(n: Int) = ddef.vparamss match {
- case xs :: _ if xs.length > n => xs(n).pos
- case _ => meth.pos
- }
- def failStruct(pos: Position, what: String, where: String = "Parameter") =
- fail(pos, s"$where type in structural refinement may not refer to $what")
-
- foreachWithIndex(tp.paramTypes) { (paramType, idx) =>
- val sym = paramType.typeSymbol
- def paramPos = nthParamPos(idx)
-
- if (sym.isAbstractType) {
- if (!sym.hasTransOwner(meth.owner))
- failStruct(paramPos, "an abstract type defined outside that refinement")
- else if (!sym.hasTransOwner(meth))
- failStruct(paramPos, "a type member of that refinement")
- }
- if (sym.isDerivedValueClass)
- failStruct(paramPos, "a user-defined value class")
- if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- failStruct(paramPos, "the type of that refinement (self type)")
- }
- if (tp.resultType.typeSymbol.isDerivedValueClass)
- failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result")
+ def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match {
+ case Nil => ""
+ case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
+ }
+ def fail(pos: Position, msg: String): Boolean = {
+ unit.error(pos, msg)
+ false
+ }
+ /** Have to examine all parameters in all lists.
+ */
+ def paramssTypes(tp: Type): List[List[Type]] = tp match {
+ case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
+ case PolyType(_, restpe) => paramssTypes(restpe)
+ case _ => Nil
+ }
+ def resultType = meth.tpe.finalResultType
+ def nthParamPos(n1: Int, n2: Int) =
+ try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
+
+ def failStruct(pos: Position, what: String, where: String = "Parameter type") =
+ fail(pos, s"$where in structural refinement may not refer to $what")
+
+ foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) =>
+ foreachWithIndex(paramList) { (paramType, paramIdx) =>
+ val sym = paramType.typeSymbol
+ def paramPos = nthParamPos(listIdx, paramIdx)
+
+ /** Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
+ */
+ def checkAbstract(tp0: Type, what: String): Boolean = {
+ def check(sym: Symbol): Boolean = !sym.isAbstractType || {
+ log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""")
+ ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what))
+ || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what))
+ || checkAbstract(sym.info.bounds.hi, "Type bound")
+ )
+ }
+ tp0.dealiasWidenChain forall (t => check(t.typeSymbol))
+ }
+ checkAbstract(paramType, "Parameter type")
+
+ if (sym.isDerivedValueClass)
+ failStruct(paramPos, "a user-defined value class")
+ if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
+ failStruct(paramPos, "the type of that refinement (self type)")
+ }
+ }
+ if (resultType.typeSymbol.isDerivedValueClass)
+ failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
def typedUseCase(useCase: UseCase) {
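
For context, the kind of user code the reworked checkMethodStructuralCompatible examines: concrete parameter and result types in a structural refinement are fine, while an abstract type coming from outside the refinement is rejected. The rejected shape is kept in a comment so this sketch still compiles and runs:

    import scala.language.reflectiveCalls

    object StructuralParamTypeSketch {
      class Box { def length: Int = 3 }

      // Accepted: the refinement's parameter and result types are concrete.
      def len(x: { def length: Int }): Int = x.length

      // Rejected by the check above (kept as a comment so the sketch compiles):
      //   def bad[T](x: { def use(t: T): Unit }): Unit = ()
      //   // error: Parameter type in structural refinement may not refer to
      //   //        an abstract type defined outside that refinement

      def main(args: Array[String]): Unit =
        println(len(new Box)) // 3
    }
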
@@ -2373,13 +2281,12 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeDef(tdef: TypeDef): TypeDef =
- typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){
- _.typedTypeDef0(tdef)
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
+ _.typedTypeDefImpl(tdef)
}
- // call typedTypeDef instead
- // a TypeDef with type parameters must always be type checked in a new scope
- private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ // use typedTypeDef instead. this version is called after creating a new context for the TypeDef
+ private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
@@ -2696,8 +2603,13 @@ trait Typers extends Modes with Adaptations with Tags {
def mkParam(methodSym: Symbol, tp: Type = argTp) =
methodSym.newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp
+ def mkDefaultCase(body: Tree) =
+ atPos(tree.pos.makeTransparent) {
+ CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), body)
+ }
+
// `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 =
- // ${`$selector match { $cases }` updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x)))}`
+    // ${`$selector match { $cases; case default$ => default(x) }`}`
def applyOrElseMethodDef = {
val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE)
@@ -2706,7 +2618,7 @@ trait Typers extends Modes with Adaptations with Tags {
val x = mkParam(methodSym, A1.tpe)
// applyOrElse's default parameter:
- val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty //lower(resTp)
+ val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty
val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
val paramSyms = List(x, default)
@@ -2716,19 +2628,72 @@ trait Typers extends Modes with Adaptations with Tags {
// should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
- val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ // First, type without the default case; only the cases provided
+ // by the user are typed. The LUB of these becomes `B`, the lower
+ // bound of `B1`, which in turn is the result type of the default
+ // case
+ val match0 = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ val matchResTp = match0.tpe
- val matchResTp = match_.tpe
B1 setInfo TypeBounds.lower(matchResTp) // patch info
+ // the default uses applyOrElse's first parameter since the scrut's type has been widened
+ val match_ = {
+ val defaultCase = methodBodyTyper.typedCase(
+ mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe)), argTp, B1.tpe)
+ treeCopy.Match(match0, match0.selector, match0.cases :+ defaultCase)
+ }
match_ setType B1.tpe
- // the default uses applyOrElse's first parameter since the scrut's type has been widened
- val matchWithDefault = match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x)))
- (DefDef(methodSym, methodBodyTyper.virtualizedMatch(matchWithDefault, mode, B1.tpe)), matchResTp)
+ // SI-6187 Do you really want to know? Okay, here's what's going on here.
+ //
+ // Well behaved trees satisfy the property:
+ //
+      //    typed(tree) == typed(resetLocalAttrs(typed(tree)))
+ //
+ // Trees constructed without low-level symbol manipulation get this for free;
+ // references to local symbols are cleared by `ResetAttrs`, but bind to the
+ // corresponding symbol in the re-typechecked tree. But PartialFunction synthesis
+ // doesn't play by these rules.
+ //
+      // During typechecking of method bodies, references to method type parameters from
+ // the declared types of the value parameters should bind to a fresh set of skolems,
+ // which have been entered into scope by `Namer#methodSig`. A comment therein:
+ //
+ // "since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams"
+ //
+ // But, if we retypecheck the reset `applyOrElse`, the TypeTree of the `default`
+ // parameter contains no type. Somehow (where?!) it recovers a type that is _almost_ okay:
+ // `A1 => B1`. But it should really be `A1&0 => B1&0`. In the test, run/t6187.scala, this
+ // difference results in a type error, as `default.apply(x)` types as `B1`, which doesn't
+ // conform to the required `B1&0`
+ //
+ // I see three courses of action.
+ //
+      //  1) synthesize an `asInstanceOf[B1]` below (I tried this first. But... ewwww.)
+      //  2) install an 'original' TypeTree that will be used after ResetAttrs (the solution below)
+ // 3) Figure out how the almost-correct type is recovered on re-typechecking, and
+ // substitute in the skolems.
+ //
+ // For 2.11, we'll probably shift this transformation back a phase or two, so macros
+ // won't be affected. But in any case, we should satisfy retypecheckability.
+ //
+ val originals: Map[Symbol, Tree] = {
+ def typedIdent(sym: Symbol) = methodBodyTyper.typedType(Ident(sym), mode)
+ val A1Tpt = typedIdent(A1)
+ val B1Tpt = typedIdent(B1)
+ Map(
+ x -> A1Tpt,
+ default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
+ )
+ }
+ val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
+ val defdef = DefDef(methodSym, Modifiers(methodSym.flags), originals, rhs)
+
+ (defdef, matchResTp)
}
- // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue ` updateAttachment DefaultOverrideMatchAttachment(FALSE_typed)}`
+    // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue; case default$ => false }`}`
def isDefinedAtMethod = {
val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL)
val paramSym = mkParam(methodSym)
@@ -2737,10 +2702,10 @@ trait Typers extends Modes with Adaptations with Tags {
methodBodyTyper.context.scope enter paramSym
methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue, mode, BooleanClass.tpe)
+ val defaultCase = mkDefaultCase(FALSE_typed)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
- val matchWithDefault = match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed)
- DefDef(methodSym, methodBodyTyper.virtualizedMatch(matchWithDefault, mode, BooleanClass.tpe))
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
}
// only used for @cps annotated partial functions
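
A hand-written analogue of what the synthesis above now produces for a partial-function literal, with the formerly attachment-driven defaults spelled out as ordinary cases. This is user-level Scala standing in for the synthesized trees, not the trees themselves:

    object PartialFunctionExpansionSketch {
      // What the user writes:
      val literal: PartialFunction[Any, Int] = { case s: String => s.length }

      // Roughly what is synthesized for it, with the added default cases visible:
      val expanded: PartialFunction[Any, Int] = new PartialFunction[Any, Int] {
        override def applyOrElse[A1 <: Any, B1 >: Int](x: A1, default: A1 => B1): B1 =
          x match {
            case s: String => s.length
            case _         => default(x) // the added `case default$ => default(x)`
          }
        def isDefinedAt(x: Any): Boolean = x match {
          case _: String => true
          case _         => false        // the added `case default$ => false`
        }
        def apply(x: Any): Int = applyOrElse(x, (a: Any) => throw new MatchError(a))
      }

      def main(args: Array[String]): Unit = {
        println(expanded.isDefinedAt("abc"))               // true
        println(expanded.applyOrElse(42, (_: Any) => -1))  // -1: falls through to the default
      }
    }
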
@@ -2785,7 +2750,9 @@ trait Typers extends Modes with Adaptations with Tags {
members foreach (m => anonClass.info.decls enter m.symbol)
val typedBlock = typedPos(tree.pos, mode, pt) {
- Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(New(anonClass.tpe)))
+ Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+ Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
+ ))
}
if (typedBlock.isErrorTyped) typedBlock
@@ -4434,8 +4401,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (typed(expr).tpe.typeSymbol != UnitClass)
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
- treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- .setType(adaptTypeOfReturn(expr1, restpt.tpe, NothingClass.tpe))
+ val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
+ val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ res.setType(tp)
}
}
}
@@ -5339,10 +5307,14 @@ trait Typers extends Modes with Adaptations with Tags {
typed(docdef.definition, mode, pt)
}
+ /**
+ * The typer with the correct context for a method definition. If the method is a default getter for
+ * a constructor default, the resulting typer has a constructor context (fixes SI-5543).
+ */
def defDefTyper(ddef: DefDef) = {
- val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
- newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(flag)
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
}
def typedAlternative(alt: Alternative) = {
@@ -5629,20 +5601,21 @@ trait Typers extends Modes with Adaptations with Tags {
lastTreeToTyper = tree
indentTyping()
- var alreadyTyped = false
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
+
val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
+ (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- alreadyTyped = tree.tpe ne null
+ val alreadyTyped = tree.tpe ne null
var tree1: Tree = if (alreadyTyped) tree else {
printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
@@ -5651,7 +5624,7 @@ trait Typers extends Modes with Adaptations with Tags {
"context.owner" -> context.owner
)
)
- typed1(tree, mode, dropExistential(pt))
+ typed1(tree, mode, dropExistential(ptPlugins))
}
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
@@ -5665,12 +5638,12 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, ptPlugins, tree)
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
) //DEBUG
}
if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -5685,7 +5658,7 @@ trait Typers extends Modes with Adaptations with Tags {
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+pt)
+ Console.println("exception when typing "+tree+", pt = "+ptPlugins)
if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
throw ex
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 5782d7bbca..b51dc0ccd5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -79,8 +79,9 @@ trait Unapplies extends ast.TreeDSL
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
- private def classType(cdef: ClassDef, tparams: List[TypeDef], symbolic: Boolean = true): Tree = {
- val tycon = if (symbolic) REF(cdef.symbol) else Ident(cdef.name)
+ private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = {
+ // SI-7033 Unattributed to avoid forcing `cdef.symbol.info`.
+ val tycon = Ident(cdef.symbol)
if (tparams.isEmpty) tycon else AppliedTypeTree(tycon, tparams map toIdent)
}
@@ -93,12 +94,33 @@ trait Unapplies extends ast.TreeDSL
* @param param The name of the parameter of the unapply method, assumed to be of type C[Ts]
* @param caseclazz The case class C[Ts]
*/
- private def caseClassUnapplyReturnValue(param: Name, caseclazz: Symbol) = {
- def caseFieldAccessorValue(selector: Symbol): Tree = Ident(param) DOT selector
+ private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
+ def caseFieldAccessorValue(selector: ValDef): Tree = {
+ val accessorName = selector.name
+ val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
+ case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
+ }
+ privateLocalParamAccessor match {
+ case None =>
+        // Selecting by name seems to be the most straightforward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
+ Ident(param) DOT maybeRenamedAccessorName
+ case Some(sym) =>
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ //
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ Ident(param) DOT sym
+ }
+ }
- caseclazz.caseFieldAccessors match {
- case Nil => TRUE
- case xs => SOME(xs map caseFieldAccessorValue: _*)
+ // Working with trees, rather than symbols, to avoid cycles like SI-5082
+ constrParamss(caseclazz).take(1).flatten match {
+ case Nil => TRUE
+ case xs => SOME(xs map caseFieldAccessorValue: _*)
}
}
@@ -112,7 +134,7 @@ trait Unapplies extends ast.TreeDSL
def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
+ Modifiers(OVERRIDE | FINAL | SYNTHETIC),
nme.toString_,
Nil,
ListOfNil,
@@ -126,17 +148,17 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
/** The apply method corresponding to a case class
*/
- def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef, symbolic: Boolean): DefDef = {
+ def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
val cparamss = constrParamss(cdef)
- def classtpe = classType(cdef, tparams, symbolic)
+ def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
DefDef(mods, name, tparams, cparamss, classtpe,
New(classtpe, mmap(cparamss)(gen.paramToArg)))
@@ -145,7 +167,7 @@ trait Unapplies extends ast.TreeDSL
/** The apply method corresponding to a case class
*/
- def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef, symbolic = true)
+ def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef)
/** The unapply method corresponding to a case class
*/
@@ -157,7 +179,7 @@ trait Unapplies extends ast.TreeDSL
}
val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(unapplyParamName))
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
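
At the user level, the unapply synthesized here amounts to a null check on the scrutinee followed by plain selections of the (possibly renamed) case accessors, which is why it can be generated from the constructor parameter trees alone. A hand-written analogue for a two-field case class (illustrative, not the generated trees):

    // Hand-written equivalent of the synthesized companion for
    // `case class Person(name: String, age: Int)`.
    class Person(val name: String, val age: Int)

    object Person {
      def apply(name: String, age: Int): Person = new Person(name, age)
      // The real synthesis wraps this in nullSafe(...); written out directly here.
      def unapply(p: Person): Option[(String, Int)] =
        if (p == null) None else Some((p.name, p.age))
    }

    object UnapplySketch {
      def main(args: Array[String]): Unit =
        Person("Ada", 36) match {
          case Person(n, a) => println(n + " is " + a) // Ada is 36
        }
    }
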
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 7d97b0c782..ea436a71fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -67,6 +67,8 @@ trait Variances {
def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
VARIANCES
+ case BoundedWildcardType(bounds) =>
+ varianceInType(bounds)(tparam)
case SingleType(pre, sym) =>
varianceInType(pre)(tparam)
case TypeRef(pre, sym, args) =>
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 0125f1b189..95135b84e0 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -230,6 +230,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
emptyValDef,
NoMods,
List(),
+ List(List()),
List(methdef),
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 15025f85e3..00c72cf423 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -9,6 +9,7 @@ import scala.tools.nsc.MissingRequirementError
abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
import global._
+ import analyzer.{AnalyzerPlugin, Typer}
import definitions._
//override val verbose = true
@@ -18,12 +19,12 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* Checks whether @cps annotations conform
*/
object checker extends AnnotationChecker {
- private def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
- private def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
+ private[CPSAnnotationChecker] def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
+ private[CPSAnnotationChecker] def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
- private def cleanPlus(tp: Type) =
+ private[CPSAnnotationChecker] def cleanPlus(tp: Type) =
removeAttribs(tp, MarkerCPSAdaptPlus, MarkerCPSTypes)
- private def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
+ private[CPSAnnotationChecker] def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
cleanPlus(tp) withAnnotations newAnnots.toList
/** Check annotations to decide whether tpe1 <:< tpe2 */
@@ -116,8 +117,13 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else
bounds
}
+ }
+
+ object plugin extends AnalyzerPlugin {
+
+ import checker._
- override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
if (!cpsEnabled) return false
vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
@@ -183,7 +189,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else false
}
- override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
if (!cpsEnabled) return tree
vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
@@ -239,14 +245,15 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* is in tail position. Therefore, we are making sure that only the types of return expressions
* are adapted which will either be removed, or lead to an error.
*/
- override def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = {
+ override def pluginsTypedReturn(default: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val expr = tree.expr
// only adapt if method's result type (pt) is cps type
val annots = cpsParamAnnotation(pt)
if (annots.nonEmpty) {
- // return type of `tree` without plus marker, but only if it doesn't have other cps annots
- if (hasPlusMarker(tree.tpe) && !hasCpsParamTypes(tree.tpe))
- tree.setType(removeAttribs(tree.tpe, MarkerCPSAdaptPlus))
- tree.tpe
+ // return type of `expr` without plus marker, but only if it doesn't have other cps annots
+ if (hasPlusMarker(expr.tpe) && !hasCpsParamTypes(expr.tpe))
+ expr.setType(removeAttribs(expr.tpe, MarkerCPSAdaptPlus))
+ expr.tpe
} else default
}
@@ -393,7 +400,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
- override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
import scala.util.control._
if (!cpsEnabled) {
if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
index 8a500d6c4d..237159795a 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -33,6 +33,7 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
}
global.addAnnotationChecker(checker.checker)
+ global.analyzer.addAnalyzerPlugin(checker.plugin)
global.log("instantiated cps plugin: " + this)
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index 49fea9434c..adb6de6afd 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -17,7 +17,8 @@ package scala.annotation
* order between Scala 2.7 and 2.8.
*
* @param message A message describing the change, which is emitted
- * by the compiler if the flag `-Xmigration` is set.
+ * by the compiler if the flag `-Xmigration` indicates a version
+ * prior to the changedIn version.
*
* @param changedIn The version, in which the behaviour change was
* introduced.
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 63e5adf428..2de0043c96 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -28,10 +28,10 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
+object IndexedSeq extends SeqFactory[IndexedSeq] {
// A single CBF which can be checked against to identify
// an indexed collection type.
- override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index a43862abaf..2d3f7e609b 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -38,10 +38,12 @@ import scala.language.higherKinds
abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
- private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ // A default implementation of GenericCanBuildFrom which can be cast
+ // to whatever is desired.
+ private class ReusableCBF extends GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
- def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance
+ lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF
/** A generic implementation of the `CanBuildFrom` trait, which forwards
* all calls to `apply(from)` to the `genericBuilder` method of
diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala
deleted file mode 100644
index 200d033c2d..0000000000
--- a/src/library/scala/collection/generic/IndexedSeqFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package generic
-
-import language.higherKinds
-
-/** A template for companion objects of IndexedSeq and subclasses thereof.
- *
- * @since 2.10
- */
-abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] {
- override def ReusableCBF: GenericCanBuildFrom[Nothing] =
- scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
-}
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index bf4ba3a381..96414c07ef 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -31,7 +31,9 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
+object IndexedSeq extends SeqFactory[IndexedSeq] {
+ override lazy val ReusableCBF =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
def length = buf.length
def apply(idx: Int) = buf.apply(idx)
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 56e386ad67..9765e7c52f 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -55,6 +55,12 @@ import java.io._
* val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList
* }}}
*
+ * @note The functional list is characterized by persistence and structural sharing, thus offering considerable
+ * performance and space consumption benefits in some scenarios if used correctly.
+ * However, note that objects having multiple references into the same functional list (that is,
+ * objects that rely on structural sharing) will be serialized and deserialized with multiple lists, one for
+ * each reference to it. I.e. structural sharing is lost after serialization/deserialization.
+ *
* @author Martin Odersky and others
* @version 2.8
* @since 1.0
@@ -295,6 +301,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
result
}
+
+ override def foldRight[B](z: B)(op: (A, B) => B): B =
+ reverse.foldLeft(z)((right, left) => op(left, right))
override def stringPrefix = "List"
@@ -349,25 +358,8 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def tail : List[B] = tl
override def isEmpty: Boolean = false
- private def writeObject(out: ObjectOutputStream) {
- out.writeObject(ListSerializeStart) // needed to differentiate with the legacy `::` serialization
- out.writeObject(this.hd)
- out.writeObject(this.tl)
- }
-
private def readObject(in: ObjectInputStream) {
- val obj = in.readObject()
- if (obj == ListSerializeStart) {
- this.hd = in.readObject().asInstanceOf[B]
- this.tl = in.readObject().asInstanceOf[List[B]]
- } else oldReadObject(in, obj)
- }
-
- /* The oldReadObject method exists here for compatibility reasons.
- * :: objects used to be serialized by serializing all the elements to
- * the output stream directly, but this was broken (see SI-5374).
- */
- private def oldReadObject(in: ObjectInputStream, firstObject: AnyRef) {
+ val firstObject = in.readObject()
hd = firstObject.asInstanceOf[B]
assert(hd != ListSerializeEnd)
var current: ::[B] = this
@@ -375,14 +367,14 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
case ListSerializeEnd =>
current.tl = Nil
return
- case a : Any =>
+ case a =>
val list : ::[B] = new ::(a.asInstanceOf[B], Nil)
current.tl = list
current = list
}
}
- private def oldWriteObject(out: ObjectOutputStream) {
+ private def writeObject(out: ObjectOutputStream) {
var xs: List[B] = this
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
@@ -651,10 +643,6 @@ object List extends SeqFactory[List] {
}
/** Only used for list serialization */
-@SerialVersionUID(0L - 8287891243975527522L)
-private[scala] case object ListSerializeStart
-
-/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
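A minimal sketch illustrating the @note added to List's Scaladoc above: structural sharing between two lists is not preserved across Java serialization, because the `::` writeObject above writes only the elements. The object and helper names below are illustrative, not part of the library.

    import java.io._

    object SharingLostDemo {
      // serialize and deserialize a value in memory
      def roundTrip[T](x: T): T = {
        val bos = new ByteArrayOutputStream()
        val oos = new ObjectOutputStream(bos)
        oos.writeObject(x)
        oos.close()
        new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray)).readObject().asInstanceOf[T]
      }

      def main(args: Array[String]): Unit = {
        val shared = List(2, 1)
        val longer = 3 :: shared          // longer.tail is the very same object as shared
        assert(longer.tail eq shared)     // sharing before serialization

        val (shared2, longer2) = roundTrip((shared, longer))
        println(longer2.tail eq shared2)  // expected: false -- sharing is lost
      }
    }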
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 1c461973e4..5bb4ef5f21 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -841,9 +841,16 @@ self =>
* // produces: "1, 2, 3, 4, 5, 6"
* }}}
*/
- override def distinct: Stream[A] =
- if (isEmpty) this
- else cons(head, tail.filter(head != _).distinct)
+ override def distinct: Stream[A] = {
+ // This should use max memory proportional to N, whereas
+ // recursively calling distinct on the tail is N^2.
+ def loop(seen: Set[A], rest: Stream[A]): Stream[A] = {
+ if (rest.isEmpty) rest
+ else if (seen(rest.head)) loop(seen, rest.tail)
+ else cons(rest.head, loop(seen + rest.head, rest.tail))
+ }
+ loop(Set(), this)
+ }
/** Returns a new sequence of given length containing the elements of this
* sequence followed by zero or more occurrences of given elements.
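The reworked Stream.distinct above threads an explicit `seen` set through a by-name cons, so it stays lazy and its memory use grows with the number of distinct elements instead of stacking one filter per element. A small usage sketch (illustrative values only):

    object StreamDistinctDemo extends App {
      // infinite stream, but only 100 distinct values
      val s = Stream.from(0).map(_ % 100)
      // forcing a finite prefix of distinct does not evaluate the whole stream
      println(s.distinct.take(5).toList)  // expected: List(0, 1, 2, 3, 4)
    }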
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index f083e80175..bcce4a99bd 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -18,7 +18,16 @@ import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
*/
-object Vector extends IndexedSeqFactory[Vector] {
+object Vector extends SeqFactory[Vector] {
+ // left lying around for binary compatibility check
+ private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
+ override def apply() = newBuilder[Nothing]
+ }
+ // left lying around for binary compatibility check
+ private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
+
+ override lazy val ReusableCBF =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 6b778b26f5..bb938a7aeb 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -52,20 +52,6 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
super.toArray[U]
}
- def :+[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
- val result = Array.ofDim[B](repr.length + 1)
- Array.copy(repr, 0, result, 0, repr.length)
- result(repr.length) = elem
- result
- }
-
- def +:[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
- val result = Array.ofDim[B](repr.length + 1)
- result(0) = elem
- Array.copy(repr, 0, result, 1, repr.length)
- result
- }
-
override def par = ParArray.handoff(repr)
/** Flattens a two-dimensional array by concatenating all its rows
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 212ee917c5..7f05deffc8 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -12,6 +12,7 @@ package scala.collection
package mutable
import generic._
+import annotation.tailrec
/** A simple mutable map backed by a list.
*
@@ -47,13 +48,17 @@ extends AbstractMap[A, B]
def get(key: A): Option[B] = elems find (_._1 == key) map (_._2)
def iterator: Iterator[(A, B)] = elems.iterator
- def += (kv: (A, B)) = { elems = remove(kv._1, elems); elems = kv :: elems; siz += 1; this }
- def -= (key: A) = { elems = remove(key, elems); this }
- private def remove(key: A, elems: List[(A, B)]): List[(A, B)] =
- if (elems.isEmpty) elems
- else if (elems.head._1 == key) { siz -= 1; elems.tail }
- else elems.head :: remove(key, elems.tail)
+ def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this }
+ def -= (key: A) = { elems = remove(key, elems, List()); this }
+
+ @tailrec
+ private def remove(key: A, elems: List[(A, B)], acc: List[(A, B)]): List[(A, B)] = {
+ if (elems.isEmpty) acc
+ else if (elems.head._1 == key) { siz -= 1; acc ::: elems.tail }
+ else remove(key, elems.tail, elems.head :: acc)
+ }
+
override def clear() = { elems = List(); siz = 0 }
override def size: Int = siz
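The rewritten ListMap.remove above carries the already-visited prefix in an accumulator, so the recursion is tail-recursive and cannot overflow the stack on long key lists; since a map is unordered, re-prepending the accumulator with `:::` is enough. A hedged sketch of the same accumulator pattern on a plain List, using `reverse_:::` where element order should be kept (the name removeFirst is illustrative):

    import scala.annotation.tailrec

    object RemoveFirstDemo extends App {
      @tailrec
      def removeFirst[A](key: A, elems: List[A], acc: List[A] = Nil): List[A] =
        if (elems.isEmpty) acc.reverse
        else if (elems.head == key) acc reverse_::: elems.tail  // keep original order
        else removeFirst(key, elems.tail, elems.head :: acc)

      println(removeFirst(3, List(1, 2, 3, 4, 3)))  // expected: List(1, 2, 4, 3)
    }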
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
new file mode 100644
index 0000000000..a0d7aaea47
--- /dev/null
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -0,0 +1,117 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import java.util.concurrent.Executor
+import scala.annotation.tailrec
+
+/**
+ * Mixin trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it; while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+private[concurrent] trait BatchingExecutor extends Executor {
+
+ // invariant: if "_tasksLocal.get ne null" then we are inside Batch.run; if it is null, we are outside
+ private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+ private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+ private var parentBlockContext: BlockContext = _
+ // this method runs in the delegate ExecutionContext's thread
+ override def run(): Unit = {
+ require(_tasksLocal.get eq null)
+
+ val prevBlockContext = BlockContext.current
+ BlockContext.withBlockContext(this) {
+ try {
+ parentBlockContext = prevBlockContext
+
+ @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+ case Nil => ()
+ case head :: tail =>
+ _tasksLocal set tail
+ try {
+ head.run()
+ } catch {
+ case t: Throwable =>
+ // if one task throws, move the
+ // remaining tasks to another thread
+ // so we can throw the exception
+ // up to the invoking executor
+ val remaining = _tasksLocal.get
+ _tasksLocal set Nil
+ unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+ throw t // rethrow
+ }
+ processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+ }
+
+ processBatch(initial)
+ } finally {
+ _tasksLocal.remove()
+ parentBlockContext = null
+ }
+ }
+ }
+
+ override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+ // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+ {
+ val tasks = _tasksLocal.get
+ _tasksLocal set Nil
+ if ((tasks ne null) && tasks.nonEmpty)
+ unbatchedExecute(new Batch(tasks))
+ }
+
+ // now delegate the blocking to the previous BC
+ require(parentBlockContext ne null)
+ parentBlockContext.blockOn(thunk)
+ }
+ }
+
+ protected def unbatchedExecute(r: Runnable): Unit
+
+ override def execute(runnable: Runnable): Unit = {
+ if (batchable(runnable)) { // If we can batch the runnable
+ _tasksLocal.get match {
+ case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+ }
+ } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
+ }
+
+ /** Override this to define which runnables will be batched. */
+ def batchable(runnable: Runnable): Boolean = runnable match {
+ case _: OnCompleteRunnable => true
+ case _ => false
+ }
+}
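The Scaladoc above warns that a batching executor can deadlock when a task blocks on work still sitting in its own batch; wrapping the blocking section in scala.concurrent.blocking lets blockOn hand the queued tasks off to another thread first. A hedged usage sketch of that pattern (it runs on the global context here; whether a given callback actually lands on a batching executor is an internal detail):

    import scala.concurrent.{Await, Future, blocking}
    import scala.concurrent.duration._
    import scala.concurrent.ExecutionContext.Implicits.global

    object BlockingInCallback extends App {
      val f = Future(41).map { n =>
        blocking {            // mark the potentially blocking section for the BlockContext
          Thread.sleep(10)    // stands in for real blocking work
        }
        n + 1
      }
      println(Await.result(f, 1.second))  // expected: 42
    }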
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 4b9e74708d..36f3be341f 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -675,9 +675,9 @@ object Future {
// by just not ever using it itself. scala.concurrent
// doesn't need to create defaultExecutionContext as
// a side effect.
- private[concurrent] object InternalCallbackExecutor extends ExecutionContext {
- override def execute(runnable: Runnable): Unit =
- runnable.run()
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor {
+ override protected def unbatchedExecute(r: Runnable): Unit =
+ r.run()
override def reportFailure(t: Throwable): Unit =
throw new IllegalStateException("problem in scala.concurrent internal callback", t)
}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 215f90b17e..77625e381c 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -25,11 +25,15 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
case some => some
}
+ private val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
+
// Implement BlockContext on FJP threads
class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
- //Potentially set things like uncaught exception handler, name etc
+ thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
thread
}
@@ -73,7 +77,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
new ForkJoinPool(
desiredParallelism,
threadFactory,
- null, //FIXME we should have an UncaughtExceptionHandler, see what Akka does
+ uncaughtExceptionHandler,
true) // Async all the way baby
} catch {
case NonFatal(t) =>
@@ -94,13 +98,13 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
+ val fjt = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj =>
- (runnable match {
- case fjt: ForkJoinTask[_] => fjt
- case _ => ForkJoinTask.adapt(runnable)
- }).fork
- case _ => fj.execute(runnable)
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+ case _ => fj execute fjt
}
case generic => generic execute runnable
}
@@ -111,6 +115,20 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
private[concurrent] object ExecutionContextImpl {
+ final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable ⇒
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null ⇒
+ case some ⇒ some.uncaughtException(t, anything)
+ }
+ throw anything
+ }
+ }
+
def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index e9da45a079..52f1075137 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -34,7 +34,7 @@ private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete
value = v
// Note that we cannot prepare the ExecutionContext at this point, since we might
// already be running on a different thread!
- executor.execute(this)
+ try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t }
}
}
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index d3f8df9110..84f6f0be9c 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -95,10 +95,7 @@ package object scala {
val Equiv = scala.math.Equiv
type Fractional[T] = scala.math.Fractional[T]
- val Fractional = scala.math.Fractional
-
type Integral[T] = scala.math.Integral[T]
- val Integral = scala.math.Integral
type Numeric[T] = scala.math.Numeric[T]
val Numeric = scala.math.Numeric
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest/scala/tools/partest/ASMConverters.scala
new file mode 100644
index 0000000000..d618e086f4
--- /dev/null
+++ b/src/partest/scala/tools/partest/ASMConverters.scala
@@ -0,0 +1,71 @@
+package scala.tools.partest
+
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.tree.{ClassNode, MethodNode, InsnList}
+
+/** Makes using ASM from ByteCodeTests more convenient.
+ *
+ * Wraps ASM instructions in case classes so that equals and toString work
+ * for the purpose of bytecode diffing and pretty printing.
+ */
+trait ASMConverters {
+ // wrap ASM's instructions so we get case class-style `equals` and `toString`
+ object instructions {
+ def fromMethod(meth: MethodNode): List[Instruction] = {
+ val insns = meth.instructions
+ val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) }
+
+ asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]]
+ }
+
+ sealed abstract class Instruction { def opcode: String }
+ case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction
+ case class Op (opcode: String) extends Instruction
+ case class IntOp (opcode: String, operand: Int) extends Instruction
+ case class Jump (opcode: String, label: Label) extends Instruction
+ case class Ldc (opcode: String, cst: Any) extends Instruction
+ case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction
+ case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction
+ case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction
+ case class TypeOp (opcode: String, desc: String) extends Instruction
+ case class VarOp (opcode: String, `var`: Int) extends Instruction
+ case class Label (offset: Int) extends Instruction { def opcode: String = "" }
+ case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" }
+ case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" }
+ }
+
+ abstract class AsmToScala {
+ import instructions._
+
+ def labelIndex(l: asm.tree.AbstractInsnNode): Int
+
+ def mapOver(is: List[Any]): List[Any] = is map {
+ case i: asm.tree.AbstractInsnNode => apply(i)
+ case x => x
+ }
+
+ def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?"
+ def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
+ def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label]
+ def apply(x: asm.tree.AbstractInsnNode): Instruction = x match {
+ case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int)
+ case i: asm.tree.InsnNode => Op (op(i))
+ case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int)
+ case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label))
+ case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any)
+ case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int)
+ case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String)
+ case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int)
+ case i: asm.tree.LabelNode => Label (labelIndex(x))
+ case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack)))
+ case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label)
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala
new file mode 100644
index 0000000000..41329a8264
--- /dev/null
+++ b/src/partest/scala/tools/partest/BytecodeTest.scala
@@ -0,0 +1,102 @@
+package scala.tools.partest
+
+import scala.tools.nsc.util.JavaClassPath
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, MethodNode, InsnList}
+import java.io.InputStream
+
+/**
+ * Provides utilities for inspecting bytecode using ASM library.
+ *
+ * HOW TO USE
+ * 1. Create a subdirectory in test/files/jvm for your test. Let's name it $TESTDIR.
+ * 2. Create $TESTDIR/BytecodeSrc_1.scala containing the Scala source whose bytecode
+ * you want to inspect. The '_1' suffix signals to partest that it
+ * should compile this file first.
+ * 3. Create $TESTDIR/Test.scala:
+ * import scala.tools.partest.BytecodeTest
+ * object Test extends BytecodeTest {
+ * def show {
+ * // your code that inspect ASM trees and prints values
+ * }
+ * }
+ * 4. Create corresponding check file.
+ *
+ * EXAMPLE
+ * See test/files/jvm/bytecode-test-example for an example of a bytecode test.
+ *
+ */
+abstract class BytecodeTest extends ASMConverters {
+
+ /** produce the output to be compared against a checkfile */
+ protected def show(): Unit
+
+ def main(args: Array[String]): Unit = show
+
+// asserts
+ def sameBytecode(methA: MethodNode, methB: MethodNode) = {
+ val isa = instructions.fromMethod(methA)
+ val isb = instructions.fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else diffInstructions(isa, isb)
+ }
+
+ import instructions._
+ // bytecode is equal modulo local variable numbering
+ def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
+ case _ if a == b => true
+ case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
+ case _ => false
+ }
+
+ def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
+ val isa = fromMethod(methA)
+ val isb = fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
+ else diffInstructions(isa, isb)
+ }
+
+ def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+ val len = Math.max(isa.length, isb.length)
+ if (len > 0 ) {
+ val width = isa.map(_.toString.length).max
+ val lineWidth = len.toString.length
+ (1 to len) foreach { line =>
+ val isaPadded = isa.map(_.toString) orElse Stream.continually("")
+ val isbPadded = isb.map(_.toString) orElse Stream.continually("")
+ val a = isaPadded(line-1)
+ val b = isbPadded(line-1)
+
+ println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""")
+ }
+ }
+ }
+
+// loading
+ protected def getMethod(classNode: ClassNode, name: String): MethodNode =
+ classNode.methods.asScala.find(_.name == name) getOrElse
+ sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
+
+ protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
+ val classBytes: InputStream = (for {
+ classRep <- classpath.findClass(name)
+ binary <- classRep.binary
+ } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+
+ val cr = new ClassReader(classBytes)
+ val cn = new ClassNode()
+ cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
+ cn
+ }
+
+ protected lazy val classpath: JavaClassPath = {
+ import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+ import scala.tools.util.PathResolver.Defaults
+ // logic inspired by scala.tools.util.PathResolver implementation
+ val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
+ new JavaClassPath(containers, DefaultJavaContext)
+ }
+}
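Following the HOW TO USE steps in the Scaladoc above, a minimal Test.scala might look like the sketch below. It assumes the `_1` source file defines a class SampleMethods with methods f and g whose bytecode should be compared; those names are illustrative.

    import scala.tools.partest.BytecodeTest

    object Test extends BytecodeTest {
      def show {
        val classNode = loadClassNode("SampleMethods")
        val f = getMethod(classNode, "f")
        val g = getMethod(classNode, "g")
        sameBytecode(f, g)  // prints "bytecode identical" or a line-by-line diff
      }
    }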
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 8f256aa1f5..0c8e81a220 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -59,6 +59,8 @@ private[reflect] trait BuildUtils { self: Universe =>
def flagsFromBits(bits: Long): FlagSet
+ def emptyValDef: ValDef
+
def This(sym: Symbol): Tree
def Select(qualifier: Tree, sym: Symbol): Select
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 562b1da8e3..2ba18a8207 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -90,6 +90,7 @@ trait Exprs { self: Universe =>
* }}}
* because expr of type Expr[T] itself does not have a method foo.
*/
+ // @compileTimeOnly("Cannot use splice outside reify")
def splice: T
/**
@@ -106,6 +107,7 @@ trait Exprs { self: Universe =>
* object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
* }}}
*/
+ // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
val value: T
override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index cfa6315797..0937a93738 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -75,26 +75,11 @@ trait Trees { self: Universe =>
def isDef: Boolean
/** Is this tree one of the empty trees?
- *
* Empty trees are: the `EmptyTree` null object, `TypeTree` instances that don't carry a type
* and the special `emptyValDef` singleton.
- *
- * In the compiler the `isEmpty` check and the derived `orElse` method are mostly used
- * as a check for a tree being a null object (`EmptyTree` for term trees and empty TypeTree for type trees).
- *
- * Unfortunately `emptyValDef` is also considered to be `isEmpty`, but this is deemed to be
- * a conceptual mistake pending a fix in https://issues.scala-lang.org/browse/SI-6762.
- *
- * @see `canHaveAttrs`
*/
def isEmpty: Boolean
- /** Can this tree carry attributes (i.e. symbols, types or positions)?
- * Typically the answer is yes, except for the `EmptyTree` null object and
- * two special singletons: `emptyValDef` and `pendingSuperCall`.
- */
- def canHaveAttrs: Boolean
-
/** The canonical way to test if a Tree represents a term.
*/
def isTerm: Boolean
@@ -2420,15 +2405,6 @@ trait Trees { self: Universe =>
*/
val emptyValDef: ValDef
- /** An empty superclass constructor call corresponding to:
- * super.<init>()
- * This is used as a placeholder in the primary constructor body in class templates
- * to denote the insertion point of a call to superclass constructor after the typechecker
- * figures out the superclass of a given template.
- * @group Trees
- */
- val pendingSuperCall: Apply
-
// ---------------------- factories ----------------------------------------------
/** A factory method for `ClassDef` nodes.
@@ -2931,8 +2907,7 @@ trait Trees { self: Universe =>
trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
/** Transforms a `ValDef`. */
def transformValDef(tree: ValDef): ValDef =
- if (tree eq emptyValDef) tree
- else transform(tree).asInstanceOf[ValDef]
+ if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
/** Transforms a list of `ValDef` nodes. */
def transformValDefs(trees: List[ValDef]): List[ValDef] =
trees mapConserve (transformValDef(_))
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 5318d3e540..1ab975b233 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -16,7 +16,15 @@ trait AnnotationCheckers {
/** An additional checker for annotations on types.
* Typically these are registered by compiler plugins
* with the addAnnotationChecker method. */
- abstract class AnnotationChecker {
+ trait AnnotationChecker {
+
+ /**
+ * Selectively activate this annotation checker. When using both an annotation checker
+ * and an analyzer plugin, it is common to run both of them only during selected
+ * compiler phases. See documentation in AnalyzerPlugin.isActive.
+ */
+ def isActive(): Boolean = true
+
/** Check the annotations on two types conform. */
def annotationsConform(tpe1: Type, tpe2: Type): Boolean
@@ -29,39 +37,51 @@ trait AnnotationCheckers {
def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
/** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = bounds
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] = bounds
- /** Modify the type that has thus far been inferred
- * for a tree. All this should do is add annotations. */
+ /**
+ * Modify the type that has thus far been inferred for a tree. All this should
+ * do is add annotations.
+ */
+ @deprecated("Create an AnalyzerPlugin and use pluginsTyped", "2.10.1")
def addAnnotations(tree: Tree, tpe: Type): Type = tpe
- /** Decide whether this annotation checker can adapt a tree
- * that has an annotated type to the given type tp, taking
- * into account the given mode (see method adapt in trait Typers).*/
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
- /** Adapt a tree that has an annotated type to the given type tp,
- * taking into account the given mode (see method adapt in trait Typers).
- * An implementation cannot rely on canAdaptAnnotations being called
- * before. If the implementing class cannot do the adaptiong, it
- * should return the tree unchanged.*/
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
- /** Adapt the type of a return expression. The decision of an annotation checker
- * whether the type should be adapted is based on the type of the expression
- * which is returned, as well as the result type of the method (pt).
- * By default, this method simply returns the passed `default` type.
+ /**
+ * Adapt the type of a return expression. The decision of a typer plugin whether the type
+ * should be adapted is based on the type of the expression which is returned, as well as the
+ * result type of the method (pt).
+ *
+ * By default, this method simply returns the passed `default` type.
*/
+ @deprecated("Create an AnalyzerPlugin and use pluginsTypedReturn. Note: the 'tree' argument here is\n"+
+ "the 'expr' of a Return tree; 'pluginsTypedReturn' takes the Return tree itself as argument", "2.10.1")
def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = default
}
// Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary.
+
/** The list of annotation checkers that have been registered */
private var annotationCheckers: List[AnnotationChecker] = Nil
- /** Register an annotation checker. Typically these
- * are added by compiler plugins. */
+ /** Register an annotation checker. Typically these are added by compiler plugins. */
def addAnnotationChecker(checker: AnnotationChecker) {
if (!(annotationCheckers contains checker))
annotationCheckers = checker :: annotationCheckers
@@ -72,76 +92,53 @@ trait AnnotationCheckers {
annotationCheckers = Nil
}
- /** Check that the annotations on two types conform. To do
- * so, consult all registered annotation checkers. */
- def annotationsConform(tp1: Type, tp2: Type): Boolean = {
- /* Finish quickly if there are no annotations */
- if (tp1.annotations.isEmpty && tp2.annotations.isEmpty)
- true
- else
- annotationCheckers.forall(
- _.annotationsConform(tp1,tp2))
- }
-
- /** Refine the computed least upper bound of a list of types.
- * All this should do is add annotations. */
- def annotationsLub(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsLub(tpe, ts))
- }
-
- /** Refine the computed greatest lower bound of a list of types.
- * All this should do is add annotations. */
- def annotationsGlb(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsGlb(tpe, ts))
- }
-
- /** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
- annotationCheckers.foldLeft(bounds)((bounds, checker) =>
- checker.adaptBoundsToAnnotations(bounds, tparams, targs))
- }
-
- /** Let all annotations checkers add extra annotations
- * to this tree's type. */
- def addAnnotations(tree: Tree, tpe: Type): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.addAnnotations(tree, tpe))
- }
-
- /** Find out whether any annotation checker can adapt a tree
- * to a given type. Called by Typers.adapt. */
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
- annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt))
- }
-
- /** Let registered annotation checkers adapt a tree
- * to a given type (called by Typers.adapt). Annotation checkers
- * that cannot do the adaption should pass the tree through
- * unchanged. */
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
- annotationCheckers.foldLeft(tree)((tree, checker) =>
- checker.adaptAnnotations(tree, mode, pt))
- }
-
- /** Let a registered annotation checker adapt the type of a return expression.
- * Annotation checkers that cannot do the adaptation should simply return
- * the `default` argument.
- *
- * Note that the result is undefined if more than one annotation checker
- * returns an adapted type which is not a subtype of `default`.
- */
- def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = {
- val adaptedTypes = annotationCheckers flatMap { checker =>
- val adapted = checker.adaptTypeOfReturn(tree, pt, default)
- if (!(adapted <:< default)) List(adapted)
- else List()
- }
- adaptedTypes match {
- case fst :: _ => fst
- case List() => default
- }
- }
+ /** @see AnnotationChecker.annotationsConform */
+ def annotationsConform(tp1: Type, tp2: Type): Boolean =
+ if (annotationCheckers.isEmpty || (tp1.annotations.isEmpty && tp2.annotations.isEmpty)) true
+ else annotationCheckers.forall(checker => {
+ !checker.isActive() || checker.annotationsConform(tp1,tp2)
+ })
+
+ /** @see AnnotationChecker.annotationsLub */
+ def annotationsLub(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsLub(tpe, ts))
+
+ /** @see AnnotationChecker.annotationsGlb */
+ def annotationsGlb(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsGlb(tpe, ts))
+
+ /** @see AnnotationChecker.adaptBoundsToAnnotations */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] =
+ if (annotationCheckers.isEmpty) bounds
+ else annotationCheckers.foldLeft(bounds)((bounds, checker) =>
+ if (!checker.isActive()) bounds else checker.adaptBoundsToAnnotations(bounds, tparams, targs))
+
+
+ /* The following methods will be removed with the deprecated methods in AnnotationChecker. */
+
+ def addAnnotations(tree: Tree, tpe: Type): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
+
+ def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+ if (annotationCheckers.isEmpty) false
+ else annotationCheckers.exists(checker => {
+ checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
+ })
+
+ def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+ if (annotationCheckers.isEmpty) tree
+ else annotationCheckers.foldLeft(tree)((tree, checker) =>
+ if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
+
+ def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type =
+ if (annotationCheckers.isEmpty) default
+ else annotationCheckers.foldLeft(default)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.adaptTypeOfReturn(tree, pt, tpe))
}
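The deprecations above steer plugin authors from the AnnotationChecker typing hooks to the analyzer-plugin hooks, as the CPS changes earlier in this diff do. A minimal sketch of that split, using only the signatures that appear in this diff (the component name and method bodies are illustrative):

    abstract class MyPluginComponent {
      val global: scala.tools.nsc.Global
      import global._
      import analyzer.{AnalyzerPlugin, Typer}

      object checker extends AnnotationChecker {
        // only the subtyping-related hook stays on the annotation checker
        def annotationsConform(tpe1: Type, tpe2: Type): Boolean = true
      }

      object plugin extends AnalyzerPlugin {
        // replaces the deprecated AnnotationChecker.addAnnotations
        override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
      }

      // registration, mirroring SelectiveCPSPlugin above:
      //   global.addAnnotationChecker(checker)
      //   global.analyzer.addAnalyzerPlugin(plugin)
    }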
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 6a5a742cc7..032b45316e 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -33,6 +33,17 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
case ThrownException(exc) => exc
}
+ def addThrowsAnnotation(throwableSym: Symbol): Self = {
+ val throwableTpe = if (throwableSym.isMonomorphicType) throwableSym.tpe else {
+ debuglog(s"Encountered polymorphic exception `${throwableSym.fullName}` while parsing class file.")
+ // in case we encounter polymorphic exception the best we can do is to convert that type to
+ // monomorphic one by introducing existentials, see SI-7009 for details
+ existentialAbstraction(throwableSym.typeParams, throwableSym.tpe)
+ }
+ val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
+ withAnnotations(List(throwsAnn))
+ }
+
/** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
@@ -330,14 +341,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
-
+
/** Extracts symbol of thrown exception from AnnotationInfo.
- *
+ *
* Supports both “old-style” `@throws(classOf[Exception])`
* as well as “new-stye” `@throws[Exception]("cause")` annotations.
*/
object ThrownException {
- def unapply(ann: AnnotationInfo): Option[Symbol] =
+ def unapply(ann: AnnotationInfo): Option[Symbol] =
ann match {
case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
None
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index b1b0c5b60b..9f41f0336e 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -47,6 +47,8 @@ trait BuildUtils { self: SymbolTable =>
def flagsFromBits(bits: Long): FlagSet = bits
+ def emptyValDef: ValDef = self.emptyValDef
+
def This(sym: Symbol): Tree = self.This(sym)
def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 4269b65297..6e4ca76382 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -671,6 +671,11 @@ trait Definitions extends api.StandardDefinitions {
case _ => Nil
}
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
+
def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
case RefinedType(p :: _, _) => p.normalize
case tp => tp
@@ -678,9 +683,10 @@ trait Definitions extends api.StandardDefinitions {
def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
- def abstractFunctionForFunctionType(tp: Type) =
- if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
- else NoType
+ def abstractFunctionForFunctionType(tp: Type) = {
+ assert(isFunctionType(tp), tp)
+ abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
+ }
def isFunctionType(tp: Type): Boolean = tp.normalize match {
case TypeRef(_, sym, args) if args.nonEmpty =>
@@ -864,6 +870,12 @@ trait Definitions extends api.StandardDefinitions {
removeRedundantObjects(parents)
}
+ /** Flatten curried parameter lists of a method type. */
+ def allParameters(tpe: Type): List[Symbol] = tpe match {
+ case MethodType(params, res) => params ::: allParameters(res)
+ case _ => Nil
+ }
+
def typeStringNoPackage(tp: Type) =
"" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "."
@@ -949,7 +961,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
- lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.macros.compileTimeOnly")
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 59c027868e..8b24678fd6 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -32,19 +32,4 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
-
- /** Convert to corresponding type parameters all skolems of method
- * parameters which appear in `tparams`.
- */
- def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
- class DeSkolemizeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
- mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
- case _ =>
- mapOver(tp)
- }
- }
- new DeSkolemizeMap mapOver tp
- }
}
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 2f2b02975c..43902c1930 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -334,8 +334,6 @@ trait Importers extends api.Importers { self: SymbolTable =>
new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
case from.emptyValDef =>
emptyValDef
- case from.pendingSuperCall =>
- pendingSuperCall
case from.ValDef(mods, name, tpt, rhs) =>
new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index f8c670827a..faa161d6b1 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -38,7 +38,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
protected class DefaultPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
- if (!t.canHaveAttrs) ()
+ if (t eq EmptyTree) ()
else if (t.pos == NoPosition) {
t.setPos(pos)
super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index a8085a4c58..80d247c0ea 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -435,7 +435,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (printTypes && tree.isTerm && tree.canHaveAttrs) {
+ if (printTypes && tree.isTerm && !tree.isEmpty) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -542,10 +542,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(")")
case EmptyTree =>
print("EmptyTree")
- case self.emptyValDef =>
+ case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
print("emptyValDef")
- case self.pendingSuperCall =>
- print("pendingSuperCall")
case tree: Tree =>
val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index c870d8972d..ddc5d94e70 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -297,6 +297,7 @@ trait StdNames {
// Compiler internal names
val ANYname: NameType = "<anyname>"
val CONSTRUCTOR: NameType = "<init>"
+ val DEFAULT_CASE: NameType = "defaultCase$"
val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$"
val FAKE_LOCAL_THIS: NameType = "this$"
val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
@@ -730,7 +731,6 @@ trait StdNames {
val null_ : NameType = "null"
val ofDim: NameType = "ofDim"
val origin: NameType = "origin"
- val pendingSuperCall: NameType = "pendingSuperCall"
val prefix : NameType = "prefix"
val productArity: NameType = "productArity"
val productElement: NameType = "productElement"
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index a4287fb181..4ffd198dc4 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -86,7 +86,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
}
- def knownDirectSubclasses = children
+ def knownDirectSubclasses = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ children
+ }
+
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
@@ -1188,6 +1192,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to generate a type of kind *
* for a term symbol, its usual type.
* See the tpe/tpeHK overrides in TypeSymbol for more.
+ *
+ * For type symbols, `tpe` is different than `info`. `tpe` returns a typeRef
+ * to the type symbol, `info` returns the type information of the type symbol,
+ * e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
*/
def tpe: Type = info
def tpeHK: Type = tpe
@@ -1583,8 +1591,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setAnnotations(annot :: annotations)
// Convenience for the overwhelmingly common case
- def addAnnotation(sym: Symbol, args: Tree*): this.type =
+ def addAnnotation(sym: Symbol, args: Tree*): this.type = {
+ // The assertion below is meant to prevent issues like SI-7009, but it's disabled
+ // due to problems with cycles while compiling the Scala library. It's rather shocking that
+ // just checking whether sym is a monomorphic type introduces nasty cycles. We are definitely
+ // forcing too much, because monomorphism is a local property of a type that can be checked
+ // syntactically.
+ // assert(sym.initialize.isMonomorphicType, sym)
addAnnotation(AnnotationInfo(sym.tpe, args.toList, Nil))
+ }
+
+ /** Use that variant if you want to pass (for example) an applied type */
+ def addAnnotation(tp: Type, args: Tree*): this.type = {
+ assert(tp.typeParams.isEmpty, tp)
+ addAnnotation(AnnotationInfo(tp, args.toList, Nil))
+ }
// ------ comparisons ----------------------------------------------------------------
@@ -1651,6 +1672,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
// ------ cloneing -------------------------------------------------------------------
/** A clone of this symbol. */
@@ -1728,8 +1751,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** For a case class, the symbols of the accessor methods, one for each
* argument in the first parameter list of the primary constructor.
* The empty list for all other classes.
- */
- final def caseFieldAccessors: List[Symbol] =
+ *
+ * This list will be sorted to correspond to the declaration order
+ * in the primary constructor's parameter list.
+ */
+ final def caseFieldAccessors: List[Symbol] = {
+ // We can't rely on the ordering of the case field accessors within decls --
+ // handling of non-public parameters seems to change the order (see SI-7035.)
+ //
+ // Luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
+ // (need to undo name-mangling, including the sneaky trailing whitespace)
+ //
+ // The slightly more principled approach of using the paramss of the
+ // primary constructor leads to cycles in, for example, pos/t5084.scala.
+ val primaryNames = constrParamAccessors.map(acc => nme.dropLocalSuffix(acc.name))
+ caseFieldAccessorsUnsorted.sortBy { acc =>
+ primaryNames indexWhere { orig =>
+ (acc.name == orig) || (acc.name startsWith (orig append "$"))
+ }
+ }
+ }
+ private final def caseFieldAccessorsUnsorted: List[Symbol] =
(info.decls filter (_.isCaseAccessorMethod)).toList
final def constrParamAccessors: List[Symbol] =
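The kind of program this sorting protects, as an illustrative sketch: with a non-public constructor parameter the iteration order of `decls` may diverge from declaration order, yet the synthesized product methods must follow the constructor:

    case class Coord(x: Int, private val tag: String, y: Int)

    object CaseAccessorOrderDemo extends App {
      val c = Coord(1, "hidden", 2)
      // caseFieldAccessors must be reported in declaration order: x, tag, y
      assert(c.productElement(0) == 1)
      assert(c.productElement(2) == 2)
    }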
@@ -2473,7 +2515,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
override def outerSource: Symbol =
- if (originalName == nme.OUTER) initialize.referenced
+ // SI-6888 Approximate the name to work around the deficiencies in `nme.originalName`
+ // in the face of classes named '$'. SI-2806 remains open to address the deeper problem.
+ if (originalName endsWith (nme.OUTER)) initialize.referenced
else NoSymbol
def setModuleClass(clazz: Symbol): TermSymbol = {
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index ebf0998573..c1753fc5a1 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -172,10 +172,29 @@ abstract class TreeGen extends macros.TreeBuilder {
if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
mkAttributedIdent(sym)
else {
+ // Have to recognize whenever a selection is made on a package
+ // so it can be rewritten to foo.bar.`package`.name rather than
+ // foo.bar.name if name is in the package object.
+ // TODO - factor out the common logic between this and
+ // the Typers method "isInPackageObject", used in typedIdent.
+ val qualsym = (
+ if (qual.tpe ne null) qual.tpe.typeSymbol
+ else if (qual.symbol ne null) qual.symbol
+ else NoSymbol
+ )
+ val needsPackageQualifier = (
+ (sym ne null)
+ && qualsym.isPackage
+ && !sym.isDefinedInPackage
+ )
val pkgQualifier =
- if (sym != null && sym.owner.isPackageObjectClass && sym.effectiveOwner == qual.tpe.typeSymbol) {
- val obj = sym.owner.sourceModule
- Select(qual, nme.PACKAGE) setSymbol obj setType singleType(qual.tpe, obj)
+ if (needsPackageQualifier) {
+ // The owner of a symbol which requires package qualification may be the
+ // package object itself, but it could also be any superclass of the package
+ // object. In the latter case, we must go through the qualifier's info
+ // to obtain the right symbol.
+ val packageObject = if (sym.owner.isModuleClass) sym.owner.sourceModule else qual.tpe member nme.PACKAGE
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
}
else qual
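Illustrative source for the rewrite above (the `foo.bar` package and its member are hypothetical):

    // foo/bar/package.scala
    package foo
    package object bar {
      def greet(): String = "hi"
    }

    // At a use site, the typed tree for `foo.bar.greet()` is rewritten by
    // mkAttributedSelect into `foo.bar.`package`.greet()`, i.e. the selection goes
    // through the package object's module, even when `greet` is inherited from a
    // superclass of the package object.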
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index e1a18570b2..8b5dc80c83 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -261,22 +261,24 @@ abstract class TreeInfo {
* in the position `for { <tree> <- expr }` based only
* on information at the `parser` phase? To qualify, there
* may be no subtree that will be interpreted as a
- * Stable Identifier Pattern.
+ * Stable Identifier Pattern, nor any type tests, even
+ * on TupleN. See SI-6968.
*
* For instance:
*
* {{{
- * foo @ (bar, (baz, quux))
+ * (foo @ (bar @ _)) = 0
* }}}
*
- * is a variable pattern; if the structure matches,
- * then the remainder is inevitable.
+ * is not a variable pattern; it only binds names.
*
* The following are not variable patterns.
*
* {{{
- * foo @ (bar, (`baz`, quux)) // back quoted ident, not at top level
- * foo @ (bar, Quux) // UpperCase ident, not at top level
+ * `bar`
+ * Bar
+ * (a, b)
+ * _: T
* }}}
*
* If the pattern is a simple identifier, it is always
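The user-visible consequence, as a sketch (SI-6968): a tuple pattern in a generator is no longer assumed irrefutable at parse time, so non-matching elements are filtered out instead of blowing up with a MatchError:

    object VarPatternDemo extends App {
      val xs: List[Any] = List((1, "one"), "scalar", (2, "two"))
      // Desugars through withFilter/filter because (a, b) is no longer a variable pattern.
      val firsts = for ((a, b) <- xs) yield a
      println(firsts) // List(1, 2)
    }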
@@ -305,10 +307,6 @@ abstract class TreeInfo {
tree match {
case Bind(name, pat) => isVarPatternDeep0(pat)
case Ident(name) => isVarPattern(tree)
- case Apply(sel, args) =>
- ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName)
- && (args forall isVarPatternDeep0)
- )
case _ => false
}
}
@@ -344,9 +342,6 @@ abstract class TreeInfo {
def preSuperFields(stats: List[Tree]): List[ValDef] =
stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
- def hasUntypedPreSuperFields(stats: List[Tree]): Boolean =
- preSuperFields(stats) exists (_.tpt.isEmpty)
-
def isEarlyDef(tree: Tree) = tree match {
case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
@@ -433,12 +428,26 @@ abstract class TreeInfo {
case _ => false
}
+ /** Is the argument a wildcard star type of the form `_*`?
+ */
+ def isWildcardStarType(tree: Tree): Boolean = tree match {
+ case Ident(tpnme.WILDCARD_STAR) => true
+ case _ => false
+ }
+
/** Is this pattern node a catch-all (wildcard or variable) pattern? */
def isDefaultCase(cdef: CaseDef) = cdef match {
case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat)
case _ => false
}
+ /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
+ * whether the user-provided cases are exhaustive? */
+ def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
+ case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
+ case _ => false
+ }
+
/** Does this CaseDef catch Throwable? */
def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
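Source-level shapes these two predicates classify, as a sketch (the `defaultCase$` binder is compiler-generated and never written by hand):

    object TreeShapesDemo extends App {
      def sum(xs: Int*) = xs.sum
      val nums = List(1, 2, 3)
      println(sum(nums: _*)) // the `_*` ascription is what isWildcardStarType recognizes

      // PartialFunction synthesis inserts a catch-all of the shape
      //   case (defaultCase$ @ _) => ...
      // before exhaustivity is known; isSyntheticDefaultCase spots exactly that binder.
      val pf: PartialFunction[Int, String] = { case 1 => "one" }
      println(pf.isDefinedAt(2)) // false
    }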
@@ -511,10 +520,6 @@ abstract class TreeInfo {
def isSynthCaseSymbol(sym: Symbol) = sym hasAllFlags SYNTH_CASE_FLAGS
def hasSynthCaseSymbol(t: Tree) = t.symbol != null && isSynthCaseSymbol(t.symbol)
- def isTraitRef(tree: Tree): Boolean = {
- val sym = if (tree.tpe != null) tree.tpe.typeSymbol else null
- ((sym ne null) && sym.initialize.isTrait)
- }
/** Applications in Scala can have one of the following shapes:
*
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 431afd286d..754adcb80d 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -36,7 +36,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
def isDef = false
def isEmpty = false
- def canHaveAttrs = true
/** The canonical way to test if a Tree represents a term.
*/
@@ -229,6 +228,14 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def isDef = true
}
+ case object EmptyTree extends TermTree {
+ val asList = List(this)
+ super.tpe_=(NoType)
+ override def tpe_=(t: Type) =
+ if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
+ override def isEmpty = true
+ }
+
abstract class MemberDef extends DefTree with MemberDefApi {
def mods: Modifiers
def keyword: String = this match {
@@ -516,7 +523,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
override private[scala] def copyAttrs(tree: Tree) = {
super.copyAttrs(tree)
tree match {
- case other: TypeTree => wasEmpty = other.wasEmpty // SI-6648 Critical for correct operation of `resetAttrs`.
+ case other: TypeTree =>
+ // SI-6648 Critical for correct operation of `resetAttrs`.
+ wasEmpty = other.wasEmpty
+ if (other.orig != null)
+ orig = other.orig.duplicate
case _ =>
}
this
@@ -592,7 +603,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
// TODO: ApplyConstructor ???
- case self.pendingSuperCall => self.pendingSuperCall
case _ => new Apply(fun, args)
}).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
@@ -955,23 +965,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
- trait CannotHaveAttrs extends Tree {
- override def canHaveAttrs = false
-
- private def unsupported(what: String, args: Any*) =
- throw new UnsupportedOperationException(s"$what($args) inapplicable for "+self.toString)
-
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
+ override def isEmpty = true
super.setPos(NoPosition)
- override def setPos(pos: Position) = unsupported("setPos", pos)
-
- super.setType(NoType)
- override def tpe_=(t: Type) = if (t != NoType) unsupported("tpe_=", t)
+ override def setPos(pos: Position) = { assert(false); this }
}
- case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
- object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
- object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
-
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
@@ -989,6 +988,18 @@ trait Trees extends api.Trees { self: SymbolTable =>
def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
+ /** A DefDef with original trees attached to the TypeTree of each parameter */
+ def DefDef(sym: Symbol, mods: Modifiers, originalParamTpts: Symbol => Tree, rhs: Tree): DefDef = {
+ val paramms = mapParamss(sym){ sym =>
+ val vd = ValDef(sym, EmptyTree)
+ (vd.tpt : @unchecked) match {
+ case tt: TypeTree => tt setOriginal (originalParamTpts(sym) setPos sym.pos.focus)
+ }
+ vd
+ }
+ DefDef(sym, mods, paramms, rhs)
+ }
+
def DefDef(sym: Symbol, rhs: Tree): DefDef =
DefDef(sym, Modifiers(sym.flags), rhs)
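A sketch of how the new overload might be used from tree-synthesizing code (a compiler cake `global` is assumed in scope; the originals supplied here are merely illustrative):

    import global._

    // Synthesize a DefDef for `sym` whose parameter TypeTrees carry a source-like
    // original (here just an Ident of the declared type's name), so that later
    // printing or reification can show the original instead of the dealiased type.
    def mkDefWithOriginals(sym: Symbol, body: Tree): DefDef =
      DefDef(
        sym,
        Modifiers(sym.flags),
        (param: Symbol) => Ident(param.tpe.typeSymbol.name),
        body
      )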
@@ -1039,9 +1050,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
def New(tpe: Type, args: Tree*): Tree =
ApplyConstructor(TypeTree(tpe), args.toList)
- def New(tpe: Type, argss: List[List[Tree]]): Tree =
- New(TypeTree(tpe), argss)
-
def New(sym: Symbol, args: Tree*): Tree =
New(sym.tpe, args: _*)
@@ -1122,7 +1130,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
traverse(annot); traverse(arg)
case Template(parents, self, body) =>
traverseTrees(parents)
- if (self ne emptyValDef) traverse(self)
+ if (!self.isEmpty) traverse(self)
traverseStats(body, tree.symbol)
case Block(stats, expr) =>
traverseTrees(stats); traverse(expr)
@@ -1440,6 +1448,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree.hasSymbol) {
subst(from, to)
tree match {
+ case _: DefTree =>
+ val newInfo = symSubst(tree.symbol.info)
+ if (!(newInfo =:= tree.symbol.info)) {
+ debuglog(sm"""
+ |TreeSymSubstituter: updated info of symbol ${tree.symbol}
+ | Old: ${showRaw(tree.symbol.info, printTypes = true, printIds = true)}
+ | New: ${showRaw(newInfo, printTypes = true, printIds = true)}""")
+ tree.symbol updateInfo newInfo
+ }
+ case _ =>
+ // No special handling is required for Function or Import nodes here,
+ // as they don't have interesting infos attached to their symbols.
+ // Substitution of the referenced symbol of Return nodes is handled
+ // in ChangeOwnerTraverser.
+ }
+ tree match {
case Ident(name0) if tree.symbol != NoSymbol =>
treeCopy.Ident(tree, tree.symbol.name)
case Select(qual, name0) if tree.symbol != NoSymbol =>
@@ -1488,6 +1512,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ trait TreeStackTraverser extends Traverser {
+ import collection.mutable
+ val path: mutable.Stack[Tree] = mutable.Stack()
+ abstract override def traverse(t: Tree) = {
+ path push t
+ try super.traverse(t) finally path.pop()
+ }
+ }
+
private lazy val duplicator = new Transformer {
override val treeCopy = newStrictTreeCopier
override def transform(t: Tree) = {
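A usage sketch (again assuming a compiler cake `global` in scope): a traverser that records, for every Ident it meets, the chain of enclosing trees kept on `path`:

    import global._
    import scala.collection.mutable

    class IdentContextCollector extends Traverser with TreeStackTraverser {
      val found = mutable.Buffer[(Name, List[Tree])]()
      override def traverse(t: Tree): Unit = {
        t match {
          // `path` holds the enclosing trees, innermost first; `t` itself is only
          // pushed by the TreeStackTraverser layer inside super.traverse below.
          case Ident(name) => found += ((name, path.toList))
          case _           =>
        }
        super.traverse(t)
      }
    }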
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index dbc00edb1a..b708ca0fd6 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -22,6 +22,8 @@ import util.ThreeValues._
// internal: error
case WildcardType =>
// internal: unknown
+ case BoundedWildcardType(bounds) =>
+ // internal: unknown
case NoType =>
case NoPrefix =>
case ThisType(sym) =>
@@ -584,9 +586,9 @@ trait Types extends api.Types { self: SymbolTable =>
* Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
* Functions on types are also implemented as PolyTypes.
*
- * Example: (in the below, <List> is the type constructor of List)
- * TypeRef(pre, <List>, List()) is replaced by
- * PolyType(X, TypeRef(pre, <List>, List(X)))
+ * Example: (in the below, `<List>` is the type constructor of List)
+ * TypeRef(pre, `<List>`, List()) is replaced by
+ * PolyType(X, TypeRef(pre, `<List>`, List(X)))
*/
def normalize = this // @MAT
@@ -744,7 +746,7 @@ trait Types extends api.Types { self: SymbolTable =>
val trivial = (
this.isTrivial
|| phase.erasedTypes && pre.typeSymbol != ArrayClass
- || pre.normalize.isTrivial && !isPossiblePrefix(clazz)
+ || skipPrefixOf(pre, clazz)
)
if (trivial) this
else {
@@ -1798,7 +1800,7 @@ trait Types extends api.Types { self: SymbolTable =>
// TODO see comments around def intersectionType and def merge
def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
val flattened = flatten(parents).distinct
- if (decls.isEmpty && flattened.tail.isEmpty) {
+ if (decls.isEmpty && hasLength(flattened, 1)) {
flattened.head
} else if (flattened != parents) {
refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition)
@@ -3540,7 +3542,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
- val clazz = owner.newRefinementClass(pos) // TODO: why were we passing in NoPosition instead of pos?
+ val clazz = owner.newRefinementClass(pos)
val result = RefinedType(parents, decls, clazz)
clazz.setInfo(result)
result
@@ -3590,12 +3592,6 @@ trait Types extends api.Types { self: SymbolTable =>
val pre1 = pre match {
case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred =>
x.thistpe
- case _: CompoundType if sym1.isClass =>
- // sharpen prefix so that it is maximal and still contains the class.
- pre.parents.reverse dropWhile (_.member(sym1.name) != sym1) match {
- case Nil => pre
- case parent :: _ => parent
- }
case _ => pre
}
if (pre eq pre1) TypeRef(pre, sym1, args)
@@ -3852,12 +3848,16 @@ trait Types extends api.Types { self: SymbolTable =>
// This is the specified behavior.
protected def etaExpandKeepsStar = false
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
object dropRepeatedParamType extends TypeMap {
def apply(tp: Type): Type = tp match {
case MethodType(params, restpe) =>
- MethodType(params, apply(restpe))
- case PolyType(tparams, restpe) =>
- PolyType(tparams, apply(restpe))
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
seqType(arg)
case _ =>
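Schematically, the effect of the map (a sketch; `global` assumed in scope):

    import global._

    // T* appearing as a method parameter is left alone, but T* reached through the
    // result type is rewritten to Seq[T]:
    //
    //   MethodType(List(args: Any*), <result mentioning Int*>)
    //     ==>  MethodType(List(args: Any*), <result with Seq[Int]>)
    def normalizeResult(tp: Type): Type = dropRepeatedParamType(tp)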
@@ -4469,14 +4469,15 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+ private def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
/** A map to compute the asSeenFrom method */
class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
- private def skipPrefixOf(pre: Type, clazz: Symbol) = (
- (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
- )
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object annotationArgRewriter extends TypeMapTransformer {
private def canRewriteThis(sym: Symbol) = (
@@ -4509,8 +4510,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
def apply(tp: Type): Type =
- if (skipPrefixOf(pre, clazz)) tp
- else tp match {
+ tp match {
case ThisType(sym) =>
def toPrefix(pre: Type, clazz: Symbol): Type =
if (skipPrefixOf(pre, clazz)) tp
@@ -4672,6 +4672,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** A map to implement the `substSym` method. */
class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
case SingleType(pre, _) => singleType(pre, sym)
@@ -4704,23 +4706,13 @@ trait Types extends api.Types { self: SymbolTable =>
case idx => Some(to(idx))
}
- override def transform(tree: Tree) =
- tree match {
- case tree@Ident(_) =>
- termMapsTo(tree.symbol) match {
- case Some(tosym) =>
- if (tosym.info.bounds.hi.typeSymbol isSubClass SingletonClass) {
- Ident(tosym.existentialToString)
- .setSymbol(tosym)
- .setPos(tosym.pos)
- .setType(dropSingletonType(tosym.info.bounds.hi))
- } else {
- giveup()
- }
- case none => super.transform(tree)
- }
- case tree => super.transform(tree)
+ override def transform(tree: Tree) = {
+ termMapsTo(tree.symbol) match {
+ case Some(tosym) => tree.symbol = tosym
+ case None => ()
}
+ super.transform(tree)
+ }
}
trans.transform(tree)
}
@@ -4982,6 +4974,51 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ /**
+ * A more persistent version of `Type#memberType` which does not require
+ * that the symbol is a direct member of the prefix.
+ *
+ * For instance:
+ *
+ * {{{
+ * class C[T] {
+ * sealed trait F[A]
+ * object X {
+ * object S1 extends F[T]
+ * }
+ * class S2 extends F[T]
+ * }
+ * object O extends C[Int] {
+ * def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here.
+ * }
+ * class S3 extends O.F[String]
+ *
+ * nestedMemberType(<S1>, <O.type>, <C>) = O.X.S1.type
+ * nestedMemberType(<S2>, <O.type>, <C>) = O.S2.type
+ * nestedMemberType(<S3>, <O.type>, <C>) = S3.type
+ * }}}
+ *
+ * @param sym The symbol of the subtype
+ * @param pre The prefix from which the symbol is seen
+ * @param owner The enclosing class (`C` in the example above) up to which prefixes are rebased
+ */
+ def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = {
+ def loop(tp: Type): Type =
+ if (tp.isTrivial) tp
+ else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
+ val widened = tp match {
+ case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
+ case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
+ }
+ widened asSeenFrom (pre, tp.typeSymbol.owner)
+ }
+ else loop(tp.prefix) memberType tp.typeSymbol
+
+ val result = loop(sym.tpeHK)
+ assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}")
+ result
+ }
+
/** The most deeply nested owner that contains all the symbols
* of thistype or prefixless typerefs/singletype occurrences in given type.
*/
@@ -6073,7 +6110,7 @@ trait Types extends api.Types { self: SymbolTable =>
(sameLength(params1, params2) &&
mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- isSubType(res1, res2.substSym(params2, params1), depth))
+ isSubType(res1.substSym(params1, params2), res2, depth))
// TODO: if mt1.params.isEmpty, consider NullaryMethodType?
case _ =>
false
@@ -6616,7 +6653,7 @@ trait Types extends api.Types { self: SymbolTable =>
val ts0 = elimSub0(ts)
if (ts0.isEmpty || ts0.tail.isEmpty) ts0
else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.underlying))
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
if (ts1 eq ts0) ts0
else elimSub(ts1, depth)
}
@@ -6733,6 +6770,8 @@ trait Types extends api.Types { self: SymbolTable =>
NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
case ts @ TypeBounds(_, _) :: rest =>
TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
case ts =>
lubResults get (depth, ts) match {
case Some(lubType) =>
@@ -7112,6 +7151,14 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ def isJavaVarargsAncestor(clazz: Symbol) = (
+ clazz.isClass
+ && clazz.isJavaDefined
+ && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
+ )
+ def inheritsJavaVarArgsMethod(clazz: Symbol) =
+ clazz.thisType.baseClasses exists isJavaVarargsAncestor
+
/** All types in list must be polytypes with type parameter lists of
* same length as tparams.
* Returns list of list of bounds infos, where corresponding type
@@ -7224,6 +7271,12 @@ trait Types extends api.Types { self: SymbolTable =>
else (ps :+ SerializableClass.tpe).toList
)
+ /** Members of the given class, other than those inherited
+ * from Any or AnyRef.
+ */
+ def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
+ clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
else tp
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
new file mode 100644
index 0000000000..058ff61fbf
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
@@ -0,0 +1,31 @@
+package scala.reflect
+package internal
+package annotations
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates a member should not be referred to after
+ * type checking (which includes macro expansion); it must only be used in
+ * the arguments of some other macro that will eliminate it from the AST.
+ *
+ * Later on, this annotation should be removed and implemented with domain-specific macros.
+ * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
+ * then it should itself be declared as a macro.
+ *
+ * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
+ * reports an error if `outer` is not detected. In principle, we could use this approach right now,
+ * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
+ * track of the stack of the trees being typechecked.
+ *
+ * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
+ * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
+ * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
+ * domain-specific logic.
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.10.1
+ */
+@getter @setter @beanGetter @beanSetter
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
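A usage sketch in the spirit described above (the `async`/`await` names are hypothetical placeholders for a library's `outer`/`inner` pair):

    import scala.reflect.internal.annotations.compileTimeOnly

    object Async {
      @compileTimeOnly("`await` must be enclosed in an `async` block")
      def await[T](future: scala.concurrent.Future[T]): T =
        sys.error("unreachable: calls to `await` are rewritten away by the `async` macro")

      // `async` itself would be a macro that eliminates every `await` call from its
      // argument; any `await` that survives type checking triggers the message above.
    }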
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index ec3501d5bc..81368df7a6 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -47,6 +47,4 @@ abstract class MutableSettings extends AbsSettings {
def XoldPatmat: BooleanSetting
def XnoPatmatAnalysis: BooleanSetting
def XfullLubs: BooleanSetting
- def companionsInPkgObjs: BooleanSetting
-
}
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 3d10d4c9ce..8f287a1640 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -91,7 +91,7 @@ abstract class Position extends scala.reflect.api.Position { self =>
/** An optional value containing the source file referred to by this position, or
* None if not defined.
*/
- def source: SourceFile = throw new UnsupportedOperationException("Position.source")
+ def source: SourceFile = throw new UnsupportedOperationException(s"Position.source on ${this.getClass}")
/** Is this position neither a NoPosition nor a FakePosition?
* If isDefined is true, offset and source are both defined.
@@ -111,19 +111,19 @@ abstract class Position extends scala.reflect.api.Position { self =>
def makeTransparent: Position = this
/** The start of the position's range, error if not a range position */
- def start: Int = throw new UnsupportedOperationException("Position.start")
+ def start: Int = throw new UnsupportedOperationException(s"Position.start on ${this.getClass}")
/** The start of the position's range, or point if not a range position */
def startOrPoint: Int = point
/** The point (where the ^ is) of the position */
- def point: Int = throw new UnsupportedOperationException("Position.point")
+ def point: Int = throw new UnsupportedOperationException(s"Position.point on ${this.getClass}")
/** The point (where the ^ is) of the position, or else `default` if undefined */
def pointOrElse(default: Int): Int = default
/** The end of the position's range, error if not a range position */
- def end: Int = throw new UnsupportedOperationException("Position.end")
+ def end: Int = throw new UnsupportedOperationException(s"Position.end on ${this.getClass}")
/** The end of the position's range, or point if not a range position */
def endOrPoint: Int = point
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index a77cebf415..007df3b6e2 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -44,17 +44,21 @@ abstract class Attachments { self =>
* Replaces an existing payload of the same type, if exists.
*/
def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, remove[T].all + attachment)
+ new NonemptyAttachments[Pos](this.pos, remove[T].all + attachment)
/** Creates a copy of this attachment with the payload of the given class type `T` removed. */
def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
val newAll = all filterNot matchesTag[T]
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
- else new NonemptyAttachments(this.pos, newAll)
+ else new NonemptyAttachments[Pos](this.pos, newAll)
}
+}
- private class NonemptyAttachments(override val pos: Pos, override val all: Set[Any]) extends Attachments {
- type Pos = self.Pos
- def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
- }
+// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
+// IDE via $outer pointers.
+// Forward compatibility note: This class used to be Attachments$NonemptyAttachments.
+// However it's private, therefore it transcends the compatibility policy for 2.10.x.
+private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
+ type Pos = P
+ def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
}
diff --git a/src/reflect/scala/reflect/macros/compileTimeOnly.scala b/src/reflect/scala/reflect/macros/compileTimeOnly.scala
deleted file mode 100644
index 5a3a352a53..0000000000
--- a/src/reflect/scala/reflect/macros/compileTimeOnly.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.reflect
-package macros
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * @param message the error message to print during compilation if a reference remains
- * after type checking
- * @since 2.10.1
- */
-@getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 70d506ad95..778c826dc0 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -610,11 +610,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
/**
* Copy all annotations of Java annotated element `jann` over to Scala symbol `sym`.
+ * Also creates `@throws` annotations if necessary.
* Pre: `sym` is already initialized with a concrete type.
* Note: If `sym` is a method or constructor, its parameter annotations are copied as well.
*/
private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) {
sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
+ // SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser
+ val jexTpes = jann match {
+ case jm: jMethod => jm.getExceptionTypes.toList
+ case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList
+ case _ => Nil
+ }
+ jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe)))
}
/**
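The observable effect, sketched with the 2.10 runtime-reflection API (SI-7065):

    import scala.reflect.runtime.universe._

    object ThrowsDemo extends App {
      val close = typeOf[java.io.Closeable].member(newTermName("close")).asMethod
      // With this change the list includes the synthesized @throws[java.io.IOException]
      // derived from the Java method's declared exception types.
      println(close.annotations)
    }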
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 2d5b76f094..0e0cf3fc40 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -43,7 +43,6 @@ private[reflect] class Settings extends MutableSettings {
val printtypes = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
- val companionsInPkgObjs = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 7c84279699..90f8cb8d71 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -184,7 +184,7 @@ object Main extends Main {
val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
val path = cparg match {
case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString("")
+ case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
}
// print the classpath if output is verbose
if (verbose)