Diffstat (limited to 'src/compiler/scala/tools')
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 122
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineReader.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 194
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 193
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 92
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala | 225
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 42
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 510
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 235
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 97
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 199
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Variances.scala | 2
44 files changed, 1707 insertions(+), 695 deletions(-)
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 9a3e8d1530..4051bda914 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -72,7 +72,7 @@ class CompileSocket extends CompileOutputCommon {
/** A temporary directory to use */
val tmpDir = {
val udir = Option(Properties.userName) getOrElse "shared"
- val f = (Path(Properties.tmpDir) / "scala-devel" / udir).createDirectory()
+ val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory()
if (f.isDirectory && f.canWrite) {
info("[Temp directory: " + f + "]")
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 8a3c531ff0..caf6ad14cf 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -33,7 +33,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
}
else {
// Otherwise we're on the server and will use it to absolutize the paths.
- settings.absolutize(currentDir.value)
+ settings.absolutize()
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 54402f0903..def1198dae 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -111,7 +111,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
rhs = EmptyTree
)
}
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = Modifiers(PRESUPER)) }
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
val constrs = {
if (constrMods hasFlag TRAIT) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 33db4ee2d5..c508e14343 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -980,11 +980,8 @@ self =>
/** Assumed (provisionally) to be TermNames. */
def ident(skipIt: Boolean): Name =
- if (isIdent) {
- val name = in.name.encode
- in.nextToken()
- name
- } else {
+ if (isIdent) rawIdent().encode
+ else {
syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
nme.ERROR
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 4f564c5d0b..79f0bcf149 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -113,6 +113,11 @@ trait Scanners extends ScannersCommon {
cbuf.append(c)
}
+ /** Determines whether this scanner should emit identifier deprecation warnings,
+ * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala.
+ */
+ protected def emitIdentifierDeprecationWarnings = true
+
/** Clear buffer and set name and token */
private def finishNamed(idtoken: Int = IDENTIFIER) {
name = newTermName(cbuf.toString)
@@ -122,7 +127,7 @@ trait Scanners extends ScannersCommon {
val idx = name.start - kwOffset
if (idx >= 0 && idx < kwArray.length) {
token = kwArray(idx)
- if (token == IDENTIFIER && allowIdent != name)
+ if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
}
}
@@ -283,10 +288,16 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
+ def resetOffset() {
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
if (token == CLASS) {
token = CASECLASS
+ resetOffset()
} else if (token == OBJECT) {
token = CASEOBJECT
+ resetOffset()
} else {
lastOffset = nextLastOffset
next copyFrom this
@@ -607,7 +618,10 @@ trait Scanners extends ScannersCommon {
if (ch == '`') {
nextChar()
finishNamed(BACKQUOTED_IDENT)
- if (name.length == 0) syntaxError("empty quoted identifier")
+ if (name.length == 0)
+ syntaxError("empty quoted identifier")
+ else if (name == nme.WILDCARD)
+ syntaxError("wildcard invalid as backquoted identifier")
}
else syntaxError("unclosed quoted identifier")
}
@@ -1488,6 +1502,10 @@ trait Scanners extends ScannersCommon {
def improves(patches1: List[BracePatch]): Boolean =
imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
+ // don't emit deprecation warnings about identifiers like `macro` or `then`
+ // when skimming through the source file trying to heal braces
+ override def emitIdentifierDeprecationWarnings = false
+
override def error(offset: Int, msg: String) {}
}
}
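For context, a hedged sketch (not part of the patch) of the user-visible effect of the scanner changes above: the main scan still warns once about identifiers that are scheduled to become keywords, the brace-healing ParensAnalyzer no longer repeats that warning, and a backquoted wildcard is now rejected outright.

  // hypothetical user code, assuming 2.10-era deprecation behavior for future keywords
  object ScannerDemo {
    val `macro` = 1   // backquoted: accepted without a warning
    val then    = 2   // plain future keyword: one deprecation warning, from the main scan only
    // val `_`  = 3   // now an error: "wildcard invalid as backquoted identifier"
  }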
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index f94055f666..ac8ab493e0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -258,7 +258,7 @@ abstract class TreeBuilder {
/** Create tree representing a while loop */
def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index fd2b11898c..44d7a1929b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1164,34 +1164,28 @@ abstract class GenICode extends SubComponent {
resCtx
}
- private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position): Unit = {
- if (!(from <:< to) && !(from == NullReference && to == NothingReference)) {
- to match {
- case UNIT =>
- ctx.bb.emit(DROP(from), pos)
- debuglog("Dropped an " + from);
-
- case _ =>
- debugassert(from != UNIT, "Can't convert from UNIT to " + to + " at: " + pos)
- assert(!from.isReferenceType && !to.isReferenceType,
- "type error: can't convert from " + from + " to " + to +" in unit " + unit.source + " at " + pos)
-
- ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- }
- } else if (from == NothingReference) {
- ctx.bb.emit(THROW(ThrowableClass))
- ctx.bb.enterIgnoreMode
- } else if (from == NullReference) {
- ctx.bb.emit(DROP(from))
- ctx.bb.emit(CONSTANT(Constant(null)))
+ private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ // An awful lot of bugs explode here - let's leave ourselves more clues.
+ // A typical example is an overloaded type assigned after typer.
+ log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+
+ val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
+ def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
+ def checkAssertions() {
+ def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
+ debugassert(from != UNIT, msg)
+ assert(!from.isReferenceType && !to.isReferenceType, msg)
}
- else if (from == ThrowableReference && !(ThrowableClass.tpe <:< to.toType)) {
- log("Inserted check-cast on throwable to " + to + " at " + pos)
- ctx.bb.emit(CHECK_CAST(to))
+ if (conforms) from match {
+ case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
+ case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
+ case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
+ case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
+ case _ => ()
}
- else (from, to) match {
- case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG)))
- case _ => ()
+ else to match {
+ case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
+ case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
}
}
@@ -1907,18 +1901,8 @@ abstract class GenICode extends SubComponent {
var handlerCount = 0
- override def toString(): String = {
- val buf = new StringBuilder()
- buf.append("\tpackage: ").append(packg).append('\n')
- buf.append("\tclazz: ").append(clazz).append('\n')
- buf.append("\tmethod: ").append(method).append('\n')
- buf.append("\tbb: ").append(bb).append('\n')
- buf.append("\tlabels: ").append(labels).append('\n')
- buf.append("\texception handlers: ").append(handlers).append('\n')
- buf.append("\tcleanups: ").append(cleanups).append('\n')
- buf.append("\tscope: ").append(scope).append('\n')
- buf.toString()
- }
+ override def toString =
+ s"package $packg { class $clazz { def $method { bb=$bb } } }"
def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = {
debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index d185ed0c34..0abbe44b02 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1018,7 +1018,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index fe0020e074..598965b982 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -888,7 +888,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
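Background for the two addAnnotation changes above: scala.throws takes a type parameter, so the synthesized annotation has to be the applied type rather than the bare ThrowsClass symbol. A hedged, hand-written approximation of what the backends now attach to a remote method:

  // hypothetical equivalent of the synthesized @throws annotation (the exact trees differ)
  trait RemoteDemo extends java.rmi.Remote {
    @throws[java.rmi.RemoteException]
    def ping(): String
  }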
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fee683ce3a..d4a6d18c60 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -18,6 +18,9 @@ abstract class DeadCodeElimination extends SubComponent {
import icodes.opcodes._
import definitions.RuntimePackage
+ /** The block and index where an instruction is located */
+ type InstrLoc = (BasicBlock, Int)
+
val phaseName = "dce"
/** Create a new phase */
@@ -55,27 +58,35 @@ abstract class DeadCodeElimination extends SubComponent {
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
- var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
+ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
+ val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
+
+ /** Map from a local and a basic block to the instructions that store to that local in that basic block */
+ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
+
+ /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
+ val clobbers = mutable.Set[InstrLoc]()
/** the current method. */
var method: IMethod = _
/** Map instructions who have a drop on some control path, to that DROP instruction. */
- val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
+ val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
debuglog("dead code elimination on " + m);
dropOf.clear()
+ localStores.clear()
+ clobbers.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
@@ -104,10 +115,10 @@ abstract class DeadCodeElimination extends SubComponent {
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
- case LOAD_LOCAL(l) =>
+ case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
- case STORE_LOCAL(_) =>
+ case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
* (otherwise any side-effects of the module's constructor go lost).
* (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
@@ -125,6 +136,11 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
if (necessary) worklist += ((bb, idx))
+ // add it to the localStores map
+ val key = (l, bb)
+ val set = localStores(key)
+ set += idx
+ localStores(key) = set
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
@@ -162,11 +178,18 @@ abstract class DeadCodeElimination extends SubComponent {
def mark() {
// log("Starting with worklist: " + worklist)
while (!worklist.isEmpty) {
- val (bb, idx) = worklist.iterator.next
+ val (bb, idx) = worklist.head
worklist -= ((bb, idx))
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
+ // adds the instructions that define the stack values about to be consumed to the work list to
+ // be marked useful
+ def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
+ debuglog(s"\t${bb1(idx1)} is consumed by $instr")
+ worklist += ((bb1, idx1))
+ }
+
if (!useful(bb)(idx)) {
useful(bb) += idx
dropOf.get(bb, idx) foreach {
@@ -180,6 +203,15 @@ abstract class DeadCodeElimination extends SubComponent {
worklist += ((bb1, idx1))
}
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ addDefs()
+ // see SI-5313
+ // search for clobbers of this store if we aren't doing l1 = null
+ // this doesn't catch the second store in x=null;l1=x; but in practice this catches
+ // a lot of null stores very cheaply
+ if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
+ findClobbers(l1, bb, idx + 1)
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -199,14 +231,72 @@ abstract class DeadCodeElimination extends SubComponent {
()
case _ =>
- for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- debuglog("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
+ addDefs()
}
}
}
}
+
+ /**
+ * Finds and marks all clobbers of the given local starting in the given
+ * basic block at the given index
+ *
+ * Storing to local variables of reference or array type may be indirectly
+ * observable because it may remove a reference to an object which may allow the object
+ * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a
+ * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked
+ * useful then its clobbers must go into the set of clobbers, which will be
+ * compensated for later
+ */
+ def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
+ // previously visited blocks tracked to prevent searching forever in a cycle
+ val inspected = mutable.Set[BasicBlock]()
+ // our worklist of blocks that still need to be checked
+ val blocksToBeInspected = mutable.Set[BasicBlock]()
+
+ // Tries to find the next clobber of l in bb1 starting at idx1.
+ // If it finds one it adds the clobber to the clobbers set for later
+ // handling. If not, it adds bb1's direct successor blocks to
+ // blocksToBeInspected to try to find clobbers there. Either way
+ // it adds the exception successor blocks for further search.
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ val key = ((l, bb1))
+ val foundClobber = (localStores contains key) && {
+ def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
+
+ // find the smallest index greater than or equal to idx1
+ val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
+ if (clobberIdx == -1)
+ false
+ else {
+ debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
+ clobbers += ((bb1, clobberIdx))
+ true
+ }
+ }
+
+ // always need to look into the exception successors for additional clobbers
+ // because we don't know when flow might enter an exception handler
+ blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
+ // If we didn't find a clobber here then we need to look at successor blocks.
+ // if we found a clobber then we don't need to search in the direct successors
+ if (!foundClobber) {
+ blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
+ }
+ }
+
+ // first search starting at the current index
+ // note we don't put bb in the inspected list yet because a loop may later force
+ // us back around to search from the beginning of bb
+ findClobberInBlock(idx, bb)
+ // then loop until we've exhausted the set of uninspected blocks
+ while(!blocksToBeInspected.isEmpty) {
+ val bb1 = blocksToBeInspected.head
+ blocksToBeInspected -= bb1
+ inspected += bb1
+ findClobberInBlock(0, bb1)
+ }
+ }
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
@@ -236,6 +326,12 @@ abstract class DeadCodeElimination extends SubComponent {
i match {
case NEW(REFERENCE(sym)) =>
log(s"Eliminated instantation of $sym inside $m")
+ case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
+ // if an unused instruction was a clobber of a used store to a reference or array type
+ // then we'll replace it with the store of a null to make sure the reference is
+ // eliminated. See SI-5313
+ bb emit CONSTANT(Constant(null))
+ bb emit STORE_LOCAL(l)
case _ => ()
}
debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
@@ -247,8 +343,8 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
- val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
+ private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
+ val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
@@ -287,7 +383,7 @@ abstract class DeadCodeElimination extends SubComponent {
res
}
- private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = {
+ private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
if (idx != -1)
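A hedged, source-level illustration of the SI-5313 hazard the clobber tracking above addresses: if DCE removes a later store that overwrote a live reference, the earlier object stays reachable longer than in the original program, so the sweep now emits a null store in its place.

  // hypothetical example: the second assignment to buf produces no value anyone reads,
  // but it is what lets the large array be collected before the loop finishes
  def process(): Int = {
    var buf = new Array[Byte](1 << 24)
    val n   = buf.length
    buf = null                    // a "clobber" of the useful store above
    var i = 0
    while (i < 100000000) i += 1  // long-running work during which buf should be collectable
    n
  }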
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 10e2f23142..4ee6daf73e 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -20,7 +20,7 @@ object IndexModelFactory {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
- implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
+ implicit def orderingMap = math.Ordering.String
def addMember(d: MemberEntity) = {
val firstLetter = {
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index c6cfc317ea..0a469c9227 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -314,12 +314,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
inform("Creating doc template for " + sym)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- def inSource =
- if (sym.sourceFile != null && ! sym.isSynthetic)
- Some((sym.sourceFile, sym.pos.line))
+
+ protected def inSourceFromSymbol(symbol: Symbol) =
+ if (symbol.sourceFile != null && ! symbol.isSynthetic)
+ Some((symbol.sourceFile, symbol.pos.line))
else
None
+ def inSource = inSourceFromSymbol(sym)
+
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -508,11 +511,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val linearization = {
- val symbol = sym.info.members.find {
+ override lazy val (inSource, linearization) = {
+ val representive = sym.info.members.find {
s => s.isPackageObject
} getOrElse sym
- linearizationFromSymbol(symbol)
+ (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
}
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index 10f972452f..5fd5b41625 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -37,6 +37,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ if ((history: History) ne NoHistory)
+ this setHistory history
+
// working around protected/trait/java insufficiencies.
def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
@@ -51,8 +54,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
this setBellEnabled false
- if ((history: History) ne NoHistory)
- this setHistory history
if (completion ne NoCompletion) {
val argCompletor: ArgumentCompleter =
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 9d01e73063..dbb9b7a003 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -15,6 +15,7 @@ import symtab.Flags
import mutable.ListBuffer
import scala.annotation.elidable
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -821,7 +822,7 @@ trait ParallelMatching extends ast.TreeDSL
// match that's unimportant; so we add an instance check only if there
// is a binding.
def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
+ if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
cunit.warning(scrutTree.pos,
"A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 06ebc20d3e..5c852ae07c 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -38,14 +38,25 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
+
+ override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ val (r, args) = super.processArguments(arguments, processAll)
+ // we need to ensure the files specified with relative locations are absolutized based on the currentDir
+ (r, args map {a => absolutizePath(a)})
+ }
+
+ /**
+ * Takes an individual path and, if it is not absolute, turns it into an absolute path based on currentDir.
+ * If it is already absolute it is left alone.
+ */
+ private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path
- /** All user set settings rewritten with absolute paths. */
- def absolutize(root: Path) {
- def rewrite(p: String) = (root resolve Path(p)).normalize.path
+ /** All user set settings rewritten with absolute paths based on currentDir */
+ def absolutize() {
userSetSettings foreach {
- case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(rewrite(p.value))
- case p: PathSetting => p.value = ClassPath.map(p.value, rewrite)
- case p: StringSetting => if (holdsPath(p)) p.value = rewrite(p.value)
+ case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(absolutizePath(p.value))
+ case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath)
+ case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value)
case _ => ()
}
}
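A hedged sketch of what absolutizePath does with the fsc client's working directory; the paths are made up, and Path is the same scala.reflect.io.Path already used in this file:

  import scala.reflect.io.Path
  val cwd = Path("/home/alice/project")                  // stand-in for Path(currentDir.value)
  (cwd resolve Path("src/Main.scala")).normalize.path    // "/home/alice/project/src/Main.scala"
  (cwd resolve Path("/tmp/Other.scala")).normalize.path  // an already-absolute path comes back unchanged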
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index f1f289ed4d..e4f99474e1 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -221,6 +221,7 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -486,6 +487,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
+ /** A setting represented by a Scala version (`default` unless set). */
+ class ScalaVersionSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ default: ScalaVersion)
+ extends Setting(name, descr) {
+ import ScalaVersion._
+
+ type T = ScalaVersion
+ protected var v: T = NoScalaVersion
+
+ override def tryToSet(args: List[String]) = {
+ value = default
+ Some(args)
+ }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => value = default; Some(Nil)
+ case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ }
+
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
+
+ def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
+
+ withHelpSyntax(s"${name}:<${arg}>")
+ }
+
class PathSetting private[nsc](
name: String,
descr: String,
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index b820d10ddc..0a98d45cac 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -85,8 +85,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.").
- withDeprecationMessage("This setting is no longer useful and will be removed. Please remove it from your build.")
+ val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -169,6 +168,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
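For context, a hedged illustration of how the reworked -Xmigration drives the comparisons added in ParallelMatching and ExplicitOuter (settings.Xmigration.value < ScalaVersion.twoDotEight): leaving the flag unset yields NoScalaVersion, which sorts above every version, while a bare -Xmigration yields the default AnyScalaVersion, which sorts below every version.

  import scala.tools.nsc.settings._
  assert(NoScalaVersion > ScalaVersion.twoDotEight)            // flag absent: 2.8 migration warnings off
  assert(AnyScalaVersion < ScalaVersion.twoDotEight)           // bare -Xmigration: warnings on
  assert(ScalaVersion("2.7.2") < ScalaVersion.twoDotEight)     // -Xmigration:2.7.2: warnings on
  assert(!(ScalaVersion("2.9.0") < ScalaVersion.twoDotEight))  // -Xmigration:2.9.0: 2.8 warnings off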
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
new file mode 100644
index 0000000000..d6a0149411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -0,0 +1,194 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+// $Id$
+
+package scala.tools.nsc.settings
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A scala version that sorts higher than all actual versions
+ */
+case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. A SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to segregate builds
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Factory methods for producing ScalaVersions
+ */
+object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
+ def errorAndValue() = {
+ errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
+ AnyScalaVersion
+ }
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = util.Try(toInt(s)).isSuccess
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "none" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
+ case _ =>
+ errorAndValue()
+ } catch {
+ case e: NumberFormatException => errorAndValue()
+ }
+ }
+
+ def apply(versionString: String): ScalaVersion =
+ apply(versionString, msg => throw new NumberFormatException(msg))
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = apply(util.Properties.versionNumberString)
+
+ /**
+ * The 2.8.0 version.
+ */
+ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final)
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+/**
+ * A development, test, nightly, snapshot or other "unofficial" build
+ */
+case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+}
+/**
+ * A final final
+ */
+case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+}
+
+/**
+ * A candidate for final release
+ */
+case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+}
+
+/**
+ * An intermediate release
+ */
+case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+}
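A hedged usage sketch of the parser and ordering defined above (the version strings are illustrative):

  import scala.tools.nsc.settings._
  val v = ScalaVersion("2.10.1")                                  // SpecificScalaVersion(2, 10, 1, Final)
  assert(ScalaVersion("2.10.1-M3") < ScalaVersion("2.10.1-RC2"))  // milestone sorts below release candidate
  assert(ScalaVersion("2.10.1-RC2") < v)                          // release candidate sorts below the final build
  assert(AnyScalaVersion < v && v < NoScalaVersion)               // AnyScalaVersion is the floor, NoScalaVersion the ceiling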
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a708a262e7..4b1d3c34f3 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1043,7 +1043,13 @@ abstract class ClassfileParser {
val nClasses = in.nextChar
for (n <- 0 until nClasses) {
val cls = pool.getClassSymbol(in.nextChar.toInt)
- sym.addAnnotation(definitions.ThrowsClass, Literal(Constant(cls.tpe)))
+ val tp = if (cls.isMonomorphicType) cls.tpe else {
+ debuglog(s"Encountered polymorphic exception `${cls.fullName}` while parsing class file.")
+ // in case we encounter a polymorphic exception the best we can do is to convert that type to
+ // a monomorphic one by introducing existentials, see SI-7009 for details
+ typer.packSymbols(cls.typeParams, cls.tpe)
+ }
+ sym.addAnnotation(appliedType(definitions.ThrowsClass, tp), Literal(Constant(tp)))
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 25b7813646..e8b0cd2696 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -71,8 +71,8 @@ abstract class Pickler extends SubComponent {
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
unit.error(t.pos, t.symbol.typeParams.length match {
case 0 => "macro has not been expanded"
- case 1 => "type parameter not specified"
- case _ => "type parameters not specified"
+ case 1 => "this type parameter must be specified"
+ case _ => "these type parameters must be specified"
})
return
}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 44510ab0c2..7a0b034fd0 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -450,19 +450,31 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* is a value type (int et al.) in which case it must cast to the boxed version
* because invoke only returns object and erasure made sure the result is
* expected to be an AnyRef. */
- val t: Tree = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, mparams)
-
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
+ }
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
+ )
+ }
}
/* For testing purposes, the dynamic application's condition
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 1003d417f6..78c120c1ad 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -10,6 +10,7 @@ import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable.ListBuffer
import matching.{ Patterns, ParallelMatching }
+import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -553,7 +554,7 @@ abstract class ExplicitOuter extends InfoTransform
}
case _ =>
- if (settings.Xmigration28.value) tree match {
+ if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
if (isArraySeqTest(qual.tpe, args.head.tpe))
unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 39e16c3f58..bc54054028 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -75,36 +75,58 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
assert(matching.nonEmpty,
- s"no extension method found for $imeth:${imeth.tpe} among ${candidates.map(c => c.name+":"+c.tpe).toList} / ${extensionNames(imeth).toList}")
+ sm"""|no extension method found for:
+ |
+ | $imeth:${imeth.tpe}
+ |
+ | Candidates:
+ |
+ | ${candidates.map(c => c.name+":"+c.tpe).mkString("\n")}
+ |
+ | Candidates (signatures normalized):
+ |
+ | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
+ |
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}""")
matching.head
}
+ /** Recognize a MethodType which represents an extension method.
+ *
+ * It may have a curried parameter list with the `$this` alone in the first
+ * parameter list, in which case that parameter list is dropped. Or, since
+ * the curried lists disappear during uncurry, it may have a single parameter
+ * list with `$this` as the first parameter, in which case that parameter is
+ * removed from the list.
+ */
+ object ExtensionMethodType {
+ def unapply(tp: Type) = tp match {
+ case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF =>
+ Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) ))
+ case _ =>
+ None
+ }
+ }
+
/** This method removes the `$this` argument from the parameter list a method.
*
* A method may be a `PolyType`, in which case we tear out the `$this` and the class
- * type params from its nested `MethodType`.
- * It may be a `MethodType`, either with a curried parameter list in which the first argument
- * is a `$this` - we just return the rest of the list.
- * This means that the corresponding symbol was generated during `extmethods`.
- *
- * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list.
- * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards,
- * for instance, during `specialize`.
- * In this case, the first argument is `$this` and we just get rid of it.
+ * type params from its nested `MethodType`. Or it may be a MethodType, as
+ * described at the ExtensionMethodType extractor.
*/
private def normalize(stpe: Type, clazz: Symbol): Type = stpe match {
case PolyType(tparams, restpe) =>
- GenPolyType(tparams dropRight clazz.typeParams.length, normalize(restpe.substSym(tparams takeRight clazz.typeParams.length, clazz.typeParams), clazz))
- case MethodType(List(thiz), restpe) if thiz.name == nme.SELF =>
- restpe
- case MethodType(tparams, restpe) =>
- MethodType(tparams.drop(1), restpe)
+ // method type parameters, class type parameters
+ val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length)
+ GenPolyType(mtparams,
+ normalize(restpe.substSym(ctparams, clazz.typeParams), clazz))
+ case ExtensionMethodType(thiz, etpe) =>
+ etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
case _ =>
stpe
}
class Extender(unit: CompilationUnit) extends TypingTransformer(unit) {
-
private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
@@ -115,27 +137,54 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
+ /** We will need to clone the info of the original method (which obtains clones
+ * of the method type parameters), clone the type parameters of the value class,
+ * and create a new polymethod with the union of all those type parameters, with
+ * their infos adjusted to be consistent with their new home. Example:
+ *
+ * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ * def baz[B >: A](x: B): List[B] = x :: xs
+ * // baz has to be transformed into this extension method, where
+ * // A is cloned from class Foo and B is cloned from method baz:
+ * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+ * }
+ *
+ * TODO: factor out the logic for consolidating type parameters from a class
+ * and a method for re-use elsewhere, because nobody will get this right without
+ * some higher level facilities.
+ */
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- // No variance for method type parameters
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
+ val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth
+ // Start with the class type parameters - clones will be method type parameters
+ // so must drop their variance.
+ val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
+
+ val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*)
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
- def transform(clonedType: Type): Type = clonedType match {
- case MethodType(params, restpe) =>
- // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried.
- MethodType(List(thisParam), clonedType)
- case NullaryMethodType(restpe) =>
- MethodType(List(thisParam), restpe)
- }
- val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth
- GenPolyType(tparams ::: newTypeParams, transform(restpe) substSym (clazz.typeParams, newTypeParams))
- }
+ val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult))
+ val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam)
- private def allParams(tpe: Type): List[Symbol] = tpe match {
- case MethodType(params, res) => params ::: allParams(res)
- case _ => List()
- }
+ def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass)
+ def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass)
+
+ // We can't substitute symbols on the entire polytype because we
+ // need to modify the bounds of the cloned type parameters, but we
+ // don't want to substitute for the cloned type parameters themselves.
+ val tparams = tparamsFromMethod ::: tparamsFromClass
+ GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType))
+ // For reference, calling fix on the GenPolyType plays out like this:
+ // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966]
+ // do not conform to method extension$baz#16148's type parameter bounds
+ //
+ // And the difference is visible here. See how B is bounded from below by A#16149
+ // in both cases, but in the failing case, the other type parameter has turned into
+ // a different A. (What is that A? It is a clone of the original A created in
+ // SubstMap during the call to substSym, but I am not clear on all the particulars.)
+ //
+ // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
+ // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
+ }
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -144,42 +193,62 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
wrap over other value classes anyway.
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
} else tree
case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension =>
- val companion = currentOwner.companionModule
- val origMeth = tree.symbol
- val extensionName = extensionNames(origMeth).head
- val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
- .setAnnotations(origMeth.annotations)
- companion.info.decls.enter(extensionMeth)
- val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner)
+ val origMeth = tree.symbol
+ val origThis = currentOwner
+ val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params
+ val origParams = vparamss.flatten map (_.symbol)
+ val companion = origThis.companionModule
+
+ def makeExtensionMethodSymbol = {
+ val extensionName = extensionNames(origMeth).head
+ val extensionMeth = (
+ companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ setAnnotations origMeth.annotations
+ )
+ companion.info.decls.enter(extensionMeth)
+ }
+
+ val extensionMeth = makeExtensionMethodSymbol
+ val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis)
extensionMeth setInfo newInfo
- log("Value class %s spawns extension method.\n Old: %s\n New: %s".format(
- currentOwner,
- origMeth.defString,
- extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo
-
- def thisParamRef = gen.mkAttributedIdent(extensionMeth.info.params.head setPos extensionMeth.pos)
- val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info
- val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams
- val extensionBody = rhs
+
+ log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}")
+
+ val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo
+ val extensionParams = allParameters(extensionMono)
+ val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
+
+ val extensionBody = (
+ rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
- .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail)
- .substituteThis(currentOwner, thisParamRef)
- .changeOwner((origMeth, extensionMeth))
- extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
- val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
- List(This(currentOwner)))
- val extensionCall = atOwner(origMeth) {
- localTyper.typedPos(rhs.pos) {
- gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol))
- }
- }
- deriveDefDef(tree)(_ => extensionCall)
+ .substituteSymbols(origParams, extensionParams)
+ .substituteThis(origThis, extensionThis)
+ .changeOwner(origMeth -> extensionMeth)
+ )
+
+ // Record the extension method ( FIXME: because... ? )
+ extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody))
+
+ // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
+ // which leaves the actual argument application for extensionCall.
+ val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ val targs = origTpeParams map (_.tpeHK)
+ val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
+
+ // Apply all the argument lists.
+ deriveDefDef(tree)(_ =>
+ atOwner(origMeth)(
+ localTyper.typedPos(rhs.pos)(
+ gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol))
+ )
+ )
+ )
case _ =>
super.transform(tree)
}
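A hedged recap of the rewrite using the Foo/baz example from the extensionMethInfo comment above; the $this parameter and the baz$extension name follow that comment, and the exact generated signatures may differ:

  // hypothetical value class, as in the comment above
  class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
    def baz[B >: A](x: B): List[B] = x :: xs
  }
  // after extmethods, conceptually:
  //   object Foo { final def baz$extension[B >: A, A <: AnyRef]($this: Foo[A])(x: B): List[B] = x :: $this.xs }
  //   while Foo.baz itself becomes a forwarder: Foo.baz$extension[B, A](this)(x)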
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 448079abed..845843e9d6 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -451,20 +451,45 @@ abstract class LambdaLift extends InfoTransform {
}
case arg => arg
}
- /** Wrap expr argument in new *Ref(..) constructor, but make
- * sure that Try expressions stay at toplevel.
+
+ /** Wrap expr argument in new *Ref(..) constructor. But try/catch
+ * is a problem because a throw will clear the stack and, after the catch,
+ * we would expect the partially-constructed object to be on the stack
+ * for the call to init. So we recursively
+ * search for "leaf" result expressions where we know it's safe
+ * to put the new *Ref(..) constructor or, if all else fails, transform
+ * an expr to { val temp=expr; new *Ref(temp) }.
+ * The reason we narrowly look for try/catch in captured var definitions
+ * is that other try/catch expressions have already been lifted;
+ * see SI-6863
+ */
- def refConstr(expr: Tree): Tree = expr match {
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
+ // a try/catch in final expression position.
+ case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
+ New(sym.tpe, expr)
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
+ case Block(stats, expr) =>
+ Block(stats, refConstr(expr))
+ case If(cond, trueBranch, falseBranch) =>
+ If(cond, refConstr(trueBranch), refConstr(falseBranch))
+ case Match(selector, cases) =>
+ Match(selector, cases map refConstrCase)
+          // If we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
+          // any possibility of try/catch in the *Ref constructor. This should be a safe transformation as a default,
+          // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
case _ =>
- New(sym.tpe, expr)
- }
+ debuglog("assigning expr to temp: " + (expr.pos))
+ val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
+ val tempDef = ValDef(tempSym, expr) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
+ Block(tempDef, New(sym.tpe, tempRef))
+ }}
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
- treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- refConstr(constructorArg)
- })
+
+ treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
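
A self-contained sketch of the SI-6863 shape that the new refConstr handles. scala.runtime.IntRef is the real box used for captured Int variables; everything else here is invented for illustration:

    import scala.runtime.IntRef

    object RefConstrSketch {
      def computeIt(): Int = "42".toInt

      // For a captured `var x = try computeIt() catch { ... }`, wrapping the
      // whole try in `new IntRef(...)` would leave the half-built ref on the
      // stack across the exception handler. The transform instead pushes the
      // constructor into the leaves of the try (or, failing that, introduces
      // a temp val as in the default case above):
      val xRef: IntRef =
        try new IntRef(computeIt())
        catch { case _: NumberFormatException => new IntRef(-1) }
    }
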
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 3cd943aa74..c9c68d080d 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -867,7 +867,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
rhs match {
case Block(List(assign), returnTree) =>
val Assign(moduleVarRef, _) = assign
- val cond = Apply(Select(moduleVarRef, nme.eq), List(NULL))
+ val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
abort("Invalid getter " + rhs + " for module in class " + clazz)
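
For context, a guess at the overall shape this condition plugs into (illustrative only, not compiler output): the accessor for a nested module tests the module var for null via reference equality before taking the initialization path.

    class Foo {
      private[this] var BarModule: AnyRef = null
      def Bar: AnyRef = {
        if (BarModule eq null)     // the `Object_eq`-based null test built above
          BarModule = new AnyRef   // initialization path (details elided)
        BarModule
      }
    }
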
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 64051b56ec..232148676c 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -178,6 +178,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
+ def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
+ }
+ private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
+ assert(!specializedMethod.isOverloaded, specializedMethod.defString)
+ val om = Overload(specializedMethod, env)
+ overloads(method) ::= om
+ om
}
/** Just to mark uncheckable */
@@ -289,10 +297,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Return the specialized overload of sym in the given env, if any. */
- def overload(sym: Symbol, env: TypeEnv) =
- overloads(sym).find(ov => TypeEnv.includes(ov.env, env))
-
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
@@ -628,7 +632,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- overloads(specMember) ::= Overload(om, typeEnv(om))
+ newOverload(specMember, om, typeEnv(om))
enterMember(om)
}
@@ -835,7 +839,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
- overloads(sym) ::= Overload(specMember, env)
+ newOverload(sym, specMember, env)
owner.info.decls.enter(specMember)
specMember
}
@@ -877,9 +881,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (wasSpec.nonEmpty)
debuglog("specialized overload for %s in %s".format(specMember, pp(typeEnv(specMember))))
- overloads(sym) ::= Overload(specMember, spec)
+ newOverload(sym, specMember, spec)
info(specMember) = SpecialOverload(sym, typeEnv(specMember))
-
specMember
}
@@ -994,7 +997,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
SpecialOverride(impl)
}
)
- overloads(overriding) ::= Overload(om, env)
+ newOverload(overriding, om, env)
ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
@@ -1476,54 +1479,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
transformTypeApply
- case Select(qual, name) =>
- def transformSelect = {
- qual match {
- case _: Super if illegalSpecializedInheritance(currentClass) =>
- val pos = tree.pos
- debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- debuglog(pos.lineContent)
- tree
- case _ =>
+ case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) =>
+ val pos = tree.pos
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
+ tree
+ case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
-
- //log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
- if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
- // log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- // log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- if (!env.isEmpty) {
- // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
- val specMember = overload(symbol, env)
- if (specMember.isDefined) {
- localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- }
- else {
- val qual1 = transform(qual)
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ val qual1 = transform(qual)
+ def reselect(member: Symbol) = {
+ val newSelect = atPos(tree.pos)(Select(qual1, member))
+ if (member.isMethod) localTyper typedOperator newSelect
+ else localTyper typed newSelect
+ }
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => reselect(member)
+ case _ =>
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol) {
- val tree1 = atPos(tree.pos)(Select(qual1, specMember))
- if (specMember.isMethod)
- localTyper.typedOperator(tree1)
- else
- localTyper.typed(tree1)
- } else
+ if (specMember ne NoSymbol)
+ reselect(specMember)
+ else
treeCopy.Select(tree, qual1, name)
- }
- } else
- super.transform(tree)
- } else overloads(symbol).find(_.sym.info =:= symbol.info) match {
- case Some(specMember) =>
- val qual1 = transform(qual)
- debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
- localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym)))
- case None =>
- super.transform(tree)
- }
+ }
}
+ case Select(qual, _) =>
+ overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) =>
+ val newTree = Select(transform(qual), member)
+ debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
+ localTyper.typedOperator(atPos(tree.pos)(newTree))
+ case None =>
+ super.transform(tree)
}
- transformSelect
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
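
The restructured Select cases above decide when a selection is rerouted to a specialized overload. An invented user-level example of the effect (the `first$mcI$sp` name just follows the usual specialization naming scheme; nothing here is taken from the patch):

    class Vec[@specialized(Int) T](val head: T) {
      def first: T = head
    }

    object Demo {
      val v = new Vec(1)     // instantiates the Int-specialized subclass
      val h: Int = v.first   // the Select `v.first` is retyped to the
                             // specialized member (first$mcI$sp), avoiding boxing
    }
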
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e05df09aaf..965063a724 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -231,7 +231,17 @@ abstract class UnCurry extends InfoTransform
* If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
*
*/
- def transformFunction(fun: Function): Tree =
+ def transformFunction(fun: Function): Tree = {
+ fun.tpe match {
+ // can happen when analyzer plugins assign refined types to functions, e.g.
+ // (() => Int) { def apply(): Int @typeConstraint }
+ case RefinedType(List(funTp), decls) =>
+ debuglog(s"eliminate refinement from function type ${fun.tpe}")
+ fun.tpe = funTp
+ case _ =>
+ ()
+ }
+
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
@@ -239,10 +249,7 @@ abstract class UnCurry extends InfoTransform
// only get here when running under -Xoldpatmat
synthPartialFunction(fun)
case _ =>
- val parents = (
- if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
- else addSerializable(ObjectClass.tpe, fun.tpe)
- )
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
@@ -275,6 +282,7 @@ abstract class UnCurry extends InfoTransform
}
}
+ }
/** Transform a function node (x => body) of type PartialFunction[T, R] where
* body = expr match { case P_i if G_i => E_i }_i=1..n
@@ -603,8 +611,6 @@ abstract class UnCurry extends InfoTransform
}
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
if (!sym.owner.isSourceMethod)
withNeedLift(true) { super.transform(tree) }
else
@@ -629,7 +635,7 @@ abstract class UnCurry extends InfoTransform
}
}
- case Assign(Select(_, _), _) =>
+ case Assign(_: RefTree, _) =>
withNeedLift(true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 78175f393a..b50486306d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -25,6 +25,7 @@ trait Analyzer extends AnyRef
with TypeDiagnostics
with ContextErrors
with StdAttachments
+ with AnalyzerPlugins
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
new file mode 100644
index 0000000000..28f620dbb5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -0,0 +1,225 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/**
+ * @author Lukas Rytz
+ * @version 1.0
+ */
+trait AnalyzerPlugins { self: Analyzer =>
+ import global._
+
+
+ trait AnalyzerPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+     * parser phase. Since symbol completion can trigger subtyping, typing etc., your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Let analyzer plugins change the expected type before type checking a tree.
+ */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+
+ /**
+ * Let analyzer plugins modify the type that has been computed for a tree.
+ *
+ * @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
+     * @param typer The typer that type checked `tree`
+ * @param tree The type-checked tree
+ * @param mode Mode that was used for typing `tree`
+ * @param pt Expected type that was used for typing `tree`
+ */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+
+ /**
+ * Let analyzer plugins change the types assigned to definitions. For definitions that have
+ * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the
+     * type is inferred by typing the definition's right-hand side.
+ *
+ * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt`
+ * TypeTree of the definition (for DefDef and ValDef).
+ *
+ * (*) If the type of a method or value is inferred, the type-checked tree is stored in the
+ * `analyzer.transformed` hash map, indexed by the definition's rhs tree.
+ *
+ * NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
+     * method is called from the type completer of a recursive method, type checking the method
+ * rhs will invoke the same completer again. It might be possible to avoid this situation by
+ * assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
+ * will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
+ *
+ * The hooks into `typeSig` allow analyzer plugins to add annotations to (or change the types
+     * of) definition symbols. This cannot be achieved by using `pluginsTyped`: this method
+ * is only called during type checking, so changing the type of a symbol at this point is too
+     * late: references to the symbol might already be typed and therefore obtain the original
+ * type assigned during naming.
+ *
+ * @param defTree is the definition for which the type was computed. The different cases are
+     * outlined below. Note that this tree is untyped (for methods and values with inferred type,
+ * the typed rhs trees are available in analyzer.transformed).
+ *
+ * Case defTree: Template
+ * - tpe : A ClassInfoType for the template
+ * - typer: The typer for template members, i.e. expressions and definitions of defTree.body
+ * - pt : WildcardType
+ * - the class symbol is accessible through typer.context.owner
+ *
+ * Case defTree: ClassDef
+ * - tpe : A ClassInfoType, or a PolyType(params, ClassInfoType) for polymorphic classes.
+ * The class type is the one computed by templateSig, i.e. through the above case
+ * - typer: The typer for the class. Note that this typer has a different context than the
+ * typer for the template.
+ * - pt : WildcardType
+ *
+ * Case defTree: ModuleDef
+ * - tpe : A ClassInfoType computed by templateSig
+ * - typer: The typer for the module. context.owner of this typer is the module class symbol
+ * - pt : WildcardType
+ *
+ * Case defTree: DefDef
+ * - tpe : The type of the method (MethodType, PolyType or NullaryMethodType). (*)
+     *  - typer: The typer for the rhs of this method
+ * - pt : If tpt.isEmpty, either the result type from the overridden method, or WildcardType.
+ * Otherwise the type obtained from typing tpt.
+ * - Note that for constructors, pt is the class type which the constructor creates. To type
+ * check the rhs of the constructor however, the expected type has to be WildcardType (see
+ * Typers.typedDefDef)
+ *
+ * Case defTree: ValDef
+ * - tpe : The type of this value. (*)
+ * - typer: The typer for the rhs of this value
+ * - pt : If tpt.isEmpty, WildcardType. Otherwise the type obtained from typing tpt.
+ * - Note that pluginsTypeSig might be called multiple times for the same ValDef since it is
+ * used to compute the types of the accessor methods (see `pluginsTypeSigAccessor`)
+ *
+ * Case defTree: TypeDef
+ * - tpe : The type obtained from typing rhs (PolyType if the TypeDef defines a polymorphic type)
+ * - typer: The typer for the rhs of this type
+ * - pt : WildcardType
+ */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = tpe
+
+ /**
+ * Modify the types of field accessors. The namer phase creates method types for getters and
+ * setters based on the type of the corresponding field.
+ *
+ * Note: in order to compute the method type of an accessor, the namer calls `typeSig` on the
+ * `ValDef` tree of the corresponding field. This implies that the `pluginsTypeSig` method
+ * is potentially called multiple times for the same ValDef tree.
+ *
+ * @param tpe The method type created by the namer for the accessor
+ * @param typer The typer for the ValDef (not for the rhs)
+ * @param tree The ValDef corresponding to the accessor
+ * @param sym The accessor method symbol (getter, setter, beanGetter or beanSetter)
+ */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Modify the type of a return expression. By default, return expressions have type
+ * NothingClass.tpe.
+ *
+ * @param tpe The type of the return expression
+ * @param typer The typer that was used for typing the return tree
+ * @param tree The typed return expression tree
+ * @param pt The return type of the enclosing method
+ */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
+ }
+
+
+
+ /** A list of registered analyzer plugins */
+ private var analyzerPlugins: List[AnalyzerPlugin] = Nil
+
+ /** Registers a new analyzer plugin */
+ def addAnalyzerPlugin(plugin: AnalyzerPlugin) {
+ if (!analyzerPlugins.contains(plugin))
+ analyzerPlugins = plugin :: analyzerPlugins
+ }
+
+
+ /** @see AnalyzerPlugin.pluginsPt */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ if (analyzerPlugins.isEmpty) pt
+ else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
+ if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+
+ /** @see AnalyzerPlugin.pluginsTyped */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTpe = addAnnotations(tree, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersTpe
+ else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypeSig */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+
+ /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+
+ /** @see AnalyzerPlugin.canAdaptAnnotations */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
+ annotCheckersExists || {
+ if (analyzerPlugins.isEmpty) false
+ else analyzerPlugins.exists(plugin =>
+ plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
+ }
+ }
+
+ /** @see AnalyzerPlugin.adaptAnnotations */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
+ if (analyzerPlugins.isEmpty) annotCheckersTree
+ else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
+ if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypedReturn */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersType
+ else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ }
+}
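
A minimal sketch of how a compiler plugin might hook into this new API. Only `AnalyzerPlugin`, `addAnalyzerPlugin` and the hook signatures come from the file above; the surrounding wiring (component class, plugin object, logging) is invented:

    import scala.tools.nsc.Global

    class MyComponent(val global: Global) {
      import global._
      import analyzer.{ AnalyzerPlugin, Typer }

      object myAnalyzerPlugin extends AnalyzerPlugin {
        // Stay active only up to pickler, as suggested by the isActive() docs.
        override def isActive(): Boolean =
          global.phase.id < global.currentRun.picklerPhase.id

        // Observe (or adjust) the type computed for each tree; returning
        // `tpe` unchanged keeps the type checker's result.
        override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
          if (settings.debug.value) log(s"typed $tree: $tpe")
          tpe
        }
      }

      analyzer.addAnalyzerPlugin(myAnalyzerPlugin)
    }
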
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index dc367b11fd..fbf23968f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -273,9 +273,6 @@ trait ContextErrors {
def VolatileValueError(vdef: Tree) =
issueNormalTypeError(vdef, "values cannot be volatile")
- def FinalVolatileVarError(vdef: Tree) =
- issueNormalTypeError(vdef, "final vars cannot be volatile")
-
def LocalVarUninitializedError(vdef: Tree) =
issueNormalTypeError(vdef, "local variables must be initialized")
@@ -763,10 +760,14 @@ trait ContextErrors {
else " of " + expanded.getClass
))
- def MacroImplementationNotFoundError(expandee: Tree) =
- macroExpansionError(expandee,
+ def MacroImplementationNotFoundError(expandee: Tree) = {
+ val message =
"macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
+ (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ else "")
+ macroExpansionError(expandee, message)
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index af2aeefecd..620665126e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -35,7 +35,7 @@ trait Contexts { self: Analyzer =>
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
- private val startContext = {
+ private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
@@ -342,6 +342,16 @@ trait Contexts { self: Analyzer =>
c
}
+ /**
+ * A context for typing constructor parameter ValDefs, super or self invocation arguments and default getters
+ * of constructors. These expressions need to be type checked in a scope outside the class, cf. spec 5.3.1.
+ *
+ * This method is called by namer / typer where `this` is the context for the constructor DefDef. The
+ * owner of the resulting (new) context is the outer context for the Template, i.e. the context for the
+ * ClassDef. This means that class type parameters will be in scope. The value parameters of the current
+ * constructor are also entered into the new constructor scope. Members of the class however will not be
+ * accessible.
+ */
def makeConstructorContext = {
var baseContext = enclClass.outer
while (baseContext.tree.isInstanceOf[Template])
@@ -361,6 +371,8 @@ trait Contexts { self: Analyzer =>
enterLocalElems(c.scope.elems)
}
}
+ // Enter the scope elements of this (the scope for the constructor DefDef) into the new constructor scope.
+      // Concretely, this will enter the value parameters of the constructor.
enterElems(this)
argContext
}
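
An invented example (not from the patch) of what ends up being typed in this constructor context:

    class Base(n: Int)

    // Constructor parameters, the superclass constructor arguments and the
    // default getter generated for `y` are typed in the constructor context:
    // the type parameter T and the parameters x and y are visible there,
    // but members of C are not -- `extends Base(z)` would not compile.
    class C[T](x: T, y: Int = 0) extends Base(y + 1) {
      val z = 1
    }
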
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index df753ba53c..0b46582cbf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -317,15 +317,33 @@ abstract class Duplicators extends Analyzer {
super.typed(tree, mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
- // log("selection on this, no type ascription required")
- // we use the symbol name instead of the tree name because the symbol may have been
- // name mangled, rendering the tree name obsolete
- // log(tree)
- val t = super.typedPos(tree.pos, mode, pt) {
- Select(This(newClassOwner), tree.symbol.name)
- }
- // log("typed to: " + t + "; tpe = " + t.tpe + "; " + inspectTpe(t.tpe))
- t
+ // We use the symbol name instead of the tree name because the symbol
+ // may have been name mangled, rendering the tree name obsolete.
+ // ...but you can't just do a Select on a name because if the symbol is
+ // overloaded, you will crash in the backend.
+ val memberByName = newClassOwner.thisType.member(tree.symbol.name)
+ def nameSelection = Select(This(newClassOwner), tree.symbol.name)
+ val newTree = (
+ if (memberByName.isOverloaded) {
+ // Find the types of the overload alternatives as seen in the new class,
+ // and filter the list down to those which match the old type (after
+ // fixing the old type so it is seen as if from the new class.)
+ val typeInNewClass = fixType(oldClassOwner.info memberType tree.symbol)
+ val alts = memberByName.alternatives
+ val memberTypes = alts map (newClassOwner.info memberType _)
+ val memberString = memberByName.defString
+ alts zip memberTypes filter (_._2 =:= typeInNewClass) match {
+ case ((alt, tpe)) :: Nil =>
+ log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
+ nameSelection
+ }
+ }
+ else nameSelection
+ )
+ super.typed(atPos(tree.pos)(newTree), mode, pt)
case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
// val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen)))
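
An invented example of the overloading situation the change above defends against: when specialization duplicates a method body into a specialized subclass, a selection on `this` must be resolved to the matching overload alternative, not merely by name:

    class Acc[@specialized(Int) T](init: T) {
      def add(x: T): T = x
      def add(xs: List[T]): T = xs.head
      // When this body is duplicated for the Int specialization, the
      // selection `this.add` is overloaded; the duplicator now narrows it
      // to the alternative whose type matches the original symbol.
      def twice(x: T): T = add(add(x))
    }
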
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 2c2aa03d24..74078a4ed3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -257,8 +257,8 @@ trait Infer extends Checkable {
tp1 // @MAT aliases already handled by subtyping
}
- private val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
- private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
+ private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
+ private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
@@ -411,8 +411,19 @@ trait Infer extends Checkable {
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
+ *
* [Martin] I think Infer is also created by Erasure, with the default
* implementation of isCoercible
+ * [Paulp] (Assuming the above must refer to my comment on isCoercible)
+ * Nope, I examined every occurrence of Inferencer in trunk. It
+ * appears twice as a self-type, once at its definition, and once
+ * where it is instantiated in Typers. There are no others.
+ *
+ % ack -A0 -B0 --no-filename '\bInferencer\b' src
+ self: Inferencer =>
+ self: Inferencer =>
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
@@ -1578,10 +1589,10 @@ trait Infer extends Checkable {
}
// Drop those that use a default; keep those that use vararg/tupling conversion.
mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && {
- compareLengths(t.params, argtpes) < 0 || // tupling (*)
- hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- }
+ !t.typeSymbol.hasDefaultFlag && (
+ compareLengths(t.params, argtpes) < 0 // tupling (*)
+ || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
+ )
)
// (*) more arguments than parameters, but still applicable: tupling conversion works.
// todo: should not return "false" when paramTypes = (Unit) no argument is given
@@ -1608,15 +1619,18 @@ trait Infer extends Checkable {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
tryTwice { isSecondTry =>
- debuglog("infer method alt "+ tree.symbol +" with alternatives "+
- (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
- val applicable = resolveOverloadedMethod(argtpes, {
- alts filter { alt =>
- inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
- (!varArgsOnly || isVarArgsList(alt.tpe.params))
- }
- })
+ def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
+ isVarArgsList(alt.tpe.params)
+ && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
+ )
+ val applicable = resolveOverloadedMethod(argtpes,
+ alts filter (alt =>
+ varargsApplicableCheck(alt)
+ && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
+ )
+ )
def improves(sym1: Symbol, sym2: Symbol) = {
// util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index b20a9ea626..245656e2d7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -452,7 +452,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
case tpe => tpe
}
checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
@@ -684,6 +684,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* the expandee with an error marker set if there has been an error
*/
def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ if (settings.Ymacronoexpand.value) return expandee // SI-6812
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index b1cf93a879..99557d1527 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -235,8 +235,8 @@ trait MethodSynthesis {
context.unit.synthetics get meth match {
case Some(mdef) =>
context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
@@ -329,6 +329,7 @@ trait MethodSynthesis {
*/
def category: Symbol
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 98b6264051..c728185d4e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -65,7 +65,18 @@ trait Namers extends MethodSynthesis {
case ModuleDef(_, _, _) => tree.symbol.moduleClass
case _ => tree.symbol
}
- newNamer(context.makeNewScope(tree, sym))
+ def isConstrParam(vd: ValDef) = {
+ (sym hasFlag PARAM | PRESUPER) &&
+ !vd.mods.isJavaDefined &&
+ sym.owner.isConstructor
+ }
+ val ownerCtx = tree match {
+ case vd: ValDef if isConstrParam(vd) =>
+ context.makeConstructorContext
+ case _ =>
+ context
+ }
+ newNamer(ownerCtx.makeNewScope(tree, sym))
}
def createInnerNamer() = {
newNamer(context.make(context.tree, owner, newScope))
@@ -423,6 +434,7 @@ trait Namers extends MethodSynthesis {
def enterSyms(trees: List[Tree]): Namer = {
trees.foldLeft(this: Namer) { (namer, t) =>
val ctx = namer enterSym t
+ // for Import trees, enterSym returns a changed context, so we need a new namer
if (ctx eq namer.context) namer
else newNamer(ctx)
}
@@ -521,20 +533,19 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = copyDefDef.symbol
- val lazyType = completerOf(copyDefDef, tparams)
+ def enterCopyMethod(copyDef: DefDef): Symbol = {
+ val sym = copyDef.symbol
+ val lazyType = completerOf(copyDef)
/** Assign the types of the class parameters to the parameters of the
* copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
val classParamss = constructorType.paramss
- val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
- map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyDef.vparamss, classParamss)((copyParams, classParams) =>
map2(copyParams, classParams)((copyP, classP) =>
copyP.tpt setType subst(classP.tpe)
)
@@ -542,24 +553,28 @@ trait Namers extends MethodSynthesis {
}
sym setInfo {
- mkTypeCompleter(copyDefDef) { sym =>
+ mkTypeCompleter(copyDef) { sym =>
assignParamTypes()
lazyType complete sym
}
}
}
- def completerOf(tree: Tree): TypeCompleter = completerOf(tree, treeInfo.typeParameters(tree))
- def completerOf(tree: Tree, tparams: List[TypeDef]): TypeCompleter = {
+
+ def completerOf(tree: Tree): TypeCompleter = {
val mono = namerOf(tree.symbol) monoTypeCompleter tree
+ val tparams = treeInfo.typeParameters(tree)
if (tparams.isEmpty) mono
else {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
+ /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered
+ * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`.
+ * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds:
+       *   !tree.symbol.isAbstractType || tparams.forall(_.symbol == NoSymbol)
+ * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter.
+ */
if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ?
createNamer(tree) enterSyms tparams
- new PolyTypeCompleter(tparams, mono, tree, context) //@M
+ new PolyTypeCompleter(tparams, mono, context) //@M
}
}
@@ -621,9 +636,9 @@ trait Namers extends MethodSynthesis {
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
- enterCopyMethod(tree, tparams)
+ enterCopyMethod(tree)
else
- sym setInfo completerOf(tree, tparams)
+ sym setInfo completerOf(tree)
}
def enterClassDef(tree: ClassDef) {
@@ -736,13 +751,13 @@ trait Namers extends MethodSynthesis {
}
}
- def accessorTypeCompleter(tree: ValDef, isSetter: Boolean = false) = mkTypeCompleter(tree) { sym =>
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- if (isSetter)
- MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
- else
- NullaryMethodType(typeSig(tree))
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ else NullaryMethodType(typeSig(tree))
+ pluginsTypeSigAccessor(tp, typer, tree, sym)
}
}
}
@@ -805,17 +820,12 @@ trait Namers extends MethodSynthesis {
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- // compute result type from rhs
- val typedBody =
+ val rhsTpe =
if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
else defnTyper.computeType(tree.rhs, pt)
- val typedDefn = widenIfNecessary(tree.symbol, typedBody, pt)
- assignTypeToTree(tree, typedDefn)
- }
-
- private def assignTypeToTree(tree: ValOrDefDef, tpe: Type): Type = {
- tree.tpt defineType tpe setPos tree.pos.focus
+ val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
+ tree.tpt defineType defnTpe setPos tree.pos.focus
tree.tpt.tpe
}
@@ -892,163 +902,257 @@ trait Namers extends MethodSynthesis {
for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
cda.companionModuleClassNamer = templateNamer
}
- ClassInfoType(parents, decls, clazz)
+ val classTp = ClassInfoType(parents, decls, clazz)
+ pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType)
}
- private def classSig(tparams: List[TypeDef], impl: Template): Type = {
+ private def classSig(cdef: ClassDef): Type = {
+ val clazz = cdef.symbol
+ val ClassDef(_, _, tparams, impl) = cdef
val tparams0 = typer.reenterTypeParams(tparams)
val resultType = templateSig(impl)
- GenPolyType(tparams0, resultType)
+ val res = GenPolyType(tparams0, resultType)
+ val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType)
+
+ // Already assign the type to the class symbol (monoTypeCompleter will do it again).
+ // Allows isDerivedValueClass to look at the info.
+ clazz setInfo pluginsTp
+ if (clazz.isDerivedValueClass) {
+ log("Ensuring companion for derived value class " + cdef.name + " at " + cdef.pos.show)
+ clazz setFlag FINAL
+ // Don't force the owner's info lest we create cycles as in SI-6357.
+ enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
+ }
+ pluginsTp
}
- private def methodSig(ddef: DefDef, mods: Modifiers, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): Type = {
- val meth = owner
- val clazz = meth.owner
- // enters the skolemized version into scope, returns the deSkolemized symbols
- val tparamSyms = typer.reenterTypeParams(tparams)
- // since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
- var vparamSymss = enterValueParams(vparamss)
+ private def moduleSig(mdef: ModuleDef): Type = {
+ val moduleSym = mdef.symbol
+    // The infos of both the module and the moduleClass symbols need to be assigned. monoTypeCompleter assigns
+ // the result of typeSig to the module symbol. The module class info is assigned here as a side-effect.
+ val result = templateSig(mdef.impl)
+ val pluginsTp = pluginsTypeSig(result, typer, mdef, WildcardType)
+ // Assign the moduleClass info (templateSig returns a ClassInfoType)
+ val clazz = moduleSym.moduleClass
+ clazz setInfo pluginsTp
+ // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+    // (clazz.info would be the ClassInfoType, which is not what should be assigned to the module symbol)
+ clazz.tpe
+ }
+
+ /**
+ * The method type for `ddef`.
+ *
+ * If a PolyType(tparams, restp) is returned, `tparams` are the external symbols (not type skolems),
+ * i.e. instances of AbstractTypeSymbol. All references in `restp` to the type parameters are TypeRefs
+ * to these non-skolems.
+ *
+ * For type-checking the rhs (in case the result type is inferred), the type skolems of the type parameters
+ * are entered in scope. Equally, the parameter symbols entered into scope have types which refer to those
+ * skolems: when type-checking the rhs, references to parameters need to have types that refer to the skolems.
+ * In summary, typing an rhs happens with respect to the skolems.
+ *
+ * This means that the method's result type computed by the typer refers to skolems. In order to put it
+ * into the method type (the result of methodSig), typeRefs to skolems have to be replaced by references
+ * to the non-skolems.
+ */
+ private def methodSig(ddef: DefDef): Type = {
// DEPMETTODO: do we need to skolemize value parameter symbols?
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
- tpt setPos meth.pos.focus
- }
- var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val site = clazz.thisType
- /** Called for all value parameter lists, right to left
- * @param vparams the symbols of one parameter list
- * @param restpe the result type (possibly a MethodType)
+ val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
+
+ val meth = owner
+ val methOwner = meth.owner
+ val site = methOwner.thisType
+
+ /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
+ * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
+ * into scope and returns the non-skolems.
*/
- def makeMethodType(vparams: List[Symbol], restpe: Type) = {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- if (owner.isJavaDefined)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
- else
- MethodType(vparams, restpe)
- }
+ val tparamSyms = typer.reenterTypeParams(tparams)
+
+ val tparamSkolems = tparams.map(_.symbol)
+
+    /* Since the skolemized tparams are in scope, the TypeRefs in the types of vparamSymss refer to the type skolems.
+     * Note that for parameters with missing types, `methodSig` reassigns the types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ var vparamSymss = enterValueParams(vparamss)
+
+ /**
+       * Creates a method type using tparamSyms and vparamSymss as argument symbols and `restpe` as result type.
+       * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
+       * so the resulting type is a valid external method type: it does not contain (references to) skolems.
+ */
def thisMethodType(restpe: Type) = {
val checkDependencies = new DependentTypeChecker(context)(this)
checkDependencies check vparamSymss
// DEPMETTODO: check not needed when they become on by default
checkDependencies(restpe)
- GenPolyType(
+ val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
+ // TODODEPMET: check that we actually don't need to do anything here
+ // new dependent method types: probably OK already, since 'enterValueParams' above
+ // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
+ // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
+ // so re-use / adapt that)
+ if (meth.isJavaDefined)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
+ else
+ MethodType(vparams, restpe)
+ }
+
+
+ val res = GenPolyType(
tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
if (vparamSymss.isEmpty) NullaryMethodType(restpe)
// vparamss refer (if they do) to skolemized tparams
else (vparamSymss :\ restpe) (makeMethodType)
)
+ res.substSym(tparamSkolems, tparamSyms)
}
- def transformedResult =
- thisMethodType(resultPt).substSym(tparams map (_.symbol), tparamSyms)
+ /**
+ * Creates a schematic method type which has WildcardTypes for non specified
+ * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
+ * type schema is
+ *
+ * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
+ *
+     * where T is a non-skolem type parameter.
+ */
+ def methodTypeSchema(resTp: Type) = {
+      // for all params without an explicit type, set WildcardType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ thisMethodType(resTp)
+ }
- // luc: added .substSym from skolemized to deSkolemized
- // site.memberType(sym): PolyType(tparams, MethodType(..., ...))
- // ==> all references to tparams are deSkolemized
- // thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized.
- // ==> the two didn't match
- //
- // for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
- // class A { def foo[T](a: T)(b: T = a) = a }
- // class B extends A { override def foo[U](a: U)(b: U) = b }
- def overriddenSymbol =
- intersectionType(clazz.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches transformedResult)
+ def overriddenSymbol(resTp: Type) = {
+ intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
+ sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
}
- // TODO: see whether this or something similar would work instead.
- //
+ }
+ // TODO: see whether this or something similar would work instead:
// def overriddenSymbol = meth.nextOverriddenSymbol
- // fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
- // try to complete from matching definition in base type
- mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
- val overridden = overriddenSymbol
- if (overridden != NoSymbol && !overridden.isOverloaded) {
- overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- resultPt = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
- case mt => mt
- }
+ /**
+ * If `meth` doesn't have an explicit return type, extracts the return type from the method
+     * overridden by `meth` (if there's a unique one). This type is later used as the expected
+ * type for computing the type of the rhs. The resulting type references type skolems for
+ * type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
+ *
+ * As a first side effect, this method assigns a MethodType constructed using this
+ * return type to `meth`. This allows omitting the result type for recursive methods.
+ *
+     * As another side effect, this method also assigns parameter types from the overridden
+ * method to parameters of `meth` that have missing types (the parser accepts missing
+ * parameter types under -Yinfer-argument-types).
+ */
+ def typesFromOverridden(methResTp: Type): Type = {
+ val overridden = overriddenSymbol(methResTp)
+ if (overridden == NoSymbol || overridden.isOverloaded) {
+ methResTp
+ } else {
+ overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
+ var overriddenTp = site.memberType(overridden) match {
+ case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
+ case mt => mt
+ }
for (vparams <- vparamss) {
- var pps = resultPt.params
+ var overriddenParams = overriddenTp.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- val paramtpe = pps.head.tpe
- vparam.symbol setInfo paramtpe
- vparam.tpt defineType paramtpe setPos vparam.pos.focus
+ val overriddenParamTp = overriddenParams.head.tpe
+              // references to type parameters in overriddenParamTp link to the type skolems, so the
+ // assigned type is consistent with the other / existing parameter types in vparamSymss.
+ vparam.symbol setInfo overriddenParamTp
+ vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- pps = pps.tail
+ overriddenParams = overriddenParams.tail
}
- resultPt = resultPt.resultType
+ overriddenTp = overriddenTp.resultType
}
- resultPt match {
- case NullaryMethodType(rtpe) => resultPt = rtpe
- case MethodType(List(), rtpe) => resultPt = rtpe
+
+ overriddenTp match {
+ case NullaryMethodType(rtpe) => overriddenTp = rtpe
+ case MethodType(List(), rtpe) => overriddenTp = rtpe
case _ =>
}
+
if (tpt.isEmpty) {
// provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(resultPt)
+ meth setInfo thisMethodType(overriddenTp)
+ overriddenTp
+ } else {
+ methResTp
}
}
}
- // Add a () parameter section if this overrides some method with () parameters.
- if (clazz.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
- _.info.isInstanceOf[MethodType])) {
+
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe
+ tpt setPos meth.pos.focus
+ }
+
+ val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
+ val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
+ typesFromOverridden(methResTp)
+ } else {
+ methResTp
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ if (methOwner.isClass && vparamss.isEmpty &&
+ overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
vparamSymss = ListOfNil
}
+
+ // issue an error for missing parameter types
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
MissingParameterOrValTypeError(vparam)
vparam.tpt defineType ErrorType
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
+
+ addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
// (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
- if (fastTrack contains ddef.symbol) ddef.symbol setFlag MACRO
+ if (fastTrack contains meth) meth setFlag MACRO
// macro defs need to be typechecked in advance
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (ddef.symbol.isTermMacro) {
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- typer.computeMacroDefType(ddef, pt)
+ if (meth.isTermMacro) {
+ typer.computeMacroDefType(ddef, resTpFromOverride)
}
- thisMethodType({
+ val res = thisMethodType({
val rt = (
if (!tpt.isEmpty) {
- typer.typedType(tpt).tpe
+ methResTp
} else {
- // replace deSkolemized symbols with skolemized ones
- // (for resultPt computed by looking at overridden symbol, right?)
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- assignTypeToTree(ddef, typer, pt)
- }
- )
+            // the return type is inferred, so we don't just use resTpFromOverride. Here, C.f has type String:
+ // trait T { def f: Object }; class C <: T { def f = "" }
+            // Using resTpFromOverride as the expected type allows for the following (C.f has type A):
+ // trait T { def f: A }; class C <: T { implicit def b2a(t: B): A = ???; def f = new B }
+ assignTypeToTree(ddef, typer, resTpFromOverride)
+ })
// #2382: return type of default getters are always @uncheckedVariance
if (meth.hasDefault)
rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
else rt
})
+ pluginsTypeSig(res, typer, ddef, methResTp)
}
/**
@@ -1060,9 +1164,9 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val clazz = meth.owner
+ val methOwner = meth.owner
val isConstr = meth.isConstructor
- val overridden = if (isConstr || !clazz.isClass) NoSymbol else overriddenSymbol
+ val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
@@ -1112,7 +1216,7 @@ trait Namers extends MethodSynthesis {
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionSymbolOf(clazz, context)
+ val module = companionSymbolOf(methOwner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
module.attachments.get[ConstructorDefaultsAttachment] match {
@@ -1158,7 +1262,7 @@ trait Namers extends MethodSynthesis {
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
- clazz.resetFlag(INTERFACE) // there's a concrete member now
+ methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
// save the default getters as attachments in the method symbol. if compiling the
@@ -1183,15 +1287,31 @@ trait Namers extends MethodSynthesis {
}
}
+ private def valDefSig(vdef: ValDef) = {
+ val ValDef(_, _, tpt, rhs) = vdef
+ val result = if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ }
+ else assignTypeToTree(vdef, typer, WildcardType)
+ } else {
+ typer.typedType(tpt).tpe
+ }
+ pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+
+ }
+
//@M! an abstract type definition (abstract type member/type parameter)
// may take type parameters, which are in scope in its bounds
- private def typeDefSig(tpsym: Symbol, tparams: List[TypeDef], rhs: Tree) = {
+ private def typeDefSig(tdef: TypeDef) = {
+ val TypeDef(_, _, tparams, rhs) = tdef
// log("typeDefSig(" + tpsym + ", " + tparams + ")")
val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef)
val tp = typer.typedType(rhs).tpe match {
case TypeBounds(lt, rt) if (lt.isError || rt.isError) =>
TypeBounds.empty
- case tp @ TypeBounds(lt, rt) if (tpsym hasFlag JAVA) =>
+ case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) =>
TypeBounds(lt, objToAny(rt))
case tp =>
tp
@@ -1213,9 +1333,32 @@ trait Namers extends MethodSynthesis {
// However, separate compilation requires the symbol info to be
// loaded to do this check, but loading the info will probably
// lead to spurious cyclic errors. So omit the check.
- GenPolyType(tparamSyms, tp)
+ val res = GenPolyType(tparamSyms, tp)
+ pluginsTypeSig(res, typer, tdef, WildcardType)
}
+ private def importSig(imp: Import) = {
+ val Import(expr, selectors) = imp
+ val expr1 = typer.typedQualifier(expr)
+ typer checkStable expr1
+ if (expr1.symbol != null && expr1.symbol.isRootPackage)
+ RootImportError(imp)
+
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(imp) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
+ }
+
+
/** Given a case class
* case class C[Ts] (ps: Us)
* Add the following methods to toScope:
@@ -1239,6 +1382,11 @@ trait Namers extends MethodSynthesis {
caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym
}
+ /**
+   * typeSig is invoked by monoTypeCompleters. It returns the type of a definition, which
+ * is then assigned to the corresponding symbol (typeSig itself does not need to assign
+ * the type to the symbol, but it can if necessary).
+ */
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
/** For definitions, transform Annotation trees to AnnotationInfos, assign
@@ -1271,84 +1419,33 @@ trait Namers extends MethodSynthesis {
}
val sym: Symbol = tree.symbol
- // @Lukas: I am not sure this is the right way to do things.
- // We used to only decorate the module class with annotations, which is
- // clearly wrong. Now we decorate both the class and the object.
- // But maybe some annotations are only meant for one of these but not for the other?
- //
- // TODO: meta-annotations to indicate class vs. object.
+
+ // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
annotate(sym)
if (sym.isModule) annotate(sym.moduleClass)
def getSig = tree match {
- case cdef @ ClassDef(_, name, tparams, impl) =>
- val clazz = tree.symbol
- val result = createNamer(tree).classSig(tparams, impl)
- clazz setInfo result
- if (clazz.isDerivedValueClass) {
- log("Ensuring companion for derived value class " + name + " at " + cdef.pos.show)
- clazz setFlag FINAL
- // Don't force the owner's info lest we create cycles as in SI-6357.
- enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
- }
- result
-
- case ModuleDef(_, _, impl) =>
- val clazz = sym.moduleClass
- clazz setInfo createNamer(tree).templateSig(impl)
- clazz.tpe
-
- case ddef @ DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
- // TODO: cleanup parameter list
- createNamer(tree).methodSig(ddef, mods, tparams, vparamss, tpt, rhs)
-
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val isBeforeSupercall = (
- (sym hasFlag PARAM | PRESUPER)
- && !mods.isJavaDefined
- && sym.owner.isConstructor
- )
- val typer1 = typer.constrTyperIf(isBeforeSupercall)
- if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- MissingParameterOrValTypeError(tpt)
- ErrorType
- }
- else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType)
- }
- else typer1.typedType(tpt).tpe
-
- case TypeDef(_, _, tparams, rhs) =>
- createNamer(tree).typeDefSig(sym, tparams, rhs) //@M!
-
- case Import(expr, selectors) =>
- val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
- if (expr1.symbol != null && expr1.symbol.isRootPackage)
- RootImportError(tree)
-
- if (expr1.isErrorTyped)
- ErrorType
- else {
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
- }
- }
+ case cdef: ClassDef =>
+ createNamer(tree).classSig(cdef)
+
+ case mdef: ModuleDef =>
+ createNamer(tree).moduleSig(mdef)
+
+ case ddef: DefDef =>
+ createNamer(tree).methodSig(ddef)
- val result =
- try getSig
- catch typeErrorHandler(tree, ErrorType)
+ case vdef: ValDef =>
+ createNamer(tree).valDefSig(vdef)
- result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
- case _ => result
+ case tdef: TypeDef =>
+ createNamer(tree).typeDefSig(tdef) //@M!
+
+ case imp: Import =>
+ importSig(imp)
}
+
+ try getSig
+ catch typeErrorHandler(tree, ErrorType)
}
def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
@@ -1508,14 +1605,25 @@ trait Namers extends MethodSynthesis {
}
}
- /** A class representing a lazy type with known type parameters.
+ /**
+ * A class representing a lazy type with known type parameters. `ctx` is the namer context in which the
+ * `owner` is defined.
+ *
+ * Constructing a PolyTypeCompleter for a DefDef creates type skolems for the type parameters and
+ * assigns them to the `tparams` trees.
*/
- class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
- private val ownerSym = owner.symbol
- override val typeParams = tparams map (_.symbol) //@M
- override val tree = restp.tree
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
+ // @M. If `owner` is an abstract type member, `typeParams` are all NoSymbol (see comment in `completerOf`),
+ // otherwise, the non-skolemized (external) type parameter symbols
+ override val typeParams = tparams map (_.symbol)
+
+ /* The definition tree (poly ClassDef, poly DefDef or HK TypeDef) */
+ override val tree = restp.tree
+
+ private val defnSym = tree.symbol
- if (ownerSym.isTerm) {
+ if (defnSym.isTerm) {
+ // for polymorphic DefDefs, create type skolems and assign them to the tparam trees.
val skolems = deriveFreshSkolems(tparams map (_.symbol))
map2(tparams, skolems)(_ setSymbol _)
}
@@ -1523,8 +1631,8 @@ trait Namers extends MethodSynthesis {
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
- if (ownerSym.isAbstractType)
- newNamerFor(ctx, owner) enterSyms tparams //@M
+ if (defnSym.isAbstractType)
+ newNamerFor(ctx, tree) enterSyms tparams //@M
restp complete sym
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index be218fcb02..2340c78f8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -268,26 +268,32 @@ trait NamesDefaults { self: Analyzer =>
*
* For by-name parameters, create a value
* x$n: () => T = () => arg
+ *
+ * For Ident(<unapply-selector>) arguments, no ValDef is created (SI-3353).
*/
- def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
+ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
- val argTpe = (
- if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
- }
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
- (context.scope.enter(s), byName, repeated)
+ val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ case Ident(nme.SELECTOR_DUMMY) =>
+ None // don't create a local ValDef if the argument is <unapply-selector>
+ case _ =>
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
+ Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
- case ((sym, byName, repeated), arg) =>
+ case (None, _) => None
+ case (Some((sym, byName, repeated)), arg) =>
val body =
if (byName) {
val res = blockTyper.typed(Function(List(), arg))
@@ -303,7 +309,7 @@ trait NamesDefaults { self: Analyzer =>
blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
} else arg
}
- atPos(body.pos)(ValDef(sym, body).setType(NoType))
+ Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
}
}
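To make the comment above concrete, here is a hand-written sketch (not actual compiler output) of the temporaries a named-argument call conceptually expands to; x1 and x2 stand in for the compiler's fresh x$n names, and the method f is invented for the example.

    object NamedArgsSketch extends App {
      def f(a: Int, b: => Int): Int = a + b

      // A call such as f(b = { ... }, a = 1) is conceptually rewritten to a block of
      // temporaries: by-value arguments keep their call-site evaluation order, while a
      // by-name argument is wrapped in a nullary function and re-applied at the call.
      val result = {
        val x1 = () => { println("evaluating b"); 41 } // by-name argument becomes () => arg
        val x2 = 1                                     // by-value argument
        f(x2, x1())                                    // arguments back in definition-site order
      }
      println(result) // prints "evaluating b", then 42
    }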
@@ -329,27 +335,29 @@ trait NamesDefaults { self: Analyzer =>
// ValDef's in the block), change the arguments to these local values.
case Apply(expr, typedArgs) =>
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
- val ref = gen.mkAttributedRef(vDef.symbol)
- atPos(vDef.pos.focus) {
- // for by-name parameters, the local value is a nullary function returning the argument
- tpe.typeSymbol match {
- case ByNameParamClass => Apply(ref, Nil)
- case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
- case _ => ref
+ val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match {
+ case None => origArg
+ case Some(vDef) =>
+ val ref = gen.mkAttributedRef(vDef.symbol)
+ atPos(vDef.pos.focus) {
+ // for by-name parameters, the local value is a nullary function returning the argument
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
- }
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
+ val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 69bbab6e42..4b53802d95 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -409,15 +409,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- val (typeTestTreeMaker, patBinderOrCasted) =
- if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
- (List(treeMaker), treeMaker.nextBinder)
- } else {
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
// no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
// SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
// TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
@@ -426,10 +420,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
*/
- (Nil, patBinder setInfo extractor.paramType)
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+      // it'll be equal to patBinder cast to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
}
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
}
@@ -622,8 +627,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// to which type should the previous binder be casted?
def paramType : Type
- // binder has been casted to paramType if necessary
- def treeMaker(binder: Symbol, pos: Position): TreeMaker
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
@@ -637,6 +647,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case bp => bp
}
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
def subPatTypes: List[Type] =
if(isSeq) {
val TypeRef(pre, SeqClass, args) = seqTp
@@ -731,41 +746,31 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
- // binder has type paramType
- def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+     * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
val paramAccessors = binder.constrParamAccessors
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
val mutableBinders =
- if (paramAccessors exists (_.isMutable))
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
// checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders)
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- // caseFieldAccessors is messed up after typers (reversed, names mangled for non-public fields)
- // TODO: figure out why...
val accessors = binder.caseFieldAccessors
- // luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
- // (need to undo name-mangling, including the sneaky trailing whitespace)
- val constrParamAccessors = binder.constrParamAccessors
-
- def indexInCPA(acc: Symbol) =
- constrParamAccessors indexWhere { orig =>
- // patmatDebug("compare: "+ (orig, acc, orig.name, acc.name, (acc.name == orig.name), (acc.name startsWith (orig.name append "$"))))
- val origName = orig.name.toString.trim
- val accName = acc.name.toString.trim
- (accName == origName) || (accName startsWith (origName + "$"))
- }
-
- // patmatDebug("caseFieldAccessors: "+ (accessors, binder.caseFieldAccessors map indexInCPA))
- // patmatDebug("constrParamAccessors: "+ constrParamAccessors)
-
- val accessorsSorted = accessors sortBy indexInCPA
- if (accessorsSorted isDefinedAt (i-1)) REF(binder) DOT accessorsSorted(i-1)
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
@@ -781,11 +786,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def resultType = tpe.finalResultType
def isSeq = extractorCall.symbol.name == nme.unapplySeq
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+     * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
override protected def seqTree(binder: Symbol): Tree =
@@ -842,6 +857,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
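For intuition, a user-level sketch of the pattern shapes that PatternBoundToUnderscore classifies as bound to underscore (their binders can never be referenced, so they are never stored); Box and describe are invented for the example.

    object UnderscoreBindersSketch extends App {
      case class Box(a: Int, b: Any)
      def describe(x: Box): String = x match {
        case Box(_, _: String) => "both sub-patterns are wildcards (one with a type ascription)"
        case Box(n, s)         => s"named binders n=$n, s=$s may still need to be stored"
      }
      println(describe(Box(1, "hi")))  // first case
      println(describe(Box(2, 3.14)))  // second case
    }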
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
@@ -1009,10 +1034,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
// unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
// mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
def emitVars = storedBinders.nonEmpty
@@ -1033,10 +1065,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
import CODE._
- def bindSubPats(in: Tree): Tree = if (!emitVars) in
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
else {
- val (subPatBindersStored, subPatRefsStored) = stored.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+          // binders among the potentially stored ones that are actually referenced by tree `in`
+ val usedBinders = new collection.mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
}
}
@@ -1056,7 +1100,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val subPatRefs: List[Tree],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
- val prevBinder: Symbol) extends FunTreeMaker with PreserveSubPatBinders {
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
@@ -1099,27 +1147,35 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders {
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
import CODE._
val nextBinder = prevBinder // just passing through
// mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant)
- override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
}
override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
}
- // typetag-based tests are inserted by the type checker
- def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
-
object TypeTestTreeMaker {
// factored out so that we can consistently generate other representations of the tree that implements the test
// (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
@@ -1133,12 +1189,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result
def eqTest(pat: Tree, testedBinder: Symbol): Result
def and(a: Result, b: Result): Result
+ def tru: Result
}
object treeCondStrategy extends TypeTestCondStrategy { import CODE._
type Result = Tree
def and(a: Result, b: Result): Result = a AND b
+ def tru = TRUE_typed
def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
@@ -1169,6 +1227,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
def eqTest(pat: Tree, testedBinder: Symbol): Result = false
def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
}
}
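As a condensed, standalone sketch of the strategy idea used above (the names below are illustrative, not the compiler's API): one abstract description of a type test can be rendered either as a readable condition or as the boolean question of whether the test implies that a given binder is non-null.

    object CondStrategySketch extends App {
      trait CondStrategy {
        type Result
        def typeTest(binder: String): Result
        def nonNullTest(binder: String): Result
        def and(a: Result, b: Result): Result
        def tru: Result
      }
      // rendering 1: build a readable description of the emitted condition
      object describe extends CondStrategy {
        type Result = String
        def typeTest(binder: String)    = s"$binder.isInstanceOf[Expected]"
        def nonNullTest(binder: String) = s"$binder ne null"
        def and(a: String, b: String)   = s"($a) && ($b)"
        def tru                         = "true"
      }
      // rendering 2: does the condition imply that `target` cannot be null?
      def impliesNonNull(target: String) = new CondStrategy {
        type Result = Boolean
        def typeTest(binder: String)    = binder == target // an instanceOf test implies non-null
        def nonNullTest(binder: String) = binder == target
        def and(a: Boolean, b: Boolean) = a || b           // one implying conjunct is enough
        def tru                         = false
      }
      println(describe.and(describe.nonNullTest("x"), describe.typeTest("x")))
      val nn = impliesNonNull("x")
      println(nn.and(nn.nonNullTest("x"), nn.typeTest("x"))) // true
    }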
@@ -1238,10 +1309,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// I think it's okay:
// - the isInstanceOf test includes a test for the element type
// - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
case _ => default
}
@@ -1253,6 +1330,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
def isPureTypeTest = renderCondition(pureTypeTestChecker)
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
}
@@ -1751,6 +1830,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = TrueCond
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
@@ -1897,17 +1977,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case object False extends Prop
// symbols are propositions
- case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = nextSymId
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
override def toString = variable +"="+ const +"#"+ id
}
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
-
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: util.HashSet[Sym] = new util.HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
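A standalone sketch of the interning ("hash-consing") idea behind UniqueSym: each distinct key is constructed once and cached, so later code (such as Lit.equals below) can compare instances with reference equality. The Interned class and its cache are invented for the example; the compiler uses its own util.HashSet.

    object InterningSketch extends App {
      import scala.collection.mutable

      final class Interned private (val variable: String, val const: String)
      object Interned {
        private val cache = mutable.HashMap.empty[(String, String), Interned]
        def apply(variable: String, const: String): Interned =
          cache.getOrElseUpdate((variable, const), new Interned(variable, const))
      }

      val a = Interned("x", "Some")
      val b = Interned("x", "Some")
      println(a eq b) // true: the same cached instance, so `eq` is a sound equality here
    }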
def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
-
trait PropTraverser {
def apply(x: Prop): Unit = x match {
case And(a, b) => apply(a); apply(b)
@@ -2063,6 +2150,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
import scala.collection.mutable.ArrayBuffer
type FormulaBuilder = ArrayBuffer[Clause]
def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
def toFormula(buff: FormulaBuilder): Formula = buff
@@ -2167,7 +2255,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
class Lit(val sym: Sym, val pos: Boolean) {
override def toString = if (!pos) "-"+ sym.toString else sym.toString
override def equals(o: Any) = o match {
- case o: Lit => (o.sym == sym) && (o.pos == pos)
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
case _ => false
}
override def hashCode = sym.hashCode + pos.hashCode
@@ -2216,13 +2304,18 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit) = {
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
val negated = -unitLit
// drop entire clauses that are trivially true
// (i.e., disjunctions that contain the literal we're making true in the returned model),
// and simplify clauses by dropping the negation of the literal we're making true
// (since False \/ X == X)
- f.filterNot(_.contains(unitLit)).map(_ - negated)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
}
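For intuition, a tiny standalone model of the unit-propagation step described in the comments above, using integers as literals (negation is sign flip) and sets as clauses; it is a simplification of the Formula/Clause types used here.

    object UnitPropagationSketch extends App {
      type Clause  = Set[Int]        // a disjunction of literals; -n is the negation of n
      type Formula = List[Clause]    // a conjunction of clauses

      def dropUnit(f: Formula, unit: Int): Formula =
        f.filterNot(_ contains unit) // clauses satisfied by the unit literal disappear
         .map(_ - (-unit))           // its negation is now false, so drop it from the rest

      // (A \/ B) /\ (-A \/ C) /\ (C \/ D), making A true:
      println(dropUnit(List(Set(1, 2), Set(-1, 3), Set(3, 4)), 1))
      // List(Set(3), Set(3, 4)), i.e. C /\ (C \/ D)
    }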
def findModelFor(f: Formula): Model = {
@@ -3699,11 +3792,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// nextBinder: T
// next == MatchMonad[U]
// returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- VAL(nextBinder) === res,
- next
- ))
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
// guardTree: Boolean
// next: MatchMonad[T]
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 969bb8aceb..b9fdd7280e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -11,6 +11,9 @@ import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.nsc.settings.AnyScalaVersion
+import scala.tools.nsc.settings.NoScalaVersion
/** <p>
* Post-attribution checking and transformation.
@@ -60,23 +63,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
super.transformInfo(sym, tp)
}
- val toJavaRepeatedParam = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(pre, RepeatedParamClass, args) =>
- typeRef(pre, JavaRepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
-
- val toScalaRepeatedParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, JavaRepeatedParamClass, args) =>
- typeRef(pre, RepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
+ val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass)
+ val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass)
def accessFlagsToString(sym: Symbol) = flagsToString(
sym getFlag (PRIVATE | PROTECTED),
@@ -156,27 +144,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Override checking ------------------------------------------------------------
- def isJavaVarargsAncestor(clazz: Symbol) = (
- clazz.isClass
- && clazz.isJavaDefined
- && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
- )
-
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
// This is quite expensive, so attempt to skip it completely.
// Insist there at least be a java-defined ancestor which
// defines a varargs method. TODO: Find a cheaper way to exclude.
- if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) {
+ if (inheritsJavaVarArgsMethod(clazz)) {
log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
val self = clazz.thisType
val bridges = new ListBuffer[Tree]
def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
+ log(s"Generating varargs bridge for ${member.fullLocationString} of type $bridgetpe")
- val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
+ val newFlags = (member.flags | VBRIDGE | ARTIFACT) & ~PRIVATE
+ val bridge = member.cloneSymbolImpl(clazz, newFlags) setPos clazz.pos
bridge.setInfo(bridgetpe.cloneInfo(bridge))
clazz.info.decls enter bridge
@@ -189,26 +172,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
localTyper typed DefDef(bridge, body)
}
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
+ // For all concrete non-private members (but: see below) that have a (Scala) repeated
+ // parameter: compute the corresponding method type `jtpe` with a Java repeated parameter
// if a method with type `jtpe` exists and that method is not a varargs bridge
// then create a varargs bridge of type `jtpe` that forwards to the
// member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
+ //
+ // @PP: Can't call nonPrivateMembers because we will miss refinement members,
+ // which have been marked private. See SI-4729.
+ for (member <- nonTrivialMembers(clazz)) {
+ log(s"Considering $member for java varargs bridge in $clazz")
if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+
// Delaying calling memberType as long as possible
if (inherited ne NoSymbol) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
+ val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
// we need to create a bridge
- if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists)
+ val inherited1 = inherited filter (sym => !(sym hasFlag VBRIDGE) && (self memberType sym matches jtpe))
+ if (inherited1.exists)
bridges += varargBridge(member, jtpe)
}
}
}
+ if (bridges.size > 0)
+ log(s"Adding ${bridges.size} bridges for methods extending java varargs.")
+
bridges.toList
}
else Nil
@@ -905,13 +897,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* the type occurs itself at variance position given by `variance`
*/
def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType => ;
- case WildcardType => ;
- case NoType => ;
- case NoPrefix => ;
- case ThisType(_) => ;
- case ConstantType(_) => ;
- // case DeBruijnIndex(_, _) => ;
+ case ErrorType =>
+ case WildcardType =>
+ case BoundedWildcardType(bounds) =>
+ validateVariance(bounds, variance)
+ case NoType =>
+ case NoPrefix =>
+ case ThisType(_) =>
+ case ConstantType(_) =>
+ // case DeBruijnIndex(_, _) =>
case SingleType(pre, sym) =>
validateVariance(pre, variance)
case TypeRef(pre, sym, args) =>
@@ -1378,10 +1372,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation)
- unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
- sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
- )
+ if (sym.hasMigrationAnnotation) {
+ val changed = try
+ settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
+ catch {
+ case e : NumberFormatException =>
+ unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
+ true
+ }
+ if (changed)
+ unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ }
}
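The comparison above relies on the ScalaVersion ordering introduced elsewhere in this change. A quick illustration, assuming scala-compiler is on the classpath (ScalaVersion lives in scala.tools.nsc.settings); the object name is invented.

    import scala.tools.nsc.settings.ScalaVersion

    object MigrationCheckSketch extends App {
      // -Xmigration:2.9.0 against a member whose @migration annotation records "2.10.0":
      println(ScalaVersion("2.9.0")  < ScalaVersion("2.10.0")) // true: the change postdates 2.9.0, so warn
      println(ScalaVersion("2.11.0") < ScalaVersion("2.10.0")) // false: no warning
    }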
private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
@@ -1473,8 +1475,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
+ ( args.nonEmpty
+ && (args.last eq tree)
+ && (fn.tpe.params.length == args.length)
+ && isRepeatedParamType(fn.tpe.params.last.tpe)
+ )
case _ =>
false
}
@@ -1587,7 +1592,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
- if (settings.Xmigration28.value)
+      if (settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
@@ -1686,8 +1691,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
- if (currentOwner.isDerivedValueClass)
- currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index a907ab6c66..39f6f764e7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -78,14 +78,7 @@ trait SyntheticMethods extends ast.TreeDSL {
else templ
}
- val originalAccessors = clazz.caseFieldAccessors
- // private ones will have been renamed -- make sure they are entered
- // in the original order.
- def accessors = clazz.caseFieldAccessors sortBy { acc =>
- originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
- }
- }
+ def accessors = clazz.caseFieldAccessors
val arity = accessors.size
// If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
// !!! Hidden behind -Xexperimental due to bummer type inference bugs.
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 48a5a36b00..c5c3c560ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -117,7 +117,8 @@ abstract class TreeCheckers extends Analyzer {
try p.source.path + ":" + p.line
catch { case _: UnsupportedOperationException => p.toString }
- def errorFn(msg: Any): Unit = println("[check: %s] %s".format(phase.prev, msg))
+ private var hasError: Boolean = false
+ def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
def informFn(msg: Any) {
if (settings.verbose.value || settings.debug.value)
@@ -151,6 +152,7 @@ abstract class TreeCheckers extends Analyzer {
result
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
+ hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
@@ -169,6 +171,7 @@ abstract class TreeCheckers extends Analyzer {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
+ if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
@@ -217,8 +220,11 @@ abstract class TreeCheckers extends Analyzer {
case _ => ()
}
- object precheck extends Traverser {
+ object precheck extends TreeStackTraverser {
override def traverse(tree: Tree) {
+ checkSymbolRefsRespectScope(tree)
+ checkReturnReferencesDirectlyEnclosingDef(tree)
+
val sym = tree.symbol
def accessed = sym.accessed
def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
@@ -289,6 +295,41 @@ abstract class TreeCheckers extends Analyzer {
}
super.traverse(tree)
}
+
+ private def checkSymbolRefsRespectScope(tree: Tree) {
+      def symbolOf(t: Tree): Symbol = Option(t.symbol).getOrElse(NoSymbol)
+ def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
+ val info = Option(symbolOf(tree).info).getOrElse(NoType)
+ val referencedSymbols: List[Symbol] = {
+ val directRef = tree match {
+ case _: RefTree => symbolOf(tree).toOption
+ case _ => None
+ }
+ def referencedSyms(tp: Type) = (tp collect {
+ case TypeRef(_, sym, _) => sym
+ }).toList
+ val indirectRefs = referencedSyms(info)
+ (indirectRefs ++ directRef).distinct
+ }
+ for {
+ sym <- referencedSymbols
+ if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
+ } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+ }
+
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
+ tree match {
+ case _: Return =>
+ path.collectFirst {
+ case dd: DefDef => dd
+ } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) =>
+          if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}) does not reference directly enclosing DefDef (${dd.symbol})")
+ }
+ case _ =>
+ }
+ }
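For context, a user-level reminder of why a Return node carries a method symbol at all: the return below exits `first` rather than the anonymous function it syntactically sits in, and that method symbol is what the check above compares against the directly enclosing DefDef. The example is illustrative only.

    object NonLocalReturnSketch extends App {
      def first(xs: List[Int]): Int = {
        xs.foreach(x => if (x > 0) return x) // exits `first`, not the closure
        -1
      }
      println(first(List(-3, 0, 7, 2))) // 7
    }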
}
object postcheck extends Traverser {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 553583e6b7..026c130a87 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -453,12 +453,12 @@ trait Typers extends Modes with Adaptations with Tags {
def reenterValueParams(vparamss: List[List[ValDef]]) {
for (vparams <- vparamss)
for (vparam <- vparams)
- vparam.symbol = context.scope enter vparam.symbol
+ context.scope enter vparam.symbol
}
def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
for (tparam <- tparams) yield {
- tparam.symbol = context.scope enter tparam.symbol
+ context.scope enter tparam.symbol
tparam.symbol.deSkolemize
}
@@ -872,7 +872,9 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
+ // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
}
else
@@ -1052,15 +1054,21 @@ trait Typers extends Modes with Adaptations with Tags {
def insertApply(): Tree = {
assert(!inHKMode(mode), modeString(mode)) //@M
- val qual = adaptToName(tree, nme.apply) match {
- case id @ Ident(_) =>
- val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
- else if (id.symbol.owner.isClass)
- context.enclosingSubClassContext(id.symbol.owner).prefix
- else NoPrefix
- stabilize(id, pre, EXPRmode | QUALmode, WildcardType)
- case sel @ Select(qualqual, _) =>
- stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType)
+ val adapted = adaptToName(tree, nme.apply)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+ // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
+ val qual = adapted match {
+ case This(_) =>
+ gen.stabilize(adapted)
+ case Ident(_) =>
+ val owner = adapted.symbol.owner
+ val pre =
+ if (owner.isPackageClass) owner.thisType
+ else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
+ else NoPrefix
+ stabilize0(pre)
+ case Select(qualqual, _) =>
+ stabilize0(qualqual.tpe)
case other =>
other
}
@@ -1071,8 +1079,8 @@ trait Typers extends Modes with Adaptations with Tags {
// begin adapt
tree.tpe match {
- case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
- adaptAnnotations(tree, mode, pt)
+ case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+ adaptAnnotations(tree, this, mode, pt)
case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
@@ -1176,8 +1184,8 @@ trait Typers extends Modes with Adaptations with Tags {
Select(tree, "to" + sym.name)
}
}
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
@@ -1452,7 +1460,7 @@ trait Typers extends Modes with Adaptations with Tags {
case DefDef(_, name, _, _, _, rhs) =>
if (stat.symbol.isAuxiliaryConstructor)
notAllowed("secondary constructor")
- else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_))
+ else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic)
notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.")
else if (stat.symbol != null && stat.symbol.isParamAccessor)
notAllowed("additional parameter")
@@ -1903,7 +1911,7 @@ trait Typers extends Modes with Adaptations with Tags {
})
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
-
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
// same owner. Until that can be straightened out we can't allow companion objects in package
@@ -1916,20 +1924,20 @@ trait Typers extends Modes with Adaptations with Tags {
// can't handle case classes in package objects
if (m.isCaseClass) pkgObjectRestriction(m, mdef, "case")
// can't handle companion class/object pairs in package objects
- else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
- (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
+ else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
+ (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
pkgObjectRestriction(m, mdef, "companion")
}
def pkgObjectRestriction(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
context.error(if (m.pos.isDefined) m.pos else mdef.pos, s"implementation restriction: package object ${pkgName} cannot contain ${restricted} ${m}. Instead, ${m} should be placed directly in package ${pkgName}.")
- }
+ }
}
if (!settings.companionsInPkgObjs.value && mdef.symbol.isPackageObject)
restrictPackageObjectMembers(mdef)
-
+
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
@@ -2058,21 +2066,28 @@ trait Typers extends Modes with Adaptations with Tags {
* @return ...
*/
def typedValDef(vdef: ValDef): ValDef = {
-// attributes(vdef)
+ val sym = vdef.symbol
+ val valDefTyper = {
+ val maybeConstrCtx =
+ if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
+ else context
+ newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
+ }
+ valDefTyper.typedValDefImpl(vdef)
+ }
+
+ // use typedValDef instead. this version is called after creating a new context for the ValDef
+ private def typedValDefImpl(vdef: ValDef) = {
val sym = vdef.symbol.initialize
- val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
val typedMods = typedModifiers(vdef.mods)
sym.annotations.map(_.completeInfo)
- var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
+ val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr)) {
- if (!sym.isMutable)
- VolatileValueError(vdef)
- else if (sym.isFinal)
- FinalVolatileVarError(vdef)
- }
+ if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
+ VolatileValueError(vdef)
+
val rhs1 =
if (vdef.rhs.isEmpty) {
if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper)
@@ -2095,7 +2110,7 @@ trait Typers extends Modes with Adaptations with Tags {
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
+ transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
@@ -2213,37 +2228,58 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
val meth = ddef.symbol
- def fail(pos: Position, msg: String) = unit.error(pos, msg)
- val tp: Type = meth.tpe match {
- case mt @ MethodType(_, _) => mt
- case NullaryMethodType(restpe) => restpe // TODO_NMT: drop NullaryMethodType from resultType?
- case PolyType(_, restpe) => restpe
- case _ => NoType
- }
- def nthParamPos(n: Int) = ddef.vparamss match {
- case xs :: _ if xs.length > n => xs(n).pos
- case _ => meth.pos
- }
- def failStruct(pos: Position, what: String, where: String = "Parameter") =
- fail(pos, s"$where type in structural refinement may not refer to $what")
-
- foreachWithIndex(tp.paramTypes) { (paramType, idx) =>
- val sym = paramType.typeSymbol
- def paramPos = nthParamPos(idx)
-
- if (sym.isAbstractType) {
- if (!sym.hasTransOwner(meth.owner))
- failStruct(paramPos, "an abstract type defined outside that refinement")
- else if (!sym.hasTransOwner(meth))
- failStruct(paramPos, "a type member of that refinement")
+ def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match {
+ case Nil => ""
+ case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
+ }
+ def fail(pos: Position, msg: String): Boolean = {
+ unit.error(pos, msg)
+ false
+ }
+ /** Have to examine all parameters in all lists.
+ */
+ def paramssTypes(tp: Type): List[List[Type]] = tp match {
+ case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
+ case PolyType(_, restpe) => paramssTypes(restpe)
+ case _ => Nil
+ }
+ def resultType = meth.tpe.finalResultType
+ def nthParamPos(n1: Int, n2: Int) =
+ try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
+
+ def failStruct(pos: Position, what: String, where: String = "Parameter type") =
+ fail(pos, s"$where in structural refinement may not refer to $what")
+
+ foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) =>
+ foreachWithIndex(paramList) { (paramType, paramIdx) =>
+ val sym = paramType.typeSymbol
+ def paramPos = nthParamPos(listIdx, paramIdx)
+
+ /** Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
+ */
+ def checkAbstract(tp0: Type, what: String): Boolean = {
+ def check(sym: Symbol): Boolean = !sym.isAbstractType || {
+ log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""")
+ ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what))
+ || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what))
+ || checkAbstract(sym.info.bounds.hi, "Type bound")
+ )
+ }
+ tp0.dealiasWidenChain forall (t => check(t.typeSymbol))
+ }
+ checkAbstract(paramType, "Parameter type")
+
+ if (sym.isDerivedValueClass)
+ failStruct(paramPos, "a user-defined value class")
+ if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
+ failStruct(paramPos, "the type of that refinement (self type)")
}
- if (sym.isDerivedValueClass)
- failStruct(paramPos, "a user-defined value class")
- if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- failStruct(paramPos, "the type of that refinement (self type)")
}
- if (tp.resultType.typeSymbol.isDerivedValueClass)
- failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result")
+ if (resultType.typeSymbol.isDerivedValueClass)
+ failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
def typedUseCase(useCase: UseCase) {
@@ -2373,13 +2409,12 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeDef(tdef: TypeDef): TypeDef =
- typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){
- _.typedTypeDef0(tdef)
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
+ _.typedTypeDefImpl(tdef)
}
- // call typedTypeDef instead
- // a TypeDef with type parameters must always be type checked in a new scope
- private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ // use typedTypeDef instead. this version is called after creating a new context for the TypeDef
+ private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
@@ -4434,8 +4469,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (typed(expr).tpe.typeSymbol != UnitClass)
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
- treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- .setType(adaptTypeOfReturn(expr1, restpt.tpe, NothingClass.tpe))
+ val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
+ val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ res.setType(tp)
}
}
}
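The hunk above replaces the old adaptTypeOfReturn hook with pluginsTypedReturn when assigning a type to an explicit return. A minimal sketch, assuming the AnalyzerPlugin trait and the analyzer.addAnalyzerPlugin registration point introduced by this change; ReturnObserver and its body are purely illustrative, not from this patch:

    import scala.tools.nsc.Global

    class ReturnObserver(val global: Global) {     // illustrative wrapper around a Global
      import global._

      object returnHook extends analyzer.AnalyzerPlugin {
        // tpe arrives as NothingClass.tpe (see the call above); pt is the enclosing
        // method's declared result type (restpt.tpe)
        override def pluginsTypedReturn(tpe: Type, typer: analyzer.Typer, tree: Return, pt: Type): Type =
          tpe                                      // returning tpe unchanged keeps the default behaviour
      }
      global.analyzer.addAnalyzerPlugin(returnHook)
    }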
@@ -5339,10 +5375,14 @@ trait Typers extends Modes with Adaptations with Tags {
typed(docdef.definition, mode, pt)
}
+ /**
+ * The typer with the correct context for a method definition. If the method is a default getter for
+ * a constructor default, the resulting typer has a constructor context (fixes SI-5543).
+ */
def defDefTyper(ddef: DefDef) = {
- val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
- newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(flag)
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
}
def typedAlternative(alt: Alternative) = {
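For the SI-5543 case mentioned in the comment above, a minimal illustration (hypothetical code, not from this patch): the default getter for a constructor default lives in the companion module, yet must be typed as if inside the constructor, which is what constrTyperIf(isConstrDefaultGetter) arranges.

    class Account(val limit: Int = 100)
    // The compiler synthesizes, roughly, a default getter in the companion module:
    //   object Account { def <init>$default$1: Int = 100 }
    // defDefTyper gives that getter a constructor context when typing its body.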
@@ -5629,20 +5669,21 @@ trait Typers extends Modes with Adaptations with Tags {
lastTreeToTyper = tree
indentTyping()
- var alreadyTyped = false
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
+
val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
+ (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- alreadyTyped = tree.tpe ne null
+ val alreadyTyped = tree.tpe ne null
var tree1: Tree = if (alreadyTyped) tree else {
printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
@@ -5651,7 +5692,7 @@ trait Typers extends Modes with Adaptations with Tags {
"context.owner" -> context.owner
)
)
- typed1(tree, mode, dropExistential(pt))
+ typed1(tree, mode, dropExistential(ptPlugins))
}
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
@@ -5665,12 +5706,12 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, ptPlugins, tree)
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
) //DEBUG
}
if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -5685,7 +5726,7 @@ trait Typers extends Modes with Adaptations with Tags {
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+pt)
+ Console.println("exception when typing "+tree+", pt = "+ptPlugins)
if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
throw ex
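The pluginsPt and pluginsTyped calls threaded through typed above are the entry points of the new AnalyzerPlugins infrastructure. A sketch of how a compiler plugin might hook into them, assuming the AnalyzerPlugin trait, analyzer.addAnalyzerPlugin as the registration point, and an Int-typed mode parameter in this Modes-era API; TypingObserver and its comments are illustrative, not from this patch:

    import scala.tools.nsc.Global
    import scala.tools.nsc.plugins.{ Plugin, PluginComponent }

    class TypingObserver(val global: Global) extends Plugin {
      import global._

      val name        = "typing-observer"                  // illustrative plugin name
      val description = "prints expected and inferred types"
      val components: List[PluginComponent] = Nil          // no extra compiler phases needed

      object hooks extends analyzer.AnalyzerPlugin {
        // mode is assumed to be an Int here, matching the Modes-based Typers above.
        // Called where typed() computes ptPlugins: a plugin may rewrite the expected type.
        override def pluginsPt(pt: Type, typer: analyzer.Typer, tree: Tree, mode: Int): Type = pt

        // Called after typed1: a plugin may refine the type assigned to tree1.
        override def pluginsTyped(tpe: Type, typer: analyzer.Typer, tree: Tree, mode: Int, pt: Type): Type = {
          if (settings.debug.value)
            Console.println("typed " + tree + ": " + tpe + " (pt = " + pt + ")")
          tpe
        }
      }
      global.analyzer.addAnalyzerPlugin(hooks)
    }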
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 3e4e0f49d7..577aa087ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -113,7 +113,7 @@ trait Unapplies extends ast.TreeDSL
def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
+ Modifiers(OVERRIDE | FINAL | SYNTHETIC),
nme.toString_,
Nil,
ListOfNil,
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 7d97b0c782..ea436a71fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -67,6 +67,8 @@ trait Variances {
def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
VARIANCES
+ case BoundedWildcardType(bounds) =>
+ varianceInType(bounds)(tparam)
case SingleType(pre, sym) =>
varianceInType(pre)(tparam)
case TypeRef(pre, sym, args) =>