-rw-r--r--  src/compiler/scala/reflect/internal/SymbolTable.scala                          |  2
-rw-r--r--  src/compiler/scala/reflect/internal/TreeInfo.scala                             | 58
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala                                | 17
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala  | 88
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala             |  6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala                   | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala                          | 64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala                       |  4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala                  | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala                          | 50
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Variances.scala                       |  6
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala                                   | 14
12 files changed, 191 insertions, 140 deletions
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index a7592cc81b..5702a13940 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -34,7 +34,7 @@ abstract class SymbolTable extends api.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
- def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.println(msg)
+ def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
/** Are we compiling for Java SE? */
// def forJVM: Boolean
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index 154772eecd..0f3e7648eb 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -102,6 +102,35 @@ abstract class TreeInfo {
false
}
+ def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = {
+ val plen = params.length
+ val alen = args.length
+ def fail() = {
+ global.debugwarn(
+ "Mismatch trying to zip method parameters and argument list:\n" +
+ " params = " + params + "\n" +
+ " args = " + args + "\n"
+ )
+ params zip args
+ }
+
+ if (plen == alen) params zip args
+ else if (params.isEmpty) fail
+ else if (isVarArgsList(params)) {
+ val plenInit = plen - 1
+ if (alen == plenInit) {
+ if (alen == 0) Nil // avoid calling mismatched zip
+ else params.init zip args
+ }
+ else if (alen < plenInit) fail
+ else {
+ val front = params.init zip (args take plenInit)
+ val back = args drop plenInit map (a => (params.last, a))
+ front ++ back
+ }
+ }
+ else fail
+ }
/**
* Selects the correct parameter list when there are nested applications.
@@ -124,34 +153,7 @@ abstract class TreeInfo {
else if (fn.symbol.paramss.isEmpty) Nil
else fn.symbol.paramss.last
)
- val plen = params.length
- val alen = args.length
- def fail() = {
- global.debugwarn(
- "Mismatch trying to zip method parameters and argument list:\n" +
- " params = " + params + "\n" +
- " args = " + args + "\n" +
- " tree = " + t
- )
- params zip args
- }
-
- if (plen == alen) params zip args
- else if (params.isEmpty) fail
- else if (isVarArgsList(params)) {
- val plenInit = plen - 1
- if (alen == plenInit) {
- if (alen == 0) Nil // avoid calling mismatched zip
- else params.init zip args
- }
- else if (alen < plenInit) fail
- else {
- val front = params.init zip (args take plenInit)
- val back = args drop plenInit map (a => (params.last, a))
- front ++ back
- }
- }
- else fail
+ zipMethodParamsAndArgs(params, args)
case _ => Nil
}
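The zipMethodParamsAndArgs helper factored out above pairs a method's parameters with the arguments at a call site, repeating the last parameter for the surplus arguments of a varargs list instead of letting a plain zip silently truncate. A minimal standalone sketch of the same pairing rule, with strings standing in for the compiler's Symbol and Tree lists and an explicit isVarArgs flag replacing isVarArgsList, might look like this (on genuine mismatches it falls back to a plain zip, as fail() does above, but without the debugwarn):

    object ZipParamsDemo {
      // Pair parameters with arguments; when the parameter list is a varargs
      // list, every surplus argument is paired with the last (repeated) parameter.
      def zipParamsAndArgs[A, B](params: List[A], args: List[B], isVarArgs: Boolean): List[(A, B)] =
        if (params.length == args.length) params zip args
        else if (params.isEmpty || !isVarArgs) params zip args
        else {
          val fixed = params.init
          if (args.length < fixed.length) params zip args
          else {
            val front = fixed zip (args take fixed.length)                 // fixed parameters
            val back  = args drop fixed.length map (a => (params.last, a)) // repeated vararg parameter
            front ++ back
          }
        }

      def main(args: Array[String]): Unit = {
        // f(x: Int, ys: Int*) applied as f(1, 2, 3, 4)
        println(zipParamsAndArgs(List("x", "ys"), List("1", "2", "3", "4"), isVarArgs = true))
        // List((x,1), (ys,2), (ys,3), (ys,4))
      }
    }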
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index ea576f0387..811dc36bdc 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -1606,12 +1606,15 @@ trait Types extends api.Types { self: SymbolTable =>
val enterRefs = new TypeMap {
def apply(tp: Type): Type = {
tp match {
- case TypeRef(_, sym, args) =>
- for ((tparam1, arg) <- sym.info.typeParams zip args)
- if (arg contains tparam) {
- addRef(NonExpansive, tparam, tparam1)
- if (arg.typeSymbol != tparam) addRef(Expansive, tparam, tparam1)
- }
+ case TypeRef(_, sym, args) if args.nonEmpty =>
+ if (settings.debug.value && !sameLength(sym.info.typeParams, args))
+ debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+
+ for ((tparam1, arg) <- sym.info.typeParams zip args; if arg contains tparam) {
+ addRef(NonExpansive, tparam, tparam1)
+ if (arg.typeSymbol != tparam)
+ addRef(Expansive, tparam, tparam1)
+ }
case _ =>
}
mapOver(tp)
@@ -1903,6 +1906,8 @@ A type's typeSymbol should never be inspected directly.
/** @pre: sym.info.typeParams.length == typeArgs.length */
@inline private def betaReduce: Type = {
+ if (settings.debug.value)
+ assert(sym.info.typeParams.length == typeArgs.length, sym.info.typeParams + " and " + typeArgs)
// isHKSubType0 introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
// asSeenFrom. asSeenFrom then skips synthetic type params, which
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index d9e0243c16..eac714f999 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -26,52 +26,49 @@ abstract class ReachingDefinitions {
*/
object rdefLattice extends SemiLattice {
type Definition = (Local, BasicBlock, Int)
- type Elem = IState[Set[Definition], Stack]
- type StackPos = Set[(BasicBlock, Int)]
- type Stack = List[StackPos]
+ type Elem = IState[ListSet[Definition], Stack]
+ type StackPos = ListSet[(BasicBlock, Int)]
+ type Stack = List[StackPos]
private def referenceEqualSet(name: String) = new ListSet[Definition] with ReferenceEquality {
override def toString = "<" + name + ">"
}
- val top: Elem = IState(referenceEqualSet("top"), Nil)
+ val top: Elem = IState(referenceEqualSet("top"), Nil)
val bottom: Elem = IState(referenceEqualSet("bottom"), Nil)
/** The least upper bound is set inclusion for locals, and pairwise set inclusion for stacks. */
- def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem =
+ def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem = {
if (bottom == a) b
else if (bottom == b) a
- else {
- val locals = a.vars ++ b.vars
- val stack =
- if (a.stack.isEmpty) b.stack
- else if (b.stack.isEmpty) a.stack
- else (a.stack, b.stack).zipped map (_ ++ _)
-
- IState(locals, stack)
-
- // val res = IState(locals, stack)
- // Console.println("\tlub2: " + a + ", " + b)
- // Console.println("\tis: " + res)
- // if (res._1 eq bottom._1) (new ListSet[Definition], Nil)
- // else res
- // res
- }
+ else IState(a.vars ++ b.vars,
+ if (a.stack.isEmpty) b.stack
+ else if (b.stack.isEmpty) a.stack
+ else {
+ // !!! These stacks are with some frequency not of the same size.
+ // I can't reverse engineer the logic well enough to say whether this
+ // indicates a problem. Even if it doesn't indicate a problem,
+ // it'd be nice not to call zip with mismatched sequences because
+ // it makes it harder to spot the real problems.
+ val result = (a.stack, b.stack).zipped map (_ ++ _)
+ if (settings.debug.value && (a.stack.length != b.stack.length))
+ debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result)
+ result
+ }
+ )
+ }
}
class ReachingDefinitionsAnalysis extends DataFlowAnalysis[rdefLattice.type] {
type P = BasicBlock
val lattice = rdefLattice
- import lattice.Definition
- import lattice.Stack
- import lattice.Elem
-
+ import lattice.{ Definition, Stack, Elem, StackPos }
var method: IMethod = _
- val gen: mutable.Map[BasicBlock, Set[Definition]] = new mutable.HashMap()
- val kill: mutable.Map[BasicBlock, Set[Local]] = new mutable.HashMap()
- val drops: mutable.Map[BasicBlock, Int] = new mutable.HashMap()
- val outStack: mutable.Map[BasicBlock, Stack] = new mutable.HashMap()
+ val gen = mutable.Map[BasicBlock, ListSet[Definition]]()
+ val kill = mutable.Map[BasicBlock, ListSet[Local]]()
+ val drops = mutable.Map[BasicBlock, Int]()
+ val outStack = mutable.Map[BasicBlock, Stack]()
def init(m: IMethod) {
this.method = m
@@ -95,16 +92,16 @@ abstract class ReachingDefinitions {
out(b) = lattice.bottom
}
m.exh foreach { e =>
- in(e.startBlock) = lattice.IState(new ListSet[Definition], List(new ListSet[(BasicBlock, Int)]))
+ in(e.startBlock) = lattice.IState(new ListSet[Definition], List(new StackPos))
}
}
}
import opcodes._
- def genAndKill(b: BasicBlock): (Set[Definition], Set[Local]) = {
- var genSet: Set[Definition] = new immutable.HashSet
- var killSet: Set[Local] = new immutable.HashSet
+ def genAndKill(b: BasicBlock): (ListSet[Definition], ListSet[Local]) = {
+ var genSet = ListSet[Definition]()
+ var killSet = ListSet[Local]()
for ((i, idx) <- b.toList.zipWithIndex) i match {
case STORE_LOCAL(local) =>
killSet = killSet + local
@@ -114,10 +111,9 @@ abstract class ReachingDefinitions {
(genSet, killSet)
}
- private def dropsAndGen(b: BasicBlock): (Int, List[Set[(BasicBlock, Int)]]) = {
- var depth = 0
- var drops = 0
- var stackOut: List[Set[(BasicBlock, Int)]] = Nil
+ private def dropsAndGen(b: BasicBlock): (Int, Stack) = {
+ var depth, drops = 0
+ var stackOut: Stack = Nil
for ((instr, idx) <- b.toList.zipWithIndex) {
instr match {
@@ -131,10 +127,10 @@ abstract class ReachingDefinitions {
depth -= instr.consumed
}
var prod = instr.produced
- depth = depth + prod
+ depth += prod
while (prod > 0) {
- stackOut = collection.immutable.Set((b, idx)) :: stackOut
- prod = prod - 1
+ stackOut ::= ListSet((b, idx))
+ prod -= 1
}
}
// Console.println("drops(" + b + ") = " + drops)
@@ -156,14 +152,14 @@ abstract class ReachingDefinitions {
import opcodes._
import lattice.IState
- def updateReachingDefinition(b: BasicBlock, idx: Int, rd: Set[Definition]): Set[Definition] = {
+ def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
val STORE_LOCAL(local) = b(idx)
var tmp = local
(rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
}
private def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- var locals: Set[Definition] = (in.vars filter { case (l, _, _) => !kill(b)(l) }) ++ gen(b)
+ var locals: ListSet[Definition] = (in.vars filter { case (l, _, _) => !kill(b)(l) }) ++ gen(b)
if (locals eq lattice.bottom.vars) locals = new ListSet[Definition]
IState(locals, outStack(b) ::: in.stack.drop(drops(b)))
}
@@ -172,7 +168,8 @@ abstract class ReachingDefinitions {
def interpret(b: BasicBlock, idx: Int, in: lattice.Elem): Elem = {
var locals = in.vars
var stack = in.stack
- val instr = b(idx)
+ val instr = b(idx)
+
instr match {
case STORE_LOCAL(l1) =>
locals = updateReachingDefinition(b, idx, locals)
@@ -185,7 +182,7 @@ abstract class ReachingDefinitions {
var prod = instr.produced
while (prod > 0) {
- stack = collection.immutable.Set((b, idx)) :: stack
+ stack ::= ListSet((b, idx))
prod -= 1
}
@@ -197,7 +194,8 @@ abstract class ReachingDefinitions {
* value found below the topmost element of the stack.
*/
def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
- assert(bb.closed)
+ assert(bb.closed, bb)
+
var instrs = bb.getArray
var res: List[(BasicBlock, Int)] = Nil
var i = idx
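The rewritten lub2 above keeps the pairwise set union for the two abstract stacks but, under -Ydebug, now reports when they differ in length, since the zipped map silently drops the tail of the longer one. A self-contained sketch of that join, with Set[Int] positions standing in for ListSet[(BasicBlock, Int)] and a plain stderr println in place of debugwarn:

    object StackLubDemo {
      type StackPos = Set[Int]       // stand-in for ListSet[(BasicBlock, Int)]
      type Stack    = List[StackPos]

      // Pairwise union of two abstract stacks; a length mismatch is reported
      // and then resolved by truncating to the shorter stack, exactly as zip does.
      def lubStacks(a: Stack, b: Stack, debug: Boolean): Stack =
        if (a.isEmpty) b
        else if (b.isEmpty) a
        else {
          val result = (a zip b) map { case (x, y) => x ++ y }
          if (debug && a.length != b.length)
            Console.err.println("Mismatched stacks: " + a + ", " + b + ", returning " + result)
          result
        }

      def main(args: Array[String]): Unit = {
        val s1: Stack = List(Set(1), Set(2, 3))
        val s2: Stack = List(Set(4), Set(3), Set(5))
        println(lubStacks(s1, s2, debug = true))   // List(Set(1, 4), Set(2, 3)), plus a warning
      }
    }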
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 311a3b916a..64df3b4636 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -74,8 +74,8 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.code ne null) {
log("dead code elimination on " + m);
- dropOf.clear
- m.code.blocks.clear
+ dropOf.clear()
+ m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
collectRDef(m)
@@ -91,7 +91,7 @@ abstract class DeadCodeElimination extends SubComponent {
/** collect reaching definitions and initial useful instructions for this method. */
def collectRDef(m: IMethod): Unit = if (m.code ne null) {
- defs = immutable.HashMap.empty; worklist.clear; useful.clear;
+ defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
rdef.init(m);
rdef.run;
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 6bd1bf9ad4..14bfa65335 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -336,6 +336,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
Set(sym)
else if (sym == ArrayClass)
specializedTypeVars(args)
+ else if (args.isEmpty)
+ Set()
else
specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if isSpecialized(tp) => arg })
@@ -948,8 +950,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env
}
- private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv =
- tp1.zip(tp2).foldLeft(env) { (env, args) =>
+ private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv = {
+ if (tp1.isEmpty || tp2.isEmpty) env
+ else (tp1 zip tp2).foldLeft(env) { (env, args) =>
if (!strict) unify(args._1, args._2, env, strict)
else {
val nenv = unify(args._1, args._2, emptyEnv, strict)
@@ -960,11 +963,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
+ }
/** Map class symbols to the type environments where they were created. */
- val typeEnv: mutable.Map[Symbol, TypeEnv] = new mutable.HashMap[Symbol, TypeEnv] {
- override def default(key: Symbol) = emptyEnv
- }
+ private val typeEnv = mutable.HashMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
/** Apply type bindings in the given environment `env` to all declarations. */
private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
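The typeEnv change above swaps an anonymous HashMap subclass that overrides default for the equivalent withDefaultValue wrapper: looking up a missing key yields emptyEnv without ever storing it. The same trade-off with ordinary types (strings and lists replace the Symbol/TypeEnv pairing purely for illustration):

    import scala.collection.mutable

    object DefaultValueDemo {
      def main(args: Array[String]): Unit = {
        // Before: override default on an anonymous subclass.
        val m1 = new mutable.HashMap[String, List[Int]] {
          override def default(key: String) = Nil
        }
        // After: the same lookup behaviour via withDefaultValue.
        val m2 = mutable.HashMap[String, List[Int]]() withDefaultValue Nil

        m2("seen") = List(1, 2)
        println(m1("missing"))          // List() -- from the overridden default
        println(m2("missing"))          // List() -- from the default value
        println(m2 contains "missing")  // false: defaults are never stored in the map
      }
    }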
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index bec5cef57a..6ce76d03c8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -112,9 +112,6 @@ trait Namers { self: Analyzer =>
sym
}
- private def isCopyGetter(meth: Symbol) = {
- meth.name startsWith (nme.copy + nme.DEFAULT_GETTER_STRING)
- }
private def isTemplateContext(context: Context): Boolean = context.tree match {
case Template(_, _, _) => true
case Import(_, _) => isTemplateContext(context.outer)
@@ -385,37 +382,66 @@ trait Namers { self: Analyzer =>
private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
val sym = tree.symbol
+ def isCopyMethodOrGetter =
+ sym.name == nme.copy || sym.name.startsWith(nme.copy + nme.DEFAULT_GETTER_STRING)
+ def useCompleter = sym.isSynthetic && (
+ !sym.hasDefaultFlag
+ || sym.owner.info.member(nme.copy).isSynthetic
+ )
+
debuglog("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.## )
var ltype = namerOf(sym).typeCompleter(tree)
if (tparams nonEmpty) {
//@M! TypeDef's type params are handled differently
//@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
//@M x is only in scope in `A[x <: B]'
- if(!sym.isAbstractType) //@M TODO: change to isTypeMember ?
+ if (!sym.isAbstractType) //@M TODO: change to isTypeMember ?
newNamer(context.makeNewScope(tree, sym)).enterSyms(tparams)
ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
if (sym.isTerm) skolemize(tparams)
}
+ def copyMethodCompleter(clazz: Symbol) = {
+ // the 'copy' method of case classes needs a special type
+ // completer to make bug0054.scala (and others) work. the copy
+ // method has to take exactly the same parameter types as the
+ // primary constructor.
+ val classTypeParams = clazz.typeParams
+ val constrType = clazz.primaryConstructor.tpe
+ val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val cparamss = constrType.paramss
- if (sym.name == nme.copy || isCopyGetter(sym)) {
- // it could be a compiler-generated copy method or one of its default getters
- setInfo(sym)(mkTypeCompleter(tree)(copySym => {
- def copyIsSynthetic() = sym.owner.info.member(nme.copy).isSynthetic
- if (sym.isSynthetic && (!sym.hasDefaultFlag || copyIsSynthetic())) {
- // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
- // work. the copy method has to take exactly the same parameter types as the primary constructor.
- val constrType = copySym.owner.primaryConstructor.tpe
- val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
- for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
- (param, cparam) <- params.zip(cparams)) {
+ tree match {
+ case DefDef(_, _, _, ps :: psRest, _, _) =>
+ val cs :: csRest = cparamss
+ for ((param, cparam) <- ps zip cs)
// need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
- param.tpt.setType(subst(cparam.tpe))
+ param.tpt setType subst(cparam.tpe)
+
+ if (psRest.isEmpty && csRest.isEmpty) ()
+ else if (psRest.isEmpty || csRest.isEmpty)
+ debuglog("Skipping mismatched extra param lists: " + psRest + ", " + csRest)
+ else {
+ for ((vparams, cparams) <- psRest zip csRest)
+ for ((param, cparam) <- vparams zip cparams)
+ param.tpt setType subst(cparam.tpe)
}
+ case _ => ()
+ }
+ }
+
+ // it could be a compiler-generated copy method or one of its default getters
+ setInfo(sym)(
+ if (isCopyMethodOrGetter) (
+ mkTypeCompleter(tree) { copySym =>
+ if (useCompleter)
+ copyMethodCompleter(copySym.owner)
+
+ ltype complete sym
}
- ltype.complete(sym)
- }))
- } else setInfo(sym)(ltype)
+ )
+ else ltype
+ )
}
def enterIfNotThere(sym: Symbol) {
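For context on what this completer is shaping: in Scala 2, the synthetic copy method of a case class mirrors the primary constructor's parameter lists, each parameter defaulting to the corresponding field, and carries its own type parameters, which is why the completer substitutes the class's type parameters with the method's before copying the constructor's parameter types. The observable behaviour on an ordinary case class (nothing here is compiler-internal):

    case class Box[A](value: A, tag: String)

    object CopyDemo {
      def main(args: Array[String]): Unit = {
        val b = Box(42, "answer")
        // Defaults come from the existing fields, so any subset of parameters
        // can be supplied by name.
        println(b.copy(tag = "renamed"))      // Box(42,renamed)
        // Because copy has its own type parameters, it can even change the
        // element type when the relevant argument is given (Scala 2 behaviour).
        println(b.copy(value = "forty-two"))  // Box(forty-two,answer)
      }
    }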
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 68cc6db2fe..e2fb367d20 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -844,9 +844,11 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
validateVariance(pre, variance)
- validateVarianceArgs(args, variance, sym.typeParams) //@M for higher-kinded typeref, args.isEmpty
+ // @M for higher-kinded typeref, args.isEmpty
// However, these args respect variances by construction anyway
// -- the interesting case is in type application, see checkKindBounds in Infer
+ if (args.nonEmpty)
+ validateVarianceArgs(args, variance, sym.typeParams)
case ClassInfoType(parents, decls, symbol) =>
validateVariances(parents, variance)
case RefinedType(parents, decls) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 35709ac40c..db6d1445a9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -41,13 +41,13 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
private def accDefBuf(clazz: Symbol) =
accDefs collectFirst { case (`clazz`, buf) => buf } getOrElse sys.error("no acc def buf for "+clazz)
- private def transformArgs(args: List[Tree], params: List[Symbol]) =
- ((args, params).zipped map { (arg, param) =>
+ private def transformArgs(params: List[Symbol], args: List[Tree]) = {
+ treeInfo.zipMethodParamsAndArgs(params, args) map { case (param, arg) =>
if (isByNameParamType(param.tpe))
withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
else transform(arg)
- }) :::
- (args drop params.length map transform)
+ }
+ }
private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
@@ -232,7 +232,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case Apply(fn, args) =>
assert(fn.tpe != null, tree)
- treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.params))
+ treeCopy.Apply(tree, transform(fn), transformArgs(fn.tpe.params, args))
case Function(vparams, body) =>
withInvalidOwner {
treeCopy.Function(tree, vparams, transform(body))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index c08519f005..2ae867f990 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1194,49 +1194,47 @@ trait Typers extends Modes with Adaptations {
treeInfo.firstConstructor(templ.body) match {
case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
// Convert constructor body to block in environment and typecheck it
- val (preSuperStats, rest) = cstats span (!treeInfo.isSuperConstrCall(_))
- val (scall, upToSuperStats) =
- if (rest.isEmpty) (EmptyTree, preSuperStats)
- else (rest.head, preSuperStats :+ rest.head)
- val cstats1: List[Tree] = upToSuperStats map (_.duplicate)
- val cbody1 = scall match {
- case Apply(_, _) =>
- treeCopy.Block(cbody, cstats1.init,
- if (supertparams.isEmpty) cunit.duplicate
- else transformSuperCall(scall))
- case _ =>
- treeCopy.Block(cbody, cstats1, cunit.duplicate)
+ val (preSuperStats, superCall) = {
+ val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
+ (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
}
-
+ val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
+ case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
+ case _ => cunit.duplicate
+ })
val outercontext = context.outer
+
assert(clazz != NoSymbol)
val cscope = outercontext.makeNewScope(constr, outercontext.owner)
val cbody2 = newTyper(cscope) // called both during completion AND typing.
.typePrimaryConstrBody(clazz,
cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
- if (cbody2.containsError()) {
- val allErrors = errorTreesFinder(cbody2)
- pending = allErrors.toList:::pending
- }
+ if (cbody2.containsError())
+ pending = errorTreesFinder(cbody2).toList ::: pending
- scall match {
+ superCall match {
case Apply(_, _) =>
- val sarg = treeInfo.firstArgument(scall)
+ val sarg = treeInfo.firstArgument(superCall)
if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
- pending = ConstrArgsInTraitParentTpeError(sarg, firstParent)::pending
- if (!supertparams.isEmpty) supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ pending ::= ConstrArgsInTraitParentTpeError(sarg, firstParent)
+ if (!supertparams.isEmpty)
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
case _ =>
if (!supertparams.isEmpty)
- pending = MissingTypeArgumentsParentTpeError(supertpt)::pending
+ pending ::= MissingTypeArgumentsParentTpeError(supertpt)
}
- (cstats1, treeInfo.preSuperFields(templ.body)).zipped map {
- (ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe
- }
+ val preSuperVals = treeInfo.preSuperFields(templ.body)
+ if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
+ debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ else
+ (preSuperStats, preSuperVals).zipped map { case (ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe }
+
case _ =>
if (!supertparams.isEmpty)
- pending = MissingTypeArgumentsParentTpeError(supertpt)::pending
+ pending ::= MissingTypeArgumentsParentTpeError(supertpt)
}
/* experimental: early types as type arguments
val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
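The constructor-body rework above boils the old scall/upToSuperStats bookkeeping down to a single span over the statements: everything before the first super constructor call becomes preSuperStats, and the head of the remainder, if any, is the super call. The same split on a plain list, with a dummy predicate in place of treeInfo.isSuperConstrCall and Option in place of EmptyTree:

    object SpanSplitDemo {
      // Stand-in for treeInfo.isSuperConstrCall on real trees.
      def isSuperCall(stat: String): Boolean = stat startsWith "super("

      // Split a constructor body into pre-super statements and the optional
      // super call, mirroring the (preSuperStats, superCall) pair built above.
      def splitConstrBody(cstats: List[String]): (List[String], Option[String]) = {
        val (stats, rest) = cstats span (s => !isSuperCall(s))
        (stats, rest.headOption)
      }

      def main(args: Array[String]): Unit = {
        println(splitConstrBody(List("val x = 1", "super(x)", "println(x)")))
        // (List(val x = 1),Some(super(x)))
        println(splitConstrBody(List("val x = 1")))
        // (List(val x = 1),None) -- no explicit super call
      }
    }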
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index d2c27d6a0d..f68f9cc140 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -71,7 +71,11 @@ trait Variances {
varianceInType(pre)(tparam)
case TypeRef(pre, sym, args) =>
if (sym == tparam) COVARIANT
- else varianceInType(pre)(tparam) & varianceInArgs(args, sym.typeParams)(tparam)
+ else varianceInType(pre)(tparam) & {
+ // @PP to @AM: please give this a higher dose of correctness.
+ val actualArgs = if (args.isEmpty) sym.typeParams map (_.typeConstructor) else args
+ varianceInArgs(actualArgs, sym.typeParams)(tparam)
+ }
case TypeBounds(lo, hi) =>
flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
case RefinedType(parents, defs) =>
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 3c7aff5b60..a56055e210 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -346,4 +346,18 @@ object ScalaRunTime {
nl + s + "\n"
}
+ private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
+ if (sys.props contains "scala.debug.zip") {
+ val xs = coll1.toIndexedSeq
+ val ys = coll2.toIndexedSeq
+ if (xs.length != ys.length) {
+ Console.err.println(
+ "Mismatched zip in " + what + ":\n" +
+ " this: " + xs.mkString(", ") + "\n" +
+ " that: " + ys.mkString(", ")
+ )
+ (new Exception).getStackTrace.drop(2).take(10).foreach(println)
+ }
+ }
+ }
}
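The new checkZip is an opt-in diagnostic: it does nothing unless the scala.debug.zip system property is set, and even then only prints when the two collections differ in length. A hypothetical caller, using a simplified standalone copy of the check so the example runs without a patched ScalaRunTime (the property name is the one introduced above; everything else is illustrative):

    object CheckZipDemo {
      // Simplified stand-in for scala.runtime.ScalaRunTime.checkZip.
      def checkZip(what: String, xs: Seq[Any], ys: Seq[Any]): Unit =
        if ((sys.props contains "scala.debug.zip") && xs.length != ys.length)
          Console.err.println(
            "Mismatched zip in " + what + ":\n" +
            "  this: " + xs.mkString(", ") + "\n" +
            "  that: " + ys.mkString(", ")
          )

      def main(args: Array[String]): Unit = {
        sys.props("scala.debug.zip") = ""              // enable reporting for this demo
        val params = List("x", "y")
        val values = List(1, 2, 3)
        checkZip("CheckZipDemo.main", params, values)  // reports 2 vs 3 elements
        println(params zip values)                     // List((x,1), (y,2)) -- silently truncated
      }
    }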