Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala            11
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala       28
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala             14
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala               27
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala      11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala              70
-rw-r--r--  src/compiler/scala/tools/nsc/util/TreeSet.scala                    20
-rw-r--r--  src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala    77
-rw-r--r--  src/reflect/scala/reflect/internal/Scopes.scala                    48
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala                   4
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala                      5
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala                      9
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala         259
-rw-r--r--  src/reflect/scala/reflect/internal/transform/UnCurry.scala           9
14 files changed, 312 insertions, 280 deletions
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 22482bf1b6..250feb69bf 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -104,8 +104,15 @@ abstract class SymbolLoaders {
val clazz = enterClass(root, name, completer)
val module = enterModule(root, name, completer)
if (!clazz.isAnonymousClass) {
- assert(clazz.companionModule == module, module)
- assert(module.companionClass == clazz, clazz)
+ // Diagnostic for SI-7147
+ def msg: String = {
+ def symLocation(sym: Symbol) = if (sym == null) "null" else s"${sym.fullLocationString} (from ${sym.associatedFile})"
+ sm"""Inconsistent class/module symbol pair for `$name` loaded from ${symLocation(root)}.
+ |clazz = ${symLocation(clazz)}; clazz.companionModule = ${clazz.companionModule}
+ |module = ${symLocation(module)}; module.companionClass = ${module.companionClass}"""
+ }
+ assert(clazz.companionModule == module, msg)
+ assert(module.companionClass == clazz, msg)
}
}
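Note that the diagnostic above is wrapped in a `def msg`, so the string is only built if an assertion actually fails: `assert` takes its message argument by name. A minimal standalone sketch of that pattern (toy code, not part of the commit):

import scala.Predef.assert

object LazyAssertMessageSketch {
  def main(args: Array[String]): Unit = {
    var built = 0
    def msg: String = { built += 1; "diagnostic only built on failure" }

    assert(2 + 2 == 4, msg)            // passes: msg is never evaluated
    println(built)                     // 0

    try assert(2 + 2 == 5, msg)        // fails: msg is evaluated exactly once
    catch { case _: AssertionError => () }
    println(built)                     // 1
  }
}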
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index ba19eb1035..8e008edde2 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -79,7 +79,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
/** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
+ val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol]
/** Map a method symbol to a list of its specialized overloads in the same class. */
private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil
@@ -1752,21 +1752,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Create specialized class definitions */
def implSpecClasses(trees: List[Tree]): List[Tree] = {
- val buf = new mutable.ListBuffer[Tree]
- for (tree <- trees)
- tree match {
- case ClassDef(_, _, _, impl) =>
- tree.symbol.info // force specialization
- for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
- val parents = specCls.info.parents.map(TypeTree)
- buf +=
- ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
- .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
- debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
- }
- case _ =>
- }
- buf.toList
+ trees flatMap {
+ case tree @ ClassDef(_, _, _, impl) =>
+ tree.symbol.info // force specialization
+ for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
+ debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
+ val parents = specCls.info.parents.map(TypeTree)
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
+ }
+ case _ => Nil
+ } sortBy (_.name.decoded)
}
}
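The rewrite above collects the synthesized ClassDefs with flatMap and then sorts them by decoded name, which keeps the emitted order deterministic now that specializedClass is a per-run cache. A toy sketch of the same collect-and-sort shape (the Tree/ClassDef types and the $sp names below are stand-ins, not the compiler's):

object FlatMapSortSketch {
  sealed trait Tree
  final case class ClassDef(name: String, specialized: List[String]) extends Tree
  final case class Other(name: String) extends Tree

  // Collect zero or more synthetic names per ClassDef, nothing for other trees,
  // then sort so the result order is deterministic (mirrors sortBy(_.name.decoded)).
  def implSpecClasses(trees: List[Tree]): List[String] =
    trees.flatMap {
      case ClassDef(_, specialized) => specialized
      case _                        => Nil
    }.sorted

  def main(args: Array[String]): Unit =
    println(implSpecClasses(List(ClassDef("C", List("C$mcI$sp", "C$mcD$sp")), Other("x"))))
}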
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 92ed7fc555..313f968e93 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -217,7 +217,10 @@ abstract class TailCalls extends Transform {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
accessed += ctx.label
- typedPos(fun.pos)(Apply(Ident(ctx.label), noTailTransform(recv) :: transformArgs))
+ typedPos(fun.pos) {
+ val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
+ Apply(Ident(ctx.label), noTailTransform(recv) :: args)
+ }
}
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
@@ -276,7 +279,7 @@ abstract class TailCalls extends Transform {
typedPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
- LabelDef(newCtx.label, newThis :: vpSyms, newRHS)
+ LabelDef(newCtx.label, newThis :: vpSyms, mkAttributedCastHack(newRHS, newCtx.label.tpe.resultType))
))
}
else {
@@ -373,6 +376,13 @@ abstract class TailCalls extends Transform {
super.transform(tree)
}
}
+
+ // Workaround for SI-6900. Uncurry installs an InfoTransformer and a tree Transformer.
+ // These leave us with a conflicting view of method signatures: the parameter symbols in
+ // the MethodType can be clones of the ones originally found on the parameter ValDefs, and
+ // hence of the symbols appearing in the typechecked RHS of the method.
+ private def mkAttributedCastHack(tree: Tree, tpe: Type) =
+ gen.mkAttributedCast(tree, tpe)
}
// collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
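The casts inserted by mkAttributedCastHack matter when a tail-recursive method has dependent parameter types, where the uncurried signature and the typechecked body can disagree as described in the comment above. A hedged source-level sketch of the kind of method affected (illustrative only; the exact SI-6900 reproduction may differ):

import scala.annotation.tailrec

object DependentTailRecSketch {
  trait Box { type T; def value: T }

  // A tail-recursive method whose second parameter list depends on the first:
  // after uncurry the lists are merged and the dependent types are erased, which
  // is why the label application above needs casts to the label's parameter types.
  @tailrec def repeat(b: Box)(x: b.T, n: Int): b.T =
    if (n == 0) x else repeat(b)(x, n - 1)

  def main(args: Array[String]): Unit = {
    val b = new Box { type T = String; def value = "hi" }
    println(repeat(b)(b.value, 3))
  }
}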
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 8c1d0a76d0..2f5cb23abb 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -632,7 +632,8 @@ abstract class UnCurry extends InfoTransform
*
* This transformation erases the dependent method types by:
* - Widening the formal parameter type to existentially abstract
- * over the prior parameters (using `packSymbols`)
+ * over the prior parameters (using `packSymbols`). This transformation
+ * is performed in the `InfoTransform`er [[scala.reflect.internal.transform.UnCurry]].
* - Inserting casts in the method body to cast to the original,
* precise type.
*
@@ -660,15 +661,14 @@ abstract class UnCurry extends InfoTransform
*/
def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
import dd.{ vparamss, rhs }
- val vparamSyms = vparamss flatMap (_ map (_.symbol))
-
val paramTransforms: List[ParamTransform] =
- vparamss.flatten.map { p =>
- val declaredType = p.symbol.info
- // existentially abstract over value parameters
- val packedType = typer.packSymbols(vparamSyms, declaredType)
- if (packedType =:= declaredType) Identity(p)
+ map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) =>
+ val packedType = infoParam.info
+ if (packedType =:= p.symbol.info) Identity(p)
else {
+ // The Uncurry info transformer existentially abstracted over value parameters
+ // from the previous parameter lists.
+
// Change the type of the param symbol
p.symbol updateInfo packedType
@@ -680,8 +680,8 @@ abstract class UnCurry extends InfoTransform
// the method body to refer to this, rather than the parameter.
val tempVal: ValDef = {
val tempValName = unit freshTermName (p.name + "$")
- val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
- atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info)))
}
Packed(newParam, tempVal)
}
@@ -699,13 +699,6 @@ abstract class UnCurry extends InfoTransform
Block(tempVals, rhsSubstituted)
}
- // update the type of the method after uncurry.
- dd.symbol updateInfo {
- val GenPolyType(tparams, tp) = dd.symbol.info
- logResult(s"erased dependent param types for ${dd.symbol.info}") {
- GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
- }
- }
(allParams :: Nil, rhs1)
}
}
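A rough source-level picture of the erasure described in the doc comment above, written as ordinary Scala; the real transform works on symbols and trees, and the widened parameter type is an existential rather than the Any used here for brevity:

object DependentParamErasureSketch {
  trait A { type B; def mk(): B }

  // Before uncurry, a dependent method type:
  //   def use(a: A)(b: a.B): a.B = b
  // After the erasure described above, the parameter lists are merged and the
  // signature no longer depends on `a` (the widened type is really an existential;
  // Any keeps the sketch short), while a synthetic temp val casts back to the
  // precise type inside the body:
  def use(a: A, b: Any): Any = {
    val bPrecise = b.asInstanceOf[a.B]   // the inserted cast back to the dependent type
    bPrecise
  }

  def main(args: Array[String]): Unit = {
    val a = new A { type B = Int; def mk() = 1 }
    println(use(a, a.mk()))
  }
}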
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index b4270ea322..e22dc73b53 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -256,9 +256,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- // direct calls to aliases of param accessors to the superclass in order to avoid
+
+ def isAccessibleFromSuper(sym: Symbol) = {
+ val pre = SuperType(sym.owner.tpe, qual.tpe)
+ localTyper.context.isAccessible(sym, pre, superAccess = true)
+ }
+
+ // Make direct calls to aliases of param accessors in the superclass in order to avoid
// duplicating fields.
- if (sym.isParamAccessor && sym.alias != NoSymbol) {
+ // ... but only if the alias is accessible (SI-6793).
+ if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) {
val result = (localTyper.typedPos(tree.pos) {
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
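A hedged sketch of the shape of code the new accessibility guard is about, assuming a qualified-private parameter accessor in the superclass that the subclass cannot reach via super (illustrative; the exact SI-6793 reproduction may differ):

package p1 {
  // The parameter accessor `x` is only accessible within package p1.
  class Parent(private[p1] val x: Int)
}

package p2 {
  // Child's constructor parameter is passed straight through to Parent, so its
  // param accessor can get Parent's `x` as an alias; that alias is not accessible
  // from p2, which is the situation isAccessibleFromSuper guards against.
  class Child(x: Int) extends p1.Parent(x) {
    def show: Int = x
  }
}

object Si6793Sketch {
  def main(args: Array[String]): Unit =
    println(new p2.Child(42).show)
}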
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 0be7192471..692c24fd20 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -3678,77 +3678,9 @@ trait Typers extends Adaptations with Tags {
})
}
- def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
- sym.isTypeParameter && sym.owner.isJavaDefined
-
- /** If we map a set of hidden symbols to their existential bounds, we
- * have a problem: the bounds may themselves contain references to the
- * hidden symbols. So this recursively calls existentialBound until
- * the typeSymbol is not amongst the symbols being hidden.
- */
- def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
- def safeBound(t: Type): Type =
- if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
-
- def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
- case tp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve safeBound
- if (parents eq parents1) tp
- else copyRefinedType(tp, parents1, decls)
- case tp => tp
- }
-
- // Hanging onto lower bound in case anything interesting
- // happens with it.
- mapFrom(hidden)(s => s.existentialBound match {
- case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
- case _ => hiBound(s)
- })
- }
-
- /** Given a set `rawSyms` of term- and type-symbols, and a type
- * `tp`, produce a set of fresh type parameters and a type so that
- * it can be abstracted to an existential type. Every type symbol
- * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
- * type `T` in `rawSyms` is given an associated type symbol of the
- * following form:
- *
- * type x.type <: T with Singleton
- *
- * The name of the type parameter is `x.type`, to produce nice
- * diagnostics. The Singleton parent ensures that the type
- * parameter is still seen as a stable type. Type symbols in
- * rawSyms are fully replaced by the new symbols. Term symbols are
- * also replaced, except for term symbols of an Ident tree, where
- * only the type of the Ident is changed.
- */
- protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = {
- val allBounds = existentialBoundsExcludingHidden(rawSyms)
- val typeParams: List[Symbol] = rawSyms map { sym =>
- val name = sym.name match {
- case x: TypeName => x
- case x => tpnme.singletonName(x)
- }
- val bound = allBounds(sym)
- val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified = sowner.newExistential(name, sym.pos)
-
- quantified setInfo bound.cloneInfo(quantified)
- }
- // Higher-kinded existentials are not yet supported, but this is
- // tpeHK for when they are: "if a type constructor is expected/allowed,
- // tpeHK must be called instead of tpe."
- val typeParamTypes = typeParams map (_.tpeHK)
- def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
-
- creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
- }
-
/** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
- def packSymbols(hidden: List[Symbol], tp: Type): Type =
- if (hidden.isEmpty) tp
- else existentialTransform(hidden, tp)(existentialAbstraction)
+ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
ctx.owner.isTerm &&
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index 3cdbcc5110..d2e9238e8f 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -40,12 +40,22 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
tree = add(tree)
}
- def iterator = {
- def elems(t: Tree): Iterator[T] = {
- if (t eq null) Iterator.empty
- else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
+ def iterator = toList.iterator
+
+ override def foreach[U](f: T => U) {
+ def loop(t: Tree) {
+ if (t ne null) {
+ loop(t.l)
+ f(t.elem)
+ loop(t.r)
+ }
}
- elems(tree)
+ loop(tree)
+ }
+ override def toList = {
+ val xs = scala.collection.mutable.ListBuffer[T]()
+ foreach(xs += _)
+ xs.toList
}
override def toString(): String = {
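The new foreach walks the tree in order and toList accumulates into a ListBuffer, instead of building one concatenated Iterator per node as before. A standalone sketch of that traversal shape with a toy node type (not the compiler's TreeSet):

import scala.collection.mutable.ListBuffer

object InOrderTraversalSketch {
  final case class Node[T](elem: T, l: Node[T], r: Node[T])

  // In-order traversal: left subtree, then the element, then the right subtree.
  def foreachInOrder[T, U](t: Node[T])(f: T => U): Unit =
    if (t ne null) {
      foreachInOrder(t.l)(f)
      f(t.elem)
      foreachInOrder(t.r)(f)
    }

  def toList[T](t: Node[T]): List[T] = {
    val xs = ListBuffer[T]()
    foreachInOrder(t)(xs += _)
    xs.toList
  }

  def main(args: Array[String]): Unit = {
    val tree = Node(2, Node(1, null, null), Node(3, null, null))
    println(toList(tree))   // List(1, 2, 3)
  }
}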
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 281a32caf6..34bd400186 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -31,4 +31,81 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
+
+ def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
+ sym.isTypeParameter && sym.owner.isJavaDefined
+
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ mapFrom(hidden)(s => s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }
+
+ /** Given a set `rawSyms` of term- and type-symbols, and a type
+ * `tp`, produce a set of fresh type parameters and a type so that
+ * it can be abstracted to an existential type. Every type symbol
+ * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
+ * type `T` in `rawSyms` is given an associated type symbol of the
+ * following form:
+ *
+ * type x.type <: T with Singleton
+ *
+ * The name of the type parameter is `x.type`, to produce nice
+ * diagnostics. The Singleton parent ensures that the type
+ * parameter is still seen as a stable type. Type symbols in
+ * rawSyms are fully replaced by the new symbols. Term symbols are
+ * also replaced, except for term symbols of an Ident tree, where
+ * only the type of the Ident is changed.
+ */
+ final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
+ val typeParams: List[Symbol] = rawSyms map { sym =>
+ val name = sym.name match {
+ case x: TypeName => x
+ case x => tpnme.singletonName(x)
+ }
+ def rawOwner0 = rawOwner.getOrElse(abort(s"no owner provided for existential transform over raw parameter: $sym"))
+ val bound = allBounds(sym)
+ val sowner = if (isRawParameter(sym)) rawOwner0 else sym.owner
+ val quantified = sowner.newExistential(name, sym.pos)
+
+ quantified setInfo bound.cloneInfo(quantified)
+ }
+ // Higher-kinded existentials are not yet supported, but this is
+ // tpeHK for when they are: "if a type constructor is expected/allowed,
+ // tpeHK must be called instead of tpe."
+ val typeParamTypes = typeParams map (_.tpeHK)
+ def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
+
+ creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
+ }
+
+ /**
+ * Compute an existential type from hidden symbols `hidden` and type `tp`.
+ * @param hidden The symbols that will be existentially abstracted
+ * @param tp The original type
+ * @param rawOwner The owner for Java raw types.
+ */
+ final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None): Type =
+ if (hidden.isEmpty) tp
+ else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction)
}
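At the source level, packSymbols is what the typer uses when a type mentions a local value that is about to go out of scope; each such term symbol becomes a `type x.type <: T with Singleton` existential as described above. A small sketch of the observable effect (the existential spelled out in the comment is approximate):

object PackSymbolsEffectSketch {
  class Ref[A](val value: A)

  // The block's type mentions the local value `s`, which goes out of scope at the
  // closing brace, so the typer packs `s` existentially; the block is typed roughly as
  //   Ref[x.type] forSome { val x: String }
  // i.e. a fresh `type x.type <: String with Singleton` parameter, as documented above.
  val packed = {
    val s: String = "abc"
    new Ref[s.type](s)
  }

  def main(args: Array[String]): Unit =
    println(packed.value.length)   // the skolemized existential still conforms to String
}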
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 850c497d4b..371eddbc4f 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -6,6 +6,8 @@
package scala.reflect
package internal
+import scala.annotation.tailrec
+
trait Scopes extends api.Scopes { self: SymbolTable =>
/** An ADT to represent the results of symbol name lookups.
@@ -65,6 +67,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** a cache for all elements, to be used by symbol iterator.
*/
private var elemsCache: List[Symbol] = null
+ private var cachedSize = -1
+ private def flushElemsCache() {
+ elemsCache = null
+ cachedSize = -1
+ }
/** size and mask of hash tables
* todo: make hashtables grow?
@@ -86,6 +93,12 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** the number of entries in this scope */
override def size: Int = {
+ if (cachedSize < 0)
+ cachedSize = directSize
+
+ cachedSize
+ }
+ private def directSize: Int = {
var s = 0
var e = elems
while (e ne null) {
@@ -98,7 +111,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** enter a scope entry
*/
protected def enterEntry(e: ScopeEntry) {
- elemsCache = null
+ flushElemsCache()
if (hashtable ne null)
enterInHash(e)
else if (size >= MIN_HASH)
@@ -192,7 +205,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
e1.tail = e.tail
}
}
- elemsCache = null
+ flushElemsCache()
}
/** remove symbol */
@@ -304,16 +317,43 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
e
}
+ /** TODO - we can test this more efficiently than checking isSubScope
+ * in both directions. However the size test might be enough to quickly
+ * rule out most failures.
+ */
+ def isSameScope(other: Scope) = (
+ (size == other.size) // optimization - size is cached
+ && (this isSubScope other)
+ && (other isSubScope this)
+ )
+
+ def isSubScope(other: Scope) = {
+ def scopeContainsSym(sym: Symbol): Boolean = {
+ @tailrec def entryContainsSym(e: ScopeEntry): Boolean = e match {
+ case null => false
+ case _ =>
+ val comparableInfo = sym.info.substThis(sym.owner, e.sym.owner)
+ (e.sym.info =:= comparableInfo) || entryContainsSym(lookupNextEntry(e))
+ }
+ entryContainsSym(this lookupEntry sym.name)
+ }
+ other.toList forall scopeContainsSym
+ }
+
/** Return all symbols as a list in the order they were entered in this scope.
*/
override def toList: List[Symbol] = {
if (elemsCache eq null) {
- elemsCache = Nil
+ var symbols: List[Symbol] = Nil
+ var count = 0
var e = elems
while ((e ne null) && e.owner == this) {
- elemsCache = e.sym :: elemsCache
+ count += 1
+ symbols ::= e.sym
e = e.next
}
+ elemsCache = symbols
+ cachedSize = count
}
elemsCache
}
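isSameScope leans on the newly cached size as a cheap pre-check before the two isSubScope passes. A minimal standalone sketch of the cache-and-flush pattern used for that size (toy class, not the compiler's Scope):

object CachedSizeSketch {
  final class Bag[T] {
    private var elems: List[T] = Nil
    private var cachedSize = -1                    // -1 means "not computed yet"
    private def flushCache(): Unit = cachedSize = -1

    def add(x: T): Unit = { elems ::= x; flushCache() }

    // Recompute only after the cache has been invalidated by a mutation.
    def size: Int = {
      if (cachedSize < 0) cachedSize = elems.length
      cachedSize
    }
  }

  def main(args: Array[String]): Unit = {
    val b = new Bag[Int]
    b.add(1); b.add(2)
    println(b.size)   // 2, computed once and reused until the next add
  }
}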
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 4fd86aa8b1..ae2cf09c2e 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -359,8 +359,8 @@ trait StdNames {
* be sure to retain the extra dollars.
*/
def unexpandedName(name: Name): Name = name lastIndexOf "$$" match {
- case -1 => name
- case idx0 =>
+ case 0 | -1 => name
+ case idx0 =>
// Sketchville - We've found $$ but if it's part of $$$ or $$$$
// or something we need to keep the bonus dollars, so e.g. foo$$$outer
// has an original name of $outer.
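The new `case 0 | -1` leaves a name untouched when its only `$$` is at the very start, instead of stripping it down past the expansion point. A String-based sketch that mirrors the match and approximates the remainder of the method, which is not shown in this hunk:

object UnexpandedNameSketch {
  // String stand-in for the Name-based code above; the loop body approximates the
  // part of the method that lies below the visible hunk.
  def unexpandedName(name: String): String = name.lastIndexOf("$$") match {
    case 0 | -1 => name                  // no "$$", or "$$" right at the start: leave as is
    case idx0 =>
      // Keep the bonus dollars, so foo$$$outer yields $outer.
      var idx = idx0
      while (idx > 0 && name.charAt(idx - 1) == '$') idx -= 1
      name.substring(idx + 2)
  }

  def main(args: Array[String]): Unit = {
    println(unexpandedName("foo$$$outer"))   // $outer
    println(unexpandedName("$$anonfun"))     // unchanged thanks to the new `case 0`
    println(unexpandedName("plain"))         // unchanged: no "$$" at all
  }
}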
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 410bc738e2..7467ccc6b9 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1491,6 +1491,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Substitute symbols in `from` with symbols in `to`. Returns a new
* tree using the new symbols and whose Ident and Select nodes are
* name-consistent with the new symbols.
+ *
+ * Note: This is currently a destructive operation on the original Tree.
+ * Trees currently assigned a symbol in `from` will be assigned the new symbols
+ * without copying, and trees that define symbols with an `info` that refer
+ * a symbol in `from` will have a new type assigned.
*/
class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 4b15d6ed78..25b05ae6b3 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3884,9 +3884,14 @@ trait Types
}
}
- def normalizePlus(tp: Type) =
+ def normalizePlus(tp: Type) = (
if (isRawType(tp)) rawToExistential(tp)
- else tp.normalize
+ else tp.normalize match {
+ // Unify the two representations of module classes
+ case st @ SingleType(_, sym) if sym.isModule => st.underlying.normalize
+ case _ => tp.normalize
+ }
+ )
/*
todo: change to:
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index d408857cf3..da8e64ea16 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -57,9 +57,12 @@ trait TypeComparers {
} else
false
- private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
- else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+ private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = (
+ if (sym1 == sym2)
+ sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ else
+ (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+ )
def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
@@ -126,7 +129,13 @@ trait TypeComparers {
tp2.typeSymbol.isPackageClass
else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
tp1.typeSymbol.isPackageClass
+ else if (tp1.isInstanceOf[AnnotatedType] || tp2.isInstanceOf[AnnotatedType])
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
else {
+ // We flush out any AnnotatedTypes before calling isSameType2 because
+ // unlike most other subclasses of Type, we have to allow for equivalence of any
+ // combination of { tp1, tp2 } { is, is not } an AnnotatedType; this is because the
+ // logic of "annotationsConform" is arbitrary and unknown.
isSameType2(tp1, tp2) || {
val tp1n = normalizePlus(tp1)
val tp2n = normalizePlus(tp2)
@@ -135,165 +144,99 @@ trait TypeComparers {
}
}
- def isSameType2(tp1: Type, tp2: Type): Boolean = {
- tp1 match {
- case tr1: TypeRef =>
- tp2 match {
- case tr2: TypeRef =>
- return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))) ||
- ((tr1.pre, tr2.pre) match {
- case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
- case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
- case _ => false
- })
- case _: SingleType =>
- return isSameType2(tp2, tp1) // put singleton type on the left, caught below
- case _ =>
- }
- case tt1: ThisType =>
- tp2 match {
- case tt2: ThisType =>
- if (tt1.sym == tt2.sym) return true
- case _ =>
- }
- case st1: SingleType =>
- tp2 match {
- case st2: SingleType =>
- if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
- case TypeRef(pre2, sym2, Nil) =>
- if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
- case _ =>
- }
- case ct1: ConstantType =>
- tp2 match {
- case ct2: ConstantType =>
- return (ct1.value == ct2.value)
- case _ =>
- }
- case rt1: RefinedType =>
- tp2 match {
- case rt2: RefinedType => //
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- return isSameTypes(rt1.parents, rt2.parents) && {
- val decls1 = rt1.decls
- val decls2 = rt2.decls
- isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
- }
- case _ =>
- }
- case mt1: MethodType =>
- tp2 match {
- case mt2: MethodType =>
- return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
- mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
- mt1.isImplicit == mt2.isImplicit
- // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
- case _ =>
- }
- case NullaryMethodType(restpe1) =>
- tp2 match {
- // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
- case NullaryMethodType(restpe2) =>
- return restpe1 =:= restpe2
- case _ =>
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
- // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- // @M looks like it might suffer from same problem as #2210
- return (
- (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- // @M looks like it might suffer from same problem as #2210
- return (
- // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
- sameLength(tparams1, tparams2) &&
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case TypeBounds(lo1, hi1) =>
- tp2 match {
- case TypeBounds(lo2, hi2) =>
- return lo1 =:= lo2 && hi1 =:= hi2
- case _ =>
- }
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp2
- case _ =>
- }
- tp2 match {
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp1
- case _ =>
- }
- tp1 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp2, typeVarLHS = true)
- case _ =>
- }
- tp2 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp1, typeVarLHS = false)
- case _ =>
+ private def isSameHKTypes(tp1: Type, tp2: Type) = (
+ tp1.isHigherKinded
+ && tp2.isHigherKinded
+ && (tp1.normalize =:= tp2.normalize)
+ )
+ private def isSameTypeRef(tr1: TypeRef, tr2: TypeRef) = (
+ equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre)
+ && (isSameHKTypes(tr1, tr2) || isSameTypes(tr1.args, tr2.args))
+ )
+
+ private def isSameSingletonType(tp1: SingletonType, tp2: SingletonType): Boolean = {
+ // We don't use dealiasWiden here because we are looking for the SAME type,
+ // and widening leads to a less specific type. The logic is along the lines of
+ // dealiasAndFollowUnderlyingAsLongAsTheTypeIsEquivalent. This method is only
+ // called after a surface comparison has failed, so if chaseDealiasedUnderlying
+ // does not produce a type other than tp1 and tp2, return false.
+ @tailrec def chaseDealiasedUnderlying(tp: Type): Type = tp.underlying.dealias match {
+ case next: SingletonType if tp ne next => chaseDealiasedUnderlying(next)
+ case _ => tp
}
- tp1 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
+ val origin1 = chaseDealiasedUnderlying(tp1)
+ val origin2 = chaseDealiasedUnderlying(tp2)
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ }
+
+ private def isSameMethodType(mt1: MethodType, mt2: MethodType) = (
+ isSameTypes(mt1.paramTypes, mt2.paramTypes)
+ && (mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params))
+ && (mt1.isImplicit == mt2.isImplicit)
+ )
+
+ private def equalTypeParamsAndResult(tparams1: List[Symbol], res1: Type, tparams2: List[Symbol], res2: Type) = {
+ def subst(info: Type) = info.substSym(tparams2, tparams1)
+ // corresponds does not check length of two sequences before checking the predicate,
+ // but SubstMap assumes it has been checked (SI-2956)
+ ( sameLength(tparams1, tparams2)
+ && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info))
+ && (res1 =:= subst(res2))
+ )
+ }
+
+ def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ /** Here we highlight those unfortunate type-like constructs which
+ * are hidden bundles of mutable state, cruising the type system picking
+ * up any type constraints naive enough to get into their hot rods.
+ */
+ def mutateNonTypeConstructs(lhs: Type, rhs: Type) = lhs match {
+ case BoundedWildcardType(bounds) => bounds containsType rhs
+ case tv @ TypeVar(_, _) => tv.registerTypeEquality(rhs, typeVarLHS = lhs eq tp1)
+ case TypeRef(tv @ TypeVar(_, _), sym, _) => tv.registerTypeSelection(sym, rhs)
+ case _ => false
}
- tp2 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
+ /* SingletonType receives this additional scrutiny because there are
+ * a variety of Types which must be treated as equivalent even if they
+ * arrive in different guises. For instance, object Foo in the following
+ * might appear in (at least) the four given below.
+ *
+ * package pkg { object Foo ; type Bar = Foo.type }
+ *
+ * ModuleClassTypeRef(pkg.type, Foo: ModuleClassSymbol, Nil)
+ * ThisType(Foo: ModuleClassSymbol)
+ * SingleType(pkg.type, Foo: ModuleSymbol)
+ * AliasTypeRef(NoPrefix, sym: AliasSymbol, Nil) where sym.info is one of the above
+ */
+ def sameSingletonType = tp1 match {
+ case tp1: SingletonType => tp2 match {
+ case tp2: SingletonType => isSameSingletonType(tp1, tp2)
+ case _ => false
+ }
+ case _ => false
}
- tp1 match {
- case _: SingletonType =>
- tp2 match {
- case _: SingletonType =>
- def chaseDealiasedUnderlying(tp: Type): Type = {
- var origin = tp
- var next = origin.underlying.dealias
- while (next.isInstanceOf[SingletonType]) {
- assert(origin ne next, origin)
- origin = next
- next = origin.underlying.dealias
- }
- origin
- }
- val origin1 = chaseDealiasedUnderlying(tp1)
- val origin2 = chaseDealiasedUnderlying(tp2)
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }
- case _ =>
- false
+ /** Those false cases certainly are ugly. There's a proposed SIP to deuglify it.
+ * https://docs.google.com/a/improving.org/document/d/1onPrzSqyDpHScc9PS_hpxJwa3FlPtthxw-bAuuEe8uA
+ */
+ def sameTypeAndSameCaseClass = tp1 match {
+ case tp1: TypeRef => tp2 match { case tp2: TypeRef => isSameTypeRef(tp1, tp2) ; case _ => false }
+ case tp1: MethodType => tp2 match { case tp2: MethodType => isSameMethodType(tp1, tp2) ; case _ => false }
+ case RefinedType(ps1, decls1) => tp2 match { case RefinedType(ps2, decls2) => isSameTypes(ps1, ps2) && (decls1 isSameScope decls2) ; case _ => false }
+ case SingleType(pre1, sym1) => tp2 match { case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1, pre1, sym2, pre2) ; case _ => false }
+ case PolyType(ps1, res1) => tp2 match { case PolyType(ps2, res2) => equalTypeParamsAndResult(ps1, res1, ps2, res2) ; case _ => false }
+ case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2) ; case _ => false }
+ case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 == sym2 ; case _ => false }
+ case ConstantType(c1) => tp2 match { case ConstantType(c2) => c1 == c2 ; case _ => false }
+ case NullaryMethodType(res1) => tp2 match { case NullaryMethodType(res2) => res1 =:= res2 ; case _ => false }
+ case TypeBounds(lo1, hi1) => tp2 match { case TypeBounds(lo2, hi2) => lo1 =:= lo2 && hi1 =:= hi2 ; case _ => false }
+ case _ => false
}
+
+ ( sameTypeAndSameCaseClass
+ || sameSingletonType
+ || mutateNonTypeConstructs(tp1, tp2)
+ || mutateNonTypeConstructs(tp2, tp1)
+ )
}
def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
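A compilable version of the `object Foo` example in the comment above: the different spellings must be =:= to one another even though their internal representations (ModuleClassTypeRef, ThisType, SingleType, AliasTypeRef) differ:

package pkg {
  object Foo { def id = 1 }
}

object SameSingletonSketch {
  type Bar = pkg.Foo.type   // an alias whose info is the singleton type of Foo

  def main(args: Array[String]): Unit = {
    val viaAlias: Bar = pkg.Foo
    val viaSingleton: pkg.Foo.type = viaAlias
    println(implicitly[pkg.Foo.type =:= Bar])   // compiles only if the two types are identical
    println(viaSingleton.id)
  }
}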
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 32d3171b26..1f7638a621 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -25,7 +25,14 @@ trait UnCurry {
val tp = expandAlias(tp0)
tp match {
case MethodType(params, MethodType(params1, restpe)) =>
- apply(MethodType(params ::: params1, restpe))
+ // This transformation is described in UnCurryTransformer.dependentParamTypeErasure
+ val packSymbolsMap = new TypeMap {
+ // Wrapping in a TypeMap to reuse the code that opts for a fast path if the function is an identity.
+ def apply(tp: Type): Type = packSymbols(params, tp)
+ }
+ val existentiallyAbstractedParam1s = packSymbolsMap.mapOver(params1)
+ val substitutedResult = restpe.substSym(params1, existentiallyAbstractedParam1s)
+ apply(MethodType(params ::: existentiallyAbstractedParam1s, substitutedResult))
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
abort("unexpected curried method types with intervening existential")
case MethodType(h :: t, restpe) if h.isImplicit =>
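The TypeMap wrapper above exists to reuse mapOver's identity fast path: if packSymbols changes nothing, the original parameter symbols are kept as-is. A simplified standalone sketch of that mapConserve-style idea (not the compiler's actual implementation, which also avoids building the intermediate list):

object MapConserveSketch {
  // Return the original list, reference-identical, when the function turns out to
  // be an identity on every element, so callers can detect "nothing changed" with eq.
  def mapConserve[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
    val mapped = xs.map(f)
    val unchanged = xs.zip(mapped).forall { case (a, b) => a eq b }
    if (unchanged) xs else mapped
  }

  def main(args: Array[String]): Unit = {
    val xs = List("a", "b", "c")
    println(mapConserve(xs)(identity) eq xs)          // true: fast path, same list back
    println(mapConserve(xs)(_.toUpperCase) eq xs)     // false: a new list was built
  }
}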