author     Paul Phillips <paulp@improving.org>  2013-01-29 13:16:23 -0800
committer  Paul Phillips <paulp@improving.org>  2013-01-29 13:24:40 -0800
commit     039b1cb1a5b8738bb3731035838d2fcaeb317d07 (patch)
tree       dc623fd163c0789431b4bf2ec0f8551c88f58acb /src
parent     0388a7cdb111f0dd6b86bc838ffe51de3df28b4c (diff)
download   scala-039b1cb1a5b8738bb3731035838d2fcaeb317d07.tar.gz
           scala-039b1cb1a5b8738bb3731035838d2fcaeb317d07.tar.bz2
           scala-039b1cb1a5b8738bb3731035838d2fcaeb317d07.zip
Changes many calls to normalize to dealiasWiden.
Calling normalize is very aggressive and is usually the wrong thing. It is one of the leading contributors to non-determinism in compiler outcomes (often of the form "I gave a debugging or logging compiler option and it started/stopped working") and should be used only in very specific circumstances. Almost without exception, dealiasWiden is what you want; not widen, not normalize. If possible I will remove normalize from Type entirely, making it private to those areas of the compiler which actually require it.
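For illustration only (not part of this patch): a minimal sketch of the distinction, written against the public runtime-reflection API of later Scala releases. dealiasWiden itself is compiler-internal (scala.reflect.internal.Types), so the helper below approximates it with the public widen and dealias; the DealiasWidenDemo object, the Ages alias and the ages val are hypothetical names invented for the example.

import scala.reflect.runtime.universe._

object DealiasWidenDemo {
  type Ages = List[Int]           // a plain type alias
  val ages: Ages = List(1, 2, 3)  // a stable val, so ages.type is a singleton type

  // Approximation of the compiler-internal dealiasWiden: alternately widen
  // singleton types and expand aliases until neither step changes the type.
  // Unlike normalize, it does not eta-expand higher-kinded type references or
  // otherwise simplify the type; it only strips singletons and aliases.
  def dealiasWiden(tp: Type): Type = {
    val widened = tp.widen
    if (widened ne tp) dealiasWiden(widened)
    else {
      val dealiased = tp.dealias
      if (dealiased ne tp) dealiasWiden(dealiased)
      else tp
    }
  }

  def main(args: Array[String]): Unit = {
    val singleton = typeOf[ages.type]
    println(singleton)               // DealiasWidenDemo.ages.type
    println(singleton.widen)         // DealiasWidenDemo.Ages (widened, but still an alias)
    println(dealiasWiden(singleton)) // List[Int] (widened and fully dealiased)
  }
}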
Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/reflect/reify/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/Changes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 14
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 10
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Erasure.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/transform/UnCurry.scala | 10
19 files changed, 51 insertions, 43 deletions
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 7be57c0cb7..78f85c2634 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -56,7 +56,7 @@ package object reify {
if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
}
- tpe.normalize match {
+ tpe.dealiasWiden match {
case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
gen.mkMethodCall(arrayClassMethod, List(componentErasure))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 84f5fe2678..a32b00f385 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -368,10 +368,10 @@ trait TypeKinds { self: ICodes =>
/** Return the TypeKind of the given type
*
- * Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+ * Call to dealiasWiden fixes #3003 (follow type aliases). Otherwise,
* arrayOrClassType below would return ObjectReference.
*/
- def toTypeKind(t: Type): TypeKind = t.normalize match {
+ def toTypeKind(t: Type): TypeKind = t.dealiasWiden match {
case ThisType(ArrayClass) => ObjectReference
case ThisType(sym) => REFERENCE(sym)
case SingleType(_, sym) => primitiveOrRefType(sym)
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index b3cacee20a..7807f0ba03 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -90,11 +90,11 @@ abstract class Changes {
} else
!sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
+ // @M! normalize reduces higher-kinded case to PolyType's
testSymbols && sameType(pre1, pre2) &&
(sym1.variance == sym2.variance) &&
((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
sameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
index c5bb8494ce..d24ad60974 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -75,7 +75,7 @@ trait CompletionOutput {
}
def methodString() =
- method.keyString + " " + method.nameString + (method.info.normalize match {
+ method.keyString + " " + method.nameString + (method.info.dealiasWiden match {
case NullaryMethodType(resType) => ": " + typeToString(resType)
case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
case mt @ MethodType(_, _) => methodTypeToString(mt)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 0d50282000..f380b9d04f 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -101,7 +101,7 @@ abstract class Erasure extends AddInterfaces
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
val prepareSigMap = new TypeMap {
- def squashBoxed(tp: Type): Type = tp.normalize match {
+ def squashBoxed(tp: Type): Type = tp.dealiasWiden match {
case t @ RefinedType(parents, decls) =>
val parents1 = parents mapConserve squashBoxed
if (parents1 eq parents) tp
@@ -114,7 +114,7 @@ abstract class Erasure extends AddInterfaces
if (boxedClass contains t.typeSymbol) ObjectClass.tpe
else tp
}
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case tp1 @ TypeBounds(lo, hi) =>
val lo1 = squashBoxed(apply(lo))
val hi1 = squashBoxed(apply(hi))
@@ -145,7 +145,7 @@ abstract class Erasure extends AddInterfaces
}
case tp1 @ MethodType(params, restpe) =>
val params1 = mapOver(params)
- val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+ val restpe1 = if (restpe.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
if ((params1 eq params) && (restpe1 eq restpe)) tp1
else MethodType(params1, restpe1)
case tp1 @ RefinedType(parents, decls) =>
@@ -163,8 +163,8 @@ abstract class Erasure extends AddInterfaces
}
}
- private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
- case RefinedType(parents, _) => parents map (_.normalize)
+ private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match {
+ case RefinedType(parents, _) => parents map (_.dealiasWiden)
case tp => tp :: Nil
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 343f95782e..0cd7f516ef 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -424,7 +424,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType)
- specializedTypeVars(tpe.normalize)
+ specializedTypeVars(tpe.dealiasWiden)
else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized))
Set(sym)
else if (sym == ArrayClass)
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 7bbbcdf541..4e4513dcef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -153,7 +153,7 @@ trait ContextErrors {
// members present, then display along with the expected members. This is done here because
// this is the last point where we still have access to the original tree, rather than just
// the found/req types.
- val foundType: Type = req.normalize match {
+ val foundType: Type = req.dealiasWiden match {
case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
val retyped = typed (tree.duplicate.clearType())
val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index bcf9910c5a..d32930f4f2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -347,7 +347,7 @@ trait Implicits {
* if one or both are intersection types with a pair of overlapping parent types.
*/
private def dominates(dtor: Type, dted: Type): Boolean = {
- def core(tp: Type): Type = tp.normalize match {
+ def core(tp: Type): Type = tp.dealiasWiden match {
case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner)
case AnnotatedType(annots, tp, selfsym) => core(tp)
case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
@@ -362,11 +362,11 @@ trait Implicits {
deriveTypeWithWildcards(syms.distinct)(tp)
}
def sum(xs: List[Int]) = (0 /: xs)(_ + _)
- def complexity(tp: Type): Int = tp.normalize match {
+ def complexity(tp: Type): Int = tp.dealiasWiden match {
case NoPrefix =>
0
case SingleType(pre, sym) =>
- if (sym.isPackage) 0 else complexity(tp.normalize.widen)
+ if (sym.isPackage) 0 else complexity(tp.dealiasWiden)
case TypeRef(pre, sym, args) =>
complexity(pre) + sum(args map complexity) + 1
case RefinedType(parents, _) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 27bdad3066..6d5eff460f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -477,7 +477,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
**/
// must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(patBinder.info.widen, pt)).normalize
+ val next = glb(List(patBinder.info.dealiasWiden, pt)).normalize
// a typed pattern never has any subtrees
noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index fd3b020b1a..285e1cb7af 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -68,7 +68,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
)
- def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match {
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
rtp1 <:< rtp2
case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
@@ -472,12 +472,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// check a type alias's RHS corresponds to its declaration
// this overlaps somewhat with validateVariance
if(member.isAliasType) {
- // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
- val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
+ // println("checkKindBounds" + ((List(member), List(memberTp.dealiasWiden), self, member.owner)))
+ val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.dealiasWiden), self, member.owner)
if(!kindErrors.isEmpty)
unit.error(member.pos,
- "The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
+ "The kind of the right-hand side "+memberTp.dealiasWiden+" of "+member.keyString+" "+
member.varianceString + member.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
} else if (member.isAbstractType) {
@@ -496,7 +496,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (member.isStable && !otherTp.isVolatile) {
if (memberTp.isVolatile)
overrideError("has a volatile type; cannot override a member with non-volatile type")
- else memberTp.normalize.resultType match {
+ else memberTp.dealiasWiden.resultType match {
case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
// might mask some inconsistencies -- check overrides
checkedCombinations += rt.parents
@@ -1298,7 +1298,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// if the unnormalized type is accessible, that's good enough
if (inaccessible.isEmpty) ()
// or if the normalized type is, that's good too
- else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) ()
+ else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) ()
// otherwise warn about the inaccessible syms in the unnormalized type
else inaccessible foreach (sym => warnLessAccessible(sym, member))
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index ab1751b4f0..af484a47e2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -213,7 +213,7 @@ trait TypeDiagnostics {
// force measures than comparing normalized Strings were producing error messages
// like "and java.util.ArrayList[String] <: java.util.ArrayList[String]" but there
// should be a cleaner way to do this.
- if (found.normalize.toString == tp.normalize.toString) ""
+ if (found.dealiasWiden.toString == tp.dealiasWiden.toString) ""
else " (and %s <: %s)".format(found, tp)
)
val explainDef = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8fbc143fbf..51a31f6fc7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -2437,7 +2437,7 @@ trait Typers extends Adaptations with Tags {
// but not in real life (i.e., now that's we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
if (isFullyDefined(pt) && !(body1.tpe <:< pt))
- body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize))
+ body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
}
@@ -2507,7 +2507,7 @@ trait Typers extends Adaptations with Tags {
*/
def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = {
assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
- val targs = pt.normalize.typeArgs
+ val targs = pt.dealiasWiden.typeArgs
// if targs.head isn't fully defined, we can translate --> error
targs match {
@@ -2665,10 +2665,10 @@ trait Typers extends Adaptations with Tags {
def decompose(pt: Type): (Symbol, List[Type], Type) =
if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
- ( pt.normalize.typeArgs.length - 1 == numVparams
+ ( pt.dealiasWiden.typeArgs.length - 1 == numVparams
|| fun.vparams.exists(_.tpt.isEmpty)
))
- (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
+ (pt.typeSymbol, pt.dealiasWiden.typeArgs.init, pt.dealiasWiden.typeArgs.last)
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
@@ -3316,7 +3316,7 @@ trait Typers extends Adaptations with Tags {
if (fun1.tpe.isErroneous) duplErrTree
else {
- val resTp = fun1.tpe.finalResultType.normalize
+ val resTp = fun1.tpe.finalResultType.dealiasWiden
val nbSubPats = args.length
val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol)
@@ -3364,7 +3364,7 @@ trait Typers extends Adaptations with Tags {
def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
// only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
// but at least make a proper type before passing it elsewhere
- val pt1 = pt.dealias match {
+ val pt1 = pt.dealiasWiden match {
case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
case pt1 => pt1
}
@@ -4209,7 +4209,7 @@ trait Typers extends Adaptations with Tags {
if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
else {
- val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+ val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
ValDef(Modifiers(PARAM | SYNTHETIC),
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 4924e056af..29480576ea 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -61,7 +61,7 @@ trait CPSUtils {
// annotation checker
protected def annTypes(ann: AnnotationInfo): (Type, Type) = {
- val tp0 :: tp1 :: Nil = ann.atp.normalize.typeArgs
+ val tp0 :: tp1 :: Nil = ann.atp.dealiasWiden.typeArgs
((tp0, tp1))
}
protected def hasMinusMarker(tpe: Type) = tpe hasAnnotation MarkerCPSAdaptMinus
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 12fb77dab1..1b0bbe5a06 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -626,7 +626,7 @@ trait Definitions extends api.StandardDefinitions {
len <= MaxTupleArity && sym == TupleClass(len)
case _ => false
}
- def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize)
+ def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
@@ -648,8 +648,8 @@ trait Definitions extends api.StandardDefinitions {
case _ => tp
}
- def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
- case RefinedType(p :: _, _) => p.normalize
+ def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
@@ -657,7 +657,7 @@ trait Definitions extends api.StandardDefinitions {
if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
else NoType
- def isFunctionType(tp: Type): Boolean = tp.normalize match {
+ def isFunctionType(tp: Type): Boolean = tp.dealiasWiden match {
case TypeRef(_, sym, args) if args.nonEmpty =>
val arity = args.length - 1 // -1 is the return type
arity <= MaxFunctionArity && sym == FunctionClass(arity)
@@ -1145,7 +1145,7 @@ trait Definitions extends api.StandardDefinitions {
else if (sym.isTopLevel) sym.javaClassName
else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName
def signature1(etp: Type): String = {
- if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
+ if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.dealiasWiden.typeArgs.head))
else if (isPrimitiveValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString()
else "L" + flatNameString(etp.typeSymbol, '/') + ";"
}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 0969d9e3fa..d9fafd25ae 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -844,7 +844,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this class or type defined as a structural refinement type?
*/
final def isStructuralRefinement: Boolean =
- (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
+ (isClass || isType || isModule) && info.dealiasWiden/*.underlying*/.isStructuralRefinement
/** Is this a term symbol only defined in a refinement (so that it needs
* to be accessed by reflection)?
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 54a85dee86..b2269e476f 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -212,7 +212,7 @@ abstract class TreeGen extends macros.TreeBuilder {
mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree)
private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = {
- val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil)
+ val tapp = mkAttributedTypeApply(value, what, tpe.dealias :: Nil)
if (wrapInApply) Apply(tapp, Nil) else tapp
}
private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 1bb3fd300b..0125722ca2 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -2456,7 +2456,7 @@ trait Types extends api.Types { self: SymbolTable =>
case RepeatedParamClass => args.head + "*"
case ByNameParamClass => "=> " + args.head
case _ =>
- def targs = normalize.typeArgs
+ def targs = dealiasWiden.typeArgs
if (isFunctionType(this)) {
// Aesthetics: printing Function1 as T => R rather than (T) => R
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 5581c78a3a..abf380ac44 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -16,7 +16,7 @@ trait Erasure {
/** Is `tp` an unbounded generic type (i.e. which could be instantiated
* with primitive as well as class types)?.
*/
- private def genericCore(tp: Type): Type = tp.normalize match {
+ private def genericCore(tp: Type): Type = tp.dealiasWiden match {
/* A Java Array<T> is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is
* erased to Object. However, there is only symbol for the Array class. So to make the distinction between
* a Java and a Scala array, we check if the owner of T comes from a Java class.
@@ -36,7 +36,7 @@ trait Erasure {
* then Some((N, T)) where N is the number of Array constructors enclosing `T`,
* otherwise None. Existentials on any level are ignored.
*/
- def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
+ def unapply(tp: Type): Option[(Int, Type)] = tp.dealiasWiden match {
case TypeRef(_, ArrayClass, List(arg)) =>
genericCore(arg) match {
case NoType =>
@@ -101,7 +101,7 @@ trait Erasure {
def valueClassIsParametric(clazz: Symbol): Boolean = {
assert(!phase.erasedTypes)
clazz.typeParams contains
- clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol
+ clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol
}
abstract class ErasureMap extends TypeMap {
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 6dc6a0f7b8..32d3171b26 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -10,6 +10,14 @@ trait UnCurry {
import global._
import definitions._
+ /** Note: changing tp.normalize to tp.dealias in this method leads to a single
+ * test failure: run/t5688.scala, where instead of the expected output
+ * Vector(ta, tb, tab)
+ * we instead get
+ * Vector(tab, tb, tab)
+ * I think that difference is not the product of sentience but of randomness.
+ * Let us figure out why it is and then change this method.
+ */
private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
val uncurry: TypeMap = new TypeMap {
@@ -60,4 +68,4 @@ trait UnCurry {
*/
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isType) uncurryType(tp) else uncurry(tp)
-}
\ No newline at end of file
+}