author     Paul Phillips <paulp@improving.org>           2013-06-15 14:28:23 -0400
committer  Simon Ochsenreither <simon@ochsenreither.de>  2013-07-17 22:28:14 +0200
commit     aeb733147881d8da68d1e520b14112dc826a3977 (patch)
tree       f5a934ae4eb223ba4db591a2d3fac3d9407759d9
parent     9761d2286848173b4cc90a0f28d9e6ea0f3248cf (diff)
Cleanups in type printing.
More consistency as to how to understand aliases, singletons, specialized symbols, subclasses. Fewer weird special casings, like normalizing tuples and functions during type printing, but nothing else. I avoid "normalize" entirely now, and do not make special cases for dealiasing, which is already well handled when printing error messages.

Look at the change to test/files/neg/t2641.check to get a sense of why we should resist calling normalize during the early days of a compilation run.

Anonymous and refinement classes can be printed far more usefully by revealing their parents, and that too is here.

Hardened toString against undesirable side effects. Make toString final to discourage any further rogue toString overriders. Make safeToString a little safer.
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala  63
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala         75
-rw-r--r--  test/files/neg/t1112.check                              2
-rw-r--r--  test/files/neg/t1432.check                              7
-rw-r--r--  test/files/neg/t1432.scala                              4
-rw-r--r--  test/files/run/t5256g.check                             4
6 files changed, 73 insertions, 82 deletions
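
The commit message above describes replacing normalize-based checks with "direct" checks plus an explicit dealiasWiden step. Below is a minimal standalone sketch of that pattern, purely illustrative: the TypeLike/Ref/Alias model and the predicate bodies are invented stand-ins, not compiler code; only the direct-versus-dealiased split mirrors the new isTupleTypeDirect/isTupleType pair in Definitions.scala.

// Toy model, for illustration only: "direct" checks inspect the type as
// written; only the explicit wrapper looks through aliases.
sealed trait TypeLike
case class Ref(symbolName: String, args: List[TypeLike]) extends TypeLike
case class Alias(aliasName: String, underlying: TypeLike) extends TypeLike

object DirectVsDealiased {
  def dealias(tp: TypeLike): TypeLike = tp match {
    case Alias(_, underlying) => dealias(underlying)
    case other                => other
  }

  // No dealiasing: preserves the "true nature" of the type, which is what a
  // printer wants when deciding whether to show the alias name or (A, B).
  def isTupleTypeDirect(tp: TypeLike): Boolean = tp match {
    case Ref(name, args) => name.startsWith("Tuple") && args.nonEmpty
    case _               => false
  }

  // Semantic check: dealias first, as the new isTupleType does with dealiasWiden.
  def isTupleType(tp: TypeLike): Boolean = isTupleTypeDirect(dealias(tp))

  def main(args: Array[String]): Unit = {
    val pair  = Ref("Tuple2", List(Ref("Int", Nil), Ref("String", Nil)))
    val alias = Alias("Pair", pair)
    println(isTupleTypeDirect(alias)) // false: the alias stays visible for printing
    println(isTupleType(alias))       // true:  dealiasing applied only when asked for
  }
}

The point mirrored here is the one visible in the t1432.check change further down: the error message now keeps the alias name and adds "(which expands to)" rather than eagerly expanding it.
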
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6a9fa9a884..6b7aa2dddf 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -580,10 +580,11 @@ trait Definitions extends api.StandardDefinitions {
}
val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22
+
lazy val ProductClass: Array[ClassSymbol] = prepend(UnitClass, mkArityArray("Product", MaxProductArity, 1))
- lazy val TupleClass: Array[Symbol] = prepend(NoSymbol, mkArityArray("Tuple", MaxTupleArity, 1))
- lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
- lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
+ lazy val TupleClass: Array[Symbol] = prepend(null, mkArityArray("Tuple", MaxTupleArity, 1))
+ lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
+ lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
/** Creators for TupleN, ProductN, FunctionN. */
def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems)
@@ -608,6 +609,9 @@ trait Definitions extends api.StandardDefinitions {
// NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional?
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
def isProductNClass(sym: Symbol) = ProductClass contains sym
+ def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j))
+ def isFunctionSymbol(sym: Symbol) = FunctionClass contains unspecializedSymbol(sym)
+ def isProductNSymbol(sym: Symbol) = ProductClass contains unspecializedSymbol(sym)
def unspecializedSymbol(sym: Symbol): Symbol = {
if (sym hasFlag SPECIALIZED) {
@@ -618,31 +622,8 @@ trait Definitions extends api.StandardDefinitions {
}
else sym
}
-
- // Checks whether the given type is true for the given condition,
- // or if it is a specialized subtype of a type for which it is true.
- //
- // Origins notes:
- // An issue was introduced with specialization in that the implementation
- // of "isTupleType" in Definitions relied upon sym == TupleClass(elems.length).
- // This test is untrue for specialized tuples, causing mysterious behavior
- // because only some tuples are specialized.
- def isPossiblySpecializedType(tp: Type)(cond: Type => Boolean) = {
- cond(tp) || (tp match {
- case TypeRef(pre, sym, args) if sym hasFlag SPECIALIZED =>
- cond(tp baseType unspecializedSymbol(sym))
- case _ =>
- false
- })
- }
- // No normalization.
- def isTupleTypeDirect(tp: Type) = isPossiblySpecializedType(tp) {
- case TypeRef(_, sym, args) if args.nonEmpty =>
- val len = args.length
- len <= MaxTupleArity && sym == TupleClass(len)
- case _ => false
- }
- def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
+ def unspecializedTypeArgs(tp: Type): List[Type] =
+ (tp baseType unspecializedSymbol(tp.typeSymbolDirect)).typeArgs
def isMacroBundleType(tp: Type) = {
val isNonTrivial = tp != ErrorType && tp != NothingTpe && tp != NullTpe
@@ -654,6 +635,16 @@ trait Definitions extends api.StandardDefinitions {
def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass)
+ // These "direct" calls perform no dealiasing. They are most needed when
+ // printing types when one wants to preserve the true nature of the type.
+ def isFunctionTypeDirect(tp: Type) = isFunctionSymbol(tp.typeSymbolDirect)
+ def isTupleTypeDirect(tp: Type) = isTupleSymbol(tp.typeSymbolDirect)
+
+ // Note that these call .dealiasWiden and not .normalize, the latter of which
+ // tends to change the course of events by forcing types.
+ def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden)
+ def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
+
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement)
@@ -662,9 +653,13 @@ trait Definitions extends api.StandardDefinitions {
def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_)
def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j))
+ def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j)
+
+ /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */
+ def isExactProductType(tp: Type): Boolean = isProductNSymbol(tp.typeSymbol)
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
- def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match {
+ def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
case Some(x) => tpe.baseType(x).typeArgs
case _ => Nil
}
@@ -683,13 +678,9 @@ trait Definitions extends api.StandardDefinitions {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
}
-
- def isFunctionType(tp: Type): Boolean = tp.dealiasWiden match {
- case TypeRef(_, sym, args) if args.nonEmpty =>
- val arity = args.length - 1 // -1 is the return type
- arity <= MaxFunctionArity && sym == FunctionClass(arity)
- case _ =>
- false
+ def functionNBaseType(tp: Type): Type = tp.baseClasses find isFunctionSymbol match {
+ case Some(sym) => tp baseType unspecializedSymbol(sym)
+ case _ => tp
}
def isPartialFunctionType(tp: Type): Boolean = {
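
The removed "Origins notes" in the hunk above explain that a check of the form sym == TupleClass(n) fails for specialized tuples, which is why the new predicates route through unspecializedSymbol. As a rough, name-based illustration only (the real code works on symbols and the SPECIALIZED flag, not on class names), specialized variants such as scala.Tuple2$mcII$sp should count as their unspecialized base class:

// Illustrative only: recover the unspecialized class name from a specialized
// variant by stripping the "$mcXX$sp" suffix that specialization appends.
object UnspecializedDemo {
  private val SpecializedSuffix = """\$mc[A-Z]+\$sp$""".r

  def unspecializedName(name: String): String =
    SpecializedSuffix.replaceFirstIn(name, "")

  def isTupleClassName(name: String): Boolean =
    unspecializedName(name) matches """scala\.Tuple\d+"""

  def main(args: Array[String]): Unit = {
    println(isTupleClassName("scala.Tuple2$mcII$sp")) // true: specialized Tuple2
    println(isTupleClassName("scala.Tuple2"))         // true
    println(isTupleClassName("scala.Some"))           // false
  }
}
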
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 967146a130..11527d88ca 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -929,7 +929,7 @@ trait Types
* after `maxTostringRecursions` recursion levels. Uses `safeToString`
* to produce a string on each level.
*/
- override def toString: String = typeToString(this)
+ override final def toString: String = typeToString(this)
/** Method to be implemented in subclasses.
* Converts this type to a string in calling toString for its parts.
@@ -943,7 +943,9 @@ trait Types
else if ((str endsWith ".type") && !typeSymbol.isModuleClass)
widen match {
case RefinedType(_, _) => "" + widen
- case _ => s"$str (with underlying type $widen)"
+ case _ =>
+ if (widen.toString.trim == "") str
+ else s"$str (with underlying type $widen)"
}
else str
}
@@ -1557,10 +1559,10 @@ trait Types
override def isStructuralRefinement: Boolean =
typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
- override def safeToString: String = parentsString(parents) + (
- (if (settings.debug || parents.isEmpty || (decls.elems ne null))
- fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
- )
+ protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty
+ protected def initDecls = fullyInitializeScope(decls)
+ protected def scopeString = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else ""
+ override def safeToString = parentsString(parents) + scopeString
}
protected def computeBaseClasses(tpe: Type): List[Symbol] = {
@@ -1968,21 +1970,12 @@ trait Types
}
override def kind = "ClassInfoType"
-
- override def safeToString =
- if (settings.debug || decls.size > 1)
- formattedToString
- else
- super.safeToString
-
/** A nicely formatted string with newlines and such.
*/
- def formattedToString: String =
- parents.mkString("\n with ") + (
- if (settings.debug || parents.isEmpty || (decls.elems ne null))
- fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}")
- else ""
- )
+ def formattedToString = parents.mkString("\n with ") + scopeString
+ override protected def shouldForceScope = settings.debug || decls.size > 1
+ override protected def scopeString = initDecls.mkString(" {\n ", "\n ", "\n}")
+ override def safeToString = if (shouldForceScope) formattedToString else super.safeToString
}
object ClassInfoType extends ClassInfoTypeExtractor
@@ -2370,7 +2363,6 @@ trait Types
}
thisInfo.decls
}
-
protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
override def baseTypeSeq: BaseTypeSeq = {
@@ -2385,7 +2377,6 @@ trait Types
baseTypeSeqCache
}
}
-
// ensure that symbol is not a local copy with a name coincidence
private def needsPreString = (
settings.debug
@@ -2395,46 +2386,50 @@ trait Types
private def preString = if (needsPreString) pre.prefixString else ""
private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
- def refinementString = (
- if (sym.isStructuralRefinement) (
- fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
- map (_.defString)
- mkString("{", "; ", "}")
- )
+ private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
+ private def refinementString = (
+ if (sym.isStructuralRefinement)
+ refinementDecls map (_.defString) mkString("{", "; ", "}")
else ""
)
-
protected def finishPrefix(rest: String) = (
if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes)
parentsString(thisInfo.parents) + refinementString
else rest
)
+ private def noArgsString = finishPrefix(preString + sym.nameString)
+ private def tupleTypeString: String = args match {
+ case Nil => noArgsString
+ case arg :: Nil => s"($arg,)"
+ case _ => args.mkString("(", ", ", ")")
+ }
private def customToString = sym match {
case RepeatedParamClass => args.head + "*"
case ByNameParamClass => "=> " + args.head
case _ =>
- def targs = dealiasWiden.typeArgs
-
- if (isFunctionType(this)) {
+ if (isFunctionTypeDirect(this)) {
// Aesthetics: printing Function1 as T => R rather than (T) => R
// ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable
// from (T1, T2) => R.
- targs match {
- case in :: out :: Nil if !isTupleType(in) =>
- // A => B => C should be (A => B) => C or A => (B => C)
+ unspecializedTypeArgs(this) match {
+ // See neg/t588 for an example which arrives here - printing
+ // the type of a Function1 after erasure.
+ case Nil => noArgsString
+ case in :: out :: Nil if !isTupleTypeDirect(in) =>
+ // A => B => C should be (A => B) => C or A => (B => C).
// Also if A is byname, then we want (=> A) => B because => is right associative and => A => B
// would mean => (A => B) which is a different type
- val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
- val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
+ val in_s = if (isFunctionTypeDirect(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
+ val out_s = if (isFunctionTypeDirect(out)) "(" + out + ")" else "" + out
in_s + " => " + out_s
case xs =>
xs.init.mkString("(", ", ", ")") + " => " + xs.last
}
}
- else if (isTupleType(this))
- targs.mkString("(", ", ", if (hasLength(targs, 1)) ",)" else ")")
- else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne this.normalize))
- "" + normalize
+ else if (isTupleTypeDirect(this))
+ tupleTypeString
+ else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne dealias))
+ "" + dealias
else
""
}
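
The comments in customToString above state the printing rules for FunctionN and TupleN types. Here is a standalone restatement of those rules as a sketch, modelling Function1 only and using invented names (Tp, Fn, render); it is not compiler code, but it follows the same parenthesization logic:

// Illustrative restatement of the printer's rules: T => R for Function1, with
// parentheses when the argument is itself a function, by-name, or a tuple,
// because => is right-associative (A => B => C reads as A => (B => C)) and
// ((T1, T2)) => R must stay distinguishable from (T1, T2) => R.
sealed trait Tp
case class Named(name: String)    extends Tp
case class Fn(in: Tp, out: Tp)    extends Tp
case class ByName(underlying: Tp) extends Tp
case class Tup(elems: List[Tp])   extends Tp

object RenderRules {
  def render(tp: Tp): String = tp match {
    case Named(n)  => n
    case ByName(t) => "=> " + render(t)
    case Tup(elems) =>
      // Arity-1 tuples print with a trailing comma, as in tupleTypeString.
      if (elems.size == 1) s"(${render(elems.head)},)"
      else elems.map(render).mkString("(", ", ", ")")
    case Fn(in, out) =>
      val in_s = in match {
        case Fn(_, _) | ByName(_) | Tup(_) => "(" + render(in) + ")"
        case _                             => render(in)
      }
      val out_s = out match {
        case Fn(_, _) => "(" + render(out) + ")"
        case _        => render(out)
      }
      in_s + " => " + out_s
  }

  def main(args: Array[String]): Unit = {
    println(render(Fn(Fn(Named("A"), Named("B")), Named("C"))))          // (A => B) => C
    println(render(Fn(ByName(Named("A")), Named("B"))))                  // (=> A) => B
    println(render(Fn(Tup(List(Named("T1"), Named("T2"))), Named("R")))) // ((T1, T2)) => R
  }
}
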
diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check
index e69be3ef2c..5e3821b153 100644
--- a/test/files/neg/t1112.check
+++ b/test/files/neg/t1112.check
@@ -1,4 +1,4 @@
-t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => () => Unit)Unit
+t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => Test.this.Type1)Unit
call(0,() => System.out.println("here we are"))
^
one error found
diff --git a/test/files/neg/t1432.check b/test/files/neg/t1432.check
index 180cb05e67..d6cee4f5ed 100644
--- a/test/files/neg/t1432.check
+++ b/test/files/neg/t1432.check
@@ -1,6 +1,7 @@
-t1432.scala:10: error: type mismatch;
- found : (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double)
- required: (Int, Unit => Double)
+t1432.scala:12: error: type mismatch;
+ found : (Int, Bug_NoUnique.Alias2[Bug_NoUnique.Wrap[Unit]] => Double)
+ required: Bug_NoUnique.TypeCon[Unit]
+ (which expands to) (Int, Unit => Double)
def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
^
one error found
diff --git a/test/files/neg/t1432.scala b/test/files/neg/t1432.scala
index 638f36554f..bdf2331280 100644
--- a/test/files/neg/t1432.scala
+++ b/test/files/neg/t1432.scala
@@ -4,7 +4,9 @@ object Bug_NoUnique {
case class Wrap[E](parent:E) {}
- def wrap[E,A,Y](v : (A,E=>Y)) : (A,Wrap[E]=>Y) =
+ type Alias2[E] = Wrap[E]
+
+ def wrap[E,A,Y](v : (A,E=>Y)) : (A,Alias2[E]=>Y) =
throw new Error("Body here")
def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
diff --git a/test/files/run/t5256g.check b/test/files/run/t5256g.check
index c9c8d6e63d..d87eec6e63 100644
--- a/test/files/run/t5256g.check
+++ b/test/files/run/t5256g.check
@@ -1,3 +1,5 @@
anonymous class $anon$1
Test.$anon$1
-A with B{def <init>(): A with B}
+A with B {
+ def <init>(): A with B
+}